From 7ab205375f34ab38507ffe198e8710ee3b88ede5 Mon Sep 17 00:00:00 2001
From: Johannes Batzill
Date: Thu, 21 Sep 2023 11:30:43 -0700
Subject: [PATCH 01/14] remove all drone files but .github and .github_changelog_generator

---
 .dockerignore | 2 - .drone.yml | 107 - .gitignore | 13 - BUILDING | 11 - BUILDING_OSS | 11 - CHANGELOG.md | 797 ----- HISTORY.md | 326 -- LICENSE | 89 - NOTICE | 14 - Taskfile.yml | 129 - cmd/drone-server/bootstrap/bootstrap.go | 123 - cmd/drone-server/bootstrap/bootstrap_test.go | 170 - cmd/drone-server/config/config.go | 644 ---- cmd/drone-server/config/config_test.go | 48 - cmd/drone-server/inject_client.go | 286 -- cmd/drone-server/inject_external.go | 31 - cmd/drone-server/inject_license.go | 53 - cmd/drone-server/inject_login.go | 223 -- cmd/drone-server/inject_plugin.go | 171 - cmd/drone-server/inject_runner.go | 78 - cmd/drone-server/inject_scheduler.go | 34 - cmd/drone-server/inject_server.go | 147 - cmd/drone-server/inject_service.go | 220 -- cmd/drone-server/inject_store.go | 181 - cmd/drone-server/main.go | 193 -- cmd/drone-server/wire.go | 39 - cmd/drone-server/wire_gen.go | 120 - core/admission.go | 25 - core/batch.go | 31 - core/build.go | 143 - core/build_test.go | 7 - core/cancel.go | 27 - core/card.go | 41 - core/commit.go | 59 - core/config.go | 40 - core/convert.go | 35 - core/cron.go | 117 - core/cron_test.go | 7 - core/event.go | 26 - core/file.go | 40 - core/hook.go | 70 - core/hook_test.go | 7 - core/license.go | 75 - core/license_test.go | 7 - core/linker.go | 23 - core/logs.go | 70 - core/netrc.go | 58 - core/netrc_test.go | 7 - core/org.go | 38 - core/perm.go | 68 - core/pubsub.go | 37 - core/registry.go | 61 - core/renewer.go | 24 - core/repo.go | 161 - core/sched.go | 58 - core/secret.go | 135 - core/secret_test.go | 78 - core/session.go | 33 - core/stage.go | 106 - core/stage_test.go | 73 - core/status.go | 54 - core/step.go | 69 - core/step_test.go | 25 - core/syncer.go | 22 - core/system.go | 23 - core/template.go | 79 - core/transfer.go | 23 - core/trigger.go | 30 - core/trigger_test.go | 7 - core/user.go | 115 - core/user_test.go | 68 - core/validate.go | 49 - core/webhook.go | 59 - docker/Dockerfile.agent.linux.amd64 | 19 - docker/Dockerfile.agent.linux.arm | 12 - docker/Dockerfile.agent.linux.arm64 | 12 - docker/Dockerfile.agent.windows.1803 | 14 - docker/Dockerfile.agent.windows.1809 | 14 - docker/Dockerfile.agent.windows.1903 | 14 - docker/Dockerfile.controller.linux.amd64 | 15 - docker/Dockerfile.controller.linux.arm | 16 - docker/Dockerfile.controller.linux.arm64 | 16 - docker/Dockerfile.controller.windows.1803 | 12 - docker/Dockerfile.controller.windows.1809 | 12 - docker/Dockerfile.server.linux.amd64 | 27 - docker/Dockerfile.server.linux.arm | 27 - docker/Dockerfile.server.linux.arm64 | 27 - docker/compose/README.md | 57 - docker/compose/drone-gitea/docker-compose.yml | 40 - .../compose/drone-github/docker-compose.yml | 29 - docker/compose/gitea/docker-compose.yml | 21 - docker/manifest.agent.tmpl | 43 - docker/manifest.controller.tmpl | 37 - docker/manifest.server.tmpl | 19 - go.mod | 63 - go.sum | 414 --- handler/api/acl/acl.go | 59 - handler/api/acl/acl_test.go | 144 - handler/api/acl/check.go | 146 - handler/api/acl/check_test.go | 827 ----- handler/api/acl/org.go | 86 - handler/api/acl/org_test.go | 205 -- handler/api/acl/repo.go | 136 - handler/api/acl/repo_test.go | 205 -- handler/api/api.go | 401 --- handler/api/auth/auth.go | 56 - handler/api/auth/auth_test.go | 88 - handler/api/badge/badge.go | 23 - 
handler/api/badge/status.go | 77 - handler/api/badge/status_test.go | 232 -- handler/api/builds/builds.go | 43 - handler/api/builds/builds_oss.go | 37 - handler/api/builds/builds_test.go | 75 - handler/api/card/create.go | 103 - handler/api/card/create_test.go | 168 - handler/api/card/delete.go | 86 - handler/api/card/delete_test.go | 156 - handler/api/card/find.go | 82 - handler/api/card/find_test.go | 114 - handler/api/card/none.go | 76 - handler/api/ccmenu/cc.go | 64 - handler/api/ccmenu/cc_test.go | 146 - handler/api/ccmenu/ccmenu.go | 48 - handler/api/ccmenu/ccmenu_oss.go | 31 - handler/api/ccmenu/ccmenu_test.go | 135 - handler/api/errors/errors.go | 43 - handler/api/errors/errors_test.go | 14 - handler/api/events/build.go | 116 - handler/api/events/build_test.go | 5 - handler/api/events/global.go | 104 - handler/api/events/logs.go | 128 - handler/api/events/logs_test.go | 15 - handler/api/queue/items.go | 31 - handler/api/queue/items_test.go | 17 - handler/api/queue/none.go | 40 - handler/api/queue/pause.go | 31 - handler/api/queue/pause_test.go | 7 - handler/api/queue/resume.go | 31 - handler/api/queue/resume_test.go | 7 - handler/api/render/render.go | 122 - handler/api/render/render_test.go | 207 -- handler/api/repos/all.go | 55 - handler/api/repos/builds/branches/create.go | 15 - .../api/repos/builds/branches/create_test.go | 5 - handler/api/repos/builds/branches/delete.go | 62 - .../api/repos/builds/branches/delete_test.go | 5 - handler/api/repos/builds/branches/list.go | 61 - .../api/repos/builds/branches/list_test.go | 5 - handler/api/repos/builds/cancel.go | 220 -- handler/api/repos/builds/cancel_test.go | 78 - handler/api/repos/builds/create.go | 124 - handler/api/repos/builds/create_test.go | 173 - handler/api/repos/builds/deploys/create.go | 15 - .../api/repos/builds/deploys/create_test.go | 5 - handler/api/repos/builds/deploys/delete.go | 62 - .../api/repos/builds/deploys/delete_test.go | 5 - handler/api/repos/builds/deploys/list.go | 61 - handler/api/repos/builds/deploys/list_test.go | 5 - handler/api/repos/builds/find.go | 66 - handler/api/repos/builds/find_test.go | 181 - handler/api/repos/builds/latest.go | 64 - handler/api/repos/builds/latest_test.go | 154 - handler/api/repos/builds/list.go | 88 - handler/api/repos/builds/list_test.go | 235 -- handler/api/repos/builds/logs/delete.go | 83 - handler/api/repos/builds/logs/delete_test.go | 5 - handler/api/repos/builds/logs/find.go | 90 - handler/api/repos/builds/logs/find_test.go | 5 - handler/api/repos/builds/promote.go | 103 - handler/api/repos/builds/promote_oss.go | 37 - handler/api/repos/builds/promote_test.go | 269 -- handler/api/repos/builds/pulls/create.go | 15 - handler/api/repos/builds/pulls/create_test.go | 5 - handler/api/repos/builds/pulls/delete.go | 62 - handler/api/repos/builds/pulls/delete_test.go | 5 - handler/api/repos/builds/pulls/list.go | 61 - handler/api/repos/builds/pulls/list_test.go | 5 - handler/api/repos/builds/purge.go | 45 - handler/api/repos/builds/purge_oss.go | 28 - handler/api/repos/builds/purge_test.go | 144 - handler/api/repos/builds/retry.go | 116 - handler/api/repos/builds/retry_test.go | 231 -- handler/api/repos/builds/rollback.go | 103 - handler/api/repos/builds/rollback_oss.go | 37 - handler/api/repos/builds/rollback_test.go | 7 - handler/api/repos/builds/stages/approve.go | 89 - .../api/repos/builds/stages/approve_test.go | 628 ---- handler/api/repos/builds/stages/decline.go | 89 - .../api/repos/builds/stages/decline_test.go | 263 -- handler/api/repos/chown.go | 63 - 
handler/api/repos/chown_test.go | 125 - handler/api/repos/collabs/find.go | 67 - handler/api/repos/collabs/find_test.go | 162 - handler/api/repos/collabs/list.go | 53 - handler/api/repos/collabs/list_test.go | 150 - handler/api/repos/collabs/none.go | 40 - handler/api/repos/collabs/remove.go | 79 - handler/api/repos/collabs/remove_test.go | 185 - handler/api/repos/crons/create.go | 65 - handler/api/repos/crons/create_test.go | 224 -- handler/api/repos/crons/delete.go | 47 - handler/api/repos/crons/delete_test.go | 147 - handler/api/repos/crons/exec.go | 101 - handler/api/repos/crons/exec_test.go | 7 - handler/api/repos/crons/find.go | 43 - handler/api/repos/crons/find_test.go | 119 - handler/api/repos/crons/list.go | 41 - handler/api/repos/crons/list_test.go | 137 - handler/api/repos/crons/none.go | 53 - handler/api/repos/crons/update.go | 67 - handler/api/repos/crons/update_test.go | 164 - handler/api/repos/disable.go | 91 - handler/api/repos/disable_test.go | 163 - handler/api/repos/enable.go | 126 - handler/api/repos/enable_test.go | 181 - handler/api/repos/encrypt/encrypt.go | 91 - handler/api/repos/encrypt/encrypt_test.go | 1 - handler/api/repos/find.go | 34 - handler/api/repos/find_test.go | 76 - handler/api/repos/repair.go | 112 - handler/api/repos/repair_test.go | 352 -- handler/api/repos/secrets/create.go | 73 - handler/api/repos/secrets/create_test.go | 186 -- handler/api/repos/secrets/delete.go | 48 - handler/api/repos/secrets/delete_test.go | 147 - handler/api/repos/secrets/find.go | 44 - handler/api/repos/secrets/find_test.go | 119 - handler/api/repos/secrets/list.go | 47 - handler/api/repos/secrets/list_test.go | 148 - handler/api/repos/secrets/none.go | 48 - handler/api/repos/secrets/update.go | 82 - handler/api/repos/secrets/update_test.go | 229 -- handler/api/repos/sign/sign.go | 63 - handler/api/repos/sign/sign_test.go | 5 - handler/api/repos/update.go | 143 - handler/api/repos/update_test.go | 295 -- handler/api/request/context.go | 64 - handler/api/request/context_test.go | 5 - handler/api/secrets/all.go | 33 - handler/api/secrets/all_test.go | 65 - handler/api/secrets/create.go | 60 - handler/api/secrets/create_test.go | 139 - handler/api/secrets/delete.go | 38 - handler/api/secrets/delete_test.go | 105 - handler/api/secrets/find.go | 35 - handler/api/secrets/find_test.go | 81 - handler/api/secrets/list.go | 36 - handler/api/secrets/list_test.go | 105 - handler/api/secrets/none.go | 52 - handler/api/secrets/update.go | 72 - handler/api/secrets/update_test.go | 180 - handler/api/system/license.go | 22 - handler/api/system/limits.go | 7 - handler/api/system/none.go | 45 - handler/api/system/stats.go | 235 -- handler/api/system/stats_test.go | 17 - handler/api/template/all.go | 27 - handler/api/template/all_test.go | 84 - handler/api/template/create.go | 72 - handler/api/template/create_test.go | 154 - handler/api/template/delete.go | 38 - handler/api/template/delete_test.go | 105 - handler/api/template/find.go | 34 - handler/api/template/find_test.go | 74 - handler/api/template/list.go | 30 - handler/api/template/list_test.go | 70 - handler/api/template/none.go | 56 - handler/api/template/update.go | 67 - handler/api/template/update_test.go | 148 - handler/api/user/activity.go | 40 - handler/api/user/find.go | 32 - handler/api/user/find_test.go | 40 - handler/api/user/remote/repo.go | 60 - handler/api/user/remote/repos.go | 41 - handler/api/user/repos.go | 47 - handler/api/user/repos_test.go | 94 - handler/api/user/sync.go | 65 - handler/api/user/sync_test.go | 5 - 
handler/api/user/token.go | 46 - handler/api/user/token_test.go | 124 - handler/api/user/update.go | 52 - handler/api/user/update_test.go | 130 - handler/api/users/create.go | 116 - handler/api/users/create_test.go | 222 -- handler/api/users/delete.go | 71 - handler/api/users/delete_test.go | 102 - handler/api/users/find.go | 53 - handler/api/users/find_test.go | 129 - handler/api/users/list.go | 38 - handler/api/users/list_test.go | 77 - handler/api/users/repos.go | 54 - handler/api/users/token.go | 51 - handler/api/users/token_test.go | 131 - handler/api/users/update.go | 88 - handler/api/users/update_test.go | 168 - handler/health/health.go | 43 - handler/health/health_test.go | 21 - handler/web/hook.go | 135 - handler/web/link/link.go | 68 - handler/web/link/link_test.go | 15 - handler/web/login.go | 229 -- handler/web/login_test.go | 5 - handler/web/logout.go | 33 - handler/web/logout_test.go | 25 - handler/web/pages.go | 172 - handler/web/varz.go | 75 - handler/web/varz_test.go | 66 - handler/web/version.go | 36 - handler/web/version_test.go | 33 - handler/web/web.go | 141 - handler/web/writer.go | 98 - handler/web/writer_test.go | 144 - livelog/livelog.go | 32 - livelog/livelog_oss.go | 27 - livelog/stream.go | 92 - livelog/stream_redis.go | 226 -- livelog/stream_test.go | 119 - livelog/streamer.go | 94 - livelog/streamer_test.go | 119 - livelog/sub.go | 50 - livelog/sub_test.go | 83 - logger/handler.go | 55 - logger/handler_test.go | 38 - logger/logger.go | 50 - logger/logger_test.go | 47 - metric/builds.go | 52 - metric/builds_test.go | 137 - metric/handler.go | 53 - metric/handler_oss.go | 35 - metric/handler_test.go | 88 - metric/license.go | 16 - metric/license_test.go | 7 - metric/metric.go | 7 - metric/metric_oss.go | 27 - metric/repos.go | 26 - metric/repos_test.go | 56 - metric/sink/config.go | 34 - metric/sink/datadog.go | 172 - metric/sink/datadog_test.go | 95 - metric/sink/tags.go | 93 - metric/stages.go | 39 - metric/stages_test.go | 97 - metric/users.go | 30 - metric/users_test.go | 56 - mock/mock.go | 9 - mock/mock_gen.go | 2976 ----------------- mock/mockscm/mock.go | 9 - mock/mockscm/mock_gen.go | 815 ----- operator/manager/manager.go | 557 --- operator/manager/manager_test.go | 15 - operator/manager/rpc/client.go | 329 -- operator/manager/rpc/client_test.go | 506 --- operator/manager/rpc/error.go | 16 - operator/manager/rpc/server.go | 286 -- operator/manager/rpc/server_oss.go | 101 - operator/manager/rpc/server_test.go | 7 - operator/manager/rpc/types.go | 67 - operator/manager/rpc2/client.go | 7 - operator/manager/rpc2/handler.go | 321 -- operator/manager/rpc2/server.go | 58 - operator/manager/rpc2/server_oss.go | 33 - operator/manager/rpc2/types.go | 27 - operator/manager/setup.go | 184 - operator/manager/setup_test.go | 5 - operator/manager/teardown.go | 354 -- operator/manager/teardown_test.go | 5 - operator/manager/updater.go | 100 - operator/manager/util.go | 112 - operator/manager/util_test.go | 5 - operator/runner/after.go | 15 - operator/runner/after_test.go | 5 - operator/runner/before.go | 15 - operator/runner/before_test.go | 5 - operator/runner/convert.go | 77 - operator/runner/convert_test.go | 100 - operator/runner/env.go | 168 - operator/runner/env_test.go | 50 - operator/runner/machine/client.go | 47 - operator/runner/machine/config.go | 65 - operator/runner/machine/config_test.go | 7 - operator/runner/machine/machine.go | 57 - operator/runner/machine/machine_test.go | 15 - operator/runner/runner.go | 611 ---- operator/runner/runner_test.go | 15 - 
operator/runner/secrets.go | 25 - plugin/admission/account.go | 73 - plugin/admission/account_oss.go | 24 - plugin/admission/account_test.go | 108 - plugin/admission/combine.go | 39 - plugin/admission/combine_test.go | 43 - plugin/admission/external.go | 75 - plugin/admission/external_oss.go | 24 - plugin/admission/external_test.go | 7 - plugin/admission/nobot.go | 59 - plugin/admission/nobot_oss.go | 28 - plugin/admission/nobot_test.go | 95 - plugin/admission/noop.go | 28 - plugin/admission/open.go | 49 - plugin/admission/open_test.go | 36 - plugin/config/combine.go | 52 - plugin/config/combine_test.go | 128 - plugin/config/global.go | 136 - plugin/config/global_oss.go | 35 - plugin/config/global_test.go | 126 - plugin/config/jsonnet.go | 77 - plugin/config/jsonnet_oss.go | 24 - plugin/config/jsonnet_test.go | 7 - plugin/config/memoize.go | 106 - plugin/config/memoize_oss.go | 29 - plugin/config/memoize_test.go | 159 - plugin/config/repo.go | 41 - plugin/config/repo_test.go | 75 - plugin/converter/combine.go | 56 - plugin/converter/combine_test.go | 122 - plugin/converter/jsonnet.go | 55 - plugin/converter/jsonnet/jsonnet.go | 202 -- plugin/converter/jsonnet/jsonnet_test.go | 108 - plugin/converter/jsonnet_oss.go | 27 - plugin/converter/jsonnet_test.go | 293 -- plugin/converter/legacy.go | 34 - plugin/converter/legacy_oss.go | 27 - plugin/converter/memoize.go | 111 - plugin/converter/memoize_oss.go | 29 - plugin/converter/memoize_test.go | 159 - plugin/converter/noop.go | 29 - plugin/converter/remote.go | 149 - plugin/converter/remote_oss.go | 29 - plugin/converter/remote_test.go | 55 - plugin/converter/starlark.go | 65 - plugin/converter/starlark/args.go | 179 - plugin/converter/starlark/starlark.go | 149 - plugin/converter/starlark/starlark_test.go | 107 - plugin/converter/starlark/write.go | 99 - plugin/converter/starlark_oss.go | 23 - plugin/converter/starlark_test.go | 198 -- plugin/converter/template.go | 135 - plugin/converter/template_oss.go | 37 - plugin/converter/template_test.go | 553 --- plugin/converter/testdata/drone.yml | 6 - plugin/converter/testdata/input.jsonnet | 18 - .../converter/testdata/input.jsonnet.golden | 15 - .../converter/testdata/jsonnet.template.yml | 6 - plugin/converter/testdata/multi.star | 6 - plugin/converter/testdata/multi.star.golden | 2 - plugin/converter/testdata/single.jsonnet | 14 - plugin/converter/testdata/single.star | 11 - plugin/converter/testdata/single.star.golden | 1 - .../testdata/starlark-nested.template.yml | 8 - .../testdata/starlark.input-nested.star | 8 - .../starlark.input-nested.star.golden | 1 - plugin/converter/testdata/starlark.input.star | 14 - .../testdata/starlark.input.star.golden | 1 - .../converter/testdata/starlark.template.yml | 7 - plugin/converter/testdata/yaml.input.golden | 8 - plugin/converter/testdata/yaml.input.yml | 8 - .../testdata/yaml.template.comment.yml | 8 - .../testdata/yaml.template.invalid.yml | 6 - plugin/converter/testdata/yaml.template.yml | 6 - plugin/registry/auths/auth.go | 96 - plugin/registry/auths/auth_test.go | 112 - plugin/registry/auths/testdata/config.json | 7 - plugin/registry/auths/testdata/config2.json | 7 - plugin/registry/combine.go | 59 - plugin/registry/combine_test.go | 52 - plugin/registry/encrypted.go | 121 - plugin/registry/endpoint.go | 62 - plugin/registry/endpoint_oss.go | 24 - plugin/registry/endpoint_test.go | 89 - plugin/registry/external.go | 183 - plugin/registry/external_oss.go | 24 - plugin/registry/external_test.go | 7 - plugin/registry/file.go | 47 - 
plugin/registry/file_oss.go | 24 - plugin/registry/file_test.go | 41 - plugin/registry/noop.go | 27 - plugin/registry/static.go | 70 - plugin/registry/static_test.go | 121 - plugin/secret/combine.go | 68 - plugin/secret/combine_test.go | 5 - plugin/secret/encrypted.go | 119 - plugin/secret/encrypted_test.go | 15 - plugin/secret/external.go | 175 - plugin/secret/external_oss.go | 34 - plugin/secret/external_test.go | 5 - plugin/secret/static.go | 48 - plugin/secret/static_test.go | 98 - plugin/validator/combine.go | 40 - plugin/validator/combine_test.go | 58 - plugin/validator/filter.go | 66 - plugin/validator/filter_test.go | 70 - plugin/validator/noop.go | 27 - plugin/validator/remote.go | 123 - plugin/validator/remote_oss.go | 29 - plugin/validator/remote_test.go | 7 - plugin/webhook/config.go | 25 - plugin/webhook/webhook.go | 138 - plugin/webhook/webhook_oss.go | 34 - plugin/webhook/webhook_test.go | 163 - pubsub/doc.go | 18 - pubsub/hub.go | 73 - pubsub/hub_redis.go | 124 - pubsub/hub_test.go | 58 - pubsub/pubsub.go | 32 - pubsub/pubsub_oss.go | 27 - pubsub/sub.go | 50 - pubsub/sub_test.go | 85 - scheduler/queue/canceller.go | 90 - scheduler/queue/canceller_redis.go | 160 - scheduler/queue/canceller_test.go | 36 - scheduler/queue/queue.go | 348 -- scheduler/queue/queue_test.go | 418 --- scheduler/queue/scheduler.go | 29 - scheduler/queue/scheduler_non_oss.go | 46 - scheduler/queue/scheduler_oss.go | 33 - scheduler/queue/scheduler_redis.go | 31 - scheduler/scheduler.go | 7 - scripts/build.sh | 8 - server/server.go | 167 - service/canceler/canceler.go | 269 -- service/canceler/canceler_test.go | 143 - service/canceler/match.go | 52 - service/canceler/match_test.go | 179 - service/canceler/reaper/reaper.go | 208 -- service/canceler/reaper/reaper_test.go | 396 --- service/canceler/reaper/util.go | 28 - service/canceler/reaper/util_test.go | 75 - service/commit/commit.go | 141 - service/commit/commit_test.go | 345 -- service/content/cache/contents.go | 52 - service/content/cache/contents_oss.go | 25 - service/content/cache/contents_test.go | 100 - service/content/content.go | 100 - service/content/content_test.go | 96 - service/hook/hook.go | 72 - service/hook/hook_test.go | 130 - service/hook/parser/parse.go | 406 --- service/hook/parser/parse_test.go | 5 - .../parser/testdata/bitbucket_pull_open.json | 43 - .../testdata/bitbucket_pull_open.json.golden | 0 .../hook/parser/testdata/bitbucket_push.json | 41 - .../testdata/bitbucket_push.json.golden | 0 .../hook/parser/testdata/bitbucket_tag.json | 0 .../parser/testdata/bitbucket_tag.json.golden | 26 - .../hook/parser/testdata/gitea_pull_open.json | 47 - .../testdata/gitea_pull_open.json.golden | 0 service/hook/parser/testdata/gitea_push.json | 41 - .../parser/testdata/gitea_push.json.golden | 0 service/hook/parser/testdata/gitea_tag.json | 26 - .../parser/testdata/gitea_tag.json.golden | 0 .../parser/testdata/github_pull_create.json | 43 - .../testdata/github_pull_create.json.golden | 0 .../parser/testdata/github_pull_sync.json | 43 - .../testdata/github_pull_sync.json.golden | 0 service/hook/parser/testdata/github_push.json | 41 - .../parser/testdata/github_push.json.golden | 0 .../hook/parser/testdata/github_push_tag.json | 41 - .../testdata/github_push_tag.json.golden | 0 .../parser/testdata/gitlab_pull_open.json | 43 - .../testdata/gitlab_pull_open.json.golden | 0 service/hook/parser/testdata/gitlab_push.json | 41 - .../parser/testdata/gitlab_push.json.golden | 0 service/hook/parser/testdata/gitlab_tag.json | 26 - 
.../parser/testdata/gitlab_tag.json.golden | 0 .../parser/testdata/gogs_pull_create.json | 43 - service/hook/parser/testdata/gogs_push.json | 41 - .../parser/testdata/gogs_push.json.golden | 0 service/hook/parser/testdata/gogs_tag.json | 26 - .../hook/parser/testdata/gogs_tag.json.golden | 0 .../parser/testdata/stash_pull_create.json | 43 - .../testdata/stash_pull_create.json.golden | 0 service/hook/parser/testdata/stash_push.json | 41 - .../parser/testdata/stash_push.json.golden | 0 .../hook/parser/testdata/stash_push_tag.json | 26 - .../testdata/stash_push_tag.json.golden | 0 service/hook/util.go | 60 - service/hook/util_test.go | 176 - service/license/load.go | 89 - service/license/load_test.go | 18 - service/license/nolimit.go | 28 - service/license/nolimit_oss.go | 28 - service/license/service.go | 74 - service/linker/linker.go | 40 - service/linker/linker_test.go | 15 - service/netrc/netrc.go | 91 - service/netrc/netrc_test.go | 236 -- service/org/cache.go | 99 - service/org/cache_test.go | 90 - service/org/org.go | 92 - service/org/org_test.go | 126 - service/redisdb/lockerr.go | 32 - service/redisdb/redisdb.go | 186 -- service/repo/repo.go | 110 - service/repo/repo_test.go | 289 -- service/repo/util.go | 59 - service/repo/util_test.go | 98 - service/status/status.go | 95 - service/status/status_test.go | 119 - service/status/util.go | 93 - service/status/util_test.go | 169 - service/syncer/filter.go | 40 - service/syncer/filter_oss.go | 33 - service/syncer/filter_test.go | 47 - service/syncer/syncer.go | 239 -- service/syncer/syncer_test.go | 485 --- service/syncer/util.go | 62 - service/syncer/util_test.go | 195 -- service/token/renew.go | 77 - service/token/renew_test.go | 7 - service/transfer/transfer.go | 113 - service/transfer/transfer_test.go | 119 - service/user/user.go | 77 - service/user/user_test.go | 89 - session/config.go | 33 - session/session.go | 113 - session/session_test.go | 217 -- session/testdata/mapping.json | 4 - store/batch/batch.go | 360 -- store/batch/batch_test.go | 338 -- store/batch2/batch.go | 428 --- store/batch2/batch_test.go | 397 --- store/build/build.go | 843 ----- store/build/build_test.go | 482 --- store/build/scan.go | 172 - store/card/card.go | 149 - store/card/card_oss.go | 47 - store/card/card_test.go | 122 - store/card/scan.go | 33 - store/cron/cron.go | 244 -- store/cron/cron_oss.go | 59 - store/cron/cron_test.go | 210 -- store/cron/scan.go | 71 - store/logs/azureblob.go | 100 - store/logs/azureblob_oss.go | 24 - store/logs/combine.go | 61 - store/logs/logs.go | 135 - store/logs/logs_test.go | 125 - store/logs/s3.go | 99 - store/logs/s3_oss.go | 24 - store/logs/s3_test.go | 37 - store/logs/scan.go | 26 - store/perm/perm.go | 177 - store/perm/perm_test.go | 165 - store/perm/scan.go | 86 - store/repos/repos.go | 585 ---- store/repos/repos_test.go | 352 -- store/repos/scan.go | 259 -- store/repos/scan_test.go | 5 - store/repos/testdata/repo.json | 53 - store/repos/testdata/repo.json.golden | 53 - store/repos/type.go | 105 - store/secret/global/scan.go | 74 - store/secret/global/secret.go | 233 -- store/secret/global/secret_oss.go | 60 - store/secret/global/secret_test.go | 165 - store/secret/scan.go | 72 - store/secret/secret.go | 212 -- store/secret/secret_oss.go | 56 - store/secret/secret_test.go | 238 -- store/shared/db/conn.go | 86 - store/shared/db/conn_oss.go | 45 - store/shared/db/conn_test.go | 5 - store/shared/db/db.go | 134 - store/shared/db/db_test.go | 5 - store/shared/db/dbtest/dbtest.go | 69 - store/shared/db/error.go | 22 - 
store/shared/db/nop.go | 22 - store/shared/encrypt/aesgcm.go | 80 - store/shared/encrypt/aesgcm_test.go | 55 - store/shared/encrypt/encrypt.go | 47 - store/shared/encrypt/none.go | 29 - store/shared/encrypt/none_test.go | 22 - store/shared/migrate/README.md | 32 - store/shared/migrate/mysql/ddl.go | 9 - store/shared/migrate/mysql/ddl_gen.go | 793 ----- .../mysql/files/001_create_table_user.sql | 22 - .../mysql/files/002_create_table_repos.sql | 55 - .../mysql/files/003_create_table_perms.sql | 23 - .../mysql/files/004_create_table_builds.sql | 58 - .../mysql/files/005_create_table_stages.sql | 67 - .../mysql/files/006_create_table_steps.sql | 20 - .../mysql/files/007_create_table_logs.sql | 6 - .../mysql/files/008_create_table_cron.sql | 27 - .../mysql/files/009_create_table_secrets.sql | 20 - .../mysql/files/010_create_table_nodes.sql | 33 - .../files/011_add_column_builds_cron.sql | 3 - .../files/012_create_table_global_secrets.sql | 12 - .../files/013_add_column_builds_deploy_id.sql | 3 - .../mysql/files/014_create_table_refs.sql | 16 - .../files/015_create_table_templates.sql | 15 - .../mysql/files/016_add_columns_steps.sql | 11 - .../mysql/files/017_create_table_cards.sql | 16 - .../mysql/files/018_amend_table_cards.sql | 16 - store/shared/migrate/postgres/ddl.go | 9 - store/shared/migrate/postgres/ddl_gen.go | 771 ----- .../postgres/files/001_create_table_user.sql | 22 - .../postgres/files/002_create_table_repos.sql | 55 - .../postgres/files/003_create_table_perms.sql | 23 - .../files/004_create_table_builds.sql | 63 - .../files/005_create_table_stages.sql | 44 - .../postgres/files/006_create_table_steps.sql | 20 - .../postgres/files/007_create_table_logs.sql | 6 - .../postgres/files/008_create_table_cron.sql | 27 - .../files/009_create_table_secrets.sql | 20 - .../postgres/files/010_create_table_nodes.sql | 33 - .../files/011_add_column_builds_cron.sql | 3 - .../files/012_create_table_org_secrets.sql | 12 - .../files/013_add_column_builds_deploy_id.sql | 3 - .../postgres/files/015_create_table_refs.sql | 16 - .../files/016_create_template_tables.sql | 15 - .../postgres/files/017_add_columns_steps.sql | 11 - .../postgres/files/018_create_table_cards.sql | 16 - .../postgres/files/019_amend_table_cards.sql | 16 - store/shared/migrate/sqlite/ddl.go | 17 - store/shared/migrate/sqlite/ddl_gen.go | 773 ----- .../sqlite/files/001_create_table_user.sql | 22 - .../sqlite/files/002_create_table_repos.sql | 55 - .../sqlite/files/003_create_table_perms.sql | 23 - .../sqlite/files/004_create_table_builds.sql | 62 - .../sqlite/files/005_create_table_stages.sql | 45 - .../sqlite/files/006_create_table_steps.sql | 21 - .../sqlite/files/007_create_table_logs.sql | 7 - .../sqlite/files/008_create_table_cron.sql | 27 - .../sqlite/files/009_create_table_secrets.sql | 20 - .../sqlite/files/010_create_table_nodes.sql | 33 - .../files/011_add_column_builds_cron.sql | 3 - .../files/012_create_table_org_secrets.sql | 12 - .../files/013_add_column_builds_deploy_id.sql | 3 - .../sqlite/files/014_create_table_refs.sql | 16 - .../files/015_create_template_tables.sql | 15 - .../sqlite/files/016_add_columns_steps.sql | 11 - .../sqlite/files/017_create_table_cards.sql | 16 - .../sqlite/files/018_amend_table_cards.sql | 16 - store/stage/scan.go | 206 -- store/stage/stage.go | 424 --- store/stage/stage_test.go | 252 -- store/stage/type.go | 66 - store/step/scan.go | 94 - store/step/step.go | 236 -- store/step/step_test.go | 199 -- store/store.go | 15 - store/template/scan.go | 61 - store/template/template.go | 234 -- 
store/template/template_oss.go | 58 - store/template/template_test.go | 224 -- store/user/scan.go | 107 - store/user/user.go | 332 -- store/user/user_test.go | 245 -- trigger/change.go | 72 - trigger/change_test.go | 136 - trigger/cron/cron.go | 196 -- trigger/cron/cron_oss.go | 43 - trigger/cron/cron_test.go | 489 --- trigger/dag/dag.go | 148 - trigger/dag/dag_test.go | 211 -- trigger/skip.go | 104 - trigger/skip_test.go | 266 -- trigger/trigger.go | 643 ---- trigger/trigger_test.go | 659 ---- version/version.go | 44 - version/version_test.go | 16 - web/README | 1 - 761 files changed, 72822 deletions(-) delete mode 100644 .dockerignore delete mode 100644 .drone.yml delete mode 100644 .gitignore delete mode 100644 BUILDING delete mode 100644 BUILDING_OSS delete mode 100644 CHANGELOG.md delete mode 100644 HISTORY.md delete mode 100644 LICENSE delete mode 100644 NOTICE delete mode 100644 Taskfile.yml delete mode 100644 cmd/drone-server/bootstrap/bootstrap.go delete mode 100644 cmd/drone-server/bootstrap/bootstrap_test.go delete mode 100644 cmd/drone-server/config/config.go delete mode 100644 cmd/drone-server/config/config_test.go delete mode 100644 cmd/drone-server/inject_client.go delete mode 100644 cmd/drone-server/inject_external.go delete mode 100644 cmd/drone-server/inject_license.go delete mode 100644 cmd/drone-server/inject_login.go delete mode 100644 cmd/drone-server/inject_plugin.go delete mode 100644 cmd/drone-server/inject_runner.go delete mode 100644 cmd/drone-server/inject_scheduler.go delete mode 100644 cmd/drone-server/inject_server.go delete mode 100644 cmd/drone-server/inject_service.go delete mode 100644 cmd/drone-server/inject_store.go delete mode 100644 cmd/drone-server/main.go delete mode 100644 cmd/drone-server/wire.go delete mode 100644 cmd/drone-server/wire_gen.go delete mode 100644 core/admission.go delete mode 100644 core/batch.go delete mode 100644 core/build.go delete mode 100644 core/build_test.go delete mode 100644 core/cancel.go delete mode 100644 core/card.go delete mode 100644 core/commit.go delete mode 100644 core/config.go delete mode 100644 core/convert.go delete mode 100644 core/cron.go delete mode 100644 core/cron_test.go delete mode 100644 core/event.go delete mode 100644 core/file.go delete mode 100644 core/hook.go delete mode 100644 core/hook_test.go delete mode 100644 core/license.go delete mode 100644 core/license_test.go delete mode 100644 core/linker.go delete mode 100644 core/logs.go delete mode 100644 core/netrc.go delete mode 100644 core/netrc_test.go delete mode 100644 core/org.go delete mode 100644 core/perm.go delete mode 100644 core/pubsub.go delete mode 100644 core/registry.go delete mode 100644 core/renewer.go delete mode 100644 core/repo.go delete mode 100644 core/sched.go delete mode 100644 core/secret.go delete mode 100644 core/secret_test.go delete mode 100644 core/session.go delete mode 100644 core/stage.go delete mode 100644 core/stage_test.go delete mode 100644 core/status.go delete mode 100644 core/step.go delete mode 100644 core/step_test.go delete mode 100644 core/syncer.go delete mode 100644 core/system.go delete mode 100644 core/template.go delete mode 100644 core/transfer.go delete mode 100644 core/trigger.go delete mode 100644 core/trigger_test.go delete mode 100644 core/user.go delete mode 100644 core/user_test.go delete mode 100644 core/validate.go delete mode 100644 core/webhook.go delete mode 100644 docker/Dockerfile.agent.linux.amd64 delete mode 100644 docker/Dockerfile.agent.linux.arm delete mode 100644 
docker/Dockerfile.agent.linux.arm64 delete mode 100644 docker/Dockerfile.agent.windows.1803 delete mode 100644 docker/Dockerfile.agent.windows.1809 delete mode 100644 docker/Dockerfile.agent.windows.1903 delete mode 100644 docker/Dockerfile.controller.linux.amd64 delete mode 100644 docker/Dockerfile.controller.linux.arm delete mode 100644 docker/Dockerfile.controller.linux.arm64 delete mode 100644 docker/Dockerfile.controller.windows.1803 delete mode 100644 docker/Dockerfile.controller.windows.1809 delete mode 100644 docker/Dockerfile.server.linux.amd64 delete mode 100644 docker/Dockerfile.server.linux.arm delete mode 100644 docker/Dockerfile.server.linux.arm64 delete mode 100644 docker/compose/README.md delete mode 100644 docker/compose/drone-gitea/docker-compose.yml delete mode 100644 docker/compose/drone-github/docker-compose.yml delete mode 100644 docker/compose/gitea/docker-compose.yml delete mode 100644 docker/manifest.agent.tmpl delete mode 100644 docker/manifest.controller.tmpl delete mode 100644 docker/manifest.server.tmpl delete mode 100644 go.mod delete mode 100644 go.sum delete mode 100644 handler/api/acl/acl.go delete mode 100644 handler/api/acl/acl_test.go delete mode 100644 handler/api/acl/check.go delete mode 100644 handler/api/acl/check_test.go delete mode 100644 handler/api/acl/org.go delete mode 100644 handler/api/acl/org_test.go delete mode 100644 handler/api/acl/repo.go delete mode 100644 handler/api/acl/repo_test.go delete mode 100644 handler/api/api.go delete mode 100644 handler/api/auth/auth.go delete mode 100644 handler/api/auth/auth_test.go delete mode 100644 handler/api/badge/badge.go delete mode 100644 handler/api/badge/status.go delete mode 100644 handler/api/badge/status_test.go delete mode 100644 handler/api/builds/builds.go delete mode 100644 handler/api/builds/builds_oss.go delete mode 100644 handler/api/builds/builds_test.go delete mode 100644 handler/api/card/create.go delete mode 100644 handler/api/card/create_test.go delete mode 100644 handler/api/card/delete.go delete mode 100644 handler/api/card/delete_test.go delete mode 100644 handler/api/card/find.go delete mode 100644 handler/api/card/find_test.go delete mode 100644 handler/api/card/none.go delete mode 100644 handler/api/ccmenu/cc.go delete mode 100644 handler/api/ccmenu/cc_test.go delete mode 100644 handler/api/ccmenu/ccmenu.go delete mode 100644 handler/api/ccmenu/ccmenu_oss.go delete mode 100644 handler/api/ccmenu/ccmenu_test.go delete mode 100644 handler/api/errors/errors.go delete mode 100644 handler/api/errors/errors_test.go delete mode 100644 handler/api/events/build.go delete mode 100644 handler/api/events/build_test.go delete mode 100644 handler/api/events/global.go delete mode 100644 handler/api/events/logs.go delete mode 100644 handler/api/events/logs_test.go delete mode 100644 handler/api/queue/items.go delete mode 100644 handler/api/queue/items_test.go delete mode 100644 handler/api/queue/none.go delete mode 100644 handler/api/queue/pause.go delete mode 100644 handler/api/queue/pause_test.go delete mode 100644 handler/api/queue/resume.go delete mode 100644 handler/api/queue/resume_test.go delete mode 100644 handler/api/render/render.go delete mode 100644 handler/api/render/render_test.go delete mode 100644 handler/api/repos/all.go delete mode 100644 handler/api/repos/builds/branches/create.go delete mode 100644 handler/api/repos/builds/branches/create_test.go delete mode 100644 handler/api/repos/builds/branches/delete.go delete mode 100644 
handler/api/repos/builds/branches/delete_test.go delete mode 100644 handler/api/repos/builds/branches/list.go delete mode 100644 handler/api/repos/builds/branches/list_test.go delete mode 100644 handler/api/repos/builds/cancel.go delete mode 100644 handler/api/repos/builds/cancel_test.go delete mode 100644 handler/api/repos/builds/create.go delete mode 100644 handler/api/repos/builds/create_test.go delete mode 100644 handler/api/repos/builds/deploys/create.go delete mode 100644 handler/api/repos/builds/deploys/create_test.go delete mode 100644 handler/api/repos/builds/deploys/delete.go delete mode 100644 handler/api/repos/builds/deploys/delete_test.go delete mode 100644 handler/api/repos/builds/deploys/list.go delete mode 100644 handler/api/repos/builds/deploys/list_test.go delete mode 100644 handler/api/repos/builds/find.go delete mode 100644 handler/api/repos/builds/find_test.go delete mode 100644 handler/api/repos/builds/latest.go delete mode 100644 handler/api/repos/builds/latest_test.go delete mode 100644 handler/api/repos/builds/list.go delete mode 100644 handler/api/repos/builds/list_test.go delete mode 100644 handler/api/repos/builds/logs/delete.go delete mode 100644 handler/api/repos/builds/logs/delete_test.go delete mode 100644 handler/api/repos/builds/logs/find.go delete mode 100644 handler/api/repos/builds/logs/find_test.go delete mode 100644 handler/api/repos/builds/promote.go delete mode 100644 handler/api/repos/builds/promote_oss.go delete mode 100644 handler/api/repos/builds/promote_test.go delete mode 100644 handler/api/repos/builds/pulls/create.go delete mode 100644 handler/api/repos/builds/pulls/create_test.go delete mode 100644 handler/api/repos/builds/pulls/delete.go delete mode 100644 handler/api/repos/builds/pulls/delete_test.go delete mode 100644 handler/api/repos/builds/pulls/list.go delete mode 100644 handler/api/repos/builds/pulls/list_test.go delete mode 100644 handler/api/repos/builds/purge.go delete mode 100644 handler/api/repos/builds/purge_oss.go delete mode 100644 handler/api/repos/builds/purge_test.go delete mode 100644 handler/api/repos/builds/retry.go delete mode 100644 handler/api/repos/builds/retry_test.go delete mode 100644 handler/api/repos/builds/rollback.go delete mode 100644 handler/api/repos/builds/rollback_oss.go delete mode 100644 handler/api/repos/builds/rollback_test.go delete mode 100644 handler/api/repos/builds/stages/approve.go delete mode 100644 handler/api/repos/builds/stages/approve_test.go delete mode 100644 handler/api/repos/builds/stages/decline.go delete mode 100644 handler/api/repos/builds/stages/decline_test.go delete mode 100644 handler/api/repos/chown.go delete mode 100644 handler/api/repos/chown_test.go delete mode 100644 handler/api/repos/collabs/find.go delete mode 100644 handler/api/repos/collabs/find_test.go delete mode 100644 handler/api/repos/collabs/list.go delete mode 100644 handler/api/repos/collabs/list_test.go delete mode 100644 handler/api/repos/collabs/none.go delete mode 100644 handler/api/repos/collabs/remove.go delete mode 100644 handler/api/repos/collabs/remove_test.go delete mode 100644 handler/api/repos/crons/create.go delete mode 100644 handler/api/repos/crons/create_test.go delete mode 100644 handler/api/repos/crons/delete.go delete mode 100644 handler/api/repos/crons/delete_test.go delete mode 100644 handler/api/repos/crons/exec.go delete mode 100644 handler/api/repos/crons/exec_test.go delete mode 100644 handler/api/repos/crons/find.go delete mode 100644 handler/api/repos/crons/find_test.go delete mode 
100644 handler/api/repos/crons/list.go delete mode 100644 handler/api/repos/crons/list_test.go delete mode 100644 handler/api/repos/crons/none.go delete mode 100644 handler/api/repos/crons/update.go delete mode 100644 handler/api/repos/crons/update_test.go delete mode 100644 handler/api/repos/disable.go delete mode 100644 handler/api/repos/disable_test.go delete mode 100644 handler/api/repos/enable.go delete mode 100644 handler/api/repos/enable_test.go delete mode 100644 handler/api/repos/encrypt/encrypt.go delete mode 100644 handler/api/repos/encrypt/encrypt_test.go delete mode 100644 handler/api/repos/find.go delete mode 100644 handler/api/repos/find_test.go delete mode 100644 handler/api/repos/repair.go delete mode 100644 handler/api/repos/repair_test.go delete mode 100644 handler/api/repos/secrets/create.go delete mode 100644 handler/api/repos/secrets/create_test.go delete mode 100644 handler/api/repos/secrets/delete.go delete mode 100644 handler/api/repos/secrets/delete_test.go delete mode 100644 handler/api/repos/secrets/find.go delete mode 100644 handler/api/repos/secrets/find_test.go delete mode 100644 handler/api/repos/secrets/list.go delete mode 100644 handler/api/repos/secrets/list_test.go delete mode 100644 handler/api/repos/secrets/none.go delete mode 100644 handler/api/repos/secrets/update.go delete mode 100644 handler/api/repos/secrets/update_test.go delete mode 100644 handler/api/repos/sign/sign.go delete mode 100644 handler/api/repos/sign/sign_test.go delete mode 100644 handler/api/repos/update.go delete mode 100644 handler/api/repos/update_test.go delete mode 100644 handler/api/request/context.go delete mode 100644 handler/api/request/context_test.go delete mode 100644 handler/api/secrets/all.go delete mode 100644 handler/api/secrets/all_test.go delete mode 100644 handler/api/secrets/create.go delete mode 100644 handler/api/secrets/create_test.go delete mode 100644 handler/api/secrets/delete.go delete mode 100644 handler/api/secrets/delete_test.go delete mode 100644 handler/api/secrets/find.go delete mode 100644 handler/api/secrets/find_test.go delete mode 100644 handler/api/secrets/list.go delete mode 100644 handler/api/secrets/list_test.go delete mode 100644 handler/api/secrets/none.go delete mode 100644 handler/api/secrets/update.go delete mode 100644 handler/api/secrets/update_test.go delete mode 100644 handler/api/system/license.go delete mode 100644 handler/api/system/limits.go delete mode 100644 handler/api/system/none.go delete mode 100644 handler/api/system/stats.go delete mode 100644 handler/api/system/stats_test.go delete mode 100644 handler/api/template/all.go delete mode 100644 handler/api/template/all_test.go delete mode 100644 handler/api/template/create.go delete mode 100644 handler/api/template/create_test.go delete mode 100644 handler/api/template/delete.go delete mode 100644 handler/api/template/delete_test.go delete mode 100644 handler/api/template/find.go delete mode 100644 handler/api/template/find_test.go delete mode 100644 handler/api/template/list.go delete mode 100644 handler/api/template/list_test.go delete mode 100644 handler/api/template/none.go delete mode 100644 handler/api/template/update.go delete mode 100644 handler/api/template/update_test.go delete mode 100644 handler/api/user/activity.go delete mode 100644 handler/api/user/find.go delete mode 100644 handler/api/user/find_test.go delete mode 100644 handler/api/user/remote/repo.go delete mode 100644 handler/api/user/remote/repos.go delete mode 100644 handler/api/user/repos.go delete mode 
100644 handler/api/user/repos_test.go delete mode 100644 handler/api/user/sync.go delete mode 100644 handler/api/user/sync_test.go delete mode 100644 handler/api/user/token.go delete mode 100644 handler/api/user/token_test.go delete mode 100644 handler/api/user/update.go delete mode 100644 handler/api/user/update_test.go delete mode 100644 handler/api/users/create.go delete mode 100644 handler/api/users/create_test.go delete mode 100644 handler/api/users/delete.go delete mode 100644 handler/api/users/delete_test.go delete mode 100644 handler/api/users/find.go delete mode 100644 handler/api/users/find_test.go delete mode 100644 handler/api/users/list.go delete mode 100644 handler/api/users/list_test.go delete mode 100644 handler/api/users/repos.go delete mode 100644 handler/api/users/token.go delete mode 100644 handler/api/users/token_test.go delete mode 100644 handler/api/users/update.go delete mode 100644 handler/api/users/update_test.go delete mode 100644 handler/health/health.go delete mode 100644 handler/health/health_test.go delete mode 100644 handler/web/hook.go delete mode 100644 handler/web/link/link.go delete mode 100644 handler/web/link/link_test.go delete mode 100644 handler/web/login.go delete mode 100644 handler/web/login_test.go delete mode 100644 handler/web/logout.go delete mode 100644 handler/web/logout_test.go delete mode 100644 handler/web/pages.go delete mode 100644 handler/web/varz.go delete mode 100644 handler/web/varz_test.go delete mode 100644 handler/web/version.go delete mode 100644 handler/web/version_test.go delete mode 100644 handler/web/web.go delete mode 100644 handler/web/writer.go delete mode 100644 handler/web/writer_test.go delete mode 100644 livelog/livelog.go delete mode 100644 livelog/livelog_oss.go delete mode 100644 livelog/stream.go delete mode 100644 livelog/stream_redis.go delete mode 100644 livelog/stream_test.go delete mode 100644 livelog/streamer.go delete mode 100644 livelog/streamer_test.go delete mode 100644 livelog/sub.go delete mode 100644 livelog/sub_test.go delete mode 100644 logger/handler.go delete mode 100644 logger/handler_test.go delete mode 100644 logger/logger.go delete mode 100644 logger/logger_test.go delete mode 100644 metric/builds.go delete mode 100644 metric/builds_test.go delete mode 100644 metric/handler.go delete mode 100644 metric/handler_oss.go delete mode 100644 metric/handler_test.go delete mode 100644 metric/license.go delete mode 100644 metric/license_test.go delete mode 100644 metric/metric.go delete mode 100644 metric/metric_oss.go delete mode 100644 metric/repos.go delete mode 100644 metric/repos_test.go delete mode 100644 metric/sink/config.go delete mode 100644 metric/sink/datadog.go delete mode 100644 metric/sink/datadog_test.go delete mode 100644 metric/sink/tags.go delete mode 100644 metric/stages.go delete mode 100644 metric/stages_test.go delete mode 100644 metric/users.go delete mode 100644 metric/users_test.go delete mode 100644 mock/mock.go delete mode 100644 mock/mock_gen.go delete mode 100644 mock/mockscm/mock.go delete mode 100644 mock/mockscm/mock_gen.go delete mode 100644 operator/manager/manager.go delete mode 100644 operator/manager/manager_test.go delete mode 100644 operator/manager/rpc/client.go delete mode 100644 operator/manager/rpc/client_test.go delete mode 100644 operator/manager/rpc/error.go delete mode 100644 operator/manager/rpc/server.go delete mode 100644 operator/manager/rpc/server_oss.go delete mode 100644 operator/manager/rpc/server_test.go delete mode 100644 
operator/manager/rpc/types.go delete mode 100644 operator/manager/rpc2/client.go delete mode 100644 operator/manager/rpc2/handler.go delete mode 100644 operator/manager/rpc2/server.go delete mode 100644 operator/manager/rpc2/server_oss.go delete mode 100644 operator/manager/rpc2/types.go delete mode 100644 operator/manager/setup.go delete mode 100644 operator/manager/setup_test.go delete mode 100644 operator/manager/teardown.go delete mode 100644 operator/manager/teardown_test.go delete mode 100644 operator/manager/updater.go delete mode 100644 operator/manager/util.go delete mode 100644 operator/manager/util_test.go delete mode 100644 operator/runner/after.go delete mode 100644 operator/runner/after_test.go delete mode 100644 operator/runner/before.go delete mode 100644 operator/runner/before_test.go delete mode 100644 operator/runner/convert.go delete mode 100644 operator/runner/convert_test.go delete mode 100644 operator/runner/env.go delete mode 100644 operator/runner/env_test.go delete mode 100644 operator/runner/machine/client.go delete mode 100644 operator/runner/machine/config.go delete mode 100644 operator/runner/machine/config_test.go delete mode 100644 operator/runner/machine/machine.go delete mode 100644 operator/runner/machine/machine_test.go delete mode 100644 operator/runner/runner.go delete mode 100644 operator/runner/runner_test.go delete mode 100644 operator/runner/secrets.go delete mode 100644 plugin/admission/account.go delete mode 100644 plugin/admission/account_oss.go delete mode 100644 plugin/admission/account_test.go delete mode 100644 plugin/admission/combine.go delete mode 100644 plugin/admission/combine_test.go delete mode 100644 plugin/admission/external.go delete mode 100644 plugin/admission/external_oss.go delete mode 100644 plugin/admission/external_test.go delete mode 100644 plugin/admission/nobot.go delete mode 100644 plugin/admission/nobot_oss.go delete mode 100644 plugin/admission/nobot_test.go delete mode 100644 plugin/admission/noop.go delete mode 100644 plugin/admission/open.go delete mode 100644 plugin/admission/open_test.go delete mode 100644 plugin/config/combine.go delete mode 100644 plugin/config/combine_test.go delete mode 100644 plugin/config/global.go delete mode 100644 plugin/config/global_oss.go delete mode 100644 plugin/config/global_test.go delete mode 100644 plugin/config/jsonnet.go delete mode 100644 plugin/config/jsonnet_oss.go delete mode 100644 plugin/config/jsonnet_test.go delete mode 100644 plugin/config/memoize.go delete mode 100644 plugin/config/memoize_oss.go delete mode 100644 plugin/config/memoize_test.go delete mode 100644 plugin/config/repo.go delete mode 100644 plugin/config/repo_test.go delete mode 100644 plugin/converter/combine.go delete mode 100644 plugin/converter/combine_test.go delete mode 100644 plugin/converter/jsonnet.go delete mode 100644 plugin/converter/jsonnet/jsonnet.go delete mode 100644 plugin/converter/jsonnet/jsonnet_test.go delete mode 100644 plugin/converter/jsonnet_oss.go delete mode 100644 plugin/converter/jsonnet_test.go delete mode 100644 plugin/converter/legacy.go delete mode 100644 plugin/converter/legacy_oss.go delete mode 100644 plugin/converter/memoize.go delete mode 100644 plugin/converter/memoize_oss.go delete mode 100644 plugin/converter/memoize_test.go delete mode 100644 plugin/converter/noop.go delete mode 100644 plugin/converter/remote.go delete mode 100644 plugin/converter/remote_oss.go delete mode 100644 plugin/converter/remote_test.go delete mode 100644 plugin/converter/starlark.go 
delete mode 100644 plugin/converter/starlark/args.go delete mode 100644 plugin/converter/starlark/starlark.go delete mode 100644 plugin/converter/starlark/starlark_test.go delete mode 100644 plugin/converter/starlark/write.go delete mode 100644 plugin/converter/starlark_oss.go delete mode 100644 plugin/converter/starlark_test.go delete mode 100644 plugin/converter/template.go delete mode 100644 plugin/converter/template_oss.go delete mode 100644 plugin/converter/template_test.go delete mode 100644 plugin/converter/testdata/drone.yml delete mode 100644 plugin/converter/testdata/input.jsonnet delete mode 100644 plugin/converter/testdata/input.jsonnet.golden delete mode 100644 plugin/converter/testdata/jsonnet.template.yml delete mode 100644 plugin/converter/testdata/multi.star delete mode 100644 plugin/converter/testdata/multi.star.golden delete mode 100644 plugin/converter/testdata/single.jsonnet delete mode 100644 plugin/converter/testdata/single.star delete mode 100644 plugin/converter/testdata/single.star.golden delete mode 100644 plugin/converter/testdata/starlark-nested.template.yml delete mode 100644 plugin/converter/testdata/starlark.input-nested.star delete mode 100644 plugin/converter/testdata/starlark.input-nested.star.golden delete mode 100644 plugin/converter/testdata/starlark.input.star delete mode 100644 plugin/converter/testdata/starlark.input.star.golden delete mode 100644 plugin/converter/testdata/starlark.template.yml delete mode 100644 plugin/converter/testdata/yaml.input.golden delete mode 100644 plugin/converter/testdata/yaml.input.yml delete mode 100644 plugin/converter/testdata/yaml.template.comment.yml delete mode 100644 plugin/converter/testdata/yaml.template.invalid.yml delete mode 100644 plugin/converter/testdata/yaml.template.yml delete mode 100644 plugin/registry/auths/auth.go delete mode 100644 plugin/registry/auths/auth_test.go delete mode 100644 plugin/registry/auths/testdata/config.json delete mode 100644 plugin/registry/auths/testdata/config2.json delete mode 100644 plugin/registry/combine.go delete mode 100644 plugin/registry/combine_test.go delete mode 100644 plugin/registry/encrypted.go delete mode 100644 plugin/registry/endpoint.go delete mode 100644 plugin/registry/endpoint_oss.go delete mode 100644 plugin/registry/endpoint_test.go delete mode 100644 plugin/registry/external.go delete mode 100644 plugin/registry/external_oss.go delete mode 100644 plugin/registry/external_test.go delete mode 100644 plugin/registry/file.go delete mode 100644 plugin/registry/file_oss.go delete mode 100644 plugin/registry/file_test.go delete mode 100644 plugin/registry/noop.go delete mode 100644 plugin/registry/static.go delete mode 100644 plugin/registry/static_test.go delete mode 100644 plugin/secret/combine.go delete mode 100644 plugin/secret/combine_test.go delete mode 100644 plugin/secret/encrypted.go delete mode 100644 plugin/secret/encrypted_test.go delete mode 100644 plugin/secret/external.go delete mode 100644 plugin/secret/external_oss.go delete mode 100644 plugin/secret/external_test.go delete mode 100644 plugin/secret/static.go delete mode 100644 plugin/secret/static_test.go delete mode 100644 plugin/validator/combine.go delete mode 100644 plugin/validator/combine_test.go delete mode 100644 plugin/validator/filter.go delete mode 100644 plugin/validator/filter_test.go delete mode 100644 plugin/validator/noop.go delete mode 100644 plugin/validator/remote.go delete mode 100644 plugin/validator/remote_oss.go delete mode 100644 plugin/validator/remote_test.go delete 
mode 100644 plugin/webhook/config.go
 delete mode 100644 plugin/webhook/webhook.go
 delete mode 100644 plugin/webhook/webhook_oss.go
 delete mode 100644 plugin/webhook/webhook_test.go
 delete mode 100644 pubsub/doc.go
 delete mode 100644 pubsub/hub.go
 delete mode 100644 pubsub/hub_redis.go
 delete mode 100644 pubsub/hub_test.go
 delete mode 100644 pubsub/pubsub.go
 delete mode 100644 pubsub/pubsub_oss.go
 delete mode 100644 pubsub/sub.go
 delete mode 100644 pubsub/sub_test.go
 delete mode 100644 scheduler/queue/canceller.go
 delete mode 100644 scheduler/queue/canceller_redis.go
 delete mode 100644 scheduler/queue/canceller_test.go
 delete mode 100644 scheduler/queue/queue.go
 delete mode 100644 scheduler/queue/queue_test.go
 delete mode 100644 scheduler/queue/scheduler.go
 delete mode 100644 scheduler/queue/scheduler_non_oss.go
 delete mode 100644 scheduler/queue/scheduler_oss.go
 delete mode 100644 scheduler/queue/scheduler_redis.go
 delete mode 100644 scheduler/scheduler.go
 delete mode 100755 scripts/build.sh
 delete mode 100644 server/server.go
 delete mode 100644 service/canceler/canceler.go
 delete mode 100644 service/canceler/canceler_test.go
 delete mode 100644 service/canceler/match.go
 delete mode 100644 service/canceler/match_test.go
 delete mode 100644 service/canceler/reaper/reaper.go
 delete mode 100644 service/canceler/reaper/reaper_test.go
 delete mode 100644 service/canceler/reaper/util.go
 delete mode 100644 service/canceler/reaper/util_test.go
 delete mode 100644 service/commit/commit.go
 delete mode 100644 service/commit/commit_test.go
 delete mode 100644 service/content/cache/contents.go
 delete mode 100644 service/content/cache/contents_oss.go
 delete mode 100644 service/content/cache/contents_test.go
 delete mode 100644 service/content/content.go
 delete mode 100644 service/content/content_test.go
 delete mode 100644 service/hook/hook.go
 delete mode 100644 service/hook/hook_test.go
 delete mode 100644 service/hook/parser/parse.go
 delete mode 100644 service/hook/parser/parse_test.go
 delete mode 100644 service/hook/parser/testdata/bitbucket_pull_open.json
 delete mode 100644 service/hook/parser/testdata/bitbucket_pull_open.json.golden
 delete mode 100644 service/hook/parser/testdata/bitbucket_push.json
 delete mode 100644 service/hook/parser/testdata/bitbucket_push.json.golden
 delete mode 100644 service/hook/parser/testdata/bitbucket_tag.json
 delete mode 100644 service/hook/parser/testdata/bitbucket_tag.json.golden
 delete mode 100644 service/hook/parser/testdata/gitea_pull_open.json
 delete mode 100644 service/hook/parser/testdata/gitea_pull_open.json.golden
 delete mode 100644 service/hook/parser/testdata/gitea_push.json
 delete mode 100644 service/hook/parser/testdata/gitea_push.json.golden
 delete mode 100644 service/hook/parser/testdata/gitea_tag.json
 delete mode 100644 service/hook/parser/testdata/gitea_tag.json.golden
 delete mode 100644 service/hook/parser/testdata/github_pull_create.json
 delete mode 100644 service/hook/parser/testdata/github_pull_create.json.golden
 delete mode 100644 service/hook/parser/testdata/github_pull_sync.json
 delete mode 100644 service/hook/parser/testdata/github_pull_sync.json.golden
 delete mode 100644 service/hook/parser/testdata/github_push.json
 delete mode 100644 service/hook/parser/testdata/github_push.json.golden
 delete mode 100644 service/hook/parser/testdata/github_push_tag.json
 delete mode 100644 service/hook/parser/testdata/github_push_tag.json.golden
 delete mode 100644 service/hook/parser/testdata/gitlab_pull_open.json
 delete mode 100644 service/hook/parser/testdata/gitlab_pull_open.json.golden
 delete mode 100644 service/hook/parser/testdata/gitlab_push.json
 delete mode 100644 service/hook/parser/testdata/gitlab_push.json.golden
 delete mode 100644 service/hook/parser/testdata/gitlab_tag.json
 delete mode 100644 service/hook/parser/testdata/gitlab_tag.json.golden
 delete mode 100644 service/hook/parser/testdata/gogs_pull_create.json
 delete mode 100644 service/hook/parser/testdata/gogs_push.json
 delete mode 100644 service/hook/parser/testdata/gogs_push.json.golden
 delete mode 100644 service/hook/parser/testdata/gogs_tag.json
 delete mode 100644 service/hook/parser/testdata/gogs_tag.json.golden
 delete mode 100644 service/hook/parser/testdata/stash_pull_create.json
 delete mode 100644 service/hook/parser/testdata/stash_pull_create.json.golden
 delete mode 100644 service/hook/parser/testdata/stash_push.json
 delete mode 100644 service/hook/parser/testdata/stash_push.json.golden
 delete mode 100644 service/hook/parser/testdata/stash_push_tag.json
 delete mode 100644 service/hook/parser/testdata/stash_push_tag.json.golden
 delete mode 100644 service/hook/util.go
 delete mode 100644 service/hook/util_test.go
 delete mode 100644 service/license/load.go
 delete mode 100644 service/license/load_test.go
 delete mode 100644 service/license/nolimit.go
 delete mode 100644 service/license/nolimit_oss.go
 delete mode 100644 service/license/service.go
 delete mode 100644 service/linker/linker.go
 delete mode 100644 service/linker/linker_test.go
 delete mode 100644 service/netrc/netrc.go
 delete mode 100644 service/netrc/netrc_test.go
 delete mode 100644 service/org/cache.go
 delete mode 100644 service/org/cache_test.go
 delete mode 100644 service/org/org.go
 delete mode 100644 service/org/org_test.go
 delete mode 100644 service/redisdb/lockerr.go
 delete mode 100644 service/redisdb/redisdb.go
 delete mode 100644 service/repo/repo.go
 delete mode 100644 service/repo/repo_test.go
 delete mode 100644 service/repo/util.go
 delete mode 100644 service/repo/util_test.go
 delete mode 100644 service/status/status.go
 delete mode 100644 service/status/status_test.go
 delete mode 100644 service/status/util.go
 delete mode 100644 service/status/util_test.go
 delete mode 100644 service/syncer/filter.go
 delete mode 100644 service/syncer/filter_oss.go
 delete mode 100644 service/syncer/filter_test.go
 delete mode 100644 service/syncer/syncer.go
 delete mode 100644 service/syncer/syncer_test.go
 delete mode 100644 service/syncer/util.go
 delete mode 100644 service/syncer/util_test.go
 delete mode 100644 service/token/renew.go
 delete mode 100644 service/token/renew_test.go
 delete mode 100644 service/transfer/transfer.go
 delete mode 100644 service/transfer/transfer_test.go
 delete mode 100644 service/user/user.go
 delete mode 100644 service/user/user_test.go
 delete mode 100644 session/config.go
 delete mode 100644 session/session.go
 delete mode 100644 session/session_test.go
 delete mode 100644 session/testdata/mapping.json
 delete mode 100644 store/batch/batch.go
 delete mode 100644 store/batch/batch_test.go
 delete mode 100644 store/batch2/batch.go
 delete mode 100644 store/batch2/batch_test.go
 delete mode 100644 store/build/build.go
 delete mode 100644 store/build/build_test.go
 delete mode 100644 store/build/scan.go
 delete mode 100644 store/card/card.go
 delete mode 100644 store/card/card_oss.go
 delete mode 100644 store/card/card_test.go
 delete mode 100644 store/card/scan.go
 delete mode 100644 store/cron/cron.go
 delete mode 100644 store/cron/cron_oss.go
 delete mode 100644 store/cron/cron_test.go
 delete mode 100644 store/cron/scan.go
 delete mode 100644 store/logs/azureblob.go
 delete mode 100644 store/logs/azureblob_oss.go
 delete mode 100644 store/logs/combine.go
 delete mode 100644 store/logs/logs.go
 delete mode 100644 store/logs/logs_test.go
 delete mode 100644 store/logs/s3.go
 delete mode 100644 store/logs/s3_oss.go
 delete mode 100644 store/logs/s3_test.go
 delete mode 100644 store/logs/scan.go
 delete mode 100644 store/perm/perm.go
 delete mode 100644 store/perm/perm_test.go
 delete mode 100644 store/perm/scan.go
 delete mode 100644 store/repos/repos.go
 delete mode 100644 store/repos/repos_test.go
 delete mode 100644 store/repos/scan.go
 delete mode 100644 store/repos/scan_test.go
 delete mode 100644 store/repos/testdata/repo.json
 delete mode 100644 store/repos/testdata/repo.json.golden
 delete mode 100644 store/repos/type.go
 delete mode 100644 store/secret/global/scan.go
 delete mode 100644 store/secret/global/secret.go
 delete mode 100644 store/secret/global/secret_oss.go
 delete mode 100644 store/secret/global/secret_test.go
 delete mode 100644 store/secret/scan.go
 delete mode 100644 store/secret/secret.go
 delete mode 100644 store/secret/secret_oss.go
 delete mode 100644 store/secret/secret_test.go
 delete mode 100644 store/shared/db/conn.go
 delete mode 100644 store/shared/db/conn_oss.go
 delete mode 100644 store/shared/db/conn_test.go
 delete mode 100644 store/shared/db/db.go
 delete mode 100644 store/shared/db/db_test.go
 delete mode 100644 store/shared/db/dbtest/dbtest.go
 delete mode 100644 store/shared/db/error.go
 delete mode 100644 store/shared/db/nop.go
 delete mode 100644 store/shared/encrypt/aesgcm.go
 delete mode 100644 store/shared/encrypt/aesgcm_test.go
 delete mode 100644 store/shared/encrypt/encrypt.go
 delete mode 100644 store/shared/encrypt/none.go
 delete mode 100644 store/shared/encrypt/none_test.go
 delete mode 100644 store/shared/migrate/README.md
 delete mode 100644 store/shared/migrate/mysql/ddl.go
 delete mode 100644 store/shared/migrate/mysql/ddl_gen.go
 delete mode 100644 store/shared/migrate/mysql/files/001_create_table_user.sql
 delete mode 100644 store/shared/migrate/mysql/files/002_create_table_repos.sql
 delete mode 100644 store/shared/migrate/mysql/files/003_create_table_perms.sql
 delete mode 100644 store/shared/migrate/mysql/files/004_create_table_builds.sql
 delete mode 100644 store/shared/migrate/mysql/files/005_create_table_stages.sql
 delete mode 100644 store/shared/migrate/mysql/files/006_create_table_steps.sql
 delete mode 100644 store/shared/migrate/mysql/files/007_create_table_logs.sql
 delete mode 100644 store/shared/migrate/mysql/files/008_create_table_cron.sql
 delete mode 100644 store/shared/migrate/mysql/files/009_create_table_secrets.sql
 delete mode 100644 store/shared/migrate/mysql/files/010_create_table_nodes.sql
 delete mode 100644 store/shared/migrate/mysql/files/011_add_column_builds_cron.sql
 delete mode 100644 store/shared/migrate/mysql/files/012_create_table_global_secrets.sql
 delete mode 100644 store/shared/migrate/mysql/files/013_add_column_builds_deploy_id.sql
 delete mode 100644 store/shared/migrate/mysql/files/014_create_table_refs.sql
 delete mode 100644 store/shared/migrate/mysql/files/015_create_table_templates.sql
 delete mode 100644 store/shared/migrate/mysql/files/016_add_columns_steps.sql
 delete mode 100644 store/shared/migrate/mysql/files/017_create_table_cards.sql
 delete mode 100644 store/shared/migrate/mysql/files/018_amend_table_cards.sql
 delete mode 100644 store/shared/migrate/postgres/ddl.go
 delete mode 100644 store/shared/migrate/postgres/ddl_gen.go
 delete mode 100644 store/shared/migrate/postgres/files/001_create_table_user.sql
 delete mode 100644 store/shared/migrate/postgres/files/002_create_table_repos.sql
 delete mode 100644 store/shared/migrate/postgres/files/003_create_table_perms.sql
 delete mode 100644 store/shared/migrate/postgres/files/004_create_table_builds.sql
 delete mode 100644 store/shared/migrate/postgres/files/005_create_table_stages.sql
 delete mode 100644 store/shared/migrate/postgres/files/006_create_table_steps.sql
 delete mode 100644 store/shared/migrate/postgres/files/007_create_table_logs.sql
 delete mode 100644 store/shared/migrate/postgres/files/008_create_table_cron.sql
 delete mode 100644 store/shared/migrate/postgres/files/009_create_table_secrets.sql
 delete mode 100644 store/shared/migrate/postgres/files/010_create_table_nodes.sql
 delete mode 100644 store/shared/migrate/postgres/files/011_add_column_builds_cron.sql
 delete mode 100644 store/shared/migrate/postgres/files/012_create_table_org_secrets.sql
 delete mode 100644 store/shared/migrate/postgres/files/013_add_column_builds_deploy_id.sql
 delete mode 100644 store/shared/migrate/postgres/files/015_create_table_refs.sql
 delete mode 100644 store/shared/migrate/postgres/files/016_create_template_tables.sql
 delete mode 100644 store/shared/migrate/postgres/files/017_add_columns_steps.sql
 delete mode 100644 store/shared/migrate/postgres/files/018_create_table_cards.sql
 delete mode 100644 store/shared/migrate/postgres/files/019_amend_table_cards.sql
 delete mode 100644 store/shared/migrate/sqlite/ddl.go
 delete mode 100644 store/shared/migrate/sqlite/ddl_gen.go
 delete mode 100644 store/shared/migrate/sqlite/files/001_create_table_user.sql
 delete mode 100644 store/shared/migrate/sqlite/files/002_create_table_repos.sql
 delete mode 100644 store/shared/migrate/sqlite/files/003_create_table_perms.sql
 delete mode 100644 store/shared/migrate/sqlite/files/004_create_table_builds.sql
 delete mode 100644 store/shared/migrate/sqlite/files/005_create_table_stages.sql
 delete mode 100644 store/shared/migrate/sqlite/files/006_create_table_steps.sql
 delete mode 100644 store/shared/migrate/sqlite/files/007_create_table_logs.sql
 delete mode 100644 store/shared/migrate/sqlite/files/008_create_table_cron.sql
 delete mode 100644 store/shared/migrate/sqlite/files/009_create_table_secrets.sql
 delete mode 100644 store/shared/migrate/sqlite/files/010_create_table_nodes.sql
 delete mode 100644 store/shared/migrate/sqlite/files/011_add_column_builds_cron.sql
 delete mode 100644 store/shared/migrate/sqlite/files/012_create_table_org_secrets.sql
 delete mode 100644 store/shared/migrate/sqlite/files/013_add_column_builds_deploy_id.sql
 delete mode 100644 store/shared/migrate/sqlite/files/014_create_table_refs.sql
 delete mode 100644 store/shared/migrate/sqlite/files/015_create_template_tables.sql
 delete mode 100644 store/shared/migrate/sqlite/files/016_add_columns_steps.sql
 delete mode 100644 store/shared/migrate/sqlite/files/017_create_table_cards.sql
 delete mode 100644 store/shared/migrate/sqlite/files/018_amend_table_cards.sql
 delete mode 100644 store/stage/scan.go
 delete mode 100644 store/stage/stage.go
 delete mode 100644 store/stage/stage_test.go
 delete mode 100644 store/stage/type.go
 delete mode 100644 store/step/scan.go
 delete mode 100644 store/step/step.go
 delete mode 100644 store/step/step_test.go
 delete mode 100644 store/store.go
 delete mode 100644 store/template/scan.go
 delete mode 100644 store/template/template.go
 delete mode 100644 store/template/template_oss.go
 delete mode 100644 store/template/template_test.go
 delete mode 100644 store/user/scan.go
 delete mode 100644 store/user/user.go
 delete mode 100644 store/user/user_test.go
 delete mode 100644 trigger/change.go
 delete mode 100644 trigger/change_test.go
 delete mode 100644 trigger/cron/cron.go
 delete mode 100644 trigger/cron/cron_oss.go
 delete mode 100644 trigger/cron/cron_test.go
 delete mode 100644 trigger/dag/dag.go
 delete mode 100644 trigger/dag/dag_test.go
 delete mode 100644 trigger/skip.go
 delete mode 100644 trigger/skip_test.go
 delete mode 100644 trigger/trigger.go
 delete mode 100644 trigger/trigger_test.go
 delete mode 100644 version/version.go
 delete mode 100644 version/version_test.go
 delete mode 100644 web/README
diff --git a/.dockerignore b/.dockerignore
deleted file mode 100644
index 858594f58a..0000000000
--- a/.dockerignore
+++ /dev/null
@@ -1,2 +0,0 @@
-*
-!release/*
diff --git a/.drone.yml b/.drone.yml
deleted file mode 100644
index 50a28859bc..0000000000
--- a/.drone.yml
+++ /dev/null
@@ -1,107 +0,0 @@
----
-kind: pipeline
-type: docker
-name: linux-amd64
-
-platform:
-  arch: amd64
-  os: linux
-
-steps:
-- name: test
-  image: golang:1.14.15
-  commands:
-  - go test -race ./...
-  - go build -o /dev/null github.com/drone/drone/cmd/drone-server
-  - go build -o /dev/null -tags "oss nolimit" github.com/drone/drone/cmd/drone-server
-
-- name: build
-  image: golang:1.14.15
-  commands:
-  - sh scripts/build.sh
-  environment:
-    GOARCH: amd64
-    GOOS: linux
-
-- name: publish
-  image: plugins/docker:18
-  settings:
-    auto_tag: true
-    auto_tag_suffix: linux-amd64
-    dockerfile: docker/Dockerfile.server.linux.amd64
-    repo: drone/drone
-    username:
-      from_secret: docker_username
-    password:
-      from_secret: docker_password
-  when:
-    event:
-    - push
-    - tag
-
----
-kind: pipeline
-type: vm
-name: linux-arm64
-
-pool:
-  use: ubuntu_arm64
-
-platform:
-  arch: arm64
-  os: linux
-
-steps:
-- name: build
-  image: golang:1.14.15
-  commands:
-  - sh scripts/build.sh
-  environment:
-    GOARCH: arm64
-    GOOS: linux
-
-- name: publish
-  image: plugins/docker:18
-  settings:
-    auto_tag: true
-    auto_tag_suffix: linux-arm64
-    dockerfile: docker/Dockerfile.server.linux.arm64
-    repo: drone/drone
-    username:
-      from_secret: docker_username
-    password:
-      from_secret: docker_password
-
-trigger:
-  event:
-  - push
-  - tag
-
-depends_on:
-- linux-amd64
-
----
-kind: pipeline
-type: docker
-name: manifest
-
-steps:
-- name: publish
-  image: plugins/manifest:1.2
-  settings:
-    auto_tag: true
-    ignore_missing: true
-    spec: docker/manifest.server.tmpl
-    username:
-      from_secret: docker_username
-    password:
-      from_secret: docker_password
-
-trigger:
-  event:
-  - push
-  - tag
-
-depends_on:
-- linux-arm64
-
diff --git a/.gitignore b/.gitignore
deleted file mode 100644
index 95b2417d8c..0000000000
--- a/.gitignore
+++ /dev/null
@@ -1,13 +0,0 @@
-.vscode
-__debug_bin
-*.sqlite
-*.txt
-*.out
-*.key
-.env
-.env.*
-release/
-scripts/*.go
-docker/**/data
-TODO*
-.idea
diff --git a/BUILDING b/BUILDING
deleted file mode 100644
index fe4ee3168a..0000000000
--- a/BUILDING
+++ /dev/null
@@ -1,11 +0,0 @@
-1. Clone the repository
-2. Install go 1.11 or later with Go modules enabled
-3. Install binaries to $GOPATH/bin
-
-   go install github.com/drone/drone/cmd/drone-server
-
-4. Start the server at localhost:8080
-
-   export DRONE_GITHUB_CLIENT_ID=...
-   export DRONE_GITHUB_CLIENT_SECRET=...
-   drone-server
diff --git a/BUILDING_OSS b/BUILDING_OSS
deleted file mode 100644
index b7ed226feb..0000000000
--- a/BUILDING_OSS
+++ /dev/null
@@ -1,11 +0,0 @@
-1.
Clone the repository -2. Install go 1.11 or later with Go modules enabled -3. Install binaries to $GOPATH/bin - - go install -tags "oss nolimit" github.com/drone/drone/cmd/drone-server - -4. Start the server at localhost:8080 - - export DRONE_GITHUB_CLIENT_ID=... - export DRONE_GITHUB_CLIENT_SECRET=... - drone-server diff --git a/CHANGELOG.md b/CHANGELOG.md deleted file mode 100644 index 1cad0841f5..0000000000 --- a/CHANGELOG.md +++ /dev/null @@ -1,797 +0,0 @@ -# Changelog - -## [v2.20.0](https://github.com/harness/drone/tree/v2.20.0) (2023-08-21) - -[Full Changelog](https://github.com/harness/drone/compare/v2.19.0...v2.20.0) - -**Implemented enhancements:** - -- + sync gitea redirecturl config from gitee for customize login redire… [\#3319](https://github.com/harness/drone/pull/3319) ([fireinice](https://github.com/fireinice)) - -**Fixed bugs:** - -- \(CI-8780\) set approved stages to waiting, if they have stage depende… [\#3355](https://github.com/harness/drone/pull/3355) ([tphoney](https://github.com/tphoney)) - -## [v2.19.0](https://github.com/harness/drone/tree/v2.19.0) (2023-08-15) - -[Full Changelog](https://github.com/harness/drone/compare/scheduler_experiment...v2.19.0) - -**Implemented enhancements:** - -- Support arbitrary action value from parameter in query string [\#3341](https://github.com/harness/drone/pull/3341) ([filippopisano](https://github.com/filippopisano)) - -**Fixed bugs:** - -- bump drone-ui to 2.11.5 [\#3350](https://github.com/harness/drone/pull/3350) ([d1wilko](https://github.com/d1wilko)) -- bump drone-ui to 2.11.4 [\#3349](https://github.com/harness/drone/pull/3349) ([d1wilko](https://github.com/d1wilko)) -- \(fix\) prevent scheduler deadlock [\#3344](https://github.com/harness/drone/pull/3344) ([tphoney](https://github.com/tphoney)) -- bump drone-ui to 2.11.3 [\#3337](https://github.com/harness/drone/pull/3337) ([d1wilko](https://github.com/d1wilko)) - -**Merged pull requests:** - -- \(maint\) prep for v2.19.0 [\#3352](https://github.com/harness/drone/pull/3352) ([tphoney](https://github.com/tphoney)) -- remove repetitive words [\#3342](https://github.com/harness/drone/pull/3342) ([cuishuang](https://github.com/cuishuang)) -- Revert "fix scheduler queue deadlock" [\#3331](https://github.com/harness/drone/pull/3331) ([tphoney](https://github.com/tphoney)) - -## [scheduler_experiment](https://github.com/harness/drone/tree/scheduler_experiment) (2023-07-05) - -[Full Changelog](https://github.com/harness/drone/compare/v2.18.0...scheduler_experiment) - -**Fixed bugs:** - -- fix scheduler queue deadlock [\#3330](https://github.com/harness/drone/pull/3330) ([tphoney](https://github.com/tphoney)) - -## [v2.18.0](https://github.com/harness/drone/tree/v2.18.0) (2023-07-04) - -[Full Changelog](https://github.com/harness/drone/compare/v2.17.0...v2.18.0) - -**Implemented enhancements:** - -- support custom pipeline message [\#3294](https://github.com/harness/drone/pull/3294) ([zc2638](https://github.com/zc2638)) - -**Fixed bugs:** - -- bump drone-ui to 2.11.2 [\#3327](https://github.com/harness/drone/pull/3327) ([d1wilko](https://github.com/d1wilko)) -- Fix comment errors [\#3302](https://github.com/harness/drone/pull/3302) ([weidongkl](https://github.com/weidongkl)) - -**Merged pull requests:** - -- v2.18.0 release prep [\#3328](https://github.com/harness/drone/pull/3328) ([tphoney](https://github.com/tphoney)) - -## [v2.17.0](https://github.com/harness/drone/tree/v2.17.0) (2023-04-25) - -[Full Changelog](https://github.com/harness/drone/compare/v2.16.0...v2.17.0) - 
-**Implemented enhancements:** - -- Add `authtype` to logging middleware [\#3310](https://github.com/harness/drone/pull/3310) ([colinhoglund](https://github.com/colinhoglund)) -- Add config for the buffer [\#3308](https://github.com/harness/drone/pull/3308) ([TheJokersThief](https://github.com/TheJokersThief)) - -**Fixed bugs:** - -- store/card: fix dropped error [\#3300](https://github.com/harness/drone/pull/3300) ([alrs](https://github.com/alrs)) -- bump drone-ui to 2.9.1 [\#3298](https://github.com/harness/drone/pull/3298) ([d1wilko](https://github.com/d1wilko)) -- Starlark: Update `go.starlark.net` dependency [\#3284](https://github.com/harness/drone/pull/3284) ([dsotirakis](https://github.com/dsotirakis)) - -**Merged pull requests:** - -- release prep for v2.17.0 [\#3316](https://github.com/harness/drone/pull/3316) ([eoinmcafee00](https://github.com/eoinmcafee00)) -- bump drone-ui to 2.11.1 [\#3315](https://github.com/harness/drone/pull/3315) ([d1wilko](https://github.com/d1wilko)) -- bump drone-ui to 2.11.0 [\#3313](https://github.com/harness/drone/pull/3313) ([d1wilko](https://github.com/d1wilko)) -- bump drone-ui to 2.10.0 [\#3311](https://github.com/harness/drone/pull/3311) ([d1wilko](https://github.com/d1wilko)) -- \(maint\) move to use the arm64 pool [\#3296](https://github.com/harness/drone/pull/3296) ([tphoney](https://github.com/tphoney)) - -## [v2.16.0](https://github.com/harness/drone/tree/v2.16.0) (2022-12-15) - -[Full Changelog](https://github.com/harness/drone/compare/v2.15.0...v2.16.0) - -**Implemented enhancements:** - -- Make Starlark file size limit configurable [\#3291](https://github.com/harness/drone/pull/3291) ([andrii-kasparevych](https://github.com/andrii-kasparevych)) -- Enhance status check label for promotions [\#3263](https://github.com/harness/drone/pull/3263) ([michelangelomo](https://github.com/michelangelomo)) - -**Fixed bugs:** - -- \(bugfix\) bump go-scm to v1.28.0 [\#3290](https://github.com/harness/drone/pull/3290) ([tphoney](https://github.com/tphoney)) - -**Merged pull requests:** - -- \(maint\) 2.16.0 release prep [\#3295](https://github.com/harness/drone/pull/3295) ([tphoney](https://github.com/tphoney)) - -## [v2.15.0](https://github.com/harness/drone/tree/v2.15.0) (2022-10-28) - -[Full Changelog](https://github.com/harness/drone/compare/v2.14.0...v2.15.0) - -**Implemented enhancements:** - -- bump ui version [\#3279](https://github.com/harness/drone/pull/3279) ([d1wilko](https://github.com/d1wilko)) -- Add endpoint for allowing admins to force rotate a user's token [\#3272](https://github.com/harness/drone/pull/3272) ([ShiftedMr](https://github.com/ShiftedMr)) - -**Merged pull requests:** - -- release prep v2.15.0 [\#3281](https://github.com/harness/drone/pull/3281) ([d1wilko](https://github.com/d1wilko)) - -## [v2.14.0](https://github.com/harness/drone/tree/v2.14.0) (2022-10-18) - -[Full Changelog](https://github.com/harness/drone/compare/v2.13.0...v2.14.0) - -**Implemented enhancements:** - -- \(DRON-418\) send webhook and set status for failed builds [\#3266](https://github.com/harness/drone/pull/3266) ([tphoney](https://github.com/tphoney)) - -**Merged pull requests:** - -- v2.14.0 release prep [\#3275](https://github.com/harness/drone/pull/3275) ([d1wilko](https://github.com/d1wilko)) - -## [v2.13.0](https://github.com/harness/drone/tree/v2.13.0) (2022-09-21) - -[Full Changelog](https://github.com/harness/drone/compare/v2.12.1...v2.13.0) - -**Implemented enhancements:** - -- feat: update drone-yaml module 
[\#3249](https://github.com/harness/drone/pull/3249) ([jimsheldon](https://github.com/jimsheldon)) -- support time zone [\#3241](https://github.com/harness/drone/pull/3241) ([zc2638](https://github.com/zc2638)) - -**Fixed bugs:** - -- update discourse.drone.io to community.harness.io [\#3261](https://github.com/harness/drone/pull/3261) ([kit101](https://github.com/kit101)) -- \(DRON-392\) cascade deletes on purge [\#3243](https://github.com/harness/drone/pull/3243) ([tphoney](https://github.com/tphoney)) -- Template converter, don't skip .yaml extension. [\#3242](https://github.com/harness/drone/pull/3242) ([staffanselander](https://github.com/staffanselander)) - -**Merged pull requests:** - -- v2.13.0 release prep [\#3268](https://github.com/harness/drone/pull/3268) ([tphoney](https://github.com/tphoney)) -- \(maint\) disable arm builds [\#3262](https://github.com/harness/drone/pull/3262) ([tphoney](https://github.com/tphoney)) -- Update links to discourse in issue template [\#3233](https://github.com/harness/drone/pull/3233) ([alikhil](https://github.com/alikhil)) - -## [v2.12.1](https://github.com/harness/drone/tree/v2.12.1) (2022-06-15) - -[Full Changelog](https://github.com/harness/drone/compare/v2.12.0...v2.12.1) - -**Fixed bugs:** - -- \(bug\) - fix original template scripts & remove amend scripts [\#3229](https://github.com/harness/drone/pull/3229) ([eoinmcafee00](https://github.com/eoinmcafee00)) -- \(bug\) - remove unique index on template name [\#3226](https://github.com/harness/drone/pull/3226) ([eoinmcafee00](https://github.com/eoinmcafee00)) -- Added OAuth2 token refresher for Gitlab [\#3215](https://github.com/harness/drone/pull/3215) ([EndymionWight](https://github.com/EndymionWight)) - -**Merged pull requests:** - -- release prep for v2.12.1 [\#3232](https://github.com/harness/drone/pull/3232) ([eoinmcafee00](https://github.com/eoinmcafee00)) -- \(maint\) fix starlark test on windows [\#3230](https://github.com/harness/drone/pull/3230) ([tphoney](https://github.com/tphoney)) -- \(maint\) fix unit tests so they pass on windows [\#3228](https://github.com/harness/drone/pull/3228) ([tphoney](https://github.com/tphoney)) -- Update Readme to Fix Typo [\#3223](https://github.com/harness/drone/pull/3223) ([hrittikhere](https://github.com/hrittikhere)) -- \(bug\) add unit test for comments in template file [\#3221](https://github.com/harness/drone/pull/3221) ([eoinmcafee00](https://github.com/eoinmcafee00)) -- Bump scm version to v1.24.0 [\#3219](https://github.com/harness/drone/pull/3219) ([kit101](https://github.com/kit101)) - -## [v2.12.0](https://github.com/harness/drone/tree/v2.12.0) (2022-05-16) - -[Full Changelog](https://github.com/harness/drone/compare/v2.11.1...v2.12.0) - -**Implemented enhancements:** - -- bump SCM version to v1.21.1 [\#3204](https://github.com/harness/drone/pull/3204) ([d1wilko](https://github.com/d1wilko)) -- bump ui version [\#3202](https://github.com/harness/drone/pull/3202) ([d1wilko](https://github.com/d1wilko)) - -**Fixed bugs:** - -- \(fix\) update drone ui to 2.8.2 [\#3211](https://github.com/harness/drone/pull/3211) ([tphoney](https://github.com/tphoney)) -- \(dron-267\) correctly set parent for promotion retry [\#3210](https://github.com/harness/drone/pull/3210) ([tphoney](https://github.com/tphoney)) - -**Merged pull requests:** - -- release prep v2.12.0 [\#3214](https://github.com/harness/drone/pull/3214) ([tphoney](https://github.com/tphoney)) -- fixing URL [\#3208](https://github.com/harness/drone/pull/3208) 
([dnielsen](https://github.com/dnielsen)) -- update community information with updated links [\#3199](https://github.com/harness/drone/pull/3199) ([mrsantons](https://github.com/mrsantons)) - -## [v2.11.1](https://github.com/harness/drone/tree/v2.11.1) (2022-03-15) - -[Full Changelog](https://github.com/harness/drone/compare/v2.11.0...v2.11.1) - -**Fixed bugs:** - -- ignore nil repos in list and add better debugging [\#3196](https://github.com/harness/drone/pull/3196) ([d1wilko](https://github.com/d1wilko)) - -**Merged pull requests:** - -- \(maint\) release prep for 2.11.1 [\#3197](https://github.com/harness/drone/pull/3197) ([d1wilko](https://github.com/d1wilko)) - -## [v2.11.0](https://github.com/harness/drone/tree/v2.11.0) (2022-03-08) - -[Full Changelog](https://github.com/harness/drone/compare/v2.10.0...v2.11.0) - -**Implemented enhancements:** - -- bump UI and SCM versions [\#3193](https://github.com/harness/drone/pull/3193) ([d1wilko](https://github.com/d1wilko)) - -**Merged pull requests:** - -- \(maint\) release prep for 2.11.0 [\#3194](https://github.com/harness/drone/pull/3194) ([d1wilko](https://github.com/d1wilko)) - -## [v2.10.0](https://github.com/harness/drone/tree/v2.10.0) (2022-03-03) - -[Full Changelog](https://github.com/harness/drone/compare/v2.9.1...v2.10.0) - -**Implemented enhancements:** - -- bump UI version to v2.7.0 [\#3190](https://github.com/harness/drone/pull/3190) ([d1wilko](https://github.com/d1wilko)) -- bump UI version to v2.6.2 [\#3188](https://github.com/harness/drone/pull/3188) ([d1wilko](https://github.com/d1wilko)) - -**Merged pull requests:** - -- \(maint\) release prep for 2.10.0 [\#3191](https://github.com/harness/drone/pull/3191) ([d1wilko](https://github.com/d1wilko)) - -## [v2.9.1](https://github.com/harness/drone/tree/v2.9.1) (2022-01-27) - -[Full Changelog](https://github.com/harness/drone/compare/v2.9.0...v2.9.1) - -**Fixed bugs:** - -- bump ui version 2.6.1 [\#3185](https://github.com/harness/drone/pull/3185) ([d1wilko](https://github.com/d1wilko)) - -**Merged pull requests:** - -- \(maint\) release prep for 2.9.1 [\#3186](https://github.com/harness/drone/pull/3186) ([tphoney](https://github.com/tphoney)) - -## [v2.9.0](https://github.com/harness/drone/tree/v2.9.0) (2022-01-26) - -[Full Changelog](https://github.com/harness/drone/compare/v2.8.0...v2.9.0) - -**Implemented enhancements:** - -- bump ui to v2.6.0 [\#3183](https://github.com/harness/drone/pull/3183) ([eoinmcafee00](https://github.com/eoinmcafee00)) - -**Merged pull requests:** - -- release prep for v2.9.0 [\#3184](https://github.com/harness/drone/pull/3184) ([eoinmcafee00](https://github.com/eoinmcafee00)) - -## [v2.8.0](https://github.com/harness/drone/tree/v2.8.0) (2022-01-11) - -[Full Changelog](https://github.com/harness/drone/compare/v2.7.3...v2.8.0) - -**Implemented enhancements:** - -- bump UI to v2.5.0 [\#3180](https://github.com/harness/drone/pull/3180) ([eoinmcafee00](https://github.com/eoinmcafee00)) -- \(feat\) ignore archive repos on sync [\#3178](https://github.com/harness/drone/pull/3178) ([eoinmcafee00](https://github.com/eoinmcafee00)) -- Datadog add the tag of 'remote:gitee' [\#3174](https://github.com/harness/drone/pull/3174) ([kit101](https://github.com/kit101)) -- Add tag filter when call build list endpoint [\#3173](https://github.com/harness/drone/pull/3173) ([michelangelomo](https://github.com/michelangelomo)) - -**Fixed bugs:** - -- \(maint\) add warning around typo for stage\_id in step struct [\#3179](https://github.com/harness/drone/pull/3179) 
([tphoney](https://github.com/tphoney)) - -**Merged pull requests:** - -- release prep v2.8.0 [\#3181](https://github.com/harness/drone/pull/3181) ([eoinmcafee00](https://github.com/eoinmcafee00)) - -## [v2.7.3](https://github.com/harness/drone/tree/v2.7.3) (2021-12-30) - -[Full Changelog](https://github.com/harness/drone/compare/v2.7.2...v2.7.3) - -**Fixed bugs:** - -- bump go-scm to v1.16.3 [\#3175](https://github.com/harness/drone/pull/3175) ([eoinmcafee00](https://github.com/eoinmcafee00)) - -**Merged pull requests:** - -- release prep v2.7.3 [\#3176](https://github.com/harness/drone/pull/3176) ([eoinmcafee00](https://github.com/eoinmcafee00)) - -## [v2.7.2](https://github.com/harness/drone/tree/v2.7.2) (2021-12-19) - -[Full Changelog](https://github.com/harness/drone/compare/v2.7.1...v2.7.2) - -**Implemented enhancements:** - -- bump go-scm to v1.16.2 [\#3169](https://github.com/harness/drone/pull/3169) ([kit101](https://github.com/kit101)) - -**Fixed bugs:** - -- fixbug gitee provide refresher [\#3168](https://github.com/harness/drone/pull/3168) ([kit101](https://github.com/kit101)) - -**Merged pull requests:** - -- release prep 2.7.2 [\#3172](https://github.com/harness/drone/pull/3172) ([eoinmcafee00](https://github.com/eoinmcafee00)) - -## [v2.7.1](https://github.com/harness/drone/tree/v2.7.1) (2021-12-17) - -[Full Changelog](https://github.com/harness/drone/compare/v2.7.0...v2.7.1) - -**Fixed bugs:** - -- fixes issue with redirects on double slashes in url [\#3170](https://github.com/harness/drone/pull/3170) ([eoinmcafee00](https://github.com/eoinmcafee00)) - -**Merged pull requests:** - -- release prep v2.7.1 [\#3171](https://github.com/harness/drone/pull/3171) ([eoinmcafee00](https://github.com/eoinmcafee00)) - -## [v2.7.0](https://github.com/harness/drone/tree/v2.7.0) (2021-12-15) - -[Full Changelog](https://github.com/harness/drone/compare/v2.6.0...v2.7.0) - -**Implemented enhancements:** - -- bump UI to v2.4.1 [\#3167](https://github.com/harness/drone/pull/3167) ([d1wilko](https://github.com/d1wilko)) - -**Fixed bugs:** - -- \(DRON-157\) use deploy string in deployment [\#3165](https://github.com/harness/drone/pull/3165) ([tphoney](https://github.com/tphoney)) - -**Merged pull requests:** - -- release v2.7.0 [\#3166](https://github.com/harness/drone/pull/3166) ([d1wilko](https://github.com/d1wilko)) - -## [v2.6.0](https://github.com/harness/drone/tree/v2.6.0) (2021-11-30) - -[Full Changelog](https://github.com/harness/drone/compare/v2.5.0...v2.6.0) - -**Implemented enhancements:** - -- Feat: implemented gitee client [\#3156](https://github.com/harness/drone/pull/3156) ([kit101](https://github.com/kit101)) - -**Merged pull requests:** - -- release prep for v2.6.0 [\#3163](https://github.com/harness/drone/pull/3163) ([tphoney](https://github.com/tphoney)) - -## [v2.5.0](https://github.com/harness/drone/tree/v2.5.0) (2021-11-17) - -[Full Changelog](https://github.com/harness/drone/compare/v2.4.0...v2.5.0) - -**Implemented enhancements:** - -- bump ui to v2.4.0 [\#3160](https://github.com/harness/drone/pull/3160) ([eoinmcafee00](https://github.com/eoinmcafee00)) -- add new endpoint for uploading cards [\#3159](https://github.com/harness/drone/pull/3159) ([eoinmcafee00](https://github.com/eoinmcafee00)) -- refactor create / find / delete end points for cards [\#3158](https://github.com/harness/drone/pull/3158) ([eoinmcafee00](https://github.com/eoinmcafee00)) -- bump ui to v2.3.1 [\#3155](https://github.com/harness/drone/pull/3155) ([d1wilko](https://github.com/d1wilko)) -- 
provide ability to create/read/store card data in drone server [\#3149](https://github.com/harness/drone/pull/3149) ([eoinmcafee00](https://github.com/eoinmcafee00)) -- \(DRON-124\) adding new status endpoint [\#3143](https://github.com/harness/drone/pull/3143) ([tphoney](https://github.com/tphoney)) - -**Fixed bugs:** - -- fix a typo in readme [\#3150](https://github.com/harness/drone/pull/3150) ([nothatDinger](https://github.com/nothatDinger)) - -**Merged pull requests:** - -- release prep for v2.5.0 [\#3161](https://github.com/harness/drone/pull/3161) ([eoinmcafee00](https://github.com/eoinmcafee00)) - -## [v2.4.0](https://github.com/harness/drone/tree/v2.4.0) (2021-09-23) - -[Full Changelog](https://github.com/harness/drone/compare/v2.3.1...v2.4.0) - -**Implemented enhancements:** - -- bump ui version to v2.3.0 [\#3146](https://github.com/harness/drone/pull/3146) ([d1wilko](https://github.com/d1wilko)) -- verify if the application is buildable [\#3144](https://github.com/harness/drone/pull/3144) ([marko-gacesa](https://github.com/marko-gacesa)) - -**Fixed bugs:** - -- fixes build issue with bitbucket cloud [\#3147](https://github.com/harness/drone/pull/3147) ([eoinmcafee00](https://github.com/eoinmcafee00)) -- Fix stepLimit param in Starlark and Template OSS code [\#3141](https://github.com/harness/drone/pull/3141) ([phil-davis](https://github.com/phil-davis)) -- fix a broken link in readme [\#3140](https://github.com/harness/drone/pull/3140) ([empire](https://github.com/empire)) - -**Merged pull requests:** - -- \(maint\)-release 2.4.0 [\#3148](https://github.com/harness/drone/pull/3148) ([d1wilko](https://github.com/d1wilko)) -- Allow jsonnet imports in pipeline configuration [\#3105](https://github.com/harness/drone/pull/3105) ([hhamalai](https://github.com/hhamalai)) - -## [v2.3.1](https://github.com/harness/drone/tree/v2.3.1) (2021-09-09) - -[Full Changelog](https://github.com/harness/drone/compare/v2.3.0...v2.3.1) - -**Implemented enhancements:** - -- bump ui to v2.2.1 - https://github.com/drone/drone-ui/blob/main/CHANGELOG.md [\#3138](https://github.com/harness/drone/pull/3138) ([d1wilko](https://github.com/d1wilko)) - -**Merged pull requests:** - -- \(maint\)-release 2.3.1 [\#3139](https://github.com/harness/drone/pull/3139) ([d1wilko](https://github.com/d1wilko)) - -## [v2.3.0](https://github.com/harness/drone/tree/v2.3.0) (2021-09-09) - -[Full Changelog](https://github.com/harness/drone/compare/v2.2.0...v2.3.0) - -**Implemented enhancements:** - -- bump ui to v2.2.0 - https://github.com/drone/drone-ui/blob/main/CHANGELOG.md [\#3137](https://github.com/harness/drone/pull/3137) ([d1wilko](https://github.com/d1wilko)) -- Make starlark step limit configurable [\#3134](https://github.com/harness/drone/pull/3134) ([phil-davis](https://github.com/phil-davis)) -- \(feat\) drone h/a: wrapped scheduler's signal func with redis mutex [\#3130](https://github.com/harness/drone/pull/3130) ([marko-gacesa](https://github.com/marko-gacesa)) - -**Fixed bugs:** - -- \(fix\) trim http/s prefixes from config hostnames [\#3136](https://github.com/harness/drone/pull/3136) ([tphoney](https://github.com/tphoney)) -- \(fix\) remove unused jwt-go library [\#3129](https://github.com/harness/drone/pull/3129) ([tphoney](https://github.com/tphoney)) - -## [v2.2.0](https://github.com/harness/drone/tree/v2.2.0) (2021-09-01) - -[Full Changelog](https://github.com/harness/drone/compare/v2.1.0...v2.2.0) - -**Implemented enhancements:** - -- \(maint\) ui version v2.1.0 - 
https://github.com/drone/drone-ui/blob/main/CHANGELOG.md [\#3132](https://github.com/harness/drone/pull/3132) ([d1wilko](https://github.com/d1wilko)) -- Ability to cancel running builds, if a new commit is pushed [\#3126](https://github.com/harness/drone/pull/3126) ([eoinmcafee00](https://github.com/eoinmcafee00)) - -**Fixed bugs:** - -- fix templating reg expression to match if .drone.yml contains --- characters [\#3131](https://github.com/harness/drone/pull/3131) ([eoinmcafee00](https://github.com/eoinmcafee00)) -- add check on template extension type - throw error if invalid [\#3128](https://github.com/harness/drone/pull/3128) ([eoinmcafee00](https://github.com/eoinmcafee00)) - -**Merged pull requests:** - -- \(maint\)-release 2.2.0 [\#3133](https://github.com/harness/drone/pull/3133) ([eoinmcafee00](https://github.com/eoinmcafee00)) -- Rename files with camelCase name to use snake\_case convention [\#3127](https://github.com/harness/drone/pull/3127) ([marko-gacesa](https://github.com/marko-gacesa)) -- event-stream supports timeout [\#3125](https://github.com/harness/drone/pull/3125) ([zc2638](https://github.com/zc2638)) -- \(maint\) Readme update Add Contributor Section [\#3111](https://github.com/harness/drone/pull/3111) ([mrsantons](https://github.com/mrsantons)) - -## [v2.1.0](https://github.com/harness/drone/tree/v2.1.0) (2021-08-24) - -[Full Changelog](https://github.com/harness/drone/compare/v2.0.6...v2.1.0) - -**Implemented enhancements:** - -- \(maint\) ui version v2.0.1. - https://github.com/drone/drone-ui/blob/main/CHANGELOG.md [\#3123](https://github.com/harness/drone/pull/3123) ([d1wilko](https://github.com/d1wilko)) -- add support for yaml templates [\#3120](https://github.com/harness/drone/pull/3120) ([eoinmcafee00](https://github.com/eoinmcafee00)) - -**Fixed bugs:** - -- Update error message to forbidden if user membership doesn't exist on repo [\#3122](https://github.com/harness/drone/pull/3122) ([eoinmcafee00](https://github.com/eoinmcafee00)) -- update create template path to have namespace, instead of inside the payload [\#3121](https://github.com/harness/drone/pull/3121) ([eoinmcafee00](https://github.com/eoinmcafee00)) -- update dependency drone/go-scm to 1.15.2 to fix gitea build problem [\#3118](https://github.com/harness/drone/pull/3118) ([sesky4](https://github.com/sesky4)) - -**Merged pull requests:** - -- \(maint\) v2.1.0 release prep [\#3124](https://github.com/harness/drone/pull/3124) ([d1wilko](https://github.com/d1wilko)) - -## [v2.0.6](https://github.com/harness/drone/tree/v2.0.6) (2021-08-17) - -[Full Changelog](https://github.com/harness/drone/compare/v2.0.5...v2.0.6) - -**Merged pull requests:** - -- \(maint\) v2.0.6 release prep [\#3119](https://github.com/harness/drone/pull/3119) ([tphoney](https://github.com/tphoney)) - -## [v2.0.5](https://github.com/harness/drone/tree/v2.0.5) (2021-08-17) - -[Full Changelog](https://github.com/harness/drone/compare/v2.0.4...v2.0.5) - -**Implemented enhancements:** - -- bump ui version [\#3115](https://github.com/harness/drone/pull/3115) ([d1wilko](https://github.com/d1wilko)) -- bump ui version [\#3114](https://github.com/harness/drone/pull/3114) ([d1wilko](https://github.com/d1wilko)) -- Add support for nested data objects within templates [\#3110](https://github.com/harness/drone/pull/3110) ([eoinmcafee00](https://github.com/eoinmcafee00)) -- \(feat\) redis implementation for pub-sub, log streaming and canceller [\#3108](https://github.com/harness/drone/pull/3108) 
([marko-gacesa](https://github.com/marko-gacesa)) - -**Fixed bugs:** - -- fix issue where map changes order therefore test randomly fails [\#3112](https://github.com/harness/drone/pull/3112) ([eoinmcafee00](https://github.com/eoinmcafee00)) - -**Merged pull requests:** - -- release 2.0.5 [\#3117](https://github.com/harness/drone/pull/3117) ([eoinmcafee00](https://github.com/eoinmcafee00)) -- Update pull\_request\_template.md [\#3107](https://github.com/harness/drone/pull/3107) ([tphoney](https://github.com/tphoney)) - -All notable changes to this project will be documented in this file. - -The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), -and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). - -## Unreleased - -## [2.0.4] -### Fixed -- DRON-97 remove use of request animation frame to prevent high CPU on tab refocus events. - -## [2.0.3] -### Fixed -- DONE-91 handle extra slashes in url. [#3009](https://github.com/drone/drone/pull/3099). - -## [2.0.2] -### Added -- Merge remote-tracking branch 'origin/master' -- prevent repository list short circuit in UI -- remove deprecated steps from building file [#3097](https://github.com/drone/drone/pull/3097) -- adding depends_on, image and detached fields to step [#3072](https://github.com/drone/drone/pull/3072) -- Add ctx.build.debug boolean [#3082](https://github.com/drone/drone/pull/3082) -- Bump github.com/google/go-jsonnet to v0.17.0 [#3084](https://github.com/drone/drone/pull/3084) -- bump go-scm v1.15.1 [#3096](https://github.com/drone/drone/pull/3096) -- bitbucket server build issue [#3092](https://github.com/drone/drone/pull/3092) -- update scm version [#3091](https://github.com/drone/drone/pull/3091) -- Limit graceful shutdown duration [#3093](https://github.com/drone/drone/pull/3093) -- bump user interface -- bump ui version -- ignore skip directive for promote and rollback events -- new feature: maximum open DB connections is configurable[#3089](https://github.com/drone/drone/pull/3089) -- jsonnet additional parameters [#3087](https://github.com/drone/drone/pull/3087) -- hide login button if user already authenticated -- new feature: configuration templates [#3081](https://github.com/drone/drone/pull/3081) - -### Fixed -- various typos [#3088](https://github.com/drone/drone/pull/3088) -- handle error properly if template doesn't exist [#3095](https://github.com/drone/drone/pull/3093) -- oss build issue [#3086](https://github.com/drone/drone/pull/3086) -- graceful shutdown [#3083](https://github.com/drone/drone/pull/3083) - -## [2.0.1] -### Added -- support for configuring the internal yaml cache size. - -## [2.0.0] -### Added -- feature flags for mixed-mode database encryption. - -### Changed -- user-interface re-design - -### Breaking -- removed deprecated kubernetes integration in favor of official kubernetes runner. -- removed deprecated nomad integration in favor of official nomad runner. - -## [1.10.1] -### Added -- support for repository-level concurrency limits. -- support for gitlab and github internal visibility on initial sync. - -### Fixed -- create machine user with a custom API token. - -## [1.10.0] -### Added -- support for starlark scripts in core. -- support for executing pipelines in debug mode. - -## [1.9.2] -### Added -- update go-scm dependency to fix - -## [1.9.1] -### Added -- support for increasing the http request timeout for extensions. [#2998](https://github.com/drone/drone/pull/2998). 
-- support for skipping a pipeline if the validation extension returns an ErrSkip. -- support for blocking a pipeline if the validation extension returns an ErrBlock. - -### Fixed -- rollback endpoint should be available to users with write permission. -- retrying a build should re-use custom build parameters from parent build. - -## [1.9.0] - 2020-07-12 -### Added -- ui support for deployment list and summary. -- ui support for promoting and rolling back builds. -- feature flag to use static secret when signing webhooks, from @chiraggadasc. - -### Fixed -- ui branch list improperly capped. - -### Changed -- upgrade drone/envsubst dependency -- upgrade drone/go-scm dependency - -## [1.8.1] - 2020-06-23 -### Fixed -- support for gitea api pagination, repository sync hanging. - -## [1.8.0] - 2020-06-10 -### Added -- re-assigned repository ownership when deactivating a user. -- re-assigned repository ownership when deleting a user. -- de-activate a repository when deleting a user if re-assignment fails. -- de-activate a repository when deactivating a user if re-assignment fails. -- routine to cleanup builds stuck in a pending state. -- routine to cleanup builds stuck in a running state. -- private mode setting requires authentication to view public repositories. - -### Fixed -- canceling a build emits a sql.ErrNoRows error. -- custom token is ignored when creating a user account via the API. -- machine accounts with sufficient permissions can create builds via the API. - -### Changed -- upgraded Go toolchain to version 1.14.4. - -## [1.7.0] - 2020-03-27 -### Added -- endpoint to display the latest build by branch. [#2940](https://github.com/drone/drone/pull/2940). -- endpoint to display the latest build by pull request. [#2940](https://github.com/drone/drone/pull/2940). -- endpoint to display the latest build by environment. [#2940](https://github.com/drone/drone/pull/2940). -- endpoint to delete a branch from the index. [#2940](https://github.com/drone/drone/pull/2940). -- endpoint to delete a pull request from the index. [#2940](https://github.com/drone/drone/pull/2940). -- endpoint to delete an environment from the index. [#2940](https://github.com/drone/drone/pull/2940). -- page to view the latest build per branch. - -### Fixed -- sync routine not executing asynchronously, being cancelled by http context. -- sync routine should ignore gitlab subrepositories -- convert deploy events in 0.8 yaml to promote events. -- do not execute cron job for disabled repositories. [#2931](https://github.com/drone/drone/issues/2931). -- remove trailing slash from gitea url to prevent oauth2 token refresh errors, by [@cmj0121](https://github.com/cmj0121). [#2920](https://github.com/drone/drone/issues/2920). -- disable font ligatures in build log output. [drone/drone-ui#322](https://github.com/drone/drone-ui/pull/322). -- missing am/pm in timestamps - -## [1.6.5] - 2020-01-29 -### Changed -- update version of go-scm -- update alpine version in docker images -- use ticker for cron jobs for more accurate timing - -## [1.6.4] - 2019-12-30 -### Added -- optionally enable pprof endpoints for profiling, by [@bradrydzewski](https://github.com/bradrydzewski). - -## [1.6.3] - 2019-12-10 -### Fixed -- disable caching generated yaml files by commit sha, by [@bradrydzewski](https://github.com/bradrydzewski). - -### Added -- support for bitbucket skipverify, by [@toni-moreno](https://github.com/toni-moreno). -- support for gitea skipverify, by [@toni-moreno](https://github.com/toni-moreno). 
- -## [1.6.2] - 2019-11-08 -### Added -- support for loading license contents from env, by [@bradrydzewski](https://github.com/bradrydzewski). - -### Fixed -- regression not converting legacy pipeline when using new runners, by [@bradrydzewski](https://github.com/bradrydzewski). - -## [1.6.1] - 2019-10-17 -### Added -- updated autocert library in support of acme v2 protocol, by [@bradrydzewski](https://github.com/bradrydzewski). - -### Fixed -- fixed nil pointer when manually adding user from api, by [@bradrydzewski](https://github.com/bradrydzewski). - -## [1.6.0] - 2019-10-04 -### Added -- added nsswitch to docker images -- option to auto-cancel pending builds when newer build enqueued, by [@bradrydzewski](https://github.com/bradrydzewski). [#1980](https://github.com/drone/drone/issues/1980). -- endpoint to list all repositories in the database, by [@bradrydzewski](https://github.com/bradrydzewski). [#2785](https://github.com/drone/drone/issues/2785). - -### Fixed -- improve sync to handle duplicate repository names with different unique identifiers, by [@bradrydzewski](https://github.com/bradrydzewski). [#2658](https://github.com/drone/drone/issues/2658). _You can revert to the previous sync logic with DRONE_DATABASE_LEGACY_BATCH=true_. - -## [1.5.1] - 2019-09-30 -### Added -- allow organization admins access to organization secret endpoints, by [@bradrydzewski](https://github.com/bradrydzewski). [#2838](https://github.com/drone/drone/issues/2838). - -### Fixed -- fix invalid deep links in UI for github enterprise, by [@bradrydzewski](https://github.com/bradrydzewski). -- ensure correct casing when manually adding user, by [@bradrydzewski](https://github.com/bradrydzewski). [#2766](https://github.com/drone/drone/issues/2766). - -## [1.5.0] - 2019-09-28 -### Added -- endpoint to execute a cron pipeline on-demand, by [@bradrydzewski](https://github.com/bradrydzewski). [#2781](https://github.com/drone/drone/issues/2781). -- endpoint to list builds by branch, by [@bradrydzewski](https://github.com/bradrydzewski). [#1495](https://github.com/drone/drone/issues/1495). -- ignore skip comments when cron event, by [@bradrydzewski](https://github.com/bradrydzewski). [#2835](https://github.com/drone/drone/issues/2835). -- support for admission extensions, by [@bradrydzewski](https://github.com/bradrydzewski). [#2043](https://github.com/drone/drone/issues/2043). -- endpoint to provide link to git resources, by [@bradrydzewski](https://github.com/bradrydzewski). [#2843](https://github.com/drone/drone/issues/2843). -- improve bitbucket status display text on new pull request screen, by [@bradrydzewski](https://github.com/bradrydzewski). - -### Fixed -- missing cron job name in user interface, by [@bradrydzewski](https://github.com/bradrydzewski). -- log lines not properly wrapping in user interface, by [@bradrydzewski](https://github.com/bradrydzewski). -[#309](https://github.com/drone/drone-ui/issues/309). - -### Breaking -- the server now runs in multi-machine mode by default. In order to run the server in single-machine mode (agents disabled) you must set DRONE_AGENTS_DISABLED=true. - -## [1.4.0] - 2019-09-12 -### Added -- upgrade to Go 1.13 to resolve arm segfault, by [@KN4CK3R](https://github.com/KN4CK3R). [#2823](https://github.com/drone/drone/issues/2823). -- configure default visibility, by [@JordanSussman](https://github.com/JordanSussman). [#2824](https://github.com/drone/drone/issues/2824). -- configure default trusted flag, by [@vyckou](https://github.com/vyckou). 
-- support for validation plugins, by [@bradrydzewski](https://github.com/bradrydzewski). [#2266](https://github.com/drone/drone/issues/2266). -- support for conversion plugins, by [@bradrydzewski](https://github.com/bradrydzewski). -- support for cron event type, by [@bradrydzewski](https://github.com/bradrydzewski). [#2705](https://github.com/drone/drone/issues/2705). -- support for rollback event, by [@bradrydzewski](https://github.com/bradrydzewski). [#2695](https://github.com/drone/drone/issues/2695). -- support for lets encrypt email, by [@bradrydzewski](https://github.com/bradrydzewski). [#2505](https://github.com/drone/drone/issues/2505). - -### Removed -- Support for basic auth as an option for Gitea, by [@techknowlogick](https://giteahub.com/techknowlogick). [#2721](https://github.com/drone/drone/issues/2721) - -### Fixed -- copy cron job name when restarting a cron job, by [@bradrydzewski](https://github.com/bradrydzewski). [#2760](https://github.com/drone/drone/issues/2760). - -## [1.3.1] - 2019-08-26 -### Added -- support for the GitHub deployment status API, by [@bradrydzewski](https://github.com/bradrydzewski). - -## [1.3.0] - 2019-08-20 -### Added -- support for storing logs in Azure Cloud Storage, by [@Lucretius](https://github.com/Lucretius). [#2788](https://github.com/drone/drone/pull/2788) -- support for windows server 1903, by [@bradrydzewski](https://github.com/bradrydzewski). -- button to view the full log file, by [@dramich](https://github.com/dramich). [drone/drone-ui#287](https://github.com/drone/drone-ui/pull/287). - -### Fixed -- read gogs sha from webhook, by [@marcotuna](https://github.com/marcotuna). -- create bind volume on host if not exists, by [@bradrydzewski](https://github.com/bradrydzewski). [#2725](https://github.com/drone/drone/issues/2725). -- preserve whitespace in build logs, by [@geek1011](https://github.com/geek1011). [drone/drone-ui#294](https://github.com/drone/drone-ui/pull/294). -- enable log file download on firefox, by [@bobmanary](https://github.com/bobmanary). [drone/drone-ui#303](https://github.com/drone/drone-ui/pull/303) - -### Security -- upgraded to Go 1.12.9 due to CVE-2019-9512 and CVE-2019-9514 - -## [1.2.3] - 2019-07-30 -### Added - -- disable github status for cron jobs -- support for action in conditionals, by [@bradrydzewski](https://github.com/bradrydzewski). [#2685](https://github.com/drone/drone/issues/2685). - -### Fixed - -- improve cancel logic for dangling stages, by [@bradrydzewski](https://github.com/bradrydzewski). -- improve error when kubernetes malforms the port configuration, by [@bradrydzewski](https://github.com/bradrydzewski). [#2742](https://github.com/drone/drone/issues/2742). -- copy parameters from parent build when promoting, by [@bradrydzewski](https://github.com/bradrydzewski). [#2748](https://github.com/drone/drone/issues/2748). - -## [1.2.2] - 2019-07-29 -### Added - -- support for legacy environment variables -- support for legacy workspace based on repository name -- support for github deployment hooks -- provide base sha for github pull requests -- option to filter webhooks by event and type -- upgrade drone-yaml to v1.2.2 -- upgrade drone-runtime to v1.0.7 - -### Fixed - -- error when manually creating an empty user, by [@bradrydzewski](https://github.com/bradrydzewski). [#2738](https://github.com/drone/drone/issues/2738). - -## [1.2.1] - 2019-06-11 -### Added - -- support for legacy tokens to ease upgrade path, by [@bradrydzewski](https://github.com/bradrydzewski). 
[#2713](https://github.com/drone/drone/issues/2713). -- include repository name and id in batch update error message, by [@bradrydzewski](https://github.com/bradrydzewski). - -### Fixed - -- fix inconsistent base64 encoding and decoding of encrypted secrets, by [@bradrydzewski](https://github.com/bradrydzewski). -- update drone-yaml to version 1.1.2 for improved 0.8 to 1.0 yaml marshal escaping. -- update drone-yaml to version 1.1.3 for improved 0.8 to 1.0 workspace conversion. - -## [1.2.0] - 2019-05-30 -### Added - -- endpoint to trigger new build for default branch, by [@bradrydzewski](https://github.com/bradrydzewski). [#2679](https://github.com/drone/drone/issues/2679). -- endpoint to trigger new build for branch, by [@bradrydzewski](https://github.com/bradrydzewski). [#2679](https://github.com/drone/drone/issues/2679). -- endpoint to trigger new build for branch and sha, by [@bradrydzewski](https://github.com/bradrydzewski). [#2679](https://github.com/drone/drone/issues/2679). -- enable optional prometheus metrics guest access, by [@janberktold](https://github.com/janberktold) -- fallback to database when logs not found in s3, by [@bradrydzewski](https://github.com/bradrydzewski). [#2689](https://github.com/drone/drone/issues/2689). -- support for custom stage definitions and runners, by [@bradrydzewski](https://github.com/bradrydzewski). [#2680](https://github.com/drone/drone/issues/2680). -- update drone-yaml to version 1.1.0 - -### Fixed - -- retrieve latest build by branch, by [@tboerger](https://github.com/tboerger). -- copy the fork value when restarting a build, by [@bradrydzewski](https://github.com/bradrydzewski). [#2708](https://github.com/drone/drone/issues/2708). -- make healthz available without redirect, by [@bradrydzewski](https://github.com/bradrydzewski). [#2706](https://github.com/drone/drone/issues/2706). - -## [1.1.0] - 2019-04-23 -### Added - -- specify a user for the pipeline step, by [@bradrydzewski](https://github.com/bradrydzewski). [#2651](https://github.com/drone/drone/issues/2651). -- support for Gitea oauth2, by [@techknowlogick](https://github.com/techknowlogick). [#2622](https://github.com/drone/drone/pull/2622). -- ping the docker daemon before starting the agent, by [@bradrydzewski](https://github.com/bradrydzewski). [#2495](https://github.com/drone/drone/issues/2495). -- support for Cron job name in Yaml trigger block, by [@bradrydzewski](https://github.com/bradrydzewski). [#2628](https://github.com/drone/drone/issues/2628). -- support for Cron job name in Yaml when block, by [@bradrydzewski](https://github.com/bradrydzewski). [#2628](https://github.com/drone/drone/issues/2628). -- sqlite username column changed to case-insensitive, by [@bradrydzewski](https://github.com/bradrydzewski). -- endpoint to purge repository from database, by [@bradrydzewski](https://github.com/bradrydzewski). -- support for per-organization secrets, by [@bradrydzewski](https://github.com/bradrydzewski). -- include system metadata in global webhooks, by [@bradrydzewski](https://github.com/bradrydzewski). -- ability to customize cookie secure flag, by [@bradrydzewski](https://github.com/bradrydzewski). -- update drone-yaml from version 1.0.6 to 1.0.8. -- update drone-runtime from version 1.0.4 to 1.0.6. -- update go-scm from version 1.0.3 to 1.0.4. - -### Fixed - -- fixed error in mysql table creation syntax, from [@xuyang2](https://github.com/xuyang2). [#2677](https://github.com/drone/drone/pull/2677). 
-- fixed stuck builds when upstream dependency is skipped, from [@bradrydzewski](https://github.com/bradrydzewski). [#2634](https://github.com/drone/drone/issues/2634). -- fixed issue running steps with dependencies on failure, from [@bradrydzewski](https://github.com/bradrydzewski). [#2667](https://github.com/drone/drone/issues/2667). - -## [1.0.1] - 2019-04-10 -### Added - -- pass stage environment variables to pipeline steps, by [@bradrydzewski](https://github.com/bradrydzewski). -- update go-scm to version 1.3.0, by [@bradrydzewski](https://github.com/bradrydzewski). -- update drone-runtime to version to 1.0.4, by [@bradrydzewski](https://github.com/bradrydzewski). -- ping docker daemon before agent starts to ensure connectivity, by [@bradrydzewski](https://github.com/bradrydzewski). - - -\* *This Changelog was automatically generated by [github_changelog_generator](https://github.com/github-changelog-generator/github-changelog-generator)* diff --git a/HISTORY.md b/HISTORY.md deleted file mode 100644 index 8f2bf2af83..0000000000 --- a/HISTORY.md +++ /dev/null @@ -1,326 +0,0 @@ -# Changelog -All notable changes to this project will be documented in this file. - -The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), -and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). - -## Unreleased - -## [2.0.4] -### Fixed -- DRON-97 remove use of request animation frame to prevent high CPU on tab refocus events. - -## [2.0.3] -### Fixed -- DONE-91 handle extra slashes in url. [#3009](https://github.com/drone/drone/pull/3099). - -## [2.0.2] -### Added -- Merge remote-tracking branch 'origin/master' -- prevent repository list short circuit in UI -- remove deprecated steps from building file [#3097](https://github.com/drone/drone/pull/3097) -- adding depends_on, image and detached fields to step [#3072](https://github.com/drone/drone/pull/3072) -- Add ctx.build.debug boolean [#3082](https://github.com/drone/drone/pull/3082) -- Bump github.com/google/go-jsonnet to v0.17.0 [#3084](https://github.com/drone/drone/pull/3084) -- bump go-scm v1.15.1 [#3096](https://github.com/drone/drone/pull/3096) -- bitbucket server build issue [#3092](https://github.com/drone/drone/pull/3092) -- update scm version [#3091](https://github.com/drone/drone/pull/3091) -- Limit graceful shutdown duration [#3093](https://github.com/drone/drone/pull/3093) -- bump user interface -- bump ui version -- ignore skip directive for promote and rollback events -- new feature: maximum open DB connections is configurable[#3089](https://github.com/drone/drone/pull/3089) -- jsonnet additional parameters [#3087](https://github.com/drone/drone/pull/3087) -- hide login button if user already authenticated -- new feature: configuration templates [#3081](https://github.com/drone/drone/pull/3081) - -### Fixed -- various typos [#3088](https://github.com/drone/drone/pull/3088) -- handle error properly if template doesn't exist [#3095](https://github.com/drone/drone/pull/3093) -- oss build issue [#3086](https://github.com/drone/drone/pull/3086) -- graceful shutdown [#3083](https://github.com/drone/drone/pull/3083) - -## [2.0.1] -### Added -- support for configuring the internal yaml cache size. - -## [2.0.0] -### Added -- feature flags for mixed-mode database encryption. - -### Changed -- user-interface re-design - -### Breaking -- removed deprecated kubernetes integration in favor of official kubernetes runner. -- removed deprecated nomad integration in favor of official nomad runner. 
- -## [1.10.1] -### Added -- support for repository-level concurrency limits. -- support for gitlab and github internal visibility on initial sync. - -### Fixed -- create machine user with a custom API token. - -## [1.10.0] -### Added -- support for starlark scripts in core. -- support for executing pipelines in debug mode. - -## [1.9.2] -### Added -- update go-scm dependency to fix - -## [1.9.1] -### Added -- support for increasing the http request timeout for extensions. [#2998](https://github.com/drone/drone/pull/2998). -- support for skipping a pipeline if the validation extension returns an ErrSkip. -- support for blocking a pipeline if the validation extension returns an ErrBlock. - -### Fixed -- rollback endpoint should be available to users with write permission. -- retrying a build should re-use custom build parameters from parent build. - -## [1.9.0] - 2020-07-12 -### Added -- ui support for deployment list and summary. -- ui support for promoting and rolling back builds. -- feature flag to use static secret when signing webhooks, from @chiraggadasc. - -### Fixed -- ui branch list improperly capped. - -### Changed -- upgrade drone/envsubst dependency -- upgrade drone/go-scm dependency - -## [1.8.1] - 2020-06-23 -### Fixed -- support for gitea api pagination, repository sync hanging. - -## [1.8.0] - 2020-06-10 -### Added -- re-assigned repository ownership when deactivating a user. -- re-assigned repository ownership when deleting a user. -- de-activate a repository when deleting a user if re-assignment fails. -- de-activate a repository when deactivating a user if re-assignment fails. -- routine to cleanup builds stuck in a pending state. -- routine to cleanup builds stuck in a running state. -- private mode setting requires authentication to view public repositories. - -### Fixed -- canceling a build emits a sql.ErrNoRows error. -- custom token is ignored when creating a user account via the API. -- machine accounts with sufficient permissions can create builds via the API. - -### Changed -- upgraded Go toolchain to version 1.14.4. - -## [1.7.0] - 2020-03-27 -### Added -- endpoint to display the latest build by branch. [#2940](https://github.com/drone/drone/pull/2940). -- endpoint to display the latest build by pull request. [#2940](https://github.com/drone/drone/pull/2940). -- endpoint to display the latest build by environment. [#2940](https://github.com/drone/drone/pull/2940). -- endpoint to delete a branch from the index. [#2940](https://github.com/drone/drone/pull/2940). -- endpoint to delete a pull request from the index. [#2940](https://github.com/drone/drone/pull/2940). -- endpoint to delete an environment from the index. [#2940](https://github.com/drone/drone/pull/2940). -- page to view the latest build per branch. - -### Fixed -- sync routine not executing asynchronously, being cancelled by http context. -- sync routine should ignore gitlab subrepositories -- convert deploy events in 0.8 yaml to promote events. -- do not execute cron job for disabled repositories. [#2931](https://github.com/drone/drone/issues/2931). -- remove trailing slash from gitea url to prevent oauth2 token refresh errors, by [@cmj0121](https://github.com/cmj0121). [#2920](https://github.com/drone/drone/issues/2920). -- disable font ligatures in build log output. [drone/drone-ui#322](https://github.com/drone/drone-ui/pull/322). 
-- missing am/pm in timestamps - -## [1.6.5] - 2020-01-29 -### Changed -- update version of go-scm -- update alpine version in docker images -- use ticker for cron jobs for more accurate timing - -## [1.6.4] - 2019-12-30 -### Added -- optionally enable pprof endpoints for profiling, by [@bradrydzewski](https://github.com/bradrydzewski). - -## [1.6.3] - 2019-12-10 -### Fixed -- disable caching generated yaml files by commit sha, by [@bradrydzewski](https://github.com/bradrydzewski). - -### Added -- support for bitbucket skipverify, by [@toni-moreno](https://github.com/toni-moreno). -- support for gitea skipverify, by [@toni-moreno](https://github.com/toni-moreno). - -## [1.6.2] - 2019-11-08 -### Added -- support for loading license contents from env, by [@bradrydzewski](https://github.com/bradrydzewski). - -### Fixed -- regression not converting legacy pipeline when using new runners, by [@bradrydzewski](https://github.com/bradrydzewski). - -## [1.6.1] - 2019-10-17 -### Added -- updated autocert library in support of acme v2 protocol, by [@bradrydzewski](https://github.com/bradrydzewski). - -### Fixed -- fixed nil pointer when manually adding user from api, by [@bradrydzewski](https://github.com/bradrydzewski). - -## [1.6.0] - 2019-10-04 -### Added -- added nsswitch to docker images -- option to auto-cancel pending builds when newer build enqueued, by [@bradrydzewski](https://github.com/bradrydzewski). [#1980](https://github.com/drone/drone/issues/1980). -- endpoint to list all repositories in the database, by [@bradrydzewski](https://github.com/bradrydzewski). [#2785](https://github.com/drone/drone/issues/2785). - -### Fixed -- improve sync to handle duplicate repository names with different unique identifiers, by [@bradrydzewski](https://github.com/bradrydzewski). [#2658](https://github.com/drone/drone/issues/2658). _You can revert to the previous sync logic with DRONE_DATABASE_LEGACY_BATCH=true_. - -## [1.5.1] - 2019-09-30 -### Added -- allow organization admins access to organization secret endpoints, by [@bradrydzewski](https://github.com/bradrydzewski). [#2838](https://github.com/drone/drone/issues/2838). - -### Fixed -- fix invalid deep links in UI for github enterprise, by [@bradrydzewski](https://github.com/bradrydzewski). -- ensure correct casing when manually adding user, by [@bradrydzewski](https://github.com/bradrydzewski). [#2766](https://github.com/drone/drone/issues/2766). - -## [1.5.0] - 2019-09-28 -### Added -- endpoint to execute a cron pipeline on-demand, by [@bradrydzewski](https://github.com/bradrydzewski). [#2781](https://github.com/drone/drone/issues/2781). -- endpoint to list builds by branch, by [@bradrydzewski](https://github.com/bradrydzewski). [#1495](https://github.com/drone/drone/issues/1495). -- ignore skip comments when cron event, by [@bradrydzewski](https://github.com/bradrydzewski). [#2835](https://github.com/drone/drone/issues/2835). -- support for admission extensions, by [@bradrydzewski](https://github.com/bradrydzewski). [#2043](https://github.com/drone/drone/issues/2043). -- endpoint to provide link to git resources, by [@bradrydzewski](https://github.com/bradrydzewski). [#2843](https://github.com/drone/drone/issues/2843). -- improve bitbucket status display text on new pull request screen, by [@bradrydzewski](https://github.com/bradrydzewski). - -### Fixed -- missing cron job name in user interface, by [@bradrydzewski](https://github.com/bradrydzewski). 
-- log lines not properly wrapping in user interface, by [@bradrydzewski](https://github.com/bradrydzewski). -[#309](https://github.com/drone/drone-ui/issues/309). - -### Breaking -- the server now runs in multi-machine mode by default. In order to run the server in single-machine mode (agents disabled) you must set DRONE_AGENTS_DISABLED=true. - -## [1.4.0] - 2019-09-12 -### Added -- upgrade to Go 1.13 to resolve arm segfault, by [@KN4CK3R](https://github.com/KN4CK3R). [#2823](https://github.com/drone/drone/issues/2823). -- configure default visibility, by [@JordanSussman](https://github.com/JordanSussman). [#2824](https://github.com/drone/drone/issues/2824). -- configure default trusted flag, by [@vyckou](https://github.com/vyckou). -- support for validation plugins, by [@bradrydzewski](https://github.com/bradrydzewski). [#2266](https://github.com/drone/drone/issues/2266). -- support for conversion plugins, by [@bradrydzewski](https://github.com/bradrydzewski). -- support for cron event type, by [@bradrydzewski](https://github.com/bradrydzewski). [#2705](https://github.com/drone/drone/issues/2705). -- support for rollback event, by [@bradrydzewski](https://github.com/bradrydzewski). [#2695](https://github.com/drone/drone/issues/2695). -- support for lets encrypt email, by [@bradrydzewski](https://github.com/bradrydzewski). [#2505](https://github.com/drone/drone/issues/2505). - -### Removed -- Support for basic auth as an option for Gitea, by [@techknowlogick](https://github.com/techknowlogick). [#2721](https://github.com/drone/drone/issues/2721) - -### Fixed -- copy cron job name when restarting a cron job, by [@bradrydzewski](https://github.com/bradrydzewski). [#2760](https://github.com/drone/drone/issues/2760). - -## [1.3.1] - 2019-08-26 -### Added -- support for the GitHub deployment status API, by [@bradrydzewski](https://github.com/bradrydzewski). - -## [1.3.0] - 2019-08-20 -### Added -- support for storing logs in Azure Cloud Storage, by [@Lucretius](https://github.com/Lucretius). [#2788](https://github.com/drone/drone/pull/2788) -- support for windows server 1903, by [@bradrydzewski](https://github.com/bradrydzewski). -- button to view the full log file, by [@dramich](https://github.com/dramich). [drone/drone-ui#287](https://github.com/drone/drone-ui/pull/287). - -### Fixed -- read gogs sha from webhook, by [@marcotuna](https://github.com/marcotuna). -- create bind volume on host if not exists, by [@bradrydzewski](https://github.com/bradrydzewski). [#2725](https://github.com/drone/drone/issues/2725). -- preserve whitespace in build logs, by [@geek1011](https://github.com/geek1011). [drone/drone-ui#294](https://github.com/drone/drone-ui/pull/294). -- enable log file download on firefox, by [@bobmanary](https://github.com/bobmanary). [drone/drone-ui#303](https://github.com/drone/drone-ui/pull/303) - -### Security -- upgraded to Go 1.12.9 due to CVE-2019-9512 and CVE-2019-9514 - -## [1.2.3] - 2019-07-30 -### Added - -- disable github status for cron jobs -- support for action in conditionals, by [@bradrydzewski](https://github.com/bradrydzewski). [#2685](https://github.com/drone/drone/issues/2685). - -### Fixed - -- improve cancel logic for dangling stages, by [@bradrydzewski](https://github.com/bradrydzewski). -- improve error when kubernetes malforms the port configuration, by [@bradrydzewski](https://github.com/bradrydzewski). [#2742](https://github.com/drone/drone/issues/2742).
-- copy parameters from parent build when promoting, by [@bradrydzewski](https://github.com/bradrydzewski). [#2748](https://github.com/drone/drone/issues/2748). - -## [1.2.2] - 2019-07-29 -### Added - -- support for legacy environment variables -- support for legacy workspace based on repository name -- support for github deployment hooks -- provide base sha for github pull requests -- option to filter webhooks by event and type -- upgrade drone-yaml to v1.2.2 -- upgrade drone-runtime to v1.0.7 - -### Fixed - -- error when manually creating an empty user, by [@bradrydzewski](https://github.com/bradrydzewski). [#2738](https://github.com/drone/drone/issues/2738). - -## [1.2.1] - 2019-06-11 -### Added - -- support for legacy tokens to ease upgrade path, by [@bradrydzewski](https://github.com/bradrydzewski). [#2713](https://github.com/drone/drone/issues/2713). -- include repository name and id in batch update error message, by [@bradrydzewski](https://github.com/bradrydzewski). - -### Fixed - -- fix inconsistent base64 encoding and decoding of encrypted secrets, by [@bradrydzewski](https://github.com/bradrydzewski). -- update drone-yaml to version 1.1.2 for improved 0.8 to 1.0 yaml marshal escaping. -- update drone-yaml to version 1.1.3 for improved 0.8 to 1.0 workspace conversion. - -## [1.2.0] - 2019-05-30 -### Added - -- endpoint to trigger new build for default branch, by [@bradrydzewski](https://github.com/bradrydzewski). [#2679](https://github.com/drone/drone/issues/2679). -- endpoint to trigger new build for branch, by [@bradrydzewski](https://github.com/bradrydzewski). [#2679](https://github.com/drone/drone/issues/2679). -- endpoint to trigger new build for branch and sha, by [@bradrydzewski](https://github.com/bradrydzewski). [#2679](https://github.com/drone/drone/issues/2679). -- enable optional prometheus metrics guest access, by [@janberktold](https://github.com/janberktold) -- fallback to database when logs not found in s3, by [@bradrydzewski](https://github.com/bradrydzewski). [#2689](https://github.com/drone/drone/issues/2689). -- support for custom stage definitions and runners, by [@bradrydzewski](https://github.com/bradrydzewski). [#2680](https://github.com/drone/drone/issues/2680). -- update drone-yaml to version 1.1.0 - -### Fixed - -- retrieve latest build by branch, by [@tboerger](https://github.com/tboerger). -- copy the fork value when restarting a build, by [@bradrydzewski](https://github.com/bradrydzewski). [#2708](https://github.com/drone/drone/issues/2708). -- make healthz available without redirect, by [@bradrydzewski](https://github.com/bradrydzewski). [#2706](https://github.com/drone/drone/issues/2706). - -## [1.1.0] - 2019-04-23 -### Added - -- specify a user for the pipeline step, by [@bradrydzewski](https://github.com/bradrydzewski). [#2651](https://github.com/drone/drone/issues/2651). -- support for Gitea oauth2, by [@techknowlogick](https://github.com/techknowlogick). [#2622](https://github.com/drone/drone/pull/2622). -- ping the docker daemon before starting the agent, by [@bradrydzewski](https://github.com/bradrydzewski). [#2495](https://github.com/drone/drone/issues/2495). -- support for Cron job name in Yaml trigger block, by [@bradrydzewski](https://github.com/bradrydzewski). [#2628](https://github.com/drone/drone/issues/2628). -- support for Cron job name in Yaml when block, by [@bradrydzewski](https://github.com/bradrydzewski). [#2628](https://github.com/drone/drone/issues/2628). 
-- sqlite username column changed to case-insensitive, by [@bradrydzewski](https://github.com/bradrydzewski). -- endpoint to purge repository from database, by [@bradrydzewski](https://github.com/bradrydzewski). -- support for per-organization secrets, by [@bradrydzewski](https://github.com/bradrydzewski). -- include system metadata in global webhooks, by [@bradrydzewski](https://github.com/bradrydzewski). -- ability to customize cookie secure flag, by [@bradrydzewski](https://github.com/bradrydzewski). -- update drone-yaml from version 1.0.6 to 1.0.8. -- update drone-runtime from version 1.0.4 to 1.0.6. -- update go-scm from version 1.0.3 to 1.0.4. - -### Fixed - -- fixed error in mysql table creation syntax, from [@xuyang2](https://github.com/xuyang2). [#2677](https://github.com/drone/drone/pull/2677). -- fixed stuck builds when upstream dependency is skipped, from [@bradrydzewski](https://github.com/bradrydzewski). [#2634](https://github.com/drone/drone/issues/2634). -- fixed issue running steps with dependencies on failure, from [@bradrydzewski](https://github.com/bradrydzewski). [#2667](https://github.com/drone/drone/issues/2667). - -## [1.0.1] - 2019-04-10 -### Added - -- pass stage environment variables to pipeline steps, by [@bradrydzewski](https://github.com/bradrydzewski). -- update go-scm to version 1.3.0, by [@bradrydzewski](https://github.com/bradrydzewski). -- update drone-runtime to version to 1.0.4, by [@bradrydzewski](https://github.com/bradrydzewski). -- ping docker daemon before agent starts to ensure connectivity, by [@bradrydzewski](https://github.com/bradrydzewski). diff --git a/LICENSE b/LICENSE deleted file mode 100644 index ce08ee8a65..0000000000 --- a/LICENSE +++ /dev/null @@ -1,89 +0,0 @@ -Copyright 2019 Drone.IO, Inc. - -The Drone Community Edition is licensed under the Apache License, -Version 2.0 (the "Apache License"). You may obtain a copy of the -Apache License at - - http://www.apache.org/licenses/LICENSE-2.0 - -The Drone Enterprise Edition is licensed under the Drone -Non-Commercial License (the "Non-Commercial License"). A copy of -the Non-Commercial License is provided below. - -The source files in this repository have a header indicating -which license they are under. The BUILDING_OSS file provides -instructions for creating the Community Edition distribution -subject to the terms of the Apache License. - ------------------------------------------------------------------ - -Drone Non-Commercial License - -Contributor: Drone.IO, Inc. - -Source Code: https://github.com/harness/drone - -This license lets you use and share this software for free, -with a trial-length time limit on commercial use. Specifically: - -If you follow the rules below, you may do everything with this -software that would otherwise infringe either the contributor's -copyright in it, any patent claim the contributor can license -that covers this software as of the contributor's latest -contribution, or both. - -1. You must limit use of this software in any manner primarily - intended for or directed toward commercial advantage or - private monetary compensation to a trial period of 32 - consecutive calendar days. This limit does not apply to use in - developing feedback, modifications, or extensions that you - contribute back to those giving this license. - -2. Ensure everyone who gets a copy of this software from you, in - source code or any other form, gets the text of this license - and the contributor and source code lines above. - -3. 
Do not make any legal claim against anyone for infringing any - patent claim they would infringe by using this software alone, - accusing this software, with or without changes, alone or as - part of a larger application. - -You are excused for unknowingly breaking rule 1 if you stop -doing anything requiring this license within 30 days of -learning you broke the rule. - -**This software comes as is, without any warranty at all. As far -as the law allows, the contributor will not be liable for any -damages related to this software or this license, for any kind of -legal claim.** - ------------------------------------------------------------------ - -Waiver: Individual and Small Business - -Contributor waives the terms of rule 1 for companies meeting all -the following criteria, counting all subsidiaries and affiliated -entities as one: - -1. worldwide annual gross revenue under $5 million US dollars, - per generally accepted accounting principles - -2. less than $5 million US dollars in all-time aggregate debt and - equity financing - -Contributor will not revoke this waiver, but may change terms for -future versions of the software. - ------------------------------------------------------------------ - -Waiver: Low Usage - -Contributor waives the terms of rule 1 for companies meeting all -the following criteria, counting all subsidiaries and affiliated -entities as one: - -1. less than 5,000 total pipelines executed using this software - in the immediately preceding, year-long period - -Contributor will not revoke this waiver, but may change terms for -future versions of the software. diff --git a/NOTICE b/NOTICE deleted file mode 100644 index 748c8350ce..0000000000 --- a/NOTICE +++ /dev/null @@ -1,14 +0,0 @@ -Drone -Copyright 2019 Drone.IO, Inc - -This product includes software developed at Drone.IO, Inc. -(http://drone.io/). - -This product includes software developed by Docker, Inc. -(https://www.docker.com/). - -This product includes software developed by Canonical Ltd. -(https://www.canonical.com/). - -This product includes software developed at CoreOS, Inc. -(http://www.coreos.com/). diff --git a/Taskfile.yml b/Taskfile.yml deleted file mode 100644 index 858f034c3c..0000000000 --- a/Taskfile.yml +++ /dev/null @@ -1,129 +0,0 @@ -# https://taskfile.org - -version: '2' - -tasks: - install: - dir: cmd/drone-server - cmds: [ go install -v ] - env: - GO111MODULE: on - - build: - cmds: - - task: build-base - vars: { name: server } - - build-base: - env: - GOOS: linux - GOARCH: amd64 - CGO_ENABLED: '0' - GO111MODULE: 'on' - cmds: - - cmd: > - go build -o release/linux/amd64/drone-{{.name}} - github.com/drone/drone/cmd/drone-{{.name}} - - cleanup: - cmds: - - rm -rf release - - docker: - cmds: - - task: docker-base - vars: { name: server, image: drone/drone } - - docker-base: - vars: - GIT_BRANCH: - sh: git rev-parse --abbrev-ref HEAD - cmds: - - cmd: docker rmi {{.image}} - ignore_error: true - - cmd: docker rmi {{.image}}:{{.GIT_BRANCH}} - ignore_error: true - - cmd: > - docker build --rm - -f docker/Dockerfile.{{.name}}.linux.amd64 - -t {{.image}} . - - cmd: > - docker tag {{.image}} {{.image}}:{{.GIT_BRANCH}} - - test: - cmds: - - go test ./... 
- env: - GO111MODULE: 'on' - - test-mysql: - env: - DRONE_DATABASE_DRIVER: mysql - DRONE_DATABASE_DATASOURCE: root@tcp(localhost:3306)/test?parseTime=true - GO111MODULE: 'on' - cmds: - - cmd: docker kill mysql - silent: true - ignore_error: true - - cmd: > - docker run - -p 3306:3306 - --env MYSQL_DATABASE=test - --env MYSQL_ALLOW_EMPTY_PASSWORD=yes - --name mysql - --detach - --rm - mysql:5.7 - --character-set-server=utf8mb4 - --collation-server=utf8mb4_unicode_ci - - cmd: go test -count=1 github.com/drone/drone/store/batch - - cmd: go test -count=1 github.com/drone/drone/store/batch2 - - cmd: go test -count=1 github.com/drone/drone/store/build - - cmd: go test -count=1 github.com/drone/drone/store/card - - cmd: go test -count=1 github.com/drone/drone/store/cron - - cmd: go test -count=1 github.com/drone/drone/store/logs - - cmd: go test -count=1 github.com/drone/drone/store/perm - - cmd: go test -count=1 github.com/drone/drone/store/repos - - cmd: go test -count=1 github.com/drone/drone/store/secret - - cmd: go test -count=1 github.com/drone/drone/store/secret/global - - cmd: go test -count=1 github.com/drone/drone/store/stage - - cmd: go test -count=1 github.com/drone/drone/store/step - - cmd: go test -count=1 github.com/drone/drone/store/template - - cmd: go test -count=1 github.com/drone/drone/store/user - - cmd: docker kill mysql - - test-postgres: - env: - DRONE_DATABASE_DRIVER: postgres - DRONE_DATABASE_DATASOURCE: host=localhost user=postgres password=postgres dbname=postgres sslmode=disable - GO111MODULE: 'on' - cmds: - - cmd: docker kill postgres - ignore_error: true - silent: false - - silent: false - cmd: > - docker run - -p 5432:5432 - --env POSTGRES_PASSWORD=postgres - --env POSTGRES_USER=postgres - --name postgres - --detach - --rm - postgres:9-alpine - - cmd: go test -count=1 github.com/drone/drone/store/batch - - cmd: go test -count=1 github.com/drone/drone/store/batch2 - - cmd: go test -count=1 github.com/drone/drone/store/build - - cmd: go test -count=1 github.com/drone/drone/store/card - - cmd: go test -count=1 github.com/drone/drone/store/cron - - cmd: go test -count=1 github.com/drone/drone/store/logs - - cmd: go test -count=1 github.com/drone/drone/store/perm - - cmd: go test -count=1 github.com/drone/drone/store/repos - - cmd: go test -count=1 github.com/drone/drone/store/secret - - cmd: go test -count=1 github.com/drone/drone/store/secret/global - - cmd: go test -count=1 github.com/drone/drone/store/stage - - cmd: go test -count=1 github.com/drone/drone/store/step - - cmd: go test -count=1 github.com/drone/drone/store/template - - cmd: go test -count=1 github.com/drone/drone/store/user - - cmd: docker kill postgres - silent: true diff --git a/cmd/drone-server/bootstrap/bootstrap.go b/cmd/drone-server/bootstrap/bootstrap.go deleted file mode 100644 index bfcdc3bcef..0000000000 --- a/cmd/drone-server/bootstrap/bootstrap.go +++ /dev/null @@ -1,123 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
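// Editorial sketch (not part of this patch): the test-mysql and test-postgres
// tasks above start throwaway databases in Docker and point the store tests at
// them through DRONE_DATABASE_DRIVER and DRONE_DATABASE_DATASOURCE. The snippet
// below is an assumed illustration of what those two variables select: a
// database/sql driver name and its datasource string. Run it with the same
// environment the Taskfile sets; the driver imports are the usual community
// drivers, not something this patch prescribes.
package main

import (
	"database/sql"
	"log"
	"os"

	_ "github.com/go-sql-driver/mysql" // registers the "mysql" driver
	_ "github.com/lib/pq"              // registers the "postgres" driver
)

func main() {
	driver := os.Getenv("DRONE_DATABASE_DRIVER")         // e.g. "mysql"
	datasource := os.Getenv("DRONE_DATABASE_DATASOURCE") // e.g. "root@tcp(localhost:3306)/test?parseTime=true"

	db, err := sql.Open(driver, datasource)
	if err != nil {
		log.Fatalln("open:", err)
	}
	defer db.Close()

	// Ping forces a real connection, mirroring what the store tests need
	// before they can run against the containers started by the Taskfile.
	if err := db.Ping(); err != nil {
		log.Fatalln("ping:", err)
	}
	log.Println("database reachable via driver", driver)
}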
- -package bootstrap - -import ( - "context" - "errors" - "time" - - "github.com/dchest/uniuri" - "github.com/drone/drone/core" - "github.com/drone/drone/logger" - - "github.com/sirupsen/logrus" -) - -var errMissingToken = errors.New("You must provide the machine account token") - -// New returns a new account bootstrapper. -func New(users core.UserStore) *Bootstrapper { - return &Bootstrapper{ - users: users, - } -} - -// Bootstrapper bootstraps the system with the initial account. -type Bootstrapper struct { - users core.UserStore -} - -// Bootstrap creates the user account. If the account already exists, -// no account is created, and a nil error is returned. -func (b *Bootstrapper) Bootstrap(ctx context.Context, user *core.User) error { - if user.Login == "" { - return nil - } - - log := logrus.WithFields( - logrus.Fields{ - "login": user.Login, - "admin": user.Admin, - "machine": user.Machine, - "token": user.Hash, - }, - ) - - log.Debugln("bootstrap: create account") - - existingUser, err := b.users.FindLogin(ctx, user.Login) - if err == nil { - ctx = logger.WithContext(ctx, log) - return b.update(ctx, user, existingUser) - } - - if user.Machine && user.Hash == "" { - log.Errorln("bootstrap: cannot create account, missing token") - return errMissingToken - } - - user.Active = true - user.Created = time.Now().Unix() - user.Updated = time.Now().Unix() - if user.Hash == "" { - user.Hash = uniuri.NewLen(32) - } - - err = b.users.Create(ctx, user) - if err != nil { - log = log.WithError(err) - log.Errorln("bootstrap: cannot create account") - return err - } - - log = log.WithField("token", user.Hash) - log.Infoln("bootstrap: account created") - return nil -} - -func (b *Bootstrapper) update(ctx context.Context, src, dst *core.User) error { - log := logger.FromContext(ctx) - log.Debugln("bootstrap: updating account") - var updated bool - if src.Hash != dst.Hash && src.Hash != "" { - log.Infoln("bootstrap: found updated user token") - dst.Hash = src.Hash - updated = true - } - if src.Machine != dst.Machine { - log.Infoln("bootstrap: found updated machine flag") - dst.Machine = src.Machine - updated = true - } - if src.Admin != dst.Admin { - log.Infoln("bootstrap: found updated admin flag") - dst.Admin = src.Admin - updated = true - } - if !updated { - log.Debugln("bootstrap: account already up-to-date") - return nil - } - dst.Updated = time.Now().Unix() - err := b.users.Update(ctx, dst) - if err != nil { - log = log.WithError(err) - log.Errorln("bootstrap: cannot update account") - return err - } - log.Infoln("bootstrap: account successfully updated") - return nil -} diff --git a/cmd/drone-server/bootstrap/bootstrap_test.go b/cmd/drone-server/bootstrap/bootstrap_test.go deleted file mode 100644 index 02dfb7b685..0000000000 --- a/cmd/drone-server/bootstrap/bootstrap_test.go +++ /dev/null @@ -1,170 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
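// Editorial sketch (not part of this patch): minimal usage of the bootstrap
// package shown above. Bootstrap seeds the initial account: an empty Login is
// a no-op, a machine account without a token fails with errMissingToken, and
// an empty Hash is replaced by a generated 32-character token. The store
// wiring and the account values here are assumptions for illustration only.
package setup

import (
	"context"
	"log"

	"github.com/drone/drone/cmd/drone-server/bootstrap"
	"github.com/drone/drone/core"
)

// seedAdmin creates (or updates) the initial administrator account.
func seedAdmin(ctx context.Context, users core.UserStore) error {
	user := &core.User{
		Login:   "octocat", // hypothetical login
		Admin:   true,
		Machine: false,
		// Hash left empty on purpose: Bootstrap generates a token.
	}
	if err := bootstrap.New(users).Bootstrap(ctx, user); err != nil {
		return err
	}
	log.Println("bootstrap: admin token is", user.Hash)
	return nil
}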
- -package bootstrap - -import ( - "context" - "database/sql" - "io/ioutil" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/mock" - - "github.com/dchest/uniuri" - "github.com/golang/mock/gomock" - "github.com/sirupsen/logrus" -) - -var noContext = context.TODO() - -func init() { - logrus.SetOutput(ioutil.Discard) -} - -func TestBootstrap(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - dummyUser := &core.User{ - Login: "octocat", - Machine: true, - Admin: true, - Hash: uniuri.NewLen(32), - } - - store := mock.NewMockUserStore(controller) - store.EXPECT().FindLogin(gomock.Any(), dummyUser.Login).Return(nil, sql.ErrNoRows) - store.EXPECT().Create(gomock.Any(), dummyUser).Return(nil) - - err := New(store).Bootstrap(noContext, dummyUser) - if err != nil { - t.Error(err) - } -} - -func TestBootstrap_GenerateHash(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - dummyUser := &core.User{ - Login: "octocat", - Machine: false, - Admin: true, - Hash: "", - } - - store := mock.NewMockUserStore(controller) - store.EXPECT().FindLogin(gomock.Any(), dummyUser.Login).Return(nil, sql.ErrNoRows) - store.EXPECT().Create(gomock.Any(), dummyUser).Return(nil) - - err := New(store).Bootstrap(noContext, dummyUser) - if err != nil { - t.Error(err) - } - if got, want := len(dummyUser.Hash), 32; got != want { - t.Errorf("Want generated hash length %d, got %d", want, got) - } -} - -func TestBootstrap_Empty(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - dummyUser := &core.User{ - Login: "", - } - - store := mock.NewMockUserStore(controller) - err := New(store).Bootstrap(noContext, dummyUser) - if err != nil { - t.Error(err) - } -} - -func TestBootstrap_Exists_WithoutUpdates(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - dummyUser := &core.User{ - Login: "octocat", - Machine: true, - Admin: true, - Hash: uniuri.NewLen(32), - } - - store := mock.NewMockUserStore(controller) - store.EXPECT().FindLogin(gomock.Any(), dummyUser.Login).Return(dummyUser, nil) - err := New(store).Bootstrap(noContext, dummyUser) - if err != nil { - t.Error(err) - } -} - -func TestBootstrap_Exists_WithUpdates(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - dummyUser := &core.User{ - Login: "octocat", - Machine: true, - Admin: true, - Hash: uniuri.NewLen(32), - } - existingUser := &core.User{ - Login: "octocat", - Machine: false, - Admin: false, - Hash: uniuri.NewLen(32), - } - - store := mock.NewMockUserStore(controller) - store.EXPECT().FindLogin(gomock.Any(), dummyUser.Login).Return(existingUser, nil) - store.EXPECT().Update(gomock.Any(), existingUser).Return(nil) - err := New(store).Bootstrap(noContext, dummyUser) - if err != nil { - t.Error(err) - } -} - -func TestBootstrap_MissingTokenError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - dummyUser := &core.User{ - Login: "octocat", - Machine: true, - Admin: true, - } - - store := mock.NewMockUserStore(controller) - store.EXPECT().FindLogin(gomock.Any(), dummyUser.Login).Return(nil, sql.ErrNoRows) - - err := New(store).Bootstrap(noContext, dummyUser) - if err != errMissingToken { - t.Errorf("Expect missing token error") - } -} - -func TestBootstrap_CreateError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - dummyUser := &core.User{ - Login: "octocat", - Machine: true, - Admin: true, - Hash: 
uniuri.NewLen(32), - } - - store := mock.NewMockUserStore(controller) - store.EXPECT().FindLogin(gomock.Any(), dummyUser.Login).Return(nil, sql.ErrNoRows) - store.EXPECT().Create(gomock.Any(), dummyUser).Return(sql.ErrConnDone) - - err := New(store).Bootstrap(noContext, dummyUser) - if err != sql.ErrConnDone { - t.Errorf("Expect error creating user") - } -} diff --git a/cmd/drone-server/config/config.go b/cmd/drone-server/config/config.go deleted file mode 100644 index 36b978a645..0000000000 --- a/cmd/drone-server/config/config.go +++ /dev/null @@ -1,644 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package config - -import ( - "errors" - "fmt" - "os" - "strings" - "time" - - "github.com/dchest/uniuri" - "github.com/dustin/go-humanize" - "github.com/kelseyhightower/envconfig" - "gopkg.in/yaml.v2" -) - -// IMPORTANT please do not add new configuration parameters unless it has -// been discussed on the mailing list. We are attempting to reduce the -// number of configuration parameters, and may reject pull requests that -// introduce new parameters. (mailing list https://community.harness.io) - -// default runner hostname. -var hostname string - -func init() { - hostname, _ = os.Hostname() - if hostname == "" { - hostname = "localhost" - } -} - -type ( - // Config provides the system configuration. - Config struct { - License string `envconfig:"DRONE_LICENSE"` - - Authn Authentication - Agent Agent - AzureBlob AzureBlob - Convert Convert - Cleanup Cleanup - Cron Cron - Cloning Cloning - Database Database - Datadog Datadog - Docker Docker - HTTP HTTP - Jsonnet Jsonnet - Starlark Starlark - Logging Logging - Prometheus Prometheus - Proxy Proxy - Redis Redis - Registration Registration - Registries Registries - Repository Repository - Runner Runner - RPC RPC - S3 S3 - Secrets Secrets - Server Server - Session Session - Status Status - Users Users - Validate Validate - Webhook Webhook - Yaml Yaml - - // Remote configurations - Bitbucket Bitbucket - Gitea Gitea - Github Github - GitLab GitLab - Gogs Gogs - Stash Stash - Gitee Gitee - } - - // Cloning provides the cloning configuration. - Cloning struct { - AlwaysAuth bool `envconfig:"DRONE_GIT_ALWAYS_AUTH"` - Username string `envconfig:"DRONE_GIT_USERNAME"` - Password string `envconfig:"DRONE_GIT_PASSWORD"` - Image string `envconfig:"DRONE_GIT_IMAGE"` - Pull string `envconfig:"DRONE_GIT_IMAGE_PULL" default:"IfNotExists"` - } - - Cleanup struct { - Disabled bool `envconfig:"DRONE_CLEANUP_DISABLED"` - Interval time.Duration `envconfig:"DRONE_CLEANUP_INTERVAL" default:"24h"` - Running time.Duration `envconfig:"DRONE_CLEANUP_DEADLINE_RUNNING" default:"24h"` - Pending time.Duration `envconfig:"DRONE_CLEANUP_DEADLINE_PENDING" default:"24h"` - Buffer time.Duration `envconfig:"DRONE_CLEANUP_BUFFER" default:"30m"` - } - - // Cron provides the cron configuration. 
- Cron struct { - Disabled bool `envconfig:"DRONE_CRON_DISABLED"` - Interval time.Duration `envconfig:"DRONE_CRON_INTERVAL" default:"30m"` - } - - // Database provides the database configuration. - Database struct { - Driver string `envconfig:"DRONE_DATABASE_DRIVER" default:"sqlite3"` - Datasource string `envconfig:"DRONE_DATABASE_DATASOURCE" default:"core.sqlite"` - Secret string `envconfig:"DRONE_DATABASE_SECRET"` - MaxConnections int `envconfig:"DRONE_DATABASE_MAX_CONNECTIONS" default:"0"` - - // Feature flag - LegacyBatch bool `envconfig:"DRONE_DATABASE_LEGACY_BATCH"` - - // Feature flag - EncryptUserTable bool `envconfig:"DRONE_DATABASE_ENCRYPT_USER_TABLE"` - EncryptMixedContent bool `envconfig:"DRONE_DATABASE_ENCRYPT_MIXED_MODE"` - } - - // Docker provides docker configuration - Docker struct { - Config string `envconfig:"DRONE_DOCKER_CONFIG"` - } - - // Datadog provides datadog configuration - Datadog struct { - Enabled bool `envconfig:"DRONE_DATADOG_ENABLED"` - Endpoint string `envconfig:"DRONE_DATADOG_ENDPOINT"` - Token string `envconfig:"DRONE_DATADOG_TOKEN"` - } - - // Jsonnet configures the jsonnet plugin - Jsonnet struct { - Enabled bool `envconfig:"DRONE_JSONNET_ENABLED"` - ImportLimit int `envconfig:"DRONE_JSONNET_IMPORT_LIMIT" default:"0"` - } - - // Starlark configures the starlark plugin - Starlark struct { - Enabled bool `envconfig:"DRONE_STARLARK_ENABLED"` - StepLimit uint64 `envconfig:"DRONE_STARLARK_STEP_LIMIT"` - SizeLimit uint64 `envconfig:"DRONE_STARLARK_SIZE_LIMIT" default:"0"` - } - - // License provides license configuration - License struct { - Key string `envconfig:"DRONE_LICENSE"` - Endpoint string `envconfig:"DRONE_LICENSE_ENDPOINT"` - } - - // Logging provides the logging configuration. - Logging struct { - Debug bool `envconfig:"DRONE_LOGS_DEBUG"` - Trace bool `envconfig:"DRONE_LOGS_TRACE"` - Color bool `envconfig:"DRONE_LOGS_COLOR"` - Pretty bool `envconfig:"DRONE_LOGS_PRETTY"` - Text bool `envconfig:"DRONE_LOGS_TEXT"` - } - - // Prometheus provides the prometheus configuration. - Prometheus struct { - EnableAnonymousAccess bool `envconfig:"DRONE_PROMETHEUS_ANONYMOUS_ACCESS" default:"false"` - } - - // Redis provides the redis configuration. - Redis struct { - ConnectionString string `envconfig:"DRONE_REDIS_CONNECTION"` - Addr string `envconfig:"DRONE_REDIS_ADDR"` - Password string `envconfig:"DRONE_REDIS_PASSWORD"` - DB int `envconfig:"DRONE_REDIS_DB"` - } - - // Repository provides the repository configuration. - Repository struct { - Filter []string `envconfig:"DRONE_REPOSITORY_FILTER"` - Visibility string `envconfig:"DRONE_REPOSITORY_VISIBILITY"` - Trusted bool `envconfig:"DRONE_REPOSITORY_TRUSTED"` - - // THIS SETTING IS INTERNAL USE ONLY AND SHOULD - // NOT BE USED OR RELIED UPON IN PRODUCTION. - Ignore []string `envconfig:"DRONE_REPOSITORY_IGNORE"` - } - - // Registries provides the registry configuration. - Registries struct { - Endpoint string `envconfig:"DRONE_REGISTRY_ENDPOINT"` - Password string `envconfig:"DRONE_REGISTRY_SECRET"` - SkipVerify bool `envconfig:"DRONE_REGISTRY_SKIP_VERIFY"` - } - - // Secrets provides the secret configuration. - Secrets struct { - Endpoint string `envconfig:"DRONE_SECRET_ENDPOINT"` - Password string `envconfig:"DRONE_SECRET_SECRET"` - SkipVerify bool `envconfig:"DRONE_SECRET_SKIP_VERIFY"` - } - - // RPC provides the rpc configuration. 
- RPC struct { - Server string `envconfig:"DRONE_RPC_SERVER"` - Secret string `envconfig:"DRONE_RPC_SECRET"` - Debug bool `envconfig:"DRONE_RPC_DEBUG"` - Host string `envconfig:"DRONE_RPC_HOST"` - Proto string `envconfig:"DRONE_RPC_PROTO"` - // Hosts map[string]string `envconfig:"DRONE_RPC_EXTRA_HOSTS"` - } - - Agent struct { - Disabled bool `envconfig:"DRONE_AGENTS_DISABLED"` - } - - // Runner provides the runner configuration. - Runner struct { - Local bool `envconfig:"DRONE_RUNNER_LOCAL"` - Image string `envconfig:"DRONE_RUNNER_IMAGE" default:"drone/controller:1"` - Platform string `envconfig:"DRONE_RUNNER_PLATFORM" default:"linux/amd64"` - OS string `envconfig:"DRONE_RUNNER_OS"` - Arch string `envconfig:"DRONE_RUNNER_ARCH"` - Kernel string `envconfig:"DRONE_RUNNER_KERNEL"` - Variant string `envconfig:"DRONE_RUNNER_VARIANT"` - Machine string `envconfig:"DRONE_RUNNER_NAME"` - Capacity int `envconfig:"DRONE_RUNNER_CAPACITY" default:"2"` - Labels map[string]string `envconfig:"DRONE_RUNNER_LABELS"` - Volumes []string `envconfig:"DRONE_RUNNER_VOLUMES"` - Networks []string `envconfig:"DRONE_RUNNER_NETWORKS"` - Devices []string `envconfig:"DRONE_RUNNER_DEVICES"` - Privileged []string `envconfig:"DRONE_RUNNER_PRIVILEGED_IMAGES"` - Environ map[string]string `envconfig:"DRONE_RUNNER_ENVIRON"` - Limits struct { - MemSwapLimit Bytes `envconfig:"DRONE_LIMIT_MEM_SWAP"` - MemLimit Bytes `envconfig:"DRONE_LIMIT_MEM"` - ShmSize Bytes `envconfig:"DRONE_LIMIT_SHM_SIZE"` - CPUQuota int64 `envconfig:"DRONE_LIMIT_CPU_QUOTA"` - CPUShares int64 `envconfig:"DRONE_LIMIT_CPU_SHARES"` - CPUSet string `envconfig:"DRONE_LIMIT_CPU_SET"` - } - } - - // Server provides the server configuration. - Server struct { - Addr string `envconfig:"-"` - Host string `envconfig:"DRONE_SERVER_HOST" default:"localhost:8080"` - Port string `envconfig:"DRONE_SERVER_PORT" default:":8080"` - Proto string `envconfig:"DRONE_SERVER_PROTO" default:"http"` - Pprof bool `envconfig:"DRONE_PPROF_ENABLED"` - Acme bool `envconfig:"DRONE_TLS_AUTOCERT"` - Email string `envconfig:"DRONE_TLS_EMAIL"` - Cert string `envconfig:"DRONE_TLS_CERT"` - Key string `envconfig:"DRONE_TLS_KEY"` - } - - // Proxy provides proxy server configuration. - Proxy struct { - Addr string `envconfig:"-"` - Host string `envconfig:"DRONE_SERVER_PROXY_HOST"` - Proto string `envconfig:"DRONE_SERVER_PROXY_PROTO"` - } - - // Registration configuration. - Registration struct { - Closed bool `envconfig:"DRONE_REGISTRATION_CLOSED"` - } - - // Authentication Controller configuration - Authentication struct { - Endpoint string `envconfig:"DRONE_ADMISSION_PLUGIN_ENDPOINT"` - Secret string `envconfig:"DRONE_ADMISSION_PLUGIN_SECRET"` - SkipVerify bool `envconfig:"DRONE_ADMISSION_PLUGIN_SKIP_VERIFY"` - } - - // Session provides the session configuration. - Session struct { - Timeout time.Duration `envconfig:"DRONE_COOKIE_TIMEOUT" default:"720h"` - Secret string `envconfig:"DRONE_COOKIE_SECRET"` - Secure bool `envconfig:"DRONE_COOKIE_SECURE"` - } - - // Status provides status configurations. - Status struct { - Disabled bool `envconfig:"DRONE_STATUS_DISABLED"` - Name string `envconfig:"DRONE_STATUS_NAME"` - } - - // Users provides the user configuration. - Users struct { - Create UserCreate `envconfig:"DRONE_USER_CREATE"` - Filter []string `envconfig:"DRONE_USER_FILTER"` - MinAge time.Duration `envconfig:"DRONE_MIN_AGE"` - } - - // Webhook provides the webhook configuration. 
- Webhook struct { - Events []string `envconfig:"DRONE_WEBHOOK_EVENTS"` - Endpoint []string `envconfig:"DRONE_WEBHOOK_ENDPOINT"` - Secret string `envconfig:"DRONE_WEBHOOK_SECRET"` - SkipVerify bool `envconfig:"DRONE_WEBHOOK_SKIP_VERIFY"` - } - - // Yaml provides the yaml webhook configuration. - Yaml struct { - Endpoint string `envconfig:"DRONE_YAML_ENDPOINT"` - Secret string `envconfig:"DRONE_YAML_SECRET"` - SkipVerify bool `envconfig:"DRONE_YAML_SKIP_VERIFY"` - Timeout time.Duration `envconfig:"DRONE_YAML_TIMEOUT" default:"1m"` - } - - // Convert provides the converter webhook configuration. - Convert struct { - Extension string `envconfig:"DRONE_CONVERT_PLUGIN_EXTENSION"` - Endpoint string `envconfig:"DRONE_CONVERT_PLUGIN_ENDPOINT"` - Secret string `envconfig:"DRONE_CONVERT_PLUGIN_SECRET"` - SkipVerify bool `envconfig:"DRONE_CONVERT_PLUGIN_SKIP_VERIFY"` - CacheSize int `envconfig:"DRONE_CONVERT_PLUGIN_CACHE_SIZE" default:"10"` - Timeout time.Duration `envconfig:"DRONE_CONVERT_TIMEOUT" default:"1m"` - - // this flag can be removed once we solve for - // https://github.com/harness/drone/pull/2994#issuecomment-795955312 - Multi bool `envconfig:"DRONE_CONVERT_MULTI"` - } - - // Validate provides the validation webhook configuration. - Validate struct { - Endpoint string `envconfig:"DRONE_VALIDATE_PLUGIN_ENDPOINT"` - Secret string `envconfig:"DRONE_VALIDATE_PLUGIN_SECRET"` - SkipVerify bool `envconfig:"DRONE_VALIDATE_PLUGIN_SKIP_VERIFY"` - Timeout time.Duration `envconfig:"DRONE_VALIDATE_TIMEOUT" default:"1m"` - } - - // - // Source code management. - // - - // Bitbucket provides the bitbucket client configuration. - Bitbucket struct { - ClientID string `envconfig:"DRONE_BITBUCKET_CLIENT_ID"` - ClientSecret string `envconfig:"DRONE_BITBUCKET_CLIENT_SECRET"` - SkipVerify bool `envconfig:"DRONE_BITBUCKET_SKIP_VERIFY"` - Debug bool `envconfig:"DRONE_BITBUCKET_DEBUG"` - } - - // Gitea provides the gitea client configuration. - Gitea struct { - Server string `envconfig:"DRONE_GITEA_SERVER"` - ClientID string `envconfig:"DRONE_GITEA_CLIENT_ID"` - ClientSecret string `envconfig:"DRONE_GITEA_CLIENT_SECRET"` - RedirectURL string `envconfig:"DRONE_GITEA_REDIRECT_URL"` - SkipVerify bool `envconfig:"DRONE_GITEA_SKIP_VERIFY"` - Scope []string `envconfig:"DRONE_GITEA_SCOPE" default:"repo,repo:status,user:email,read:org"` - Debug bool `envconfig:"DRONE_GITEA_DEBUG"` - } - - // Github provides the github client configuration. - Github struct { - Server string `envconfig:"DRONE_GITHUB_SERVER" default:"https://github.com"` - APIServer string `envconfig:"DRONE_GITHUB_API_SERVER"` - ClientID string `envconfig:"DRONE_GITHUB_CLIENT_ID"` - ClientSecret string `envconfig:"DRONE_GITHUB_CLIENT_SECRET"` - SkipVerify bool `envconfig:"DRONE_GITHUB_SKIP_VERIFY"` - Scope []string `envconfig:"DRONE_GITHUB_SCOPE" default:"repo,repo:status,user:email,read:org"` - RateLimit int `envconfig:"DRONE_GITHUB_USER_RATELIMIT"` - Debug bool `envconfig:"DRONE_GITHUB_DEBUG"` - } - - // Gitee providers the gitee client configuration. 
- Gitee struct { - Server string `envconfig:"DRONE_GITEE_SERVER" default:"https://gitee.com"` - APIServer string `envconfig:"DRONE_GITEE_API_SERVER" default:"https://gitee.com/api/v5"` - ClientID string `envconfig:"DRONE_GITEE_CLIENT_ID"` - ClientSecret string `envconfig:"DRONE_GITEE_CLIENT_SECRET"` - RedirectURL string `envconfig:"DRONE_GITEE_REDIRECT_URL"` - SkipVerify bool `envconfig:"DRONE_GITEE_SKIP_VERIFY"` - Scope []string `envconfig:"DRONE_GITEE_SCOPE" default:"user_info,projects,pull_requests,hook"` - Debug bool `envconfig:"DRONE_GITEE_DEBUG"` - } - - // GitLab provides the gitlab client configuration. - GitLab struct { - Server string `envconfig:"DRONE_GITLAB_SERVER" default:"https://gitlab.com"` - ClientID string `envconfig:"DRONE_GITLAB_CLIENT_ID"` - ClientSecret string `envconfig:"DRONE_GITLAB_CLIENT_SECRET"` - SkipVerify bool `envconfig:"DRONE_GITLAB_SKIP_VERIFY"` - Debug bool `envconfig:"DRONE_GITLAB_DEBUG"` - } - - // Gogs provides the gogs client configuration. - Gogs struct { - Server string `envconfig:"DRONE_GOGS_SERVER"` - SkipVerify bool `envconfig:"DRONE_GOGS_SKIP_VERIFY"` - Debug bool `envconfig:"DRONE_GOGS_DEBUG"` - } - - // Stash provides the stash client configuration. - Stash struct { - Server string `envconfig:"DRONE_STASH_SERVER"` - ConsumerKey string `envconfig:"DRONE_STASH_CONSUMER_KEY"` - ConsumerSecret string `envconfig:"DRONE_STASH_CONSUMER_SECRET"` - PrivateKey string `envconfig:"DRONE_STASH_PRIVATE_KEY"` - SkipVerify bool `envconfig:"DRONE_STASH_SKIP_VERIFY"` - Debug bool `envconfig:"DRONE_STASH_DEBUG"` - } - - // S3 provides the storage configuration. - S3 struct { - Bucket string `envconfig:"DRONE_S3_BUCKET"` - Prefix string `envconfig:"DRONE_S3_PREFIX"` - Endpoint string `envconfig:"DRONE_S3_ENDPOINT"` - PathStyle bool `envconfig:"DRONE_S3_PATH_STYLE"` - } - - //AzureBlob providers the storage configuration. - AzureBlob struct { - ContainerName string `envconfig:"DRONE_AZURE_BLOB_CONTAINER_NAME"` - StorageAccountName string `envconfig:"DRONE_AZURE_STORAGE_ACCOUNT_NAME"` - StorageAccessKey string `envconfig:"DRONE_AZURE_STORAGE_ACCESS_KEY"` - } - - // HTTP provides http configuration. - HTTP struct { - AllowedHosts []string `envconfig:"DRONE_HTTP_ALLOWED_HOSTS"` - HostsProxyHeaders []string `envconfig:"DRONE_HTTP_PROXY_HEADERS"` - SSLRedirect bool `envconfig:"DRONE_HTTP_SSL_REDIRECT"` - SSLTemporaryRedirect bool `envconfig:"DRONE_HTTP_SSL_TEMPORARY_REDIRECT"` - SSLHost string `envconfig:"DRONE_HTTP_SSL_HOST"` - SSLProxyHeaders map[string]string `envconfig:"DRONE_HTTP_SSL_PROXY_HEADERS"` - STSSeconds int64 `envconfig:"DRONE_HTTP_STS_SECONDS"` - STSIncludeSubdomains bool `envconfig:"DRONE_HTTP_STS_INCLUDE_SUBDOMAINS"` - STSPreload bool `envconfig:"DRONE_HTTP_STS_PRELOAD"` - ForceSTSHeader bool `envconfig:"DRONE_HTTP_STS_FORCE_HEADER"` - BrowserXSSFilter bool `envconfig:"DRONE_HTTP_BROWSER_XSS_FILTER" default:"true"` - FrameDeny bool `envconfig:"DRONE_HTTP_FRAME_DENY" default:"true"` - ContentTypeNosniff bool `envconfig:"DRONE_HTTP_CONTENT_TYPE_NO_SNIFF"` - ContentSecurityPolicy string `envconfig:"DRONE_HTTP_CONTENT_SECURITY_POLICY"` - ReferrerPolicy string `envconfig:"DRONE_HTTP_REFERRER_POLICY"` - } -) - -// Environ returns the settings from the environment. 
-func Environ() (Config, error) { - cfg := Config{} - err := envconfig.Process("", &cfg) - defaultAddress(&cfg) - defaultProxy(&cfg) - defaultRunner(&cfg) - defaultSession(&cfg) - defaultCallback(&cfg) - configureGithub(&cfg) - if err := kubernetesServiceConflict(&cfg); err != nil { - return cfg, err - } - return cfg, err -} - -// String returns the configuration in string format. -func (c *Config) String() string { - out, _ := yaml.Marshal(c) - return string(out) -} - -// IsGitHub returns true if the GitHub integration -// is activated. -func (c *Config) IsGitHub() bool { - return c.Github.ClientID != "" -} - -// IsGitHubEnterprise returns true if the GitHub -// integration is activated. -func (c *Config) IsGitHubEnterprise() bool { - return c.IsGitHub() && !strings.HasPrefix(c.Github.Server, "https://github.com") -} - -// IsGitLab returns true if the GitLab integration -// is activated. -func (c *Config) IsGitLab() bool { - return c.GitLab.ClientID != "" -} - -// IsGogs returns true if the Gogs integration -// is activated. -func (c *Config) IsGogs() bool { - return c.Gogs.Server != "" -} - -// IsGitea returns true if the Gitea integration -// is activated. -func (c *Config) IsGitea() bool { - return c.Gitea.Server != "" -} - -// IsGitee returns true if the Gitee integration -// is activated. -func (c *Config) IsGitee() bool { - return c.Gitee.ClientID != "" -} - -// IsBitbucket returns true if the Bitbucket Cloud -// integration is activated. -func (c *Config) IsBitbucket() bool { - return c.Bitbucket.ClientID != "" -} - -// IsStash returns true if the Atlassian Stash -// integration is activated. -func (c *Config) IsStash() bool { - return c.Stash.Server != "" -} - -func cleanHostname(hostname string) string { - hostname = strings.ToLower(hostname) - hostname = strings.TrimPrefix(hostname, "http://") - hostname = strings.TrimPrefix(hostname, "https://") - - return hostname -} - -func defaultAddress(c *Config) { - if c.Server.Key != "" || c.Server.Cert != "" || c.Server.Acme { - c.Server.Port = ":443" - c.Server.Proto = "https" - } - c.Server.Host = cleanHostname(c.Server.Host) - c.Server.Addr = c.Server.Proto + "://" + c.Server.Host -} - -func defaultProxy(c *Config) { - if c.Proxy.Host == "" { - c.Proxy.Host = c.Server.Host - } else { - c.Proxy.Host = cleanHostname(c.Proxy.Host) - } - if c.Proxy.Proto == "" { - c.Proxy.Proto = c.Server.Proto - } - c.Proxy.Addr = c.Proxy.Proto + "://" + c.Proxy.Host -} - -func defaultCallback(c *Config) { - if c.RPC.Host == "" { - c.RPC.Host = c.Server.Host - } - if c.RPC.Proto == "" { - c.RPC.Proto = c.Server.Proto - } -} - -func defaultRunner(c *Config) { - if c.Runner.Machine == "" { - c.Runner.Machine = hostname - } - parts := strings.Split(c.Runner.Platform, "/") - if len(parts) == 2 && c.Runner.OS == "" { - c.Runner.OS = parts[0] - } - if len(parts) == 2 && c.Runner.Arch == "" { - c.Runner.Arch = parts[1] - } -} - -func defaultSession(c *Config) { - if c.Session.Secret == "" { - c.Session.Secret = uniuri.NewLen(32) - } -} - -func configureGithub(c *Config) { - if c.Github.APIServer != "" { - return - } - if c.Github.Server == "https://github.com" { - c.Github.APIServer = "https://api.github.com" - } else { - c.Github.APIServer = strings.TrimSuffix(c.Github.Server, "/") + "/api/v3" - } -} - -func kubernetesServiceConflict(c *Config) error { - if strings.HasPrefix(c.Server.Port, "tcp://") { - return errors.New("Invalid port configuration. 
See https://community.harness.io/t/drone-server-changing-ports-protocol/11400") - } - return nil -} - -// Bytes stores number bytes (e.g. megabytes) -type Bytes int64 - -// Decode implements a decoder that parses a string representation -// of bytes into the number of bytes it represents. -func (b *Bytes) Decode(value string) error { - v, err := humanize.ParseBytes(value) - *b = Bytes(v) - return err -} - -// Int64 returns the int64 value of the Byte. -func (b *Bytes) Int64() int64 { - return int64(*b) -} - -// String returns the string value of the Byte. -func (b *Bytes) String() string { - return fmt.Sprint(*b) -} - -// UserCreate stores account information used to bootstrap -// the admin user account when the system initializes. -type UserCreate struct { - Username string - Machine bool - Admin bool - Token string -} - -// Decode implements a decoder that extracts user information -// from the environment variable string. -func (u *UserCreate) Decode(value string) error { - for _, param := range strings.Split(value, ",") { - parts := strings.Split(param, ":") - if len(parts) != 2 { - continue - } - key := parts[0] - val := parts[1] - switch key { - case "username": - u.Username = val - case "token": - u.Token = val - case "machine": - u.Machine = val == "true" - case "admin": - u.Admin = val == "true" - } - } - return nil -} diff --git a/cmd/drone-server/config/config_test.go b/cmd/drone-server/config/config_test.go deleted file mode 100644 index 500620a795..0000000000 --- a/cmd/drone-server/config/config_test.go +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package config - -import "testing" - -func Test_cleanHostname(t *testing.T) { - tests := []struct { - name string - hostname string - want string - }{ - { - name: "no prefix", - hostname: "drone.io", - want: "drone.io", - }, - { - name: "http prefix", - hostname: "http://drone.io", - want: "drone.io", - }, - { - name: "https prefix", - hostname: "https://drone.io", - want: "drone.io", - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - if got := cleanHostname(tt.hostname); got != tt.want { - t.Errorf("cleanHostname() = %v, want %v", got, tt.want) - } - }) - } -} diff --git a/cmd/drone-server/inject_client.go b/cmd/drone-server/inject_client.go deleted file mode 100644 index 93fab947c1..0000000000 --- a/cmd/drone-server/inject_client.go +++ /dev/null @@ -1,286 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
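// Editorial sketch (not part of this patch): how the two custom decoders in the
// config package above behave. The values are made-up examples, not defaults:
// DRONE_USER_CREATE takes a comma-separated key:value list, and byte limits
// such as DRONE_LIMIT_MEM accept human-readable sizes via go-humanize.
package main

import (
	"fmt"

	"github.com/drone/drone/cmd/drone-server/config"
)

func main() {
	// Equivalent to DRONE_USER_CREATE=username:octocat,machine:false,admin:true,token:example-token
	var u config.UserCreate
	_ = u.Decode("username:octocat,machine:false,admin:true,token:example-token")
	fmt.Printf("%+v\n", u) // {Username:octocat Machine:false Admin:true Token:example-token}

	// Equivalent to DRONE_LIMIT_MEM=512MiB
	var b config.Bytes
	if err := b.Decode("512MiB"); err != nil {
		panic(err)
	}
	fmt.Println(b.Int64()) // 536870912
}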
-// See the License for the specific language governing permissions and -// limitations under the License. - -package main - -import ( - "crypto/rsa" - "crypto/tls" - "crypto/x509" - "encoding/pem" - "io/ioutil" - "net/http" - "net/http/httputil" - "strings" - - "github.com/drone/drone/cmd/drone-server/config" - "github.com/drone/go-scm/scm" - "github.com/drone/go-scm/scm/driver/bitbucket" - "github.com/drone/go-scm/scm/driver/gitea" - "github.com/drone/go-scm/scm/driver/gitee" - "github.com/drone/go-scm/scm/driver/github" - "github.com/drone/go-scm/scm/driver/gitlab" - "github.com/drone/go-scm/scm/driver/gogs" - "github.com/drone/go-scm/scm/driver/stash" - "github.com/drone/go-scm/scm/transport/oauth1" - "github.com/drone/go-scm/scm/transport/oauth2" - - "github.com/google/wire" - "github.com/sirupsen/logrus" -) - -// wire set for loading the scm client. -var clientSet = wire.NewSet( - provideClient, -) - -// provideBitbucketClient is a Wire provider function that -// returns a Source Control Management client based on the -// environment configuration. -func provideClient(config config.Config) *scm.Client { - switch { - case config.Bitbucket.ClientID != "": - return provideBitbucketClient(config) - case config.Github.ClientID != "": - return provideGithubClient(config) - case config.Gitee.ClientID != "": - return provideGiteeClient(config) - case config.Gitea.Server != "": - return provideGiteaClient(config) - case config.GitLab.ClientID != "": - return provideGitlabClient(config) - case config.Gogs.Server != "": - return provideGogsClient(config) - case config.Stash.ConsumerKey != "": - return provideStashClient(config) - } - logrus.Fatalln("main: source code management system not configured") - return nil -} - -// provideBitbucketClient is a Wire provider function that -// returns a Bitbucket Cloud client based on the environment -// configuration. -func provideBitbucketClient(config config.Config) *scm.Client { - client := bitbucket.NewDefault() - client.Client = &http.Client{ - Transport: &oauth2.Transport{ - Source: &oauth2.Refresher{ - ClientID: config.Bitbucket.ClientID, - ClientSecret: config.Bitbucket.ClientSecret, - Endpoint: "https://bitbucket.org/site/oauth2/access_token", - Source: oauth2.ContextTokenSource(), - }, - }, - } - if config.Bitbucket.Debug { - client.DumpResponse = httputil.DumpResponse - } - return client -} - -// provideGithubClient is a Wire provider function that returns -// a GitHub client based on the environment configuration. -func provideGithubClient(config config.Config) *scm.Client { - client, err := github.New(config.Github.APIServer) - if err != nil { - logrus.WithError(err). - Fatalln("main: cannot create the GitHub client") - } - if config.Github.Debug { - client.DumpResponse = httputil.DumpResponse - } - client.Client = &http.Client{ - Transport: &oauth2.Transport{ - Source: oauth2.ContextTokenSource(), - Base: defaultTransport(config.Github.SkipVerify), - }, - } - return client -} - -// provideGiteeClient is a Wire provider function that returns -// a Gitee client based on the environment configuration. -func provideGiteeClient(config config.Config) *scm.Client { - client, err := gitee.New(config.Gitee.APIServer) - if err != nil { - logrus.WithError(err). 
- Fatalln("main: cannot create the Gitee client") - } - if config.Gitee.Debug { - client.DumpResponse = httputil.DumpResponse - } - client.Client = &http.Client{ - Transport: &oauth2.Transport{ - Scheme: oauth2.SchemeBearer, - Source: &oauth2.Refresher{ - ClientID: config.Gitee.ClientID, - ClientSecret: config.Gitee.ClientSecret, - Endpoint: strings.TrimSuffix(config.Gitee.Server, "/") + "/oauth/token", - Source: oauth2.ContextTokenSource(), - }, - Base: defaultTransport(config.Gitee.SkipVerify), - }, - } - return client -} - -// provideGiteaClient is a Wire provider function that returns -// a Gitea client based on the environment configuration. -func provideGiteaClient(config config.Config) *scm.Client { - client, err := gitea.New(config.Gitea.Server) - if err != nil { - logrus.WithError(err). - Fatalln("main: cannot create the Gitea client") - } - if config.Gitea.Debug { - client.DumpResponse = httputil.DumpResponse - } - client.Client = &http.Client{ - Transport: &oauth2.Transport{ - Scheme: oauth2.SchemeBearer, - Source: &oauth2.Refresher{ - ClientID: config.Gitea.ClientID, - ClientSecret: config.Gitea.ClientSecret, - Endpoint: strings.TrimSuffix(config.Gitea.Server, "/") + "/login/oauth/access_token", - Source: oauth2.ContextTokenSource(), - }, - Base: defaultTransport(config.Gitea.SkipVerify), - }, - } - return client -} - -// provideGitlabClient is a Wire provider function that returns -// a GitLab client based on the environment configuration. -func provideGitlabClient(config config.Config) *scm.Client { - logrus.WithField("server", config.GitLab.Server). - WithField("client", config.GitLab.ClientID). - WithField("skip_verify", config.GitLab.SkipVerify). - Debugln("main: creating the GitLab client") - - client, err := gitlab.New(config.GitLab.Server) - if err != nil { - logrus.WithError(err). - Fatalln("main: cannot create the GitLab client") - } - if config.GitLab.Debug { - client.DumpResponse = httputil.DumpResponse - } - client.Client = &http.Client{ - Transport: &oauth2.Transport{ - Scheme: oauth2.SchemeBearer, - Source: &oauth2.Refresher{ - ClientID: config.GitLab.ClientID, - ClientSecret: config.GitLab.ClientSecret, - Endpoint: strings.TrimSuffix(config.GitLab.Server, "/") + "/oauth/token", - Source: oauth2.ContextTokenSource(), - }, - Base: defaultTransport(config.GitLab.SkipVerify), - }, - } - return client -} - -// provideGogsClient is a Wire provider function that returns -// a Gogs client based on the environment configuration. -func provideGogsClient(config config.Config) *scm.Client { - logrus.WithField("server", config.Gogs.Server). - WithField("skip_verify", config.Gogs.SkipVerify). - Debugln("main: creating the Gogs client") - - client, err := gogs.New(config.Gogs.Server) - if err != nil { - logrus.WithError(err). - Fatalln("main: cannot create the Gogs client") - } - if config.Gogs.Debug { - client.DumpResponse = httputil.DumpResponse - } - client.Client = &http.Client{ - Transport: &oauth2.Transport{ - Scheme: oauth2.SchemeToken, - Source: oauth2.ContextTokenSource(), - Base: defaultTransport(config.Gogs.SkipVerify), - }, - } - return client -} - -// provideStashClient is a Wire provider function that returns -// a Stash client based on the environment configuration. -func provideStashClient(config config.Config) *scm.Client { - logrus.WithField("server", config.Stash.Server). - WithField("skip_verify", config.Stash.SkipVerify). 
- Debugln("main: creating the Stash client") - - privateKey, err := parsePrivateKeyFile(config.Stash.PrivateKey) - if err != nil { - logrus.WithError(err). - Fatalln("main: cannot parse the Stash Private Key") - } - client, err := stash.New(config.Stash.Server) - if err != nil { - logrus.WithError(err). - Fatalln("main: cannot create the Stash client") - } - if config.Stash.Debug { - client.DumpResponse = httputil.DumpResponse - } - client.Client = &http.Client{ - Transport: &oauth1.Transport{ - ConsumerKey: config.Stash.ConsumerKey, - PrivateKey: privateKey, - Source: oauth1.ContextTokenSource(), - Base: defaultTransport(config.Stash.SkipVerify), - }, - } - return client -} - -// defaultClient provides a default http.Client. If skipverify -// is true, the default transport will skip ssl verification. -func defaultClient(skipverify bool) *http.Client { - client := &http.Client{} - client.Transport = defaultTransport(skipverify) - return client -} - -// defaultTransport provides a default http.Transport. If -// skipverify is true, the transport will skip ssl verification. -func defaultTransport(skipverify bool) http.RoundTripper { - return &http.Transport{ - Proxy: http.ProxyFromEnvironment, - TLSClientConfig: &tls.Config{ - InsecureSkipVerify: skipverify, - }, - } -} - -// parsePrivateKeyFile is a helper function that parses an -// RSA Private Key file encoded in PEM format. -func parsePrivateKeyFile(path string) (*rsa.PrivateKey, error) { - d, err := ioutil.ReadFile(path) - if err != nil { - return nil, err - } - return parsePrivateKey(d) -} - -// parsePrivateKey is a helper function that parses an RSA -// Private Key encoded in PEM format. -func parsePrivateKey(data []byte) (*rsa.PrivateKey, error) { - p, _ := pem.Decode(data) - return x509.ParsePKCS1PrivateKey(p.Bytes) -} diff --git a/cmd/drone-server/inject_external.go b/cmd/drone-server/inject_external.go deleted file mode 100644 index d5a29a1e37..0000000000 --- a/cmd/drone-server/inject_external.go +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright 2021 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package main - -import ( - "github.com/drone/drone/cmd/drone-server/config" - "github.com/drone/drone/service/redisdb" - - "github.com/google/wire" -) - -// wire set for loading the external services. -var externalSet = wire.NewSet( - provideRedisClient, -) - -func provideRedisClient(config config.Config) (rdb redisdb.RedisDB, err error) { - return redisdb.New(config) -} diff --git a/cmd/drone-server/inject_license.go b/cmd/drone-server/inject_license.go deleted file mode 100644 index 4a4bc2c5be..0000000000 --- a/cmd/drone-server/inject_license.go +++ /dev/null @@ -1,53 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package main - -import ( - "github.com/drone/drone/cmd/drone-server/config" - "github.com/drone/drone/core" - "github.com/drone/drone/service/license" - "github.com/drone/go-scm/scm" - - "github.com/google/wire" - "github.com/sirupsen/logrus" -) - -// wire set for loading the license. -var licenseSet = wire.NewSet( - provideLicense, - license.NewService, -) - -// provideLicense is a Wire provider function that returns a -// license loaded from a license file. -func provideLicense(client *scm.Client, config config.Config) *core.License { - l, err := license.Load(config.License) - if config.License == "" { - l = license.Trial(client.Driver.String()) - } else if err != nil { - logrus.WithError(err). - Fatalln("main: invalid or expired license") - } - logrus.WithFields( - logrus.Fields{ - "kind": l.Kind, - "expires": l.Expires, - "repo.limit": l.Repos, - "user.limit": l.Users, - "build.limit": l.Builds, - }, - ).Debugln("main: license loaded") - return l -} diff --git a/cmd/drone-server/inject_login.go b/cmd/drone-server/inject_login.go deleted file mode 100644 index a3f4fed39f..0000000000 --- a/cmd/drone-server/inject_login.go +++ /dev/null @@ -1,223 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package main - -import ( - "github.com/drone/drone/cmd/drone-server/config" - "github.com/drone/go-login/login" - "github.com/drone/go-login/login/bitbucket" - "github.com/drone/go-login/login/gitea" - "github.com/drone/go-login/login/gitee" - "github.com/drone/go-login/login/github" - "github.com/drone/go-login/login/gitlab" - "github.com/drone/go-login/login/gogs" - "github.com/drone/go-login/login/stash" - "github.com/drone/go-scm/scm/transport/oauth2" - "strings" - - "github.com/google/wire" - "github.com/sirupsen/logrus" -) - -// wire set for loading the authenticator. -var loginSet = wire.NewSet( - provideLogin, - provideRefresher, -) - -// provideLogin is a Wire provider function that returns an -// authenticator based on the environment configuration. 
-func provideLogin(config config.Config) login.Middleware { - switch { - case config.Bitbucket.ClientID != "": - return provideBitbucketLogin(config) - case config.Github.ClientID != "": - return provideGithubLogin(config) - case config.Gitee.ClientID != "": - return provideGiteeLogin(config) - case config.Gitea.Server != "": - return provideGiteaLogin(config) - case config.GitLab.ClientID != "": - return provideGitlabLogin(config) - case config.Gogs.Server != "": - return provideGogsLogin(config) - case config.Stash.ConsumerKey != "": - return provideStashLogin(config) - } - logrus.Fatalln("main: source code management system not configured") - return nil -} - -// provideBitbucketLogin is a Wire provider function that -// returns a Bitbucket Cloud authenticator based on the -// environment configuration. -func provideBitbucketLogin(config config.Config) login.Middleware { - if config.Bitbucket.ClientID == "" { - return nil - } - return &bitbucket.Config{ - ClientID: config.Bitbucket.ClientID, - ClientSecret: config.Bitbucket.ClientSecret, - RedirectURL: config.Server.Addr + "/login", - } -} - -// provideGithubLogin is a Wire provider function that returns -// a GitHub authenticator based on the environment configuration. -func provideGithubLogin(config config.Config) login.Middleware { - if config.Github.ClientID == "" { - return nil - } - return &github.Config{ - ClientID: config.Github.ClientID, - ClientSecret: config.Github.ClientSecret, - Scope: config.Github.Scope, - Server: config.Github.Server, - Client: defaultClient(config.Github.SkipVerify), - Logger: logrus.StandardLogger(), - } -} - -// provideGiteeLogin is a Wire provider function that returns -// a Gitee authenticator based on the environment configuration. -func provideGiteeLogin(config config.Config) login.Middleware { - if config.Gitee.ClientID == "" { - return nil - } - redirectURL := config.Gitee.RedirectURL - if redirectURL == "" { - redirectURL = config.Server.Addr + "/login" - } - return &gitee.Config{ - ClientID: config.Gitee.ClientID, - ClientSecret: config.Gitee.ClientSecret, - RedirectURL: redirectURL, - Server: config.Gitee.Server, - Scope: config.Gitee.Scope, - Client: defaultClient(config.Gitee.SkipVerify), - } -} - -// provideGiteaLogin is a Wire provider function that returns -// a Gitea authenticator based on the environment configuration. -func provideGiteaLogin(config config.Config) login.Middleware { - if config.Gitea.Server == "" { - return nil - } - redirectURL := config.Gitea.RedirectURL - if redirectURL == "" { - redirectURL = config.Server.Addr + "/login" - } - return &gitea.Config{ - ClientID: config.Gitea.ClientID, - ClientSecret: config.Gitea.ClientSecret, - Server: config.Gitea.Server, - Client: defaultClient(config.Gitea.SkipVerify), - Logger: logrus.StandardLogger(), - RedirectURL: redirectURL, - Scope: config.Gitea.Scope, - } -} - -// provideGitlabLogin is a Wire provider function that returns -// a GitLab authenticator based on the environment configuration. -func provideGitlabLogin(config config.Config) login.Middleware { - if config.GitLab.ClientID == "" { - return nil - } - return &gitlab.Config{ - ClientID: config.GitLab.ClientID, - ClientSecret: config.GitLab.ClientSecret, - RedirectURL: config.Server.Addr + "/login", - Server: config.GitLab.Server, - Client: defaultClient(config.GitLab.SkipVerify), - } -} - -// provideGogsLogin is a Wire provider function that returns -// a Gogs authenticator based on the environment configuration. 
-func provideGogsLogin(config config.Config) login.Middleware { - if config.Gogs.Server == "" { - return nil - } - return &gogs.Config{ - Label: "drone", - Login: "/login/form", - Server: config.Gogs.Server, - Client: defaultClient(config.Gogs.SkipVerify), - } -} - -// provideStashLogin is a Wire provider function that returns -// a Stash authenticator based on the environment configuration. -func provideStashLogin(config config.Config) login.Middleware { - if config.Stash.ConsumerKey == "" { - return nil - } - privateKey, err := stash.ParsePrivateKeyFile(config.Stash.PrivateKey) - if err != nil { - logrus.WithError(err). - Fatalln("main: cannot parse Private Key file") - } - return &stash.Config{ - Address: config.Stash.Server, - ConsumerKey: config.Stash.ConsumerKey, - ConsumerSecret: config.Stash.ConsumerSecret, - PrivateKey: privateKey, - CallbackURL: config.Server.Addr + "/login", - Client: defaultClient(config.Stash.SkipVerify), - } -} - -// provideRefresher is a Wire provider function that returns -// an oauth token refresher for Bitbucket and Gitea -func provideRefresher(config config.Config) *oauth2.Refresher { - switch { - case config.Bitbucket.ClientID != "": - return &oauth2.Refresher{ - ClientID: config.Bitbucket.ClientID, - ClientSecret: config.Bitbucket.ClientSecret, - Endpoint: "https://bitbucket.org/site/oauth2/access_token", - Source: oauth2.ContextTokenSource(), - Client: defaultClient(config.Bitbucket.SkipVerify), - } - case config.Gitea.ClientID != "": - return &oauth2.Refresher{ - ClientID: config.Gitea.ClientID, - ClientSecret: config.Gitea.ClientSecret, - Endpoint: strings.TrimSuffix(config.Gitea.Server, "/") + "/login/oauth/access_token", - Source: oauth2.ContextTokenSource(), - Client: defaultClient(config.Gitea.SkipVerify), - } - case config.GitLab.ClientID != "": - return &oauth2.Refresher{ - ClientID: config.GitLab.ClientID, - ClientSecret: config.GitLab.ClientSecret, - Endpoint: strings.TrimSuffix(config.GitLab.Server, "/") + "/oauth/token", - Source: oauth2.ContextTokenSource(), - Client: defaultClient(config.GitLab.SkipVerify), - } - case config.Gitee.ClientID != "": - return &oauth2.Refresher{ - ClientID: config.Gitee.ClientID, - ClientSecret: config.Gitee.ClientSecret, - Endpoint: strings.TrimSuffix(config.Gitee.Server, "/") + "/oauth/token", - Source: oauth2.ContextTokenSource(), - Client: defaultClient(config.Gitee.SkipVerify), - } - - } - return nil -} diff --git a/cmd/drone-server/inject_plugin.go b/cmd/drone-server/inject_plugin.go deleted file mode 100644 index a97e8e8dae..0000000000 --- a/cmd/drone-server/inject_plugin.go +++ /dev/null @@ -1,171 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package main - -import ( - spec "github.com/drone/drone/cmd/drone-server/config" - "github.com/drone/drone/core" - "github.com/drone/drone/plugin/admission" - "github.com/drone/drone/plugin/config" - "github.com/drone/drone/plugin/converter" - "github.com/drone/drone/plugin/registry" - "github.com/drone/drone/plugin/secret" - "github.com/drone/drone/plugin/validator" - "github.com/drone/drone/plugin/webhook" - "github.com/drone/go-scm/scm" - - "github.com/google/wire" -) - -// wire set for loading plugins. -var pluginSet = wire.NewSet( - provideAdmissionPlugin, - provideConfigPlugin, - provideConvertPlugin, - provideRegistryPlugin, - provideSecretPlugin, - provideValidatePlugin, - provideWebhookPlugin, -) - -// provideAdmissionPlugin is a Wire provider function that -// returns an admission plugin based on the environment -// configuration. -func provideAdmissionPlugin(client *scm.Client, orgs core.OrganizationService, users core.UserService, config spec.Config) core.AdmissionService { - return admission.Combine( - admission.Membership(orgs, config.Users.Filter), - admission.Open(config.Registration.Closed), - admission.Nobot(users, config.Users.MinAge), - admission.External( - config.Authn.Endpoint, - config.Authn.Secret, - config.Authn.SkipVerify, - ), - ) -} - -// provideConfigPlugin is a Wire provider function that returns -// a yaml configuration plugin based on the environment -// configuration. -func provideConfigPlugin(client *scm.Client, contents core.FileService, conf spec.Config) core.ConfigService { - return config.Combine( - config.Memoize( - config.Global( - conf.Yaml.Endpoint, - conf.Yaml.Secret, - conf.Yaml.SkipVerify, - conf.Yaml.Timeout, - ), - ), - config.Repository(contents), - ) -} - -// provideConvertPlugin is a Wire provider function that returns -// a yaml conversion plugin based on the environment -// configuration. -func provideConvertPlugin(client *scm.Client, fileService core.FileService, conf spec.Config, templateStore core.TemplateStore) core.ConvertService { - return converter.Combine( - conf.Convert.Multi, - converter.Legacy(false), - converter.Starlark( - conf.Starlark.Enabled, - conf.Starlark.StepLimit, - conf.Starlark.SizeLimit, - ), - converter.Jsonnet( - conf.Jsonnet.Enabled, - conf.Jsonnet.ImportLimit, - fileService, - ), - converter.Template( - templateStore, - conf.Starlark.StepLimit, - conf.Starlark.SizeLimit, - ), - converter.Memoize( - converter.Remote( - conf.Convert.Endpoint, - conf.Convert.Secret, - conf.Convert.Extension, - conf.Convert.SkipVerify, - conf.Convert.Timeout, - ), - conf.Convert.CacheSize, - ), - ) -} - -// provideRegistryPlugin is a Wire provider function that -// returns a registry plugin based on the environment -// configuration. -func provideRegistryPlugin(config spec.Config) core.RegistryService { - return registry.Combine( - registry.External( - config.Secrets.Endpoint, - config.Secrets.Password, - config.Secrets.SkipVerify, - ), - registry.FileSource( - config.Docker.Config, - ), - registry.EndpointSource( - config.Registries.Endpoint, - config.Registries.Password, - config.Registries.SkipVerify, - ), - ) -} - -// provideSecretPlugin is a Wire provider function that returns -// a secret plugin based on the environment configuration. 
-func provideSecretPlugin(config spec.Config) core.SecretService { - return secret.External( - config.Secrets.Endpoint, - config.Secrets.Password, - config.Secrets.SkipVerify, - ) -} - -// provideValidatePlugin is a Wire provider function that -// returns a yaml validation plugin based on the environment -// configuration. -func provideValidatePlugin(conf spec.Config) core.ValidateService { - return validator.Combine( - validator.Remote( - conf.Validate.Endpoint, - conf.Validate.Secret, - conf.Validate.SkipVerify, - conf.Validate.Timeout, - ), - // THIS FEATURE IS INTERNAL USE ONLY AND SHOULD - // NOT BE USED OR RELIED UPON IN PRODUCTION. - validator.Filter( - nil, - conf.Repository.Ignore, - ), - ) -} - -// provideWebhookPlugin is a Wire provider function that returns -// a webhook plugin based on the environment configuration. -func provideWebhookPlugin(config spec.Config, system *core.System) core.WebhookSender { - return webhook.New(webhook.Config{ - Events: config.Webhook.Events, - Endpoint: config.Webhook.Endpoint, - Secret: config.Webhook.Secret, - System: system, - }) -} diff --git a/cmd/drone-server/inject_runner.go b/cmd/drone-server/inject_runner.go deleted file mode 100644 index 416b26f20b..0000000000 --- a/cmd/drone-server/inject_runner.go +++ /dev/null @@ -1,78 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package main - -import ( - "github.com/drone/drone-runtime/engine/docker" - "github.com/drone/drone/cmd/drone-server/config" - "github.com/drone/drone/core" - "github.com/drone/drone/operator/manager" - "github.com/drone/drone/operator/runner" - - "github.com/google/wire" - "github.com/sirupsen/logrus" -) - -// wire set for loading the server. -var runnerSet = wire.NewSet( - provideRunner, -) - -// provideRunner is a Wire provider function that returns a -// local build runner configured from the environment. -func provideRunner( - manager manager.BuildManager, - secrets core.SecretService, - registry core.RegistryService, - config config.Config, -) *runner.Runner { - // the local runner is only created when remote agents - // are disabled - if config.Agent.Disabled == false { - return nil - } - engine, err := docker.NewEnv() - if err != nil { - logrus.WithError(err). 
- Fatalln("cannot load the docker engine") - return nil - } - return &runner.Runner{ - Platform: config.Runner.Platform, - OS: config.Runner.OS, - Arch: config.Runner.Arch, - Kernel: config.Runner.Kernel, - Variant: config.Runner.Variant, - Engine: engine, - Manager: manager, - Secrets: secrets, - Registry: registry, - Volumes: config.Runner.Volumes, - Networks: config.Runner.Networks, - Devices: config.Runner.Devices, - Privileged: config.Runner.Privileged, - Machine: config.Runner.Machine, - Labels: config.Runner.Labels, - Environ: config.Runner.Environ, - Limits: runner.Limits{ - MemSwapLimit: int64(config.Runner.Limits.MemSwapLimit), - MemLimit: int64(config.Runner.Limits.MemLimit), - ShmSize: int64(config.Runner.Limits.ShmSize), - CPUQuota: config.Runner.Limits.CPUQuota, - CPUShares: config.Runner.Limits.CPUShares, - CPUSet: config.Runner.Limits.CPUSet, - }, - } -} diff --git a/cmd/drone-server/inject_scheduler.go b/cmd/drone-server/inject_scheduler.go deleted file mode 100644 index 42a6053c90..0000000000 --- a/cmd/drone-server/inject_scheduler.go +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package main - -import ( - "github.com/drone/drone/core" - "github.com/drone/drone/scheduler/queue" - "github.com/drone/drone/service/redisdb" - - "github.com/google/wire" -) - -// wire set for loading the scheduler. -var schedulerSet = wire.NewSet( - provideScheduler, -) - -// provideScheduler is a Wire provider function that returns a -// scheduler based on the environment configuration. -func provideScheduler(store core.StageStore, r redisdb.RedisDB) core.Scheduler { - return queue.New(store, r) -} diff --git a/cmd/drone-server/inject_server.go b/cmd/drone-server/inject_server.go deleted file mode 100644 index 07302bd5eb..0000000000 --- a/cmd/drone-server/inject_server.go +++ /dev/null @@ -1,147 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package main - -import ( - "net/http" - - "github.com/drone/drone/cmd/drone-server/config" - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api" - "github.com/drone/drone/handler/health" - "github.com/drone/drone/handler/web" - "github.com/drone/drone/metric" - "github.com/drone/drone/operator/manager" - "github.com/drone/drone/operator/manager/rpc" - "github.com/drone/drone/operator/manager/rpc2" - "github.com/drone/drone/server" - "github.com/google/wire" - - "github.com/go-chi/chi" - "github.com/go-chi/chi/middleware" - "github.com/unrolled/secure" -) - -type ( - healthzHandler http.Handler - metricsHandler http.Handler - pprofHandler http.Handler - rpcHandlerV1 http.Handler - rpcHandlerV2 http.Handler -) - -// wire set for loading the server. -var serverSet = wire.NewSet( - manager.New, - api.New, - web.New, - provideHealthz, - provideMetric, - providePprof, - provideRouter, - provideRPC, - provideRPC2, - provideServer, - provideServerOptions, -) - -// provideRouter is a Wire provider function that returns a -// router that is serves the provided handlers. -func provideRouter(api api.Server, web web.Server, rpcv1 rpcHandlerV1, rpcv2 rpcHandlerV2, healthz healthzHandler, metrics *metric.Server, pprof pprofHandler) *chi.Mux { - r := chi.NewRouter() - r.Mount("/healthz", healthz) - r.Mount("/metrics", metrics) - r.Mount("/api", api.Handler()) - r.Mount("/rpc/v2", rpcv2) - r.Mount("/rpc", rpcv1) - r.Mount("/", web.Handler()) - r.Mount("/debug", pprof) - return r -} - -// provideMetric is a Wire provider function that returns the -// healthcheck server. -func provideHealthz() healthzHandler { - v := health.New() - return healthzHandler(v) -} - -// provideMetric is a Wire provider function that returns the -// metrics server exposing metrics in prometheus format. -func provideMetric(session core.Session, config config.Config) *metric.Server { - return metric.NewServer(session, config.Prometheus.EnableAnonymousAccess) -} - -// providePprof is a Wire provider function that returns the -// pprof server endpoints. -func providePprof(config config.Config) pprofHandler { - if config.Server.Pprof == false { - return pprofHandler( - http.NotFoundHandler(), - ) - } - return pprofHandler( - middleware.Profiler(), - ) -} - -// provideRPC is a Wire provider function that returns an rpc -// handler that exposes the build manager to a remote agent. -func provideRPC(m manager.BuildManager, config config.Config) rpcHandlerV1 { - v := rpc.NewServer(m, config.RPC.Secret) - return rpcHandlerV1(v) -} - -// provideRPC2 is a Wire provider function that returns an rpc -// handler that exposes the build manager to a remote agent. -func provideRPC2(m manager.BuildManager, config config.Config) rpcHandlerV2 { - v := rpc2.NewServer(m, config.RPC.Secret) - return rpcHandlerV2(v) -} - -// provideServer is a Wire provider function that returns an -// http server that is configured from the environment. -func provideServer(handler *chi.Mux, config config.Config) *server.Server { - return &server.Server{ - Acme: config.Server.Acme, - Addr: config.Server.Port, - Cert: config.Server.Cert, - Key: config.Server.Key, - Host: config.Server.Host, - Handler: handler, - } -} - -// provideServerOptions is a Wire provider function that returns -// the http web server security option from the environment. 
-func provideServerOptions(config config.Config) secure.Options { - return secure.Options{ - AllowedHosts: config.HTTP.AllowedHosts, - HostsProxyHeaders: config.HTTP.HostsProxyHeaders, - SSLRedirect: config.HTTP.SSLRedirect, - SSLTemporaryRedirect: config.HTTP.SSLTemporaryRedirect, - SSLHost: config.HTTP.SSLHost, - SSLProxyHeaders: config.HTTP.SSLProxyHeaders, - STSSeconds: config.HTTP.STSSeconds, - STSIncludeSubdomains: config.HTTP.STSIncludeSubdomains, - STSPreload: config.HTTP.STSPreload, - ForceSTSHeader: config.HTTP.ForceSTSHeader, - FrameDeny: config.HTTP.FrameDeny, - ContentTypeNosniff: config.HTTP.ContentTypeNosniff, - BrowserXssFilter: config.HTTP.BrowserXSSFilter, - ContentSecurityPolicy: config.HTTP.ContentSecurityPolicy, - ReferrerPolicy: config.HTTP.ReferrerPolicy, - } -} diff --git a/cmd/drone-server/inject_service.go b/cmd/drone-server/inject_service.go deleted file mode 100644 index 25c9b8eb88..0000000000 --- a/cmd/drone-server/inject_service.go +++ /dev/null @@ -1,220 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package main - -import ( - "time" - - "github.com/drone/drone/cmd/drone-server/config" - "github.com/drone/drone/core" - "github.com/drone/drone/livelog" - "github.com/drone/drone/metric/sink" - "github.com/drone/drone/pubsub" - "github.com/drone/drone/service/canceler" - "github.com/drone/drone/service/canceler/reaper" - "github.com/drone/drone/service/commit" - contents "github.com/drone/drone/service/content" - "github.com/drone/drone/service/content/cache" - "github.com/drone/drone/service/hook" - "github.com/drone/drone/service/hook/parser" - "github.com/drone/drone/service/linker" - "github.com/drone/drone/service/netrc" - orgs "github.com/drone/drone/service/org" - "github.com/drone/drone/service/repo" - "github.com/drone/drone/service/status" - "github.com/drone/drone/service/syncer" - "github.com/drone/drone/service/token" - "github.com/drone/drone/service/transfer" - "github.com/drone/drone/service/user" - "github.com/drone/drone/session" - "github.com/drone/drone/trigger" - "github.com/drone/drone/trigger/cron" - "github.com/drone/drone/version" - "github.com/drone/go-scm/scm" - - "github.com/google/wire" -) - -// wire set for loading the services. -var serviceSet = wire.NewSet( - canceler.New, - commit.New, - cron.New, - livelog.New, - linker.New, - parser.New, - pubsub.New, - token.Renewer, - transfer.New, - trigger.New, - user.New, - - provideRepositoryService, - provideContentService, - provideDatadog, - provideHookService, - provideNetrcService, - provideOrgService, - provideReaper, - provideSession, - provideStatusService, - provideSyncer, - provideSystem, -) - -// provideContentService is a Wire provider function that -// returns a contents service wrapped with a simple LRU cache. 
-func provideContentService(client *scm.Client, renewer core.Renewer) core.FileService { - return cache.Contents( - contents.New(client, renewer), - ) -} - -// provideHookService is a Wire provider function that returns a -// hook service based on the environment configuration. -func provideHookService(client *scm.Client, renewer core.Renewer, config config.Config) core.HookService { - return hook.New(client, config.Proxy.Addr, renewer) -} - -// provideNetrcService is a Wire provider function that returns -// a netrc service based on the environment configuration. -func provideNetrcService(client *scm.Client, renewer core.Renewer, config config.Config) core.NetrcService { - return netrc.New( - client, - renewer, - config.Cloning.AlwaysAuth, - config.Cloning.Username, - config.Cloning.Password, - ) -} - -// provideOrgService is a Wire provider function that -// returns an organization service wrapped with a simple cache. -func provideOrgService(client *scm.Client, renewer core.Renewer) core.OrganizationService { - return orgs.NewCache(orgs.New(client, renewer), 10, time.Minute*5) -} - -// provideRepo is a Wire provider function that returns -// a repo based on the environment configuration -func provideRepositoryService(client *scm.Client, renewer core.Renewer, config config.Config) core.RepositoryService { - return repo.New( - client, - renewer, - config.Repository.Visibility, - config.Repository.Trusted, - ) -} - -// provideSession is a Wire provider function that returns a -// user session based on the environment configuration. -func provideSession(store core.UserStore, config config.Config) (core.Session, error) { - return session.New(store, session.NewConfig( - config.Session.Secret, - config.Session.Timeout, - config.Session.Secure), - ), nil -} - -// provideUserService is a Wire provider function that returns a -// user service based on the environment configuration. -func provideStatusService(client *scm.Client, renewer core.Renewer, config config.Config) core.StatusService { - return status.New(client, renewer, status.Config{ - Base: config.Server.Addr, - Name: config.Status.Name, - Disabled: config.Status.Disabled, - }) -} - -// provideSyncer is a Wire provider function that returns a -// repository synchronizer. -func provideSyncer(repoz core.RepositoryService, - repos core.RepositoryStore, - users core.UserStore, - batch core.Batcher, - config config.Config) core.Syncer { - sync := syncer.New(repoz, repos, users, batch) - // the user can define a filter that limits which - // repositories can be synchronized and stored in the - // database. - if filter := config.Repository.Filter; len(filter) > 0 { - sync.SetFilter(syncer.NamespaceFilter(filter)) - } - return sync -} - -// provideSyncer is a Wire provider function that returns the -// system details structure. -func provideSystem(config config.Config) *core.System { - return &core.System{ - Proto: config.Server.Proto, - Host: config.Server.Host, - Link: config.Server.Addr, - Version: version.Version.String(), - } -} - -// provideReaper is a Wire provider function that returns the -// zombie build reaper. -func provideReaper( - repos core.RepositoryStore, - builds core.BuildStore, - stages core.StageStore, - canceler core.Canceler, - config config.Config, -) *reaper.Reaper { - return reaper.New( - repos, - builds, - stages, - canceler, - config.Cleanup.Running, - config.Cleanup.Pending, - config.Cleanup.Buffer, - ) -} - -// provideDatadog is a Wire provider function that returns the -// datadog sink. 
-func provideDatadog( - users core.UserStore, - repos core.RepositoryStore, - builds core.BuildStore, - system *core.System, - license *core.License, - config config.Config, -) *sink.Datadog { - return sink.New( - users, - repos, - builds, - *system, - sink.Config{ - Endpoint: config.Datadog.Endpoint, - Token: config.Datadog.Token, - License: license.Kind, - Licensor: license.Licensor, - Subscription: license.Subscription, - EnableGithub: config.IsGitHub(), - EnableGithubEnt: config.IsGitHubEnterprise(), - EnableGitlab: config.IsGitLab(), - EnableBitbucket: config.IsBitbucket(), - EnableStash: config.IsStash(), - EnableGogs: config.IsGogs(), - EnableGitea: config.IsGitea(), - EnableGitee: config.IsGitee(), - EnableAgents: !config.Agent.Disabled, - }, - ) -} diff --git a/cmd/drone-server/inject_store.go b/cmd/drone-server/inject_store.go deleted file mode 100644 index 23a6dcf1fc..0000000000 --- a/cmd/drone-server/inject_store.go +++ /dev/null @@ -1,181 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package main - -import ( - "github.com/drone/drone/cmd/drone-server/config" - "github.com/drone/drone/core" - "github.com/drone/drone/metric" - "github.com/drone/drone/store/batch" - "github.com/drone/drone/store/batch2" - "github.com/drone/drone/store/build" - "github.com/drone/drone/store/card" - "github.com/drone/drone/store/cron" - "github.com/drone/drone/store/logs" - "github.com/drone/drone/store/perm" - "github.com/drone/drone/store/repos" - "github.com/drone/drone/store/secret" - "github.com/drone/drone/store/secret/global" - "github.com/drone/drone/store/shared/db" - "github.com/drone/drone/store/shared/encrypt" - "github.com/drone/drone/store/stage" - "github.com/drone/drone/store/step" - "github.com/drone/drone/store/template" - "github.com/drone/drone/store/user" - - "github.com/google/wire" - "github.com/sirupsen/logrus" -) - -// wire set for loading the stores. -var storeSet = wire.NewSet( - provideDatabase, - provideEncrypter, - provideBuildStore, - provideLogStore, - provideRepoStore, - provideStageStore, - provideUserStore, - provideBatchStore, - // batch.New, - cron.New, - card.New, - perm.New, - secret.New, - global.New, - step.New, - template.New, -) - -// provideDatabase is a Wire provider function that provides a -// database connection, configured from the environment. -func provideDatabase(config config.Config) (*db.DB, error) { - return db.Connect( - config.Database.Driver, - config.Database.Datasource, - config.Database.MaxConnections, - ) -} - -// provideEncrypter is a Wire provider function that provides a -// database encrypter, configured from the environment. -func provideEncrypter(config config.Config) (encrypt.Encrypter, error) { - enc, err := encrypt.New(config.Database.Secret) - // mixed-content mode should be set to true if the database - // originally had encryption disabled and therefore has - // plaintext entries. 
This prevents Drone from returning an - // error if decryption fails; on failure, the ciphertext is - // returned as-is and the error is ignored. - if aesgcm, ok := enc.(*encrypt.Aesgcm); ok { - logrus.Debugln("main: database encryption enabled") - if config.Database.EncryptMixedContent { - logrus.Debugln("main: database encryption mixed-mode enabled") - aesgcm.Compat = true - } - } - return enc, err -} - -// provideBuildStore is a Wire provider function that provides a -// build datastore, configured from the environment, with metrics -// enabled. -func provideBuildStore(db *db.DB) core.BuildStore { - builds := build.New(db) - metric.BuildCount(builds) - metric.PendingBuildCount(builds) - metric.RunningBuildCount(builds) - return builds -} - -// provideLogStore is a Wire provider function that provides a -// log datastore, configured from the environment. -func provideLogStore(db *db.DB, config config.Config) core.LogStore { - s := logs.New(db) - if config.S3.Bucket != "" { - p := logs.NewS3Env( - config.S3.Bucket, - config.S3.Prefix, - config.S3.Endpoint, - config.S3.PathStyle, - ) - return logs.NewCombined(p, s) - } - if config.AzureBlob.ContainerName != "" { - p := logs.NewAzureBlobEnv( - config.AzureBlob.ContainerName, - config.AzureBlob.StorageAccountName, - config.AzureBlob.StorageAccessKey, - ) - return logs.NewCombined(p, s) - } - return s -} - -// provideStageStore is a Wire provider function that provides a -// stage datastore, configured from the environment, with metrics -// enabled. -func provideStageStore(db *db.DB) core.StageStore { - stages := stage.New(db) - metric.PendingJobCount(stages) - metric.RunningJobCount(stages) - return stages -} - -// provideRepoStore is a Wire provider function that provides a -// user datastore, configured from the environment, with metrics -// enabled. -func provideRepoStore(db *db.DB) core.RepositoryStore { - repos := repos.New(db) - metric.RepoCount(repos) - return repos -} - -// provideBatchStore is a Wire provider function that provides a -// batcher. If the experimental batcher is enabled it is returned. -func provideBatchStore(db *db.DB, config config.Config) core.Batcher { - if config.Database.LegacyBatch { - return batch.New(db) - } - return batch2.New(db) -} - -// provideUserStore is a Wire provider function that provides a -// user datastore, configured from the environment, with metrics -// enabled. -func provideUserStore(db *db.DB, enc encrypt.Encrypter, config config.Config) core.UserStore { - // create the user store with encryption iff the user - // encryption feature flag is enabled. - // - // why not enable by default? because the user table is - // accessed on every http request and we are unsure what, - // if any performance implications user table encryption - // may have on the system. - // - // it is very possible there are zero material performance - // implications, however, if there is a performance regression - // we could look at implementing in-memory lru caching, which - // we already employ in other areas of the software. 
- if config.Database.EncryptUserTable { - logrus.Debugln("main: database encryption enabled for user table") - users := user.New(db, enc) - metric.UserCount(users) - return users - } - - noenc, _ := encrypt.New("") - users := user.New(db, noenc) - metric.UserCount(users) - return users -} diff --git a/cmd/drone-server/main.go b/cmd/drone-server/main.go deleted file mode 100644 index d03a57dc07..0000000000 --- a/cmd/drone-server/main.go +++ /dev/null @@ -1,193 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package main - -import ( - "context" - "flag" - "fmt" - - "github.com/drone/drone/cmd/drone-server/bootstrap" - "github.com/drone/drone/cmd/drone-server/config" - "github.com/drone/drone/core" - "github.com/drone/drone/metric/sink" - "github.com/drone/drone/operator/runner" - "github.com/drone/drone/service/canceler/reaper" - "github.com/drone/drone/server" - "github.com/drone/drone/trigger/cron" - "github.com/drone/signal" - - "github.com/joho/godotenv" - "github.com/sirupsen/logrus" - "golang.org/x/sync/errgroup" - - _ "github.com/go-sql-driver/mysql" - _ "github.com/lib/pq" - _ "github.com/mattn/go-sqlite3" -) - -func main() { - var envfile string - flag.StringVar(&envfile, "env-file", ".env", "Read in a file of environment variables") - flag.Parse() - - godotenv.Load(envfile) - config, err := config.Environ() - if err != nil { - logger := logrus.WithError(err) - logger.Fatalln("main: invalid configuration") - } - - initLogging(config) - ctx := signal.WithContext( - context.Background(), - ) - - // if trace level logging is enabled, output the - // configuration parameters. - if logrus.IsLevelEnabled(logrus.TraceLevel) { - fmt.Println(config.String()) - } - - app, err := InitializeApplication(config) - if err != nil { - logger := logrus.WithError(err) - logger.Fatalln("main: cannot initialize server") - } - - // optionally bootstrap the system with administrative or - // machine users configured in the environment. - err = bootstrap.New(app.users).Bootstrap(ctx, &core.User{ - Login: config.Users.Create.Username, - Machine: config.Users.Create.Machine, - Admin: config.Users.Create.Admin, - Hash: config.Users.Create.Token, - }) - if err != nil { - logger := logrus.WithError(err) - logger.Fatalln("cannot bootstrap user account") - } - - g := errgroup.Group{} - g.Go(func() error { - logrus.WithFields( - logrus.Fields{ - "proto": config.Server.Proto, - "host": config.Server.Host, - "port": config.Server.Port, - "url": config.Server.Addr, - "acme": config.Server.Acme, - }, - ).Infoln("starting the http server") - return app.server.ListenAndServe(ctx) - }) - - // launches the datadog sink in a goroutine. If the sink - // is disabled, the goroutine exits immediately without error. - g.Go(func() (err error) { - if !config.Datadog.Enabled { - return nil - } - return app.sink.Start(ctx) - }) - - // launches the cron runner in a goroutine. If the cron - // runner is disabled, the goroutine exits immediately - // without error. 
- g.Go(func() (err error) { - if config.Cron.Disabled { - return nil - } - logrus.WithField("interval", config.Cron.Interval.String()). - Infoln("starting the cron scheduler") - return app.cron.Start(ctx, config.Cron.Interval) - }) - - // launches the reaper in a goroutine. If the reaper - // is disabled, the goroutine exits immediately - // without error. - g.Go(func() (err error) { - if config.Cleanup.Disabled { - return nil - } - logrus.WithField("interval", config.Cleanup.Interval.String()). - Infoln("starting the zombie build reaper") - return app.reaper.Start(ctx, config.Cleanup.Interval) - }) - - // launches the build runner in a goroutine. If the local - // runner is disabled (because nomad or kubernetes is enabled) - // then the goroutine exits immediately without error. - g.Go(func() (err error) { - if app.runner == nil { - return nil - } - logrus.WithField("threads", config.Runner.Capacity). - Infoln("main: starting the local build runner") - return app.runner.Start(ctx, config.Runner.Capacity) - }) - - if err := g.Wait(); err != nil { - logrus.WithError(err).Fatalln("program terminated") - } -} - -// helper function configures the logging. -func initLogging(c config.Config) { - if c.Logging.Debug { - logrus.SetLevel(logrus.DebugLevel) - } - if c.Logging.Trace { - logrus.SetLevel(logrus.TraceLevel) - } - if c.Logging.Text { - logrus.SetFormatter(&logrus.TextFormatter{ - ForceColors: c.Logging.Color, - DisableColors: !c.Logging.Color, - }) - } else { - logrus.SetFormatter(&logrus.JSONFormatter{ - PrettyPrint: c.Logging.Pretty, - }) - } -} - -// application is the main struct for the Drone server. -type application struct { - cron *cron.Scheduler - reaper *reaper.Reaper - sink *sink.Datadog - runner *runner.Runner - server *server.Server - users core.UserStore -} - -// newApplication creates a new application struct. -func newApplication( - cron *cron.Scheduler, - reaper *reaper.Reaper, - sink *sink.Datadog, - runner *runner.Runner, - server *server.Server, - users core.UserStore) application { - return application{ - users: users, - cron: cron, - sink: sink, - server: server, - runner: runner, - reaper: reaper, - } -} diff --git a/cmd/drone-server/wire.go b/cmd/drone-server/wire.go deleted file mode 100644 index bc9376511e..0000000000 --- a/cmd/drone-server/wire.go +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -//+build wireinject - -package main - -import ( - "github.com/drone/drone/cmd/drone-server/config" - "github.com/google/wire" -) - -func InitializeApplication(config config.Config) (application, error) { - wire.Build( - clientSet, - licenseSet, - loginSet, - pluginSet, - runnerSet, - schedulerSet, - serverSet, - serviceSet, - storeSet, - externalSet, - newApplication, - ) - return application{}, nil -} diff --git a/cmd/drone-server/wire_gen.go b/cmd/drone-server/wire_gen.go deleted file mode 100644 index 99e04df405..0000000000 --- a/cmd/drone-server/wire_gen.go +++ /dev/null @@ -1,120 +0,0 @@ -// Code generated by Wire. DO NOT EDIT. - -//go:generate wire -//+build !wireinject - -package main - -import ( - "github.com/drone/drone/cmd/drone-server/config" - "github.com/drone/drone/handler/api" - "github.com/drone/drone/handler/web" - "github.com/drone/drone/livelog" - "github.com/drone/drone/operator/manager" - "github.com/drone/drone/pubsub" - "github.com/drone/drone/service/canceler" - "github.com/drone/drone/service/commit" - "github.com/drone/drone/service/hook/parser" - "github.com/drone/drone/service/license" - "github.com/drone/drone/service/linker" - "github.com/drone/drone/service/token" - "github.com/drone/drone/service/transfer" - "github.com/drone/drone/service/user" - "github.com/drone/drone/store/card" - "github.com/drone/drone/store/cron" - "github.com/drone/drone/store/perm" - "github.com/drone/drone/store/secret" - "github.com/drone/drone/store/secret/global" - "github.com/drone/drone/store/step" - "github.com/drone/drone/store/template" - "github.com/drone/drone/trigger" - cron2 "github.com/drone/drone/trigger/cron" -) - -import ( - _ "github.com/go-sql-driver/mysql" - _ "github.com/lib/pq" - _ "github.com/mattn/go-sqlite3" -) - -// Injectors from wire.go: - -func InitializeApplication(config2 config.Config) (application, error) { - client := provideClient(config2) - refresher := provideRefresher(config2) - db, err := provideDatabase(config2) - if err != nil { - return application{}, err - } - encrypter, err := provideEncrypter(config2) - if err != nil { - return application{}, err - } - userStore := provideUserStore(db, encrypter, config2) - renewer := token.Renewer(refresher, userStore) - commitService := commit.New(client, renewer) - cronStore := cron.New(db) - repositoryStore := provideRepoStore(db) - buildStore := provideBuildStore(db) - redisDB, err := provideRedisClient(config2) - if err != nil { - return application{}, err - } - corePubsub := pubsub.New(redisDB) - stageStore := provideStageStore(db) - scheduler := provideScheduler(stageStore, redisDB) - statusService := provideStatusService(client, renewer, config2) - stepStore := step.New(db) - system := provideSystem(config2) - webhookSender := provideWebhookPlugin(config2, system) - coreCanceler := canceler.New(buildStore, corePubsub, repositoryStore, scheduler, stageStore, statusService, stepStore, userStore, webhookSender) - fileService := provideContentService(client, renewer) - configService := provideConfigPlugin(client, fileService, config2) - templateStore := template.New(db) - convertService := provideConvertPlugin(client, fileService, config2, templateStore) - validateService := provideValidatePlugin(config2) - triggerer := trigger.New(coreCanceler, configService, convertService, commitService, statusService, buildStore, scheduler, repositoryStore, userStore, validateService, webhookSender) - cronScheduler := cron2.New(commitService, cronStore, repositoryStore, userStore, triggerer) - reaper := 
provideReaper(repositoryStore, buildStore, stageStore, coreCanceler, config2) - coreLicense := provideLicense(client, config2) - datadog := provideDatadog(userStore, repositoryStore, buildStore, system, coreLicense, config2) - cardStore := card.New(db) - logStore := provideLogStore(db, config2) - logStream := livelog.New(redisDB) - netrcService := provideNetrcService(client, renewer, config2) - secretStore := secret.New(db, encrypter) - globalSecretStore := global.New(db, encrypter) - buildManager := manager.New(buildStore, cardStore, configService, convertService, corePubsub, logStore, logStream, netrcService, repositoryStore, scheduler, secretStore, globalSecretStore, statusService, stageStore, stepStore, system, userStore, webhookSender) - secretService := provideSecretPlugin(config2) - registryService := provideRegistryPlugin(config2) - runner := provideRunner(buildManager, secretService, registryService, config2) - hookService := provideHookService(client, renewer, config2) - licenseService := license.NewService(userStore, repositoryStore, buildStore, coreLicense) - organizationService := provideOrgService(client, renewer) - permStore := perm.New(db) - repositoryService := provideRepositoryService(client, renewer, config2) - session, err := provideSession(userStore, config2) - if err != nil { - return application{}, err - } - batcher := provideBatchStore(db, config2) - syncer := provideSyncer(repositoryService, repositoryStore, userStore, batcher, config2) - transferer := transfer.New(repositoryStore, permStore) - userService := user.New(client, renewer) - server := api.New(buildStore, commitService, cardStore, cronStore, corePubsub, globalSecretStore, hookService, logStore, coreLicense, licenseService, organizationService, permStore, repositoryStore, repositoryService, scheduler, secretStore, stageStore, stepStore, statusService, session, logStream, syncer, system, templateStore, transferer, triggerer, userStore, userService, webhookSender) - admissionService := provideAdmissionPlugin(client, organizationService, userService, config2) - hookParser := parser.New(client) - coreLinker := linker.New(client) - middleware := provideLogin(config2) - options := provideServerOptions(config2) - webServer := web.New(admissionService, buildStore, client, hookParser, coreLicense, licenseService, coreLinker, middleware, repositoryStore, session, syncer, triggerer, userStore, userService, webhookSender, options, system) - mainRpcHandlerV1 := provideRPC(buildManager, config2) - mainRpcHandlerV2 := provideRPC2(buildManager, config2) - mainHealthzHandler := provideHealthz() - metricServer := provideMetric(session, config2) - mainPprofHandler := providePprof(config2) - mux := provideRouter(server, webServer, mainRpcHandlerV1, mainRpcHandlerV2, mainHealthzHandler, metricServer, mainPprofHandler) - serverServer := provideServer(mux, config2) - mainApplication := newApplication(cronScheduler, reaper, datadog, runner, serverServer, userStore) - return mainApplication, nil -} diff --git a/core/admission.go b/core/admission.go deleted file mode 100644 index ad682b2f87..0000000000 --- a/core/admission.go +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package core - -import "context" - -// AdmissionService grants access to the system. The service can -// be used to restrict access to authorized users, such as -// members of an organization in your source control management -// system. -type AdmissionService interface { - Admit(context.Context, *User) error -} diff --git a/core/batch.go b/core/batch.go deleted file mode 100644 index 0303fa0fdd..0000000000 --- a/core/batch.go +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package core - -import "context" - -// Batch represents a Batch request to synchronize the local -// repository and permission store for a user account. -type Batch struct { - Insert []*Repository `json:"insert"` - Update []*Repository `json:"update"` - Rename []*Repository `json:"rename"` - Revoke []*Repository `json:"revoke"` -} - -// Batcher batch updates the user account. -type Batcher interface { - Batch(context.Context, *User, *Batch) error -} diff --git a/core/build.go b/core/build.go deleted file mode 100644 index f70761869d..0000000000 --- a/core/build.go +++ /dev/null @@ -1,143 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package core - -import "context" - -// Build represents a build execution. 
-type Build struct { - ID int64 `db:"build_id" json:"id"` - RepoID int64 `db:"build_repo_id" json:"repo_id"` - Trigger string `db:"build_trigger" json:"trigger"` - Number int64 `db:"build_number" json:"number"` - Parent int64 `db:"build_parent" json:"parent,omitempty"` - Status string `db:"build_status" json:"status"` - Error string `db:"build_error" json:"error,omitempty"` - Event string `db:"build_event" json:"event"` - Action string `db:"build_action" json:"action"` - Link string `db:"build_link" json:"link"` - Timestamp int64 `db:"build_timestamp" json:"timestamp"` - Title string `db:"build_title" json:"title,omitempty"` - Message string `db:"build_message" json:"message"` - Before string `db:"build_before" json:"before"` - After string `db:"build_after" json:"after"` - Ref string `db:"build_ref" json:"ref"` - Fork string `db:"build_source_repo" json:"source_repo"` - Source string `db:"build_source" json:"source"` - Target string `db:"build_target" json:"target"` - Author string `db:"build_author" json:"author_login"` - AuthorName string `db:"build_author_name" json:"author_name"` - AuthorEmail string `db:"build_author_email" json:"author_email"` - AuthorAvatar string `db:"build_author_avatar" json:"author_avatar"` - Sender string `db:"build_sender" json:"sender"` - Params map[string]string `db:"build_params" json:"params,omitempty"` - Cron string `db:"build_cron" json:"cron,omitempty"` - Deploy string `db:"build_deploy" json:"deploy_to,omitempty"` - DeployID int64 `db:"build_deploy_id" json:"deploy_id,omitempty"` - Debug bool `db:"build_debug" json:"debug,omitempty"` - Started int64 `db:"build_started" json:"started"` - Finished int64 `db:"build_finished" json:"finished"` - Created int64 `db:"build_created" json:"created"` - Updated int64 `db:"build_updated" json:"updated"` - Version int64 `db:"build_version" json:"version"` - Stages []*Stage `db:"-" json:"stages,omitempty"` -} - -// BuildStore defines operations for working with builds. -type BuildStore interface { - // Find returns a build from the datastore. - Find(context.Context, int64) (*Build, error) - - // FindNumber returns a build from the datastore by build number. - FindNumber(context.Context, int64, int64) (*Build, error) - - // FindLast returns the last build from the datastore by ref. - FindRef(context.Context, int64, string) (*Build, error) - - // List returns a list of builds from the datastore by repository id. - List(context.Context, int64, int, int) ([]*Build, error) - - // ListRef returns a list of builds from the datastore by ref. - ListRef(context.Context, int64, string, int, int) ([]*Build, error) - - // LatestBranches returns the latest builds from the - // datastore by branch. - LatestBranches(context.Context, int64) ([]*Build, error) - - // LatestPulls returns the latest builds from the - // datastore by pull request. - LatestPulls(context.Context, int64) ([]*Build, error) - - // LatestDeploys returns the latest builds from the - // datastore by deployment target. - LatestDeploys(context.Context, int64) ([]*Build, error) - - // Pending returns a list of pending builds from the - // datastore by repository id (DEPRECATED). - Pending(context.Context) ([]*Build, error) - - // Running returns a list of running builds from the - // datastore by repository id (DEPRECATED). - Running(context.Context) ([]*Build, error) - - // Create persists a build to the datastore. - Create(context.Context, *Build, []*Stage) error - - // Update updates a build in the datastore. 
- Update(context.Context, *Build) error - - // Delete deletes a build from the datastore. - Delete(context.Context, *Build) error - - // DeletePull deletes a pull request index from the datastore. - DeletePull(context.Context, int64, int) error - - // DeleteBranch deletes a branch index from the datastore. - DeleteBranch(context.Context, int64, string) error - - // DeleteDeploy deletes a deploy index from the datastore. - DeleteDeploy(context.Context, int64, string) error - - // Purge deletes builds from the database where the build number is less than n. - Purge(context.Context, int64, int64) error - - // Count returns a count of builds. - Count(context.Context) (int64, error) -} - -// IsDone returns true if the build has a completed state. -func (b *Build) IsDone() bool { - switch b.Status { - case StatusWaiting, - StatusPending, - StatusRunning, - StatusBlocked: - return false - default: - return true - } -} - -// IsFailed returns true if the build has failed -func (b *Build) IsFailed() bool { - switch b.Status { - case StatusFailing, - StatusKilled, - StatusError: - return true - default: - return false - } -} diff --git a/core/build_test.go b/core/build_test.go deleted file mode 100644 index 0912528b2f..0000000000 --- a/core/build_test.go +++ /dev/null @@ -1,7 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package core diff --git a/core/cancel.go b/core/cancel.go deleted file mode 100644 index 00a4a5ecfc..0000000000 --- a/core/cancel.go +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package core - -import "context" - -// Canceler cancels a build. -type Canceler interface { - // Cancel cancels the provided build. - Cancel(context.Context, *Repository, *Build) error - - // CancelPending cancels all pending builds of the same - // type of as the provided build. - CancelPending(context.Context, *Repository, *Build) error -} diff --git a/core/card.go b/core/card.go deleted file mode 100644 index 2da4dcc40a..0000000000 --- a/core/card.go +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package core - -import ( - "context" - "encoding/json" - "io" -) - -type CardInput struct { - Schema string `json:"schema"` - Data json.RawMessage `json:"data"` -} - -// CardStore manages repository cards. 
-type CardStore interface { - // Find returns a card data stream from the datastore. - Find(ctx context.Context, step int64) (io.ReadCloser, error) - - // Create copies the card stream from Reader r to the datastore. - Create(ctx context.Context, step int64, r io.Reader) error - - // Update copies the card stream from Reader r to the datastore. - Update(ctx context.Context, step int64, r io.Reader) error - - // Delete purges the card data from the datastore. - Delete(ctx context.Context, step int64) error -} diff --git a/core/commit.go b/core/commit.go deleted file mode 100644 index f556b9d8e2..0000000000 --- a/core/commit.go +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package core - -import "context" - -type ( - // Commit represents a git commit. - Commit struct { - Sha string - Ref string - Message string - Author *Committer - Committer *Committer - Link string - } - - // Committer represents the commit author. - Committer struct { - Name string - Email string - Date int64 - Login string - Avatar string - } - - // Change represents a file change in a commit. - Change struct { - Path string - Added bool - Renamed bool - Deleted bool - } - - // CommitService provides access to the commit history from - // the external source code management service (e.g. GitHub). - CommitService interface { - // Find returns the commit information by sha. - Find(ctx context.Context, user *User, repo, sha string) (*Commit, error) - - // FindRef returns the commit information by reference. - FindRef(ctx context.Context, user *User, repo, ref string) (*Commit, error) - - // ListChanges returns the files change by sha or reference. - ListChanges(ctx context.Context, user *User, repo, sha, ref string) ([]*Change, error) - } -) diff --git a/core/config.go b/core/config.go deleted file mode 100644 index d3c2acd2b5..0000000000 --- a/core/config.go +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package core - -import "context" - -type ( - // Config represents a pipeline config file. - Config struct { - Data string `json:"data"` - Kind string `json:"kind"` - } - - // ConfigArgs represents a request for the pipeline - // configuration file (e.g. 
.drone.yml) - ConfigArgs struct { - User *User `json:"-"` - Repo *Repository `json:"repo,omitempty"` - Build *Build `json:"build,omitempty"` - Config *Config `json:"config,omitempty"` - } - - // ConfigService provides pipeline configuration from an - // external service. - ConfigService interface { - Find(context.Context, *ConfigArgs) (*Config, error) - } -) diff --git a/core/convert.go b/core/convert.go deleted file mode 100644 index 8ed8e02078..0000000000 --- a/core/convert.go +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package core - -import "context" - -type ( - // ConvertArgs represents a request to the pipeline - // conversion service. - ConvertArgs struct { - User *User `json:"-"` - Repo *Repository `json:"repo,omitempty"` - Build *Build `json:"build,omitempty"` - Config *Config `json:"config,omitempty"` - } - - // ConvertService converts non-native pipeline - // configuration formats to native configuration - // formats (e.g. jsonnet to yaml). - ConvertService interface { - Convert(context.Context, *ConvertArgs) (*Config, error) - } -) diff --git a/core/cron.go b/core/cron.go deleted file mode 100644 index 5d64d2b817..0000000000 --- a/core/cron.go +++ /dev/null @@ -1,117 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package core - -import ( - "context" - "errors" - "time" - - "github.com/gosimple/slug" - "github.com/robfig/cron" -) - -var ( - errCronExprInvalid = errors.New("Invalid Cronjob Expression") - errCronNameInvalid = errors.New("Invalid Cronjob Name") - errCronBranchInvalid = errors.New("Invalid Cronjob Branch") -) - -type ( - // Cron defines a cron job. - Cron struct { - ID int64 `json:"id"` - RepoID int64 `json:"repo_id"` - Name string `json:"name"` - Expr string `json:"expr"` - Next int64 `json:"next"` - Prev int64 `json:"prev"` - Event string `json:"event"` - Branch string `json:"branch"` - Target string `json:"target,omitempty"` - Disabled bool `json:"disabled"` - Created int64 `json:"created"` - Updated int64 `json:"updated"` - Version int64 `json:"version"` - } - - // CronStore persists cron information to storage. - CronStore interface { - // List returns a cron list from the datastore. - List(context.Context, int64) ([]*Cron, error) - - // Ready returns a cron list from the datastore ready for execution. - Ready(context.Context, int64) ([]*Cron, error) - - // Find returns a cron job from the datastore. 
- Find(context.Context, int64) (*Cron, error) - - // FindName returns a cron job from the datastore. - FindName(context.Context, int64, string) (*Cron, error) - - // Create persists a new cron job to the datastore. - Create(context.Context, *Cron) error - - // Update persists an updated cron job to the datastore. - Update(context.Context, *Cron) error - - // Delete deletes a cron job from the datastore. - Delete(context.Context, *Cron) error - } -) - -// Validate validates the required fields and formats. -func (c *Cron) Validate() error { - _, err := cron.Parse(c.Expr) - if err != nil { - return errCronExprInvalid - } - switch { - case c.Name == "": - return errCronNameInvalid - case c.Name != slug.Make(c.Name): - return errCronNameInvalid - case c.Branch == "": - return errCronBranchInvalid - default: - return nil - } -} - -// SetExpr sets the cron expression name and updates -// the next execution date. -func (c *Cron) SetExpr(expr string) error { - _, err := cron.Parse(expr) - if err != nil { - return errCronExprInvalid - } - c.Expr = expr - return c.Update() -} - -// SetName sets the cronjob name. -func (c *Cron) SetName(name string) { - c.Name = slug.Make(name) -} - -// Update updates the next Cron execution date. -func (c *Cron) Update() error { - sched, err := cron.Parse(c.Expr) - if err != nil { - return err - } - c.Next = sched.Next(time.Now()).Unix() - return nil -} diff --git a/core/cron_test.go b/core/cron_test.go deleted file mode 100644 index 0912528b2f..0000000000 --- a/core/cron_test.go +++ /dev/null @@ -1,7 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package core diff --git a/core/event.go b/core/event.go deleted file mode 100644 index adffb5df20..0000000000 --- a/core/event.go +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package core - -// Hook event constants. -const ( - EventCron = "cron" - EventCustom = "custom" - EventPush = "push" - EventPullRequest = "pull_request" - EventTag = "tag" - EventPromote = "promote" - EventRollback = "rollback" -) diff --git a/core/file.go b/core/file.go deleted file mode 100644 index 3f7a1ca947..0000000000 --- a/core/file.go +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package core - -import "context" - -type ( - // File represents the raw file contents in the remote - // version control system. - File struct { - Data []byte - Hash []byte - } - - // FileArgs provides repository and commit details required - // to fetch the file from the remote source code management - // service. - FileArgs struct { - Commit string - Ref string - } - - // FileService provides access to contents of files in - // the remote source code management service (e.g. GitHub). - FileService interface { - Find(ctx context.Context, user *User, repo, commit, ref, path string) (*File, error) - } -) diff --git a/core/hook.go b/core/hook.go deleted file mode 100644 index 6b05095604..0000000000 --- a/core/hook.go +++ /dev/null @@ -1,70 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package core - -import ( - "context" - "net/http" -) - -// Hook action constants. -const ( - ActionOpen = "open" - ActionClose = "close" - ActionCreate = "create" - ActionDelete = "delete" - ActionSync = "sync" -) - -// Hook represents the payload of a post-commit hook. -type Hook struct { - Parent int64 `json:"parent"` - Trigger string `json:"trigger"` - Event string `json:"event"` - Action string `json:"action"` - Link string `json:"link"` - Timestamp int64 `json:"timestamp"` - Title string `json:"title"` - Message string `json:"message"` - Before string `json:"before"` - After string `json:"after"` - Ref string `json:"ref"` - Fork string `json:"hook"` - Source string `json:"source"` - Target string `json:"target"` - Author string `json:"author_login"` - AuthorName string `json:"author_name"` - AuthorEmail string `json:"author_email"` - AuthorAvatar string `json:"author_avatar"` - Deployment string `json:"deploy_to"` - DeploymentID int64 `json:"deploy_id"` - Debug bool `json:"debug"` - Cron string `json:"cron"` - Sender string `json:"sender"` - Params map[string]string `json:"params"` -} - -// HookService manages post-commit hooks in the external -// source code management service (e.g. GitHub). -type HookService interface { - Create(ctx context.Context, user *User, repo *Repository) error - Delete(ctx context.Context, user *User, repo *Repository) error -} - -// HookParser parses a post-commit hook from the source -// code management system, and returns normalized data. -type HookParser interface { - Parse(req *http.Request, secretFunc func(string) string) (*Hook, *Repository, error) -} diff --git a/core/hook_test.go b/core/hook_test.go deleted file mode 100644 index 0912528b2f..0000000000 --- a/core/hook_test.go +++ /dev/null @@ -1,7 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package core diff --git a/core/license.go b/core/license.go deleted file mode 100644 index 0927cb834f..0000000000 --- a/core/license.go +++ /dev/null @@ -1,75 +0,0 @@ -// Copyright 2019 Drone IO, Inc. 
-// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package core - -import ( - "context" - "errors" - "time" -) - -// License types. -const ( - LicenseFoss = "foss" - LicenseFree = "free" - LicensePersonal = "personal" - LicenseStandard = "standard" - LicenseTrial = "trial" -) - -// ErrUserLimit is returned when attempting to create a new -// user but the maximum number of allowed user accounts -// is exceeded. -var ErrUserLimit = errors.New("User limit exceeded") - -// ErrRepoLimit is returned when attempting to create a new -// repository but the maximum number of allowed repositories -// is exceeded. -var ErrRepoLimit = errors.New("Repository limit exceeded") - -// ErrBuildLimit is returned when attempting to create a new -// build but the maximum number of allowed builds is exceeded. -var ErrBuildLimit = errors.New("Build limit exceeded") - -type ( - // License defines software license parameters. - License struct { - Licensor string `json:"-"` - Subscription string `json:"-"` - Expires time.Time `json:"expires_at,omitempty"` - Kind string `json:"kind,omitempty"` - Repos int64 `json:"repos,omitempty"` - Users int64 `json:"users,omitempty"` - Builds int64 `json:"builds,omitempty"` - Nodes int64 `json:"nodes,omitempty"` - } - - // LicenseService provides access to the license - // service and can be used to check for violations - // and expirations. - LicenseService interface { - // Exceeded returns true if the system has exceeded - // its limits as defined in the license. - Exceeded(context.Context) (bool, error) - - // Expired returns true if the license is expired. - Expired(context.Context) bool - } -) - -// Expired returns true if the license is expired. -func (l *License) Expired() bool { - return l.Expires.IsZero() == false && time.Now().After(l.Expires) -} diff --git a/core/license_test.go b/core/license_test.go deleted file mode 100644 index 0912528b2f..0000000000 --- a/core/license_test.go +++ /dev/null @@ -1,7 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package core diff --git a/core/linker.go b/core/linker.go deleted file mode 100644 index 6cf5fdc215..0000000000 --- a/core/linker.go +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package core - -import "context" - -// Linker provides a deep link to a git resource in the -// source control management system for a given build. -type Linker interface { - Link(ctx context.Context, repo, ref, sha string) (string, error) -} diff --git a/core/logs.go b/core/logs.go deleted file mode 100644 index 65a7074f76..0000000000 --- a/core/logs.go +++ /dev/null @@ -1,70 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package core - -import ( - "context" - "io" -) - -// Line represents a line in the logs. -type Line struct { - Number    int    `json:"pos"` - Message   string `json:"out"` - Timestamp int64  `json:"time"` -} - -// LogStore persists build output to storage. -type LogStore interface { - // Find returns a log stream from the datastore. - Find(ctx context.Context, stage int64) (io.ReadCloser, error) - - // Create copies the log stream from Reader r to the datastore. - Create(ctx context.Context, stage int64, r io.Reader) error - - // Update copies the log stream from Reader r to the datastore. - Update(ctx context.Context, stage int64, r io.Reader) error - - // Delete purges the log stream from the datastore. - Delete(ctx context.Context, stage int64) error -} - -// LogStream manages a live stream of logs. -type LogStream interface { - // Create creates the log stream for the step ID. - Create(context.Context, int64) error - - // Delete deletes the log stream for the step ID. - Delete(context.Context, int64) error - - // Write writes to the log stream. - Write(context.Context, int64, *Line) error - - // Tail tails the log stream. - Tail(context.Context, int64) (<-chan *Line, <-chan error) - - // Info returns internal stream information. - Info(context.Context) *LogStreamInfo -} - -// LogStreamInfo provides internal stream information. This can -// be used to monitor the number of registered streams and -// subscribers. -type LogStreamInfo struct { - // Streams is a key-value pair where the key is the step - // identifier, and the value is the count of subscribers - // streaming the logs. - Streams map[int64]int `json:"streams"` -} diff --git a/core/netrc.go b/core/netrc.go deleted file mode 100644 index 2e11dd2281..0000000000 --- a/core/netrc.go +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License.
- -package core - -import ( - "context" - "fmt" - "net/url" -) - -type ( - // Netrc contains login and initialization information used by - // an automated login process. - Netrc struct { - Machine  string `json:"machine"` - Login    string `json:"login"` - Password string `json:"password"` - } - - // NetrcService returns a valid netrc file that can be used - // to authenticate and clone a private repository. If - // authentication is not required or enabled, a nil Netrc - // file and nil error are returned. - NetrcService interface { - Create(context.Context, *User, *Repository) (*Netrc, error) - } -) - -// SetMachine sets the netrc machine from a URL value. -func (n *Netrc) SetMachine(address string) error { - url, err := url.Parse(address) - if err != nil { - return err - } - n.Machine = url.Hostname() - return nil -} - -// String returns the string representation of a netrc file. -func (n *Netrc) String() string { - return fmt.Sprintf("machine %s login %s password %s", - n.Machine, - n.Login, - n.Password, - ) -} diff --git a/core/netrc_test.go b/core/netrc_test.go deleted file mode 100644 index 0912528b2f..0000000000 --- a/core/netrc_test.go +++ /dev/null @@ -1,7 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package core diff --git a/core/org.go b/core/org.go deleted file mode 100644 index 4ea548e6e4..0000000000 --- a/core/org.go +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package core - -import "context" - -// Organization represents an organization in the source -// code management system (e.g. GitHub). -type Organization struct { - Name   string - Avatar string -} - -// OrganizationService provides access to organization and -// team access in the external source code management system -// (e.g. GitHub). -type OrganizationService interface { - // List returns a list of organizations of which the - // user is a member. - List(context.Context, *User) ([]*Organization, error) - - // Membership returns true if the user is a member - // of the organization, and true if the user is an - // admin of the organization. - Membership(context.Context, *User, string) (bool, bool, error) -} diff --git a/core/perm.go b/core/perm.go deleted file mode 100644 index 4479ed365a..0000000000 --- a/core/perm.go +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and -// limitations under the License. - -package core - -import "context" - -type ( - // Perm represents an individuals repository - // permission. - Perm struct { - UserID int64 `db:"perm_user_id" json:"-"` - RepoUID string `db:"perm_repo_uid" json:"-"` - Read bool `db:"perm_read" json:"read"` - Write bool `db:"perm_write" json:"write"` - Admin bool `db:"perm_admin" json:"admin"` - Synced int64 `db:"perm_synced" json:"-"` - Created int64 `db:"perm_created" json:"-"` - Updated int64 `db:"perm_updated" json:"-"` - } - - // Collaborator represents a project collaborator, - // and provides the account and repository permissions - // details. - Collaborator struct { - UserID int64 `db:"perm_user_id" json:"user_id"` - RepoUID string `db:"perm_repo_uid" json:"repo_id"` - Login string `db:"user_login" json:"login"` - Avatar string `db:"user_avatar" json:"avatar"` - Read bool `db:"perm_read" json:"read"` - Write bool `db:"perm_write" json:"write"` - Admin bool `db:"perm_admin" json:"admin"` - Synced int64 `db:"perm_synced" json:"synced"` - Created int64 `db:"perm_created" json:"created"` - Updated int64 `db:"perm_updated" json:"updated"` - } - - // PermStore defines operations for working with - // repository permissions. - PermStore interface { - // Find returns a project member from the - // datastore. - Find(ctx context.Context, repoUID string, userID int64) (*Perm, error) - - // List returns a list of project members from the - // datastore. - List(ctx context.Context, repoUID string) ([]*Collaborator, error) - - // Update persists an updated project member - // to the datastore. - Update(context.Context, *Perm) error - - // Delete deletes a project member from the - // datastore. - Delete(context.Context, *Perm) error - } -) diff --git a/core/pubsub.go b/core/pubsub.go deleted file mode 100644 index 59e4d97a7f..0000000000 --- a/core/pubsub.go +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package core - -import "context" - -// Message defines a build change. -type Message struct { - Repository string - Visibility string - Data []byte -} - -// Pubsub provides publish subscriber capabilities, distributing -// messages from multiple publishers to multiple subscribers. -type Pubsub interface { - // Publish publishes the message to all subscribers. - Publish(context.Context, *Message) error - - // Subscribe subscribes to the message broker. - Subscribe(context.Context) (<-chan *Message, <-chan error) - - // Subscribers returns a count of subscribers. - Subscribers() (int, error) -} diff --git a/core/registry.go b/core/registry.go deleted file mode 100644 index b8cc3f0844..0000000000 --- a/core/registry.go +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package core - -import ( - "context" - - "github.com/drone/drone-yaml/yaml" -) - -const ( - // RegistryPull policy allows pulling from a registry. - RegistryPull = "pull" - - // RegistryPush Policy allows pushing to a registry for - // all event types except pull requests. - RegistryPush = "push" - - // RegistryPushPullRequest Policy allows pushing to a - // registry for all event types, including pull requests. - RegistryPushPullRequest = "push-pull-request" -) - -type ( - // Registry represents a docker registry with credentials. - Registry struct { - Address string `json:"address"` - Username string `json:"username"` - Password string `json:"password"` - Policy string `json:"policy"` - } - - // RegistryArgs provides arguments for requesting - // registry credentials from the remote service. - RegistryArgs struct { - Repo *Repository `json:"repo,omitempty"` - Build *Build `json:"build,omitempty"` - Conf *yaml.Manifest `json:"-"` - Pipeline *yaml.Pipeline `json:"-"` - } - - // RegistryService provides registry credentials from an - // external service. - RegistryService interface { - // List returns registry credentials from the global - // remote registry plugin. - List(context.Context, *RegistryArgs) ([]*Registry, error) - } -) diff --git a/core/renewer.go b/core/renewer.go deleted file mode 100644 index 2f3fac66a7..0000000000 --- a/core/renewer.go +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package core - -import "context" - -// Renewer renews the user account authorization. If -// successful, the user token and token expiry attributes -// are updated, and persisted to the datastore. -type Renewer interface { - Renew(ctx context.Context, user *User, force bool) error -} diff --git a/core/repo.go b/core/repo.go deleted file mode 100644 index 2220213abd..0000000000 --- a/core/repo.go +++ /dev/null @@ -1,161 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package core - -import "context" - -// Repository visibility. 
-const ( - VisibilityPublic = "public" - VisibilityPrivate = "private" - VisibilityInternal = "internal" -) - -// Version control systems. -const ( - VersionControlGit = "git" - VersionControlMercurial = "hg" -) - -type ( - // Repository represents a source code repository. - Repository struct { - ID int64 `json:"id"` - UID string `json:"uid"` - UserID int64 `json:"user_id"` - Namespace string `json:"namespace"` - Name string `json:"name"` - Slug string `json:"slug"` - SCM string `json:"scm"` - HTTPURL string `json:"git_http_url"` - SSHURL string `json:"git_ssh_url"` - Link string `json:"link"` - Branch string `json:"default_branch"` - Private bool `json:"private"` - Visibility string `json:"visibility"` - Active bool `json:"active"` - Config string `json:"config_path"` - Trusted bool `json:"trusted"` - Protected bool `json:"protected"` - IgnoreForks bool `json:"ignore_forks"` - IgnorePulls bool `json:"ignore_pull_requests"` - CancelPulls bool `json:"auto_cancel_pull_requests"` - CancelPush bool `json:"auto_cancel_pushes"` - CancelRunning bool `json:"auto_cancel_running"` - Timeout int64 `json:"timeout"` - Throttle int64 `json:"throttle,omitempty"` - Counter int64 `json:"counter"` - Synced int64 `json:"synced"` - Created int64 `json:"created"` - Updated int64 `json:"updated"` - Version int64 `json:"version"` - Signer string `json:"-"` - Secret string `json:"-"` - Build *Build `json:"build,omitempty"` - Perms *Perm `json:"permissions,omitempty"` - Archived bool `json:"archived"` - } - - RepoBuildStage struct { - RepoNamespace string `json:"repo_namespace"` - RepoName string `json:"repo_name"` - RepoSlug string `json:"repo_slug"` - BuildNumber int64 `json:"build_number"` - BuildAuthor string `json:"build_author"` - BuildAuthorName string `json:"build_author_name"` - BuildAuthorEmail string `json:"build_author_email"` - BuildAuthorAvatar string `json:"build_author_avatar"` - BuildSender string `json:"build_sender"` - BuildStarted int64 `json:"build_started"` - BuildFinished int64 `json:"build_finished"` - BuildCreated int64 `json:"build_created"` - BuildUpdated int64 `json:"build_updated"` - StageName string `json:"stage_name"` - StageKind string `json:"stage_kind"` - StageType string `json:"stage_type"` - StageStatus string `json:"stage_status"` - StageMachine string `json:"stage_machine"` - StageOS string `json:"stage_os"` - StageArch string `json:"stage_arch"` - StageVariant string `json:"stage_variant"` - StageKernel string `json:"stage_kernel"` - StageLimit string `json:"stage_limit"` - StageLimitRepo string `json:"stage_limit_repo"` - StageStarted int64 `json:"stage_started"` - StageStopped int64 `json:"stage_stopped"` - } - - // RepositoryStore defines operations for working with repositories. - RepositoryStore interface { - // List returns a repository list from the datastore. - List(context.Context, int64) ([]*Repository, error) - - // ListLatest returns a unique repository list form - // the datastore with the most recent build. - ListLatest(context.Context, int64) ([]*Repository, error) - - // ListRecent returns a non-unique repository list form - // the datastore with the most recent builds. - ListRecent(context.Context, int64) ([]*Repository, error) - - // ListIncomplete returns a non-unique repository list form - // the datastore with incomplete builds. - ListIncomplete(context.Context) ([]*Repository, error) - - // ListRunningStatus returns a list of build / repository /stage information for builds that are incomplete. 
- ListRunningStatus(context.Context) ([]*RepoBuildStage, error) - - // ListAll returns a paginated list of all repositories - // stored in the database, including disabled repositories. - ListAll(ctx context.Context, limit, offset int) ([]*Repository, error) - - // Find returns a repository from the datastore. - Find(context.Context, int64) (*Repository, error) - - // FindName returns a named repository from the datastore. - FindName(context.Context, string, string) (*Repository, error) - - // Create persists a new repository in the datastore. - Create(context.Context, *Repository) error - - // Activate persists the activated repository to the datastore. - Activate(context.Context, *Repository) error - - // Update persists repository changes to the datastore. - Update(context.Context, *Repository) error - - // Delete deletes a repository from the datastore. - Delete(context.Context, *Repository) error - - // Count returns a count of activated repositories. - Count(context.Context) (int64, error) - - // Increment returns an incremented build number - Increment(context.Context, *Repository) (*Repository, error) - } - - // RepositoryService provides access to repository information - // in the remote source code management system (e.g. GitHub). - RepositoryService interface { - // List returns a list of repositories. - List(ctx context.Context, user *User) ([]*Repository, error) - - // Find returns the named repository details. - Find(ctx context.Context, user *User, repo string) (*Repository, error) - - // FindPerm returns the named repository permissions. - FindPerm(ctx context.Context, user *User, repo string) (*Perm, error) - } -) diff --git a/core/sched.go b/core/sched.go deleted file mode 100644 index 58e2488f86..0000000000 --- a/core/sched.go +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package core - -import "context" - -// Filter provides filter criteria to limit stages requested -// from the scheduler. -type Filter struct { - Kind string - Type string - OS string - Arch string - Kernel string - Variant string - Labels map[string]string -} - -// Scheduler schedules Build stages for execution. -type Scheduler interface { - // Schedule schedules the stage for execution. - Schedule(context.Context, *Stage) error - - // Request requests the next stage scheduled for execution. - Request(context.Context, Filter) (*Stage, error) - - // Cancel cancels scheduled or running jobs associated - // with the parent build ID. - Cancel(context.Context, int64) error - - // Cancelled blocks and listens for a cancellation event and - // returns true if the build has been cancelled. - Cancelled(context.Context, int64) (bool, error) - - // Pause pauses the scheduler and prevents new pipelines - // from being scheduled for execution. - Pause(context.Context) error - - // Resume unpauses the scheduler, allowing new pipelines - // to be scheduled for execution. 
- Resume(context.Context) error - - // Stats provides statistics for underlying scheduler. The - // data format is scheduler-specific. - Stats(context.Context) (interface{}, error) -} diff --git a/core/secret.go b/core/secret.go deleted file mode 100644 index 79776f7759..0000000000 --- a/core/secret.go +++ /dev/null @@ -1,135 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package core - -import ( - "context" - "errors" - "regexp" - - "github.com/drone/drone-yaml/yaml" -) - -var ( - errSecretNameInvalid = errors.New("Invalid Secret Name") - errSecretDataInvalid = errors.New("Invalid Secret Value") -) - -type ( - // Secret represents a secret variable, such as a password or token, - // that is provided to the build at runtime. - Secret struct { - ID int64 `json:"id,omitempty"` - RepoID int64 `json:"repo_id,omitempty"` - Namespace string `json:"namespace,omitempty"` - Name string `json:"name,omitempty"` - Type string `json:"type,omitempty"` - Data string `json:"data,omitempty"` - PullRequest bool `json:"pull_request,omitempty"` - PullRequestPush bool `json:"pull_request_push,omitempty"` - } - - // SecretArgs provides arguments for requesting secrets - // from the remote service. - SecretArgs struct { - Name string `json:"name"` - Repo *Repository `json:"repo,omitempty"` - Build *Build `json:"build,omitempty"` - Conf *yaml.Manifest `json:"-"` - } - - // SecretStore manages repository secrets. - SecretStore interface { - // List returns a secret list from the datastore. - List(context.Context, int64) ([]*Secret, error) - - // Find returns a secret from the datastore. - Find(context.Context, int64) (*Secret, error) - - // FindName returns a secret from the datastore. - FindName(context.Context, int64, string) (*Secret, error) - - // Create persists a new secret to the datastore. - Create(context.Context, *Secret) error - - // Update persists an updated secret to the datastore. - Update(context.Context, *Secret) error - - // Delete deletes a secret from the datastore. - Delete(context.Context, *Secret) error - } - - // GlobalSecretStore manages global secrets accessible to - // all repositories in the system. - GlobalSecretStore interface { - // List returns a secret list from the datastore. - List(ctx context.Context, namespace string) ([]*Secret, error) - - // ListAll returns a secret list from the datastore - // for all namespaces. - ListAll(ctx context.Context) ([]*Secret, error) - - // Find returns a secret from the datastore. - Find(ctx context.Context, id int64) (*Secret, error) - - // FindName returns a secret from the datastore. - FindName(ctx context.Context, namespace, name string) (*Secret, error) - - // Create persists a new secret to the datastore. - Create(ctx context.Context, secret *Secret) error - - // Update persists an updated secret to the datastore. - Update(ctx context.Context, secret *Secret) error - - // Delete deletes a secret from the datastore. 
- Delete(ctx context.Context, secret *Secret) error - } - - // SecretService provides secrets from an external service. - SecretService interface { - // Find returns a named secret from the global remote service. - Find(context.Context, *SecretArgs) (*Secret, error) - } -) - -// Validate validates the required fields and formats. -func (s *Secret) Validate() error { - switch { - case len(s.Name) == 0: - return errSecretNameInvalid - case len(s.Data) == 0: - return errSecretDataInvalid - case slugRE.MatchString(s.Name): - return errSecretNameInvalid - default: - return nil - } -} - -// Copy makes a copy of the secret without the value. -func (s *Secret) Copy() *Secret { - return &Secret{ - ID: s.ID, - RepoID: s.RepoID, - Namespace: s.Namespace, - Name: s.Name, - Type: s.Type, - PullRequest: s.PullRequest, - PullRequestPush: s.PullRequestPush, - } -} - -// slug regular expression -var slugRE = regexp.MustCompile("[^a-zA-Z0-9-_.]+") diff --git a/core/secret_test.go b/core/secret_test.go deleted file mode 100644 index 96df2b028e..0000000000 --- a/core/secret_test.go +++ /dev/null @@ -1,78 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package core - -import "testing" - -func TestSecretValidate(t *testing.T) { - tests := []struct { - secret *Secret - error error - }{ - { - secret: &Secret{Name: "password", Data: "correct-horse-battery-staple"}, - error: nil, - }, - { - secret: &Secret{Name: ".some_random-password", Data: "correct-horse-battery-staple"}, - error: nil, - }, - { - secret: &Secret{Name: "password", Data: ""}, - error: errSecretDataInvalid, - }, - { - secret: &Secret{Name: "", Data: "correct-horse-battery-staple"}, - error: errSecretNameInvalid, - }, - { - secret: &Secret{Name: "docker/password", Data: "correct-horse-battery-staple"}, - error: errSecretNameInvalid, - }, - } - for i, test := range tests { - got, want := test.secret.Validate(), test.error - if got != want { - t.Errorf("Want error %v, got %v at index %d", want, got, i) - } - } -} - -func TestSecretSafeCopy(t *testing.T) { - before := Secret{ - ID: 1, - RepoID: 2, - Name: "docker_password", - Namespace: "octocat", - Type: "", - Data: "correct-horse-battery-staple", - PullRequest: true, - PullRequestPush: true, - } - after := before.Copy() - if got, want := after.ID, before.ID; got != want { - t.Errorf("Want secret ID %d, got %d", want, got) - } - if got, want := after.RepoID, before.RepoID; got != want { - t.Errorf("Want secret RepoID %d, got %d", want, got) - } - if got, want := after.Name, before.Name; got != want { - t.Errorf("Want secret Name %s, got %s", want, got) - } - if got, want := after.Namespace, before.Namespace; got != want { - t.Errorf("Want secret Namespace %s, got %s", want, got) - } - if got, want := after.PullRequest, before.PullRequest; got != want { - t.Errorf("Want secret PullRequest %v, got %v", want, got) - } - if got, want := after.PullRequestPush, before.PullRequestPush; got != want { - t.Errorf("Want secret PullRequest %v, got %v", want, got) - } - if after.Data != "" { - t.Errorf("Expect secret is empty after copy") - } -} diff --git a/core/session.go b/core/session.go deleted file mode 100644 index 3200caba69..0000000000 --- a/core/session.go +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2019 Drone IO, Inc. 
-// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package core - -import "net/http" - -// Session provides session management for -// authenticated users. -type Session interface { - // Create creates a new user session and writes the - // session to the http.Response. - Create(http.ResponseWriter, *User) error - - // Delete deletes the user session from the http.Response. - Delete(http.ResponseWriter) error - - // Get returns the session from the http.Request. If no - // session exists a nil user is returned. Returning an - // error is optional, for debugging purposes only. - Get(*http.Request) (*User, error) -} diff --git a/core/stage.go b/core/stage.go deleted file mode 100644 index c4ca9471a6..0000000000 --- a/core/stage.go +++ /dev/null @@ -1,106 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package core - -import "context" - -type ( - // Stage represents a stage of build execution. - Stage struct { - ID int64 `json:"id"` - RepoID int64 `json:"repo_id"` - BuildID int64 `json:"build_id"` - Number int `json:"number"` - Name string `json:"name"` - Kind string `json:"kind,omitempty"` - Type string `json:"type,omitempty"` - Status string `json:"status"` - Error string `json:"error,omitempty"` - ErrIgnore bool `json:"errignore"` - ExitCode int `json:"exit_code"` - Machine string `json:"machine,omitempty"` - OS string `json:"os"` - Arch string `json:"arch"` - Variant string `json:"variant,omitempty"` - Kernel string `json:"kernel,omitempty"` - Limit int `json:"limit,omitempty"` - LimitRepo int `json:"throttle,omitempty"` - Started int64 `json:"started"` - Stopped int64 `json:"stopped"` - Created int64 `json:"created"` - Updated int64 `json:"updated"` - Version int64 `json:"version"` - OnSuccess bool `json:"on_success"` - OnFailure bool `json:"on_failure"` - DependsOn []string `json:"depends_on,omitempty"` - Labels map[string]string `json:"labels,omitempty"` - Steps []*Step `json:"steps,omitempty"` - } - - // StageStore persists build stage information to storage. - StageStore interface { - // List returns a build stage list from the datastore. - List(context.Context, int64) ([]*Stage, error) - - // List returns a build stage list from the datastore - // where the stage is incomplete (pending or running). - ListIncomplete(ctx context.Context) ([]*Stage, error) - - // ListSteps returns a build stage list from the datastore, - // with the individual steps included. 
- ListSteps(context.Context, int64) ([]*Stage, error) - - // ListState returns a build stage list from the database - // across all repositories. - ListState(context.Context, string) ([]*Stage, error) - - // Find returns a build stage from the datastore by ID. - Find(context.Context, int64) (*Stage, error) - - // FindNumber returns a stage from the datastore by number. - FindNumber(context.Context, int64, int) (*Stage, error) - - // Create persists a new stage to the datastore. - Create(context.Context, *Stage) error - - // Update persists an updated stage to the datastore. - Update(context.Context, *Stage) error - } -) - -// IsDone returns true if the step has a completed state. -func (s *Stage) IsDone() bool { - switch s.Status { - case StatusWaiting, - StatusPending, - StatusRunning, - StatusBlocked: - return false - default: - return true - } -} - -// IsFailed returns true if the step has failed -func (s *Stage) IsFailed() bool { - switch s.Status { - case StatusFailing, - StatusKilled, - StatusError: - return true - default: - return false - } -} diff --git a/core/stage_test.go b/core/stage_test.go deleted file mode 100644 index b00c91c987..0000000000 --- a/core/stage_test.go +++ /dev/null @@ -1,73 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package core - -import "testing" - -var statusDone = []string{ - StatusDeclined, - StatusError, - StatusFailing, - StatusKilled, - StatusSkipped, - StatusPassing, -} - -var statusNotDone = []string{ - StatusWaiting, - StatusPending, - StatusRunning, - StatusBlocked, -} - -var statusFailed = []string{ - StatusError, - StatusFailing, - StatusKilled, -} - -var statusNotFailed = []string{ - StatusDeclined, - StatusSkipped, - StatusPassing, - StatusWaiting, - StatusPending, - StatusRunning, - StatusBlocked, -} - -func TestStageIsDone(t *testing.T) { - for _, status := range statusDone { - v := Stage{Status: status} - if v.IsDone() == false { - t.Errorf("Expect status %s is done", status) - } - } - - for _, status := range statusNotDone { - v := Stage{Status: status} - if v.IsDone() == true { - t.Errorf("Expect status %s is not done", status) - } - } -} - -func TestStageIsFailed(t *testing.T) { - for _, status := range statusFailed { - v := Stage{Status: status} - if v.IsFailed() == false { - t.Errorf("Expect status %s is failed", status) - } - } - - for _, status := range statusNotFailed { - v := Stage{Status: status} - if v.IsFailed() == true { - t.Errorf("Expect status %s is not failed", status) - } - } -} diff --git a/core/status.go b/core/status.go deleted file mode 100644 index d2e440dc6b..0000000000 --- a/core/status.go +++ /dev/null @@ -1,54 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package core - -import "context" - -// Status types. 
-const ( - StatusSkipped = "skipped" - StatusBlocked = "blocked" - StatusDeclined = "declined" - StatusWaiting = "waiting_on_dependencies" - StatusPending = "pending" - StatusRunning = "running" - StatusPassing = "success" - StatusFailing = "failure" - StatusKilled = "killed" - StatusError = "error" -) - -type ( - // Status represents a commit status. - Status struct { - State string - Label string - Desc string - Target string - } - - // StatusInput provides the necessary metadata to - // set the commit or deployment status. - StatusInput struct { - Repo *Repository - Build *Build - } - - // StatusService sends the commit status to an external - // source code management service (e.g. GitHub). - StatusService interface { - Send(ctx context.Context, user *User, req *StatusInput) error - } -) diff --git a/core/step.go b/core/step.go deleted file mode 100644 index 1339b15c23..0000000000 --- a/core/step.go +++ /dev/null @@ -1,69 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package core - -import "context" - -type ( - // Step represents an individual step in the stage. - Step struct { - ID int64 `json:"id"` - StageID int64 `json:"step_id"` // this is a typo, fixing it has far reaching ramifications. It should only be attempted in a major version change - Number int `json:"number"` - Name string `json:"name"` - Status string `json:"status"` - Error string `json:"error,omitempty"` - ErrIgnore bool `json:"errignore,omitempty"` - ExitCode int `json:"exit_code"` - Started int64 `json:"started,omitempty"` - Stopped int64 `json:"stopped,omitempty"` - Version int64 `json:"version"` - DependsOn []string `json:"depends_on,omitempty"` - Image string `json:"image,omitempty"` - Detached bool `json:"detached,omitempty"` - Schema string `json:"schema,omitempty"` - } - - // StepStore persists build step information to storage. - StepStore interface { - // List returns a build stage list from the datastore. - List(context.Context, int64) ([]*Step, error) - - // Find returns a build stage from the datastore by ID. - Find(context.Context, int64) (*Step, error) - - // FindNumber returns a stage from the datastore by number. - FindNumber(context.Context, int64, int) (*Step, error) - - // Create persists a new stage to the datastore. - Create(context.Context, *Step) error - - // Update persists an updated stage to the datastore. - Update(context.Context, *Step) error - } -) - -// IsDone returns true if the step has a completed state. -func (s *Step) IsDone() bool { - switch s.Status { - case StatusWaiting, - StatusPending, - StatusRunning, - StatusBlocked: - return false - default: - return true - } -} diff --git a/core/step_test.go b/core/step_test.go deleted file mode 100644 index b7d689a466..0000000000 --- a/core/step_test.go +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -// +build !oss - -package core - -import "testing" - -func TestStepIsDone(t *testing.T) { - for _, status := range statusDone { - v := Step{Status: status} - if v.IsDone() == false { - t.Errorf("Expect status %s is done", status) - } - } - - for _, status := range statusNotDone { - v := Step{Status: status} - if v.IsDone() == true { - t.Errorf("Expect status %s is not done", status) - } - } -} diff --git a/core/syncer.go b/core/syncer.go deleted file mode 100644 index 6c2ec6e1b9..0000000000 --- a/core/syncer.go +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package core - -import "context" - -// Syncer synchronizes the account repository list. -type Syncer interface { - Sync(context.Context, *User) (*Batch, error) -} diff --git a/core/system.go b/core/system.go deleted file mode 100644 index 7cd0a17588..0000000000 --- a/core/system.go +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package core - -// System stores system information. -type System struct { - Proto string `json:"proto,omitempty"` - Host string `json:"host,omitempty"` - Link string `json:"link,omitempty"` - Version string `json:"version,omitempty"` -} diff --git a/core/template.go b/core/template.go deleted file mode 100644 index 29fccc87d1..0000000000 --- a/core/template.go +++ /dev/null @@ -1,79 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package core - -import ( - "context" - - "github.com/drone/drone/handler/api/errors" -) - -var ( - errTemplateNameInvalid = errors.New("No Template Name Provided") - errTemplateDataInvalid = errors.New("No Template Data Provided") -) - -type ( - TemplateArgs struct { - Kind string - Load string - Data map[string]interface{} - } - - Template struct { - Id int64 `json:"id,omitempty"` - Name string `json:"name,omitempty"` - Namespace string `json:"namespace,omitempty"` - Data string `json:"data,omitempty"` - Created int64 `json:"created,omitempty"` - Updated int64 `json:"updated,omitempty"` - } - - // TemplateStore manages repository templates. - TemplateStore interface { - // List returns template list at org level - List(ctx context.Context, namespace string) ([]*Template, error) - - // ListAll returns templates list from the datastore. - ListAll(ctx context.Context) ([]*Template, error) - - // Find returns a template from the datastore. - Find(ctx context.Context, id int64) (*Template, error) - - // FindName returns a template from the data store - FindName(ctx context.Context, name string, namespace string) (*Template, error) - - // Create persists a new template to the datastore. - Create(ctx context.Context, template *Template) error - - // Update persists an updated template to the datastore. - Update(ctx context.Context, template *Template) error - - // Delete deletes a template from the datastore. - Delete(ctx context.Context, template *Template) error - } -) - -// Validate validates the required fields and formats. -func (s *Template) Validate() error { - switch { - case len(s.Name) == 0: - return errTemplateNameInvalid - case len(s.Data) == 0: - return errTemplateDataInvalid - default: - return nil - } -} diff --git a/core/transfer.go b/core/transfer.go deleted file mode 100644 index 3e65351799..0000000000 --- a/core/transfer.go +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package core - -import "context" - -// Transferer handles transferring repository ownership from one -// user to another user account. -type Transferer interface { - Transfer(ctx context.Context, user *User) error -} diff --git a/core/trigger.go b/core/trigger.go deleted file mode 100644 index 00b705a075..0000000000 --- a/core/trigger.go +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package core - -import "context" - -// Trigger types -const ( - TriggerHook = "@hook" - TriggerCron = "@cron" -) - -// Triggerer is responsible for triggering a Build from an -// incoming drone. If a build is skipped a nil value is -// returned. -type Triggerer interface { - Trigger(context.Context, *Repository, *Hook) (*Build, error) -} diff --git a/core/trigger_test.go b/core/trigger_test.go deleted file mode 100644 index 0912528b2f..0000000000 --- a/core/trigger_test.go +++ /dev/null @@ -1,7 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package core diff --git a/core/user.go b/core/user.go deleted file mode 100644 index e9fcaa9d73..0000000000 --- a/core/user.go +++ /dev/null @@ -1,115 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package core - -import ( - "context" - "errors" - - "github.com/asaskevich/govalidator" -) - -var ( - errUsernameLen = errors.New("Invalid username length") - errUsernameChar = errors.New("Invalid character in username") -) - -type ( - // User represents a user of the system. - User struct { - ID int64 `json:"id"` - Login string `json:"login"` - Email string `json:"email"` - Machine bool `json:"machine"` - Admin bool `json:"admin"` - Active bool `json:"active"` - Avatar string `json:"avatar"` - Syncing bool `json:"syncing"` - Synced int64 `json:"synced"` - Created int64 `json:"created"` - Updated int64 `json:"updated"` - LastLogin int64 `json:"last_login"` - Token string `json:"-"` - Refresh string `json:"-"` - Expiry int64 `json:"-"` - Hash string `json:"-"` - } - - // UserParams defines user query parameters. - UserParams struct { - // Sort instructs the system to sort by Login if true, - // else sort by primary key. - Sort bool - - Page int64 - Size int64 - } - - // UserStore defines operations for working with users. - UserStore interface { - // Find returns a user from the datastore. - Find(context.Context, int64) (*User, error) - - // FindLogin returns a user from the datastore by username. - FindLogin(context.Context, string) (*User, error) - - // FindToken returns a user from the datastore by token. - FindToken(context.Context, string) (*User, error) - - // List returns a list of users from the datastore. - List(context.Context) ([]*User, error) - - // ListRange returns a range of users from the datastore. - ListRange(context.Context, UserParams) ([]*User, error) - - // Create persists a new user to the datastore. - Create(context.Context, *User) error - - // Update persists an updated user to the datastore. - Update(context.Context, *User) error - - // Delete deletes a user from the datastore. - Delete(context.Context, *User) error - - // Count returns a count of human and machine users. - Count(context.Context) (int64, error) - - // CountHuman returns a count of human users. 
- CountHuman(context.Context) (int64, error) - } - - // UserService provides access to user account - // resources in the remote system (e.g. GitHub). - UserService interface { - // Find returns the authenticated user. - Find(ctx context.Context, access, refresh string) (*User, error) - - // FindLogin returns a user by username. - FindLogin(ctx context.Context, user *User, login string) (*User, error) - } -) - -// Validate validates the user and returns an error if the -// validation fails. -func (u *User) Validate() error { - switch { - case !govalidator.IsByteLength(u.Login, 1, 50): - return errUsernameLen - case !govalidator.Matches(u.Login, "^[.a-zA-Z0-9_-]+$"): - return errUsernameChar - default: - return nil - } -} diff --git a/core/user_test.go b/core/user_test.go deleted file mode 100644 index cb8af1ccd4..0000000000 --- a/core/user_test.go +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package core - -import ( - "testing" -) - -func TestValidateUser(t *testing.T) { - tests := []struct { - user *User - err error - }{ - { - user: &User{Login: ""}, - err: errUsernameLen, - }, - { - user: &User{Login: "©"}, // non ascii character - err: errUsernameChar, - }, - { - user: &User{Login: "소주"}, // non ascii character - err: errUsernameChar, - }, - { - user: &User{Login: "foo/bar"}, - err: errUsernameChar, - }, - { - user: &User{Login: "this-is-a-really-really-really-really-long-username"}, - err: errUsernameLen, - }, - { - user: &User{Login: "octocat"}, - err: nil, - }, - { - user: &User{Login: "octocat.with.dot"}, - err: nil, - }, - { - user: &User{Login: "OctO-Cat_01"}, - err: nil, - }, - } - for i, test := range tests { - got := test.user.Validate() - if got == nil && test.err == nil { - continue - } - if got == nil && test.err != nil { - t.Errorf("Expected error: %q at index %d", test.err, i) - continue - } - if got != nil && test.err == nil { - t.Errorf("Unexpected error: %q at index %d", got, i) - continue - } - if got, want := got.Error(), test.err.Error(); got != want { - t.Errorf("Want error %q, got %q at index %d", want, got, i) - } - } -} diff --git a/core/validate.go b/core/validate.go deleted file mode 100644 index cb07e8e61d..0000000000 --- a/core/validate.go +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package core - -import ( - "context" - "errors" -) - -var ( - // ErrValidatorSkip is returned if the pipeline - // validation fails, but the pipeline should be skipped - // and silently ignored instead of erroring. - ErrValidatorSkip = errors.New("validation failed: skip pipeline") - - // ErrValidatorBlock is returned if the pipeline - // validation fails, but the pipeline should be blocked - // pending manual approval instead of erroring. 
- ErrValidatorBlock = errors.New("validation failed: block pipeline") -) - -type ( - // ValidateArgs represents a request to the pipeline - // validation service. - ValidateArgs struct { - User *User `json:"-"` - Repo *Repository `json:"repo,omitempty"` - Build *Build `json:"build,omitempty"` - Config *Config `json:"config,omitempty"` - } - - // ValidateService validates the yaml configuration - // and returns an error if the yaml is deemed invalid. - ValidateService interface { - Validate(context.Context, *ValidateArgs) error - } -) diff --git a/core/webhook.go b/core/webhook.go deleted file mode 100644 index d4a87b3fda..0000000000 --- a/core/webhook.go +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package core - -import ( - "context" -) - -// Webhook event types. -const ( - WebhookEventBuild = "build" - WebhookEventRepo = "repo" - WebhookEventUser = "user" -) - -// Webhook action types. -const ( - WebhookActionCreated = "created" - WebhookActionUpdated = "updated" - WebhookActionDeleted = "deleted" - WebhookActionEnabled = "enabled" - WebhookActionDisabled = "disabled" -) - -type ( - // Webhook defines an integration endpoint. - Webhook struct { - Endpoint string `json:"endpoint,omitempty"` - Signer string `json:"-"` - SkipVerify bool `json:"skip_verify,omitempty"` - } - - // WebhookData provides the webhook data. - WebhookData struct { - Event string `json:"event"` - Action string `json:"action"` - User *User `json:"user,omitempty"` - Repo *Repository `json:"repo,omitempty"` - Build *Build `json:"build,omitempty"` - } - - // WebhookSender sends the webhook payload. - WebhookSender interface { - // Send sends the webhook to the global endpoint. - Send(context.Context, *WebhookData) error - } -) diff --git a/docker/Dockerfile.agent.linux.amd64 b/docker/Dockerfile.agent.linux.amd64 deleted file mode 100644 index bb9899a88b..0000000000 --- a/docker/Dockerfile.agent.linux.amd64 +++ /dev/null @@ -1,19 +0,0 @@ -FROM alpine:3.11 as alpine -RUN apk add -U --no-cache ca-certificates tzdata - -FROM alpine:3.11 -ENV GODEBUG netdns=go -ENV DRONE_RUNNER_OS=linux -ENV DRONE_RUNNER_ARCH=amd64 -ENV DRONE_RUNNER_PLATFORM=linux/amd64 -ENV DRONE_RUNNER_CAPACITY=1 -ADD release/linux/amd64/drone-agent /bin/ - -RUN [ ! 
-e /etc/nsswitch.conf ] && echo 'hosts: files dns' > /etc/nsswitch.conf - -COPY --from=alpine /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ -COPY --from=alpine /usr/share/zoneinfo /usr/share/zoneinfo - -LABEL com.centurylinklabs.watchtower.stop-signal="SIGINT" - -ENTRYPOINT ["/bin/drone-agent"] diff --git a/docker/Dockerfile.agent.linux.arm b/docker/Dockerfile.agent.linux.arm deleted file mode 100644 index 47f0d61553..0000000000 --- a/docker/Dockerfile.agent.linux.arm +++ /dev/null @@ -1,12 +0,0 @@ -FROM drone/ca-certs -ENV GODEBUG=netdns=go -ENV DRONE_RUNNER_OS=linux -ENV DRONE_RUNNER_ARCH=arm -ENV DRONE_RUNNER_PLATFORM=linux/arm -ENV DRONE_RUNNER_CAPACITY=1 -ENV DRONE_RUNNER_VARIANT=v7 -ADD release/linux/arm/drone-agent /bin/ - -LABEL com.centurylinklabs.watchtower.stop-signal="SIGINT" - -ENTRYPOINT ["/bin/drone-agent"] diff --git a/docker/Dockerfile.agent.linux.arm64 b/docker/Dockerfile.agent.linux.arm64 deleted file mode 100644 index 04ba015480..0000000000 --- a/docker/Dockerfile.agent.linux.arm64 +++ /dev/null @@ -1,12 +0,0 @@ -FROM drone/ca-certs -ENV GODEBUG=netdns=go -ENV DRONE_RUNNER_OS=linux -ENV DRONE_RUNNER_ARCH=arm64 -ENV DRONE_RUNNER_PLATFORM=linux/arm64 -ENV DRONE_RUNNER_CAPACITY=1 -ENV DRONE_RUNNER_VARIANT=v8 -ADD release/linux/arm64/drone-agent /bin/ - -LABEL com.centurylinklabs.watchtower.stop-signal="SIGINT" - -ENTRYPOINT ["/bin/drone-agent"] diff --git a/docker/Dockerfile.agent.windows.1803 b/docker/Dockerfile.agent.windows.1803 deleted file mode 100644 index 9c369ef3dd..0000000000 --- a/docker/Dockerfile.agent.windows.1803 +++ /dev/null @@ -1,14 +0,0 @@ -FROM mcr.microsoft.com/windows/nanoserver:1803 -USER ContainerAdministrator - -ENV GODEBUG=netdns=go -ENV DRONE_RUNNER_OS=windows -ENV DRONE_RUNNER_ARCH=amd64 -ENV DRONE_RUNNER_PLATFORM=windows/amd64 -ENV DRONE_RUNNER_KERNEL=1803 -ENV DRONE_RUNNER_CAPACITY=1 - -LABEL com.centurylinklabs.watchtower.stop-signal="SIGINT" - -ADD release/windows/amd64/drone-agent.exe C:/drone-agent.exe -ENTRYPOINT [ "C:\\drone-agent.exe" ] diff --git a/docker/Dockerfile.agent.windows.1809 b/docker/Dockerfile.agent.windows.1809 deleted file mode 100644 index 831401e0b2..0000000000 --- a/docker/Dockerfile.agent.windows.1809 +++ /dev/null @@ -1,14 +0,0 @@ -FROM mcr.microsoft.com/windows/nanoserver:1809 -USER ContainerAdministrator - -ENV GODEBUG=netdns=go -ENV DRONE_RUNNER_OS=windows -ENV DRONE_RUNNER_ARCH=amd64 -ENV DRONE_RUNNER_PLATFORM=windows/amd64 -ENV DRONE_RUNNER_KERNEL=1809 -ENV DRONE_RUNNER_CAPACITY=1 - -LABEL com.centurylinklabs.watchtower.stop-signal="SIGINT" - -ADD release/windows/amd64/drone-agent.exe C:/drone-agent.exe -ENTRYPOINT [ "C:\\drone-agent.exe" ] diff --git a/docker/Dockerfile.agent.windows.1903 b/docker/Dockerfile.agent.windows.1903 deleted file mode 100644 index a7d34426cd..0000000000 --- a/docker/Dockerfile.agent.windows.1903 +++ /dev/null @@ -1,14 +0,0 @@ -FROM mcr.microsoft.com/windows/nanoserver:1903 -USER ContainerAdministrator - -ENV GODEBUG=netdns=go -ENV DRONE_RUNNER_OS=windows -ENV DRONE_RUNNER_ARCH=amd64 -ENV DRONE_RUNNER_PLATFORM=windows/amd64 -ENV DRONE_RUNNER_KERNEL=1903 -ENV DRONE_RUNNER_CAPACITY=1 - -LABEL com.centurylinklabs.watchtower.stop-signal="SIGINT" - -ADD release/windows/amd64/drone-agent.exe C:/drone-agent.exe -ENTRYPOINT [ "C:\\drone-agent.exe" ] diff --git a/docker/Dockerfile.controller.linux.amd64 b/docker/Dockerfile.controller.linux.amd64 deleted file mode 100644 index 8161dc2cca..0000000000 --- a/docker/Dockerfile.controller.linux.amd64 +++ /dev/null @@ -1,15 +0,0 @@ -FROM 
alpine:3.11 as alpine -RUN apk add -U --no-cache ca-certificates tzdata - -FROM alpine:3.11 -ENV GODEBUG netdns=go -ENV DRONE_RUNNER_OS=linux -ENV DRONE_RUNNER_ARCH=amd64 -ENV DRONE_RUNNER_PLATFORM=linux/amd64 -ENV DRONE_RUNNER_CAPACITY=1 -ADD release/linux/amd64/drone-controller /bin/ - -COPY --from=alpine /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ -COPY --from=alpine /usr/share/zoneinfo /usr/share/zoneinfo - -ENTRYPOINT ["/bin/drone-controller"] diff --git a/docker/Dockerfile.controller.linux.arm b/docker/Dockerfile.controller.linux.arm deleted file mode 100644 index be5091259f..0000000000 --- a/docker/Dockerfile.controller.linux.arm +++ /dev/null @@ -1,16 +0,0 @@ -FROM alpine:3.11 as alpine -RUN apk add -U --no-cache ca-certificates tzdata - -FROM alpine:3.11 -ENV GODEBUG=netdns=go -ENV DRONE_RUNNER_OS=linux -ENV DRONE_RUNNER_ARCH=arm -ENV DRONE_RUNNER_PLATFORM=linux/arm -ENV DRONE_RUNNER_CAPACITY=1 -ENV DRONE_RUNNER_VARIANT=v7 -ADD release/linux/arm/drone-controller /bin/ - -COPY --from=alpine /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ -COPY --from=alpine /usr/share/zoneinfo /usr/share/zoneinfo - -ENTRYPOINT ["/bin/drone-controller"] diff --git a/docker/Dockerfile.controller.linux.arm64 b/docker/Dockerfile.controller.linux.arm64 deleted file mode 100644 index 54a1ca02c9..0000000000 --- a/docker/Dockerfile.controller.linux.arm64 +++ /dev/null @@ -1,16 +0,0 @@ -FROM alpine:3.11 as alpine -RUN apk add -U --no-cache ca-certificates tzdata - -FROM alpine:3.11 -ENV GODEBUG=netdns=go -ENV DRONE_RUNNER_OS=linux -ENV DRONE_RUNNER_ARCH=arm64 -ENV DRONE_RUNNER_PLATFORM=linux/arm64 -ENV DRONE_RUNNER_CAPACITY=1 -ENV DRONE_RUNNER_VARIANT=v8 -ADD release/linux/arm64/drone-controller /bin/ - -COPY --from=alpine /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ -COPY --from=alpine /usr/share/zoneinfo /usr/share/zoneinfo - -ENTRYPOINT ["/bin/drone-controller"] diff --git a/docker/Dockerfile.controller.windows.1803 b/docker/Dockerfile.controller.windows.1803 deleted file mode 100644 index b8f0f56642..0000000000 --- a/docker/Dockerfile.controller.windows.1803 +++ /dev/null @@ -1,12 +0,0 @@ -FROM mcr.microsoft.com/windows/nanoserver:1803 -USER ContainerAdministrator - -ENV GODEBUG=netdns=go -ENV DRONE_RUNNER_OS=windows -ENV DRONE_RUNNER_ARCH=amd64 -ENV DRONE_RUNNER_PLATFORM=windows/amd64 -ENV DRONE_RUNNER_KERNEL=1803 -ENV DRONE_RUNNER_CAPACITY=1 - -ADD release/windows/1803/amd64/drone-controller.exe C:/drone-controller.exe -ENTRYPOINT [ "C:\\drone-controller.exe" ] diff --git a/docker/Dockerfile.controller.windows.1809 b/docker/Dockerfile.controller.windows.1809 deleted file mode 100644 index 1664998e75..0000000000 --- a/docker/Dockerfile.controller.windows.1809 +++ /dev/null @@ -1,12 +0,0 @@ -FROM mcr.microsoft.com/windows/nanoserver:1809 -USER ContainerAdministrator - -ENV GODEBUG=netdns=go -ENV DRONE_RUNNER_OS=windows -ENV DRONE_RUNNER_ARCH=amd64 -ENV DRONE_RUNNER_PLATFORM=windows/amd64 -ENV DRONE_RUNNER_KERNEL=1809 -ENV DRONE_RUNNER_CAPACITY=1 - -ADD release/windows/1809/amd64/drone-controller.exe C:/drone-controller.exe -ENTRYPOINT [ "C:\\drone-controller.exe" ] diff --git a/docker/Dockerfile.server.linux.amd64 b/docker/Dockerfile.server.linux.amd64 deleted file mode 100644 index 1f4d604b14..0000000000 --- a/docker/Dockerfile.server.linux.amd64 +++ /dev/null @@ -1,27 +0,0 @@ -# docker build --rm -f docker/Dockerfile -t drone/drone . - -FROM alpine:3.11 as alpine -RUN apk add -U --no-cache ca-certificates tzdata - -FROM alpine:3.11 -EXPOSE 80 443 -VOLUME /data - -RUN [ ! 
-e /etc/nsswitch.conf ] && echo 'hosts: files dns' > /etc/nsswitch.conf - -ENV GODEBUG netdns=go -ENV XDG_CACHE_HOME /data -ENV DRONE_DATABASE_DRIVER sqlite3 -ENV DRONE_DATABASE_DATASOURCE /data/database.sqlite -ENV DRONE_RUNNER_OS=linux -ENV DRONE_RUNNER_ARCH=amd64 -ENV DRONE_SERVER_PORT=:80 -ENV DRONE_SERVER_HOST=localhost -ENV DRONE_DATADOG_ENABLED=true -ENV DRONE_DATADOG_ENDPOINT=https://stats.drone.ci/api/v1/series - -COPY --from=alpine /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ -COPY --from=alpine /usr/share/zoneinfo /usr/share/zoneinfo - -ADD release/linux/amd64/drone-server /bin/ -ENTRYPOINT ["/bin/drone-server"] \ No newline at end of file diff --git a/docker/Dockerfile.server.linux.arm b/docker/Dockerfile.server.linux.arm deleted file mode 100644 index 3f803d1536..0000000000 --- a/docker/Dockerfile.server.linux.arm +++ /dev/null @@ -1,27 +0,0 @@ -# docker build --rm -f docker/Dockerfile -t drone/drone . - -FROM alpine:3.11 as alpine -RUN apk add -U --no-cache ca-certificates tzdata - -FROM alpine:3.11 -EXPOSE 80 443 -VOLUME /data - -RUN [ ! -e /etc/nsswitch.conf ] && echo 'hosts: files dns' > /etc/nsswitch.conf - -ENV GODEBUG netdns=go -ENV XDG_CACHE_HOME /data -ENV DRONE_DATABASE_DRIVER sqlite3 -ENV DRONE_DATABASE_DATASOURCE /data/database.sqlite -ENV DRONE_RUNNER_OS=linux -ENV DRONE_RUNNER_ARCH=arm -ENV DRONE_SERVER_PORT=:80 -ENV DRONE_SERVER_HOST=localhost -ENV DRONE_DATADOG_ENABLED=true -ENV DRONE_DATADOG_ENDPOINT=https://stats.drone.ci/api/v1/series - -COPY --from=alpine /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ -COPY --from=alpine /usr/share/zoneinfo /usr/share/zoneinfo - -ADD release/linux/arm/drone-server /bin/ -ENTRYPOINT ["/bin/drone-server"] \ No newline at end of file diff --git a/docker/Dockerfile.server.linux.arm64 b/docker/Dockerfile.server.linux.arm64 deleted file mode 100644 index f0be82848c..0000000000 --- a/docker/Dockerfile.server.linux.arm64 +++ /dev/null @@ -1,27 +0,0 @@ -# docker build --rm -f docker/Dockerfile -t drone/drone . - -FROM alpine:3.11 as alpine -RUN apk add -U --no-cache ca-certificates tzdata - -FROM alpine:3.11 -EXPOSE 80 443 -VOLUME /data - -RUN [ ! -e /etc/nsswitch.conf ] && echo 'hosts: files dns' > /etc/nsswitch.conf - -ENV GODEBUG netdns=go -ENV XDG_CACHE_HOME /data -ENV DRONE_DATABASE_DRIVER sqlite3 -ENV DRONE_DATABASE_DATASOURCE /data/database.sqlite -ENV DRONE_RUNNER_OS=linux -ENV DRONE_RUNNER_ARCH=arm64 -ENV DRONE_SERVER_PORT=:80 -ENV DRONE_SERVER_HOST=localhost -ENV DRONE_DATADOG_ENABLED=true -ENV DRONE_DATADOG_ENDPOINT=https://stats.drone.ci/api/v1/series - -COPY --from=alpine /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ -COPY --from=alpine /usr/share/zoneinfo /usr/share/zoneinfo - -ADD release/linux/arm64/drone-server /bin/ -ENTRYPOINT ["/bin/drone-server"] \ No newline at end of file diff --git a/docker/compose/README.md b/docker/compose/README.md deleted file mode 100644 index b431aa7624..0000000000 --- a/docker/compose/README.md +++ /dev/null @@ -1,57 +0,0 @@ -# Local development - -This directory contains Docker compose files used by the core development team for local development and testing purposes only. These are not part of the core distribution, and are not intended for use outside of the core development team. We are not currently accepting changes or additions to these files. - -## Running a Drone deployment locally using Github - -At the end of this guide you will have a drone server and a drone runner that is hooked up to your Github account. 
This will allow you to trigger builds on your Github repositories. - -### (prerequisite) Set up a Github oauth application - -Create an oauth application here - -The most important entry is the `Authorization callback URL`; you can set this to `http://localhost:8080/login`. - -You will also need to create a client secret for the application. - -Now you have the `DRONE_GITHUB_CLIENT_ID` and `DRONE_GITHUB_CLIENT_SECRET`. - -### (prerequisite) Set up Ngrok - -Ngrok allows us to send the webhooks from Github to our local Drone setup. - -Follow the guide here - -### Running Drone - -+ Move into the `drone/docker/compose/drone-github` folder. - -+ Run Ngrok against port `8080`; it will run in the foreground. - -``` bash -./ngrok http 8080 -``` - -Take note of the forwarding hostname; this is your `DRONE_SERVER_PROXY_HOST`, for example: - -``` bash -Forwarding http://c834c33asdde.ngrok.io -> http://localhost:8080 -``` - -+ You will want to edit the Docker compose file `docker-compose.yml`, updating the following entries. - -``` bash -DRONE_SERVER_PROXY_HOST=${DRONE_SERVER_PROXY_HOST} # taken from Ngrok -DRONE_GITHUB_CLIENT_ID=${DRONE_GITHUB_CLIENT_ID} # taken from your Github oauth application -DRONE_GITHUB_CLIENT_SECRET=${DRONE_GITHUB_CLIENT_SECRET} # taken from your Github oauth application -``` - -NB: do not include http/https in `DRONE_SERVER_PROXY_HOST`. - -+ Run docker compose - -``` bash -docker-compose up -``` - -Now you can access the Drone UI at diff --git a/docker/compose/drone-gitea/docker-compose.yml b/docker/compose/drone-gitea/docker-compose.yml deleted file mode 100644 index 40da98cae2..0000000000 --- a/docker/compose/drone-gitea/docker-compose.yml +++ /dev/null @@ -1,40 +0,0 @@ -version: "3.8" -services: - drone: - image: drone/drone:latest - ports: - - "9000:80" - environment: - - DRONE_SERVER_HOST=localhost:9000 - - DRONE_SERVER_PROTO=http - - DRONE_RPC_SECRET=bea26a2221fd8090ea38720fc445eca6 - - DRONE_COOKIE_SECRET=e8206356c843d81e05ab6735e7ebf075 - - DRONE_COOKIE_TIMEOUT=720h - - DRONE_GITEA_CLIENT_ID=${DRONE_GITEA_CLIENT_ID} - - DRONE_GITEA_CLIENT_SECRET=${DRONE_GITEA_CLIENT_SECRET} - - DRONE_GITEA_SERVER=http://gitea:3000 - - DRONE_LOGS_DEBUG=true - - DRONE_CRON_DISABLED=true - volumes: - - ./data:/data - networks: - - default - - gitea - runner: - image: drone/drone-runner-docker:latest - environment: - - DRONE_RPC_HOST=drone - - DRONE_RPC_PROTO=http - - DRONE_RPC_SECRET=bea26a2221fd8090ea38720fc445eca6 - volumes: - - /var/run/docker.sock:/var/run/docker.sock - networks: - - default - - gitea - -networks: - default: - external: false - gitea: - external: - name: gitea diff --git a/docker/compose/drone-github/docker-compose.yml b/docker/compose/drone-github/docker-compose.yml deleted file mode 100644 index 732ad644f1..0000000000 --- a/docker/compose/drone-github/docker-compose.yml +++ /dev/null @@ -1,29 +0,0 @@ -version: "3.8" -services: - drone: - image: drone/drone:latest - ports: - - "8080:80" - environment: - - DRONE_SERVER_HOST=localhost:8080 - - DRONE_SERVER_PROTO=http - - DRONE_SERVER_PROXY_HOST=${DRONE_SERVER_PROXY_HOST} - - DRONE_SERVER_PROXY_PROTO=https - - DRONE_RPC_SECRET=bea26a2221fd8090ea38720fc445eca6 - - DRONE_COOKIE_SECRET=e8206356c843d81e05ab6735e7ebf075 - - DRONE_COOKIE_TIMEOUT=720h - - DRONE_GITHUB_CLIENT_ID=${DRONE_GITHUB_CLIENT_ID} - - DRONE_GITHUB_CLIENT_SECRET=${DRONE_GITHUB_CLIENT_SECRET} - - DRONE_LOGS_DEBUG=true - - DRONE_CRON_DISABLED=true - volumes: - - ./data:/data - runner: - image: drone/drone-runner-docker:latest - environment: - -
DRONE_RPC_HOST=drone - - DRONE_RPC_PROTO=http - - DRONE_RPC_SECRET=bea26a2221fd8090ea38720fc445eca6 - - DRONE_TMATE_ENABLED=true - volumes: - - /var/run/docker.sock:/var/run/docker.sock diff --git a/docker/compose/gitea/docker-compose.yml b/docker/compose/gitea/docker-compose.yml deleted file mode 100644 index 1bec20fa2b..0000000000 --- a/docker/compose/gitea/docker-compose.yml +++ /dev/null @@ -1,21 +0,0 @@ -version: "3.8" -services: - gitea: - image: gitea/gitea:latest - container_name: gitea - environment: - - USER_UID=1000 - - USER_GID=1000 - - ROOT_URL=http://gitea:3000 - networks: - - gitea - volumes: - - ./data:/data - ports: - - "3000:3000" - - "3022:22" - -networks: - gitea: - name: gitea - external: false diff --git a/docker/manifest.agent.tmpl b/docker/manifest.agent.tmpl deleted file mode 100644 index 1db9863bb2..0000000000 --- a/docker/manifest.agent.tmpl +++ /dev/null @@ -1,43 +0,0 @@ -image: drone/agent:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}latest{{/if}} -{{#if build.tags}} -tags: -{{#each build.tags}} - - {{this}} -{{/each}} -{{/if}} -manifests: - - - image: drone/agent:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-amd64 - platform: - architecture: amd64 - os: linux - - - image: drone/agent:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm64 - platform: - architecture: arm64 - os: linux - variant: v8 - - - image: drone/agent:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm - platform: - architecture: arm - os: linux - variant: v7 - - - image: drone/agent:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}windows-1803-amd64 - platform: - architecture: amd64 - os: windows - variant: 1803 - - - image: drone/agent:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}windows-1809-amd64 - platform: - architecture: amd64 - os: windows - variant: 1809 - - - image: drone/agent:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}windows-1903-amd64 - platform: - architecture: amd64 - os: windows - variant: 1903 \ No newline at end of file diff --git a/docker/manifest.controller.tmpl b/docker/manifest.controller.tmpl deleted file mode 100644 index ec34401fda..0000000000 --- a/docker/manifest.controller.tmpl +++ /dev/null @@ -1,37 +0,0 @@ -image: drone/controller:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}latest{{/if}} -{{#if build.tags}} -tags: -{{#each build.tags}} - - {{this}} -{{/each}} -{{/if}} -manifests: - - - image: drone/controller:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-amd64 - platform: - architecture: amd64 - os: linux - - - image: drone/controller:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm64 - platform: - architecture: arm64 - os: linux - variant: v8 - - - image: drone/controller:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm - platform: - architecture: arm - os: linux - variant: v7 - - - image: drone/controller:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}windows-1803-amd64 - platform: - architecture: amd64 - os: windows - variant: 1803 - - - image: drone/controller:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}windows-1809-amd64 - platform: - architecture: amd64 - os: windows - variant: 1809 diff --git a/docker/manifest.server.tmpl b/docker/manifest.server.tmpl deleted file mode 100644 index 55e6c87882..0000000000 --- a/docker/manifest.server.tmpl +++ /dev/null @@ -1,19 +0,0 @@ -image: drone/drone:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}latest{{/if}} -{{#if build.tags}} -tags: -{{#each build.tags}} - - {{this}} -{{/each}} 
-{{/if}} -manifests: - - - image: drone/drone:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-amd64 - platform: - architecture: amd64 - os: linux - - - image: drone/drone:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}linux-arm64 - platform: - architecture: arm64 - os: linux - variant: v8 diff --git a/go.mod b/go.mod deleted file mode 100644 index 33066e1495..0000000000 --- a/go.mod +++ /dev/null @@ -1,63 +0,0 @@ -module github.com/drone/drone - -replace github.com/docker/docker => github.com/docker/engine v17.12.0-ce-rc1.0.20200309214505-aa6a9891b09c+incompatible - -require ( - github.com/99designs/httpsignatures-go v0.0.0-20170731043157-88528bf4ca7e - github.com/Azure/azure-storage-blob-go v0.7.0 - github.com/Azure/go-autorest/autorest/adal v0.8.3 // indirect - github.com/asaskevich/govalidator v0.0.0-20180315120708-ccb8e960c48f - github.com/aws/aws-sdk-go v1.37.3 - github.com/codegangsta/negroni v1.0.0 // indirect - github.com/coreos/go-semver v0.2.0 - github.com/dchest/authcookie v0.0.0-20120917135355-fbdef6e99866 - github.com/dchest/uniuri v0.0.0-20160212164326-8902c56451e9 - github.com/drone/drone-go v1.7.2-0.20220308165842-f9e4fe31c2af - github.com/drone/drone-runtime v1.1.1-0.20200623162453-61e33e2cab5d - github.com/drone/drone-ui v2.11.5+incompatible - github.com/drone/drone-yaml v1.2.4-0.20220204000225-01fb17858c9b - github.com/drone/envsubst v1.0.3-0.20200709231038-aa43e1c1a629 - github.com/drone/funcmap v0.0.0-20210823160631-9e9dec149056 - github.com/drone/go-license v1.0.2 - github.com/drone/go-login v1.1.0 - github.com/drone/go-scm v1.28.0 - github.com/drone/signal v1.0.0 - github.com/dustin/go-humanize v1.0.0 - github.com/go-chi/chi v3.3.3+incompatible - github.com/go-chi/cors v1.0.0 - github.com/go-redis/redis/v8 v8.11.0 - github.com/go-redsync/redsync/v4 v4.3.0 - github.com/go-sql-driver/mysql v1.4.0 - github.com/golang/mock v1.3.1 - github.com/google/go-cmp v0.5.6 - github.com/google/go-jsonnet v0.17.0 - github.com/google/wire v0.2.1 - github.com/gosimple/slug v1.3.0 - github.com/h2non/gock v1.0.15 - github.com/hashicorp/go-cleanhttp v0.5.1 // indirect - github.com/hashicorp/go-multierror v1.1.0 - github.com/hashicorp/go-retryablehttp v0.5.4 - github.com/hashicorp/golang-lru v0.5.1 - github.com/jmoiron/sqlx v0.0.0-20180614180643-0dae4fefe7c0 - github.com/joho/godotenv v1.3.0 - github.com/kelseyhightower/envconfig v1.3.0 - github.com/kr/pretty v0.2.0 // indirect - github.com/lib/pq v1.1.0 - github.com/mattn/go-sqlite3 v1.9.0 - github.com/oxtoacart/bpool v0.0.0-20150712133111-4e1c5567d7c2 - github.com/prometheus/client_golang v0.9.2 - github.com/rainycape/unidecode v0.0.0-20150907023854-cb7f23ec59be // indirect - github.com/robfig/cron v0.0.0-20180505203441-b41be1df6967 - github.com/segmentio/ksuid v1.0.2 - github.com/sirupsen/logrus v1.6.0 - github.com/unrolled/secure v0.0.0-20181022170031-4b6b7cf51606 - go.starlark.net v0.0.0-20221020143700-22309ac47eac - golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9 - golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9 - golang.org/x/time v0.0.0-20190308202827-9d24e82272b4 // indirect - gopkg.in/yaml.v2 v2.3.0 -) - -replace github.com/h2non/gock => gopkg.in/h2non/gock.v1 v1.0.14 - -go 1.13 diff --git a/go.sum b/go.sum deleted file mode 100644 index 28b65e69e3..0000000000 --- a/go.sum +++ /dev/null @@ -1,414 +0,0 @@ -cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -docker.io/go-docker v1.0.0/go.mod h1:7tiAn5a0LFmjbPDbyTPOaTTOuG1ZRNXdPA6RvKY+fpY= 
-github.com/99designs/httpsignatures-go v0.0.0-20170731043157-88528bf4ca7e h1:rl2Aq4ZODqTDkeSqQBy+fzpZPamacO1Srp8zq7jf2Sc= -github.com/99designs/httpsignatures-go v0.0.0-20170731043157-88528bf4ca7e/go.mod h1:Xa6lInWHNQnuWoF0YPSsx+INFA9qk7/7pTjwb3PInkY= -github.com/Azure/azure-pipeline-go v0.2.1 h1:OLBdZJ3yvOn2MezlWvbrBMTEUQC72zAftRZOMdj5HYo= -github.com/Azure/azure-pipeline-go v0.2.1/go.mod h1:UGSo8XybXnIGZ3epmeBw7Jdz+HiUVpqIlpz/HKHylF4= -github.com/Azure/azure-storage-blob-go v0.7.0 h1:MuueVOYkufCxJw5YZzF842DY2MBsp+hLuh2apKY0mck= -github.com/Azure/azure-storage-blob-go v0.7.0/go.mod h1:f9YQKtsG1nMisotuTPpO0tjNuEjKRYAcJU8/ydDI++4= -github.com/Azure/go-ansiterm v0.0.0-20170929234023-d6e3b3328b78 h1:w+iIsaOQNcT7OZ575w+acHgRric5iCyQh+xv+KJ4HB8= -github.com/Azure/go-ansiterm v0.0.0-20170929234023-d6e3b3328b78/go.mod h1:LmzpDX56iTiv29bbRTIsUNlaFfuhWRQBWjQdVyAevI8= -github.com/Azure/go-autorest/autorest v0.9.0 h1:MRvx8gncNaXJqOoLmhNjUAKh33JJF8LyxPhomEtOsjs= -github.com/Azure/go-autorest/autorest v0.9.0/go.mod h1:xyHB1BMZT0cuDHU7I0+g046+BFDTQ8rEZB0s4Yfa6bI= -github.com/Azure/go-autorest/autorest/adal v0.5.0/go.mod h1:8Z9fGy2MpX0PvDjB1pEgQTmVqjGhiHBW7RJJEciWzS0= -github.com/Azure/go-autorest/autorest/adal v0.8.3 h1:O1AGG9Xig71FxdX9HO5pGNyZ7TbSyHaVg+5eJO/jSGw= -github.com/Azure/go-autorest/autorest/adal v0.8.3/go.mod h1:ZjhuQClTqx435SRJ2iMlOxPYt3d2C/T/7TiQCVZSn3Q= -github.com/Azure/go-autorest/autorest/date v0.1.0/go.mod h1:plvfp3oPSKwf2DNjlBjWF/7vwR+cUD/ELuzDCXwHUVA= -github.com/Azure/go-autorest/autorest/date v0.2.0 h1:yW+Zlqf26583pE43KhfnhFcdmSWlm5Ew6bxipnr/tbM= -github.com/Azure/go-autorest/autorest/date v0.2.0/go.mod h1:vcORJHLJEh643/Ioh9+vPmf1Ij9AEBM5FuBIXLmIy0g= -github.com/Azure/go-autorest/autorest/mocks v0.1.0/go.mod h1:OTyCOPRA2IgIlWxVYxBee2F5Gr4kF2zd2J5cFRaIDN0= -github.com/Azure/go-autorest/autorest/mocks v0.2.0/go.mod h1:OTyCOPRA2IgIlWxVYxBee2F5Gr4kF2zd2J5cFRaIDN0= -github.com/Azure/go-autorest/autorest/mocks v0.3.0 h1:qJumjCaCudz+OcqE9/XtEPfvtOjOmKaui4EOpFI6zZc= -github.com/Azure/go-autorest/autorest/mocks v0.3.0/go.mod h1:a8FDP3DYzQ4RYfVAxAN3SVSiiO77gL2j2ronKKP0syM= -github.com/Azure/go-autorest/logger v0.1.0 h1:ruG4BSDXONFRrZZJ2GUXDiUyVpayPmb1GnWeHDdaNKY= -github.com/Azure/go-autorest/logger v0.1.0/go.mod h1:oExouG+K6PryycPJfVSxi/koC6LSNgds39diKLz7Vrc= -github.com/Azure/go-autorest/tracing v0.5.0 h1:TRn4WjSnkcSy5AEG3pnbtFSwNtwzjr4VYyQflFE619k= -github.com/Azure/go-autorest/tracing v0.5.0/go.mod h1:r/s2XiOKccPW3HrqB+W0TQzfbtp2fGCgRFtBroKn4Dk= -github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/Microsoft/go-winio v0.4.11 h1:zoIOcVf0xPN1tnMVbTtEdI+P8OofVk3NObnwOQ6nK2Q= -github.com/Microsoft/go-winio v0.4.11/go.mod h1:VhR8bwka0BXejwEJY73c50VrPtXAaKcyvVC4A4RozmA= -github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= -github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= -github.com/asaskevich/govalidator v0.0.0-20180315120708-ccb8e960c48f h1:y2hSFdXeA1y5z5f0vfNO0Dg5qVY036qzlz3Pds0B92o= -github.com/asaskevich/govalidator v0.0.0-20180315120708-ccb8e960c48f/go.mod h1:lB+ZfQJz7igIIfQNfa7Ml4HSf2uFQQRzpGGRXenZAgY= -github.com/aws/aws-sdk-go v1.37.3 h1:1f0groABc4AuapskpHf6EBRaG2tqw0Sx3ebCMwfp1Ys= -github.com/aws/aws-sdk-go v1.37.3/go.mod h1:hcU610XS61/+aQV88ixoOzUoG7v3b31pl2zKMmprdro= -github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973 h1:xJ4a3vCFaGF/jqvzLMYoU8P317H5OQ+Via4RmuPwCS0= -github.com/beorn7/perks 
v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= -github.com/bmatcuk/doublestar v1.1.1 h1:YroD6BJCZBYx06yYFEWvUuKVWQn3vLLQAVmDmvTSaiQ= -github.com/bmatcuk/doublestar v1.1.1/go.mod h1:UD6OnuiIn0yFxxA2le/rnRU1G4RaI4UvFv1sNto9p6w= -github.com/buildkite/yaml v2.1.0+incompatible h1:xirI+ql5GzfikVNDmt+yeiXpf/v1Gt03qXTtT5WXdr8= -github.com/buildkite/yaml v2.1.0+incompatible/go.mod h1:UoU8vbcwu1+vjZq01+KrpSeLBgQQIjL/H7Y6KwikUrI= -github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= -github.com/cespare/xxhash/v2 v2.1.1 h1:6MnRN8NT7+YBpUIWxHtefFZOKTAPgGjpQSxqLNn0+qY= -github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= -github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= -github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= -github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= -github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= -github.com/codegangsta/negroni v1.0.0 h1:+aYywywx4bnKXWvoWtRfJ91vC59NbEhEY03sZjQhbVY= -github.com/codegangsta/negroni v1.0.0/go.mod h1:v0y3T5G7Y1UlFfyxFn/QLRU4a2EuNau2iZY63YTKWo0= -github.com/containerd/containerd v1.3.4 h1:3o0smo5SKY7H6AJCmJhsnCjR2/V2T8VmiHt7seN2/kI= -github.com/containerd/containerd v1.3.4/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA= -github.com/coreos/go-semver v0.2.0 h1:3Jm3tLmsgAYcjC+4Up7hJrFBPr+n7rAqYeSw/SZazuY= -github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= -github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= -github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/dchest/authcookie v0.0.0-20120917135355-fbdef6e99866 h1:98WJ4YCdjmB7uyrdT3P4A2Oa1hiRPKoa/0zInG6UnfQ= -github.com/dchest/authcookie v0.0.0-20120917135355-fbdef6e99866/go.mod h1:x7AK2h2QzaXVEFi1tbMYMDuvHcCEr1QdMDrg3hkW24Q= -github.com/dchest/uniuri v0.0.0-20160212164326-8902c56451e9 h1:74lLNRzvsdIlkTgfDSMuaPjBr4cf6k7pwQQANm/yLKU= -github.com/dchest/uniuri v0.0.0-20160212164326-8902c56451e9/go.mod h1:GgB8SF9nRG+GqaDtLcwJZsQFhcogVCJ79j4EdT0c2V4= -github.com/dgrijalva/jwt-go v3.2.0+incompatible h1:7qlOGliEKZXTDg6OTjfoBKDXWrumCAMpl/TFQ4/5kLM= -github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= -github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78= -github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc= -github.com/docker/distribution v0.0.0-20170726174610-edc3ab29cdff/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= -github.com/docker/distribution v2.7.1+incompatible h1:a5mlkVzth6W5A4fOsS3D2EO5BUmsJpcB+cRlLU7cSug= -github.com/docker/distribution v2.7.1+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= -github.com/docker/engine v17.12.0-ce-rc1.0.20200309214505-aa6a9891b09c+incompatible h1:hx8H7MbcmXUXAmphQuA/XB7CfSzX4DRrNuHFvfK9aIQ= -github.com/docker/engine v17.12.0-ce-rc1.0.20200309214505-aa6a9891b09c+incompatible/go.mod 
h1:3CPr2caMgTHxxIAZgEMd3uLYPDlRvPqCpyeRf6ncPcY= -github.com/docker/go-connections v0.3.0 h1:3lOnM9cSzgGwx8VfK/NGOW5fLQ0GjIlCkaktF+n1M6o= -github.com/docker/go-connections v0.3.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec= -github.com/docker/go-units v0.3.3 h1:Xk8S3Xj5sLGlG5g67hJmYMmUgXv5N4PhkjJHHqrwnTk= -github.com/docker/go-units v0.3.3/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= -github.com/drone/drone-go v1.7.2-0.20220308165842-f9e4fe31c2af h1:I+xfWPZLMaskFvfFO5fEJ6tVI1Q9bWR9tuORZeaCTPo= -github.com/drone/drone-go v1.7.2-0.20220308165842-f9e4fe31c2af/go.mod h1:fxCf9jAnXDZV1yDr0ckTuWd1intvcQwfJmTRpTZ1mXg= -github.com/drone/drone-runtime v1.0.7-0.20190729202838-87c84080f4a1/go.mod h1:+osgwGADc/nyl40J0fdsf8Z09bgcBZXvXXnLOY48zYs= -github.com/drone/drone-runtime v1.1.1-0.20200623162453-61e33e2cab5d h1:P5HI/Y9hARTZ3F3EKs0kYijhjXZWQRQHYn1neTi0pWM= -github.com/drone/drone-runtime v1.1.1-0.20200623162453-61e33e2cab5d/go.mod h1:4/2QToW5+HGD0y1sTw7X35W1f7YINS14UfDY4isggT8= -github.com/drone/drone-ui v2.11.5+incompatible h1:e+OmpCuxfCS1V8Szx+4OzCMXiM80ZJkV05/BuY8pVv8= -github.com/drone/drone-ui v2.11.5+incompatible/go.mod h1:NBtVWW7NNJpD9+huMD/5TAE1db2nrEh0i35/9Rf1MPI= -github.com/drone/drone-yaml v1.2.4-0.20220204000225-01fb17858c9b h1:NU8JZ2Py6dLa5kktIvcsQ5sokdDbvjL8GUOYEU53Wfk= -github.com/drone/drone-yaml v1.2.4-0.20220204000225-01fb17858c9b/go.mod h1:QsqliFK8nG04AHFN9tTn9XJomRBQHD4wcejWW1uz/10= -github.com/drone/envsubst v1.0.3-0.20200709231038-aa43e1c1a629 h1:rIaZZalMGGPb2cU/+ypuggZ8aMlpa17RUlJUtsMv8pw= -github.com/drone/envsubst v1.0.3-0.20200709231038-aa43e1c1a629/go.mod h1:N2jZmlMufstn1KEqvbHjw40h1KyTmnVzHcSc9bFiJ2g= -github.com/drone/funcmap v0.0.0-20210823160631-9e9dec149056 h1:SCJwMR0FMA0aKwAntCBh02YmtHEnU40zaDzeeCvIRug= -github.com/drone/funcmap v0.0.0-20210823160631-9e9dec149056/go.mod h1:Hph0/pT6ZxbujnE1Z6/08p5I0XXuOsppqF6NQlGOK0E= -github.com/drone/go-license v1.0.2 h1:7OwndfYk+Lp/cGHkxe4HUn/Ysrrw3WYH2pnd99yrkok= -github.com/drone/go-license v1.0.2/go.mod h1:fGRHf+F1cEaw3YVYiJ6js3G3dVhcxyS617RnNRUMsms= -github.com/drone/go-login v1.1.0 h1:anQFRh2Z5ketEJ/LvL6SJ6rIwDdfysGXK5bSXkFLInI= -github.com/drone/go-login v1.1.0/go.mod h1:FLxy9vRzLbyBxoCJYxGbG9R0WGn6OyuvBmAtYNt43uw= -github.com/drone/go-scm v1.28.0 h1:Y5SCf01Iw7/VO8hhjNgAX0eGIXZ225ctw7DlGQHh5g4= -github.com/drone/go-scm v1.28.0/go.mod h1:DFIJJjhMj0TSXPz+0ni4nyZ9gtTtC40Vh/TGRugtyWw= -github.com/drone/signal v1.0.0 h1:NrnM2M/4yAuU/tXs6RP1a1ZfxnaHwYkd0kJurA1p6uI= -github.com/drone/signal v1.0.0/go.mod h1:S8t92eFT0g4WUgEc/LxG+LCuiskpMNsG0ajAMGnyZpc= -github.com/dustin/go-humanize v1.0.0 h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4zYo= -github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= -github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= -github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= -github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= -github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= -github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU= -github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= -github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4= -github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= 
-github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk= -github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= -github.com/go-chi/chi v3.3.3+incompatible h1:KHkmBEMNkwKuK4FdQL7N2wOeB9jnIx7jR5wsuSBEFI8= -github.com/go-chi/chi v3.3.3+incompatible/go.mod h1:eB3wogJHnLi3x/kFX2A+IbTBlXxmMeXJVKy9tTv1XzQ= -github.com/go-chi/cors v1.0.0 h1:e6x8k7uWbUwYs+aXDoiUzeQFT6l0cygBYyNhD7/1Tg0= -github.com/go-chi/cors v1.0.0/go.mod h1:K2Yje0VW/SJzxiyMYu6iPQYa7hMjQX2i/F491VChg1I= -github.com/go-redis/redis v6.15.9+incompatible h1:K0pv1D7EQUjfyoMql+r/jZqCLizCGKFlFgcHWWmHQjg= -github.com/go-redis/redis v6.15.9+incompatible/go.mod h1:NAIEuMOZ/fxfXJIrKDQDz8wamY7mA7PouImQ2Jvg6kA= -github.com/go-redis/redis/v7 v7.4.0 h1:7obg6wUoj05T0EpY0o8B59S9w5yeMWql7sw2kwNW1x4= -github.com/go-redis/redis/v7 v7.4.0/go.mod h1:JDNMw23GTyLNC4GZu9njt15ctBQVn7xjRfnwdHj/Dcg= -github.com/go-redis/redis/v8 v8.1.1/go.mod h1:ysgGY09J/QeDYbu3HikWEIPCwaeOkuNoTgKayTEaEOw= -github.com/go-redis/redis/v8 v8.11.0 h1:O1Td0mQ8UFChQ3N9zFQqo6kTU2cJ+/it88gDB+zg0wo= -github.com/go-redis/redis/v8 v8.11.0/go.mod h1:DLomh7y2e3ggQXQLd1YgmvIfecPJoFl7WU5SOQ/r06M= -github.com/go-redsync/redsync/v4 v4.3.0 h1:5cNxbHJc/tr1KX04piPv8nylURyuT3nqjmodkW8JCjM= -github.com/go-redsync/redsync/v4 v4.3.0/go.mod h1:QBOJAs1k8O6Eyrre4a++pxQgHe5eQ+HF56KuTVv+8Bs= -github.com/go-sql-driver/mysql v1.4.0 h1:7LxgVwFb2hIQtMm87NdgAVfXjnt4OePseqT1tKx+opk= -github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= -github.com/gogo/protobuf v0.0.0-20170307180453-100ba4e88506 h1:zDlw+wgyXdfkRuvFCdEDUiPLmZp2cvf/dWHazY0a5VM= -github.com/gogo/protobuf v0.0.0-20170307180453-100ba4e88506/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= -github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= -github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/mock v1.3.1 h1:qGJ6qTW+x6xX/my+8YUVl4WNpX9B7+/l2tRsHGZ7f2s= -github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= -github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= -github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= -github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= -github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= -github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= -github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= -github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= -github.com/golang/protobuf v1.4.2 h1:+Z5KGCizgyZCbGh1KZqA0fcLLkwbsjIzS4aV2v7wJX0= -github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= -github.com/gomodule/redigo v1.8.2 h1:H5XSIre1MB5NbPYFp+i1NBbb5qN1W8Y8YAQoAYbkm8k= -github.com/gomodule/redigo v1.8.2/go.mod 
h1:P9dn9mFrCBvWhGE1wpxx6fgq7BAeLBk+UUUzlpkBYO0= -github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= -github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= -github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.6 h1:BKbKCqvP6I+rmFHt06ZmyQtvB8xAkWdhFyr0ZUNZcxQ= -github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-jsonnet v0.17.0 h1:/9NIEfhK1NQRKl3sP2536b2+x5HnZMdql7x3yK/l8JY= -github.com/google/go-jsonnet v0.17.0/go.mod h1:sOcuej3UW1vpPTZOr8L7RQimqai1a57bt5j22LzGZCw= -github.com/google/gofuzz v0.0.0-20170612174753-24818f796faf/go.mod h1:HP5RmnzzSNb993RKQDq4+1A4ia9nllfqcQFTQJedwGI= -github.com/google/wire v0.2.1 h1:TYj4Z2qjqxa2ufb34UJqVeO9aznL+i0fLO6TqThKZ7Y= -github.com/google/wire v0.2.1/go.mod h1:ptBl5bWD3nzmJHVNwYHV3v4wdtKzBMlU2YbtKQCG9GI= -github.com/googleapis/gnostic v0.2.0/go.mod h1:sJBsCZ4ayReDTBIg8b9dl28c5xFWyhBTVRp3pOg5EKY= -github.com/gorilla/mux v1.7.4 h1:VuZ8uybHlWmqV03+zRzdwKL4tUnIp1MAQtp1mIFE1bc= -github.com/gorilla/mux v1.7.4/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= -github.com/gosimple/slug v1.3.0 h1:NKQyQMjKkgCpD/Vd+wKtFc7N60bJNCLDubKU/UDKMFI= -github.com/gosimple/slug v1.3.0/go.mod h1:ER78kgg1Mv0NQGlXiDe57DpCyfbNywXXZ9mIorhxAf0= -github.com/gregjones/httpcache v0.0.0-20181110185634-c63ab54fda8f/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA= -github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542 h1:2VTzZjLZBgl62/EtslCrtky5vbi9dd7HrQPQIx6wqiw= -github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542/go.mod h1:Ow0tF8D4Kplbc8s8sSb3V2oUCygFHVp8gC3Dn6U4MNI= -github.com/hashicorp/errwrap v1.0.0 h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA= -github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= -github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= -github.com/hashicorp/go-cleanhttp v0.5.1 h1:dH3aiDG9Jvb5r5+bYHsikaOUIpcM0xvgMXVoDkXMzJM= -github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= -github.com/hashicorp/go-multierror v1.1.0 h1:B9UzwGQJehnUY1yNrnwREHc3fGbC2xefo8g4TbElacI= -github.com/hashicorp/go-multierror v1.1.0/go.mod h1:spPvp8C1qA32ftKqdAHm4hHTbPw+vmowP0z+KUhOZdA= -github.com/hashicorp/go-retryablehttp v0.5.4 h1:1BZvpawXoJCWX6pNtow9+rpEj+3itIlutiqnntI6jOE= -github.com/hashicorp/go-retryablehttp v0.5.4/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs= -github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= -github.com/hashicorp/golang-lru v0.5.1 h1:0hERBMJE1eitiLkihrMvRVBYAkpHzc/J3QdDN+dAcgU= -github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= -github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= -github.com/imdario/mergo v0.3.6/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= -github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg= -github.com/jmespath/go-jmespath v0.4.0/go.mod 
h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= -github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8= -github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= -github.com/jmoiron/sqlx v0.0.0-20180614180643-0dae4fefe7c0 h1:5B0uxl2lzNRVkJVg+uGHxWtRt4C0Wjc6kJKo5XYx8xE= -github.com/jmoiron/sqlx v0.0.0-20180614180643-0dae4fefe7c0/go.mod h1:IiEW3SEiiErVyFdH8NTuWjSifiEQKUoyK3LNqr2kCHU= -github.com/joho/godotenv v1.3.0 h1:Zjp+RcGpHhGlrMbJzXTrZZPrWj+1vfm90La1wgB6Bhc= -github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg= -github.com/json-iterator/go v1.1.5/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= -github.com/kelseyhightower/envconfig v1.3.0 h1:IvRS4f2VcIQy6j4ORGIf9145T/AsUB+oY8LyvN8BXNM= -github.com/kelseyhightower/envconfig v1.3.0/go.mod h1:cccZRl6mQpaq41TPp5QxidR+Sa3axMbJDNb//FQX6Gg= -github.com/konsorten/go-windows-terminal-sequences v1.0.3 h1:CE8S1cTafDpPvMhIxNJKvHsGVBgn1xWYf1NbHQhywc8= -github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= -github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= -github.com/kr/pretty v0.2.0 h1:s5hAObm+yFO5uHYt5dYjxi2rXrsnmRpJx4OYvIWUaQs= -github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= -github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= -github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= -github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= -github.com/lib/pq v1.1.0 h1:/5u4a+KGJptBRqGzPvYQL9p0d/tPR4S31+Tnzj9lEO4= -github.com/lib/pq v1.1.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= -github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= -github.com/mattn/go-ieproxy v0.0.0-20190610004146-91bb50d98149 h1:HfxbT6/JcvIljmERptWhwa8XzP7H3T+Z2N26gTsaDaA= -github.com/mattn/go-ieproxy v0.0.0-20190610004146-91bb50d98149/go.mod h1:31jz6HNzdxOmlERGGEc4v/dMssOfmp2p5bT/okiKFFc= -github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= -github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= -github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOAqxQCu2WE= -github.com/mattn/go-sqlite3 v1.9.0 h1:pDRiWfl+++eC2FEFRy6jXmQlvp4Yh3z1MJKg4UeYM/4= -github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc= -github.com/matttproud/golang_protobuf_extensions v1.0.1 h1:4hp9jkHxhMHkqkrB3Ix0jegS5sx/RkqARlsWZ6pIwiU= -github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= -github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= -github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= -github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= -github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= -github.com/natessilva/dag v0.0.0-20180124060714-7194b8dcc5c4 h1:dnMxwus89s86tI8rcGVp2HwZzlz7c5o92VOy7dSckBQ= -github.com/natessilva/dag v0.0.0-20180124060714-7194b8dcc5c4/go.mod h1:cojhOHk1gbMeklOyDP2oKKLftefXoJreOQGOrXk+Z38= -github.com/nbio/st v0.0.0-20140626010706-e9e8d9816f32 h1:W6apQkHrMkS0Muv8G/TipAy/FJl/rCYT0+EuS8+Z0z4= -github.com/nbio/st 
v0.0.0-20140626010706-e9e8d9816f32/go.mod h1:9wM+0iRr9ahx58uYLpLIr5fm8diHn0JbqRycJi6w0Ms= -github.com/nxadm/tail v1.4.4 h1:DQuhQpB1tVlglWS2hLQ5OV6B5r8aGxSrPc5Qo6uTN78= -github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= -github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= -github.com/onsi/ginkgo v1.10.1/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= -github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= -github.com/onsi/ginkgo v1.14.1/go.mod h1:iSB4RoI2tjJc9BBv4NKIKWKya62Rps+oPG/Lv9klQyY= -github.com/onsi/ginkgo v1.15.0 h1:1V1NfVQR87RtWAgp1lv9JZJ5Jap+XFGKPi00andXGi4= -github.com/onsi/ginkgo v1.15.0/go.mod h1:hF8qUzuuC8DJGygJH3726JnCZX4MYbRB8yFfISqnKUg= -github.com/onsi/gomega v1.7.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= -github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= -github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= -github.com/onsi/gomega v1.10.2/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= -github.com/onsi/gomega v1.10.5 h1:7n6FEkpFmfCoo2t+YYqXH0evK+a9ICQz0xcAy9dYcaQ= -github.com/onsi/gomega v1.10.5/go.mod h1:gza4q3jKQJijlu05nKWRCW/GavJumGt8aNRxWg7mt48= -github.com/opencontainers/go-digest v1.0.0-rc1 h1:WzifXhOVOEOuFYOJAW6aQqW0TooG2iki3E3Ii+WN7gQ= -github.com/opencontainers/go-digest v1.0.0-rc1/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s= -github.com/opencontainers/image-spec v1.0.1 h1:JMemWkRwHx4Zj+fVxWoMCFm/8sYGGrUVojFA6h/TRcI= -github.com/opencontainers/image-spec v1.0.1/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0= -github.com/oxtoacart/bpool v0.0.0-20150712133111-4e1c5567d7c2 h1:CXwSGu/LYmbjEab5aMCs5usQRVBGThelUKBNnoSOuso= -github.com/oxtoacart/bpool v0.0.0-20150712133111-4e1c5567d7c2/go.mod h1:L3UMQOThbttwfYRNFOWLLVXMhk5Lkio4GGOtw5UrxS0= -github.com/petar/GoLLRB v0.0.0-20130427215148-53be0d36a84c/go.mod h1:HUpKUBZnpzkdx0kD/+Yfuft+uD3zHGtXF/XJB14TUr4= -github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU= -github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= -github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= -github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/prometheus/client_golang v0.9.2 h1:awm861/B8OKDd2I/6o1dy3ra4BamzKhYOiGItCeZ740= -github.com/prometheus/client_golang v0.9.2/go.mod h1:OsXs2jCmiKlQ1lTBmv21f2mNfw4xf/QclQDMrYNZzcM= -github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= -github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4 h1:gQz4mCbXsO+nc9n1hCxHcGA3Zx3Eo+UHZoInFGUIXNM= -github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/prometheus/common v0.0.0-20181126121408-4724e9255275 h1:PnBWHBf+6L0jOqq0gIVUe6Yk0/QMZ640k6NvkxcBf+8= -github.com/prometheus/common v0.0.0-20181126121408-4724e9255275/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro= -github.com/prometheus/procfs v0.0.0-20181204211112-1dc9a6cbc91a h1:9a8MnZMP0X2nLJdBg+pBmGgkJlSaKC2KaQmTCk1XDtE= -github.com/prometheus/procfs v0.0.0-20181204211112-1dc9a6cbc91a/go.mod 
h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= -github.com/rainycape/unidecode v0.0.0-20150907023854-cb7f23ec59be h1:ta7tUOvsPHVHGom5hKW5VXNc2xZIkfCKP8iaqOyYtUQ= -github.com/rainycape/unidecode v0.0.0-20150907023854-cb7f23ec59be/go.mod h1:MIDFMn7db1kT65GmV94GzpX9Qdi7N/pQlwb+AN8wh+Q= -github.com/robfig/cron v0.0.0-20180505203441-b41be1df6967 h1:x7xEyJDP7Hv3LVgvWhzioQqbC/KtuUhTigKlH/8ehhE= -github.com/robfig/cron v0.0.0-20180505203441-b41be1df6967/go.mod h1:JGuDeoQd7Z6yL4zQhZ3OPEVHB7fL6Ka6skscFHfmt2k= -github.com/segmentio/ksuid v1.0.2 h1:9yBfKyw4ECGTdALaF09Snw3sLJmYIX6AbPJrAy6MrDc= -github.com/segmentio/ksuid v1.0.2/go.mod h1:BXuJDr2byAiHuQaQtSKoXh1J0YmUDurywOXgB2w+OSU= -github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= -github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0= -github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= -github.com/sirupsen/logrus v1.6.0 h1:UBcNElsrwanuuMsnGSlYmtmgbb23qDR5dG+6X6Oo89I= -github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= -github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= -github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= -github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= -github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= -github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= -github.com/stretchr/testify v1.6.1 h1:hDPOHmpOpP40lSULcqw7IrRb/u7w6RpDC9399XyoNd0= -github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stvp/tempredis v0.0.0-20181119212430-b82af8480203 h1:QVqDTf3h2WHt08YuiTGPZLls0Wq99X9bWd0Q5ZSBesM= -github.com/stvp/tempredis v0.0.0-20181119212430-b82af8480203/go.mod h1:oqN97ltKNihBbwlX8dLpwxCl3+HnXKV/R0e+sRLd9C8= -github.com/unrolled/secure v0.0.0-20181022170031-4b6b7cf51606 h1:dU9yXzNi9rl6Mou7+3npdfPyeFPb2+7BHs3zL47bhPY= -github.com/unrolled/secure v0.0.0-20181022170031-4b6b7cf51606/go.mod h1:mnPT77IAdsi/kV7+Es7y+pXALeV3h7G6dQF6mNYjcLA= -github.com/vinzenz/yaml v0.0.0-20170920082545-91409cdd725d h1:3wDi6J5APMqaHBVPuVd7RmHD2gRTfqbdcVSpCNoUWtk= -github.com/vinzenz/yaml v0.0.0-20170920082545-91409cdd725d/go.mod h1:mb5taDqMnJiZNRQ3+02W2IFG+oEz1+dTuCXkp4jpkfo= -github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -go.opentelemetry.io/otel v0.11.0/go.mod h1:G8UCk+KooF2HLkgo8RHX9epABH/aRGYET7gQOqBVdB0= -go.starlark.net v0.0.0-20221020143700-22309ac47eac h1:gBO5Qfcw5V9404yzsu2FEIsxK/u2mBNTNogK0uIoVhk= -go.starlark.net v0.0.0-20221020143700-22309ac47eac/go.mod h1:kIVgS18CjmEC3PqMd5kaJSGEifyV/CeB9x506ZJ1Vbk= -golang.org/x/crypto v0.0.0-20180820150726-614d502a4dac/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= -golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= -golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20191206172530-e9b2fee46413/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9 h1:psW17arqaxU48Z5kZ0CQnkZWQJsqcURM6tKiBApRjXI= 
-golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= -golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= -golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181005035420-146acd28ed58/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= -golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190923162816-aa69164e4478/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20201202161906-c7110b5ffcbb h1:eBmm0M9fYhWpKZLjQUUKka/LtIxf46G4fxeEz5KJr9U= -golang.org/x/net v0.0.0-20201202161906-c7110b5ffcbb/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= -golang.org/x/oauth2 v0.0.0-20181203162652-d668ce993890/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= -golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9 h1:SQFwaSi55rU7vdNs9Yr0Z324VNlrF+0wMqRXT4St8ck= -golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20181005133103-4497e2df6f9e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= 
-golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191010194322-b09406accb47/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200519105757-fe76b779f299/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8 h1:0A+M6Uqn+Eje4kHMK80dtF3JCXC4ykBgQG4Fe06QRhQ= -golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/term v0.0.0-20220526004731-065cf7ba2467/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= -golang.org/x/text v0.3.3 h1:cokOdA+Jmi5PJGXLlLllQSgYigAEfHXJAERHVMaCc2k= -golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20190308202827-9d24e82272b4 h1:SvFZT6jyqRaOeXpc5h/JSfZenJ2O330aBsf7JfSUXmQ= -golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20181017214349-06f26fdaaa28/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= -golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod 
h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= -golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= -google.golang.org/appengine v1.3.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.6.6 h1:lMO5rYAqUxkmaj76jAkRUvt5JZgFymx/+Q5Mzfivuhc= -google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= -google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= -google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013 h1:+kGHl1aib/qcwaRi1CbqBZ1rk19r85MNUf8HaBghugY= -google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= -google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= -google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= -google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= -google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.30.0 h1:M5a8xTlYTxwMn5ZFkwhRabsygDY5G8TYLyQDBxJNAxE= -google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= -google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= -google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= -google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= -google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= -google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.25.0 h1:Ejskq+SyPohKW+1uil0JJMtmHCgJPJ/qWTxr8qp+R4c= -google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= -gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo= -gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/fsnotify.v1 v1.4.7/go.mod 
h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= -gopkg.in/h2non/gock.v1 v1.0.14 h1:fTeu9fcUvSnLNacYvYI54h+1/XEteDyHvrVCZEEEYNM= -gopkg.in/h2non/gock.v1 v1.0.14/go.mod h1:sX4zAkdYX1TRGJ2JY156cFspQn4yRWn6p9EMdODlynE= -gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= -gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= -gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= -gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU= -gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= -gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gotest.tools v2.2.0+incompatible h1:VsBPFP1AI068pPrMxtb/S8Zkgf9xEmTLJjfM+P5UIEo= -gotest.tools v2.2.0+incompatible/go.mod h1:DsYFclhRJ6vuDpmuTbkuFWG+y2sxOXAzmJt81HFBacw= -honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -k8s.io/api v0.0.0-20181130031204-d04500c8c3dd/go.mod h1:iuAfoD4hCxJ8Onx9kaTIt30j7jUFS00AXQi6QMi99vA= -k8s.io/apimachinery v0.0.0-20181201231028-18a5ff3097b4/go.mod h1:ccL7Eh7zubPUSh9A3USN90/OzHNSVN6zxzde07TDCL0= -k8s.io/client-go v9.0.0+incompatible/go.mod h1:7vJpHMYJwNQCWgzmNV+VYUl1zCObLyodBc8nIyt8L5s= -k8s.io/klog v0.1.0/go.mod h1:Gq+BEi5rUBO/HRz0bTSXDUcqjScdoY3a9IHpCEIOOfk= -sigs.k8s.io/yaml v1.1.0/go.mod h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o= diff --git a/handler/api/acl/acl.go b/handler/api/acl/acl.go deleted file mode 100644 index e76b3121ec..0000000000 --- a/handler/api/acl/acl.go +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package acl - -import ( - "net/http" - - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/handler/api/request" - "github.com/drone/drone/logger" -) - -// AuthorizeUser returns an http.Handler middleware that authorizes only -// authenticated users to proceed to the next handler in the chain. Guest users -// are rejected with a 401 unauthorized error. -func AuthorizeUser(next http.Handler) http.Handler { - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - _, ok := request.UserFrom(r.Context()) - if !ok { - render.Unauthorized(w, errors.ErrUnauthorized) - logger.FromRequest(r). 
- Debugln("api: authentication required") - } else { - next.ServeHTTP(w, r) - } - }) -} - -// AuthorizeAdmin returns an http.Handler middleware that authorizes only -// system administrators to proceed to the next handler in the chain. -func AuthorizeAdmin(next http.Handler) http.Handler { - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - user, ok := request.UserFrom(r.Context()) - if !ok { - render.Unauthorized(w, errors.ErrUnauthorized) - logger.FromRequest(r). - Debugln("api: authentication required") - } else if !user.Admin { - render.Forbidden(w, errors.ErrForbidden) - logger.FromRequest(r). - Debugln("api: administrative access required") - } else { - next.ServeHTTP(w, r) - } - }) -} diff --git a/handler/api/acl/acl_test.go b/handler/api/acl/acl_test.go deleted file mode 100644 index 0be79fc2d1..0000000000 --- a/handler/api/acl/acl_test.go +++ /dev/null @@ -1,144 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package acl - -import ( - "io/ioutil" - "net/http" - "net/http/httptest" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/request" - - "github.com/sirupsen/logrus" -) - -func init() { - logrus.SetOutput(ioutil.Discard) -} - -var ( - mockUser = &core.User{ - ID: 1, - Login: "octocat", - Admin: false, - Active: true, - } - - mockUserAdmin = &core.User{ - ID: 1, - Login: "octocat", - Admin: true, - Active: true, - } - - mockUserInactive = &core.User{ - ID: 1, - Login: "octocat", - Admin: false, - Active: false, - } - - mockRepo = &core.Repository{ - ID: 1, - UID: "42", - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - Counter: 42, - Branch: "master", - Private: true, - Visibility: core.VisibilityPrivate, - } -) - -func TestAuthorizeUser(t *testing.T) { - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - request.WithUser(r.Context(), mockUser), - ) - - AuthorizeUser( - http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - // use dummy status code to signal the next handler in - // the middleware chain was properly invoked. - w.WriteHeader(http.StatusTeapot) - }), - ).ServeHTTP(w, r) - - if got, want := w.Code, http.StatusTeapot; got != want { - t.Errorf("Want status code %d, got %d", want, got) - } -} - -func TestAuthorizeUserErr(t *testing.T) { - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - - AuthorizeUser( - http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - t.Errorf("Must not invoke next handler in middleware chain") - }), - ).ServeHTTP(w, r) - - if got, want := w.Code, http.StatusUnauthorized; got != want { - t.Errorf("Want status code %d, got %d", want, got) - } -} - -func TestAuthorizeAdmin(t *testing.T) { - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - request.WithUser(r.Context(), &core.User{Admin: true}), - ) - - AuthorizeAdmin( - http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - // use dummy status code to signal the next handler in - // the middleware chain was properly invoked. 
- w.WriteHeader(http.StatusTeapot) - }), - ).ServeHTTP(w, r) - - if got, want := w.Code, http.StatusTeapot; got != want { - t.Errorf("Want status code %d, got %d", want, got) - } -} - -func TestAuthorizeAdminUnauthorized(t *testing.T) { - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - - AuthorizeAdmin( - http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - t.Errorf("Must not invoke next handler in middleware chain") - }), - ).ServeHTTP(w, r) - - if got, want := w.Code, http.StatusUnauthorized; got != want { - t.Errorf("Want status code %d, got %d", want, got) - } -} - -func TestAuthorizeAdminForbidden(t *testing.T) { - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - request.WithUser(r.Context(), &core.User{Admin: false}), - ) - - AuthorizeAdmin( - http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - t.Errorf("Must not invoke next handler in middleware chain") - }), - ).ServeHTTP(w, r) - - if got, want := w.Code, http.StatusForbidden; got != want { - t.Errorf("Want status code %d, got %d", want, got) - } -} diff --git a/handler/api/acl/check.go b/handler/api/acl/check.go deleted file mode 100644 index 2eaf6f7ed8..0000000000 --- a/handler/api/acl/check.go +++ /dev/null @@ -1,146 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package acl - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/handler/api/request" - "github.com/drone/drone/logger" - - "github.com/go-chi/chi" - "github.com/sirupsen/logrus" -) - -// CheckReadAccess returns an http.Handler middleware that authorizes only -// authenticated users with read repository access to proceed to the next -// handler in the chain. -func CheckReadAccess() func(http.Handler) http.Handler { - return CheckAccess(true, false, false) -} - -// CheckWriteAccess returns an http.Handler middleware that authorizes only -// authenticated users with write repository access to proceed to the next -// handler in the chain. -func CheckWriteAccess() func(http.Handler) http.Handler { - return CheckAccess(true, true, false) -} - -// CheckAdminAccess returns an http.Handler middleware that authorizes only -// authenticated users with admin repository access to proceed to the next -// handler in the chain. -func CheckAdminAccess() func(http.Handler) http.Handler { - return CheckAccess(true, true, true) -} - -// CheckAccess returns an http.Handler middleware that authorizes only -// authenticated users with the required read, write or admin access -// permissions to the requested repository resource. 
-func CheckAccess(read, write, admin bool) func(http.Handler) http.Handler { - return func(next http.Handler) http.Handler { - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - var ( - ctx = r.Context() - owner = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - ) - log := logger.FromRequest(r). - WithField("namespace", owner). - WithField("name", name) - - user, ok := request.UserFrom(ctx) - switch { - case ok == false && write == true: - render.Unauthorized(w, errors.ErrUnauthorized) - log.Debugln("api: authentication required for write access") - return - case ok == false && admin == true: - render.Unauthorized(w, errors.ErrUnauthorized) - log.Debugln("api: authentication required for admin access") - return - case ok == true && user.Admin == true: - log.Debugln("api: root access granted") - next.ServeHTTP(w, r) - return - } - - repo, noRepo := request.RepoFrom(ctx) - if !noRepo { - // this should never happen. the repository - // should always be injected into the context - // by an upstream handler in the chain. - log.Errorln("api: null repository in context") - render.NotFound(w, errors.ErrNotFound) - return - } - - log = log.WithField("visibility", repo.Visibility) - - switch { - case admin == true: // continue - case write == true: // continue - case repo.Visibility == core.VisibilityPublic: - log.Debugln("api: read access granted") - next.ServeHTTP(w, r) - return - case ok == false: - render.Unauthorized(w, errors.ErrUnauthorized) - log.Debugln("api: authentication required") - return - case ok == true && repo.Visibility == core.VisibilityInternal: - log.Debugln("api: read access granted") - next.ServeHTTP(w, r) - return - } - - perm, ok := request.PermFrom(ctx) - if !ok { - render.NotFound(w, errors.ErrNotFound) - log.Debugln("api: repository permissions not found") - return - } - log = log.WithFields( - logrus.Fields{ - "read": perm.Read, - "write": perm.Write, - "admin": perm.Admin, - }, - ) - - switch { - case user.Active == false: - render.Forbidden(w, errors.ErrForbidden) - log.Debugln("api: active account required") - case read == true && perm.Read == false: - render.NotFound(w, errors.ErrNotFound) - log.Debugln("api: read access required") - case write == true && perm.Write == false: - render.NotFound(w, errors.ErrNotFound) - log.Debugln("api: write access required") - case admin == true && perm.Admin == false: - render.NotFound(w, errors.ErrNotFound) - log.Debugln("api: admin access required") - default: - log.Debug("api: access granted") - next.ServeHTTP(w, r.WithContext( - request.WithPerm(ctx, perm), - )) - } - }) - } -} diff --git a/handler/api/acl/check_test.go b/handler/api/acl/check_test.go deleted file mode 100644 index 02e396f41e..0000000000 --- a/handler/api/acl/check_test.go +++ /dev/null @@ -1,827 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package acl - -import ( - "context" - "encoding/json" - "net/http" - "net/http/httptest" - "testing" - "time" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/handler/api/request" - "github.com/google/go-cmp/cmp" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" -) - -var noContext = context.Background() - -// this test verifies that a 401 unauthorized error is written to -// the response if the client is not authenticated and repository -// visibility is internal or private. 
-func TestCheckAccess_Guest_Unauthorized(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/api/repos/octocat/hello-world", nil) - r = r.WithContext( - request.WithRepo(noContext, mockRepo), - ) - - router := chi.NewRouter() - router.Route("/api/repos/{owner}/{name}", func(router chi.Router) { - router.Use(CheckReadAccess()) - router.Get("/", func(w http.ResponseWriter, r *http.Request) { - t.Errorf("Must not invoke next handler in middleware chain") - }) - }) - - router.ServeHTTP(w, r) - - if got, want := w.Code, http.StatusUnauthorized; got != want { - t.Errorf("Want status code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrUnauthorized - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -// this test verifies the the next handler in the middleware -// chain is processed if the user is not authenticated BUT -// the repository is publicly visible. -func TestCheckAccess_Guest_PublicVisibility(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockRepo := *mockRepo - mockRepo.Visibility = core.VisibilityPublic - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/api/repos/octocat/hello-world", nil) - r = r.WithContext( - request.WithRepo(noContext, &mockRepo), - ) - - router := chi.NewRouter() - router.Route("/api/repos/{owner}/{name}", func(router chi.Router) { - router.Use(CheckReadAccess()) - router.Get("/", func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusTeapot) - }) - }) - - router.ServeHTTP(w, r) - - if got, want := w.Code, http.StatusTeapot; got != want { - t.Errorf("Want status code %d, got %d", want, got) - } -} - -// this test verifies that a 401 unauthorized error is written to -// the response if the repository visibility is internal, and the -// client is not authenticated. -func TestCheckAccess_Guest_InternalVisibility(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockRepo := *mockRepo - mockRepo.Visibility = core.VisibilityInternal - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/api/repos/octocat/hello-world", nil) - r = r.WithContext( - request.WithRepo(noContext, &mockRepo), - ) - - router := chi.NewRouter() - router.Route("/api/repos/{owner}/{name}", func(router chi.Router) { - router.Use(CheckReadAccess()) - router.Get("/", func(w http.ResponseWriter, r *http.Request) { - t.Errorf("Must not invoke next handler in middleware chain") - }) - }) - - router.ServeHTTP(w, r) - - if got, want := w.Code, http.StatusUnauthorized; got != want { - t.Errorf("Want status code %d, got %d", want, got) - } -} - -// this test verifies the the next handler in the middleware -// chain is processed if the user is authenticated AND -// the repository is publicly visible. 
-func TestCheckAccess_Authenticated_PublicVisibility(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockRepo := *mockRepo - mockRepo.Visibility = core.VisibilityPublic - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/api/repos/octocat/hello-world", nil) - r = r.WithContext( - request.WithUser( - request.WithRepo(noContext, &mockRepo), mockUser), - ) - - router := chi.NewRouter() - router.Route("/api/repos/{owner}/{name}", func(router chi.Router) { - router.Use(CheckReadAccess()) - router.Get("/", func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusTeapot) - }) - }) - - router.ServeHTTP(w, r) - - if got, want := w.Code, http.StatusTeapot; got != want { - t.Errorf("Want status code %d, got %d", want, got) - } -} - -// this test verifies the the next handler in the middleware -// chain is processed if the user is authenticated AND -// the repository has internal visible. -func TestCheckAccess_Authenticated_InternalVisibility(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockRepo := *mockRepo - mockRepo.Visibility = core.VisibilityInternal - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/api/repos/octocat/hello-world", nil) - r = r.WithContext( - request.WithUser( - request.WithRepo(noContext, &mockRepo), mockUser), - ) - - router := chi.NewRouter() - router.Route("/api/repos/{owner}/{name}", func(router chi.Router) { - router.Use(CheckReadAccess()) - router.Get("/", func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusTeapot) - }) - }) - - router.ServeHTTP(w, r) - - if got, want := w.Code, http.StatusTeapot; got != want { - t.Errorf("Want status code %d, got %d", want, got) - } -} - -// this test verifies that a 404 not found error is written to -// the response if the repository is not found AND the user is -// authenticated. -func TestCheckAccess_Authenticated_RepositoryNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/api/repos/octocat/hello-world", nil) - - router := chi.NewRouter() - router.Route("/api/repos/{owner}/{name}", func(router chi.Router) { - router.Use(CheckReadAccess()) - router.Get("/", func(w http.ResponseWriter, r *http.Request) { - t.Errorf("Must not invoke next handler in middleware chain") - }) - }) - - router.ServeHTTP(w, r) - - if got, want := w.Code, http.StatusNotFound; got != want { - t.Errorf("Want status code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -// this test verifies that a 404 not found error is written to -// the response if the user does not have permissions to access -// the repository. 
-func TestCheckAccess_Permission_NotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/api/repos/octocat/hello-world", nil) - r = r.WithContext( - request.WithUser( - request.WithRepo(noContext, mockRepo), mockUser), - ) - - router := chi.NewRouter() - router.Route("/api/repos/{owner}/{name}", func(router chi.Router) { - router.Use(CheckReadAccess()) - router.Get("/", func(w http.ResponseWriter, r *http.Request) { - t.Errorf("Must not invoke next handler in middleware chain") - }) - }) - - router.ServeHTTP(w, r) - - if got, want := w.Code, http.StatusNotFound; got != want { - t.Errorf("Want status code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -// this test verifies the the next handler in the middleware -// chain is processed if the user has read access to the -// repository. -func TestCheckReadAccess(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - readAccess := &core.Perm{ - Synced: time.Now().Unix(), - Read: true, - Write: false, - Admin: false, - } - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/api/repos/octocat/hello-world", nil) - r = r.WithContext( - request.WithUser(r.Context(), mockUser), - ) - r = r.WithContext( - request.WithPerm( - request.WithUser( - request.WithRepo(noContext, mockRepo), - mockUser, - ), - readAccess, - ), - ) - - router := chi.NewRouter() - router.Route("/api/repos/{owner}/{name}", func(router chi.Router) { - router.Use(CheckReadAccess()) - router.Get("/", func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusTeapot) - }) - }) - - router.ServeHTTP(w, r) - - if got, want := w.Code, http.StatusTeapot; got != want { - t.Errorf("Want status code %d, got %d", want, got) - } -} - -// this test verifies that a 404 not found error is written to -// the response if the user lacks read access to the repository. -func TestCheckReadAccess_InsufficientPermissions(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - noAccess := &core.Perm{ - Synced: time.Now().Unix(), - Read: false, - Write: false, - Admin: false, - } - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/api/repos/octocat/hello-world", nil) - r = r.WithContext( - request.WithPerm( - request.WithUser( - request.WithRepo(noContext, mockRepo), - mockUser, - ), - noAccess, - ), - ) - - router := chi.NewRouter() - router.Route("/api/repos/{owner}/{name}", func(router chi.Router) { - router.Use(CheckReadAccess()) - router.Get("/", func(w http.ResponseWriter, r *http.Request) { - t.Errorf("Must not invoke next handler in middleware chain") - }) - }) - - router.ServeHTTP(w, r) - - if got, want := w.Code, http.StatusNotFound; got != want { - t.Errorf("Want status code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -// this test verifies the the next handler in the middleware -// chain is processed if the user has write access to the -// repository. 
-func TestCheckWriteAccess(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - writeAccess := &core.Perm{ - Synced: time.Now().Unix(), - Read: true, - Write: true, - Admin: false, - } - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/api/repos/octocat/hello-world", nil) - r = r.WithContext( - request.WithPerm( - request.WithUser( - request.WithRepo(noContext, mockRepo), - mockUser, - ), - writeAccess, - ), - ) - - router := chi.NewRouter() - router.Route("/api/repos/{owner}/{name}", func(router chi.Router) { - router.Use(CheckWriteAccess()) - router.Get("/", func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusTeapot) - }) - }) - - router.ServeHTTP(w, r) - - if got, want := w.Code, http.StatusTeapot; got != want { - t.Errorf("Want status code %d, got %d", want, got) - } -} - -// this test verifies the the next handler in the middleware -// chain is not processed if the user has write access BUT -// has been inactivated (e.g. blocked). -func TestCheckWriteAccess_InactiveUser(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - writeAccess := &core.Perm{ - Synced: time.Now().Unix(), - Read: true, - Write: true, - Admin: false, - } - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/api/repos/octocat/hello-world", nil) - r = r.WithContext( - request.WithPerm( - request.WithUser( - request.WithRepo(noContext, mockRepo), - mockUserInactive, - ), - writeAccess, - ), - ) - - router := chi.NewRouter() - router.Route("/api/repos/{owner}/{name}", func(router chi.Router) { - router.Use(CheckWriteAccess()) - router.Get("/", func(w http.ResponseWriter, r *http.Request) { - t.Error("should not invoke handler") - }) - }) - - router.ServeHTTP(w, r) - - if got, want := w.Code, http.StatusForbidden; got != want { - t.Errorf("Want status code %d, got %d", want, got) - } -} - -// this test verifies that a 404 not found error is written to -// the response if the user lacks write access to the repository. -// -// TODO(bradrydzewski) we should consider returning a 403 forbidden -// if the user has read access. -func TestCheckWriteAccess_InsufficientPermissions(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - noAccess := &core.Perm{ - Synced: time.Now().Unix(), - Read: true, - Write: false, - Admin: false, - } - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/api/repos/octocat/hello-world", nil) - r = r.WithContext( - request.WithPerm( - request.WithUser( - request.WithRepo(noContext, mockRepo), - mockUser, - ), - noAccess, - ), - ) - - router := chi.NewRouter() - router.Route("/api/repos/{owner}/{name}", func(router chi.Router) { - router.Use(CheckWriteAccess()) - router.Get("/", func(w http.ResponseWriter, r *http.Request) { - t.Errorf("Must not invoke next handler in middleware chain") - }) - }) - - router.ServeHTTP(w, r) - - if got, want := w.Code, http.StatusNotFound; got != want { - t.Errorf("Want status code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -// this test verifies the the next handler in the middleware -// chain is processed if the user has admin access to the -// repository. 
-func TestCheckAdminAccess(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - noAccess := &core.Perm{ - Synced: time.Now().Unix(), - Read: true, - Write: true, - Admin: true, - } - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/api/repos/octocat/hello-world", nil) - r = r.WithContext( - request.WithPerm( - request.WithUser( - request.WithRepo(noContext, mockRepo), - mockUser, - ), - noAccess, - ), - ) - - router := chi.NewRouter() - router.Route("/api/repos/{owner}/{name}", func(router chi.Router) { - router.Use(CheckAdminAccess()) - router.Get("/", func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusTeapot) - }) - }) - - router.ServeHTTP(w, r) - - if got, want := w.Code, http.StatusTeapot; got != want { - t.Errorf("Want status code %d, got %d", want, got) - } -} - -// this test verifies that a 404 not found error is written to -// the response if the user lacks admin access to the repository. -// -// TODO(bradrydzewski) we should consider returning a 403 forbidden -// if the user has read access. -func TestCheckAdminAccess_InsufficientPermissions(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - noAccess := &core.Perm{ - Synced: time.Now().Unix(), - Read: true, - Write: true, - Admin: false, - } - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/api/repos/octocat/hello-world", nil) - r = r.WithContext( - request.WithPerm( - request.WithUser( - request.WithRepo(noContext, mockRepo), - mockUser, - ), - noAccess, - ), - ) - - router := chi.NewRouter() - router.Route("/api/repos/{owner}/{name}", func(router chi.Router) { - router.Use(CheckAdminAccess()) - router.Get("/", func(w http.ResponseWriter, r *http.Request) { - t.Errorf("Must not invoke next handler in middleware chain") - }) - }) - - router.ServeHTTP(w, r) - - if got, want := w.Code, http.StatusNotFound; got != want { - t.Errorf("Want status code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -// this test verifies the the next handler in the middleware -// chain is processed if the authenticated user is a system -// administrator. -func TestCheckAdminAccess_SystemAdmin(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - user := &core.User{ID: 1, Admin: true, Active: true} - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/api/repos/octocat/hello-world", nil) - r = r.WithContext( - request.WithUser(r.Context(), user), - ) - - router := chi.NewRouter() - router.Route("/api/repos/{owner}/{name}", func(router chi.Router) { - router.Use(CheckAdminAccess()) - router.Get("/", func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusTeapot) - }) - }) - - router.ServeHTTP(w, r) - - if got, want := w.Code, http.StatusTeapot; got != want { - t.Errorf("Want status code %d, got %d", want, got) - } -} - -// this test verifies that a 401 unauthorized error is written to -// the response if the client is not authenticated and write -// access is required. 
-func TestCheckAccess_Guest_Write(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/api/repos/octocat/hello-world", nil) - r = r.WithContext( - request.WithRepo(noContext, mockRepo), - ) - - router := chi.NewRouter() - router.Route("/api/repos/{owner}/{name}", func(router chi.Router) { - router.Use(CheckAccess(true, true, false)) - router.Get("/", func(w http.ResponseWriter, r *http.Request) { - t.Errorf("Must not invoke next handler in middleware chain") - }) - }) - router.ServeHTTP(w, r) - - if got, want := w.Code, http.StatusUnauthorized; got != want { - t.Errorf("Want status code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrUnauthorized - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -// this test verifies that a 401 unauthorized error is written to -// the response if the client is not authenticated and admin -// access is required. -func TestCheckAccess_Guest_Admin(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/api/repos/octocat/hello-world", nil) - r = r.WithContext( - request.WithRepo(noContext, mockRepo), - ) - - router := chi.NewRouter() - router.Route("/api/repos/{owner}/{name}", func(router chi.Router) { - router.Use(CheckAccess(true, false, true)) - router.Get("/", func(w http.ResponseWriter, r *http.Request) { - t.Errorf("Must not invoke next handler in middleware chain") - }) - }) - router.ServeHTTP(w, r) - - if got, want := w.Code, http.StatusUnauthorized; got != want { - t.Errorf("Want status code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrUnauthorized - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -// // this test verifies the the next handler in the middleware -// // chain is processed if the authenticated has read permissions -// // that are successfully synchronized with the source. 
-// func TestCheckAccess_RefreshPerms(t *testing.T) { -// controller := gomock.NewController(t) -// defer controller.Finish() - -// expiredAccess := &core.Perm{ -// Synced: 0, -// Read: false, -// Write: false, -// Admin: false, -// } - -// updatedAccess := &core.Perm{ -// Read: true, -// Write: true, -// Admin: true, -// } - -// checkPermUpdate := func(ctx context.Context, perm *core.Perm) { -// if perm.Synced == 0 { -// t.Errorf("Expect synced timestamp updated") -// } -// if perm.Read == false { -// t.Errorf("Expect Read flag updated") -// } -// if perm.Write == false { -// t.Errorf("Expect Write flag updated") -// } -// if perm.Admin == false { -// t.Errorf("Expect Admin flag updated") -// } -// } - -// repos := mock.NewMockRepositoryStore(controller) -// repos.EXPECT().FindName(gomock.Any(), "octocat", "hello-world").Return(mockRepo, nil) - -// perms := mock.NewMockPermStore(controller) -// perms.EXPECT().Find(gomock.Any(), mockRepo.UID, mockUser.ID).Return(expiredAccess, nil) -// perms.EXPECT().Update(gomock.Any(), expiredAccess).Return(nil).Do(checkPermUpdate) - -// service := mock.NewMockRepositoryService(controller) -// service.EXPECT().FindPerm(gomock.Any(), "octocat/hello-world").Return(updatedAccess, nil) - -// factory := mock.NewMockRepositoryServiceFactory(controller) -// factory.EXPECT().Create(mockUser).Return(service) - -// w := httptest.NewRecorder() -// r := httptest.NewRequest("GET", "/api/repos/octocat/hello-world", nil) -// r = r.WithContext( -// request.WithUser(r.Context(), mockUser), -// ) - -// router := chi.NewRouter() -// router.Route("/api/repos/{owner}/{name}", func(router chi.Router) { -// router.Use(CheckReadAccess(factory, repos, perms)) -// router.Get("/", func(w http.ResponseWriter, r *http.Request) { -// w.WriteHeader(http.StatusTeapot) -// }) -// }) - -// router.ServeHTTP(w, r) - -// if got, want := w.Code, http.StatusTeapot; got != want { -// t.Errorf("Want status code %d, got %d", want, got) -// } -// } - -// // this test verifies that a 404 not found error is written to -// // the response if the user permissions are expired and the -// // updated permissions cannot be fetched. 
-// func TestCheckAccess_RefreshPerms_Error(t *testing.T) { -// controller := gomock.NewController(t) -// defer controller.Finish() - -// expiredAccess := &core.Perm{ -// Synced: 0, -// Read: false, -// Write: false, -// Admin: false, -// } - -// repos := mock.NewMockRepositoryStore(controller) -// repos.EXPECT().FindName(gomock.Any(), "octocat", "hello-world").Return(mockRepo, nil) - -// perms := mock.NewMockPermStore(controller) -// perms.EXPECT().Find(gomock.Any(), mockRepo.UID, mockUser.ID).Return(expiredAccess, nil) - -// service := mock.NewMockRepositoryService(controller) -// service.EXPECT().FindPerm(gomock.Any(), "octocat/hello-world").Return(nil, io.EOF) - -// factory := mock.NewMockRepositoryServiceFactory(controller) -// factory.EXPECT().Create(mockUser).Return(service) - -// w := httptest.NewRecorder() -// r := httptest.NewRequest("GET", "/api/repos/octocat/hello-world", nil) -// r = r.WithContext( -// request.WithUser(r.Context(), mockUser), -// ) - -// router := chi.NewRouter() -// router.Route("/api/repos/{owner}/{name}", func(router chi.Router) { -// router.Use(CheckReadAccess(factory, repos, perms)) -// router.Get("/", func(w http.ResponseWriter, r *http.Request) { -// w.WriteHeader(http.StatusTeapot) -// }) -// }) - -// router.ServeHTTP(w, r) -// if got, want := w.Code, 404; got != want { -// t.Errorf("Want status code %d, got %d", want, got) -// } -// } - -// // this test verifies the the next handler in the middleware -// // chain is processed if the user permissions are expired, -// // updated permissions are fetched, but fail the changes fail -// // to persist to the database. We know the user has access, -// // so we allow them to proceed even in the event of a failure. -// func TestCheckAccess_RefreshPerms_CannotSave(t *testing.T) { -// controller := gomock.NewController(t) -// defer controller.Finish() - -// expiredAccess := &core.Perm{ -// Synced: 0, -// Read: false, -// Write: false, -// Admin: false, -// } - -// updatedAccess := &core.Perm{ -// Read: true, -// Write: true, -// Admin: true, -// } - -// service := mock.NewMockRepositoryService(controller) -// service.EXPECT().FindPerm(gomock.Any(), "octocat/hello-world").Return(updatedAccess, nil) - -// factory := mock.NewMockRepositoryServiceFactory(controller) -// factory.EXPECT().Create(mockUser).Return(service) - -// repos := mock.NewMockRepositoryStore(controller) -// repos.EXPECT().FindName(gomock.Any(), "octocat", "hello-world").Return(mockRepo, nil) - -// perms := mock.NewMockPermStore(controller) -// perms.EXPECT().Find(gomock.Any(), mockRepo.UID, mockUser.ID).Return(expiredAccess, nil) -// perms.EXPECT().Update(gomock.Any(), expiredAccess).Return(io.EOF) - -// w := httptest.NewRecorder() -// r := httptest.NewRequest("GET", "/api/repos/octocat/hello-world", nil) -// r = r.WithContext( -// request.WithUser(r.Context(), mockUser), -// ) - -// router := chi.NewRouter() -// router.Route("/api/repos/{owner}/{name}", func(router chi.Router) { -// router.Use(CheckReadAccess(factory, repos, perms)) -// router.Get("/", func(w http.ResponseWriter, r *http.Request) { -// w.WriteHeader(http.StatusTeapot) -// }) -// }) - -// router.ServeHTTP(w, r) -// if got, want := w.Code, http.StatusTeapot; got != want { -// t.Errorf("Want status code %d, got %d", want, got) -// } -// } diff --git a/handler/api/acl/org.go b/handler/api/acl/org.go deleted file mode 100644 index 52d6c98890..0000000000 --- a/handler/api/acl/org.go +++ /dev/null @@ -1,86 +0,0 @@ -// Copyright 2019 Drone IO, Inc. 
-// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package acl - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/handler/api/request" - "github.com/drone/drone/logger" - - "github.com/go-chi/chi" -) - -// CheckMembership returns an http.Handler middleware that authorizes only -// authenticated users with the required membership to an organization -// to the requested repository resource. -func CheckMembership(service core.OrganizationService, admin bool) func(http.Handler) http.Handler { - return func(next http.Handler) http.Handler { - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - namespace := chi.URLParam(r, "namespace") - log := logger.FromRequest(r) - ctx := r.Context() - - user, ok := request.UserFrom(ctx) - if !ok { - render.Unauthorized(w, errors.ErrUnauthorized) - log.Debugln("api: authentication required for access") - return - } - log = log.WithField("user.admin", user.Admin) - - // if the user is an administrator they are always - // granted access to the organization data. - if user.Admin { - next.ServeHTTP(w, r) - return - } - - if user.Login == namespace { - next.ServeHTTP(w, r) - return - } - - isMember, isAdmin, err := service.Membership(ctx, user, namespace) - if err != nil { - render.Unauthorized(w, errors.ErrForbidden) - log.Debugln("api: organization membership not found") - return - } - - log = log. - WithField("organization.member", isMember). - WithField("organization.admin", isAdmin) - - if isMember == false { - render.Unauthorized(w, errors.ErrForbidden) - log.Debugln("api: organization membership is required") - return - } - - if isAdmin == false && admin == true { - render.Unauthorized(w, errors.ErrForbidden) - log.Debugln("api: organization administrator is required") - return - } - - log.Debugln("api: organization membership verified") - next.ServeHTTP(w, r) - }) - } -} diff --git a/handler/api/acl/org_test.go b/handler/api/acl/org_test.go deleted file mode 100644 index c0082611ad..0000000000 --- a/handler/api/acl/org_test.go +++ /dev/null @@ -1,205 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -package acl - -import ( - "errors" - "net/http" - "net/http/httptest" - "testing" - - "github.com/drone/drone/handler/api/request" - "github.com/drone/drone/mock" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" -) - -func TestCheckMembership_Admin(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/api/secrets/github", nil) - r = r.WithContext( - request.WithUser(noContext, mockUserAdmin), - ) - - router := chi.NewRouter() - router.Route("/api/secrets/{namespace}", func(router chi.Router) { - router.Use(CheckMembership(nil, true)) - router.Get("/", func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusTeapot) - }) - }) - - router.ServeHTTP(w, r) - - if got, want := w.Code, http.StatusTeapot; got != want { - t.Errorf("Want status code %d, got %d", want, got) - } -} - -func TestCheckMembership_NilUser_Unauthorized(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/api/secrets/github", nil) - - router := chi.NewRouter() - router.Route("/api/secrets/{namespace}", func(router chi.Router) { - router.Use(CheckMembership(nil, true)) - router.Get("/", func(w http.ResponseWriter, r *http.Request) { - t.Errorf("Must not invoke next handler in middleware chain") - }) - }) - - router.ServeHTTP(w, r) - - if got, want := w.Code, http.StatusUnauthorized; got != want { - t.Errorf("Want status code %d, got %d", want, got) - } -} - -func TestCheckMembership_AuthorizeRead(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/api/secrets/github", nil) - r = r.WithContext( - request.WithUser(noContext, mockUser), - ) - - mockOrgService := mock.NewMockOrganizationService(controller) - mockOrgService.EXPECT().Membership(gomock.Any(), gomock.Any(), "github").Return(true, false, nil).Times(1) - - router := chi.NewRouter() - router.Route("/api/secrets/{namespace}", func(router chi.Router) { - router.Use(CheckMembership(mockOrgService, false)) - router.Get("/", func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusTeapot) - }) - }) - - router.ServeHTTP(w, r) - - if got, want := w.Code, http.StatusTeapot; got != want { - t.Errorf("Want status code %d, got %d", want, got) - } -} - -func TestCheckMembership_AuthorizeAdmin(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/api/secrets/github", nil) - r = r.WithContext( - request.WithUser(noContext, mockUser), - ) - - mockOrgService := mock.NewMockOrganizationService(controller) - mockOrgService.EXPECT().Membership(gomock.Any(), gomock.Any(), "github").Return(true, true, nil).Times(1) - - router := chi.NewRouter() - router.Route("/api/secrets/{namespace}", func(router chi.Router) { - router.Use(CheckMembership(mockOrgService, true)) - router.Get("/", func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusTeapot) - }) - }) - - router.ServeHTTP(w, r) - - if got, want := w.Code, http.StatusTeapot; got != want { - t.Errorf("Want status code %d, got %d", want, got) - } -} - -func TestCheckMembership_Unauthorized_Admin(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/api/secrets/github", nil) - r = 
r.WithContext( - request.WithUser(noContext, mockUser), - ) - - mockOrgService := mock.NewMockOrganizationService(controller) - mockOrgService.EXPECT().Membership(gomock.Any(), gomock.Any(), "github").Return(true, false, nil).Times(1) - - router := chi.NewRouter() - router.Route("/api/secrets/{namespace}", func(router chi.Router) { - router.Use(CheckMembership(mockOrgService, true)) - router.Get("/", func(w http.ResponseWriter, r *http.Request) { - t.Errorf("Must not invoke next handler in middleware chain") - }) - }) - - router.ServeHTTP(w, r) - - if got, want := w.Code, http.StatusUnauthorized; got != want { - t.Errorf("Want status code %d, got %d", want, got) - } -} - -func TestCheckMembership_Unauthorized_Read(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/api/secrets/github", nil) - r = r.WithContext( - request.WithUser(noContext, mockUser), - ) - - mockOrgService := mock.NewMockOrganizationService(controller) - mockOrgService.EXPECT().Membership(gomock.Any(), gomock.Any(), "github").Return(false, false, nil).Times(1) - - router := chi.NewRouter() - router.Route("/api/secrets/{namespace}", func(router chi.Router) { - router.Use(CheckMembership(mockOrgService, false)) - router.Get("/", func(w http.ResponseWriter, r *http.Request) { - t.Errorf("Must not invoke next handler in middleware chain") - }) - }) - - router.ServeHTTP(w, r) - - if got, want := w.Code, http.StatusUnauthorized; got != want { - t.Errorf("Want status code %d, got %d", want, got) - } -} - -func TestCheckMembership_Unauthorized_Error(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/api/secrets/github", nil) - r = r.WithContext( - request.WithUser(noContext, mockUser), - ) - - mockOrgService := mock.NewMockOrganizationService(controller) - mockOrgService.EXPECT().Membership(gomock.Any(), gomock.Any(), "github").Return(true, true, errors.New("")).Times(1) - - router := chi.NewRouter() - router.Route("/api/secrets/{namespace}", func(router chi.Router) { - router.Use(CheckMembership(mockOrgService, false)) - router.Get("/", func(w http.ResponseWriter, r *http.Request) { - t.Errorf("Must not invoke next handler in middleware chain") - }) - }) - - router.ServeHTTP(w, r) - - if got, want := w.Code, http.StatusUnauthorized; got != want { - t.Errorf("Want status code %d, got %d", want, got) - } -} diff --git a/handler/api/acl/repo.go b/handler/api/acl/repo.go deleted file mode 100644 index 1faa14c0fe..0000000000 --- a/handler/api/acl/repo.go +++ /dev/null @@ -1,136 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package acl - -import ( - "net/http" - "time" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/handler/api/request" - "github.com/drone/drone/logger" - - "github.com/go-chi/chi" - "github.com/sirupsen/logrus" -) - -// InjectRepository returns an http.Handler middleware that injects -// the repository and repository permissions into the context. -func InjectRepository( - repoz core.RepositoryService, - repos core.RepositoryStore, - perms core.PermStore, -) func(http.Handler) http.Handler { - return func(next http.Handler) http.Handler { - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - var ( - ctx = r.Context() - owner = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - ) - - log := logger.FromRequest(r).WithFields( - logrus.Fields{ - "namespace": owner, - "name": name, - }, - ) - - // the user is stored in the context and is - // provided by a an ancestor middleware in the - // chain. - user, sessionExists := request.UserFrom(ctx) - - repo, err := repos.FindName(ctx, owner, name) - if err != nil { - if sessionExists { - render.NotFound(w, errors.ErrNotFound) - } else { - render.Unauthorized(w, errors.ErrUnauthorized) - } - log.WithError(err).Debugln("api: repository not found") - return - } - - // the repository is stored in the request context - // and can be accessed by subsequent handlers in the - // request chain. - ctx = request.WithRepo(ctx, repo) - - // if the user does not exist in the request context, - // this is a guest session, and there are no repository - // permissions to lookup. - if !sessionExists { - next.ServeHTTP(w, r.WithContext(ctx)) - return - } - - // else get the cached permissions from the database - // for the user and repository. - perm, err := perms.Find(ctx, repo.UID, user.ID) - if err != nil { - // if the permissions are not found we forward - // the request to the next handler in the chain - // with no permissions in the context. - // - // It is the responsibility to downstream - // middleware and handlers to decide if the - // request should be rejected. - next.ServeHTTP(w, r.WithContext(ctx)) - return - } - - log = log.WithFields( - logrus.Fields{ - "read": perm.Read, - "write": perm.Write, - "admin": perm.Admin, - }, - ) - - // because the permissions are synced with the remote - // system (e.g. github) they may be stale. If the permissions - // are stale they are refreshed below. - if perm.Synced == 0 || time.Unix(perm.Synced, 0).Add(time.Hour).Before(time.Now()) { - log.Debugln("api: sync repository permissions") - - permv, err := repoz.FindPerm(ctx, user, repo.Slug) - if err != nil { - render.NotFound(w, errors.ErrNotFound) - log.WithError(err). - Warnln("api: cannot sync repository permissions") - return - } - - perm.Synced = time.Now().Unix() - perm.Read = permv.Read - perm.Write = permv.Write - perm.Admin = permv.Admin - - err = perms.Update(ctx, perm) - if err != nil { - log.WithError(err).Debugln("api: cannot cache repository permissions") - } else { - log.Debugln("api: repository permissions synchronized") - } - } - - ctx = request.WithPerm(ctx, perm) - next.ServeHTTP(w, r.WithContext(ctx)) - }) - } -} diff --git a/handler/api/acl/repo_test.go b/handler/api/acl/repo_test.go deleted file mode 100644 index 9959c8c258..0000000000 --- a/handler/api/acl/repo_test.go +++ /dev/null @@ -1,205 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. 
-// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package acl - -import ( - "context" - "database/sql" - "net/http" - "net/http/httptest" - "testing" - "time" - - "github.com/drone/drone/handler/api/request" - "github.com/drone/drone/mock" - "github.com/drone/drone/core" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" -) - -// this unit test ensures that the http request returns a -// 401 unauthorized if the session does not exist, and the -// repository is not found. -func TestInjectRepository_RepoNotFound_Guest(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), "octocat", "hello-world").Return(nil, sql.ErrNoRows) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(r.Context(), chi.RouteCtxKey, c), - ) - - next := http.HandlerFunc(func(http.ResponseWriter, *http.Request) { - t.Fail() - }) - - InjectRepository(nil, repos, nil)(next).ServeHTTP(w, r) - if got, want := w.Code, http.StatusUnauthorized; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } -} - -// this unit test ensures that the http request returns a -// 404 not found if the session does exist, but the -// repository is not found. -func TestInjectRepository_RepoNotFound_User(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), "octocat", "hello-world").Return(nil, sql.ErrNoRows) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue( - request.WithUser(r.Context(), &core.User{}), - chi.RouteCtxKey, c), - ) - - next := http.HandlerFunc(func(http.ResponseWriter, *http.Request) { - t.Fail() - }) - - InjectRepository(nil, repos, nil)(next).ServeHTTP(w, r) - if got, want := w.Code, 404; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } -} - -// this unit test ensures that the middleware function -// invokes the next handler in the chain if the repository -// is found, but no user session exists. -func TestInjectRepository_RepoFound_Guest(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), "octocat", "hello-world").Return(&core.Repository{}, nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue( - r.Context(), - chi.RouteCtxKey, c), - ) - - invoked := false - next := http.HandlerFunc(func(http.ResponseWriter, *http.Request) { - invoked = true - }) - - InjectRepository(nil, repos, nil)(next).ServeHTTP(w, r) - if !invoked { - t.Errorf("Expect middleware invoked") - } -} - -// this unit test ensures that the middleware function -// invokes the next handler and stores the permissions -// in the context if found. 
-func TestInjectRepository_PermsFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{ID: 1} - mockRepo := &core.Repository{UID: "1"} - mockPerm := &core.Perm{Synced: time.Now().Unix()} - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), "octocat", "hello-world").Return(mockRepo, nil) - - perms := mock.NewMockPermStore(controller) - perms.EXPECT().Find(gomock.Any(), mockRepo.UID, mockUser.ID).Return(mockPerm, nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue( - request.WithUser(r.Context(), mockUser), - chi.RouteCtxKey, c), - ) - - invoked := false - next := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - invoked = true - _, ok := request.PermFrom(r.Context()) - if !ok { - t.Errorf("Expect perm from context") - } - }) - - InjectRepository(nil, repos, perms)(next).ServeHTTP(w, r) - if !invoked { - t.Errorf("Expect middleware invoked") - } -} - -// this unit test ensures that the middleware function -// invokes the next handler even if the permissions are -// not found. It is the responsibility to downstream -// middleware and handlers to decide if the request -// should be rejected. -func TestInjectRepository_PermsNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{ID: 1} - mockRepo := &core.Repository{UID: "1"} - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), "octocat", "hello-world").Return(mockRepo, nil) - - perms := mock.NewMockPermStore(controller) - perms.EXPECT().Find(gomock.Any(), mockRepo.UID, mockUser.ID).Return(nil, sql.ErrNoRows) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue( - request.WithUser(r.Context(), mockUser), - chi.RouteCtxKey, c), - ) - - invoked := false - next := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - invoked = true - _, ok := request.PermFrom(r.Context()) - if ok { - t.Errorf("Expect nil perm from context") - } - }) - - InjectRepository(nil, repos, perms)(next).ServeHTTP(w, r) - if !invoked { - t.Errorf("Expect middleware invoked") - } -} diff --git a/handler/api/api.go b/handler/api/api.go deleted file mode 100644 index a2c7d800ca..0000000000 --- a/handler/api/api.go +++ /dev/null @@ -1,401 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package api - -import ( - "net/http" - "os" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/acl" - "github.com/drone/drone/handler/api/auth" - "github.com/drone/drone/handler/api/badge" - globalbuilds "github.com/drone/drone/handler/api/builds" - "github.com/drone/drone/handler/api/card" - "github.com/drone/drone/handler/api/ccmenu" - "github.com/drone/drone/handler/api/events" - "github.com/drone/drone/handler/api/queue" - "github.com/drone/drone/handler/api/repos" - "github.com/drone/drone/handler/api/repos/builds" - "github.com/drone/drone/handler/api/repos/builds/branches" - "github.com/drone/drone/handler/api/repos/builds/deploys" - "github.com/drone/drone/handler/api/repos/builds/logs" - "github.com/drone/drone/handler/api/repos/builds/pulls" - "github.com/drone/drone/handler/api/repos/builds/stages" - "github.com/drone/drone/handler/api/repos/collabs" - "github.com/drone/drone/handler/api/repos/crons" - "github.com/drone/drone/handler/api/repos/encrypt" - "github.com/drone/drone/handler/api/repos/secrets" - "github.com/drone/drone/handler/api/repos/sign" - globalsecrets "github.com/drone/drone/handler/api/secrets" - "github.com/drone/drone/handler/api/system" - "github.com/drone/drone/handler/api/template" - "github.com/drone/drone/handler/api/user" - "github.com/drone/drone/handler/api/user/remote" - "github.com/drone/drone/handler/api/users" - "github.com/drone/drone/logger" - - "github.com/go-chi/chi" - "github.com/go-chi/chi/middleware" - "github.com/go-chi/cors" -) - -var corsOpts = cors.Options{ - AllowedOrigins: []string{"*"}, - AllowedMethods: []string{"GET", "POST", "PATCH", "PUT", "DELETE", "OPTIONS"}, - AllowedHeaders: []string{"Accept", "Authorization", "Content-Type", "X-CSRF-Token"}, - ExposedHeaders: []string{"Link"}, - AllowCredentials: true, - MaxAge: 300, -} - -func New( - builds core.BuildStore, - commits core.CommitService, - card core.CardStore, - cron core.CronStore, - events core.Pubsub, - globals core.GlobalSecretStore, - hooks core.HookService, - logs core.LogStore, - license *core.License, - licenses core.LicenseService, - orgs core.OrganizationService, - perms core.PermStore, - repos core.RepositoryStore, - repoz core.RepositoryService, - scheduler core.Scheduler, - secrets core.SecretStore, - stages core.StageStore, - steps core.StepStore, - status core.StatusService, - session core.Session, - stream core.LogStream, - syncer core.Syncer, - system *core.System, - template core.TemplateStore, - transferer core.Transferer, - triggerer core.Triggerer, - users core.UserStore, - userz core.UserService, - webhook core.WebhookSender, -) Server { - return Server{ - Builds: builds, - Card: card, - Cron: cron, - Commits: commits, - Events: events, - Globals: globals, - Hooks: hooks, - Logs: logs, - License: license, - Licenses: licenses, - Orgs: orgs, - Perms: perms, - Repos: repos, - Repoz: repoz, - Scheduler: scheduler, - Secrets: secrets, - Stages: stages, - Steps: steps, - Status: status, - Session: session, - Stream: stream, - Syncer: syncer, - System: system, - Template: template, - Transferer: transferer, - Triggerer: triggerer, - Users: users, - Userz: userz, - Webhook: webhook, - } -} - -// Server is a http.Handler which exposes drone functionality over HTTP. 
-type Server struct { - Builds core.BuildStore - Card core.CardStore - Cron core.CronStore - Commits core.CommitService - Events core.Pubsub - Globals core.GlobalSecretStore - Hooks core.HookService - Logs core.LogStore - License *core.License - Licenses core.LicenseService - Orgs core.OrganizationService - Perms core.PermStore - Repos core.RepositoryStore - Repoz core.RepositoryService - Scheduler core.Scheduler - Secrets core.SecretStore - Stages core.StageStore - Steps core.StepStore - Status core.StatusService - Session core.Session - Stream core.LogStream - Syncer core.Syncer - System *core.System - Template core.TemplateStore - Transferer core.Transferer - Triggerer core.Triggerer - Users core.UserStore - Userz core.UserService - Webhook core.WebhookSender - Private bool -} - -// Handler returns an http.Handler -func (s Server) Handler() http.Handler { - r := chi.NewRouter() - r.Use(middleware.Recoverer) - r.Use(middleware.NoCache) - r.Use(logger.Middleware) - r.Use(auth.HandleAuthentication(s.Session)) - - cors := cors.New(corsOpts) - r.Use(cors.Handler) - - r.Route("/repos", func(r chi.Router) { - // temporary workaround to enable private mode - // for the drone server. - if os.Getenv("DRONE_SERVER_PRIVATE_MODE") == "true" { - r.Use(acl.AuthorizeUser) - } - - r.With( - acl.AuthorizeAdmin, - ).Get("/", repos.HandleAll(s.Repos)) - - r.Route("/{owner}/{name}", func(r chi.Router) { - r.Use(acl.InjectRepository(s.Repoz, s.Repos, s.Perms)) - r.Use(acl.CheckReadAccess()) - - r.Get("/", repos.HandleFind()) - r.With( - acl.CheckAdminAccess(), - ).Patch("/", repos.HandleUpdate(s.Repos)) - r.With( - acl.CheckAdminAccess(), - ).Post("/", repos.HandleEnable(s.Hooks, s.Repos, s.Webhook)) - r.With( - acl.CheckAdminAccess(), - ).Delete("/", repos.HandleDisable(s.Repos, s.Webhook)) - r.With( - acl.CheckAdminAccess(), - ).Post("/chown", repos.HandleChown(s.Repos)) - r.With( - acl.CheckAdminAccess(), - ).Post("/repair", repos.HandleRepair(s.Hooks, s.Repoz, s.Repos, s.Users, s.System.Link)) - - r.Route("/builds", func(r chi.Router) { - r.Get("/", builds.HandleList(s.Repos, s.Builds)) - r.With(acl.CheckWriteAccess()).Post("/", builds.HandleCreate(s.Users, s.Repos, s.Commits, s.Triggerer)) - - r.Get("/branches", branches.HandleList(s.Repos, s.Builds)) - r.With(acl.CheckWriteAccess()).Delete("/branches/*", branches.HandleDelete(s.Repos, s.Builds)) - - r.Get("/pulls", pulls.HandleList(s.Repos, s.Builds)) - r.With(acl.CheckWriteAccess()).Delete("/pulls/{pull}", pulls.HandleDelete(s.Repos, s.Builds)) - - r.Get("/deployments", deploys.HandleList(s.Repos, s.Builds)) - r.With(acl.CheckWriteAccess()).Delete("/deployments/*", deploys.HandleDelete(s.Repos, s.Builds)) - - r.Get("/latest", builds.HandleLast(s.Repos, s.Builds, s.Stages)) - r.Get("/{number}", builds.HandleFind(s.Repos, s.Builds, s.Stages)) - r.Get("/{number}/logs/{stage}/{step}", logs.HandleFind(s.Repos, s.Builds, s.Stages, s.Steps, s.Logs)) - - r.With( - acl.CheckWriteAccess(), - ).Post("/{number}", builds.HandleRetry(s.Repos, s.Builds, s.Triggerer)) - - r.With( - acl.CheckWriteAccess(), - ).Delete("/{number}", builds.HandleCancel(s.Users, s.Repos, s.Builds, s.Stages, s.Steps, s.Status, s.Scheduler, s.Webhook)) - - r.With( - acl.CheckWriteAccess(), - ).Post("/{number}/promote", builds.HandlePromote(s.Repos, s.Builds, s.Triggerer)) - - r.With( - acl.CheckWriteAccess(), - ).Post("/{number}/rollback", builds.HandleRollback(s.Repos, s.Builds, s.Triggerer)) - - r.With( - acl.CheckAdminAccess(), - ).Post("/{number}/decline/{stage}", 
stages.HandleDecline(s.Repos, s.Builds, s.Stages)) - - r.With( - acl.CheckAdminAccess(), - ).Post("/{number}/approve/{stage}", stages.HandleApprove(s.Repos, s.Builds, s.Stages, s.Scheduler)) - - r.With( - acl.CheckAdminAccess(), - ).Delete("/{number}/logs/{stage}/{step}", logs.HandleDelete(s.Repos, s.Builds, s.Stages, s.Steps, s.Logs)) - - r.With( - acl.CheckAdminAccess(), - ).Delete("/", builds.HandlePurge(s.Repos, s.Builds)) - }) - - r.Route("/secrets", func(r chi.Router) { - r.Use(acl.CheckWriteAccess()) - r.Get("/", secrets.HandleList(s.Repos, s.Secrets)) - r.Post("/", secrets.HandleCreate(s.Repos, s.Secrets)) - r.Get("/{secret}", secrets.HandleFind(s.Repos, s.Secrets)) - r.Patch("/{secret}", secrets.HandleUpdate(s.Repos, s.Secrets)) - r.Delete("/{secret}", secrets.HandleDelete(s.Repos, s.Secrets)) - }) - - r.Route("/sign", func(r chi.Router) { - r.Use(acl.CheckWriteAccess()) - r.Post("/", sign.HandleSign(s.Repos)) - }) - - r.Route("/encrypt", func(r chi.Router) { - r.Use(acl.CheckWriteAccess()) - r.Post("/", encrypt.Handler(s.Repos)) - r.Post("/secret", encrypt.Handler(s.Repos)) - }) - - r.Route("/cron", func(r chi.Router) { - r.Use(acl.CheckWriteAccess()) - r.Post("/", crons.HandleCreate(s.Repos, s.Cron)) - r.Get("/", crons.HandleList(s.Repos, s.Cron)) - r.Get("/{cron}", crons.HandleFind(s.Repos, s.Cron)) - r.Post("/{cron}", crons.HandleExec(s.Users, s.Repos, s.Cron, s.Commits, s.Triggerer)) - r.Patch("/{cron}", crons.HandleUpdate(s.Repos, s.Cron)) - r.Delete("/{cron}", crons.HandleDelete(s.Repos, s.Cron)) - }) - - r.Route("/collaborators", func(r chi.Router) { - r.Get("/", collabs.HandleList(s.Repos, s.Perms)) - r.Get("/{member}", collabs.HandleFind(s.Users, s.Repos, s.Perms)) - r.With( - acl.CheckAdminAccess(), - ).Delete("/{member}", collabs.HandleDelete(s.Users, s.Repos, s.Perms)) - }) - - r.Route("/cards", func(r chi.Router) { - r.Get("/{build}/{stage}/{step}", card.HandleFind(s.Builds, s.Card, s.Stages, s.Steps, s.Repos)) - r.With( - acl.CheckAdminAccess(), - ).Post("/{build}/{stage}/{step}", card.HandleCreate(s.Builds, s.Card, s.Stages, s.Steps, s.Repos)) - r.With( - acl.CheckAdminAccess(), - ).Delete("/{build}/{stage}/{step}", card.HandleDelete(s.Builds, s.Card, s.Stages, s.Steps, s.Repos)) - }) - }) - }) - - r.Route("/badges/{owner}/{name}", func(r chi.Router) { - r.Get("/status.svg", badge.Handler(s.Repos, s.Builds)) - r.With( - acl.InjectRepository(s.Repoz, s.Repos, s.Perms), - acl.CheckReadAccess(), - ).Get("/cc.xml", ccmenu.Handler(s.Repos, s.Builds, s.System.Link)) - }) - - r.Route("/queue", func(r chi.Router) { - r.Use(acl.AuthorizeAdmin) - r.Get("/", queue.HandleItems(s.Stages)) - r.Post("/", queue.HandleResume(s.Scheduler)) - r.Delete("/", queue.HandlePause(s.Scheduler)) - }) - - r.Route("/user", func(r chi.Router) { - r.Use(acl.AuthorizeUser) - r.Get("/", user.HandleFind()) - r.Patch("/", user.HandleUpdate(s.Users)) - r.Post("/token", user.HandleToken(s.Users)) - r.Get("/repos", user.HandleRepos(s.Repos)) - r.Post("/repos", user.HandleSync(s.Syncer, s.Repos)) - - // TODO(bradrydzewski) finalize the name for this endpoint. - r.Get("/builds", user.HandleRecent(s.Repos)) - r.Get("/builds/recent", user.HandleRecent(s.Repos)) - - // expose remote endpoints (e.g. 
to github) - r.Get("/remote/repos", remote.HandleRepos(s.Repoz)) - r.Get("/remote/repos/{owner}/{name}", remote.HandleRepo(s.Repoz)) - }) - - r.Route("/users", func(r chi.Router) { - r.Use(acl.AuthorizeAdmin) - r.Get("/", users.HandleList(s.Users)) - r.Post("/", users.HandleCreate(s.Users, s.Userz, s.Webhook)) - r.Get("/{user}", users.HandleFind(s.Users)) - r.Patch("/{user}", users.HandleUpdate(s.Users, s.Transferer)) - r.Post("/{user}/token/rotate", users.HandleTokenRotation(s.Users)) - r.Delete("/{user}", users.HandleDelete(s.Users, s.Transferer, s.Webhook)) - r.Get("/{user}/repos", users.HandleRepoList(s.Users, s.Repos)) - }) - - r.Route("/stream", func(r chi.Router) { - r.Get("/", events.HandleGlobal(s.Repos, s.Events)) - - r.Route("/{owner}/{name}", func(r chi.Router) { - r.Use(acl.InjectRepository(s.Repoz, s.Repos, s.Perms)) - r.Use(acl.CheckReadAccess()) - - r.Get("/", events.HandleEvents(s.Repos, s.Events)) - r.Get("/{number}/{stage}/{step}", events.HandleLogStream(s.Repos, s.Builds, s.Stages, s.Steps, s.Stream)) - }) - }) - - r.Route("/builds", func(r chi.Router) { - r.Use(acl.AuthorizeAdmin) - r.Get("/incomplete", globalbuilds.HandleIncomplete(s.Repos)) - r.Get("/incomplete/v2", globalbuilds.HandleRunningStatus(s.Repos)) - }) - - r.Route("/secrets", func(r chi.Router) { - r.With(acl.AuthorizeAdmin).Get("/", globalsecrets.HandleAll(s.Globals)) - r.With(acl.CheckMembership(s.Orgs, false)).Get("/{namespace}", globalsecrets.HandleList(s.Globals)) - r.With(acl.CheckMembership(s.Orgs, true)).Post("/{namespace}", globalsecrets.HandleCreate(s.Globals)) - r.With(acl.CheckMembership(s.Orgs, false)).Get("/{namespace}/{name}", globalsecrets.HandleFind(s.Globals)) - r.With(acl.CheckMembership(s.Orgs, true)).Post("/{namespace}/{name}", globalsecrets.HandleUpdate(s.Globals)) - r.With(acl.CheckMembership(s.Orgs, true)).Patch("/{namespace}/{name}", globalsecrets.HandleUpdate(s.Globals)) - r.With(acl.CheckMembership(s.Orgs, true)).Delete("/{namespace}/{name}", globalsecrets.HandleDelete(s.Globals)) - }) - - r.Route("/templates", func(r chi.Router) { - r.With(acl.CheckMembership(s.Orgs, false)).Get("/", template.HandleListAll(s.Template)) - r.With(acl.CheckMembership(s.Orgs, true)).Post("/{namespace}", template.HandleCreate(s.Template)) - r.With(acl.CheckMembership(s.Orgs, false)).Get("/{namespace}", template.HandleList(s.Template)) - r.With(acl.CheckMembership(s.Orgs, false)).Get("/{namespace}/{name}", template.HandleFind(s.Template)) - r.With(acl.CheckMembership(s.Orgs, true)).Put("/{namespace}/{name}", template.HandleUpdate(s.Template)) - r.With(acl.CheckMembership(s.Orgs, true)).Patch("/{namespace}/{name}", template.HandleUpdate(s.Template)) - r.With(acl.CheckMembership(s.Orgs, true)).Delete("/{namespace}/{name}", template.HandleDelete(s.Template)) - }) - - r.Route("/system", func(r chi.Router) { - r.Use(acl.AuthorizeAdmin) - // r.Get("/license", system.HandleLicense()) - // r.Get("/limits", system.HandleLimits()) - r.Get("/stats", system.HandleStats( - s.Builds, - s.Stages, - s.Users, - s.Repos, - s.Events, - s.Stream, - )) - }) - - return r -} diff --git a/handler/api/auth/auth.go b/handler/api/auth/auth.go deleted file mode 100644 index 68d25d1f91..0000000000 --- a/handler/api/auth/auth.go +++ /dev/null @@ -1,56 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package auth - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/request" - "github.com/drone/drone/logger" -) - -// HandleAuthentication returns an http.HandlerFunc middleware that authenticates -// the http.Request and errors if the account cannot be authenticated. -func HandleAuthentication(session core.Session) func(http.Handler) http.Handler { - return func(next http.Handler) http.Handler { - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - log := logger.FromContext(ctx) - user, err := session.Get(r) - - // this block of code checks the error message and user - // returned from the session, including some edge cases, - // to prevent a session from being falsely created. - if err != nil || user == nil || user.ID == 0 { - next.ServeHTTP(w, r) - log.Debugln("api: guest access") - return - } - - if user.Machine { - log = log.WithField("user.machine", user.Machine) - } - if user.Admin { - log = log.WithField("user.admin", user.Admin) - } - log = log.WithField("user.login", user.Login) - ctx = logger.WithContext(ctx, log) - next.ServeHTTP(w, r.WithContext( - request.WithUser(ctx, user), - )) - }) - } -} diff --git a/handler/api/auth/auth_test.go b/handler/api/auth/auth_test.go deleted file mode 100644 index 5efb62a350..0000000000 --- a/handler/api/auth/auth_test.go +++ /dev/null @@ -1,88 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package auth - -import ( - "database/sql" - "io/ioutil" - "net/http" - "net/http/httptest" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/request" - "github.com/drone/drone/mock" - "github.com/sirupsen/logrus" - - "github.com/golang/mock/gomock" -) - -func init() { - logrus.SetOutput(ioutil.Discard) -} - -func TestAuth(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{ - ID: 1, - Login: "octocat", - Admin: true, - Machine: true, - Hash: "$2a$04$rR2VvGjM9iqAAoyLSE4IrexAlxDbIS3M5YKtj9ANs7vraki0ybYJq 197XXbZablx0RPQ8", - } - - session := mock.NewMockSession(controller) - session.EXPECT().Get(gomock.Any()).Return(mockUser, nil) - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/?access_token=VA.197XXbZablx0RPQ8", nil) - - HandleAuthentication(session)( - http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - // use dummy status code to signal the next handler in - // the middleware chain was properly invoked. 
- w.WriteHeader(http.StatusTeapot) - - // verify the user was added to the request context - if user, _ := request.UserFrom(r.Context()); user != mockUser { - t.Errorf("Expect user in context") - } - }), - ).ServeHTTP(w, r) - - if got, want := w.Code, http.StatusTeapot; got != want { - t.Errorf("Want status code %d, got %d", want, got) - } -} - -func TestAuth_Guest(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - - session := mock.NewMockSession(controller) - session.EXPECT().Get(gomock.Any()).Return(nil, sql.ErrNoRows) - - HandleAuthentication(session)( - http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - // use dummy status code to signal the next handler in - // the middleware chain was properly invoked. - w.WriteHeader(http.StatusTeapot) - - // verify the user was added to the request context - if _, ok := request.UserFrom(r.Context()); ok { - t.Errorf("Expect guest mode, no user in context") - } - }), - ).ServeHTTP(w, r) - - if got, want := w.Code, http.StatusTeapot; got != want { - t.Errorf("Want status code %d, got %d", want, got) - } -} diff --git a/handler/api/badge/badge.go b/handler/api/badge/badge.go deleted file mode 100644 index 1152dd54dd..0000000000 --- a/handler/api/badge/badge.go +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package badge - -var ( - badgeSuccess = `buildbuildsuccesssuccess` - badgeFailure = `buildbuildfailurefailure` - badgeStarted = `buildbuildstartedstarted` - badgeError = `buildbuilderrorerror` - badgeNone = `buildbuildnonenone` -) diff --git a/handler/api/badge/status.go b/handler/api/badge/status.go deleted file mode 100644 index d90762e987..0000000000 --- a/handler/api/badge/status.go +++ /dev/null @@ -1,77 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package badge - -import ( - "fmt" - "io" - "net/http" - "time" - - "github.com/drone/drone/core" - - "github.com/go-chi/chi" -) - -// Handler returns an http.HandlerFunc that writes an svg status -// badge to the response. 
-func Handler( - repos core.RepositoryStore, - builds core.BuildStore, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - namespace := chi.URLParam(r, "owner") - name := chi.URLParam(r, "name") - ref := r.FormValue("ref") - branch := r.FormValue("branch") - if branch != "" { - ref = "refs/heads/" + branch - } - - // an SVG response is always served, even when error, so - // we can go ahead and set the content type appropriately. - w.Header().Set("Access-Control-Allow-Origin", "*") - w.Header().Set("Cache-Control", "no-cache, no-store, max-age=0, must-revalidate, value") - w.Header().Set("Expires", "Thu, 01 Jan 1970 00:00:00 GMT") - w.Header().Set("Last-Modified", time.Now().UTC().Format(http.TimeFormat)) - w.Header().Set("Content-Type", "image/svg+xml") - - repo, err := repos.FindName(r.Context(), namespace, name) - if err != nil { - io.WriteString(w, badgeNone) - return - } - - if ref == "" { - ref = fmt.Sprintf("refs/heads/%s", repo.Branch) - } - build, err := builds.FindRef(r.Context(), repo.ID, ref) - if err != nil { - io.WriteString(w, badgeNone) - return - } - - switch build.Status { - case core.StatusPending, core.StatusRunning, core.StatusBlocked: - io.WriteString(w, badgeStarted) - case core.StatusPassing: - io.WriteString(w, badgeSuccess) - case core.StatusError: - io.WriteString(w, badgeError) - default: - io.WriteString(w, badgeFailure) - } - } -} diff --git a/handler/api/badge/status_test.go b/handler/api/badge/status_test.go deleted file mode 100644 index d8da8b8221..0000000000 --- a/handler/api/badge/status_test.go +++ /dev/null @@ -1,232 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -// +build !oss - -package badge - -import ( - "context" - "database/sql" - "net/http/httptest" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/mock" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" -) - -var ( - mockRepo = &core.Repository{ - ID: 1, - Namespace: "octocat", - Name: "hello-world", - Branch: "master", - } - - mockBuild = &core.Build{ - ID: 1, - RepoID: 1, - Number: 1, - Status: core.StatusPassing, - Ref: "refs/heads/develop", - } - - mockBuildFailing = &core.Build{ - ID: 2, - RepoID: 1, - Number: 2, - Status: core.StatusFailing, - Ref: "refs/heads/master", - } - - mockBuildRunning = &core.Build{ - ID: 3, - RepoID: 1, - Number: 3, - Status: core.StatusRunning, - Ref: "refs/heads/master", - } - - mockBuildError = &core.Build{ - ID: 4, - RepoID: 1, - Number: 4, - Status: core.StatusError, - Ref: "refs/heads/master", - } -) - -func TestHandler(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), gomock.Any(), mockRepo.Name).Return(mockRepo, nil) - - builds := mock.NewMockBuildStore(controller) - builds.EXPECT().FindRef(gomock.Any(), mockRepo.ID, "refs/heads/develop").Return(mockBuild, nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/?ref=refs/heads/develop", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - Handler(repos, builds)(w, r) - if got, want := w.Code, 200; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - if got, want := w.Header().Get("Access-Control-Allow-Origin"), "*"; got != want { - t.Errorf("Want Access-Control-Allow-Origin %q, got %q", want, got) - } - if got, want := w.Header().Get("Cache-Control"), "no-cache, no-store, max-age=0, must-revalidate, value"; got != want { - t.Errorf("Want Cache-Control %q, got %q", want, got) - } - if got, want := w.Header().Get("Content-Type"), "image/svg+xml"; got != want { - t.Errorf("Want Access-Control-Allow-Origin %q, got %q", want, got) - } - if got, want := w.Body.String(), string(badgeSuccess); got != want { - t.Errorf("Want badge %q, got %q", got, want) - } -} - -func TestHandler_Failing(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), gomock.Any(), mockRepo.Name).Return(mockRepo, nil) - - builds := mock.NewMockBuildStore(controller) - builds.EXPECT().FindRef(gomock.Any(), mockRepo.ID, "refs/heads/master").Return(mockBuildFailing, nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - Handler(repos, builds)(w, r) - if got, want := w.Body.String(), string(badgeFailure); got != want { - t.Errorf("Want badge %q, got %q", got, want) - } -} - -func TestHandler_Error(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), gomock.Any(), mockRepo.Name).Return(mockRepo, nil) - - builds := mock.NewMockBuildStore(controller) - builds.EXPECT().FindRef(gomock.Any(), mockRepo.ID, 
"refs/heads/master").Return(mockBuildError, nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - Handler(repos, builds)(w, r) - if got, want := w.Body.String(), string(badgeError); got != want { - t.Errorf("Want badge %q, got %q", got, want) - } -} - -func TestHandler_Running(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), gomock.Any(), mockRepo.Name).Return(mockRepo, nil) - - builds := mock.NewMockBuildStore(controller) - builds.EXPECT().FindRef(gomock.Any(), mockRepo.ID, "refs/heads/master").Return(mockBuildRunning, nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - Handler(repos, builds)(w, r) - if got, want := w.Body.String(), string(badgeStarted); got != want { - t.Errorf("Want badge %q, got %q", got, want) - } -} - -func TestHandler_RepoNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), gomock.Any(), mockRepo.Name).Return(nil, sql.ErrNoRows) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - Handler(repos, nil)(w, r) - if got, want := w.Code, 200; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - if got, want := w.Body.String(), string(badgeNone); got != want { - t.Errorf("Want badge %q, got %q", got, want) - } -} - -func TestHandler_BuildNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), gomock.Any(), mockRepo.Name).Return(mockRepo, nil) - - builds := mock.NewMockBuildStore(controller) - builds.EXPECT().FindRef(gomock.Any(), mockRepo.ID, "refs/heads/master").Return(nil, sql.ErrNoRows) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - Handler(repos, builds)(w, r) - if got, want := w.Code, 200; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - if got, want := w.Body.String(), string(badgeNone); got != want { - t.Errorf("Want badge %q, got %q", got, want) - } -} diff --git a/handler/api/builds/builds.go b/handler/api/builds/builds.go deleted file mode 100644 index 52b25cb88b..0000000000 --- a/handler/api/builds/builds.go +++ /dev/null @@ -1,43 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -// +build !oss - -package builds - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/logger" -) - -// HandleIncomplete returns an http.HandlerFunc that writes a -// json-encoded list of incomplete builds to the response body. -func HandleIncomplete(repos core.RepositoryStore) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - list, err := repos.ListIncomplete(r.Context()) - if err != nil { - render.InternalError(w, err) - logger.FromRequest(r).WithError(err). - Debugln("api: cannot list incomplete builds") - } else { - render.JSON(w, list, 200) - } - } -} - -func HandleRunningStatus(repos core.RepositoryStore) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - list, err := repos.ListRunningStatus(r.Context()) - if err != nil { - render.InternalError(w, err) - logger.FromRequest(r).WithError(err). - Debugln("api: cannot list incomplete builds") - } else { - render.JSON(w, list, 200) - } - } -} diff --git a/handler/api/builds/builds_oss.go b/handler/api/builds/builds_oss.go deleted file mode 100644 index 1ab6f7fb50..0000000000 --- a/handler/api/builds/builds_oss.go +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build oss - -package builds - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" -) - -var notImplemented = func(w http.ResponseWriter, r *http.Request) { - render.NotImplemented(w, render.ErrNotImplemented) -} - -// HandleIncomplete returns a no-op http.HandlerFunc. -func HandleIncomplete(repos core.RepositoryStore) http.HandlerFunc { - return notImplemented -} - -func HandleRunningStatus(repos core.RepositoryStore) http.HandlerFunc { - return notImplemented -} diff --git a/handler/api/builds/builds_test.go b/handler/api/builds/builds_test.go deleted file mode 100644 index cbe91d1830..0000000000 --- a/handler/api/builds/builds_test.go +++ /dev/null @@ -1,75 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -// +build !oss - -package builds - -import ( - "encoding/json" - "io/ioutil" - "net/http/httptest" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/mock" - - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" - "github.com/sirupsen/logrus" -) - -func init() { - logrus.SetOutput(ioutil.Discard) -} - -func TestHandleBuilds(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - want := []*core.Repository{ - {ID: 1, Slug: "octocat/hello-world"}, - {ID: 2, Slug: "octocat/spoon-fork"}, - } - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().ListIncomplete(gomock.Any()).Return(want, nil) - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - - HandleIncomplete(repos)(w, r) - if got, want := w.Code, 200; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got := []*core.Repository{} - json.NewDecoder(w.Body).Decode(&got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleBuilds_Error(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().ListIncomplete(gomock.Any()).Return(nil, errors.ErrNotFound) - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - - HandleIncomplete(repos)(w, r) - if got, want := w.Code, 500; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := &errors.Error{}, errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/card/create.go b/handler/api/card/create.go deleted file mode 100644 index a59d09999c..0000000000 --- a/handler/api/card/create.go +++ /dev/null @@ -1,103 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package card - -import ( - "bytes" - "encoding/json" - "io/ioutil" - "net/http" - "strconv" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - - "github.com/go-chi/chi" -) - -// HandleCreate returns an http.HandlerFunc that processes http -// requests to create a new card. 
-func HandleCreate( - buildStore core.BuildStore, - cardStore core.CardStore, - stageStore core.StageStore, - stepStore core.StepStore, - repoStore core.RepositoryStore, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - namespace = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - ) - - buildNumber, err := strconv.ParseInt(chi.URLParam(r, "build"), 10, 64) - if err != nil { - render.BadRequest(w, err) - return - } - - stageNumber, err := strconv.Atoi(chi.URLParam(r, "stage")) - if err != nil { - render.BadRequest(w, err) - return - } - - stepNumber, err := strconv.Atoi(chi.URLParam(r, "step")) - if err != nil { - render.BadRequest(w, err) - return - } - - in := new(core.CardInput) - err = json.NewDecoder(r.Body).Decode(in) - if err != nil { - render.BadRequest(w, err) - return - } - - repo, err := repoStore.FindName(r.Context(), namespace, name) - if err != nil { - render.NotFound(w, err) - return - } - build, err := buildStore.FindNumber(r.Context(), repo.ID, buildNumber) - if err != nil { - render.NotFound(w, err) - return - } - stage, err := stageStore.FindNumber(r.Context(), build.ID, stageNumber) - if err != nil { - render.NotFound(w, err) - return - } - step, err := stepStore.FindNumber(r.Context(), stage.ID, stepNumber) - if err != nil { - render.NotFound(w, err) - return - } - - data := ioutil.NopCloser( - bytes.NewBuffer(in.Data), - ) - - /// create card - err = cardStore.Create(r.Context(), step.ID, data) - if err != nil { - render.InternalError(w, err) - return - } - - // add schema - step.Schema = in.Schema - err = stepStore.Update(r.Context(), step) - if err != nil { - render.InternalError(w, err) - return - } - render.JSON(w, step.ID, 200) - } -} diff --git a/handler/api/card/create_test.go b/handler/api/card/create_test.go deleted file mode 100644 index d657f7b636..0000000000 --- a/handler/api/card/create_test.go +++ /dev/null @@ -1,168 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -// +build !oss - -package card - -import ( - "bytes" - "context" - "encoding/json" - "net/http" - "net/http/httptest" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/mock" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -type card struct { - Id int64 `json:"id,omitempty"` - Data []byte `json:"card_data"` -} - -var ( - dummyRepo = &core.Repository{ - ID: 1, - UserID: 1, - Slug: "octocat/hello-world", - } - dummyBuild = &core.Build{ - ID: 1, - RepoID: 1, - Number: 1, - } - dummyStage = &core.Stage{ - ID: 1, - BuildID: 1, - } - dummyStep = &core.Step{ - ID: 1, - StageID: 1, - Schema: "https://myschema.com", - } - dummyCard = &card{ - Id: dummyStep.ID, - Data: []byte("{\"type\": \"AdaptiveCard\"}"), - } -) - -func TestHandleCreate(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), "octocat", "hello-world").Return(dummyRepo, nil) - - build := mock.NewMockBuildStore(controller) - build.EXPECT().FindNumber(gomock.Any(), dummyBuild.ID, gomock.Any()).Return(dummyBuild, nil) - - stage := mock.NewMockStageStore(controller) - stage.EXPECT().FindNumber(gomock.Any(), dummyBuild.ID, gomock.Any()).Return(dummyStage, nil) - - step := mock.NewMockStepStore(controller) - step.EXPECT().FindNumber(gomock.Any(), dummyStage.ID, gomock.Any()).Return(dummyStep, nil) - step.EXPECT().Update(gomock.Any(), gomock.Any()).Return(nil) - - card := mock.NewMockCardStore(controller) - card.EXPECT().Create(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("build", "1") - c.URLParams.Add("stage", "1") - c.URLParams.Add("step", "1") - - in := new(bytes.Buffer) - json.NewEncoder(in).Encode(dummyCard) - - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/", in) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleCreate(build, card, stage, step, repos).ServeHTTP(w, r) - if got, want := w.Code, http.StatusOK; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } -} - -func TestHandleCreate_BadRequest(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("build", "1") - c.URLParams.Add("stage", "1") - c.URLParams.Add("step", "1") - - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleCreate(nil, nil, nil, nil, nil).ServeHTTP(w, r) - got, want := &errors.Error{}, &errors.Error{Message: "EOF"} - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleCreate_CreateError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), "octocat", "hello-world").Return(dummyRepo, nil) - - build := mock.NewMockBuildStore(controller) - build.EXPECT().FindNumber(gomock.Any(), dummyBuild.ID, gomock.Any()).Return(dummyBuild, nil) - - stage := mock.NewMockStageStore(controller) - stage.EXPECT().FindNumber(gomock.Any(), dummyBuild.ID, 
gomock.Any()).Return(dummyStage, nil) - - card := mock.NewMockCardStore(controller) - card.EXPECT().Create(gomock.Any(), gomock.Any(), gomock.Any()).Return(errors.ErrNotFound) - - step := mock.NewMockStepStore(controller) - step.EXPECT().FindNumber(gomock.Any(), dummyStage.ID, gomock.Any()).Return(dummyStep, nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("build", "1") - c.URLParams.Add("stage", "1") - c.URLParams.Add("step", "1") - in := new(bytes.Buffer) - json.NewEncoder(in).Encode(dummyCard) - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", in) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleCreate(build, card, stage, step, repos).ServeHTTP(w, r) - if got, want := w.Code, http.StatusInternalServerError; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/card/delete.go b/handler/api/card/delete.go deleted file mode 100644 index a9fa8d1eaa..0000000000 --- a/handler/api/card/delete.go +++ /dev/null @@ -1,86 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package card - -import ( - "strconv" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - - "net/http" - - "github.com/go-chi/chi" -) - -// HandleDelete returns an http.HandlerFunc that processes http -// requests to delete a card. -func HandleDelete( - buildStore core.BuildStore, - cardStore core.CardStore, - stageStore core.StageStore, - stepStore core.StepStore, - repoStore core.RepositoryStore, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - namespace = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - ) - - buildNumber, err := strconv.ParseInt(chi.URLParam(r, "build"), 10, 64) - if err != nil { - render.BadRequest(w, err) - return - } - - stageNumber, err := strconv.Atoi(chi.URLParam(r, "stage")) - if err != nil { - render.BadRequest(w, err) - return - } - - stepNumber, err := strconv.Atoi(chi.URLParam(r, "step")) - if err != nil { - render.BadRequest(w, err) - return - } - - repo, err := repoStore.FindName(r.Context(), namespace, name) - if err != nil { - render.NotFound(w, err) - return - } - build, err := buildStore.FindNumber(r.Context(), repo.ID, buildNumber) - if err != nil { - render.NotFound(w, err) - return - } - stage, err := stageStore.FindNumber(r.Context(), build.ID, stageNumber) - if err != nil { - render.NotFound(w, err) - return - } - step, err := stepStore.FindNumber(r.Context(), stage.ID, stepNumber) - if err != nil { - render.NotFound(w, err) - return - } - - _, err = cardStore.Find(r.Context(), step.ID) - if err != nil { - render.NotFound(w, err) - return - } - err = cardStore.Delete(r.Context(), step.ID) - if err != nil { - render.InternalError(w, err) - return - } - w.WriteHeader(http.StatusNoContent) - } -} diff --git a/handler/api/card/delete_test.go b/handler/api/card/delete_test.go deleted file mode 100644 index 0644e2c220..0000000000 --- a/handler/api/card/delete_test.go +++ /dev/null @@ -1,156 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. 
-// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package card - -import ( - "bytes" - "context" - "encoding/json" - "io/ioutil" - "net/http" - "net/http/httptest" - "testing" - - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/mock" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -func TestHandleDelete(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), "octocat", "hello-world").Return(dummyRepo, nil) - - build := mock.NewMockBuildStore(controller) - build.EXPECT().FindNumber(gomock.Any(), dummyBuild.ID, gomock.Any()).Return(dummyBuild, nil) - - stage := mock.NewMockStageStore(controller) - stage.EXPECT().FindNumber(gomock.Any(), dummyBuild.ID, gomock.Any()).Return(dummyStage, nil) - - step := mock.NewMockStepStore(controller) - step.EXPECT().FindNumber(gomock.Any(), dummyStage.ID, gomock.Any()).Return(dummyStep, nil) - - card := mock.NewMockCardStore(controller) - card.EXPECT().Find(gomock.Any(), dummyStep.ID).Return(ioutil.NopCloser( - bytes.NewBuffer(dummyCard.Data), - ), nil) - card.EXPECT().Delete(gomock.Any(), dummyCard.Id).Return(nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("build", "1") - c.URLParams.Add("stage", "1") - c.URLParams.Add("step", "1") - - w := httptest.NewRecorder() - r := httptest.NewRequest("DELETE", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleDelete(build, card, stage, step, repos).ServeHTTP(w, r) - if got, want := w.Code, http.StatusNoContent; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } -} - -func TestHandleDelete_CardNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), "octocat", "hello-world").Return(dummyRepo, nil) - - build := mock.NewMockBuildStore(controller) - build.EXPECT().FindNumber(gomock.Any(), dummyBuild.ID, gomock.Any()).Return(dummyBuild, nil) - - stage := mock.NewMockStageStore(controller) - stage.EXPECT().FindNumber(gomock.Any(), dummyBuild.ID, gomock.Any()).Return(dummyStage, nil) - - step := mock.NewMockStepStore(controller) - step.EXPECT().FindNumber(gomock.Any(), dummyStage.ID, gomock.Any()).Return(dummyStep, nil) - - card := mock.NewMockCardStore(controller) - card.EXPECT().Find(gomock.Any(), dummyStep.ID).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("build", "1") - c.URLParams.Add("stage", "1") - c.URLParams.Add("step", "1") - - w := httptest.NewRecorder() - r := httptest.NewRequest("DELETE", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleDelete(build, card, stage, step, repos).ServeHTTP(w, r) - if got, want := w.Code, http.StatusNotFound; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleDelete_DeleteError(t *testing.T) { - controller := gomock.NewController(t) - defer 
controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), "octocat", "hello-world").Return(dummyRepo, nil) - - build := mock.NewMockBuildStore(controller) - build.EXPECT().FindNumber(gomock.Any(), dummyBuild.ID, gomock.Any()).Return(dummyBuild, nil) - - stage := mock.NewMockStageStore(controller) - stage.EXPECT().FindNumber(gomock.Any(), dummyBuild.ID, gomock.Any()).Return(dummyStage, nil) - - step := mock.NewMockStepStore(controller) - step.EXPECT().FindNumber(gomock.Any(), dummyStage.ID, gomock.Any()).Return(dummyStep, nil) - - card := mock.NewMockCardStore(controller) - card.EXPECT().Find(gomock.Any(), dummyStep.ID).Return(ioutil.NopCloser( - bytes.NewBuffer(dummyCard.Data), - ), nil) - card.EXPECT().Delete(gomock.Any(), dummyCard.Id).Return(errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("build", "1") - c.URLParams.Add("stage", "1") - c.URLParams.Add("step", "1") - - w := httptest.NewRecorder() - r := httptest.NewRequest("DELETE", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleDelete(build, card, stage, step, repos).ServeHTTP(w, r) - if got, want := w.Code, http.StatusInternalServerError; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/card/find.go b/handler/api/card/find.go deleted file mode 100644 index 852021909b..0000000000 --- a/handler/api/card/find.go +++ /dev/null @@ -1,82 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
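
The create, delete, and find handlers in this package each inline the same lookup: parse the build, stage, and step numbers from the chi route, then resolve repository, build, stage, and step in turn. Purely for reference, a sketch of how that chain could be factored into one helper. The helper name is invented, the store methods are the ones already used above, the file would need the same !oss build tag as its neighbours, and the 400-versus-404 distinction the real handlers make is left out:

package card

import (
	"net/http"
	"strconv"

	"github.com/drone/drone/core"
	"github.com/go-chi/chi"
)

// findStep walks the owner/name/build/stage/step route parameters down to
// the step record, returning the first parse or lookup error it hits.
func findStep(
	r *http.Request,
	repos core.RepositoryStore,
	builds core.BuildStore,
	stages core.StageStore,
	steps core.StepStore,
) (*core.Step, error) {
	buildNumber, err := strconv.ParseInt(chi.URLParam(r, "build"), 10, 64)
	if err != nil {
		return nil, err
	}
	stageNumber, err := strconv.Atoi(chi.URLParam(r, "stage"))
	if err != nil {
		return nil, err
	}
	stepNumber, err := strconv.Atoi(chi.URLParam(r, "step"))
	if err != nil {
		return nil, err
	}
	repo, err := repos.FindName(r.Context(), chi.URLParam(r, "owner"), chi.URLParam(r, "name"))
	if err != nil {
		return nil, err
	}
	build, err := builds.FindNumber(r.Context(), repo.ID, buildNumber)
	if err != nil {
		return nil, err
	}
	stage, err := stages.FindNumber(r.Context(), build.ID, stageNumber)
	if err != nil {
		return nil, err
	}
	return steps.FindNumber(r.Context(), stage.ID, stepNumber)
}
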
- -// +build !oss - -package card - -import ( - "io" - "net/http" - "strconv" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - - "github.com/go-chi/chi" -) - -// HandleFind returns an http.HandlerFunc that writes a json-encoded -func HandleFind( - buildStore core.BuildStore, - cardStore core.CardStore, - stageStore core.StageStore, - stepStore core.StepStore, - repoStore core.RepositoryStore, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - namespace = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - ) - - buildNumber, err := strconv.ParseInt(chi.URLParam(r, "build"), 10, 64) - if err != nil { - render.BadRequest(w, err) - return - } - - stageNumber, err := strconv.Atoi(chi.URLParam(r, "stage")) - if err != nil { - render.BadRequest(w, err) - return - } - - stepNumber, err := strconv.Atoi(chi.URLParam(r, "step")) - if err != nil { - render.BadRequest(w, err) - return - } - - repo, err := repoStore.FindName(r.Context(), namespace, name) - if err != nil { - render.NotFound(w, err) - return - } - build, err := buildStore.FindNumber(r.Context(), repo.ID, buildNumber) - if err != nil { - render.NotFound(w, err) - return - } - stage, err := stageStore.FindNumber(r.Context(), build.ID, stageNumber) - if err != nil { - render.NotFound(w, err) - return - } - step, err := stepStore.FindNumber(r.Context(), stage.ID, stepNumber) - if err != nil { - render.NotFound(w, err) - return - } - - cardData, err := cardStore.Find(r.Context(), step.ID) - if err != nil { - render.NotFound(w, err) - return - } - w.Header().Set("Content-Type", "application/json") - io.Copy(w, cardData) - cardData.Close() - } -} diff --git a/handler/api/card/find_test.go b/handler/api/card/find_test.go deleted file mode 100644 index 72451c0c54..0000000000 --- a/handler/api/card/find_test.go +++ /dev/null @@ -1,114 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
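
One detail the card tests above and below rely on: the test fixture declares Data as []byte under the card_data key, and encoding/json encodes []byte fields as base64 strings rather than as nested JSON, both when the tests build a request body and when a client posts a card. A standalone illustration, not taken from the patch:

package main

import (
	"encoding/json"
	"fmt"
)

type card struct {
	Id   int64  `json:"id,omitempty"`
	Data []byte `json:"card_data"`
}

func main() {
	in := card{Id: 1, Data: []byte(`{"type": "AdaptiveCard"}`)}

	// A []byte field marshals to a base64 string, not to embedded JSON.
	out, _ := json.Marshal(in)
	fmt.Println(string(out)) // {"id":1,"card_data":"eyJ0eXBlIjogIkFkYXB0aXZlQ2FyZCJ9"}

	// Unmarshal decodes the base64 back into the raw bytes.
	var back card
	_ = json.Unmarshal(out, &back)
	fmt.Println(string(back.Data)) // {"type": "AdaptiveCard"}
}
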
- -// +build !oss - -package card - -import ( - "bytes" - "context" - "encoding/json" - "io/ioutil" - "net/http" - "net/http/httptest" - "testing" - - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/mock" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -func TestHandleFind(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), "octocat", "hello-world").Return(dummyRepo, nil) - - build := mock.NewMockBuildStore(controller) - build.EXPECT().FindNumber(gomock.Any(), dummyBuild.ID, gomock.Any()).Return(dummyBuild, nil) - - stage := mock.NewMockStageStore(controller) - stage.EXPECT().FindNumber(gomock.Any(), dummyBuild.ID, gomock.Any()).Return(dummyStage, nil) - - step := mock.NewMockStepStore(controller) - step.EXPECT().FindNumber(gomock.Any(), dummyStage.ID, gomock.Any()).Return(dummyStep, nil) - - card := mock.NewMockCardStore(controller) - card.EXPECT().Find(gomock.Any(), dummyStep.ID).Return(ioutil.NopCloser( - bytes.NewBuffer(dummyCard.Data), - ), nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("build", "1") - c.URLParams.Add("stage", "1") - c.URLParams.Add("step", "1") - - in := new(bytes.Buffer) - json.NewEncoder(in).Encode(dummyCard) - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", in) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleFind(build, card, stage, step, repos).ServeHTTP(w, r) - if got, want := w.Code, http.StatusOK; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } -} - -func TestHandleFind_CardNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), "octocat", "hello-world").Return(dummyRepo, nil) - - build := mock.NewMockBuildStore(controller) - build.EXPECT().FindNumber(gomock.Any(), dummyBuild.ID, gomock.Any()).Return(dummyBuild, nil) - - stage := mock.NewMockStageStore(controller) - stage.EXPECT().FindNumber(gomock.Any(), dummyBuild.ID, gomock.Any()).Return(dummyStage, nil) - - step := mock.NewMockStepStore(controller) - step.EXPECT().FindNumber(gomock.Any(), dummyStage.ID, gomock.Any()).Return(dummyStep, nil) - - card := mock.NewMockCardStore(controller) - card.EXPECT().Find(gomock.Any(), dummyStep.ID).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("build", "1") - c.URLParams.Add("stage", "1") - c.URLParams.Add("step", "1") - - in := new(bytes.Buffer) - json.NewEncoder(in).Encode(dummyCard) - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", in) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleFind(build, card, stage, step, repos).ServeHTTP(w, r) - if got, want := w.Code, http.StatusNotFound; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/card/none.go b/handler/api/card/none.go deleted file mode 100644 index 5b843468c5..0000000000 --- a/handler/api/card/none.go +++ /dev/null @@ -1,76 +0,0 @@ 
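
The none.go file that follows is the OSS side of a pattern used throughout this patch (see also ccmenu_oss.go and queue/none.go further down): the full handler is compiled under the !oss constraint, and a stub answering 501 Not Implemented is compiled under oss, so both build flavours export the same symbols. A minimal sketch of that shape with invented package and function names, not taken from the patch; the deleted files use the older // +build form of the constraint, which newer Go toolchains also accept as //go:build:

feature.go (compiled for non-OSS builds):

// +build !oss

package feature

import "net/http"

// HandleThing is the full implementation.
func HandleThing() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Write([]byte("full behaviour\n"))
	}
}

feature_oss.go (compiled when the oss tag is set):

// +build oss

package feature

import "net/http"

// HandleThing keeps the same signature but reports 501 in OSS builds.
func HandleThing() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		http.Error(w, "Not Implemented", http.StatusNotImplemented)
	}
}
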
-// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build oss - -package card - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" -) - -var notImplemented = func(w http.ResponseWriter, r *http.Request) { - render.NotImplemented(w, render.ErrNotImplemented) -} - -func HandleCreate( - buildStore core.BuildStore, - cardStore core.CardStore, - stageStore core.StageStore, - stepStore core.StepStore, - repoStore core.RepositoryStore, -) http.HandlerFunc { - return notImplemented -} - -func HandleDelete( - buildStore core.BuildStore, - cardStore core.CardStore, - stageStore core.StageStore, - stepStore core.StepStore, - repoStore core.RepositoryStore, -) http.HandlerFunc { - return notImplemented -} - -func HandleFind( - buildStore core.BuildStore, - cardStore core.CardStore, - stageStore core.StageStore, - stepStore core.StepStore, - repoStore core.RepositoryStore, -) http.HandlerFunc { - return notImplemented -} - -func HandleFindAll( - buildStore core.BuildStore, - cardStore core.CardStore, - repoStore core.RepositoryStore, -) http.HandlerFunc { - return notImplemented -} - -func HandleFindData( - buildStore core.BuildStore, - cardStore core.CardStore, - stageStore core.StageStore, - stepStore core.StepStore, - repoStore core.RepositoryStore, -) http.HandlerFunc { - return notImplemented -} diff --git a/handler/api/ccmenu/cc.go b/handler/api/ccmenu/cc.go deleted file mode 100644 index d55fdd03a4..0000000000 --- a/handler/api/ccmenu/cc.go +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package ccmenu - -import ( - "encoding/xml" - "fmt" - "time" - - "github.com/drone/drone/core" -) - -type CCProjects struct { - XMLName xml.Name `xml:"Projects"` - Project *CCProject `xml:"Project"` -} - -type CCProject struct { - XMLName xml.Name `xml:"Project"` - Name string `xml:"name,attr"` - Activity string `xml:"activity,attr"` - LastBuildStatus string `xml:"lastBuildStatus,attr"` - LastBuildLabel string `xml:"lastBuildLabel,attr"` - LastBuildTime string `xml:"lastBuildTime,attr"` - WebURL string `xml:"webUrl,attr"` -} - -// New creates a new CCProject from the Repository and Build details. -func New(r *core.Repository, b *core.Build, link string) *CCProjects { - proj := &CCProject{ - Name: r.Slug, - WebURL: link, - Activity: "Building", - LastBuildStatus: "Unknown", - LastBuildLabel: "Unknown", - } - - // if the build is not currently running then - // we can return the latest build status. 
- if b.Status != core.StatusPending && - b.Status != core.StatusRunning && - b.Status != core.StatusBlocked { - proj.Activity = "Sleeping" - proj.LastBuildTime = time.Unix(b.Started, 0).Format(time.RFC3339) - proj.LastBuildLabel = fmt.Sprint(b.Number) - } - - // ensure the last build Status accepts a valid - // ccmenu enumeration - switch b.Status { - case core.StatusError, core.StatusKilled, core.StatusDeclined: - proj.LastBuildStatus = "Exception" - case core.StatusPassing: - proj.LastBuildStatus = "Success" - case core.StatusFailing: - proj.LastBuildStatus = "Failure" - } - - return &CCProjects{Project: proj} -} diff --git a/handler/api/ccmenu/cc_test.go b/handler/api/ccmenu/cc_test.go deleted file mode 100644 index fe06e849f1..0000000000 --- a/handler/api/ccmenu/cc_test.go +++ /dev/null @@ -1,146 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package ccmenu - -import ( - "encoding/xml" - "testing" - - "github.com/drone/drone/core" - "github.com/google/go-cmp/cmp" - "github.com/google/go-cmp/cmp/cmpopts" -) - -var ignore = cmpopts.IgnoreFields(CCProjects{}, "Project.LastBuildTime") - -func TestNew(t *testing.T) { - repo := &core.Repository{ - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - } - build := &core.Build{ - Number: 1, - Status: core.StatusRunning, - Started: 1524251054, - } - link := "https://drone.company.com" - - want := &CCProjects{ - XMLName: xml.Name{}, - Project: &CCProject{ - XMLName: xml.Name{}, - Name: "octocat/hello-world", - Activity: "Building", - LastBuildStatus: "Unknown", - LastBuildLabel: "Unknown", - LastBuildTime: "", - WebURL: "https://drone.company.com", - }, - } - - got := New(repo, build, link) - if diff := cmp.Diff(got, want); len(diff) > 0 { - t.Errorf(diff) - } -} - -func TestNew_Success(t *testing.T) { - repo := &core.Repository{ - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - } - build := &core.Build{ - Number: 1, - Status: core.StatusPassing, - Started: 1524251054, - } - link := "https://drone.company.com" - - want := &CCProjects{ - XMLName: xml.Name{}, - Project: &CCProject{ - XMLName: xml.Name{}, - Name: "octocat/hello-world", - Activity: "Sleeping", - LastBuildStatus: "Success", - LastBuildLabel: "1", - LastBuildTime: "2018-04-20T12:04:14-07:00", - WebURL: "https://drone.company.com", - }, - } - - got := New(repo, build, link) - if diff := cmp.Diff(got, want, ignore); len(diff) > 0 { - t.Errorf(diff) - } -} - -func TestNew_Failure(t *testing.T) { - repo := &core.Repository{ - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - } - build := &core.Build{ - Number: 1, - Status: core.StatusFailing, - Started: 1524251054, - } - link := "https://drone.company.com" - - want := &CCProjects{ - XMLName: xml.Name{}, - Project: &CCProject{ - XMLName: xml.Name{}, - Name: "octocat/hello-world", - Activity: "Sleeping", - LastBuildStatus: "Failure", - LastBuildLabel: "1", - LastBuildTime: "2018-04-20T12:04:14-07:00", - WebURL: "https://drone.company.com", - }, - } - - got := New(repo, build, link) - if diff := cmp.Diff(got, want, ignore); len(diff) > 0 { - t.Errorf(diff) - } -} - -func TestNew_Error(t *testing.T) { - repo := &core.Repository{ - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - } - build := &core.Build{ - Number: 1, - Status: core.StatusError, - Started: 1524251054, - } - link := 
"https://drone.company.com" - - want := &CCProjects{ - XMLName: xml.Name{}, - Project: &CCProject{ - XMLName: xml.Name{}, - Name: "octocat/hello-world", - Activity: "Sleeping", - LastBuildStatus: "Exception", - LastBuildLabel: "1", - LastBuildTime: "2018-04-20T12:04:14-07:00", - WebURL: "https://drone.company.com", - }, - } - - got := New(repo, build, link) - if diff := cmp.Diff(got, want, ignore); len(diff) > 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/ccmenu/ccmenu.go b/handler/api/ccmenu/ccmenu.go deleted file mode 100644 index 02ae548254..0000000000 --- a/handler/api/ccmenu/ccmenu.go +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package ccmenu - -import ( - "encoding/xml" - "fmt" - "net/http" - - "github.com/drone/drone/core" - - "github.com/go-chi/chi" -) - -// Handler returns an http.HandlerFunc that writes an svg status -// badge to the response. -func Handler( - repos core.RepositoryStore, - builds core.BuildStore, - link string, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - namespace := chi.URLParam(r, "owner") - name := chi.URLParam(r, "name") - - repo, err := repos.FindName(r.Context(), namespace, name) - if err != nil { - w.WriteHeader(404) - return - } - - build, err := builds.FindNumber(r.Context(), repo.ID, repo.Counter) - if err != nil { - w.WriteHeader(404) - return - } - - project := New(repo, build, - fmt.Sprintf("%s/%s/%s/%d", link, namespace, name, build.Number), - ) - - xml.NewEncoder(w).Encode(project) - } -} diff --git a/handler/api/ccmenu/ccmenu_oss.go b/handler/api/ccmenu/ccmenu_oss.go deleted file mode 100644 index 7beb3796dc..0000000000 --- a/handler/api/ccmenu/ccmenu_oss.go +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build oss - -package ccmenu - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" -) - -// Handler returns a no-op http.HandlerFunc. -func Handler(core.RepositoryStore, core.BuildStore, string) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - render.NotImplemented(w, render.ErrNotImplemented) - } -} diff --git a/handler/api/ccmenu/ccmenu_test.go b/handler/api/ccmenu/ccmenu_test.go deleted file mode 100644 index 8b13313640..0000000000 --- a/handler/api/ccmenu/ccmenu_test.go +++ /dev/null @@ -1,135 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -// +build !oss - -package ccmenu - -import ( - "context" - "database/sql" - "encoding/xml" - "net/http/httptest" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/mock" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -var ( - mockRepo = &core.Repository{ - ID: 1, - Namespace: "octocat", - Name: "hello-world", - Branch: "master", - Counter: 42, - } - - mockBuild = &core.Build{ - ID: 1, - RepoID: 1, - Number: 1, - Status: core.StatusPassing, - Ref: "refs/heads/develop", - } -) - -func TestHandler(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), gomock.Any(), mockRepo.Name).Return(mockRepo, nil) - - builds := mock.NewMockBuildStore(controller) - builds.EXPECT().FindNumber(gomock.Any(), mockRepo.ID, mockRepo.Counter).Return(mockBuild, nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/?ref=refs/heads/develop", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - Handler(repos, builds, "https://drone.company.com")(w, r) - if got, want := w.Code, 200; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := &CCProjects{}, &CCProjects{ - XMLName: xml.Name{ - Space: "", - Local: "Projects", - }, - Project: &CCProject{ - XMLName: xml.Name{Space: "", Local: "Project"}, - Name: "", - Activity: "Sleeping", - LastBuildStatus: "Success", - LastBuildLabel: "1", - LastBuildTime: "1969-12-31T16:00:00-08:00", - WebURL: "https://drone.company.com/octocat/hello-world/1", - }, - } - xml.NewDecoder(w.Body).Decode(&got) - if diff := cmp.Diff(got, want, ignore); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandler_RepoNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), gomock.Any(), mockRepo.Name).Return(nil, sql.ErrNoRows) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - Handler(repos, nil, "")(w, r) - if got, want := w.Code, 404; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } -} - -func TestHandler_BuildNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), gomock.Any(), mockRepo.Name).Return(mockRepo, nil) - - builds := mock.NewMockBuildStore(controller) - builds.EXPECT().FindNumber(gomock.Any(), mockRepo.ID, mockRepo.Counter).Return(nil, sql.ErrNoRows) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - Handler(repos, builds, "")(w, r) - if got, want := w.Code, 404; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } -} diff --git a/handler/api/errors/errors.go b/handler/api/errors/errors.go deleted file mode 100644 index e599daf774..0000000000 --- 
a/handler/api/errors/errors.go +++ /dev/null @@ -1,43 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package errors - -var ( - // ErrInvalidToken is returned when the api request token is invalid. - ErrInvalidToken = New("Invalid or missing token") - - // ErrUnauthorized is returned when the user is not authorized. - ErrUnauthorized = New("Unauthorized") - - // ErrForbidden is returned when user access is forbidden. - ErrForbidden = New("Forbidden") - - // ErrNotFound is returned when a resource is not found. - ErrNotFound = New("Not Found") -) - -// Error represents a json-encoded API error. -type Error struct { - Message string `json:"message"` -} - -func (e *Error) Error() string { - return e.Message -} - -// New returns a new error message. -func New(text string) error { - return &Error{Message: text} -} diff --git a/handler/api/errors/errors_test.go b/handler/api/errors/errors_test.go deleted file mode 100644 index 3b28ed7336..0000000000 --- a/handler/api/errors/errors_test.go +++ /dev/null @@ -1,14 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package errors - -import "testing" - -func TestError(t *testing.T) { - got, want := ErrNotFound.Error(), ErrNotFound.(*Error).Message - if got != want { - t.Errorf("Want error string %q, got %q", got, want) - } -} diff --git a/handler/api/events/build.go b/handler/api/events/build.go deleted file mode 100644 index 4930abff67..0000000000 --- a/handler/api/events/build.go +++ /dev/null @@ -1,116 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package events - -import ( - "context" - "io" - "net/http" - "time" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/logger" - "github.com/sirupsen/logrus" - - "github.com/go-chi/chi" -) - -// interval at which the client is pinged to prevent -// reverse proxy and load balancers from closing the -// connection. -var pingInterval = time.Second * 30 - -// implements a 24-hour timeout for connections. This -// should not be necessary, but is put in place just -// in case we encounter dangling connections. -var timeout = time.Hour * 24 - -// HandleEvents creates an http.HandlerFunc that streams builds events -// to the http.Response in an event stream format. 
-func HandleEvents( - repos core.RepositoryStore, - events core.Pubsub, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - namespace = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - ) - logger := logger.FromRequest(r).WithFields( - logrus.Fields{ - "namespace": namespace, - "name": name, - }, - ) - repo, err := repos.FindName(r.Context(), namespace, name) - if err != nil { - render.NotFound(w, err) - logger.WithError(err).Debugln("events: cannot find repository") - return - } - - h := w.Header() - h.Set("Content-Type", "text/event-stream") - h.Set("Cache-Control", "no-cache") - h.Set("Connection", "keep-alive") - h.Set("X-Accel-Buffering", "no") - - f, ok := w.(http.Flusher) - if !ok { - return - } - - io.WriteString(w, ": ping\n\n") - f.Flush() - - ctx, cancel := context.WithCancel(r.Context()) - defer cancel() - - events, errc := events.Subscribe(ctx) - logger.Debugln("events: stream opened") - - timeoutChan := time.After(24 * time.Hour) - L: - for { - select { - case <-ctx.Done(): - logger.Debugln("events: stream cancelled") - break L - case <-errc: - logger.Debugln("events: stream error") - break L - case <-timeoutChan: - logger.Debugln("events: stream timeout") - break L - case <-time.After(pingInterval): - io.WriteString(w, ": ping\n\n") - f.Flush() - case event := <-events: - if event.Repository == repo.Slug { - io.WriteString(w, "data: ") - w.Write(event.Data) - io.WriteString(w, "\n\n") - f.Flush() - } - } - } - - io.WriteString(w, "event: error\ndata: eof\n\n") - f.Flush() - - logger.Debugln("events: stream closed") - } -} diff --git a/handler/api/events/build_test.go b/handler/api/events/build_test.go deleted file mode 100644 index fdfd5b6849..0000000000 --- a/handler/api/events/build_test.go +++ /dev/null @@ -1,5 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package events diff --git a/handler/api/events/global.go b/handler/api/events/global.go deleted file mode 100644 index 25651bcf0a..0000000000 --- a/handler/api/events/global.go +++ /dev/null @@ -1,104 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package events - -import ( - "context" - "io" - "net/http" - "time" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/request" - "github.com/drone/drone/logger" -) - -// HandleGlobal creates an http.HandlerFunc that streams builds events -// to the http.Response in an event stream format. 
-func HandleGlobal( - repos core.RepositoryStore, - events core.Pubsub, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - logger := logger.FromRequest(r) - - h := w.Header() - h.Set("Content-Type", "text/event-stream") - h.Set("Cache-Control", "no-cache") - h.Set("Connection", "keep-alive") - h.Set("X-Accel-Buffering", "no") - - f, ok := w.(http.Flusher) - if !ok { - return - } - - access := map[string]struct{}{} - user, authenticated := request.UserFrom(r.Context()) - if authenticated { - list, _ := repos.List(r.Context(), user.ID) - for _, repo := range list { - access[repo.Slug] = struct{}{} - } - } - - io.WriteString(w, ": ping\n\n") - f.Flush() - - ctx, cancel := context.WithCancel(r.Context()) - defer cancel() - - events, errc := events.Subscribe(ctx) - logger.Debugln("events: stream opened") - - timeoutChan := time.After(24 * time.Hour) - L: - for { - select { - case <-ctx.Done(): - logger.Debugln("events: stream cancelled") - break L - case <-errc: - logger.Debugln("events: stream error") - break L - case <-timeoutChan: - logger.Debugln("events: stream timeout") - break L - case <-time.After(pingInterval): - io.WriteString(w, ": ping\n\n") - f.Flush() - case event := <-events: - _, authorized := access[event.Repository] - if event.Visibility == core.VisibilityPublic { - authorized = true - } - if event.Visibility == core.VisibilityInternal && authenticated { - authorized = true - } - if authorized { - io.WriteString(w, "data: ") - w.Write(event.Data) - io.WriteString(w, "\n\n") - f.Flush() - } - } - } - - io.WriteString(w, "event: error\ndata: eof\n\n") - f.Flush() - - logger.Debugln("events: stream closed") - } -} diff --git a/handler/api/events/logs.go b/handler/api/events/logs.go deleted file mode 100644 index 8f5fdf6776..0000000000 --- a/handler/api/events/logs.go +++ /dev/null @@ -1,128 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package events - -import ( - "context" - "encoding/json" - "io" - "net/http" - "strconv" - "time" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - - "github.com/go-chi/chi" -) - -// HandleLogStream creates an http.HandlerFunc that streams builds logs -// to the http.Response in an event stream format. 
-func HandleLogStream( - repos core.RepositoryStore, - builds core.BuildStore, - stages core.StageStore, - steps core.StepStore, - stream core.LogStream, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - namespace = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - ) - number, err := strconv.ParseInt(chi.URLParam(r, "number"), 10, 64) - if err != nil { - render.BadRequest(w, err) - return - } - stageNumber, err := strconv.Atoi(chi.URLParam(r, "stage")) - if err != nil { - render.BadRequest(w, err) - return - } - stepNumber, err := strconv.Atoi(chi.URLParam(r, "step")) - if err != nil { - render.BadRequest(w, err) - return - } - repo, err := repos.FindName(r.Context(), namespace, name) - if err != nil { - render.NotFound(w, err) - return - } - build, err := builds.FindNumber(r.Context(), repo.ID, number) - if err != nil { - render.NotFound(w, err) - return - } - stage, err := stages.FindNumber(r.Context(), build.ID, stageNumber) - if err != nil { - render.NotFound(w, err) - return - } - step, err := steps.FindNumber(r.Context(), stage.ID, stepNumber) - if err != nil { - render.NotFound(w, err) - return - } - - h := w.Header() - h.Set("Content-Type", "text/event-stream") - h.Set("Cache-Control", "no-cache") - h.Set("Connection", "keep-alive") - h.Set("X-Accel-Buffering", "no") - - f, ok := w.(http.Flusher) - if !ok { - return - } - - io.WriteString(w, ": ping\n\n") - f.Flush() - - ctx, cancel := context.WithCancel(r.Context()) - defer cancel() - - enc := json.NewEncoder(w) - linec, errc := stream.Tail(ctx, step.ID) - if errc == nil { - io.WriteString(w, "event: error\ndata: eof\n\n") - return - } - - timeoutChan := time.After(24 * time.Hour) - L: - for { - select { - case <-ctx.Done(): - break L - case <-errc: - break L - case <-timeoutChan: - break L - case <-time.After(pingInterval): - io.WriteString(w, ": ping\n\n") - case line := <-linec: - io.WriteString(w, "data: ") - enc.Encode(line) - io.WriteString(w, "\n\n") - f.Flush() - } - } - - io.WriteString(w, "event: error\ndata: eof\n\n") - f.Flush() - } -} diff --git a/handler/api/events/logs_test.go b/handler/api/events/logs_test.go deleted file mode 100644 index fa533a08db..0000000000 --- a/handler/api/events/logs_test.go +++ /dev/null @@ -1,15 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package events - -import ( - "io/ioutil" - - "github.com/sirupsen/logrus" -) - -func init() { - logrus.SetOutput(ioutil.Discard) -} diff --git a/handler/api/queue/items.go b/handler/api/queue/items.go deleted file mode 100644 index c4bd42892e..0000000000 --- a/handler/api/queue/items.go +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package queue - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/logger" -) - -// HandleItems returns an http.HandlerFunc that writes a -// json-encoded list of queue items to the response body. -func HandleItems(store core.StageStore) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - items, err := store.ListIncomplete(ctx) - if err != nil { - render.InternalError(w, err) - logger.FromRequest(r).WithError(err). 
- Warnln("api: cannot get running items") - return - } - render.JSON(w, items, 200) - } -} diff --git a/handler/api/queue/items_test.go b/handler/api/queue/items_test.go deleted file mode 100644 index 1ef484b925..0000000000 --- a/handler/api/queue/items_test.go +++ /dev/null @@ -1,17 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package queue - -import ( - "io/ioutil" - - "github.com/sirupsen/logrus" -) - -func init() { - logrus.SetOutput(ioutil.Discard) -} diff --git a/handler/api/queue/none.go b/handler/api/queue/none.go deleted file mode 100644 index 0c1f9c5f87..0000000000 --- a/handler/api/queue/none.go +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build oss - -package queue - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" -) - -var notImplemented = func(w http.ResponseWriter, r *http.Request) { - render.NotImplemented(w, render.ErrNotImplemented) -} - -func HandleItems(store core.StageStore) http.HandlerFunc { - return notImplemented -} - -func HandlePause(core.Scheduler) http.HandlerFunc { - return notImplemented -} - -func HandleResume(core.Scheduler) http.HandlerFunc { - return notImplemented -} diff --git a/handler/api/queue/pause.go b/handler/api/queue/pause.go deleted file mode 100644 index cefbf3d4cc..0000000000 --- a/handler/api/queue/pause.go +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package queue - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/logger" -) - -// HandlePause returns an http.HandlerFunc that processes -// an http.Request to pause the scheduler. -func HandlePause(scheduler core.Scheduler) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - err := scheduler.Pause(ctx) - if err != nil { - render.InternalError(w, err) - logger.FromRequest(r).WithError(err). - Errorln("api: cannot pause scheduler") - return - } - w.WriteHeader(http.StatusNoContent) - } -} diff --git a/handler/api/queue/pause_test.go b/handler/api/queue/pause_test.go deleted file mode 100644 index 6937390566..0000000000 --- a/handler/api/queue/pause_test.go +++ /dev/null @@ -1,7 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
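
Looking back at the events handlers removed earlier in this patch (HandleEvents, HandleGlobal, HandleLogStream): all three share one server-sent-events skeleton, namely SSE headers, an http.Flusher check, periodic comment pings so proxies do not close the connection, and a closing eof event. A self-contained sketch of that skeleton with a ticker standing in for the pubsub or log stream; names and the error response on a missing Flusher are illustrative:

package main

import (
	"fmt"
	"io"
	"net/http"
	"time"
)

// sseHandler streams ticker events in text/event-stream format, mirroring
// the shape of the deleted events handlers.
func sseHandler(w http.ResponseWriter, r *http.Request) {
	h := w.Header()
	h.Set("Content-Type", "text/event-stream")
	h.Set("Cache-Control", "no-cache")
	h.Set("Connection", "keep-alive")
	h.Set("X-Accel-Buffering", "no")

	f, ok := w.(http.Flusher)
	if !ok {
		http.Error(w, "streaming unsupported", http.StatusInternalServerError)
		return
	}

	events := time.NewTicker(2 * time.Second) // stand-in for the pubsub channel
	defer events.Stop()
	ping := time.NewTicker(30 * time.Second)
	defer ping.Stop()
	timeout := time.After(24 * time.Hour)

	io.WriteString(w, ": ping\n\n")
	f.Flush()

	for {
		select {
		case <-r.Context().Done():
			return
		case <-timeout:
			io.WriteString(w, "event: error\ndata: eof\n\n")
			f.Flush()
			return
		case <-ping.C:
			io.WriteString(w, ": ping\n\n")
			f.Flush()
		case t := <-events.C:
			fmt.Fprintf(w, "data: %s\n\n", t.Format(time.RFC3339))
			f.Flush()
		}
	}
}

func main() {
	http.HandleFunc("/events", sseHandler)
	http.ListenAndServe(":8080", nil)
}
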
- -// +build !oss - -package queue diff --git a/handler/api/queue/resume.go b/handler/api/queue/resume.go deleted file mode 100644 index 39a88e7b2c..0000000000 --- a/handler/api/queue/resume.go +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package queue - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/logger" -) - -// HandleResume returns an http.HandlerFunc that processes -// an http.Request to pause the scheduler. -func HandleResume(scheduler core.Scheduler) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - err := scheduler.Resume(ctx) - if err != nil { - render.InternalError(w, err) - logger.FromRequest(r).WithError(err). - Errorln("api: cannot resume scheduler") - return - } - w.WriteHeader(http.StatusNoContent) - } -} diff --git a/handler/api/queue/resume_test.go b/handler/api/queue/resume_test.go deleted file mode 100644 index 6937390566..0000000000 --- a/handler/api/queue/resume_test.go +++ /dev/null @@ -1,7 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package queue diff --git a/handler/api/render/render.go b/handler/api/render/render.go deleted file mode 100644 index 31469c8771..0000000000 --- a/handler/api/render/render.go +++ /dev/null @@ -1,122 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package render - -import ( - "encoding/json" - "fmt" - "net/http" - "os" - "strconv" - - "github.com/drone/drone/handler/api/errors" -) - -// indent the json-encoded API responses -var indent bool - -func init() { - indent, _ = strconv.ParseBool( - os.Getenv("HTTP_JSON_INDENT"), - ) -} - -var ( - // ErrInvalidToken is returned when the api request token is invalid. - ErrInvalidToken = errors.New("Invalid or missing token") - - // ErrUnauthorized is returned when the user is not authorized. - ErrUnauthorized = errors.New("Unauthorized") - - // ErrForbidden is returned when user access is forbidden. - ErrForbidden = errors.New("Forbidden") - - // ErrNotFound is returned when a resource is not found. - ErrNotFound = errors.New("Not Found") - - // ErrNotImplemented is returned when an endpoint is not implemented. - ErrNotImplemented = errors.New("Not Implemented") -) - -// ErrorCode writes the json-encoded error message to the response. -func ErrorCode(w http.ResponseWriter, err error, status int) { - JSON(w, &errors.Error{Message: err.Error()}, status) -} - -// InternalError writes the json-encoded error message to the response -// with a 500 internal server error. 
-func InternalError(w http.ResponseWriter, err error) { - ErrorCode(w, err, 500) -} - -// InternalErrorf writes the json-encoded error message to the response -// with a 500 internal server error. -func InternalErrorf(w http.ResponseWriter, format string, a ...interface{}) { - ErrorCode(w, fmt.Errorf(format, a...), 500) -} - -// NotImplemented writes the json-encoded error message to the -// response with a 501 not found status code. -func NotImplemented(w http.ResponseWriter, err error) { - ErrorCode(w, err, 501) -} - -// NotFound writes the json-encoded error message to the response -// with a 404 not found status code. -func NotFound(w http.ResponseWriter, err error) { - ErrorCode(w, err, 404) -} - -// NotFoundf writes the json-encoded error message to the response -// with a 404 not found status code. -func NotFoundf(w http.ResponseWriter, format string, a ...interface{}) { - ErrorCode(w, fmt.Errorf(format, a...), 404) -} - -// Unauthorized writes the json-encoded error message to the response -// with a 401 unauthorized status code. -func Unauthorized(w http.ResponseWriter, err error) { - ErrorCode(w, err, 401) -} - -// Forbidden writes the json-encoded error message to the response -// with a 403 forbidden status code. -func Forbidden(w http.ResponseWriter, err error) { - ErrorCode(w, err, 403) -} - -// BadRequest writes the json-encoded error message to the response -// with a 400 bad request status code. -func BadRequest(w http.ResponseWriter, err error) { - ErrorCode(w, err, 400) -} - -// BadRequestf writes the json-encoded error message to the response -// with a 400 bad request status code. -func BadRequestf(w http.ResponseWriter, format string, a ...interface{}) { - ErrorCode(w, fmt.Errorf(format, a...), 400) -} - -// JSON writes the json-encoded error message to the response -// with a 400 bad request status code. -func JSON(w http.ResponseWriter, v interface{}, status int) { - w.Header().Set("Content-Type", "application/json") - w.WriteHeader(status) - enc := json.NewEncoder(w) - if indent { - enc.SetIndent("", " ") - } - enc.Encode(v) -} diff --git a/handler/api/render/render_test.go b/handler/api/render/render_test.go deleted file mode 100644 index dd1e08296c..0000000000 --- a/handler/api/render/render_test.go +++ /dev/null @@ -1,207 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
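
The render package above drives pretty-printing entirely from the HTTP_JSON_INDENT environment variable read at init time, and the tests that follow flip the same package-level flag directly. A standalone illustration of that toggle, not part of the patch:

package main

import (
	"encoding/json"
	"os"
	"strconv"
)

func main() {
	// Mirrors the flag read by the deleted render package: pretty-print only
	// when HTTP_JSON_INDENT parses as a boolean true.
	indent, _ := strconv.ParseBool(os.Getenv("HTTP_JSON_INDENT"))

	enc := json.NewEncoder(os.Stdout)
	if indent {
		enc.SetIndent("", " ")
	}
	enc.Encode(map[string]string{"hello": "world"})
	// unset:              {"hello":"world"}
	// HTTP_JSON_INDENT=1: the same object spread across indented lines
}
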
- -package render - -import ( - "encoding/json" - "net/http" - "net/http/httptest" - "testing" - - "github.com/drone/drone/handler/api/errors" -) - -func TestWriteError(t *testing.T) { - w := httptest.NewRecorder() - - err := errors.New("pc load letter") - InternalError(w, err) - - if got, want := w.Code, 500; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - errjson := &errors.Error{} - json.NewDecoder(w.Body).Decode(errjson) - if got, want := errjson.Message, err.Error(); got != want { - t.Errorf("Want error message %s, got %s", want, got) - } -} - -func TestWriteErrorCode(t *testing.T) { - w := httptest.NewRecorder() - - err := errors.New("pc load letter") - ErrorCode(w, err, 418) - - if got, want := w.Code, 418; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - errjson := &errors.Error{} - json.NewDecoder(w.Body).Decode(errjson) - if got, want := errjson.Message, err.Error(); got != want { - t.Errorf("Want error message %s, got %s", want, got) - } -} - -func TestWriteNotFound(t *testing.T) { - w := httptest.NewRecorder() - - err := errors.New("pc load letter") - NotFound(w, err) - - if got, want := w.Code, 404; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - errjson := &errors.Error{} - json.NewDecoder(w.Body).Decode(errjson) - if got, want := errjson.Message, err.Error(); got != want { - t.Errorf("Want error message %s, got %s", want, got) - } -} - -func TestWriteNotFoundf(t *testing.T) { - w := httptest.NewRecorder() - - NotFoundf(w, "pc %s", "load letter") - if got, want := w.Code, 404; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - errjson := &errors.Error{} - json.NewDecoder(w.Body).Decode(errjson) - if got, want := errjson.Message, "pc load letter"; got != want { - t.Errorf("Want error message %s, got %s", want, got) - } -} - -func TestWriteInternalError(t *testing.T) { - w := httptest.NewRecorder() - - err := errors.New("pc load letter") - InternalError(w, err) - - if got, want := w.Code, 500; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - errjson := &errors.Error{} - json.NewDecoder(w.Body).Decode(errjson) - if got, want := errjson.Message, err.Error(); got != want { - t.Errorf("Want error message %s, got %s", want, got) - } -} - -func TestWriteInternalErrorf(t *testing.T) { - w := httptest.NewRecorder() - - InternalErrorf(w, "pc %s", "load letter") - if got, want := w.Code, 500; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - errjson := &errors.Error{} - json.NewDecoder(w.Body).Decode(errjson) - if got, want := errjson.Message, "pc load letter"; got != want { - t.Errorf("Want error message %s, got %s", want, got) - } -} - -func TestWriteUnauthorized(t *testing.T) { - w := httptest.NewRecorder() - - err := errors.New("pc load letter") - Unauthorized(w, err) - - if got, want := w.Code, 401; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - errjson := &errors.Error{} - json.NewDecoder(w.Body).Decode(errjson) - if got, want := errjson.Message, err.Error(); got != want { - t.Errorf("Want error message %s, got %s", want, got) - } -} - -func TestWriteForbidden(t *testing.T) { - w := httptest.NewRecorder() - - err := errors.New("pc load letter") - Forbidden(w, err) - - if got, want := w.Code, 403; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - errjson := &errors.Error{} - json.NewDecoder(w.Body).Decode(errjson) - if got, want := errjson.Message, err.Error(); 
got != want { - t.Errorf("Want error message %s, got %s", want, got) - } -} - -func TestWriteBadRequest(t *testing.T) { - w := httptest.NewRecorder() - - err := errors.New("pc load letter") - BadRequest(w, err) - - if got, want := w.Code, 400; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - errjson := &errors.Error{} - json.NewDecoder(w.Body).Decode(errjson) - if got, want := errjson.Message, err.Error(); got != want { - t.Errorf("Want error message %s, got %s", want, got) - } -} - -func TestBadRequestf(t *testing.T) { - w := httptest.NewRecorder() - - BadRequestf(w, "pc %s", "load letter") - if got, want := w.Code, 400; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - errjson := &errors.Error{} - json.NewDecoder(w.Body).Decode(errjson) - if got, want := errjson.Message, "pc load letter"; got != want { - t.Errorf("Want error message %s, got %s", want, got) - } -} - -func TestWriteJSON(t *testing.T) { - // without indent - { - w := httptest.NewRecorder() - JSON(w, map[string]string{"hello": "world"}, http.StatusTeapot) - if got, want := w.Body.String(), "{\"hello\":\"world\"}\n"; got != want { - t.Errorf("Want JSON body %q, got %q", want, got) - } - if got, want := w.HeaderMap.Get("Content-Type"), "application/json"; got != want { - t.Errorf("Want Content-Type %q, got %q", want, got) - } - if got, want := w.Code, http.StatusTeapot; got != want { - t.Errorf("Want status code %d, got %d", want, got) - } - } - // with indent - { - indent = true - defer func() { - indent = false - }() - w := httptest.NewRecorder() - JSON(w, map[string]string{"hello": "world"}, http.StatusTeapot) - if got, want := w.Body.String(), "{\n \"hello\": \"world\"\n}\n"; got != want { - t.Errorf("Want JSON body %q, got %q", want, got) - } - } -} diff --git a/handler/api/repos/all.go b/handler/api/repos/all.go deleted file mode 100644 index fa7f3ae700..0000000000 --- a/handler/api/repos/all.go +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package repos - -import ( - "net/http" - "strconv" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/logger" -) - -// HandleAll returns an http.HandlerFunc that processes http -// requests to list all repositories in the database. -func HandleAll(repos core.RepositoryStore) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - page = r.FormValue("page") - perPage = r.FormValue("per_page") - ) - offset, _ := strconv.Atoi(page) - limit, _ := strconv.Atoi(perPage) - if limit < 1 { // || limit > 100 - limit = 25 - } - switch offset { - case 0, 1: - offset = 0 - default: - offset = (offset - 1) * limit - } - repo, err := repos.ListAll(r.Context(), limit, offset) - if err != nil { - render.InternalError(w, err) - logger.FromRequest(r). - WithError(err). 
- Debugln("api: cannot list repositories") - } else { - render.JSON(w, repo, 200) - } - } -} diff --git a/handler/api/repos/builds/branches/create.go b/handler/api/repos/builds/branches/create.go deleted file mode 100644 index b85043db1e..0000000000 --- a/handler/api/repos/builds/branches/create.go +++ /dev/null @@ -1,15 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package branches diff --git a/handler/api/repos/builds/branches/create_test.go b/handler/api/repos/builds/branches/create_test.go deleted file mode 100644 index 8860172ff2..0000000000 --- a/handler/api/repos/builds/branches/create_test.go +++ /dev/null @@ -1,5 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package branches diff --git a/handler/api/repos/builds/branches/delete.go b/handler/api/repos/builds/branches/delete.go deleted file mode 100644 index 93d3a1d8e5..0000000000 --- a/handler/api/repos/builds/branches/delete.go +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package branches - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/logger" - - "github.com/go-chi/chi" -) - -// HandleDelete returns an http.HandlerFunc that handles an -// http.Request to delete a branch entry from the datastore. -func HandleDelete( - repos core.RepositoryStore, - builds core.BuildStore, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - namespace = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - branch = chi.URLParam(r, "*") - ) - repo, err := repos.FindName(r.Context(), namespace, name) - if err != nil { - render.NotFound(w, err) - logger.FromRequest(r). - WithError(err). - WithField("namespace", namespace). - WithField("name", name). - Debugln("api: cannot find repository") - return - } - - err = builds.DeleteBranch(r.Context(), repo.ID, branch) - if err != nil { - render.InternalError(w, err) - logger.FromRequest(r). - WithError(err). - WithField("namespace", namespace). - WithField("name", name). 
- Debugln("api: cannot delete branch") - } else { - w.WriteHeader(http.StatusNoContent) - } - } -} diff --git a/handler/api/repos/builds/branches/delete_test.go b/handler/api/repos/builds/branches/delete_test.go deleted file mode 100644 index 8860172ff2..0000000000 --- a/handler/api/repos/builds/branches/delete_test.go +++ /dev/null @@ -1,5 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package branches diff --git a/handler/api/repos/builds/branches/list.go b/handler/api/repos/builds/branches/list.go deleted file mode 100644 index 84c6a51da4..0000000000 --- a/handler/api/repos/builds/branches/list.go +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package branches - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/logger" - - "github.com/go-chi/chi" -) - -// HandleList returns an http.HandlerFunc that writes a json-encoded -// list of build history to the response body. -func HandleList( - repos core.RepositoryStore, - builds core.BuildStore, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - namespace = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - ) - repo, err := repos.FindName(r.Context(), namespace, name) - if err != nil { - render.NotFound(w, err) - logger.FromRequest(r). - WithError(err). - WithField("namespace", namespace). - WithField("name", name). - Debugln("api: cannot find repository") - return - } - - results, err := builds.LatestBranches(r.Context(), repo.ID) - if err != nil { - render.InternalError(w, err) - logger.FromRequest(r). - WithError(err). - WithField("namespace", namespace). - WithField("name", name). - Debugln("api: cannot list builds") - } else { - render.JSON(w, results, 200) - } - } -} diff --git a/handler/api/repos/builds/branches/list_test.go b/handler/api/repos/builds/branches/list_test.go deleted file mode 100644 index 8860172ff2..0000000000 --- a/handler/api/repos/builds/branches/list_test.go +++ /dev/null @@ -1,5 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package branches diff --git a/handler/api/repos/builds/cancel.go b/handler/api/repos/builds/cancel.go deleted file mode 100644 index 04c9943cdd..0000000000 --- a/handler/api/repos/builds/cancel.go +++ /dev/null @@ -1,220 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package builds - -import ( - "context" - "net/http" - "strconv" - "time" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/logger" - - "github.com/go-chi/chi" -) - -// HandleCancel returns an http.HandlerFunc that processes http -// requests to cancel a pending or running build. -func HandleCancel( - users core.UserStore, - repos core.RepositoryStore, - builds core.BuildStore, - stages core.StageStore, - steps core.StepStore, - status core.StatusService, - scheduler core.Scheduler, - webhooks core.WebhookSender, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - namespace = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - ) - - number, err := strconv.ParseInt(chi.URLParam(r, "number"), 10, 64) - if err != nil { - render.BadRequest(w, err) - return - } - - repo, err := repos.FindName(r.Context(), namespace, name) - if err != nil { - logger.FromRequest(r). - WithError(err). - WithField("namespace", namespace). - WithField("name", name). - Debugln("api: cannot find repository") - render.NotFound(w, err) - return - } - - build, err := builds.FindNumber(r.Context(), repo.ID, number) - if err != nil { - logger.FromRequest(r). - WithError(err). - WithField("build", build.Number). - WithField("namespace", namespace). - WithField("name", name). - Debugln("api: cannot find build") - render.NotFound(w, err) - return - } - - done := build.Status != core.StatusPending && - build.Status != core.StatusRunning - - // do not cancel the build if the build status is - // complete. only cancel the build if the status is - // running or pending. - if !done { - build.Status = core.StatusKilled - build.Finished = time.Now().Unix() - if build.Started == 0 { - build.Started = time.Now().Unix() - } - - err = builds.Update(r.Context(), build) - if err != nil { - logger.FromRequest(r). - WithError(err). - WithField("build", build.Number). - WithField("namespace", namespace). - WithField("name", name). - Warnln("api: cannot update build status to cancelled") - render.ErrorCode(w, err, http.StatusConflict) - return - } - - err = scheduler.Cancel(r.Context(), build.ID) - if err != nil { - logger.FromRequest(r). - WithError(err). - WithField("build", build.Number). - WithField("namespace", namespace). - WithField("name", name). - Warnln("api: cannot signal cancelled build is complete") - } - - user, err := users.Find(r.Context(), repo.UserID) - if err != nil { - logger.FromRequest(r). - WithError(err). - WithField("namespace", namespace). - WithField("name", name). - Debugln("api: cannot repository owner") - } else { - err := status.Send(r.Context(), user, &core.StatusInput{ - Repo: repo, - Build: build, - }) - if err != nil { - logger.FromRequest(r). - WithError(err). - WithField("build", build.Number). - WithField("namespace", namespace). - WithField("name", name). - Debugln("api: cannot set status") - } - } - } - - stagez, err := stages.ListSteps(r.Context(), build.ID) - if err != nil { - logger.FromRequest(r). - WithError(err). - WithField("build", build). - WithField("namespace", namespace). 
- WithField("name", name). - Debugln("api: cannot list build stages") - } - - for _, stage := range stagez { - if stage.IsDone() { - continue - } - if stage.Started != 0 { - stage.Status = core.StatusKilled - } else { - stage.Status = core.StatusSkipped - stage.Started = time.Now().Unix() - } - stage.Stopped = time.Now().Unix() - err := stages.Update(context.Background(), stage) - if err != nil { - logger.FromRequest(r). - WithError(err). - WithField("stage", stage.Number). - WithField("build", build.Number). - WithField("namespace", namespace). - WithField("name", name). - Debugln("api: cannot update stage status") - } - - for _, step := range stage.Steps { - if step.IsDone() { - continue - } - if step.Started != 0 { - step.Status = core.StatusKilled - } else { - step.Status = core.StatusSkipped - step.Started = time.Now().Unix() - } - step.Stopped = time.Now().Unix() - step.ExitCode = 130 - err := steps.Update(context.Background(), step) - if err != nil { - logger.FromRequest(r). - WithError(err). - WithField("stage", stage.Number). - WithField("build", build.Number). - WithField("step", step.Number). - WithField("namespace", namespace). - WithField("name", name). - Debugln("api: cannot update step status") - } - } - } - - logger.FromRequest(r). - WithField("build", build.Number). - WithField("namespace", namespace). - WithField("name", name). - Debugln("api: successfully cancelled build") - - build.Stages = stagez - - // do not trigger a webhook if the build was already - // complete. only trigger a webhook if the build was - // pending or running and then cancelled. - if !done { - payload := &core.WebhookData{ - Event: core.WebhookEventBuild, - Action: core.WebhookActionUpdated, - Repo: repo, - Build: build, - } - err = webhooks.Send(context.Background(), payload) - if err != nil { - logger.FromRequest(r).WithError(err). - Warnln("manager: cannot send global webhook") - } - } - - render.JSON(w, build, 200) - } -} diff --git a/handler/api/repos/builds/cancel_test.go b/handler/api/repos/builds/cancel_test.go deleted file mode 100644 index 5b37b7498a..0000000000 --- a/handler/api/repos/builds/cancel_test.go +++ /dev/null @@ -1,78 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -package builds - -import ( - "context" - "net/http/httptest" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/mock" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" -) - -func TestCancel(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockStages := []*core.Stage{ - {Status: core.StatusPassing}, - { - Status: core.StatusPending, - Steps: []*core.Step{ - {Status: core.StatusPassing}, - {Status: core.StatusPending}, - }, - }, - } - - mockBuildCopy := new(core.Build) - *mockBuildCopy = *mockBuild - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), mockRepo.Namespace, mockRepo.Name).Return(mockRepo, nil) - - builds := mock.NewMockBuildStore(controller) - builds.EXPECT().FindNumber(gomock.Any(), mockRepo.ID, mockBuild.Number).Return(mockBuildCopy, nil) - builds.EXPECT().Update(gomock.Any(), mockBuildCopy).Return(nil) - - users := mock.NewMockUserStore(controller) - users.EXPECT().Find(gomock.Any(), mockRepo.UserID).Return(mockUser, nil) - - stages := mock.NewMockStageStore(controller) - stages.EXPECT().ListSteps(gomock.Any(), mockBuild.ID).Return(mockStages, nil) - stages.EXPECT().Update(gomock.Any(), mockStages[1]).Return(nil) - - steps := mock.NewMockStepStore(controller) - steps.EXPECT().Update(gomock.Any(), mockStages[1].Steps[1]).Return(nil) - - statusService := mock.NewMockStatusService(controller) - statusService.EXPECT().Send(gomock.Any(), mockUser, gomock.Any()).Return(nil) - - webhook := mock.NewMockWebhookSender(controller) - webhook.EXPECT().Send(gomock.Any(), gomock.Any()).Return(nil) - - scheduler := mock.NewMockScheduler(controller) - scheduler.EXPECT().Cancel(gomock.Any(), mockBuild.ID).Return(nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("number", "1") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleCancel(users, repos, builds, stages, steps, statusService, scheduler, webhook)(w, r) - if got, want := w.Code, 200; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } -} diff --git a/handler/api/repos/builds/create.go b/handler/api/repos/builds/create.go deleted file mode 100644 index 1ff0a8be94..0000000000 --- a/handler/api/repos/builds/create.go +++ /dev/null @@ -1,124 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package builds - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/handler/api/request" - "github.com/drone/go-scm/scm" - - "github.com/go-chi/chi" -) - -// HandleCreate returns an http.HandlerFunc that processes http -// requests to create a build for the specified commit. 
-func HandleCreate( - users core.UserStore, - repos core.RepositoryStore, - commits core.CommitService, - triggerer core.Triggerer, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - ctx = r.Context() - namespace = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - sha = r.FormValue("commit") - branch = r.FormValue("branch") - message = r.FormValue("message") - action = r.FormValue("action") - user, _ = request.UserFrom(ctx) - ) - - repo, err := repos.FindName(ctx, namespace, name) - if err != nil { - render.NotFound(w, err) - return - } - - owner, err := users.Find(ctx, repo.UserID) - if err != nil { - render.NotFound(w, err) - return - } - - // if the user does not provide a branch, assume the - // default repository branch. - if branch == "" { - branch = repo.Branch - } - // expand the branch to a git reference. - ref := scm.ExpandRef(branch, "refs/heads") - - var commit *core.Commit - if sha != "" { - commit, err = commits.Find(ctx, owner, repo.Slug, sha) - } else { - commit, err = commits.FindRef(ctx, owner, repo.Slug, ref) - } - if err != nil { - render.NotFound(w, err) - return - } - - hook := &core.Hook{ - Trigger: user.Login, - Event: core.EventCustom, - Link: commit.Link, - Timestamp: commit.Author.Date, - Title: "", // we expect this to be empty. - Message: commit.Message, - Before: commit.Sha, - After: commit.Sha, - Ref: ref, - Source: branch, - Target: branch, - Author: commit.Author.Login, - AuthorName: commit.Author.Name, - AuthorEmail: commit.Author.Email, - AuthorAvatar: commit.Author.Avatar, - Sender: user.Login, - Params: map[string]string{}, - } - if len(message) > 0 { - hook.Message = message - } - if len(action) > 0 { - hook.Action = action - } - - for key, value := range r.URL.Query() { - if key == "access_token" || - key == "commit" || - key == "branch" { - continue - } - if len(value) == 0 { - continue - } - hook.Params[key] = value[0] - } - - result, err := triggerer.Trigger(r.Context(), repo, hook) - if err != nil { - render.InternalError(w, err) - } else { - render.JSON(w, result, 200) - } - } -} diff --git a/handler/api/repos/builds/create_test.go b/handler/api/repos/builds/create_test.go deleted file mode 100644 index a4b527005d..0000000000 --- a/handler/api/repos/builds/create_test.go +++ /dev/null @@ -1,173 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -package builds - -import ( - "context" - "encoding/json" - "net/http/httptest" - "net/url" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/request" - "github.com/drone/drone/mock" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -func TestCreate(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockCommit := &core.Commit{ - Sha: "cce10d5c4760d1d6ede99db850ab7e77efe15579", - Ref: "refs/heads/master", - Message: "updated README.md", - Link: "https://github.com/octocatl/hello-world/commit/cce10d5c4760d1d6ede99db850ab7e77efe15579", - Author: &core.Committer{ - Name: "The Octocat", - Email: "octocat@github.com", - Login: "octocat", - Avatar: "https://github.com/octocat.png", - }, - } - - checkBuild := func(_ context.Context, _ *core.Repository, hook *core.Hook) error { - if got, want := hook.Trigger, mockUser.Login; got != want { - t.Errorf("Want hook Trigger By %s, got %s", want, got) - } - if got, want := hook.Event, core.EventCustom; got != want { - t.Errorf("Want hook Event %s, got %s", want, got) - } - if got, want := hook.Link, mockCommit.Link; got != want { - t.Errorf("Want hook Link %s, got %s", want, got) - } - if got, want := hook.Message, mockCommit.Message; got != want { - t.Errorf("Want hook Message %s, got %s", want, got) - } - if got, want := hook.Before, mockCommit.Sha; got != want { - t.Errorf("Want hook Before %s, got %s", want, got) - } - if got, want := hook.After, mockCommit.Sha; got != want { - t.Errorf("Want hook After %s, got %s", want, got) - } - if got, want := hook.Ref, mockCommit.Ref; got != want { - t.Errorf("Want hook Ref %s, got %s", want, got) - } - if got, want := hook.Source, "master"; got != want { - t.Errorf("Want hook Source %s, got %s", want, got) - } - if got, want := hook.Target, "master"; got != want { - t.Errorf("Want hook Target %s, got %s", want, got) - } - if got, want := hook.Author, mockCommit.Author.Login; got != want { - t.Errorf("Want hook Author %s, got %s", want, got) - } - if got, want := hook.AuthorName, mockCommit.Author.Name; got != want { - t.Errorf("Want hook AuthorName %s, got %s", want, got) - } - if got, want := hook.AuthorEmail, mockCommit.Author.Email; got != want { - t.Errorf("Want hook AuthorEmail %s, got %s", want, got) - } - if got, want := hook.AuthorAvatar, mockCommit.Author.Avatar; got != want { - t.Errorf("Want hook AuthorAvatar %s, got %s", want, got) - } - if got, want := hook.Sender, mockUser.Login; got != want { - t.Errorf("Want hook Sender %s, got %s", want, got) - } - return nil - } - - users := mock.NewMockUserStore(controller) - users.EXPECT().Find(gomock.Any(), mockRepo.UserID).Return(mockUser, nil) - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), gomock.Any(), mockRepo.Name).Return(mockRepo, nil) - - commits := mock.NewMockCommitService(controller) - commits.EXPECT().Find(gomock.Any(), mockUser, mockRepo.Slug, mockCommit.Sha).Return(mockCommit, nil) - - triggerer := mock.NewMockTriggerer(controller) - triggerer.EXPECT().Trigger(gomock.Any(), mockRepo, gomock.Any()).Return(mockBuild, nil).Do(checkBuild) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - params := &url.Values{} - params.Set("branch", "master") - params.Set("commit", mockCommit.Sha) - - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/?"+params.Encode(), nil) - r = r.WithContext( - 
context.WithValue(request.WithUser(r.Context(), mockUser), chi.RouteCtxKey, c), - ) - - HandleCreate(users, repos, commits, triggerer)(w, r) - if got, want := w.Code, 200; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(core.Build), mockBuild - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestCreate_FromHead(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockCommit := &core.Commit{ - Sha: "cce10d5c4760d1d6ede99db850ab7e77efe15579", - Ref: "refs/heads/master", - Message: "updated README.md", - Link: "https://github.com/octocatl/hello-world/commit/cce10d5c4760d1d6ede99db850ab7e77efe15579", - Author: &core.Committer{ - Name: "The Octocat", - Email: "octocat@github.com", - Login: "octocat", - Avatar: "https://github.com/octocat.png", - }, - } - - users := mock.NewMockUserStore(controller) - users.EXPECT().Find(gomock.Any(), mockRepo.UserID).Return(mockUser, nil) - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), gomock.Any(), mockRepo.Name).Return(mockRepo, nil) - - commits := mock.NewMockCommitService(controller) - commits.EXPECT().FindRef(gomock.Any(), mockUser, mockRepo.Slug, mockCommit.Ref).Return(mockCommit, nil) - - triggerer := mock.NewMockTriggerer(controller) - triggerer.EXPECT().Trigger(gomock.Any(), mockRepo, gomock.Any()).Return(mockBuild, nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/", nil) - r = r.WithContext( - context.WithValue(request.WithUser(r.Context(), mockUser), chi.RouteCtxKey, c), - ) - - HandleCreate(users, repos, commits, triggerer)(w, r) - if got, want := w.Code, 200; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(core.Build), mockBuild - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/repos/builds/deploys/create.go b/handler/api/repos/builds/deploys/create.go deleted file mode 100644 index 4b57008aed..0000000000 --- a/handler/api/repos/builds/deploys/create.go +++ /dev/null @@ -1,15 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package deploys diff --git a/handler/api/repos/builds/deploys/create_test.go b/handler/api/repos/builds/deploys/create_test.go deleted file mode 100644 index dc720aea04..0000000000 --- a/handler/api/repos/builds/deploys/create_test.go +++ /dev/null @@ -1,5 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -package deploys diff --git a/handler/api/repos/builds/deploys/delete.go b/handler/api/repos/builds/deploys/delete.go deleted file mode 100644 index 45d9a5f187..0000000000 --- a/handler/api/repos/builds/deploys/delete.go +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package deploys - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/logger" - - "github.com/go-chi/chi" -) - -// HandleDelete returns an http.HandlerFunc that handles an -// http.Request to delete a branch entry from the datastore. -func HandleDelete( - repos core.RepositoryStore, - builds core.BuildStore, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - namespace = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - target = chi.URLParam(r, "*") - ) - repo, err := repos.FindName(r.Context(), namespace, name) - if err != nil { - render.NotFound(w, err) - logger.FromRequest(r). - WithError(err). - WithField("namespace", namespace). - WithField("name", name). - Debugln("api: cannot find repository") - return - } - - err = builds.DeleteDeploy(r.Context(), repo.ID, target) - if err != nil { - render.InternalError(w, err) - logger.FromRequest(r). - WithError(err). - WithField("namespace", namespace). - WithField("name", name). - Debugln("api: cannot delete deployment") - } else { - w.WriteHeader(http.StatusNoContent) - } - } -} diff --git a/handler/api/repos/builds/deploys/delete_test.go b/handler/api/repos/builds/deploys/delete_test.go deleted file mode 100644 index dc720aea04..0000000000 --- a/handler/api/repos/builds/deploys/delete_test.go +++ /dev/null @@ -1,5 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package deploys diff --git a/handler/api/repos/builds/deploys/list.go b/handler/api/repos/builds/deploys/list.go deleted file mode 100644 index 77d9359baa..0000000000 --- a/handler/api/repos/builds/deploys/list.go +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package deploys - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/logger" - - "github.com/go-chi/chi" -) - -// HandleList returns an http.HandlerFunc that writes a json-encoded -// list of build history to the response body. -func HandleList( - repos core.RepositoryStore, - builds core.BuildStore, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - namespace = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - ) - repo, err := repos.FindName(r.Context(), namespace, name) - if err != nil { - render.NotFound(w, err) - logger.FromRequest(r). - WithError(err). - WithField("namespace", namespace). - WithField("name", name). - Debugln("api: cannot find repository") - return - } - - results, err := builds.LatestDeploys(r.Context(), repo.ID) - if err != nil { - render.InternalError(w, err) - logger.FromRequest(r). - WithError(err). - WithField("namespace", namespace). - WithField("name", name). - Debugln("api: cannot list builds") - } else { - render.JSON(w, results, 200) - } - } -} diff --git a/handler/api/repos/builds/deploys/list_test.go b/handler/api/repos/builds/deploys/list_test.go deleted file mode 100644 index dc720aea04..0000000000 --- a/handler/api/repos/builds/deploys/list_test.go +++ /dev/null @@ -1,5 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package deploys diff --git a/handler/api/repos/builds/find.go b/handler/api/repos/builds/find.go deleted file mode 100644 index 508603a293..0000000000 --- a/handler/api/repos/builds/find.go +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package builds - -import ( - "net/http" - "strconv" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - - "github.com/go-chi/chi" -) - -// HandleFind returns an http.HandlerFunc that writes json-encoded -// build details to the response body. 
-func HandleFind( - repos core.RepositoryStore, - builds core.BuildStore, - stages core.StageStore, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - namespace = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - ) - number, err := strconv.ParseInt(chi.URLParam(r, "number"), 10, 64) - if err != nil { - render.BadRequest(w, err) - return - } - repo, err := repos.FindName(r.Context(), namespace, name) - if err != nil { - render.NotFound(w, err) - return - } - build, err := builds.FindNumber(r.Context(), repo.ID, number) - if err != nil { - render.NotFound(w, err) - return - } - stages, err := stages.ListSteps(r.Context(), build.ID) - if err != nil { - render.InternalError(w, err) - return - } - render.JSON(w, &buildWithStages{build, stages}, 200) - } -} - -type buildWithStages struct { - *core.Build - Stages []*core.Stage `json:"stages,omitempty"` -} diff --git a/handler/api/repos/builds/find_test.go b/handler/api/repos/builds/find_test.go deleted file mode 100644 index 52d1bf4198..0000000000 --- a/handler/api/repos/builds/find_test.go +++ /dev/null @@ -1,181 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package builds - -import ( - "context" - "encoding/json" - "net/http/httptest" - "testing" - - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/mock" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -func TestFind(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), gomock.Any(), mockRepo.Name).Return(mockRepo, nil) - - builds := mock.NewMockBuildStore(controller) - builds.EXPECT().FindNumber(gomock.Any(), mockRepo.ID, mockBuild.Number).Return(mockBuild, nil) - - stages := mock.NewMockStageStore(controller) - stages.EXPECT().ListSteps(gomock.Any(), mockBuild.ID).Return(mockStages, nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("number", "1") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleFind(repos, builds, stages)(w, r) - - if got, want := w.Code, 200; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := &buildWithStages{}, &buildWithStages{mockBuild, mockStages} - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestFind_BadRequest(t *testing.T) { - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("number", "one") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleFind(nil, nil, nil)(w, r) - - if got, want := w.Code, 400; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), &errors.Error{Message: "strconv.ParseInt: parsing \"one\": invalid syntax"} - json.NewDecoder(w.Body).Decode(&got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestFind_RepoNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer 
controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), gomock.Any(), mockRepo.Name).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("number", "1") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleFind(repos, nil, nil)(w, r) - - if got, want := w.Code, 404; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestFind_BuildNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - builds := mock.NewMockBuildStore(controller) - repos.EXPECT().FindName(gomock.Any(), gomock.Any(), mockRepo.Name).Return(mockRepo, nil) - builds.EXPECT().FindNumber(gomock.Any(), mockRepo.ID, mockBuild.Number).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("number", "1") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleFind(repos, builds, nil)(w, r) - - if got, want := w.Code, 404; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestFind_StagesNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), gomock.Any(), mockRepo.Name).Return(mockRepo, nil) - - builds := mock.NewMockBuildStore(controller) - builds.EXPECT().FindNumber(gomock.Any(), mockRepo.ID, mockBuild.Number).Return(mockBuild, nil) - - stages := mock.NewMockStageStore(controller) - stages.EXPECT().ListSteps(gomock.Any(), mockBuild.ID).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("number", "1") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleFind(repos, builds, stages)(w, r) - if got, want := w.Code, 500; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/repos/builds/latest.go b/handler/api/repos/builds/latest.go deleted file mode 100644 index a15be13080..0000000000 --- a/handler/api/repos/builds/latest.go +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package builds - -import ( - "fmt" - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - - "github.com/go-chi/chi" -) - -// HandleLast returns an http.HandlerFunc that writes json-encoded -// build details to the the response body for the latest build. -func HandleLast( - repos core.RepositoryStore, - builds core.BuildStore, - stages core.StageStore, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - namespace = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - ref = r.FormValue("ref") - branch = r.FormValue("branch") - ) - repo, err := repos.FindName(r.Context(), namespace, name) - if err != nil { - render.NotFound(w, err) - return - } - if ref == "" { - ref = fmt.Sprintf("refs/heads/%s", repo.Branch) - } - if branch != "" { - ref = fmt.Sprintf("refs/heads/%s", branch) - } - build, err := builds.FindRef(r.Context(), repo.ID, ref) - if err != nil { - render.NotFound(w, err) - return - } - stages, err := stages.ListSteps(r.Context(), build.ID) - if err != nil { - render.InternalError(w, err) - return - } - render.JSON(w, &buildWithStages{build, stages}, 200) - } -} diff --git a/handler/api/repos/builds/latest_test.go b/handler/api/repos/builds/latest_test.go deleted file mode 100644 index 4527e1f273..0000000000 --- a/handler/api/repos/builds/latest_test.go +++ /dev/null @@ -1,154 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -package builds - -import ( - "context" - "encoding/json" - "net/http/httptest" - "testing" - - "github.com/drone/drone/mock" - "github.com/drone/drone/handler/api/errors" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -func TestLast(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), gomock.Any(), mockRepo.Name).Return(mockRepo, nil) - - builds := mock.NewMockBuildStore(controller) - builds.EXPECT().FindRef(gomock.Any(), mockRepo.ID, "refs/heads/master").Return(mockBuild, nil) - - stages := mock.NewMockStageStore(controller) - stages.EXPECT().ListSteps(gomock.Any(), mockBuild.ID).Return(mockStages, nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleLast(repos, builds, stages)(w, r) - - if got, want := w.Code, 200; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := &buildWithStages{}, &buildWithStages{mockBuild, mockStages} - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestLast_RepoNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), gomock.Any(), mockRepo.Name).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("number", "1") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleLast(repos, nil, nil)(w, r) - - if got, want := w.Code, 404; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestLast_BuildNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), gomock.Any(), mockRepo.Name).Return(mockRepo, nil) - - builds := mock.NewMockBuildStore(controller) - builds.EXPECT().FindRef(gomock.Any(), mockRepo.ID, "refs/heads/master").Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleLast(repos, builds, nil)(w, r) - - if got, want := w.Code, 404; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestLast_StagesNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), gomock.Any(), mockRepo.Name).Return(mockRepo, nil) - - builds := 
mock.NewMockBuildStore(controller) - builds.EXPECT().FindRef(gomock.Any(), mockRepo.ID, "refs/heads/master").Return(mockBuild, nil) - - stages := mock.NewMockStageStore(controller) - stages.EXPECT().ListSteps(gomock.Any(), mockBuild.ID).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleLast(repos, builds, stages)(w, r) - if got, want := w.Code, 500; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/repos/builds/list.go b/handler/api/repos/builds/list.go deleted file mode 100644 index 699e81c36f..0000000000 --- a/handler/api/repos/builds/list.go +++ /dev/null @@ -1,88 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package builds - -import ( - "fmt" - "net/http" - "strconv" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/logger" - - "github.com/go-chi/chi" -) - -// HandleList returns an http.HandlerFunc that writes a json-encoded -// list of build history to the response body. -func HandleList( - repos core.RepositoryStore, - builds core.BuildStore, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - namespace = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - branch = r.FormValue("branch") - tag = r.FormValue("tag") - page = r.FormValue("page") - perPage = r.FormValue("per_page") - ) - offset, _ := strconv.Atoi(page) - limit, _ := strconv.Atoi(perPage) - if limit < 1 || limit > 100 { - limit = 25 - } - switch offset { - case 0, 1: - offset = 0 - default: - offset = (offset - 1) * limit - } - repo, err := repos.FindName(r.Context(), namespace, name) - if err != nil { - render.NotFound(w, err) - logger.FromRequest(r). - WithError(err). - WithField("namespace", namespace). - WithField("name", name). - Debugln("api: cannot find repository") - return - } - - var results []*core.Build - if branch != "" { - ref := fmt.Sprintf("refs/heads/%s", branch) - results, err = builds.ListRef(r.Context(), repo.ID, ref, limit, offset) - } else if tag != "" { - ref := fmt.Sprintf("refs/tags/%s", tag) - results, err = builds.ListRef(r.Context(), repo.ID, ref, limit, offset) - } else { - results, err = builds.List(r.Context(), repo.ID, limit, offset) - } - - if err != nil { - render.InternalError(w, err) - logger.FromRequest(r). - WithError(err). - WithField("namespace", namespace). - WithField("name", name). 
- Debugln("api: cannot list builds") - } else { - render.JSON(w, results, 200) - } - } -} diff --git a/handler/api/repos/builds/list_test.go b/handler/api/repos/builds/list_test.go deleted file mode 100644 index 453582cfc1..0000000000 --- a/handler/api/repos/builds/list_test.go +++ /dev/null @@ -1,235 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package builds - -import ( - "context" - "encoding/json" - "net/http" - "net/http/httptest" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/mock" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -var ( - mockRepo = &core.Repository{ - ID: 1, - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - Counter: 42, - Branch: "master", - } - - mockBuild = &core.Build{ - ID: 1, - Number: 1, - RepoID: 1, - Status: core.StatusPending, - Event: core.EventPush, - Link: "https://github.com/octocat/Hello-World/commit/7fd1a60b01f91b314f59955a4e4d4e80d8edf11d", - Timestamp: 1299283200, - Message: "first commit", - Before: "553c2077f0edc3d5dc5d17262f6aa498e69d6f8e", - After: "7fd1a60b01f91b314f59955a4e4d4e80d8edf11d", - Ref: "refs/heads/master", - Source: "master", - Target: "master", - Author: "octocat", - AuthorName: "The Octocat", - AuthorEmail: "octocat@hello-world.com", - AuthorAvatar: "https://avatars3.githubusercontent.com/u/583231", - Sender: "octocat", - } - - mockBuilds = []*core.Build{ - { - ID: 1, - Number: 1, - }, - } - - mockStage = &core.Stage{ - BuildID: 1, - Number: 1, - Name: "clone", - Status: core.StatusPassing, - } - - mockStages = []*core.Stage{ - mockStage, - } - - mockUser = &core.User{ - ID: 1, - Login: "octocat", - } -) - -func TestList(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), gomock.Any(), mockRepo.Name).Return(mockRepo, nil) - - builds := mock.NewMockBuildStore(controller) - builds.EXPECT().List(gomock.Any(), mockRepo.ID, 25, 0).Return(mockBuilds, nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleList(repos, builds)(w, r) - if got, want := w.Code, 200; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := []*core.Build{}, mockBuilds - json.NewDecoder(w.Body).Decode(&got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestListBranch(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), gomock.Any(), mockRepo.Name).Return(mockRepo, nil) - - builds := mock.NewMockBuildStore(controller) - builds.EXPECT().ListRef(gomock.Any(), mockRepo.ID, "refs/heads/develop", 25, 0).Return(mockBuilds, nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/?branch=develop", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleList(repos, builds)(w, r) - if 
got, want := w.Code, 200; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := []*core.Build{}, mockBuilds - json.NewDecoder(w.Body).Decode(&got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestListTag(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), gomock.Any(), mockRepo.Name).Return(mockRepo, nil) - - builds := mock.NewMockBuildStore(controller) - builds.EXPECT().ListRef(gomock.Any(), mockRepo.ID, "refs/tags/1.33.7", 25, 0).Return(mockBuilds, nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/?tag=1.33.7", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleList(repos, builds)(w, r) - if got, want := w.Code, 200; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := []*core.Build{}, mockBuilds - json.NewDecoder(w.Body).Decode(&got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestList_RepositoryNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - builds := mock.NewMockBuildStore(controller) - repos.EXPECT().FindName(gomock.Any(), gomock.Any(), mockRepo.Name).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("number", "one") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleList(repos, builds)(w, r) - if got, want := w.Code, http.StatusNotFound; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestList_InternalError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - builds := mock.NewMockBuildStore(controller) - repos.EXPECT().FindName(gomock.Any(), gomock.Any(), mockRepo.Name).Return(mockRepo, nil) - builds.EXPECT().List(gomock.Any(), mockRepo.ID, 25, 0).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("number", "one") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleList(repos, builds)(w, r) - if got, want := w.Code, http.StatusInternalServerError; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/repos/builds/logs/delete.go b/handler/api/repos/builds/logs/delete.go deleted file mode 100644 index a26e8e9a30..0000000000 --- a/handler/api/repos/builds/logs/delete.go +++ /dev/null @@ -1,83 +0,0 @@ -// Copyright 2019 Drone IO, Inc. 
-// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package logs - -import ( - "net/http" - "strconv" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - - "github.com/go-chi/chi" -) - -// HandleDelete returns an http.HandlerFunc that processes http -// requests to delete the logs. -func HandleDelete( - repos core.RepositoryStore, - builds core.BuildStore, - stages core.StageStore, - steps core.StepStore, - logs core.LogStore, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - namespace = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - ) - number, err := strconv.ParseInt(chi.URLParam(r, "number"), 10, 64) - if err != nil { - render.BadRequest(w, err) - return - } - stageNumber, err := strconv.Atoi(chi.URLParam(r, "stage")) - if err != nil { - render.BadRequest(w, err) - return - } - stepNumber, err := strconv.Atoi(chi.URLParam(r, "step")) - if err != nil { - render.BadRequest(w, err) - return - } - repo, err := repos.FindName(r.Context(), namespace, name) - if err != nil { - render.NotFound(w, err) - return - } - build, err := builds.FindNumber(r.Context(), repo.ID, number) - if err != nil { - render.NotFound(w, err) - return - } - stage, err := stages.FindNumber(r.Context(), build.ID, stageNumber) - if err != nil { - render.NotFound(w, err) - return - } - step, err := steps.FindNumber(r.Context(), stage.ID, stepNumber) - if err != nil { - render.NotFound(w, err) - return - } - err = logs.Delete(r.Context(), step.ID) - if err != nil { - render.InternalError(w, err) - return - } - w.WriteHeader(204) - } -} diff --git a/handler/api/repos/builds/logs/delete_test.go b/handler/api/repos/builds/logs/delete_test.go deleted file mode 100644 index b2415dc13a..0000000000 --- a/handler/api/repos/builds/logs/delete_test.go +++ /dev/null @@ -1,5 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package logs diff --git a/handler/api/repos/builds/logs/find.go b/handler/api/repos/builds/logs/find.go deleted file mode 100644 index 84abb0b2cc..0000000000 --- a/handler/api/repos/builds/logs/find.go +++ /dev/null @@ -1,90 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package logs - -import ( - "io" - "net/http" - "strconv" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - - "github.com/go-chi/chi" -) - -// HandleFind returns an http.HandlerFunc that writes the -// json-encoded logs to the response body. -func HandleFind( - repos core.RepositoryStore, - builds core.BuildStore, - stages core.StageStore, - steps core.StepStore, - logs core.LogStore, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - namespace = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - ) - number, err := strconv.ParseInt(chi.URLParam(r, "number"), 10, 64) - if err != nil { - render.BadRequest(w, err) - return - } - stageNumber, err := strconv.Atoi(chi.URLParam(r, "stage")) - if err != nil { - render.BadRequest(w, err) - return - } - stepNumber, err := strconv.Atoi(chi.URLParam(r, "step")) - if err != nil { - render.BadRequest(w, err) - return - } - repo, err := repos.FindName(r.Context(), namespace, name) - if err != nil { - render.NotFound(w, err) - return - } - build, err := builds.FindNumber(r.Context(), repo.ID, number) - if err != nil { - render.NotFound(w, err) - return - } - stage, err := stages.FindNumber(r.Context(), build.ID, stageNumber) - if err != nil { - render.NotFound(w, err) - return - } - step, err := steps.FindNumber(r.Context(), stage.ID, stepNumber) - if err != nil { - render.NotFound(w, err) - return - } - rc, err := logs.Find(r.Context(), step.ID) - if err != nil { - render.NotFound(w, err) - return - } - w.Header().Set("Content-Type", "application/json") - io.Copy(w, rc) - rc.Close() - - // TODO: logs are stored in jsonl format and therefore - // need to be converted to valid json. - // ELSE: JSON.parse('['+x.split('\n').join(',')+']') - } -} diff --git a/handler/api/repos/builds/logs/find_test.go b/handler/api/repos/builds/logs/find_test.go deleted file mode 100644 index b2415dc13a..0000000000 --- a/handler/api/repos/builds/logs/find_test.go +++ /dev/null @@ -1,5 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package logs diff --git a/handler/api/repos/builds/promote.go b/handler/api/repos/builds/promote.go deleted file mode 100644 index 790ad62e59..0000000000 --- a/handler/api/repos/builds/promote.go +++ /dev/null @@ -1,103 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package builds - -import ( - "net/http" - "strconv" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/handler/api/request" - - "github.com/go-chi/chi" -) - -// HandlePromote returns an http.HandlerFunc that processes http -// requests to promote and re-execute a build. 
-func HandlePromote( - repos core.RepositoryStore, - builds core.BuildStore, - triggerer core.Triggerer, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - environ = r.FormValue("target") - namespace = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - user, _ = request.UserFrom(r.Context()) - ) - number, err := strconv.ParseInt(chi.URLParam(r, "number"), 10, 64) - if err != nil { - render.BadRequest(w, err) - return - } - repo, err := repos.FindName(r.Context(), namespace, name) - if err != nil { - render.NotFound(w, err) - return - } - prev, err := builds.FindNumber(r.Context(), repo.ID, number) - if err != nil { - render.NotFound(w, err) - return - } - if environ == "" { - render.BadRequestf(w, "Missing target environment") - return - } - - hook := &core.Hook{ - Parent: prev.Number, - Trigger: user.Login, - Event: core.EventPromote, - Action: prev.Action, - Link: prev.Link, - Timestamp: prev.Timestamp, - Title: prev.Title, - Message: prev.Message, - Before: prev.Before, - After: prev.After, - Ref: prev.Ref, - Fork: prev.Fork, - Source: prev.Source, - Target: prev.Target, - Author: prev.Author, - AuthorName: prev.AuthorName, - AuthorEmail: prev.AuthorEmail, - AuthorAvatar: prev.AuthorAvatar, - Deployment: environ, - Cron: prev.Cron, - Sender: prev.Sender, - Params: map[string]string{}, - } - - for k, v := range prev.Params { - hook.Params[k] = v - } - - for key, value := range r.URL.Query() { - if key == "access_token" { - continue - } - if key == "target" { - continue - } - if len(value) == 0 { - continue - } - hook.Params[key] = value[0] - } - - result, err := triggerer.Trigger(r.Context(), repo, hook) - if err != nil { - render.InternalError(w, err) - } else { - render.JSON(w, result, 200) - } - } -} diff --git a/handler/api/repos/builds/promote_oss.go b/handler/api/repos/builds/promote_oss.go deleted file mode 100644 index 0664cd6015..0000000000 --- a/handler/api/repos/builds/promote_oss.go +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build oss - -package builds - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" -) - -var notImplemented = func(w http.ResponseWriter, r *http.Request) { - render.NotImplemented(w, render.ErrNotImplemented) -} - -// HandlePromote returns a non-op http.HandlerFunc. -func HandlePromote( - core.RepositoryStore, - core.BuildStore, - core.Triggerer, -) http.HandlerFunc { - return notImplemented -} diff --git a/handler/api/repos/builds/promote_test.go b/handler/api/repos/builds/promote_test.go deleted file mode 100644 index 5f9db4e9b3..0000000000 --- a/handler/api/repos/builds/promote_test.go +++ /dev/null @@ -1,269 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -// +build !oss - -package builds - -import ( - "context" - "encoding/json" - "net/http/httptest" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/handler/api/request" - "github.com/drone/drone/mock" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -func TestPromote(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - checkBuild := func(_ context.Context, _ *core.Repository, hook *core.Hook) error { - if got, want := hook.Trigger, mockUser.Login; got != want { - t.Errorf("Want Trigger By %s, got %s", want, got) - } - if got, want := hook.Event, core.EventPromote; got != want { - t.Errorf("Want Build Event %s, got %s", want, got) - } - if got, want := hook.Link, mockBuild.Link; got != want { - t.Errorf("Want Build Link %s, got %s", want, got) - } - if got, want := hook.Message, mockBuild.Message; got != want { - t.Errorf("Want Build Message %s, got %s", want, got) - } - if got, want := hook.Before, mockBuild.Before; got != want { - t.Errorf("Want Build Before %s, got %s", want, got) - } - if got, want := hook.After, mockBuild.After; got != want { - t.Errorf("Want Build After %s, got %s", want, got) - } - if got, want := hook.Ref, mockBuild.Ref; got != want { - t.Errorf("Want Build Ref %s, got %s", want, got) - } - if got, want := hook.Source, mockBuild.Source; got != want { - t.Errorf("Want Build Source %s, got %s", want, got) - } - if got, want := hook.Target, mockBuild.Target; got != want { - t.Errorf("Want Build Target %s, got %s", want, got) - } - if got, want := hook.Author, mockBuild.Author; got != want { - t.Errorf("Want Build Author %s, got %s", want, got) - } - if got, want := hook.AuthorName, mockBuild.AuthorName; got != want { - t.Errorf("Want Build AuthorName %s, got %s", want, got) - } - if got, want := hook.AuthorEmail, mockBuild.AuthorEmail; got != want { - t.Errorf("Want Build AuthorEmail %s, got %s", want, got) - } - if got, want := hook.AuthorAvatar, mockBuild.AuthorAvatar; got != want { - t.Errorf("Want Build AuthorAvatar %s, got %s", want, got) - } - if got, want := hook.Deployment, "production"; got != want { - t.Errorf("Want Build Deployment %s, got %s", want, got) - } - if got, want := hook.Sender, mockBuild.Sender; got != want { - t.Errorf("Want Build Sender %s, got %s", want, got) - } - return nil - } - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), gomock.Any(), mockRepo.Name).Return(mockRepo, nil) - - builds := mock.NewMockBuildStore(controller) - builds.EXPECT().FindNumber(gomock.Any(), mockRepo.ID, mockBuild.Number).Return(mockBuild, nil) - - triggerer := mock.NewMockTriggerer(controller) - triggerer.EXPECT().Trigger(gomock.Any(), mockRepo, gomock.Any()).Return(mockBuild, nil).Do(checkBuild) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("number", "1") - - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/?target=production", nil) - r = r.WithContext( - context.WithValue(request.WithUser(r.Context(), mockUser), chi.RouteCtxKey, c), - ) - - HandlePromote(repos, builds, triggerer)(w, r) - if got, want := w.Code, 200; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(core.Build), mockBuild - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func 
TestPromote_InvalidBuildNumber(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("number", "XLII") - - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/?target=production", nil) - r = r.WithContext( - context.WithValue(request.WithUser(r.Context(), mockUser), chi.RouteCtxKey, c), - ) - - HandlePromote(nil, nil, nil)(w, r) - if got, want := w.Code, 400; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), &errors.Error{ - Message: `strconv.ParseInt: parsing "XLII": invalid syntax`, - } - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestPromote_RepoNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), gomock.Any(), mockRepo.Name).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("number", "1") - - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/?target=production", nil) - r = r.WithContext( - context.WithValue(request.WithUser(r.Context(), mockUser), chi.RouteCtxKey, c), - ) - - HandlePromote(repos, nil, nil)(w, r) - if got, want := w.Code, 404; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestPromote_BuildNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), gomock.Any(), mockRepo.Name).Return(mockRepo, nil) - - builds := mock.NewMockBuildStore(controller) - builds.EXPECT().FindNumber(gomock.Any(), mockRepo.ID, mockBuild.Number).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("number", "1") - - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/?target=production", nil) - r = r.WithContext( - context.WithValue(request.WithUser(r.Context(), mockUser), chi.RouteCtxKey, c), - ) - - HandlePromote(repos, builds, nil)(w, r) - if got, want := w.Code, 404; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestPromote_MissingTargetError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), gomock.Any(), mockRepo.Name).Return(mockRepo, nil) - - builds := mock.NewMockBuildStore(controller) - builds.EXPECT().FindNumber(gomock.Any(), mockRepo.ID, mockBuild.Number).Return(mockBuild, nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("number", "1") - - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/?target=", nil) - r = r.WithContext( - context.WithValue(request.WithUser(r.Context(), mockUser), 
chi.RouteCtxKey, c), - ) - - HandlePromote(repos, builds, nil)(w, r) - if got, want := w.Code, 400; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), &errors.Error{Message: "Missing target environment"} - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestPromote_TriggerError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), gomock.Any(), mockRepo.Name).Return(mockRepo, nil) - - builds := mock.NewMockBuildStore(controller) - builds.EXPECT().FindNumber(gomock.Any(), mockRepo.ID, mockBuild.Number).Return(mockBuild, nil) - - triggerer := mock.NewMockTriggerer(controller) - triggerer.EXPECT().Trigger(gomock.Any(), mockRepo, gomock.Any()).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("number", "1") - - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/?target=production", nil) - r = r.WithContext( - context.WithValue(request.WithUser(r.Context(), mockUser), chi.RouteCtxKey, c), - ) - - HandlePromote(repos, builds, triggerer)(w, r) - if got, want := w.Code, 500; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/repos/builds/pulls/create.go b/handler/api/repos/builds/pulls/create.go deleted file mode 100644 index 4a8d007c88..0000000000 --- a/handler/api/repos/builds/pulls/create.go +++ /dev/null @@ -1,15 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package pulls diff --git a/handler/api/repos/builds/pulls/create_test.go b/handler/api/repos/builds/pulls/create_test.go deleted file mode 100644 index f141d870c3..0000000000 --- a/handler/api/repos/builds/pulls/create_test.go +++ /dev/null @@ -1,5 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package pulls diff --git a/handler/api/repos/builds/pulls/delete.go b/handler/api/repos/builds/pulls/delete.go deleted file mode 100644 index e08af36694..0000000000 --- a/handler/api/repos/builds/pulls/delete.go +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package pulls - -import ( - "net/http" - "strconv" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/logger" - "github.com/go-chi/chi" -) - -// HandleDelete returns an http.HandlerFunc that handles an -// http.Request to delete a branch entry from the datastore. -func HandleDelete( - repos core.RepositoryStore, - builds core.BuildStore, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - namespace = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - number, _ = strconv.Atoi(chi.URLParam(r, "pull")) - ) - repo, err := repos.FindName(r.Context(), namespace, name) - if err != nil { - render.NotFound(w, err) - logger.FromRequest(r). - WithError(err). - WithField("namespace", namespace). - WithField("name", name). - Debugln("api: cannot find repository") - return - } - - err = builds.DeletePull(r.Context(), repo.ID, number) - if err != nil { - render.InternalError(w, err) - logger.FromRequest(r). - WithError(err). - WithField("namespace", namespace). - WithField("name", name). - Debugln("api: cannot delete pr") - } else { - w.WriteHeader(http.StatusNoContent) - } - } -} diff --git a/handler/api/repos/builds/pulls/delete_test.go b/handler/api/repos/builds/pulls/delete_test.go deleted file mode 100644 index f141d870c3..0000000000 --- a/handler/api/repos/builds/pulls/delete_test.go +++ /dev/null @@ -1,5 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package pulls diff --git a/handler/api/repos/builds/pulls/list.go b/handler/api/repos/builds/pulls/list.go deleted file mode 100644 index a284623f97..0000000000 --- a/handler/api/repos/builds/pulls/list.go +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package pulls - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/logger" - - "github.com/go-chi/chi" -) - -// HandleList returns an http.HandlerFunc that writes a json-encoded -// list of build history to the response body. -func HandleList( - repos core.RepositoryStore, - builds core.BuildStore, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - namespace = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - ) - repo, err := repos.FindName(r.Context(), namespace, name) - if err != nil { - render.NotFound(w, err) - logger.FromRequest(r). 
- WithError(err). - WithField("namespace", namespace). - WithField("name", name). - Debugln("api: cannot find repository") - return - } - - results, err := builds.LatestPulls(r.Context(), repo.ID) - if err != nil { - render.InternalError(w, err) - logger.FromRequest(r). - WithError(err). - WithField("namespace", namespace). - WithField("name", name). - Debugln("api: cannot list builds") - } else { - render.JSON(w, results, 200) - } - } -} diff --git a/handler/api/repos/builds/pulls/list_test.go b/handler/api/repos/builds/pulls/list_test.go deleted file mode 100644 index f141d870c3..0000000000 --- a/handler/api/repos/builds/pulls/list_test.go +++ /dev/null @@ -1,5 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package pulls diff --git a/handler/api/repos/builds/purge.go b/handler/api/repos/builds/purge.go deleted file mode 100644 index 070251a1b1..0000000000 --- a/handler/api/repos/builds/purge.go +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package builds - -import ( - "net/http" - "strconv" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - - "github.com/go-chi/chi" -) - -// HandlePurge returns an http.HandlerFunc that purges the -// build history. If successful a 204 status code is returned. -func HandlePurge(repos core.RepositoryStore, builds core.BuildStore) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - namespace = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - before = r.FormValue("before") - ) - number, err := strconv.ParseInt(before, 10, 64) - if err != nil { - render.BadRequest(w, err) - return - } - repo, err := repos.FindName(r.Context(), namespace, name) - if err != nil { - render.NotFound(w, err) - return - } - err = builds.Purge(r.Context(), repo.ID, number) - if err != nil { - render.InternalError(w, err) - return - } - w.WriteHeader(http.StatusNoContent) - } -} diff --git a/handler/api/repos/builds/purge_oss.go b/handler/api/repos/builds/purge_oss.go deleted file mode 100644 index d9937051a8..0000000000 --- a/handler/api/repos/builds/purge_oss.go +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build oss - -package builds - -import ( - "net/http" - - "github.com/drone/drone/core" -) - -// HandlePurge returns a non-op http.HandlerFunc. -func HandlePurge(core.RepositoryStore, core.BuildStore) http.HandlerFunc { - return notImplemented -} diff --git a/handler/api/repos/builds/purge_test.go b/handler/api/repos/builds/purge_test.go deleted file mode 100644 index 01b9bf2187..0000000000 --- a/handler/api/repos/builds/purge_test.go +++ /dev/null @@ -1,144 +0,0 @@ -// Copyright 2019 Drone.IO Inc. 
All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package builds - -import ( - "context" - "encoding/json" - "net/http" - "net/http/httptest" - "testing" - - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/handler/api/request" - "github.com/drone/drone/mock" - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -func TestPurge(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), gomock.Any(), mockRepo.Name).Return(mockRepo, nil) - - builds := mock.NewMockBuildStore(controller) - builds.EXPECT().Purge(gomock.Any(), mockRepo.ID, int64(50)).Return(nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("DELETE", "/?before=50", nil) - r = r.WithContext( - context.WithValue(request.WithUser(r.Context(), mockUser), chi.RouteCtxKey, c), - ) - - HandlePurge(repos, builds)(w, r) - if got, want := w.Code, http.StatusNoContent; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } -} - -// The test verifies that a 404 Not Found error is returned -// if the repository store returns an error. -func TestPurge_NotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), gomock.Any(), mockRepo.Name).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("DELETE", "/?before=50", nil) - r = r.WithContext( - context.WithValue(request.WithUser(r.Context(), mockUser), chi.RouteCtxKey, c), - ) - - HandlePurge(repos, nil)(w, r) - if got, want := w.Code, 404; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -// The test verifies that a 400 Bad Request error is returned -// if the user provides an invalid ?before query parameter -// that cannot be parsed. -func TestPurge_BadRequest(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("DELETE", "/?before=XLII", nil) - r = r.WithContext( - context.WithValue(request.WithUser(r.Context(), mockUser), chi.RouteCtxKey, c), - ) - - HandlePurge(nil, nil)(w, r) - if got, want := w.Code, 400; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), &errors.Error{ - Message: `strconv.ParseInt: parsing "XLII": invalid syntax`, - } - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -// The test verifies that a 500 Internal server error is -// returned if the database purge transaction fails. 
-func TestPurge_InternalError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), gomock.Any(), mockRepo.Name).Return(mockRepo, nil) - - builds := mock.NewMockBuildStore(controller) - builds.EXPECT().Purge(gomock.Any(), mockRepo.ID, int64(50)).Return(errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("DELETE", "/?before=50", nil) - r = r.WithContext( - context.WithValue(request.WithUser(r.Context(), mockUser), chi.RouteCtxKey, c), - ) - - HandlePurge(repos, builds)(w, r) - if got, want := w.Code, http.StatusInternalServerError; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/repos/builds/retry.go b/handler/api/repos/builds/retry.go deleted file mode 100644 index 9d297d6920..0000000000 --- a/handler/api/repos/builds/retry.go +++ /dev/null @@ -1,116 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package builds - -import ( - "net/http" - "strconv" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/handler/api/request" - - "github.com/go-chi/chi" -) - -// HandleRetry returns an http.HandlerFunc that processes http -// requests to retry and re-execute a build. 
-func HandleRetry( - repos core.RepositoryStore, - builds core.BuildStore, - triggerer core.Triggerer, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - namespace = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - user, _ = request.UserFrom(r.Context()) - ) - number, err := strconv.ParseInt(chi.URLParam(r, "number"), 10, 64) - if err != nil { - render.BadRequest(w, err) - return - } - repo, err := repos.FindName(r.Context(), namespace, name) - if err != nil { - render.NotFound(w, err) - return - } - prev, err := builds.FindNumber(r.Context(), repo.ID, number) - if err != nil { - render.NotFound(w, err) - return - } - - switch prev.Status { - case core.StatusBlocked: - render.BadRequestf(w, "cannot start a blocked build") - return - case core.StatusDeclined: - render.BadRequestf(w, "cannot start a declined build") - return - } - - hook := &core.Hook{ - Parent: prev.Number, - Trigger: user.Login, - Event: prev.Event, - Action: prev.Action, - Link: prev.Link, - Timestamp: prev.Timestamp, - Title: prev.Title, - Message: prev.Message, - Before: prev.Before, - After: prev.After, - Ref: prev.Ref, - Fork: prev.Fork, - Source: prev.Source, - Target: prev.Target, - Author: prev.Author, - AuthorName: prev.AuthorName, - AuthorEmail: prev.AuthorEmail, - AuthorAvatar: prev.AuthorAvatar, - Deployment: prev.Deploy, - DeploymentID: prev.DeployID, - Debug: r.FormValue("debug") == "true", - Cron: prev.Cron, - Sender: prev.Sender, - Params: map[string]string{}, - } - - for key, value := range r.URL.Query() { - if key == "access_token" { - continue - } - if key == "debug" { - continue - } - if len(value) == 0 { - continue - } - hook.Params[key] = value[0] - } - for key, value := range prev.Params { - hook.Params[key] = value - } - - result, err := triggerer.Trigger(r.Context(), repo, hook) - if err != nil { - render.InternalError(w, err) - } else { - render.JSON(w, result, 200) - } - } -} diff --git a/handler/api/repos/builds/retry_test.go b/handler/api/repos/builds/retry_test.go deleted file mode 100644 index 8b5ab46b19..0000000000 --- a/handler/api/repos/builds/retry_test.go +++ /dev/null @@ -1,231 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -package builds - -import ( - "context" - "encoding/json" - "net/http/httptest" - "testing" - - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/handler/api/request" - "github.com/drone/drone/mock" - "github.com/drone/drone/core" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -func TestRetry(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - checkBuild := func(_ context.Context, _ *core.Repository, hook *core.Hook) error { - if got, want := hook.Trigger, mockUser.Login; got != want { - t.Errorf("Want Trigger By %s, got %s", want, got) - } - if got, want := hook.Event, mockBuild.Event; got != want { - t.Errorf("Want Build Event %s, got %s", want, got) - } - if got, want := hook.Link, mockBuild.Link; got != want { - t.Errorf("Want Build Link %s, got %s", want, got) - } - if got, want := hook.Message, mockBuild.Message; got != want { - t.Errorf("Want Build Message %s, got %s", want, got) - } - if got, want := hook.Before, mockBuild.Before; got != want { - t.Errorf("Want Build Before %s, got %s", want, got) - } - if got, want := hook.After, mockBuild.After; got != want { - t.Errorf("Want Build After %s, got %s", want, got) - } - if got, want := hook.Ref, mockBuild.Ref; got != want { - t.Errorf("Want Build Ref %s, got %s", want, got) - } - if got, want := hook.Source, mockBuild.Source; got != want { - t.Errorf("Want Build Source %s, got %s", want, got) - } - if got, want := hook.Target, mockBuild.Target; got != want { - t.Errorf("Want Build Target %s, got %s", want, got) - } - if got, want := hook.Author, mockBuild.Author; got != want { - t.Errorf("Want Build Author %s, got %s", want, got) - } - if got, want := hook.AuthorName, mockBuild.AuthorName; got != want { - t.Errorf("Want Build AuthorName %s, got %s", want, got) - } - if got, want := hook.AuthorEmail, mockBuild.AuthorEmail; got != want { - t.Errorf("Want Build AuthorEmail %s, got %s", want, got) - } - if got, want := hook.AuthorAvatar, mockBuild.AuthorAvatar; got != want { - t.Errorf("Want Build AuthorAvatar %s, got %s", want, got) - } - if got, want := hook.Sender, mockBuild.Sender; got != want { - t.Errorf("Want Build Sender %s, got %s", want, got) - } - return nil - } - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), gomock.Any(), mockRepo.Name).Return(mockRepo, nil) - - builds := mock.NewMockBuildStore(controller) - builds.EXPECT().FindNumber(gomock.Any(), mockRepo.ID, mockBuild.Number).Return(mockBuild, nil) - - triggerer := mock.NewMockTriggerer(controller) - triggerer.EXPECT().Trigger(gomock.Any(), mockRepo, gomock.Any()).Return(mockBuild, nil).Do(checkBuild) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("number", "1") - - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/", nil) - r = r.WithContext( - context.WithValue(request.WithUser(r.Context(), mockUser), chi.RouteCtxKey, c), - ) - - HandleRetry(repos, builds, triggerer)(w, r) - if got, want := w.Code, 200; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(core.Build), mockBuild - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestRetry_InvalidBuildNumber(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - 
c.URLParams.Add("name", "hello-world") - c.URLParams.Add("number", "XLII") - - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/", nil) - r = r.WithContext( - context.WithValue(request.WithUser(r.Context(), mockUser), chi.RouteCtxKey, c), - ) - - HandleRetry(nil, nil, nil)(w, r) - if got, want := w.Code, 400; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), &errors.Error{ - Message: `strconv.ParseInt: parsing "XLII": invalid syntax`, - } - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestRetry_RepoNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), gomock.Any(), mockRepo.Name).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("number", "1") - - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/", nil) - r = r.WithContext( - context.WithValue(request.WithUser(r.Context(), mockUser), chi.RouteCtxKey, c), - ) - - HandleRetry(repos, nil, nil)(w, r) - if got, want := w.Code, 404; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestRetry_BuildNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), gomock.Any(), mockRepo.Name).Return(mockRepo, nil) - - builds := mock.NewMockBuildStore(controller) - builds.EXPECT().FindNumber(gomock.Any(), mockRepo.ID, mockBuild.Number).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("number", "1") - - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/", nil) - r = r.WithContext( - context.WithValue(request.WithUser(r.Context(), mockUser), chi.RouteCtxKey, c), - ) - - HandleRetry(repos, builds, nil)(w, r) - if got, want := w.Code, 404; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestRetry_TriggerError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), gomock.Any(), mockRepo.Name).Return(mockRepo, nil) - - builds := mock.NewMockBuildStore(controller) - builds.EXPECT().FindNumber(gomock.Any(), mockRepo.ID, mockBuild.Number).Return(mockBuild, nil) - - triggerer := mock.NewMockTriggerer(controller) - triggerer.EXPECT().Trigger(gomock.Any(), mockRepo, gomock.Any()).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("number", "1") - - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/", nil) - r = r.WithContext( - context.WithValue(request.WithUser(r.Context(), mockUser), chi.RouteCtxKey, c), - ) - - HandleRetry(repos, builds, triggerer)(w, r) - if got, want := w.Code, 500; want != got { 
- t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/repos/builds/rollback.go b/handler/api/repos/builds/rollback.go deleted file mode 100644 index 794df1320c..0000000000 --- a/handler/api/repos/builds/rollback.go +++ /dev/null @@ -1,103 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package builds - -import ( - "net/http" - "strconv" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/handler/api/request" - - "github.com/go-chi/chi" -) - -// HandleRollback returns an http.HandlerFunc that processes http -// requests to rollback and re-execute a build. -func HandleRollback( - repos core.RepositoryStore, - builds core.BuildStore, - triggerer core.Triggerer, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - environ = r.FormValue("target") - namespace = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - user, _ = request.UserFrom(r.Context()) - ) - number, err := strconv.ParseInt(chi.URLParam(r, "number"), 10, 64) - if err != nil { - render.BadRequest(w, err) - return - } - repo, err := repos.FindName(r.Context(), namespace, name) - if err != nil { - render.NotFound(w, err) - return - } - prev, err := builds.FindNumber(r.Context(), repo.ID, number) - if err != nil { - render.NotFound(w, err) - return - } - if environ == "" { - render.BadRequestf(w, "Missing target environment") - return - } - - hook := &core.Hook{ - Parent: prev.Number, - Trigger: user.Login, - Event: core.EventRollback, - Action: prev.Action, - Link: prev.Link, - Timestamp: prev.Timestamp, - Title: prev.Title, - Message: prev.Message, - Before: prev.Before, - After: prev.After, - Ref: prev.Ref, - Fork: prev.Fork, - Source: prev.Source, - Target: prev.Target, - Author: prev.Author, - AuthorName: prev.AuthorName, - AuthorEmail: prev.AuthorEmail, - AuthorAvatar: prev.AuthorAvatar, - Deployment: environ, - Cron: prev.Cron, - Sender: prev.Sender, - Params: map[string]string{}, - } - - for k, v := range prev.Params { - hook.Params[k] = v - } - - for key, value := range r.URL.Query() { - if key == "access_token" { - continue - } - if key == "target" { - continue - } - if len(value) == 0 { - continue - } - hook.Params[key] = value[0] - } - - result, err := triggerer.Trigger(r.Context(), repo, hook) - if err != nil { - render.InternalError(w, err) - } else { - render.JSON(w, result, 200) - } - } -} diff --git a/handler/api/repos/builds/rollback_oss.go b/handler/api/repos/builds/rollback_oss.go deleted file mode 100644 index 3f1c3217b9..0000000000 --- a/handler/api/repos/builds/rollback_oss.go +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. - -// +build oss - -package builds - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" -) - -var rollbackNotImplemented = func(w http.ResponseWriter, r *http.Request) { - render.NotImplemented(w, render.ErrNotImplemented) -} - -// HandleRollback returns a non-op http.HandlerFunc. -func HandleRollback( - core.RepositoryStore, - core.BuildStore, - core.Triggerer, -) http.HandlerFunc { - return rollbackNotImplemented -} diff --git a/handler/api/repos/builds/rollback_test.go b/handler/api/repos/builds/rollback_test.go deleted file mode 100644 index 3047a80e93..0000000000 --- a/handler/api/repos/builds/rollback_test.go +++ /dev/null @@ -1,7 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package builds diff --git a/handler/api/repos/builds/stages/approve.go b/handler/api/repos/builds/stages/approve.go deleted file mode 100644 index fe3d0af518..0000000000 --- a/handler/api/repos/builds/stages/approve.go +++ /dev/null @@ -1,89 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package stages - -import ( - "context" - "net/http" - "strconv" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - - "github.com/go-chi/chi" -) - -var noContext = context.Background() - -// HandleApprove returns an http.HandlerFunc that processes http -// requests to approve a blocked build that is pending review. 
-func HandleApprove( - repos core.RepositoryStore, - builds core.BuildStore, - stages core.StageStore, - sched core.Scheduler, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - namespace = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - ) - buildNumber, err := strconv.ParseInt(chi.URLParam(r, "number"), 10, 64) - if err != nil { - render.BadRequestf(w, "Invalid build number") - return - } - stageNumber, err := strconv.Atoi(chi.URLParam(r, "stage")) - if err != nil { - render.BadRequestf(w, "Invalid stage number") - return - } - repo, err := repos.FindName(r.Context(), namespace, name) - if err != nil { - render.NotFoundf(w, "Repository not found") - return - } - build, err := builds.FindNumber(r.Context(), repo.ID, buildNumber) - if err != nil { - render.NotFoundf(w, "Build not found") - return - } - stage, err := stages.FindNumber(r.Context(), build.ID, stageNumber) - if err != nil { - render.NotFoundf(w, "Stage not found") - return - } - if stage.Status != core.StatusBlocked { - render.BadRequestf(w, "Cannot approve a Pipeline with Status %q", stage.Status) - return - } - if len(stage.DependsOn) > 0 { - stage.Status = core.StatusWaiting - } else { - stage.Status = core.StatusPending - } - err = stages.Update(r.Context(), stage) - if err != nil { - render.InternalErrorf(w, "There was a problem approving the Pipeline") - return - } - err = sched.Schedule(noContext, stage) - if err != nil { - render.InternalErrorf(w, "There was a problem scheduling the Pipeline") - return - } - w.WriteHeader(http.StatusNoContent) - } -} diff --git a/handler/api/repos/builds/stages/approve_test.go b/handler/api/repos/builds/stages/approve_test.go deleted file mode 100644 index 7f73d34bc9..0000000000 --- a/handler/api/repos/builds/stages/approve_test.go +++ /dev/null @@ -1,628 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -package stages - -import ( - "context" - "database/sql" - "encoding/json" - "io" - "net/http/httptest" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/mock" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -func TestApprove(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockRepo := &core.Repository{ - Namespace: "octocat", - Name: "hello-world", - } - mockBuild := &core.Build{ - ID: 111, - Number: 1, - Status: core.StatusPending, - } - mockStage := &core.Stage{ - ID: 222, - Number: 2, - Status: core.StatusBlocked, - OS: "linux", - Arch: "arm", - } - - checkStage := func(_ context.Context, stage *core.Stage) error { - if stage.Status != core.StatusPending { - t.Errorf("Want stage status changed to Pending") - } - return nil - } - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), mockRepo.Namespace, mockRepo.Name).Return(mockRepo, nil) - - builds := mock.NewMockBuildStore(controller) - builds.EXPECT().FindNumber(gomock.Any(), mockRepo.ID, mockBuild.Number).Return(mockBuild, nil) - - stages := mock.NewMockStageStore(controller) - stages.EXPECT().FindNumber(gomock.Any(), mockBuild.ID, mockStage.Number).Return(mockStage, nil) - stages.EXPECT().Update(gomock.Any(), mockStage).Return(nil).Do(checkStage) - - sched := mock.NewMockScheduler(controller) - sched.EXPECT().Schedule(gomock.Any(), mockStage).Return(nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("number", "1") - c.URLParams.Add("stage", "2") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleApprove(repos, builds, stages, sched)(w, r) - if got, want := w.Code, 204; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } -} - -// this test verifies that a 400 bad request status is returned -// from the http.Handler with a human-readable error message if -// the build status is not Blocked. 
-func TestApprove_InvalidStatus(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockRepo := &core.Repository{ - Namespace: "octocat", - Name: "hello-world", - } - mockBuild := &core.Build{ - ID: 111, - Number: 1, - Status: core.StatusPending, - } - mockStage := &core.Stage{ - ID: 222, - Number: 2, - Status: core.StatusPending, - OS: "linux", - Arch: "arm", - } - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), mockRepo.Namespace, mockRepo.Name).Return(mockRepo, nil) - - builds := mock.NewMockBuildStore(controller) - builds.EXPECT().FindNumber(gomock.Any(), mockRepo.ID, mockBuild.Number).Return(mockBuild, nil) - - stages := mock.NewMockStageStore(controller) - stages.EXPECT().FindNumber(gomock.Any(), mockBuild.ID, mockStage.Number).Return(mockStage, nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("number", "1") - c.URLParams.Add("stage", "2") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleApprove(repos, builds, stages, nil)(w, r) - if got, want := w.Code, 400; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.New(`Cannot approve a Pipeline with Status "pending"`) - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -// this test verifies that a 400 bad request status is returned -// from the http.Handler with a human-readable error message if -// the build number url parameter fails to parse. -func TestApprove_InvalidBuildNumber(t *testing.T) { - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("number", "I") - c.URLParams.Add("stage", "2") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleApprove(nil, nil, nil, nil)(w, r) - if got, want := w.Code, 400; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.New("Invalid build number") - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -// this test verifies that a 400 bad request status is returned -// from the http.Handler with a human-readable error message if -// the stage number url parameter fails to parse. -func TestApprove_InvalidStageNumber(t *testing.T) { - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("number", "1") - c.URLParams.Add("stage", "II") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleApprove(nil, nil, nil, nil)(w, r) - if got, want := w.Code, 400; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.New("Invalid stage number") - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -// this test verifies that a 404 not found status is returned -// from the http.Handler with a human-readable error message if -// the stage is not found in the database. 
-func TestApprove_StageNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockRepo := &core.Repository{ - Namespace: "octocat", - Name: "hello-world", - } - mockBuild := &core.Build{ - ID: 111, - Number: 1, - Status: core.StatusPending, - } - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), mockRepo.Namespace, mockRepo.Name).Return(mockRepo, nil) - - builds := mock.NewMockBuildStore(controller) - builds.EXPECT().FindNumber(gomock.Any(), mockRepo.ID, mockBuild.Number).Return(mockBuild, nil) - - stages := mock.NewMockStageStore(controller) - stages.EXPECT().FindNumber(gomock.Any(), mockBuild.ID, 2).Return(nil, sql.ErrNoRows) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("number", "1") - c.URLParams.Add("stage", "2") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleApprove(repos, builds, stages, nil)(w, r) - if got, want := w.Code, 404; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.New("Stage not found") - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -// this test verifies that a 404 not found status is returned -// from the http.Handler with a human-readable error message if -// the build is not found in the database. -func TestApprove_BuildNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockRepo := &core.Repository{ - Namespace: "octocat", - Name: "hello-world", - } - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), mockRepo.Namespace, mockRepo.Name).Return(mockRepo, nil) - - builds := mock.NewMockBuildStore(controller) - builds.EXPECT().FindNumber(gomock.Any(), mockRepo.ID, int64(1)).Return(nil, sql.ErrNoRows) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("number", "1") - c.URLParams.Add("stage", "2") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleApprove(repos, builds, nil, nil)(w, r) - if got, want := w.Code, 404; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.New("Build not found") - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -// this test verifies that a 404 not found status is returned -// from the http.Handler with a human-readable error message if -// the repository is not found in the database. 
-func TestApprove_RepoNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockRepo := &core.Repository{ - Namespace: "octocat", - Name: "hello-world", - } - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), mockRepo.Namespace, mockRepo.Name).Return(nil, sql.ErrNoRows) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("number", "1") - c.URLParams.Add("stage", "2") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleApprove(repos, nil, nil, nil)(w, r) - if got, want := w.Code, 404; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.New("Repository not found") - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -// this test verifies that a 500 internal error status is returned -// from the http.Handler with a human-readable error message if -// the the server fails to persist stage updates to the database. -func TestApprove_CannotSaveStage(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockRepo := &core.Repository{ - Namespace: "octocat", - Name: "hello-world", - } - mockBuild := &core.Build{ - ID: 111, - Number: 1, - Status: core.StatusPending, - } - mockStage := &core.Stage{ - ID: 222, - Number: 2, - Status: core.StatusBlocked, - OS: "linux", - Arch: "arm", - } - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), mockRepo.Namespace, mockRepo.Name).Return(mockRepo, nil) - - builds := mock.NewMockBuildStore(controller) - builds.EXPECT().FindNumber(gomock.Any(), mockRepo.ID, mockBuild.Number).Return(mockBuild, nil) - - stages := mock.NewMockStageStore(controller) - stages.EXPECT().FindNumber(gomock.Any(), mockBuild.ID, mockStage.Number).Return(mockStage, nil) - stages.EXPECT().Update(gomock.Any(), mockStage).Return(sql.ErrConnDone) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("number", "1") - c.URLParams.Add("stage", "2") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleApprove(repos, builds, stages, nil)(w, r) - if got, want := w.Code, 500; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.New("There was a problem approving the Pipeline") - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -// this test verifies that a 500 internal error status is returned -// from the http.Handler with a human-readable error message if -// the the server fails to enqueue the approved pipeline. 
-func TestApprove_CannotEnqueue(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockRepo := &core.Repository{ - Namespace: "octocat", - Name: "hello-world", - } - mockBuild := &core.Build{ - ID: 111, - Number: 1, - Status: core.StatusPending, - } - mockStage := &core.Stage{ - ID: 222, - Number: 2, - Status: core.StatusBlocked, - OS: "linux", - Arch: "arm", - } - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), mockRepo.Namespace, mockRepo.Name).Return(mockRepo, nil) - - builds := mock.NewMockBuildStore(controller) - builds.EXPECT().FindNumber(gomock.Any(), mockRepo.ID, mockBuild.Number).Return(mockBuild, nil) - - stages := mock.NewMockStageStore(controller) - stages.EXPECT().FindNumber(gomock.Any(), mockBuild.ID, mockStage.Number).Return(mockStage, nil) - stages.EXPECT().Update(gomock.Any(), mockStage).Return(nil) - - sched := mock.NewMockScheduler(controller) - sched.EXPECT().Schedule(gomock.Any(), gomock.Any()).Return(io.EOF) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("number", "1") - c.URLParams.Add("stage", "2") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleApprove(repos, builds, stages, sched)(w, r) - if got, want := w.Code, 500; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.New("There was a problem scheduling the Pipeline") - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} -func TestApprove_ParallelStages(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockRepo := &core.Repository{ - Namespace: "octocat", - Name: "hello-world", - } - mockBuild := &core.Build{ - ID: 111, - Number: 1, - Status: core.StatusPending, - } - mockStage := &core.Stage{ - ID: 222, - Number: 2, - Status: core.StatusBlocked, - OS: "linux", - Arch: "arm", - } - mockStage2 := &core.Stage{ - ID: 333, - Number: 3, - Status: core.StatusBlocked, - OS: "linux", - Arch: "arm", - } - - checkPendingStage := func(_ context.Context, stage *core.Stage) error { - if stage.Status != core.StatusPending { - t.Errorf("Want stage status changed to Pending") - } - return nil - } - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), mockRepo.Namespace, mockRepo.Name).Return(mockRepo, nil).Times(2) - - builds := mock.NewMockBuildStore(controller) - builds.EXPECT().FindNumber(gomock.Any(), mockRepo.ID, mockBuild.Number).Return(mockBuild, nil).Times(2) - - stages := mock.NewMockStageStore(controller) - stages.EXPECT().FindNumber(gomock.Any(), mockBuild.ID, mockStage.Number).Return(mockStage, nil) - stages.EXPECT().Update(gomock.Any(), mockStage).Return(nil).Do(checkPendingStage) - - stages.EXPECT().FindNumber(gomock.Any(), mockBuild.ID, mockStage2.Number).Return(mockStage2, nil) - stages.EXPECT().Update(gomock.Any(), mockStage2).Return(nil).Do(checkPendingStage) - - sched := mock.NewMockScheduler(controller) - sched.EXPECT().Schedule(gomock.Any(), mockStage).Return(nil) - sched.EXPECT().Schedule(gomock.Any(), mockStage2).Return(nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("number", "1") - c.URLParams.Add("stage", "2") - - w := httptest.NewRecorder() - r := 
httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleApprove(repos, builds, stages, sched)(w, r) - if got, want := w.Code, 204; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - // now approve the second stage - c2 := new(chi.Context) - c2.URLParams.Add("owner", "octocat") - c2.URLParams.Add("name", "hello-world") - c2.URLParams.Add("number", "1") - c2.URLParams.Add("stage", "3") - - w = httptest.NewRecorder() - r = httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c2), - ) - - HandleApprove(repos, builds, stages, sched)(w, r) - if got, want := w.Code, 204; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } -} - -// test for a build that has 2 stages, where the second stage depends on the first, and we approve both of them -func TestApprove_DependantStages(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockRepo := &core.Repository{ - Namespace: "octocat", - Name: "hello-world", - } - mockBuild := &core.Build{ - ID: 111, - Number: 1, - Status: core.StatusPending, - } - mockStage := &core.Stage{ - ID: 222, - Number: 2, - Status: core.StatusBlocked, - OS: "linux", - Arch: "arm", - } - mockStage2 := &core.Stage{ - ID: 333, - Number: 3, - Status: core.StatusBlocked, - OS: "linux", - Arch: "arm", - DependsOn: []string{"2"}, - } - - checkPendingStage := func(_ context.Context, stage *core.Stage) error { - if stage.Status != core.StatusPending { - t.Errorf("Want stage status changed to Pending") - } - return nil - } - - checkWaitingStage := func(_ context.Context, stage *core.Stage) error { - if stage.Status != core.StatusWaiting { - t.Errorf("Want stage status changed to waiting") - } - return nil - } - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), mockRepo.Namespace, mockRepo.Name).Return(mockRepo, nil).Times(2) - - builds := mock.NewMockBuildStore(controller) - builds.EXPECT().FindNumber(gomock.Any(), mockRepo.ID, mockBuild.Number).Return(mockBuild, nil).Times(2) - - stages := mock.NewMockStageStore(controller) - stages.EXPECT().FindNumber(gomock.Any(), mockBuild.ID, mockStage.Number).Return(mockStage, nil) - stages.EXPECT().Update(gomock.Any(), mockStage).Return(nil).Do(checkPendingStage) - - stages.EXPECT().FindNumber(gomock.Any(), mockBuild.ID, mockStage2.Number).Return(mockStage2, nil) - stages.EXPECT().Update(gomock.Any(), mockStage2).Return(nil).Do(checkWaitingStage) - - sched := mock.NewMockScheduler(controller) - sched.EXPECT().Schedule(gomock.Any(), mockStage).Return(nil) - sched.EXPECT().Schedule(gomock.Any(), mockStage2).Return(nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("number", "1") - c.URLParams.Add("stage", "2") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleApprove(repos, builds, stages, sched)(w, r) - if got, want := w.Code, 204; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - // now approve the second stage - c2 := new(chi.Context) - c2.URLParams.Add("owner", "octocat") - c2.URLParams.Add("name", "hello-world") - c2.URLParams.Add("number", "1") - c2.URLParams.Add("stage", "3") - - w = httptest.NewRecorder() - r = httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - 
context.WithValue(context.Background(), chi.RouteCtxKey, c2), - ) - - HandleApprove(repos, builds, stages, sched)(w, r) - if got, want := w.Code, 204; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } -} diff --git a/handler/api/repos/builds/stages/decline.go b/handler/api/repos/builds/stages/decline.go deleted file mode 100644 index 7ff0c7b3ca..0000000000 --- a/handler/api/repos/builds/stages/decline.go +++ /dev/null @@ -1,89 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package stages - -import ( - "fmt" - "net/http" - "strconv" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - - "github.com/go-chi/chi" -) - -// HandleDecline returns an http.HandlerFunc that processes http -// requests to decline a blocked build that is pending review. -func HandleDecline( - repos core.RepositoryStore, - builds core.BuildStore, - stages core.StageStore, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - namespace = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - ) - buildNumber, err := strconv.ParseInt(chi.URLParam(r, "number"), 10, 64) - if err != nil { - render.BadRequestf(w, "Invalid build number") - return - } - stageNumber, err := strconv.Atoi(chi.URLParam(r, "stage")) - if err != nil { - render.BadRequestf(w, "Invalid stage number") - return - } - repo, err := repos.FindName(r.Context(), namespace, name) - if err != nil { - render.NotFoundf(w, "Repository not found") - return - } - build, err := builds.FindNumber(r.Context(), repo.ID, buildNumber) - if err != nil { - render.NotFoundf(w, "Build not found") - return - } - stage, err := stages.FindNumber(r.Context(), build.ID, stageNumber) - if err != nil { - render.NotFoundf(w, "Stage not found") - return - } - if stage.Status != core.StatusBlocked { - err := fmt.Errorf("Cannot decline build with status %q", stage.Status) - render.BadRequest(w, err) - return - } - stage.Status = core.StatusDeclined - err = stages.Update(r.Context(), stage) - if err != nil { - render.InternalError(w, err) - return - } - build.Status = core.StatusDeclined - err = builds.Update(r.Context(), build) - if err != nil { - render.InternalError(w, err) - return - } - - // TODO delete any pending stages from the build queue - // TODO update any pending stages to skipped in the database - // TODO update the build status to error in the source code management system - - w.WriteHeader(http.StatusNoContent) - } -} diff --git a/handler/api/repos/builds/stages/decline_test.go b/handler/api/repos/builds/stages/decline_test.go deleted file mode 100644 index d822dd0b60..0000000000 --- a/handler/api/repos/builds/stages/decline_test.go +++ /dev/null @@ -1,263 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -package stages - -import ( - "context" - "database/sql" - "encoding/json" - "net/http/httptest" - "testing" - - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/mock" - "github.com/drone/drone/core" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -// this test verifies that a 400 bad request status is returned -// from the http.Handler with a human-readable error message if -// the build number url parameter fails to parse. -func TestDecline_InvalidBuildNumber(t *testing.T) { - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("number", "I") - c.URLParams.Add("stage", "2") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleDecline(nil, nil, nil)(w, r) - if got, want := w.Code, 400; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.New("Invalid build number") - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -// this test verifies that a 400 bad request status is returned -// from the http.Handler with a human-readable error message if -// the stage number url parameter fails to parse. -func TestDecline_InvalidStageNumber(t *testing.T) { - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("number", "1") - c.URLParams.Add("stage", "II") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleDecline(nil, nil, nil)(w, r) - if got, want := w.Code, 400; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.New("Invalid stage number") - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -// this test verifies that a 404 not found status is returned -// from the http.Handler with a human-readable error message if -// the repository is not found in the database. -func TestDecline_RepoNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockRepo := &core.Repository{ - Namespace: "octocat", - Name: "hello-world", - } - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), mockRepo.Namespace, mockRepo.Name).Return(nil, sql.ErrNoRows) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("number", "1") - c.URLParams.Add("stage", "2") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleDecline(repos, nil, nil)(w, r) - if got, want := w.Code, 404; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.New("Repository not found") - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -// this test verifies that a 404 not found status is returned -// from the http.Handler with a human-readable error message if -// the build is not found in the database. 
-func TestDecline_BuildNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockRepo := &core.Repository{ - Namespace: "octocat", - Name: "hello-world", - } - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), mockRepo.Namespace, mockRepo.Name).Return(mockRepo, nil) - - builds := mock.NewMockBuildStore(controller) - builds.EXPECT().FindNumber(gomock.Any(), mockRepo.ID, int64(1)).Return(nil, sql.ErrNoRows) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("number", "1") - c.URLParams.Add("stage", "2") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleDecline(repos, builds, nil)(w, r) - if got, want := w.Code, 404; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.New("Build not found") - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -// this test verifies that a 404 not found status is returned -// from the http.Handler with a human-readable error message if -// the stage is not found in the database. -func TestDecline_StageNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockRepo := &core.Repository{ - Namespace: "octocat", - Name: "hello-world", - } - mockBuild := &core.Build{ - ID: 111, - Number: 1, - Status: core.StatusPending, - } - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), mockRepo.Namespace, mockRepo.Name).Return(mockRepo, nil) - - builds := mock.NewMockBuildStore(controller) - builds.EXPECT().FindNumber(gomock.Any(), mockRepo.ID, int64(1)).Return(mockBuild, nil) - - stages := mock.NewMockStageStore(controller) - stages.EXPECT().FindNumber(gomock.Any(), mockBuild.ID, 2).Return(nil, sql.ErrNoRows) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("number", "1") - c.URLParams.Add("stage", "2") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleDecline(repos, builds, stages)(w, r) - if got, want := w.Code, 404; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.New("Stage not found") - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -// this test verifies that a 400 bad request status is returned -// from the http.Handler with a human-readable error message if -// the build status is not Blocked. 
-func TestDecline_InvalidStatus(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockRepo := &core.Repository{ - Namespace: "octocat", - Name: "hello-world", - } - mockBuild := &core.Build{ - ID: 111, - Number: 1, - Status: core.StatusPending, - } - mockStage := &core.Stage{ - ID: 222, - Number: 2, - Status: core.StatusPending, - } - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), mockRepo.Namespace, mockRepo.Name).Return(mockRepo, nil) - - builds := mock.NewMockBuildStore(controller) - builds.EXPECT().FindNumber(gomock.Any(), mockRepo.ID, int64(1)).Return(mockBuild, nil) - - stages := mock.NewMockStageStore(controller) - stages.EXPECT().FindNumber(gomock.Any(), mockBuild.ID, 2).Return(mockStage, nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("number", "1") - c.URLParams.Add("stage", "2") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleDecline(repos, builds, stages)(w, r) - if got, want := w.Code, 400; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.New(`Cannot decline build with status "pending"`) - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/repos/chown.go b/handler/api/repos/chown.go deleted file mode 100644 index efd0f39950..0000000000 --- a/handler/api/repos/chown.go +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package repos - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/handler/api/request" - "github.com/drone/drone/logger" - - "github.com/go-chi/chi" -) - -// HandleChown returns an http.HandlerFunc that processes http -// requests to chown the repository to the currently authenticated user. -func HandleChown(repos core.RepositoryStore) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - owner = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - ) - - repo, err := repos.FindName(r.Context(), owner, name) - if err != nil { - render.NotFound(w, err) - logger.FromRequest(r). - WithError(err). - WithField("namespace", owner). - WithField("name", name). - Debugln("api: repository not found") - return - } - - user, _ := request.UserFrom(r.Context()) - repo.UserID = user.ID - - err = repos.Update(r.Context(), repo) - if err != nil { - render.InternalError(w, err) - logger.FromRequest(r). - WithError(err). - WithField("namespace", owner). - WithField("name", name). 
- Debugln("api: cannot chown repository") - } else { - render.JSON(w, repo, 200) - } - } -} diff --git a/handler/api/repos/chown_test.go b/handler/api/repos/chown_test.go deleted file mode 100644 index 1d85ac9f76..0000000000 --- a/handler/api/repos/chown_test.go +++ /dev/null @@ -1,125 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package repos - -import ( - "context" - "encoding/json" - "net/http/httptest" - "testing" - - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/handler/api/request" - "github.com/drone/drone/mock" - "github.com/drone/drone/core" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -func TestChown(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - user := &core.User{ - ID: 42, - } - repo := &core.Repository{ - ID: 1, - UserID: 1, - } - - checkChown := func(_ context.Context, updated *core.Repository) error { - if got, want := updated.UserID, user.ID; got != want { - t.Errorf("Want repository owner updated to %d, got %d", want, got) - } - return nil - } - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), "octocat", "hello-world").Return(repo, nil) - repos.EXPECT().Update(gomock.Any(), repo).Return(nil).Do(checkChown) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/", nil) - r = r.WithContext( - context.WithValue(request.WithUser(r.Context(), user), chi.RouteCtxKey, c), - ) - - HandleChown(repos)(w, r) - if got, want := w.Code, 200; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := &core.Repository{}, repo - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) > 0 { - t.Errorf(diff) - } -} - -func TestChown_RepoNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), "octocat", "hello-world").Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/", nil) - r = r.WithContext( - context.WithValue(request.WithUser(r.Context(), &core.User{}), chi.RouteCtxKey, c), - ) - - HandleChown(repos)(w, r) - if got, want := w.Code, 404; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestChown_Error(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), "octocat", "hello-world").Return(&core.Repository{}, nil) - repos.EXPECT().Update(gomock.Any(), gomock.Any()).Return(errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/", nil) - r = r.WithContext( - context.WithValue(request.WithUser(r.Context(), &core.User{}), chi.RouteCtxKey, c), - ) - - HandleChown(repos)(w, r) - if got, want := 
w.Code, 500; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/repos/collabs/find.go b/handler/api/repos/collabs/find.go deleted file mode 100644 index e2dd450663..0000000000 --- a/handler/api/repos/collabs/find.go +++ /dev/null @@ -1,67 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package collabs - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/logger" - - "github.com/go-chi/chi" -) - -// HandleFind returns an http.HandlerFunc that writes a json-encoded -// repository collaborator details to the response body. -func HandleFind( - users core.UserStore, - repos core.RepositoryStore, - members core.PermStore, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - login = chi.URLParam(r, "member") - namespace = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - ) - - repo, err := repos.FindName(r.Context(), namespace, name) - if err != nil { - render.NotFound(w, err) - logger.FromRequest(r). - WithError(err). - WithField("namespace", namespace). - WithField("name", name). - Debugln("api: repository not found") - return - } - user, err := users.FindLogin(r.Context(), login) - if err != nil { - render.NotFound(w, err) - logger.FromRequest(r). - WithError(err). - WithField("namespace", namespace). - WithField("name", name). - WithField("member", login). - Debugln("api: user not found") - return - } - member, err := members.Find(r.Context(), repo.UID, user.ID) - if err != nil { - render.NotFound(w, err) - logger.FromRequest(r). - WithError(err). - WithField("member", login). - WithField("namespace", namespace). - WithField("name", name). - Debugln("api: membership not found") - return - } - render.JSON(w, member, 200) - } -} diff --git a/handler/api/repos/collabs/find_test.go b/handler/api/repos/collabs/find_test.go deleted file mode 100644 index 015861696a..0000000000 --- a/handler/api/repos/collabs/find_test.go +++ /dev/null @@ -1,162 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -// +build !oss - -package collabs - -import ( - "context" - "encoding/json" - "io/ioutil" - "net/http" - "net/http/httptest" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/mock" - "github.com/sirupsen/logrus" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -func init() { - logrus.SetOutput(ioutil.Discard) -} - -func TestFind(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - users := mock.NewMockUserStore(controller) - repos := mock.NewMockRepositoryStore(controller) - perms := mock.NewMockPermStore(controller) - repos.EXPECT().FindName(gomock.Any(), mockRepo.Namespace, mockRepo.Name).Return(mockRepo, nil) - users.EXPECT().FindLogin(gomock.Any(), "octocat").Return(mockUser, nil) - perms.EXPECT().Find(gomock.Any(), mockRepo.UID, mockUser.ID).Return(mockMember, nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("member", "octocat") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleFind(users, repos, perms)(w, r) - if got, want := w.Code, http.StatusOK; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := &core.Perm{}, mockMember - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestFind_RepoNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - users := mock.NewMockUserStore(controller) - repos := mock.NewMockRepositoryStore(controller) - members := mock.NewMockPermStore(controller) - repos.EXPECT().FindName(gomock.Any(), mockRepo.Namespace, mockRepo.Name).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("member", "octocat") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleFind(users, repos, members)(w, r) - if got, want := w.Code, http.StatusNotFound; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := &errors.Error{}, errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestFind_UserNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - users := mock.NewMockUserStore(controller) - repos := mock.NewMockRepositoryStore(controller) - members := mock.NewMockPermStore(controller) - repos.EXPECT().FindName(gomock.Any(), mockRepo.Namespace, mockRepo.Name).Return(mockRepo, nil) - users.EXPECT().FindLogin(gomock.Any(), "octocat").Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("member", "octocat") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleFind(users, repos, members)(w, r) - if got, want := w.Code, http.StatusNotFound; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := &errors.Error{}, errors.ErrNotFound - 
json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestFind_MemberNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - users := mock.NewMockUserStore(controller) - repos := mock.NewMockRepositoryStore(controller) - members := mock.NewMockPermStore(controller) - repos.EXPECT().FindName(gomock.Any(), mockRepo.Namespace, mockRepo.Name).Return(mockRepo, nil) - users.EXPECT().FindLogin(gomock.Any(), "octocat").Return(mockUser, nil) - members.EXPECT().Find(gomock.Any(), mockRepo.UID, mockUser.ID).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("member", "octocat") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleFind(users, repos, members)(w, r) - if got, want := w.Code, http.StatusNotFound; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := &errors.Error{}, errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/repos/collabs/list.go b/handler/api/repos/collabs/list.go deleted file mode 100644 index 4868e685c7..0000000000 --- a/handler/api/repos/collabs/list.go +++ /dev/null @@ -1,53 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package collabs - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/logger" - - "github.com/go-chi/chi" -) - -// HandleList returns an http.HandlerFunc that write a json-encoded -// list of repository collaborators to the response body. -func HandleList( - repos core.RepositoryStore, - members core.PermStore, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - namespace = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - ) - - repo, err := repos.FindName(r.Context(), namespace, name) - if err != nil { - render.NotFound(w, err) - logger.FromRequest(r). - WithError(err). - WithField("namespace", namespace). - WithField("name", name). - Debugln("api: repository not found") - return - } - members, err := members.List(r.Context(), repo.UID) - if err != nil { - render.InternalError(w, err) - logger.FromRequest(r). - WithError(err). - WithField("namespace", namespace). - WithField("name", name). - Warnln("api: cannot get member list") - } else { - render.JSON(w, members, 200) - } - } -} diff --git a/handler/api/repos/collabs/list_test.go b/handler/api/repos/collabs/list_test.go deleted file mode 100644 index e8b073a0af..0000000000 --- a/handler/api/repos/collabs/list_test.go +++ /dev/null @@ -1,150 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -// +build !oss - -package collabs - -import ( - "context" - "encoding/json" - "net/http" - "net/http/httptest" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/mock" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -var ( - mockUser = &core.User{ - ID: 1, - Login: "octocat", - } - - mockRepo = &core.Repository{ - ID: 1, - UID: "42", - Namespace: "octocat", - Name: "hello-world", - } - - mockMember = &core.Perm{ - Read: true, - Write: true, - Admin: true, - } - - mockMembers = []*core.Collaborator{ - { - Login: "octocat", - Read: true, - Write: true, - Admin: true, - }, - { - Login: "spaceghost", - Read: true, - Write: true, - Admin: true, - }, - } -) - -func TestList(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - members := mock.NewMockPermStore(controller) - repos.EXPECT().FindName(gomock.Any(), mockRepo.Namespace, mockRepo.Name).Return(mockRepo, nil) - members.EXPECT().List(gomock.Any(), mockRepo.UID).Return(mockMembers, nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleList(repos, members)(w, r) - if got, want := w.Code, 200; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := []*core.Collaborator{}, mockMembers - json.NewDecoder(w.Body).Decode(&got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestList_NotFoundError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - members := mock.NewMockPermStore(controller) - repos.EXPECT().FindName(gomock.Any(), mockRepo.Namespace, mockRepo.Name).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleList(repos, members)(w, r) - if got, want := w.Code, http.StatusNotFound; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := &errors.Error{}, errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestList_InternalError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - members := mock.NewMockPermStore(controller) - repos.EXPECT().FindName(gomock.Any(), mockRepo.Namespace, mockRepo.Name).Return(mockRepo, nil) - members.EXPECT().List(gomock.Any(), mockRepo.UID).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleList(repos, members)(w, r) - if got, want := w.Code, http.StatusInternalServerError; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := &errors.Error{}, errors.ErrNotFound - 
json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/repos/collabs/none.go b/handler/api/repos/collabs/none.go deleted file mode 100644 index 47c62a78d7..0000000000 --- a/handler/api/repos/collabs/none.go +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build oss - -package collabs - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" -) - -var notImplemented = func(w http.ResponseWriter, r *http.Request) { - render.NotImplemented(w, render.ErrNotImplemented) -} - -func HandleDelete(core.UserStore, core.RepositoryStore, core.PermStore) http.HandlerFunc { - return notImplemented -} - -func HandleFind(core.UserStore, core.RepositoryStore, core.PermStore) http.HandlerFunc { - return notImplemented -} - -func HandleList(core.RepositoryStore, core.PermStore) http.HandlerFunc { - return notImplemented -} diff --git a/handler/api/repos/collabs/remove.go b/handler/api/repos/collabs/remove.go deleted file mode 100644 index dd3328abf4..0000000000 --- a/handler/api/repos/collabs/remove.go +++ /dev/null @@ -1,79 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package collabs - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/logger" - - "github.com/go-chi/chi" -) - -// HandleDelete returns an http.HandlerFunc that processes -// a request to delete account membership to a repository. This should -// only be used if the datastore is out-of-sync with github. -func HandleDelete( - users core.UserStore, - repos core.RepositoryStore, - members core.PermStore, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - login = chi.URLParam(r, "member") - namespace = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - ) - - repo, err := repos.FindName(r.Context(), namespace, name) - if err != nil { - render.NotFound(w, err) - logger.FromRequest(r). - WithError(err). - WithField("namespace", namespace). - WithField("name", name). - Debugln("api: repository not found") - return - } - user, err := users.FindLogin(r.Context(), login) - if err != nil { - render.NotFound(w, err) - logger.FromRequest(r). - WithError(err). - WithField("member", login). - WithField("namespace", namespace). - WithField("name", name). - Debugln("api: user not found") - return - } - member, err := members.Find(r.Context(), repo.UID, user.ID) - if err != nil { - render.NotFound(w, err) - logger.FromRequest(r). - WithError(err). - WithField("member", member). - WithField("namespace", namespace). - WithField("name", name). 
- Debugln("api: membership not found") - return - } - err = members.Delete(r.Context(), member) - if err != nil { - render.InternalError(w, err) - logger.FromRequest(r). - WithError(err). - WithField("member", login). - WithField("namespace", namespace). - WithField("name", name). - Debugln("api: cannot delete membership") - } else { - w.WriteHeader(http.StatusNoContent) - } - } -} diff --git a/handler/api/repos/collabs/remove_test.go b/handler/api/repos/collabs/remove_test.go deleted file mode 100644 index 109ed74abf..0000000000 --- a/handler/api/repos/collabs/remove_test.go +++ /dev/null @@ -1,185 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package collabs - -import ( - "context" - "encoding/json" - "net/http" - "net/http/httptest" - "testing" - - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/mock" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -func TestDelete(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - users := mock.NewMockUserStore(controller) - repos := mock.NewMockRepositoryStore(controller) - members := mock.NewMockPermStore(controller) - repos.EXPECT().FindName(gomock.Any(), mockRepo.Namespace, mockRepo.Name).Return(mockRepo, nil) - users.EXPECT().FindLogin(gomock.Any(), "octocat").Return(mockUser, nil) - members.EXPECT().Find(gomock.Any(), mockRepo.UID, mockUser.ID).Return(mockMember, nil) - members.EXPECT().Delete(gomock.Any(), mockMember).Return(nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("member", "octocat") - - w := httptest.NewRecorder() - r := httptest.NewRequest("DELETE", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleDelete(users, repos, members)(w, r) - if got, want := w.Code, http.StatusNoContent; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } -} - -func TestDelete_UserNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - users := mock.NewMockUserStore(controller) - repos := mock.NewMockRepositoryStore(controller) - members := mock.NewMockPermStore(controller) - repos.EXPECT().FindName(gomock.Any(), mockRepo.Namespace, mockRepo.Name).Return(mockRepo, nil) - users.EXPECT().FindLogin(gomock.Any(), "octocat").Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("member", "octocat") - - w := httptest.NewRecorder() - r := httptest.NewRequest("DELETE", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleDelete(users, repos, members)(w, r) - if got, want := w.Code, http.StatusNotFound; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := &errors.Error{}, errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestDelete_RepoNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - users := mock.NewMockUserStore(controller) - repos := mock.NewMockRepositoryStore(controller) - members := mock.NewMockPermStore(controller) - repos.EXPECT().FindName(gomock.Any(), mockRepo.Namespace, 
mockRepo.Name).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("member", "octocat") - - w := httptest.NewRecorder() - r := httptest.NewRequest("DELETE", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleDelete(users, repos, members)(w, r) - if got, want := w.Code, http.StatusNotFound; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := &errors.Error{}, errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestDelete_MemberNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - users := mock.NewMockUserStore(controller) - repos := mock.NewMockRepositoryStore(controller) - members := mock.NewMockPermStore(controller) - repos.EXPECT().FindName(gomock.Any(), mockRepo.Namespace, mockRepo.Name).Return(mockRepo, nil) - users.EXPECT().FindLogin(gomock.Any(), "octocat").Return(mockUser, nil) - members.EXPECT().Find(gomock.Any(), mockRepo.UID, mockUser.ID).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("member", "octocat") - - w := httptest.NewRecorder() - r := httptest.NewRequest("DELETE", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleDelete(users, repos, members)(w, r) - if got, want := w.Code, http.StatusNotFound; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := &errors.Error{}, errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestDelete_InternalError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - users := mock.NewMockUserStore(controller) - repos := mock.NewMockRepositoryStore(controller) - members := mock.NewMockPermStore(controller) - repos.EXPECT().FindName(gomock.Any(), mockRepo.Namespace, mockRepo.Name).Return(mockRepo, nil) - users.EXPECT().FindLogin(gomock.Any(), "octocat").Return(mockUser, nil) - members.EXPECT().Find(gomock.Any(), mockRepo.UID, mockUser.ID).Return(mockMember, nil) - members.EXPECT().Delete(gomock.Any(), mockMember).Return(errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("member", "octocat") - - w := httptest.NewRecorder() - r := httptest.NewRequest("DELETE", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleDelete(users, repos, members)(w, r) - if got, want := w.Code, http.StatusInternalServerError; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := &errors.Error{}, errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/repos/crons/create.go b/handler/api/repos/crons/create.go deleted file mode 100644 index bffa767887..0000000000 --- a/handler/api/repos/crons/create.go +++ /dev/null @@ -1,65 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -// +build !oss - -package crons - -import ( - "encoding/json" - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - - "github.com/go-chi/chi" -) - -// HandleCreate returns an http.HandlerFunc that processes http -// requests to create a new cronjob. -func HandleCreate( - repos core.RepositoryStore, - crons core.CronStore, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - namespace = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - ) - repo, err := repos.FindName(r.Context(), namespace, name) - if err != nil { - render.NotFound(w, err) - return - } - in := new(core.Cron) - err = json.NewDecoder(r.Body).Decode(in) - if err != nil { - render.BadRequest(w, err) - return - } - cronjob := new(core.Cron) - cronjob.Event = core.EventPush - cronjob.Branch = in.Branch - cronjob.RepoID = repo.ID - cronjob.SetName(in.Name) - err = cronjob.SetExpr(in.Expr) - if err != nil { - render.BadRequest(w, err) - return - } - - err = cronjob.Validate() - if err != nil { - render.BadRequest(w, err) - return - } - - err = crons.Create(r.Context(), cronjob) - if err != nil { - render.InternalError(w, err) - return - } - render.JSON(w, cronjob, 200) - } -} diff --git a/handler/api/repos/crons/create_test.go b/handler/api/repos/crons/create_test.go deleted file mode 100644 index 113c6ebb77..0000000000 --- a/handler/api/repos/crons/create_test.go +++ /dev/null @@ -1,224 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package crons - -import ( - "bytes" - "context" - "encoding/json" - "net/http" - "net/http/httptest" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/mock" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" - "github.com/google/go-cmp/cmp/cmpopts" -) - -func TestHandleCreate(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), dummyCronRepo.Namespace, dummyCronRepo.Name).Return(dummyCronRepo, nil) - - crons := mock.NewMockCronStore(controller) - crons.EXPECT().Create(gomock.Any(), gomock.Any()).Return(nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("cron", "nightly") - - in := new(bytes.Buffer) - json.NewEncoder(in).Encode(dummyCron) - - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/", in) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleCreate(repos, crons)(w, r) - if got, want := w.Code, http.StatusOK; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := &core.Cron{}, dummyCron - json.NewDecoder(w.Body).Decode(got) - - ignore := cmpopts.IgnoreFields(core.Cron{}, "Next") - if diff := cmp.Diff(got, want, ignore); len(diff) != 0 { - t.Errorf(diff) - } - if got.Next == 0 { - t.Errorf("Expect next execution date scheduled") - } -} - -func TestHandleCreate_ValidationError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), dummyCronRepo.Namespace, dummyCronRepo.Name).Return(dummyCronRepo, nil) - - c := new(chi.Context) - 
c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - in := new(bytes.Buffer) - json.NewEncoder(in).Encode(&core.Cron{Name: "", Expr: "* * * * *"}) - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", in) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleCreate(repos, nil).ServeHTTP(w, r) - if got, want := w.Code, http.StatusBadRequest; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := &errors.Error{}, &errors.Error{Message: "Invalid Cronjob Name"} - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleCreate_BadExpression(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), dummyCronRepo.Namespace, dummyCronRepo.Name).Return(dummyCronRepo, nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - in := new(bytes.Buffer) - json.NewEncoder(in).Encode(&core.Cron{Name: "", Expr: "a b c d e"}) - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", in) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleCreate(repos, nil).ServeHTTP(w, r) - if got, want := w.Code, http.StatusBadRequest; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := &errors.Error{}, &errors.Error{Message: "Invalid Cronjob Expression"} - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleCreate_BadRequest(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), dummyCronRepo.Namespace, dummyCronRepo.Name).Return(dummyCronRepo, nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleCreate(repos, nil).ServeHTTP(w, r) - if got, want := w.Code, http.StatusBadRequest; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := &errors.Error{}, &errors.Error{Message: "EOF"} - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleCreate_RepoNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), dummyCronRepo.Namespace, dummyCronRepo.Name).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleCreate(repos, nil).ServeHTTP(w, r) - if got, want := w.Code, http.StatusNotFound; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleCreate_CreateError(t 
*testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), dummyCronRepo.Namespace, dummyCronRepo.Name).Return(dummyCronRepo, nil) - - crons := mock.NewMockCronStore(controller) - crons.EXPECT().Create(gomock.Any(), gomock.Any()).Return(errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("cron", "nightly") - - in := new(bytes.Buffer) - json.NewEncoder(in).Encode(dummyCron) - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", in) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleCreate(repos, crons).ServeHTTP(w, r) - if got, want := w.Code, http.StatusInternalServerError; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/repos/crons/delete.go b/handler/api/repos/crons/delete.go deleted file mode 100644 index 5a67a90b69..0000000000 --- a/handler/api/repos/crons/delete.go +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package crons - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - - "github.com/go-chi/chi" -) - -// HandleDelete returns an http.HandlerFunc that processes http -// requests to delete the cron job. -func HandleDelete( - repos core.RepositoryStore, - crons core.CronStore, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - namespace = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - cron = chi.URLParam(r, "cron") - ) - repo, err := repos.FindName(r.Context(), namespace, name) - if err != nil { - render.NotFound(w, err) - return - } - cronjob, err := crons.FindName(r.Context(), repo.ID, cron) - if err != nil { - render.NotFound(w, err) - return - } - err = crons.Delete(r.Context(), cronjob) - if err != nil { - render.InternalError(w, err) - return - } - w.WriteHeader(http.StatusNoContent) - } -} diff --git a/handler/api/repos/crons/delete_test.go b/handler/api/repos/crons/delete_test.go deleted file mode 100644 index 26a0a89efb..0000000000 --- a/handler/api/repos/crons/delete_test.go +++ /dev/null @@ -1,147 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -// +build !oss - -package crons - -import ( - "context" - "encoding/json" - "net/http" - "net/http/httptest" - "testing" - - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/mock" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -func TestHandleDelete(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), dummyCronRepo.Namespace, dummyCronRepo.Name).Return(dummyCronRepo, nil) - - crons := mock.NewMockCronStore(controller) - crons.EXPECT().FindName(gomock.Any(), dummyCronRepo.ID, dummyCron.Name).Return(dummyCron, nil) - crons.EXPECT().Delete(gomock.Any(), dummyCron).Return(nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("cron", "nightly") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleDelete(repos, crons).ServeHTTP(w, r) - if got, want := w.Code, http.StatusNoContent; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } -} - -func TestHandleDelete_RepoNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), dummyCronRepo.Namespace, dummyCronRepo.Name).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("cron", "nightly") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleDelete(repos, nil).ServeHTTP(w, r) - if got, want := w.Code, http.StatusNotFound; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleDelete_CronNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), dummyCronRepo.Namespace, dummyCronRepo.Name).Return(dummyCronRepo, nil) - - crons := mock.NewMockCronStore(controller) - crons.EXPECT().FindName(gomock.Any(), dummyCronRepo.ID, dummyCron.Name).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("cron", "nightly") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleDelete(repos, crons).ServeHTTP(w, r) - if got, want := w.Code, http.StatusNotFound; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleDelete_DeleteError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), dummyCronRepo.Namespace, dummyCronRepo.Name).Return(dummyCronRepo, 
nil) - - crons := mock.NewMockCronStore(controller) - crons.EXPECT().FindName(gomock.Any(), dummyCronRepo.ID, dummyCron.Name).Return(dummyCron, nil) - crons.EXPECT().Delete(gomock.Any(), dummyCron).Return(errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("cron", "nightly") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleDelete(repos, crons).ServeHTTP(w, r) - if got, want := w.Code, http.StatusInternalServerError; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/repos/crons/exec.go b/handler/api/repos/crons/exec.go deleted file mode 100644 index b533ec725c..0000000000 --- a/handler/api/repos/crons/exec.go +++ /dev/null @@ -1,101 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package crons - -import ( - "context" - "fmt" - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/sirupsen/logrus" - - "github.com/go-chi/chi" -) - -// HandleExec returns an http.HandlerFunc that processes http -// requests to execute a cronjob on-demand. -func HandleExec( - users core.UserStore, - repos core.RepositoryStore, - crons core.CronStore, - commits core.CommitService, - trigger core.Triggerer, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - ctx = r.Context() - namespace = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - cron = chi.URLParam(r, "cron") - ) - - repo, err := repos.FindName(ctx, namespace, name) - if err != nil { - render.NotFound(w, err) - return - } - - cronjob, err := crons.FindName(ctx, repo.ID, cron) - if err != nil { - render.NotFound(w, err) - logger := logrus.WithError(err) - logger.Debugln("api: cannot find cron") - return - } - - user, err := users.Find(ctx, repo.UserID) - if err != nil { - logger := logrus.WithError(err) - logger.Debugln("api: cannot find repository owner") - render.NotFound(w, err) - return - } - - commit, err := commits.FindRef(ctx, user, repo.Slug, cronjob.Branch) - if err != nil { - logger := logrus.WithError(err). - WithField("namespace", repo.Namespace). - WithField("name", repo.Name). - WithField("cron", cronjob.Name) - logger.Debugln("api: cannot find commit") - render.NotFound(w, err) - return - } - - hook := &core.Hook{ - Trigger: core.TriggerCron, - Event: core.EventCron, - Link: commit.Link, - Timestamp: commit.Author.Date, - Message: commit.Message, - After: commit.Sha, - Ref: fmt.Sprintf("refs/heads/%s", cronjob.Branch), - Target: cronjob.Branch, - Author: commit.Author.Login, - AuthorName: commit.Author.Name, - AuthorEmail: commit.Author.Email, - AuthorAvatar: commit.Author.Avatar, - Cron: cronjob.Name, - Sender: commit.Author.Login, - } - - build, err := trigger.Trigger(context.Background(), repo, hook) - if err != nil { - logger := logrus.WithError(err). - WithField("namespace", repo.Namespace). - WithField("name", repo.Name). 
- WithField("cron", cronjob.Name) - logger.Debugln("api: cannot trigger cron") - render.InternalError(w, err) - return - } - - render.JSON(w, build, 200) - } -} diff --git a/handler/api/repos/crons/exec_test.go b/handler/api/repos/crons/exec_test.go deleted file mode 100644 index 403d96f1ac..0000000000 --- a/handler/api/repos/crons/exec_test.go +++ /dev/null @@ -1,7 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package crons diff --git a/handler/api/repos/crons/find.go b/handler/api/repos/crons/find.go deleted file mode 100644 index c98cc37f74..0000000000 --- a/handler/api/repos/crons/find.go +++ /dev/null @@ -1,43 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -//go:build !oss -// +build !oss - -package crons - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - - "github.com/go-chi/chi" -) - -// HandleFind returns an http.HandlerFunc that writes json-encoded -// cronjob details to the response body. -func HandleFind( - repos core.RepositoryStore, - crons core.CronStore, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - namespace = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - cron = chi.URLParam(r, "cron") - ) - repo, err := repos.FindName(r.Context(), namespace, name) - if err != nil { - render.NotFound(w, err) - return - } - cronjob, err := crons.FindName(r.Context(), repo.ID, cron) - if err != nil { - render.NotFound(w, err) - return - } - render.JSON(w, cronjob, 200) - } -} diff --git a/handler/api/repos/crons/find_test.go b/handler/api/repos/crons/find_test.go deleted file mode 100644 index 942789f1f5..0000000000 --- a/handler/api/repos/crons/find_test.go +++ /dev/null @@ -1,119 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -// +build !oss - -package crons - -import ( - "context" - "encoding/json" - "net/http" - "net/http/httptest" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/mock" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -func TestHandleFind(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), dummyCronRepo.Namespace, dummyCronRepo.Name).Return(dummyCronRepo, nil) - - crons := mock.NewMockCronStore(controller) - crons.EXPECT().FindName(gomock.Any(), dummyCronRepo.ID, dummyCron.Name).Return(dummyCron, nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("cron", "nightly") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleFind(repos, crons).ServeHTTP(w, r) - if got, want := w.Code, http.StatusOK; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := &core.Cron{}, dummyCron - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleFind_RepoNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), dummyCronRepo.Namespace, dummyCronRepo.Name).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("cron", "nightly") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleFind(repos, nil).ServeHTTP(w, r) - if got, want := w.Code, http.StatusNotFound; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleFind_CronNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), dummyCronRepo.Namespace, dummyCronRepo.Name).Return(dummyCronRepo, nil) - - crons := mock.NewMockCronStore(controller) - crons.EXPECT().FindName(gomock.Any(), dummyCronRepo.ID, dummyCron.Name).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("cron", "nightly") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleFind(repos, crons).ServeHTTP(w, r) - if got, want := w.Code, http.StatusNotFound; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/repos/crons/list.go b/handler/api/repos/crons/list.go deleted file mode 100644 index 06af147e8a..0000000000 --- a/handler/api/repos/crons/list.go +++ 
/dev/null @@ -1,41 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package crons - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - - "github.com/go-chi/chi" -) - -// HandleList returns an http.HandlerFunc that writes a json-encoded -// list of cron jobs to the response body. -func HandleList( - repos core.RepositoryStore, - crons core.CronStore, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - namespace = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - ) - repo, err := repos.FindName(r.Context(), namespace, name) - if err != nil { - render.NotFound(w, err) - return - } - list, err := crons.List(r.Context(), repo.ID) - if err != nil { - render.NotFound(w, err) - return - } - render.JSON(w, list, 200) - } -} diff --git a/handler/api/repos/crons/list_test.go b/handler/api/repos/crons/list_test.go deleted file mode 100644 index 7762cd52cf..0000000000 --- a/handler/api/repos/crons/list_test.go +++ /dev/null @@ -1,137 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package crons - -import ( - "context" - "encoding/json" - "net/http" - "net/http/httptest" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/mock" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -var ( - dummyCronRepo = &core.Repository{ - ID: 1, - Namespace: "octocat", - Name: "hello-world", - } - - dummyCron = &core.Cron{ - RepoID: 1, - Event: core.EventPush, - Name: "nightly", - Expr: "* * * * * *", - Next: 0, - Branch: "master", - } - - dummyCronList = []*core.Cron{ - dummyCron, - } -) - -func TestHandleList(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), dummyCronRepo.Namespace, dummyCronRepo.Name).Return(dummyCronRepo, nil) - - crons := mock.NewMockCronStore(controller) - crons.EXPECT().List(gomock.Any(), dummyCronRepo.ID).Return(dummyCronList, nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleList(repos, crons).ServeHTTP(w, r) - if got, want := w.Code, http.StatusOK; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := []*core.Cron{}, dummyCronList - json.NewDecoder(w.Body).Decode(&got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleList_RepoNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), dummyCronRepo.Namespace, dummyCronRepo.Name).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - 
- HandleList(repos, nil).ServeHTTP(w, r) - if got, want := w.Code, http.StatusNotFound; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleList_CronListErr(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), dummyCronRepo.Namespace, dummyCronRepo.Name).Return(dummyCronRepo, nil) - - crons := mock.NewMockCronStore(controller) - crons.EXPECT().List(gomock.Any(), dummyCronRepo.ID).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleList(repos, crons).ServeHTTP(w, r) - if got, want := w.Code, http.StatusNotFound; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/repos/crons/none.go b/handler/api/repos/crons/none.go deleted file mode 100644 index fe375541bd..0000000000 --- a/handler/api/repos/crons/none.go +++ /dev/null @@ -1,53 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build oss - -package crons - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" -) - -var notImplemented = func(w http.ResponseWriter, r *http.Request) { - render.NotImplemented(w, render.ErrNotImplemented) -} - -func HandleCreate(core.RepositoryStore, core.CronStore) http.HandlerFunc { - return notImplemented -} - -func HandleUpdate(core.RepositoryStore, core.CronStore) http.HandlerFunc { - return notImplemented -} - -func HandleDelete(core.RepositoryStore, core.CronStore) http.HandlerFunc { - return notImplemented -} - -func HandleFind(core.RepositoryStore, core.CronStore) http.HandlerFunc { - return notImplemented -} - -func HandleList(core.RepositoryStore, core.CronStore) http.HandlerFunc { - return notImplemented -} - -func HandleExec(core.UserStore, core.RepositoryStore, core.CronStore, - core.CommitService, core.Triggerer) http.HandlerFunc { - return notImplemented -} diff --git a/handler/api/repos/crons/update.go b/handler/api/repos/crons/update.go deleted file mode 100644 index 4cf49baa11..0000000000 --- a/handler/api/repos/crons/update.go +++ /dev/null @@ -1,67 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
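none.go above is the OSS half of the build-tag split used throughout this package: files tagged `!oss` carry the real handlers, and the `oss` build swaps in stubs that always answer 501 Not Implemented. A minimal two-file sketch of the same pattern with the newer //go:build syntax (the package and handler names here are made up):

// feature_pro.go (built for everything except the oss tag)

//go:build !oss

package feature

import "net/http"

// HandleThing is the real handler.
func HandleThing() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Write([]byte("full implementation"))
	}
}

// feature_oss.go (built only when the oss tag is set)

//go:build oss

package feature

import "net/http"

// HandleThing keeps the same signature but always answers 501,
// like the notImplemented stubs in the removed none.go.
func HandleThing() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		http.Error(w, "not implemented", http.StatusNotImplemented)
	}
}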
- -// +build !oss - -package crons - -import ( - "encoding/json" - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - - "github.com/go-chi/chi" -) - -type cronUpdate struct { - Branch *string `json:"branch"` - Target *string `json:"target"` - Disabled *bool `json:"disabled"` -} - -// HandleUpdate returns an http.HandlerFunc that processes http -// requests to enable or disable a cron job. -func HandleUpdate( - repos core.RepositoryStore, - crons core.CronStore, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - namespace = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - cron = chi.URLParam(r, "cron") - ) - repo, err := repos.FindName(r.Context(), namespace, name) - if err != nil { - render.NotFound(w, err) - return - } - cronjob, err := crons.FindName(r.Context(), repo.ID, cron) - if err != nil { - render.NotFound(w, err) - return - } - - in := new(cronUpdate) - json.NewDecoder(r.Body).Decode(in) - if in.Branch != nil { - cronjob.Branch = *in.Branch - } - if in.Target != nil { - cronjob.Target = *in.Target - } - if in.Disabled != nil { - cronjob.Disabled = *in.Disabled - } - - err = crons.Update(r.Context(), cronjob) - if err != nil { - render.InternalError(w, err) - return - } - render.JSON(w, cronjob, 200) - } -} diff --git a/handler/api/repos/crons/update_test.go b/handler/api/repos/crons/update_test.go deleted file mode 100644 index 0160458358..0000000000 --- a/handler/api/repos/crons/update_test.go +++ /dev/null @@ -1,164 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package crons - -import ( - "bytes" - "context" - "encoding/json" - "net/http" - "net/http/httptest" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/mock" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -func TestHandleUpdate(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockCron := new(core.Cron) - *mockCron = *dummyCron - mockCron.Disabled = false - mockCron.Branch = "develop" - mockCron.Target = "staging" - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), dummyCronRepo.Namespace, dummyCronRepo.Name).Return(dummyCronRepo, nil) - - crons := mock.NewMockCronStore(controller) - crons.EXPECT().FindName(gomock.Any(), dummyCronRepo.ID, mockCron.Name).Return(mockCron, nil) - crons.EXPECT().Update(gomock.Any(), mockCron).Return(nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("cron", "nightly") - - in := new(bytes.Buffer) - json.NewEncoder(in).Encode(mockCron) - - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/", in) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleUpdate(repos, crons).ServeHTTP(w, r) - if got, want := w.Code, http.StatusOK; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := &core.Cron{}, mockCron - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleUpdate_RepoNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - 
repos.EXPECT().FindName(gomock.Any(), dummyCronRepo.Namespace, dummyCronRepo.Name).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("cron", "nightly") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleUpdate(repos, nil).ServeHTTP(w, r) - if got, want := w.Code, http.StatusNotFound; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleUpdate_CronNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), dummyCronRepo.Namespace, dummyCronRepo.Name).Return(dummyCronRepo, nil) - - crons := mock.NewMockCronStore(controller) - crons.EXPECT().FindName(gomock.Any(), dummyCronRepo.ID, dummyCron.Name).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("cron", "nightly") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleUpdate(repos, crons).ServeHTTP(w, r) - if got, want := w.Code, http.StatusNotFound; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleUpdate_UpdateError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), dummyCronRepo.Namespace, dummyCronRepo.Name).Return(dummyCronRepo, nil) - - crons := mock.NewMockCronStore(controller) - crons.EXPECT().FindName(gomock.Any(), dummyCronRepo.ID, dummyCron.Name).Return(dummyCron, nil) - crons.EXPECT().Update(gomock.Any(), dummyCron).Return(errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("cron", "nightly") - - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleUpdate(repos, crons).ServeHTTP(w, r) - if got, want := w.Code, http.StatusInternalServerError; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/repos/disable.go b/handler/api/repos/disable.go deleted file mode 100644 index d449c027e3..0000000000 --- a/handler/api/repos/disable.go +++ /dev/null @@ -1,91 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package repos - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/logger" - - "github.com/go-chi/chi" -) - -// HandleDisable returns an http.HandlerFunc that processes http -// requests to disable a repository in the system. -func HandleDisable( - repos core.RepositoryStore, - sender core.WebhookSender, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - owner = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - ) - - repo, err := repos.FindName(r.Context(), owner, name) - if err != nil { - render.NotFound(w, err) - logger.FromRequest(r). - WithError(err). - WithField("namespace", owner). - WithField("name", name). - Debugln("api: repository not found") - return - } - repo.Active = false - err = repos.Update(r.Context(), repo) - if err != nil { - render.InternalError(w, err) - logger.FromRequest(r). - WithError(err). - WithField("namespace", owner). - WithField("name", name). - Warnln("api: cannot update repository") - return - } - - action := core.WebhookActionDisabled - if r.FormValue("remove") == "true" { - action = core.WebhookActionDeleted - err = repos.Delete(r.Context(), repo) - if err != nil { - render.InternalError(w, err) - logger.FromRequest(r). - WithError(err). - WithField("namespace", owner). - WithField("name", name). - Warnln("api: cannot delete repository") - return - } - } - - err = sender.Send(r.Context(), &core.WebhookData{ - Event: core.WebhookEventRepo, - Action: action, - Repo: repo, - }) - if err != nil { - logger.FromRequest(r). - WithError(err). - WithField("namespace", owner). - WithField("name", name). - Warnln("api: cannot send webhook") - } - - render.JSON(w, repo, 200) - } -} diff --git a/handler/api/repos/disable_test.go b/handler/api/repos/disable_test.go deleted file mode 100644 index 7f4f687a59..0000000000 --- a/handler/api/repos/disable_test.go +++ /dev/null @@ -1,163 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package repos - -import ( - "encoding/json" - "io" - "net/http" - "net/http/httptest" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/mock" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -func TestDisable(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repo := &core.Repository{ - ID: 1, - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - Active: true, - } - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), gomock.Any(), repo.Name).Return(repo, nil) - repos.EXPECT().Update(gomock.Any(), repo).Return(nil) - - // a failed webhook should result in a warning message in the - // logs, but should not cause the endpoint to error. 
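Two details of the removed HandleDisable are easy to miss: the ?remove=true query flag upgrades the disable into a hard delete (and switches the webhook action accordingly), and a failed webhook send is only logged, never turned into an HTTP error. A sketch of just that tail end, using the core types shown above but the standard log package instead of the logger helper:

package repos

import (
	"log"
	"net/http"

	"github.com/drone/drone/core"
)

// notifyDisabled restates the end of the removed HandleDisable: pick the
// webhook action from the ?remove=true flag and treat a send failure as
// best effort.
func notifyDisabled(r *http.Request, sender core.WebhookSender, repo *core.Repository) {
	action := core.WebhookActionDisabled
	if r.FormValue("remove") == "true" {
		action = core.WebhookActionDeleted
	}
	err := sender.Send(r.Context(), &core.WebhookData{
		Event:  core.WebhookEventRepo,
		Action: action,
		Repo:   repo,
	})
	if err != nil {
		// the caller still answers 200; the failure is only worth a log line.
		log.Printf("api: cannot send webhook: %v", err)
	}
}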
- webhook := mock.NewMockWebhookSender(controller) - webhook.EXPECT().Send(gomock.Any(), gomock.Any()).Return(io.EOF) - - w := httptest.NewRecorder() - r := httptest.NewRequest("DELETE", "/api/repos/octocat/hello-world", nil) - - router := chi.NewRouter() - router.Delete("/api/repos/{owner}/{name}", HandleDisable(repos, webhook)) - router.ServeHTTP(w, r) - - if got, want := w.Code, 200; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - if got, want := repo.Active, false; got != want { - t.Errorf("Want repository activate %v, got %v", want, got) - } - - got, want := new(core.Repository), repo - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestDisable_NotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), "octocat", "hello-world").Return(nil, errors.ErrNotFound) - - w := httptest.NewRecorder() - r := httptest.NewRequest("DELETE", "/api/repos/octocat/hello-world", nil) - - router := chi.NewRouter() - router.Delete("/api/repos/{owner}/{name}", HandleDisable(repos, nil)) - router.ServeHTTP(w, r) - - if got, want := w.Code, 404; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestDisable_InternalError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repo := &core.Repository{ - ID: 1, - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - Active: false, - } - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), gomock.Any(), repo.Name).Return(repo, nil) - repos.EXPECT().Update(gomock.Any(), repo).Return(errors.ErrNotFound) - - w := httptest.NewRecorder() - r := httptest.NewRequest("DELETE", "/api/repos/octocat/hello-world", nil) - - router := chi.NewRouter() - router.Delete("/api/repos/{owner}/{name}", HandleDisable(repos, nil)) - router.ServeHTTP(w, r) - - if got, want := w.Code, http.StatusInternalServerError; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestDelete(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repo := &core.Repository{ - ID: 1, - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - Active: true, - } - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), gomock.Any(), repo.Name).Return(repo, nil) - repos.EXPECT().Update(gomock.Any(), repo).Return(nil) - repos.EXPECT().Delete(gomock.Any(), repo).Return(nil) - - // a failed webhook should result in a warning message in the - // logs, but should not cause the endpoint to error. 
- webhook := mock.NewMockWebhookSender(controller) - webhook.EXPECT().Send(gomock.Any(), gomock.Any()).Return(io.EOF) - - w := httptest.NewRecorder() - r := httptest.NewRequest("DELETE", "/api/repos/octocat/hello-world?remove=true", nil) - - router := chi.NewRouter() - router.Delete("/api/repos/{owner}/{name}", HandleDisable(repos, webhook)) - router.ServeHTTP(w, r) - - if got, want := w.Code, 200; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(core.Repository), repo - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/repos/enable.go b/handler/api/repos/enable.go deleted file mode 100644 index 2564414aa1..0000000000 --- a/handler/api/repos/enable.go +++ /dev/null @@ -1,126 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package repos - -import ( - "net/http" - "os" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/handler/api/request" - "github.com/drone/drone/logger" - - "github.com/dchest/uniuri" - "github.com/go-chi/chi" -) - -// FEATURE FLAG enables a static secret value used to sign -// incoming requests routed through a proxy. This was implemented -// based on feedback from @chiraggadasc and should not be -// removed until we have a permanent solution in place. -var staticSigner = os.Getenv("DRONE_FEATURE_SERVER_PROXY_SECRET") - -// HandleEnable returns an http.HandlerFunc that processes http -// requests to enable a repository in the system. -func HandleEnable( - hooks core.HookService, - repos core.RepositoryStore, - sender core.WebhookSender, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - owner = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - ) - user, _ := request.UserFrom(r.Context()) - repo, err := repos.FindName(r.Context(), owner, name) - if err != nil { - render.NotFound(w, err) - logger.FromRequest(r). - WithError(err). - WithField("namespace", owner). - WithField("name", name). - Debugln("api: repository not found") - return - } - repo.Active = true - repo.UserID = user.ID - - if repo.Config == "" { - repo.Config = ".drone.yml" - } - if repo.Signer == "" { - repo.Signer = uniuri.NewLen(32) - } - if repo.Secret == "" { - repo.Secret = uniuri.NewLen(32) - } - if repo.Timeout == 0 { - repo.Timeout = 60 - } - - if staticSigner != "" { - repo.Signer = staticSigner - } - - err = hooks.Create(r.Context(), user, repo) - if err != nil { - render.InternalError(w, err) - logger.FromRequest(r). - WithError(err). - WithField("namespace", owner). - WithField("name", name). - Debugln("api: cannot create or update hook") - return - } - - err = repos.Activate(r.Context(), repo) - if err == core.ErrRepoLimit { - render.ErrorCode(w, err, 402) - logger.FromRequest(r). - WithError(err). - WithField("namespace", owner). - WithField("name", name). 
- Errorln("api: cannot activate repository") - return - } - if err != nil { - render.InternalError(w, err) - logger.FromRequest(r). - WithError(err). - WithField("namespace", owner). - WithField("name", name). - Debugln("api: cannot activate repository") - return - } - - err = sender.Send(r.Context(), &core.WebhookData{ - Event: core.WebhookEventRepo, - Action: core.WebhookActionEnabled, - User: user, - Repo: repo, - }) - if err != nil { - logger.FromRequest(r). - WithError(err). - WithField("namespace", owner). - WithField("name", name). - Warnln("api: cannot send webhook") - } - - render.JSON(w, repo, 200) - } -} diff --git a/handler/api/repos/enable_test.go b/handler/api/repos/enable_test.go deleted file mode 100644 index 88689b54fe..0000000000 --- a/handler/api/repos/enable_test.go +++ /dev/null @@ -1,181 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package repos - -import ( - "context" - "encoding/json" - "io" - "net/http" - "net/http/httptest" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/handler/api/request" - "github.com/drone/drone/mock" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" - "github.com/google/go-cmp/cmp/cmpopts" -) - -func TestEnable(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repo := &core.Repository{ - ID: 1, - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - } - - service := mock.NewMockHookService(controller) - service.EXPECT().Create(gomock.Any(), gomock.Any(), repo).Return(nil) - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), repo.Namespace, repo.Name).Return(repo, nil) - repos.EXPECT().Activate(gomock.Any(), repo).Return(nil) - - // a failed webhook should result in a warning message in the - // logs, but should not cause the endpoint to error. 
- webhook := mock.NewMockWebhookSender(controller) - webhook.EXPECT().Send(gomock.Any(), gomock.Any()).Return(io.EOF) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/", nil) - r = r.WithContext( - context.WithValue(request.WithUser(r.Context(), &core.User{ID: 1}), chi.RouteCtxKey, c), - ) - - HandleEnable(service, repos, webhook)(w, r) - if got, want := w.Code, 200; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - if got, want := repo.Active, true; got != want { - t.Errorf("Want repository activate %v, got %v", want, got) - } - - got, want := new(core.Repository), repo - json.NewDecoder(w.Body).Decode(got) - diff := cmp.Diff(got, want, cmpopts.IgnoreFields(core.Repository{}, "Secret", "Signer")) - if diff != "" { - t.Errorf(diff) - } -} - -func TestEnable_RepoNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), mockRepo.Namespace, mockRepo.Name).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleEnable(nil, repos, nil)(w, r) - if got, want := w.Code, 404; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestEnable_HookError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repo := &core.Repository{ - ID: 1, - Namespace: "octocat", - Name: "hello-world", - Active: false, - } - - service := mock.NewMockHookService(controller) - service.EXPECT().Create(gomock.Any(), gomock.Any(), repo).Return(io.EOF) - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), repo.Namespace, repo.Name).Return(repo, nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/", nil) - r = r.WithContext( - context.WithValue(request.WithUser(r.Context(), &core.User{ID: 1}), chi.RouteCtxKey, c), - ) - - HandleEnable(service, repos, nil)(w, r) - if got, want := w.Code, http.StatusInternalServerError; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), &errors.Error{Message: "EOF"} - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestEnable_ActivateError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repo := &core.Repository{ - ID: 1, - Namespace: "octocat", - Name: "hello-world", - } - - service := mock.NewMockHookService(controller) - service.EXPECT().Create(gomock.Any(), gomock.Any(), repo).Return(nil) - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), repo.Namespace, repo.Name).Return(repo, nil) - repos.EXPECT().Activate(gomock.Any(), repo).Return(errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := 
httptest.NewRecorder() - r := httptest.NewRequest("POST", "/", nil) - r = r.WithContext( - context.WithValue(request.WithUser(r.Context(), &core.User{ID: 1}), chi.RouteCtxKey, c), - ) - - HandleEnable(service, repos, nil)(w, r) - if got, want := w.Code, http.StatusInternalServerError; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/repos/encrypt/encrypt.go b/handler/api/repos/encrypt/encrypt.go deleted file mode 100644 index 3ad156a7fa..0000000000 --- a/handler/api/repos/encrypt/encrypt.go +++ /dev/null @@ -1,91 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package encrypt - -import ( - "crypto/aes" - "crypto/cipher" - "crypto/rand" - "encoding/base64" - "encoding/json" - "io" - "net/http" - - "github.com/drone/drone-go/drone" - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/go-chi/chi" -) - -type respEncrypted struct { - Data string `json:"data"` -} - -// Handler returns an http.HandlerFunc that processes http -// requests to create an encrypted secret. -func Handler(repos core.RepositoryStore) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - namespace := chi.URLParam(r, "owner") - name := chi.URLParam(r, "name") - repo, err := repos.FindName(r.Context(), namespace, name) - if err != nil { - render.NotFound(w, err) - return - } - - in := new(drone.Secret) - err = json.NewDecoder(r.Body).Decode(in) - if err != nil { - render.BadRequest(w, err) - return - } - - // the secret is encrypted with a per-repository 256-bit - // key. If the key is missing or malformed we should - // return an error to the client. - encrypted, err := encrypt([]byte(in.Data), []byte(repo.Secret)) - if err != nil { - render.InternalError(w, err) - return - } - - // the encrypted secret is embedded in the yaml - // configuration file and is json-encoded for - // inclusion as a !binary attribute. 
- encoded := base64.StdEncoding.EncodeToString(encrypted) - - render.JSON(w, &respEncrypted{Data: encoded}, 200) - } -} - -func encrypt(plaintext, key []byte) (ciphertext []byte, err error) { - block, err := aes.NewCipher(key) - if err != nil { - return nil, err - } - - gcm, err := cipher.NewGCM(block) - if err != nil { - return nil, err - } - - nonce := make([]byte, gcm.NonceSize()) - _, err = io.ReadFull(rand.Reader, nonce) - if err != nil { - return nil, err - } - - return gcm.Seal(nonce, nonce, plaintext, nil), nil -} diff --git a/handler/api/repos/encrypt/encrypt_test.go b/handler/api/repos/encrypt/encrypt_test.go deleted file mode 100644 index 7e5445a872..0000000000 --- a/handler/api/repos/encrypt/encrypt_test.go +++ /dev/null @@ -1 +0,0 @@ -package encrypt diff --git a/handler/api/repos/find.go b/handler/api/repos/find.go deleted file mode 100644 index 79171a5dc4..0000000000 --- a/handler/api/repos/find.go +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package repos - -import ( - "net/http" - - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/handler/api/request" -) - -// HandleFind returns an http.HandlerFunc that writes the -// json-encoded repository details to the response body. -func HandleFind() http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - repo, _ := request.RepoFrom(ctx) - perm, _ := request.PermFrom(ctx) - repo.Perms = perm - render.JSON(w, repo, 200) - } -} diff --git a/handler/api/repos/find_test.go b/handler/api/repos/find_test.go deleted file mode 100644 index cded024eb2..0000000000 --- a/handler/api/repos/find_test.go +++ /dev/null @@ -1,76 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
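The encrypt helper removed above is standard AES-GCM with a random nonce prepended to the ciphertext, and the handler base64-encodes the result for the JSON response. A self-contained round-trip sketch; the decrypt half is not part of the removed file and is added here only to show the inverse of the nonce-prefix layout:

package main

import (
	"crypto/aes"
	"crypto/cipher"
	"crypto/rand"
	"encoding/base64"
	"errors"
	"fmt"
	"io"
)

// encrypt matches the removed helper: AES-GCM with a random nonce
// prepended to the ciphertext. The key must be 16, 24 or 32 bytes.
func encrypt(plaintext, key []byte) ([]byte, error) {
	block, err := aes.NewCipher(key)
	if err != nil {
		return nil, err
	}
	gcm, err := cipher.NewGCM(block)
	if err != nil {
		return nil, err
	}
	nonce := make([]byte, gcm.NonceSize())
	if _, err := io.ReadFull(rand.Reader, nonce); err != nil {
		return nil, err
	}
	return gcm.Seal(nonce, nonce, plaintext, nil), nil
}

// decrypt is the inverse (not in the removed file): split off the nonce
// that encrypt prepended, then open the remaining ciphertext.
func decrypt(ciphertext, key []byte) ([]byte, error) {
	block, err := aes.NewCipher(key)
	if err != nil {
		return nil, err
	}
	gcm, err := cipher.NewGCM(block)
	if err != nil {
		return nil, err
	}
	if len(ciphertext) < gcm.NonceSize() {
		return nil, errors.New("ciphertext too short")
	}
	nonce, body := ciphertext[:gcm.NonceSize()], ciphertext[gcm.NonceSize():]
	return gcm.Open(nil, nonce, body, nil)
}

func main() {
	key := []byte("0123456789abcdef0123456789abcdef") // 32 bytes = AES-256
	out, err := encrypt([]byte("super-secret"), key)
	if err != nil {
		panic(err)
	}
	fmt.Println(base64.StdEncoding.EncodeToString(out)) // what the API returns
	back, err := decrypt(out, key)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(back)) // super-secret
}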
- -package repos - -import ( - "context" - "encoding/json" - "io/ioutil" - "net/http/httptest" - "testing" - - "github.com/drone/drone/handler/api/request" - "github.com/drone/drone/core" - "github.com/sirupsen/logrus" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -func init() { - logrus.SetOutput(ioutil.Discard) -} - -var ( - mockRepo = &core.Repository{ - ID: 1, - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - Counter: 42, - Branch: "master", - } - - mockRepos = []*core.Repository{ - { - ID: 1, - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - }, - { - ID: 1, - Namespace: "octocat", - Name: "spoon-knife", - Slug: "octocat/spoon-knife", - }, - } -) - -func TestFind(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/api/repos/octocat/hello-world", nil) - r = r.WithContext(request.WithRepo( - context.Background(), mockRepo, - )) - - router := chi.NewRouter() - router.Get("/api/repos/{owner}/{name}", HandleFind()) - router.ServeHTTP(w, r) - - if got, want := w.Code, 200; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(core.Repository), mockRepo - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/repos/repair.go b/handler/api/repos/repair.go deleted file mode 100644 index 8c94078ebc..0000000000 --- a/handler/api/repos/repair.go +++ /dev/null @@ -1,112 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package repos - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/logger" - - "github.com/go-chi/chi" -) - -// HandleRepair returns an http.HandlerFunc that processes http -// requests to repair the repository hooks and sync the repository -// details. -func HandleRepair( - hooks core.HookService, - repoz core.RepositoryService, - repos core.RepositoryStore, - users core.UserStore, - link string, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - owner = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - ) - - repo, err := repos.FindName(r.Context(), owner, name) - if err != nil { - render.NotFound(w, err) - logger.FromRequest(r). - WithError(err). - WithField("namespace", owner). - WithField("name", name). - Debugln("api: repository not found") - return - } - - user, err := users.Find(r.Context(), repo.UserID) - if err != nil { - render.NotFound(w, err) - logger.FromRequest(r). - WithError(err). - WithField("namespace", owner). - WithField("name", name). - Warnln("api: cannot find repository owner") - return - } - - remote, err := repoz.Find(r.Context(), user, repo.Slug) - if err != nil { - render.NotFound(w, err) - logger.FromRequest(r). - WithError(err). 
- WithField("namespace", owner). - WithField("name", name). - Warnln("api: remote repository not found") - return - } - - repo.Branch = remote.Branch - repo.HTTPURL = remote.HTTPURL - repo.Private = remote.Private - repo.SSHURL = remote.SSHURL - - // the gitea and gogs repository endpoints do not - // return the http url, so we need to ensure we do - // not replace the existing value with a zero value. - if remote.Link != "" { - repo.Link = remote.Link - } - - err = repos.Update(r.Context(), repo) - if err != nil { - render.InternalError(w, err) - logger.FromRequest(r). - WithError(err). - WithField("namespace", owner). - WithField("name", name). - Warnln("api: cannot chown repository") - return - } - - err = hooks.Create(r.Context(), user, repo) - if err != nil { - render.InternalError(w, err) - logger.FromRequest(r). - WithError(err). - WithField("namespace", owner). - WithField("name", name). - Debugln("api: cannot create or update hook") - return - } - - render.JSON(w, repo, 200) - } -} diff --git a/handler/api/repos/repair_test.go b/handler/api/repos/repair_test.go deleted file mode 100644 index aacddd1b95..0000000000 --- a/handler/api/repos/repair_test.go +++ /dev/null @@ -1,352 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. -package repos - -import ( - "context" - "encoding/json" - "net/http/httptest" - "testing" - - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/mock" - "github.com/drone/drone/core" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -func TestRepair(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - user := &core.User{ - ID: 1, - } - repo := &core.Repository{ - ID: 1, - UserID: 1, - Private: true, - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - } - remoteRepo := &core.Repository{ - Branch: "master", - Private: false, - HTTPURL: "https://github.com/octocat/hello-world.git", - SSHURL: "git@github.com:octocat/hello-world.git", - Link: "https://github.com/octocat/hello-world", - } - - checkRepair := func(_ context.Context, updated *core.Repository) error { - if got, want := updated.Branch, remoteRepo.Branch; got != want { - t.Errorf("Want repository Branch updated to %s, got %s", want, got) - } - if got, want := updated.Private, remoteRepo.Private; got != want { - t.Errorf("Want repository Private updated to %v, got %v", want, got) - } - if got, want := updated.HTTPURL, remoteRepo.HTTPURL; got != want { - t.Errorf("Want repository Clone updated to %s, got %s", want, got) - } - if got, want := updated.SSHURL, remoteRepo.SSHURL; got != want { - t.Errorf("Want repository CloneSSH updated to %s, got %s", want, got) - } - if got, want := updated.Link, remoteRepo.Link; got != want { - t.Errorf("Want repository Link updated to %s, got %s", want, got) - } - return nil - } - - users := mock.NewMockUserStore(controller) - users.EXPECT().Find(gomock.Any(), repo.UserID).Return(user, nil) - - hooks := mock.NewMockHookService(controller) - hooks.EXPECT().Create(gomock.Any(), gomock.Any(), repo).Return(nil) - - repoz := mock.NewMockRepositoryService(controller) - repoz.EXPECT().Find(gomock.Any(), user, repo.Slug).Return(remoteRepo, nil) - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), "octocat", "hello-world").Return(repo, nil) - repos.EXPECT().Update(gomock.Any(), 
repo).Return(nil).Do(checkRepair) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/", nil) - r = r.WithContext( - context.WithValue(r.Context(), chi.RouteCtxKey, c), - ) - - HandleRepair(hooks, repoz, repos, users, "https://company.drone.io")(w, r) - if got, want := w.Code, 200; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(core.Repository), &core.Repository{ - ID: 1, - UserID: 1, - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - Branch: "master", - Private: false, - HTTPURL: "https://github.com/octocat/hello-world.git", - SSHURL: "git@github.com:octocat/hello-world.git", - Link: "https://github.com/octocat/hello-world", - } - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) > 0 { - t.Errorf(diff) - } -} - -// this test verifies that a 404 not found error is returned -// from the http.Handler if the named repository cannot be -// found in the local database. -func TestRepair_LocalRepoNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), "octocat", "hello-world").Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/", nil) - r = r.WithContext( - context.WithValue(r.Context(), chi.RouteCtxKey, c), - ) - - HandleRepair(nil, nil, repos, nil, "https://company.drone.io")(w, r) - if got, want := w.Code, 404; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -// this test verifies that a 404 not found error is returned -// from the http.Handler if the remote repository cannot be -// found (e.g. in GitHub). 
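The field sync in the removed HandleRepair copies the branch, visibility and clone URLs from the remote record but deliberately keeps the existing Link when the remote returns an empty one, since the gitea and gogs endpoints do not supply it. Restated as a small helper (a sketch only, not the original code layout):

package repos

import "github.com/drone/drone/core"

// syncRemote restates the copy step in the removed HandleRepair: adopt the
// remote branch, visibility and clone URLs, but never overwrite the Link
// with an empty value.
func syncRemote(local, remote *core.Repository) {
	local.Branch = remote.Branch
	local.Private = remote.Private
	local.HTTPURL = remote.HTTPURL
	local.SSHURL = remote.SSHURL
	if remote.Link != "" {
		local.Link = remote.Link
	}
}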
-func TestRepair_RemoteRepoNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - user := &core.User{ - ID: 1, - } - repo := &core.Repository{ - ID: 1, - UserID: 1, - Private: true, - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - } - - repoz := mock.NewMockRepositoryService(controller) - repoz.EXPECT().Find(gomock.Any(), user, repo.Slug).Return(nil, errors.ErrNotFound) - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), "octocat", "hello-world").Return(repo, nil) - - users := mock.NewMockUserStore(controller) - users.EXPECT().Find(gomock.Any(), repo.UserID).Return(user, nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/", nil) - r = r.WithContext( - context.WithValue(r.Context(), chi.RouteCtxKey, c), - ) - - HandleRepair(nil, repoz, repos, users, "https://company.drone.io")(w, r) - if got, want := w.Code, 404; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -// this test verifies that a 404 not found error is returned -// from the http.Handler if the repository owner cannot be -// found in the database. -func TestRepair_OwnerNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repo := &core.Repository{ - ID: 1, - UserID: 1, - Private: true, - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - } - - users := mock.NewMockUserStore(controller) - users.EXPECT().Find(gomock.Any(), repo.UserID).Return(nil, errors.ErrNotFound) - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), "octocat", "hello-world").Return(repo, nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/", nil) - r = r.WithContext( - context.WithValue(r.Context(), chi.RouteCtxKey, c), - ) - - HandleRepair(nil, nil, repos, users, "https://company.drone.io")(w, r) - if got, want := w.Code, 404; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -// this test verifies that a 500 internal server error is -// returned from the http.Handler if the repository updates -// fail to persist in the datastore. 
-func TestRepair_CannotUpdate(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - user := &core.User{ - ID: 1, - } - repo := &core.Repository{ - ID: 1, - UserID: 1, - Private: true, - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - } - remoteRepo := &core.Repository{ - Branch: "master", - Private: false, - HTTPURL: "https://github.com/octocat/hello-world.git", - SSHURL: "git@github.com:octocat/hello-world.git", - Link: "https://github.com/octocat/hello-world", - } - - repoz := mock.NewMockRepositoryService(controller) - repoz.EXPECT().Find(gomock.Any(), user, repo.Slug).Return(remoteRepo, nil) - - users := mock.NewMockUserStore(controller) - users.EXPECT().Find(gomock.Any(), repo.UserID).Return(user, nil) - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), "octocat", "hello-world").Return(repo, nil) - repos.EXPECT().Update(gomock.Any(), repo).Return(errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/", nil) - r = r.WithContext( - context.WithValue(r.Context(), chi.RouteCtxKey, c), - ) - - HandleRepair(nil, repoz, repos, users, "https://company.drone.io")(w, r) - if got, want := w.Code, 500; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -// this test verifies that a 500 internal server error is -// returned from the http.Handler if the hook cannot be -// added or replaced in the remote system (e.g. github). -func TestRepair_CannotReplaceHook(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - user := &core.User{ - ID: 1, - } - repo := &core.Repository{ - ID: 1, - UserID: 1, - Private: true, - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - } - remoteRepo := &core.Repository{ - Branch: "master", - Private: false, - HTTPURL: "https://github.com/octocat/hello-world.git", - SSHURL: "git@github.com:octocat/hello-world.git", - Link: "https://github.com/octocat/hello-world", - } - - hooks := mock.NewMockHookService(controller) - hooks.EXPECT().Create(gomock.Any(), gomock.Any(), repo).Return(errors.ErrNotFound) - - repoz := mock.NewMockRepositoryService(controller) - repoz.EXPECT().Find(gomock.Any(), user, repo.Slug).Return(remoteRepo, nil) - - users := mock.NewMockUserStore(controller) - users.EXPECT().Find(gomock.Any(), repo.UserID).Return(user, nil) - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), "octocat", "hello-world").Return(repo, nil) - repos.EXPECT().Update(gomock.Any(), repo).Return(nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/", nil) - r = r.WithContext( - context.WithValue(r.Context(), chi.RouteCtxKey, c), - ) - - HandleRepair(hooks, repoz, repos, users, "https://company.drone.io")(w, r) - if got, want := w.Code, 500; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/repos/secrets/create.go 
b/handler/api/repos/secrets/create.go deleted file mode 100644 index 809b883b19..0000000000 --- a/handler/api/repos/secrets/create.go +++ /dev/null @@ -1,73 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package secrets - -import ( - "encoding/json" - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - - "github.com/go-chi/chi" -) - -type secretInput struct { - Type string `json:"type"` - Name string `json:"name"` - Data string `json:"data"` - PullRequest bool `json:"pull_request"` - PullRequestPush bool `json:"pull_request_push"` -} - -// HandleCreate returns an http.HandlerFunc that processes http -// requests to create a new secret. -func HandleCreate( - repos core.RepositoryStore, - secrets core.SecretStore, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - namespace = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - ) - repo, err := repos.FindName(r.Context(), namespace, name) - if err != nil { - render.NotFound(w, err) - return - } - in := new(secretInput) - err = json.NewDecoder(r.Body).Decode(in) - if err != nil { - render.BadRequest(w, err) - return - } - - s := &core.Secret{ - RepoID: repo.ID, - Name: in.Name, - Data: in.Data, - PullRequest: in.PullRequest, - PullRequestPush: in.PullRequestPush, - } - - err = s.Validate() - if err != nil { - render.BadRequest(w, err) - return - } - - err = secrets.Create(r.Context(), s) - if err != nil { - render.InternalError(w, err) - return - } - - s = s.Copy() - render.JSON(w, s, 200) - } -} diff --git a/handler/api/repos/secrets/create_test.go b/handler/api/repos/secrets/create_test.go deleted file mode 100644 index 5de51d98b9..0000000000 --- a/handler/api/repos/secrets/create_test.go +++ /dev/null @@ -1,186 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
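Reviewer note on the file above: the removed repo-level HandleCreate reads a JSON body shaped like its secretInput struct and rejects empty names or values (see the validation tests that follow). A minimal sketch of that payload with illustrative values; only the struct shape is taken from the deleted code.

package main

import (
	"encoding/json"
	"fmt"
)

// Mirrors the secretInput struct deleted above; the pull_request flags
// default to false when omitted from the request body.
type secretInput struct {
	Type            string `json:"type"`
	Name            string `json:"name"`
	Data            string `json:"data"`
	PullRequest     bool   `json:"pull_request"`
	PullRequestPush bool   `json:"pull_request_push"`
}

func main() {
	body, _ := json.Marshal(secretInput{
		Name: "github_password",
		Data: "pa55word",
	})
	fmt.Println(string(body))
	// {"type":"","name":"github_password","data":"pa55word","pull_request":false,"pull_request_push":false}
}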
- -// +build !oss - -package secrets - -import ( - "bytes" - "context" - "encoding/json" - "net/http" - "net/http/httptest" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/mock" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -func TestHandleCreate(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), dummySecretRepo.Namespace, dummySecretRepo.Name).Return(dummySecretRepo, nil) - - secrets := mock.NewMockSecretStore(controller) - secrets.EXPECT().Create(gomock.Any(), gomock.Any()).Return(nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("secret", "github_password") - - in := new(bytes.Buffer) - json.NewEncoder(in).Encode(dummySecret) - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", in) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleCreate(repos, secrets).ServeHTTP(w, r) - if got, want := w.Code, http.StatusOK; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := &core.Secret{}, dummySecretScrubbed - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleCreate_ValidationError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), dummySecretRepo.Namespace, dummySecretRepo.Name).Return(dummySecretRepo, nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - in := new(bytes.Buffer) - json.NewEncoder(in).Encode(&core.Secret{Name: "", Data: "pa55word"}) - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", in) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleCreate(repos, nil).ServeHTTP(w, r) - if got, want := w.Code, http.StatusBadRequest; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := &errors.Error{}, &errors.Error{Message: "Invalid Secret Name"} - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleCreate_BadRequest(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), dummySecretRepo.Namespace, dummySecretRepo.Name).Return(dummySecretRepo, nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleCreate(repos, nil).ServeHTTP(w, r) - if got, want := w.Code, http.StatusBadRequest; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := &errors.Error{}, &errors.Error{Message: "EOF"} - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleCreate_RepoNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := 
mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), dummySecretRepo.Namespace, dummySecretRepo.Name).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleCreate(repos, nil).ServeHTTP(w, r) - if got, want := w.Code, http.StatusNotFound; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleCreate_CreateError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), dummySecretRepo.Namespace, dummySecretRepo.Name).Return(dummySecretRepo, nil) - - secrets := mock.NewMockSecretStore(controller) - secrets.EXPECT().Create(gomock.Any(), gomock.Any()).Return(errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("secret", "github_password") - - in := new(bytes.Buffer) - json.NewEncoder(in).Encode(dummySecret) - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", in) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleCreate(repos, secrets).ServeHTTP(w, r) - if got, want := w.Code, http.StatusInternalServerError; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/repos/secrets/delete.go b/handler/api/repos/secrets/delete.go deleted file mode 100644 index a88878b7b0..0000000000 --- a/handler/api/repos/secrets/delete.go +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package secrets - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - - "github.com/go-chi/chi" -) - -// HandleDelete returns an http.HandlerFunc that processes http -// requests to delete the secret. -func HandleDelete( - repos core.RepositoryStore, - secrets core.SecretStore, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - namespace = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - secret = chi.URLParam(r, "secret") - ) - repo, err := repos.FindName(r.Context(), namespace, name) - if err != nil { - render.NotFound(w, err) - return - } - s, err := secrets.FindName(r.Context(), repo.ID, secret) - if err != nil { - render.NotFound(w, err) - return - } - - err = secrets.Delete(r.Context(), s) - if err != nil { - render.InternalError(w, err) - return - } - w.WriteHeader(http.StatusNoContent) - } -} diff --git a/handler/api/repos/secrets/delete_test.go b/handler/api/repos/secrets/delete_test.go deleted file mode 100644 index 67a982f0c5..0000000000 --- a/handler/api/repos/secrets/delete_test.go +++ /dev/null @@ -1,147 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. 
-// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package secrets - -import ( - "context" - "encoding/json" - "net/http" - "net/http/httptest" - "testing" - - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/mock" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -func TestHandleDelete(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), dummySecretRepo.Namespace, dummySecretRepo.Name).Return(dummySecretRepo, nil) - - secrets := mock.NewMockSecretStore(controller) - secrets.EXPECT().FindName(gomock.Any(), dummySecretRepo.ID, dummySecret.Name).Return(dummySecret, nil) - secrets.EXPECT().Delete(gomock.Any(), dummySecret).Return(nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("secret", "github_password") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleDelete(repos, secrets).ServeHTTP(w, r) - if got, want := w.Code, http.StatusNoContent; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } -} - -func TestHandleDelete_RepoNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), dummySecretRepo.Namespace, dummySecretRepo.Name).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("secret", "github_password") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleDelete(repos, nil).ServeHTTP(w, r) - if got, want := w.Code, http.StatusNotFound; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleDelete_SecretNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), dummySecretRepo.Namespace, dummySecretRepo.Name).Return(dummySecretRepo, nil) - - secrets := mock.NewMockSecretStore(controller) - secrets.EXPECT().FindName(gomock.Any(), dummySecretRepo.ID, dummySecret.Name).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("secret", "github_password") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleDelete(repos, secrets).ServeHTTP(w, r) - if got, want := w.Code, http.StatusNotFound; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleDelete_DeleteError(t *testing.T) { - controller := 
gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), dummySecretRepo.Namespace, dummySecretRepo.Name).Return(dummySecretRepo, nil) - - secrets := mock.NewMockSecretStore(controller) - secrets.EXPECT().FindName(gomock.Any(), dummySecretRepo.ID, dummySecret.Name).Return(dummySecret, nil) - secrets.EXPECT().Delete(gomock.Any(), dummySecret).Return(errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("secret", "github_password") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleDelete(repos, secrets).ServeHTTP(w, r) - if got, want := w.Code, http.StatusInternalServerError; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/repos/secrets/find.go b/handler/api/repos/secrets/find.go deleted file mode 100644 index dab6f1ebbf..0000000000 --- a/handler/api/repos/secrets/find.go +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -//go:build !oss -// +build !oss - -package secrets - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - - "github.com/go-chi/chi" -) - -// HandleFind returns an http.HandlerFunc that writes json-encoded -// secret details to the response body. -func HandleFind( - repos core.RepositoryStore, - secrets core.SecretStore, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - namespace = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - secret = chi.URLParam(r, "secret") - ) - repo, err := repos.FindName(r.Context(), namespace, name) - if err != nil { - render.NotFound(w, err) - return - } - result, err := secrets.FindName(r.Context(), repo.ID, secret) - if err != nil { - render.NotFound(w, err) - return - } - safe := result.Copy() - render.JSON(w, safe, 200) - } -} diff --git a/handler/api/repos/secrets/find_test.go b/handler/api/repos/secrets/find_test.go deleted file mode 100644 index dbf8c9defd..0000000000 --- a/handler/api/repos/secrets/find_test.go +++ /dev/null @@ -1,119 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
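Reviewer note: every test file deleted in this patch builds its requests the same way, injecting URL parameters straight into chi's route context instead of going through a router. A self-contained sketch of that harness; the newParamRequest helper is ours, not part of the deleted code.

package main

import (
	"context"
	"fmt"
	"net/http"
	"net/http/httptest"

	"github.com/go-chi/chi"
)

// newParamRequest reproduces the pattern used throughout the deleted tests:
// a recorder plus a request whose chi route context already carries the
// owner/name/secret parameters the handlers read via chi.URLParam.
func newParamRequest(params map[string]string) (*httptest.ResponseRecorder, *http.Request) {
	c := chi.NewRouteContext()
	for k, v := range params {
		c.URLParams.Add(k, v)
	}
	w := httptest.NewRecorder()
	r := httptest.NewRequest("GET", "/", nil)
	r = r.WithContext(context.WithValue(r.Context(), chi.RouteCtxKey, c))
	return w, r
}

func main() {
	w, r := newParamRequest(map[string]string{"owner": "octocat", "name": "hello-world"})
	fmt.Println(chi.URLParam(r, "owner"), w.Code) // octocat 200
}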
- -// +build !oss - -package secrets - -import ( - "context" - "encoding/json" - "net/http" - "net/http/httptest" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/mock" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -func TestHandleFind(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), dummySecretRepo.Namespace, dummySecretRepo.Name).Return(dummySecretRepo, nil) - - secrets := mock.NewMockSecretStore(controller) - secrets.EXPECT().FindName(gomock.Any(), dummySecretRepo.ID, dummySecret.Name).Return(dummySecret, nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("secret", "github_password") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleFind(repos, secrets).ServeHTTP(w, r) - if got, want := w.Code, http.StatusOK; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := &core.Secret{}, dummySecretScrubbed - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleFind_RepoNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), dummySecretRepo.Namespace, dummySecretRepo.Name).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("secret", "github_password") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleFind(repos, nil).ServeHTTP(w, r) - if got, want := w.Code, http.StatusNotFound; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleFind_SecretNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), dummySecretRepo.Namespace, dummySecretRepo.Name).Return(dummySecretRepo, nil) - - secrets := mock.NewMockSecretStore(controller) - secrets.EXPECT().FindName(gomock.Any(), dummySecretRepo.ID, dummySecret.Name).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("secret", "github_password") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleFind(repos, secrets).ServeHTTP(w, r) - if got, want := w.Code, http.StatusNotFound; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/repos/secrets/list.go b/handler/api/repos/secrets/list.go deleted 
file mode 100644 index ebdcf4bfe9..0000000000 --- a/handler/api/repos/secrets/list.go +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package secrets - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - - "github.com/go-chi/chi" -) - -// HandleList returns an http.HandlerFunc that writes a json-encoded -// list of secrets to the response body. -func HandleList( - repos core.RepositoryStore, - secrets core.SecretStore, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - namespace = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - ) - repo, err := repos.FindName(r.Context(), namespace, name) - if err != nil { - render.NotFound(w, err) - return - } - list, err := secrets.List(r.Context(), repo.ID) - if err != nil { - render.NotFound(w, err) - return - } - // the secret list is copied and the secret value is - // removed from the response. - secrets := []*core.Secret{} - for _, secret := range list { - secrets = append(secrets, secret.Copy()) - } - render.JSON(w, secrets, 200) - } -} diff --git a/handler/api/repos/secrets/list_test.go b/handler/api/repos/secrets/list_test.go deleted file mode 100644 index cf1b9b6a3a..0000000000 --- a/handler/api/repos/secrets/list_test.go +++ /dev/null @@ -1,148 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package secrets - -import ( - "context" - "encoding/json" - "net/http" - "net/http/httptest" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/mock" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -var ( - dummySecretRepo = &core.Repository{ - ID: 1, - Namespace: "octocat", - Name: "hello-world", - } - - dummySecret = &core.Secret{ - RepoID: 1, - Name: "github_password", - Data: "pa55word", - } - - dummySecretScrubbed = &core.Secret{ - RepoID: 1, - Name: "github_password", - Data: "", - } - - dummySecretList = []*core.Secret{ - dummySecret, - } - - dummySecretListScrubbed = []*core.Secret{ - dummySecretScrubbed, - } -) - -// -// HandleList -// - -func TestHandleList(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), dummySecretRepo.Namespace, dummySecretRepo.Name).Return(dummySecretRepo, nil) - - secrets := mock.NewMockSecretStore(controller) - secrets.EXPECT().List(gomock.Any(), dummySecretRepo.ID).Return(dummySecretList, nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleList(repos, secrets).ServeHTTP(w, r) - if got, want := w.Code, http.StatusOK; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := []*core.Secret{}, dummySecretListScrubbed - json.NewDecoder(w.Body).Decode(&got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleList_RepoNotFound(t *testing.T) { - controller := 
gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), dummySecretRepo.Namespace, dummySecretRepo.Name).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleList(repos, nil).ServeHTTP(w, r) - if got, want := w.Code, http.StatusNotFound; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleList_SecretListErr(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), dummySecretRepo.Namespace, dummySecretRepo.Name).Return(dummySecretRepo, nil) - - secrets := mock.NewMockSecretStore(controller) - secrets.EXPECT().List(gomock.Any(), dummySecretRepo.ID).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleList(repos, secrets).ServeHTTP(w, r) - if got, want := w.Code, http.StatusNotFound; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/repos/secrets/none.go b/handler/api/repos/secrets/none.go deleted file mode 100644 index 894e070a5b..0000000000 --- a/handler/api/repos/secrets/none.go +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
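Reviewer note: the find and list handlers above never return the stored secret value; they render Secret.Copy(). A small sketch of what that guarantees, assuming Copy clears the Data field as the dummySecret / dummySecretScrubbed fixtures in list_test.go above encode (core/secret.go itself is removed elsewhere in this patch and was not re-checked).

package main

import (
	"fmt"

	"github.com/drone/drone/core"
)

func main() {
	s := &core.Secret{RepoID: 1, Name: "github_password", Data: "pa55word"}

	// Copy returns a value that is safe to render: per the fixtures in
	// list_test.go, Data comes back empty, so the plaintext secret never
	// appears in API responses.
	safe := s.Copy()
	fmt.Println(safe.Name, safe.Data == "") // github_password true
}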
- -// +build oss - -package secrets - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" -) - -var notImplemented = func(w http.ResponseWriter, r *http.Request) { - render.NotImplemented(w, render.ErrNotImplemented) -} - -func HandleCreate(core.RepositoryStore, core.SecretStore) http.HandlerFunc { - return notImplemented -} - -func HandleUpdate(core.RepositoryStore, core.SecretStore) http.HandlerFunc { - return notImplemented -} - -func HandleDelete(core.RepositoryStore, core.SecretStore) http.HandlerFunc { - return notImplemented -} - -func HandleFind(core.RepositoryStore, core.SecretStore) http.HandlerFunc { - return notImplemented -} - -func HandleList(core.RepositoryStore, core.SecretStore) http.HandlerFunc { - return notImplemented -} diff --git a/handler/api/repos/secrets/update.go b/handler/api/repos/secrets/update.go deleted file mode 100644 index b1e56e6033..0000000000 --- a/handler/api/repos/secrets/update.go +++ /dev/null @@ -1,82 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package secrets - -import ( - "encoding/json" - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - - "github.com/go-chi/chi" -) - -type secretUpdate struct { - Data *string `json:"data"` - PullRequest *bool `json:"pull_request"` - PullRequestPush *bool `json:"pull_request_push"` -} - -// HandleUpdate returns an http.HandlerFunc that processes http -// requests to update a secret. -func HandleUpdate( - repos core.RepositoryStore, - secrets core.SecretStore, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - namespace = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - secret = chi.URLParam(r, "secret") - ) - - in := new(secretUpdate) - err := json.NewDecoder(r.Body).Decode(in) - if err != nil { - render.BadRequest(w, err) - return - } - - repo, err := repos.FindName(r.Context(), namespace, name) - if err != nil { - render.NotFound(w, err) - return - } - - s, err := secrets.FindName(r.Context(), repo.ID, secret) - if err != nil { - render.NotFound(w, err) - return - } - - if in.Data != nil { - s.Data = *in.Data - } - if in.PullRequest != nil { - s.PullRequest = *in.PullRequest - } - if in.PullRequestPush != nil { - s.PullRequestPush = *in.PullRequestPush - } - - err = s.Validate() - if err != nil { - render.BadRequest(w, err) - return - } - - err = secrets.Update(r.Context(), s) - if err != nil { - render.InternalError(w, err) - return - } - - s = s.Copy() - render.JSON(w, s, 200) - } -} diff --git a/handler/api/repos/secrets/update_test.go b/handler/api/repos/secrets/update_test.go deleted file mode 100644 index 2ff79f34b8..0000000000 --- a/handler/api/repos/secrets/update_test.go +++ /dev/null @@ -1,229 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
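Reviewer note: the removed secret update handler patches by pointer, so any key omitted from the JSON body leaves the stored value untouched. A short sketch of the two payload shapes, with illustrative values; the struct mirrors the deleted secretUpdate type.

package main

import (
	"encoding/json"
	"fmt"
)

// Mirrors the secretUpdate struct deleted above: nil means "not provided",
// and the handler only overwrites fields that are non-nil.
type secretUpdate struct {
	Data            *string `json:"data"`
	PullRequest     *bool   `json:"pull_request"`
	PullRequestPush *bool   `json:"pull_request_push"`
}

func main() {
	// Rotate only the secret value; both pull_request flags stay as stored.
	in := new(secretUpdate)
	json.Unmarshal([]byte(`{"data":"n3w-pa55word"}`), in)
	fmt.Println(in.Data != nil, in.PullRequest == nil, in.PullRequestPush == nil) // true true true

	// Toggle only pull_request exposure; the stored value is untouched.
	in = new(secretUpdate)
	json.Unmarshal([]byte(`{"pull_request":true}`), in)
	fmt.Println(in.Data == nil, *in.PullRequest) // true true
}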
- -// +build !oss - -package secrets - -import ( - "bytes" - "context" - "encoding/json" - "net/http" - "net/http/httptest" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/mock" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -func TestHandleUpdate(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), dummySecretRepo.Namespace, dummySecretRepo.Name).Return(dummySecretRepo, nil) - - secrets := mock.NewMockSecretStore(controller) - secrets.EXPECT().FindName(gomock.Any(), dummySecretRepo.ID, dummySecret.Name).Return(dummySecret, nil) - secrets.EXPECT().Update(gomock.Any(), gomock.Any()).Return(nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("secret", "github_password") - - in := new(bytes.Buffer) - json.NewEncoder(in).Encode(dummySecret) - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", in) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleUpdate(repos, secrets).ServeHTTP(w, r) - if got, want := w.Code, http.StatusOK; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(core.Secret), dummySecretScrubbed - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleUpdate_ValidationError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), dummySecretRepo.Namespace, dummySecretRepo.Name).Return(dummySecretRepo, nil) - - secrets := mock.NewMockSecretStore(controller) - secrets.EXPECT().FindName(gomock.Any(), dummySecretRepo.ID, dummySecret.Name).Return(&core.Secret{Name: "github_password"}, nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("secret", "github_password") - - in := new(bytes.Buffer) - json.NewEncoder(in).Encode(&core.Secret{Data: ""}) - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", in) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleUpdate(repos, secrets).ServeHTTP(w, r) - if got, want := w.Code, http.StatusBadRequest; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), &errors.Error{Message: "Invalid Secret Value"} - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleUpdate_BadRequest(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("secret", "github_password") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleUpdate(nil, nil).ServeHTTP(w, r) - if got, want := w.Code, http.StatusBadRequest; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), &errors.Error{Message: "EOF"} - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) 
!= 0 { - t.Errorf(diff) - } -} - -func TestHandleUpdate_RepoNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), dummySecretRepo.Namespace, dummySecretRepo.Name).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - in := new(bytes.Buffer) - json.NewEncoder(in).Encode(&core.Secret{}) - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", in) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleUpdate(repos, nil).ServeHTTP(w, r) - if got, want := w.Code, http.StatusNotFound; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleUpdate_SecretNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), dummySecretRepo.Namespace, dummySecretRepo.Name).Return(dummySecretRepo, nil) - - secrets := mock.NewMockSecretStore(controller) - secrets.EXPECT().FindName(gomock.Any(), dummySecretRepo.ID, dummySecret.Name).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("secret", "github_password") - - in := new(bytes.Buffer) - json.NewEncoder(in).Encode(&core.Secret{}) - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", in) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleUpdate(repos, secrets).ServeHTTP(w, r) - if got, want := w.Code, http.StatusNotFound; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleUpdate_UpdateError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), dummySecretRepo.Namespace, dummySecretRepo.Name).Return(dummySecretRepo, nil) - - secrets := mock.NewMockSecretStore(controller) - secrets.EXPECT().FindName(gomock.Any(), dummySecretRepo.ID, dummySecret.Name).Return(&core.Secret{Name: "github_password"}, nil) - secrets.EXPECT().Update(gomock.Any(), gomock.Any()).Return(errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("secret", "github_password") - - in := new(bytes.Buffer) - json.NewEncoder(in).Encode(&core.Secret{Data: "password"}) - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", in) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleUpdate(repos, secrets).ServeHTTP(w, r) - if got, want := w.Code, http.StatusInternalServerError; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/repos/sign/sign.go 
b/handler/api/repos/sign/sign.go deleted file mode 100644 index ff070acc96..0000000000 --- a/handler/api/repos/sign/sign.go +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package sign - -import ( - "encoding/json" - "net/http" - - "github.com/drone/drone-yaml/yaml/signer" - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - - "github.com/go-chi/chi" -) - -type payload struct { - Data string `json:"data"` -} - -// HandleSign returns an http.HandlerFunc that processes http -// requests to sign a pipeline configuration file. -func HandleSign(repos core.RepositoryStore) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - namespace = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - ) - repo, err := repos.FindName(r.Context(), namespace, name) - if err != nil { - render.NotFound(w, err) - return - } - - in := new(payload) - err = json.NewDecoder(r.Body).Decode(in) - if err != nil { - render.BadRequest(w, err) - return - } - - k := []byte(repo.Secret) - d := []byte(in.Data) - out, err := signer.Sign(d, k) - if err != nil { - render.InternalError(w, err) - return - } - - render.JSON(w, &payload{Data: out}, 200) - } -} diff --git a/handler/api/repos/sign/sign_test.go b/handler/api/repos/sign/sign_test.go deleted file mode 100644 index e5166fce27..0000000000 --- a/handler/api/repos/sign/sign_test.go +++ /dev/null @@ -1,5 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package sign diff --git a/handler/api/repos/update.go b/handler/api/repos/update.go deleted file mode 100644 index 8736c7c1b3..0000000000 --- a/handler/api/repos/update.go +++ /dev/null @@ -1,143 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
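Reviewer note: the removed sign endpoint is a thin wrapper over signer.Sign from drone-yaml, keyed with repo.Secret. A sketch of the equivalent direct call; the Sign signature is inferred from the deleted handler and the key value is illustrative.

package main

import (
	"fmt"

	"github.com/drone/drone-yaml/yaml/signer"
)

func main() {
	document := []byte("kind: pipeline\nname: default\n")
	key := []byte("correct-horse-battery-staple") // stands in for repo.Secret

	// Same call shape as the deleted handler: Sign takes the configuration
	// and the key and returns the signed document as a string.
	signed, err := signer.Sign(document, key)
	if err != nil {
		panic(err)
	}
	fmt.Println(signed)
}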
- -package repos - -import ( - "encoding/json" - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/handler/api/request" - "github.com/drone/drone/logger" - - "github.com/go-chi/chi" -) - -type ( - repositoryInput struct { - Visibility *string `json:"visibility"` - Config *string `json:"config_path"` - Trusted *bool `json:"trusted"` - Protected *bool `json:"protected"` - IgnoreForks *bool `json:"ignore_forks"` - IgnorePulls *bool `json:"ignore_pull_requests"` - CancelPulls *bool `json:"auto_cancel_pull_requests"` - CancelPush *bool `json:"auto_cancel_pushes"` - CancelRunning *bool `json:"auto_cancel_running"` - Timeout *int64 `json:"timeout"` - Throttle *int64 `json:"throttle"` - Counter *int64 `json:"counter"` - } -) - -// HandleUpdate returns an http.HandlerFunc that processes http -// requests to update the repository details. -func HandleUpdate(repos core.RepositoryStore) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - owner = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - slug = owner + "/" + name - ) - user, _ := request.UserFrom(r.Context()) - - repo, err := repos.FindName(r.Context(), owner, name) - if err != nil { - render.NotFound(w, err) - logger.FromRequest(r). - WithError(err). - WithField("repository", slug). - Debugln("api: repository not found") - return - } - - in := new(repositoryInput) - err = json.NewDecoder(r.Body).Decode(in) - if err != nil { - render.BadRequest(w, err) - logger.FromRequest(r). - WithError(err). - WithField("repository", slug). - Debugln("api: cannot unmarshal json input") - return - } - - if in.Visibility != nil { - repo.Visibility = *in.Visibility - } - if in.Config != nil { - repo.Config = *in.Config - } - if in.Protected != nil { - repo.Protected = *in.Protected - } - if in.IgnoreForks != nil { - repo.IgnoreForks = *in.IgnoreForks - } - if in.IgnorePulls != nil { - repo.IgnorePulls = *in.IgnorePulls - } - if in.CancelPulls != nil { - repo.CancelPulls = *in.CancelPulls - } - if in.CancelPush != nil { - repo.CancelPush = *in.CancelPush - } - if in.CancelRunning != nil { - repo.CancelRunning = *in.CancelRunning - } - - // - // system administrator only - // - if user != nil && user.Admin { - if in.Trusted != nil { - repo.Trusted = *in.Trusted - } - if in.Timeout != nil { - repo.Timeout = *in.Timeout - } - if in.Throttle != nil { - repo.Throttle = *in.Throttle - } - if in.Counter != nil { - repo.Counter = *in.Counter - } - } - - // // right now the only repository field that a user - // // can update is the visibility field. - // if govalidator.IsIn(in.Visibility, - // core.VisibilityInternal, - // core.VisibilityPrivate, - // core.VisibilityPublic, - // ) { - // repo.Visibility = in.Visibility - // } - - err = repos.Update(r.Context(), repo) - if err != nil { - render.InternalError(w, err) - logger.FromRequest(r). - WithError(err). - WithField("repository", slug). - Warnln("api: cannot update repository") - return - } - - render.JSON(w, repo, 200) - } -} diff --git a/handler/api/repos/update_test.go b/handler/api/repos/update_test.go deleted file mode 100644 index 0d221e1932..0000000000 --- a/handler/api/repos/update_test.go +++ /dev/null @@ -1,295 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
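Reviewer note: the removed repository update handler also patches by pointer, and it applies trusted, timeout, throttle and counter only when the authenticated user is a system administrator; otherwise those keys are dropped without an error. Illustrative request bodies for the two cases (values are examples, key names come from the deleted repositoryInput struct):

package main

import "fmt"

func main() {
	// Settings the removed handler applies for any caller that reaches it:
	userPatch := `{"visibility":"public","auto_cancel_pull_requests":true}`

	// Settings it applies only when request.UserFrom yields a user with
	// Admin set; for everyone else these keys are silently ignored:
	adminPatch := `{"trusted":true,"timeout":90,"throttle":2,"counter":42}`

	fmt.Println(userPatch)
	fmt.Println(adminPatch)
}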
- -package repos - -import ( - "bytes" - "context" - "encoding/json" - "net/http/httptest" - "strings" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/mock" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -func TestUpdate(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repo := &core.Repository{ - ID: 1, - UserID: 1, - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - Branch: "master", - Private: false, - Visibility: core.VisibilityPrivate, - HTTPURL: "https://github.com/octocat/hello-world.git", - SSHURL: "git@github.com:octocat/hello-world.git", - Link: "https://github.com/octocat/hello-world", - } - - repoInput := &core.Repository{ - Visibility: core.VisibilityPublic, - } - - checkUpdate := func(_ context.Context, updated *core.Repository) error { - if got, want := updated.Visibility, core.VisibilityPublic; got != want { - t.Errorf("Want repository visibility updated to %s, got %s", want, got) - } - return nil - } - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), "octocat", "hello-world").Return(repo, nil) - repos.EXPECT().Update(gomock.Any(), repo).Return(nil).Do(checkUpdate) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - in := new(bytes.Buffer) - json.NewEncoder(in).Encode(repoInput) - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/", in) - r = r.WithContext( - context.WithValue(r.Context(), chi.RouteCtxKey, c), - ) - - HandleUpdate(repos)(w, r) - if got, want := w.Code, 200; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(core.Repository), &core.Repository{ - ID: 1, - UserID: 1, - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - Branch: "master", - Private: false, - Visibility: core.VisibilityPublic, - HTTPURL: "https://github.com/octocat/hello-world.git", - SSHURL: "git@github.com:octocat/hello-world.git", - Link: "https://github.com/octocat/hello-world", - } - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) > 0 { - t.Errorf(diff) - } -} - -// this test verifies that a 404 not found error is returned -// from the http.Handler if the named repository cannot be -// found in the database. -func TestUpdate_RepoNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), "octocat", "hello-world").Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/", nil) - r = r.WithContext( - context.WithValue(r.Context(), chi.RouteCtxKey, c), - ) - - HandleUpdate(repos)(w, r) - if got, want := w.Code, 404; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -// this test verifies that a 400 bad request error is -// returned from the http.Handler if the request body -// is invalid json. 
-func TestUpdate_InvalidInput(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repo := &core.Repository{ - ID: 1, - UserID: 1, - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - Branch: "master", - Private: false, - Visibility: core.VisibilityPrivate, - HTTPURL: "https://github.com/octocat/hello-world.git", - SSHURL: "git@github.com:octocat/hello-world.git", - Link: "https://github.com/octocat/hello-world", - } - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), "octocat", "hello-world").Return(repo, nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/", strings.NewReader("")) - r = r.WithContext( - context.WithValue(r.Context(), chi.RouteCtxKey, c), - ) - - HandleUpdate(repos)(w, r) - if got, want := w.Code, 400; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.New("EOF") - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -// this test verifies that a 500 internal server error is -// returned from the http.Handler if the repository updates -// cannot be persisted to the database. -func TestUpdate_UpdateFailed(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repo := &core.Repository{ - ID: 1, - UserID: 1, - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - Branch: "master", - Private: false, - Visibility: core.VisibilityPrivate, - HTTPURL: "https://github.com/octocat/hello-world.git", - SSHURL: "git@github.com:octocat/hello-world.git", - Link: "https://github.com/octocat/hello-world", - } - - repoInput := &core.Repository{ - Visibility: core.VisibilityPublic, - } - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), "octocat", "hello-world").Return(repo, nil) - repos.EXPECT().Update(gomock.Any(), repo).Return(errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - in := new(bytes.Buffer) - json.NewEncoder(in).Encode(repoInput) - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/", in) - r = r.WithContext( - context.WithValue(r.Context(), chi.RouteCtxKey, c), - ) - - HandleUpdate(repos)(w, r) - if got, want := w.Code, 500; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestUpdateAutoCancelRunning(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - repo := &core.Repository{ - ID: 1, - UserID: 1, - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - Branch: "master", - Private: false, - Visibility: core.VisibilityPrivate, - HTTPURL: "https://github.com/octocat/hello-world.git", - SSHURL: "git@github.com:octocat/hello-world.git", - Link: "https://github.com/octocat/hello-world", - CancelRunning: false, - } - - repoInput := &core.Repository{ - CancelRunning: true, - Visibility: core.VisibilityPrivate, - } - - shouldBeValue := true - checkUpdate := func(_ context.Context, updated *core.Repository) error { - if got, want := updated.CancelRunning, shouldBeValue; got != want { - 
t.Errorf("Want repository visibility updated to %v, got %v", want, got) - } - return nil - } - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().FindName(gomock.Any(), "octocat", "hello-world").Return(repo, nil) - repos.EXPECT().Update(gomock.Any(), repo).Return(nil).Do(checkUpdate) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - - in := new(bytes.Buffer) - json.NewEncoder(in).Encode(repoInput) - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/", in) - r = r.WithContext( - context.WithValue(r.Context(), chi.RouteCtxKey, c), - ) - - HandleUpdate(repos)(w, r) - if got, want := w.Code, 200; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(core.Repository), &core.Repository{ - ID: 1, - UserID: 1, - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - Branch: "master", - Private: false, - Visibility: core.VisibilityPrivate, - HTTPURL: "https://github.com/octocat/hello-world.git", - SSHURL: "git@github.com:octocat/hello-world.git", - Link: "https://github.com/octocat/hello-world", - CancelRunning: true, - } - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) > 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/request/context.go b/handler/api/request/context.go deleted file mode 100644 index a1fe086a4c..0000000000 --- a/handler/api/request/context.go +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package request - -// https://github.com/kubernetes/apiserver/blob/master/pkg/endpoints/request/context.go - -import ( - "context" - - "github.com/drone/drone/core" -) - -type key int - -const ( - userKey key = iota - permKey - repoKey -) - -// WithUser returns a copy of parent in which the user value is set -func WithUser(parent context.Context, user *core.User) context.Context { - return context.WithValue(parent, userKey, user) -} - -// UserFrom returns the value of the user key on the ctx -func UserFrom(ctx context.Context) (*core.User, bool) { - user, ok := ctx.Value(userKey).(*core.User) - return user, ok -} - -// WithPerm returns a copy of parent in which the perm value is set -func WithPerm(parent context.Context, perm *core.Perm) context.Context { - return context.WithValue(parent, permKey, perm) -} - -// PermFrom returns the value of the perm key on the ctx -func PermFrom(ctx context.Context) (*core.Perm, bool) { - perm, ok := ctx.Value(permKey).(*core.Perm) - return perm, ok -} - -// WithRepo returns a copy of parent in which the repo value is set -func WithRepo(parent context.Context, repo *core.Repository) context.Context { - return context.WithValue(parent, repoKey, repo) -} - -// RepoFrom returns the value of the repo key on the ctx -func RepoFrom(ctx context.Context) (*core.Repository, bool) { - repo, ok := ctx.Value(repoKey).(*core.Repository) - return repo, ok -} diff --git a/handler/api/request/context_test.go b/handler/api/request/context_test.go deleted file mode 100644 index 87378f8661..0000000000 --- a/handler/api/request/context_test.go +++ /dev/null @@ -1,5 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package request diff --git a/handler/api/secrets/all.go b/handler/api/secrets/all.go deleted file mode 100644 index f882e5c763..0000000000 --- a/handler/api/secrets/all.go +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package secrets - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" -) - -// HandleAll returns an http.HandlerFunc that writes a json-encoded -// list of secrets to the response body. -func HandleAll(secrets core.GlobalSecretStore) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - list, err := secrets.ListAll(r.Context()) - if err != nil { - render.NotFound(w, err) - return - } - // the secret list is copied and the secret value is - // removed from the response. - secrets := []*core.Secret{} - for _, secret := range list { - secrets = append(secrets, secret.Copy()) - } - render.JSON(w, secrets, 200) - } -} diff --git a/handler/api/secrets/all_test.go b/handler/api/secrets/all_test.go deleted file mode 100644 index 1f8e7633be..0000000000 --- a/handler/api/secrets/all_test.go +++ /dev/null @@ -1,65 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
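Reviewer note: the request package deleted here is how handlers such as the repository update above learn who is calling. A minimal sketch of the intended wiring, with an illustrative middleware of our own standing in for the real session middleware that lived elsewhere in the removed handler/api tree:

package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"

	"github.com/drone/drone/core"
	"github.com/drone/drone/handler/api/request"
)

// withFakeUser is illustrative only; it stores a hard-coded user on the
// request context the same way the removed session middleware would.
func withFakeUser(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		ctx := request.WithUser(r.Context(), &core.User{ID: 1, Admin: true})
		next.ServeHTTP(w, r.WithContext(ctx))
	})
}

func main() {
	h := withFakeUser(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// Handlers read the user back exactly as the deleted code does.
		if user, ok := request.UserFrom(r.Context()); ok {
			fmt.Fprintf(w, "user %d admin=%v", user.ID, user.Admin)
		}
	}))

	w := httptest.NewRecorder()
	h.ServeHTTP(w, httptest.NewRequest("GET", "/", nil))
	fmt.Println(w.Body.String()) // user 1 admin=true
}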
- -// +build !oss - -package secrets - -import ( - "encoding/json" - "net/http" - "net/http/httptest" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/mock" - - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -func TestHandleAll(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - secrets := mock.NewMockGlobalSecretStore(controller) - secrets.EXPECT().ListAll(gomock.Any()).Return(dummySecretList, nil) - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - - HandleAll(secrets).ServeHTTP(w, r) - if got, want := w.Code, http.StatusOK; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := []*core.Secret{}, dummySecretListScrubbed - json.NewDecoder(w.Body).Decode(&got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleAll_SecretListErr(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - secrets := mock.NewMockGlobalSecretStore(controller) - secrets.EXPECT().ListAll(gomock.Any()).Return(nil, errors.ErrNotFound) - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - - HandleAll(secrets).ServeHTTP(w, r) - if got, want := w.Code, http.StatusNotFound; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/secrets/create.go b/handler/api/secrets/create.go deleted file mode 100644 index d80ab6fc07..0000000000 --- a/handler/api/secrets/create.go +++ /dev/null @@ -1,60 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package secrets - -import ( - "encoding/json" - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/go-chi/chi" -) - -type secretInput struct { - Type string `json:"type"` - Name string `json:"name"` - Data string `json:"data"` - PullRequest bool `json:"pull_request"` - PullRequestPush bool `json:"pull_request_push"` -} - -// HandleCreate returns an http.HandlerFunc that processes http -// requests to create a new secret. -func HandleCreate(secrets core.GlobalSecretStore) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - in := new(secretInput) - err := json.NewDecoder(r.Body).Decode(in) - if err != nil { - render.BadRequest(w, err) - return - } - - s := &core.Secret{ - Namespace: chi.URLParam(r, "namespace"), - Name: in.Name, - Data: in.Data, - PullRequest: in.PullRequest, - PullRequestPush: in.PullRequestPush, - } - - err = s.Validate() - if err != nil { - render.BadRequest(w, err) - return - } - - err = secrets.Create(r.Context(), s) - if err != nil { - render.InternalError(w, err) - return - } - - s = s.Copy() - render.JSON(w, s, 200) - } -} diff --git a/handler/api/secrets/create_test.go b/handler/api/secrets/create_test.go deleted file mode 100644 index ff6dbfec8c..0000000000 --- a/handler/api/secrets/create_test.go +++ /dev/null @@ -1,139 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
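Reviewer note: this second secrets package is the organization-level counterpart of the repo-scoped handlers deleted earlier; the body is the same secretInput shape, but the secret is keyed by the namespace URL parameter instead of a repository ID. A sketch of the core.Secret the removed global HandleCreate builds, with illustrative values:

package main

import (
	"fmt"

	"github.com/drone/drone/core"
)

func main() {
	// What the removed global HandleCreate stores for a POST body like
	// {"name":"docker_password","data":"pa55word"} on namespace "octocat":
	s := &core.Secret{
		Namespace:       "octocat", // taken from chi.URLParam(r, "namespace")
		Name:            "docker_password",
		Data:            "pa55word",
		PullRequest:     false,
		PullRequestPush: false,
	}
	fmt.Println(s.Namespace, s.Name)
}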
- -// +build !oss - -package secrets - -import ( - "bytes" - "context" - "encoding/json" - "net/http" - "net/http/httptest" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/mock" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -func TestHandleCreate(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - secrets := mock.NewMockGlobalSecretStore(controller) - secrets.EXPECT().Create(gomock.Any(), gomock.Any()).Return(nil) - - c := new(chi.Context) - c.URLParams.Add("namespace", "octocat") - - in := new(bytes.Buffer) - json.NewEncoder(in).Encode(dummySecret) - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", in) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleCreate(secrets).ServeHTTP(w, r) - if got, want := w.Code, http.StatusOK; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := &core.Secret{}, dummySecretScrubbed - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleCreate_ValidationError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - c := new(chi.Context) - c.URLParams.Add("namespace", "octocat") - - in := new(bytes.Buffer) - json.NewEncoder(in).Encode(&core.Secret{Name: "", Data: "pa55word"}) - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", in) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleCreate(nil).ServeHTTP(w, r) - if got, want := w.Code, http.StatusBadRequest; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := &errors.Error{}, &errors.Error{Message: "Invalid Secret Name"} - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleCreate_BadRequest(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - c := new(chi.Context) - c.URLParams.Add("namespace", "octocat") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleCreate(nil).ServeHTTP(w, r) - if got, want := w.Code, http.StatusBadRequest; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := &errors.Error{}, &errors.Error{Message: "EOF"} - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleCreate_CreateError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - secrets := mock.NewMockGlobalSecretStore(controller) - secrets.EXPECT().Create(gomock.Any(), gomock.Any()).Return(errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("namespace", "octocat") - - in := new(bytes.Buffer) - json.NewEncoder(in).Encode(dummySecret) - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", in) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleCreate(secrets).ServeHTTP(w, r) - if got, want := w.Code, http.StatusInternalServerError; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, 
want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/secrets/delete.go b/handler/api/secrets/delete.go deleted file mode 100644 index 2948cec2ba..0000000000 --- a/handler/api/secrets/delete.go +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package secrets - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - - "github.com/go-chi/chi" -) - -// HandleDelete returns an http.HandlerFunc that processes http -// requests to delete the secret. -func HandleDelete(secrets core.GlobalSecretStore) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - namespace = chi.URLParam(r, "namespace") - name = chi.URLParam(r, "name") - ) - s, err := secrets.FindName(r.Context(), namespace, name) - if err != nil { - render.NotFound(w, err) - return - } - err = secrets.Delete(r.Context(), s) - if err != nil { - render.InternalError(w, err) - return - } - w.WriteHeader(http.StatusNoContent) - } -} diff --git a/handler/api/secrets/delete_test.go b/handler/api/secrets/delete_test.go deleted file mode 100644 index 124ab87adf..0000000000 --- a/handler/api/secrets/delete_test.go +++ /dev/null @@ -1,105 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package secrets - -import ( - "context" - "encoding/json" - "net/http" - "net/http/httptest" - "testing" - - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/mock" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -func TestHandleDelete(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - secrets := mock.NewMockGlobalSecretStore(controller) - secrets.EXPECT().FindName(gomock.Any(), dummySecret.Namespace, dummySecret.Name).Return(dummySecret, nil) - secrets.EXPECT().Delete(gomock.Any(), dummySecret).Return(nil) - - c := new(chi.Context) - c.URLParams.Add("namespace", "octocat") - c.URLParams.Add("name", "github_password") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleDelete(secrets).ServeHTTP(w, r) - if got, want := w.Code, http.StatusNoContent; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } -} - -func TestHandleDelete_SecretNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - secrets := mock.NewMockGlobalSecretStore(controller) - secrets.EXPECT().FindName(gomock.Any(), dummySecret.Namespace, dummySecret.Name).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("namespace", "octocat") - c.URLParams.Add("name", "github_password") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleDelete(secrets).ServeHTTP(w, r) - if got, want := w.Code, http.StatusNotFound; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func 
TestHandleDelete_DeleteError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - secrets := mock.NewMockGlobalSecretStore(controller) - secrets.EXPECT().FindName(gomock.Any(), dummySecret.Namespace, dummySecret.Name).Return(dummySecret, nil) - secrets.EXPECT().Delete(gomock.Any(), dummySecret).Return(errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("namespace", "octocat") - c.URLParams.Add("name", "github_password") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleDelete(secrets).ServeHTTP(w, r) - if got, want := w.Code, http.StatusInternalServerError; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/secrets/find.go b/handler/api/secrets/find.go deleted file mode 100644 index b44af80eba..0000000000 --- a/handler/api/secrets/find.go +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -//go:build !oss -// +build !oss - -package secrets - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - - "github.com/go-chi/chi" -) - -// HandleFind returns an http.HandlerFunc that writes json-encoded -// secret details to the response body. -func HandleFind(secrets core.GlobalSecretStore) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - namespace = chi.URLParam(r, "namespace") - name = chi.URLParam(r, "name") - ) - secret, err := secrets.FindName(r.Context(), namespace, name) - if err != nil { - render.NotFound(w, err) - return - } - safe := secret.Copy() - render.JSON(w, safe, 200) - } -} diff --git a/handler/api/secrets/find_test.go b/handler/api/secrets/find_test.go deleted file mode 100644 index 9a2d144460..0000000000 --- a/handler/api/secrets/find_test.go +++ /dev/null @@ -1,81 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -// +build !oss - -package secrets - -import ( - "context" - "encoding/json" - "net/http" - "net/http/httptest" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/mock" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -func TestHandleFind(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - secrets := mock.NewMockGlobalSecretStore(controller) - secrets.EXPECT().FindName(gomock.Any(), dummySecret.Namespace, dummySecret.Name).Return(dummySecret, nil) - - c := new(chi.Context) - c.URLParams.Add("namespace", "octocat") - c.URLParams.Add("name", "github_password") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleFind(secrets).ServeHTTP(w, r) - if got, want := w.Code, http.StatusOK; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := &core.Secret{}, dummySecretScrubbed - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleFind_SecretNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - secrets := mock.NewMockGlobalSecretStore(controller) - secrets.EXPECT().FindName(gomock.Any(), dummySecret.Namespace, dummySecret.Name).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("namespace", "octocat") - c.URLParams.Add("name", "github_password") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleFind(secrets).ServeHTTP(w, r) - if got, want := w.Code, http.StatusNotFound; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/secrets/list.go b/handler/api/secrets/list.go deleted file mode 100644 index 9daea2d96e..0000000000 --- a/handler/api/secrets/list.go +++ /dev/null @@ -1,36 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package secrets - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - - "github.com/go-chi/chi" -) - -// HandleList returns an http.HandlerFunc that writes a json-encoded -// list of secrets to the response body. -func HandleList(secrets core.GlobalSecretStore) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - namespace := chi.URLParam(r, "namespace") - list, err := secrets.List(r.Context(), namespace) - if err != nil { - render.NotFound(w, err) - return - } - // the secret list is copied and the secret value is - // removed from the response. - secrets := []*core.Secret{} - for _, secret := range list { - secrets = append(secrets, secret.Copy()) - } - render.JSON(w, secrets, 200) - } -} diff --git a/handler/api/secrets/list_test.go b/handler/api/secrets/list_test.go deleted file mode 100644 index 9db84ad9b7..0000000000 --- a/handler/api/secrets/list_test.go +++ /dev/null @@ -1,105 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. 
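// The secret handlers above (HandleAll, HandleFind, HandleList) never encode
// the stored record directly: they call Copy() so the secret value is blanked
// before it reaches the response, which is why the tests expect Data to be
// empty. A small self-contained mirror of that scrub-before-render pattern;
// the local type and scrub method are illustrative stand-ins for
// core.Secret.Copy, not the original implementation.
package main

import (
	"encoding/json"
	"os"
)

type secret struct {
	Namespace string `json:"namespace"`
	Name      string `json:"name"`
	Data      string `json:"data,omitempty"`
}

// scrub returns a copy of the secret with the sensitive value removed.
func (s *secret) scrub() *secret {
	out := *s
	out.Data = ""
	return &out
}

func main() {
	stored := &secret{Namespace: "octocat", Name: "github_password", Data: "pa55word"}
	// encode the scrubbed copy, never the stored record.
	json.NewEncoder(os.Stdout).Encode(stored.scrub())
	// prints: {"namespace":"octocat","name":"github_password"}
}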
-// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package secrets - -import ( - "context" - "encoding/json" - "net/http" - "net/http/httptest" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/mock" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -var ( - dummySecret = &core.Secret{ - Namespace: "octocat", - Name: "github_password", - Data: "pa55word", - } - - dummySecretScrubbed = &core.Secret{ - Namespace: "octocat", - Name: "github_password", - Data: "", - } - - dummySecretList = []*core.Secret{ - dummySecret, - } - - dummySecretListScrubbed = []*core.Secret{ - dummySecretScrubbed, - } -) - -// -// HandleList -// - -func TestHandleList(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - secrets := mock.NewMockGlobalSecretStore(controller) - secrets.EXPECT().List(gomock.Any(), dummySecret.Namespace).Return(dummySecretList, nil) - - c := new(chi.Context) - c.URLParams.Add("namespace", "octocat") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleList(secrets).ServeHTTP(w, r) - if got, want := w.Code, http.StatusOK; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := []*core.Secret{}, dummySecretListScrubbed - json.NewDecoder(w.Body).Decode(&got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleList_SecretListErr(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - secrets := mock.NewMockGlobalSecretStore(controller) - secrets.EXPECT().List(gomock.Any(), dummySecret.Namespace).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("namespace", "octocat") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleList(secrets).ServeHTTP(w, r) - if got, want := w.Code, http.StatusNotFound; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/secrets/none.go b/handler/api/secrets/none.go deleted file mode 100644 index a602c41ea0..0000000000 --- a/handler/api/secrets/none.go +++ /dev/null @@ -1,52 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -// +build oss - -package secrets - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" -) - -var notImplemented = func(w http.ResponseWriter, r *http.Request) { - render.NotImplemented(w, render.ErrNotImplemented) -} - -func HandleCreate(core.GlobalSecretStore) http.HandlerFunc { - return notImplemented -} - -func HandleUpdate(core.GlobalSecretStore) http.HandlerFunc { - return notImplemented -} - -func HandleDelete(core.GlobalSecretStore) http.HandlerFunc { - return notImplemented -} - -func HandleFind(core.GlobalSecretStore) http.HandlerFunc { - return notImplemented -} - -func HandleList(core.GlobalSecretStore) http.HandlerFunc { - return notImplemented -} - -func HandleAll(core.GlobalSecretStore) http.HandlerFunc { - return notImplemented -} diff --git a/handler/api/secrets/update.go b/handler/api/secrets/update.go deleted file mode 100644 index 7251eb254a..0000000000 --- a/handler/api/secrets/update.go +++ /dev/null @@ -1,72 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package secrets - -import ( - "encoding/json" - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - - "github.com/go-chi/chi" -) - -type secretUpdate struct { - Data *string `json:"data"` - PullRequest *bool `json:"pull_request"` - PullRequestPush *bool `json:"pull_request_push"` -} - -// HandleUpdate returns an http.HandlerFunc that processes http -// requests to update a secret. -func HandleUpdate(secrets core.GlobalSecretStore) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - namespace = chi.URLParam(r, "namespace") - name = chi.URLParam(r, "name") - ) - - in := new(secretUpdate) - err := json.NewDecoder(r.Body).Decode(in) - if err != nil { - render.BadRequest(w, err) - return - } - - s, err := secrets.FindName(r.Context(), namespace, name) - if err != nil { - render.NotFound(w, err) - return - } - - if in.Data != nil { - s.Data = *in.Data - } - if in.PullRequest != nil { - s.PullRequest = *in.PullRequest - } - if in.PullRequestPush != nil { - s.PullRequestPush = *in.PullRequestPush - } - - err = s.Validate() - if err != nil { - render.BadRequest(w, err) - return - } - - err = secrets.Update(r.Context(), s) - if err != nil { - render.InternalError(w, err) - return - } - - s = s.Copy() - render.JSON(w, s, 200) - } -} diff --git a/handler/api/secrets/update_test.go b/handler/api/secrets/update_test.go deleted file mode 100644 index 4cce15fe2d..0000000000 --- a/handler/api/secrets/update_test.go +++ /dev/null @@ -1,180 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
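// HandleUpdate above decodes the request body into a struct of pointer
// fields (secretUpdate) so it can tell "field omitted" apart from "field set
// to its zero value": only non-nil fields overwrite the stored secret. A
// minimal self-contained sketch of that partial-update pattern; the local
// types and the sample payload are illustrative, not the original API.
package main

import (
	"encoding/json"
	"fmt"
)

type storedSecret struct {
	Data        string
	PullRequest bool
}

type secretPatch struct {
	Data        *string `json:"data"`
	PullRequest *bool   `json:"pull_request"`
}

func main() {
	stored := storedSecret{Data: "pa55word", PullRequest: true}

	// the payload only mentions pull_request, so data must stay untouched.
	payload := []byte(`{"pull_request": false}`)

	patch := new(secretPatch)
	if err := json.Unmarshal(payload, patch); err != nil {
		panic(err)
	}

	// apply only the fields that were actually present in the body.
	if patch.Data != nil {
		stored.Data = *patch.Data
	}
	if patch.PullRequest != nil {
		stored.PullRequest = *patch.PullRequest
	}

	fmt.Printf("%+v\n", stored) // {Data:pa55word PullRequest:false}
}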
- -// +build !oss - -package secrets - -import ( - "bytes" - "context" - "encoding/json" - "net/http" - "net/http/httptest" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/mock" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -func TestHandleUpdate(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - secrets := mock.NewMockGlobalSecretStore(controller) - secrets.EXPECT().FindName(gomock.Any(), dummySecret.Namespace, dummySecret.Name).Return(dummySecret, nil) - secrets.EXPECT().Update(gomock.Any(), gomock.Any()).Return(nil) - - c := new(chi.Context) - c.URLParams.Add("namespace", "octocat") - c.URLParams.Add("name", "github_password") - - in := new(bytes.Buffer) - json.NewEncoder(in).Encode(dummySecret) - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", in) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleUpdate(secrets).ServeHTTP(w, r) - if got, want := w.Code, http.StatusOK; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(core.Secret), dummySecretScrubbed - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleUpdate_ValidationError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - secrets := mock.NewMockGlobalSecretStore(controller) - secrets.EXPECT().FindName(gomock.Any(), dummySecret.Namespace, dummySecret.Name).Return(&core.Secret{Name: "github_password"}, nil) - - c := new(chi.Context) - c.URLParams.Add("namespace", "octocat") - c.URLParams.Add("name", "github_password") - - in := new(bytes.Buffer) - json.NewEncoder(in).Encode(&core.Secret{Data: ""}) - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", in) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleUpdate(secrets).ServeHTTP(w, r) - if got, want := w.Code, http.StatusBadRequest; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), &errors.Error{Message: "Invalid Secret Value"} - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleUpdate_BadRequest(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - c := new(chi.Context) - c.URLParams.Add("namespace", "octocat") - c.URLParams.Add("name", "github_password") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleUpdate(nil).ServeHTTP(w, r) - if got, want := w.Code, http.StatusBadRequest; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), &errors.Error{Message: "EOF"} - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleUpdate_SecretNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - secrets := mock.NewMockGlobalSecretStore(controller) - secrets.EXPECT().FindName(gomock.Any(), dummySecret.Namespace, dummySecret.Name).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("namespace", "octocat") - c.URLParams.Add("name", "github_password") - - in := 
new(bytes.Buffer) - json.NewEncoder(in).Encode(&core.Secret{}) - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", in) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleUpdate(secrets).ServeHTTP(w, r) - if got, want := w.Code, http.StatusNotFound; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleUpdate_UpdateError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - secrets := mock.NewMockGlobalSecretStore(controller) - secrets.EXPECT().FindName(gomock.Any(), dummySecret.Namespace, dummySecret.Name).Return(&core.Secret{Name: "github_password"}, nil) - secrets.EXPECT().Update(gomock.Any(), gomock.Any()).Return(errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("namespace", "octocat") - c.URLParams.Add("name", "github_password") - - in := new(bytes.Buffer) - json.NewEncoder(in).Encode(&core.Secret{Data: "password"}) - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", in) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleUpdate(secrets).ServeHTTP(w, r) - if got, want := w.Code, http.StatusInternalServerError; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/system/license.go b/handler/api/system/license.go deleted file mode 100644 index 01743c6f3b..0000000000 --- a/handler/api/system/license.go +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package system - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" -) - -// HandleLicense returns an http.HandlerFunc that writes -// json-encoded license details to the response body. -func HandleLicense(license core.License) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - render.JSON(w, license, 200) - } -} diff --git a/handler/api/system/limits.go b/handler/api/system/limits.go deleted file mode 100644 index 82f2daaec5..0000000000 --- a/handler/api/system/limits.go +++ /dev/null @@ -1,7 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package system diff --git a/handler/api/system/none.go b/handler/api/system/none.go deleted file mode 100644 index ebb1e09486..0000000000 --- a/handler/api/system/none.go +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. - -// +build oss - -package system - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" -) - -var notImplemented = func(w http.ResponseWriter, r *http.Request) { - render.NotImplemented(w, render.ErrNotImplemented) -} - -// HandleLicense returns a no-op http.HandlerFunc. -func HandleLicense(license core.License) http.HandlerFunc { - return notImplemented -} - -// HandleStats returns a no-op http.HandlerFunc. -func HandleStats( - core.BuildStore, - core.StageStore, - core.UserStore, - core.RepositoryStore, - core.Pubsub, - core.LogStream, -) http.HandlerFunc { - return notImplemented -} diff --git a/handler/api/system/stats.go b/handler/api/system/stats.go deleted file mode 100644 index db5a429dc3..0000000000 --- a/handler/api/system/stats.go +++ /dev/null @@ -1,235 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package system - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/logger" -) - -type ( - users struct { - Total int64 `json:"total"` - } - - repos struct { - Active int64 `json:"active"` - } - - builds struct { - Pending int `json:"pending"` - Running int `json:"running"` - Total int64 `json:"total"` - } - - events struct { - Subscribers int `json:"subscribers"` - } - - streams struct { - Subscribers int `json:"subscribers"` - Channels int `json:"channels"` - } - - platform struct { - Subscribers int `json:"subscribers"` - OS string `json:"os"` - Arch string `json:"arch"` - Variant string `json:"variant"` - Kernel string `json:"kernel"` - Pending int `json:"pending"` - Running int `json:"running"` - } - - stats struct { - Users users `json:"users"` - Repos repos `json:"repos"` - Builds builds `json:"builds"` - Pipelines []*platform `json:"pipelines"` - Events events `json:"events"` - Streams map[int64]int `json:"streams"` - Watchers map[int64]int `json:"watchers"` - } -) - -// HandleStats returns an http.HandlerFunc that writes a -// json-encoded list of system stats to the response body. -func HandleStats( - builds core.BuildStore, - stages core.StageStore, - users core.UserStore, - repos core.RepositoryStore, - bus core.Pubsub, - streams core.LogStream, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ctx = r.Context() - var err error - - // - // User Stats - // - - stats := &stats{} - stats.Users.Total, err = users.Count(ctx) - if err != nil { - render.InternalError(w, err) - logger.FromRequest(r).WithError(err). - Warnln("stats: cannot get user count") - return - } - - // - // Repo Stats - // - - stats.Repos.Active, err = repos.Count(ctx) - if err != nil { - render.InternalError(w, err) - logger.FromRequest(r).WithError(err). - Warnln("stats: cannot get repo count") - return - } - - // - // Build Stats - // - - stats.Builds.Total, err = builds.Count(ctx) - if err != nil { - render.InternalError(w, err) - logger.FromRequest(r).WithError(err). - Warnln("stats: cannot get build count") - return - } - buildsPending, err := builds.Pending(ctx) - if err != nil { - render.InternalError(w, err) - logger.FromRequest(r).WithError(err). 
- Warnln("stats: cannot get pending build count") - return - } - buildsRunning, err := builds.Running(ctx) - if err != nil { - render.InternalError(w, err) - logger.FromRequest(r).WithError(err). - Warnln("stats: cannot get running build count") - return - } - stats.Builds.Pending = len(buildsPending) - stats.Builds.Running = len(buildsRunning) - - // - // Queue Stats - // - - incomplete, err := stages.ListIncomplete(ctx) - if err != nil { - render.InternalError(w, err) - logger.FromRequest(r).WithError(err). - Warnln("stats: cannot get pending stage count") - return - } - platforms := newPlatformList() - aggregatePlatformStats(platforms, incomplete) - stats.Pipelines = platforms - - // - // Event Stats - // - - stats.Events.Subscribers, err = bus.Subscribers() - if err != nil { - render.InternalError(w, err) - logger.FromRequest(r).WithError(err). - Warnln("stats: cannot get number of subscribers") - return - } - - // - // Stream Stats - // - - stats.Streams = streams.Info(ctx).Streams - - render.JSON(w, stats, 200) - } -} - -// platform statistics are returned in a fixed array. these -// are pointers to the platform index in the array. -const ( - linuxArm6 int = iota - linuxArm7 - linuxArm8 - linuxArm9 - linuxAmd64 - windows1709 - windows1803 - windows1809 -) - -// helper function returns a list of all platforms -// and variants currently supported by core. -func newPlatformList() []*platform { - platforms := [8]*platform{} - platforms[linuxArm6] = &platform{OS: "linux", Arch: "arm", Variant: "v6"} - platforms[linuxArm7] = &platform{OS: "linux", Arch: "arm", Variant: "v7"} - platforms[linuxArm8] = &platform{OS: "linux", Arch: "arm64", Variant: "v8"} - platforms[linuxArm9] = &platform{OS: "linux", Arch: "arm", Variant: "v9"} - platforms[linuxAmd64] = &platform{OS: "linux", Arch: "amd64"} - platforms[windows1709] = &platform{OS: "windows", Arch: "arm64", Kernel: "1709"} - platforms[windows1803] = &platform{OS: "windows", Arch: "arm64", Kernel: "1803"} - platforms[windows1809] = &platform{OS: "windows", Arch: "arm64", Kernel: "1809"} - return platforms[:] -} - -// helper function counts the number of running and -// pending stages by os, architecture, and variant. 
-func aggregatePlatformStats(platforms []*platform, stages []*core.Stage) { - for _, stage := range stages { - var index int - switch { - case stage.OS == "windows" && stage.Kernel == "1709": - index = windows1709 - case stage.OS == "windows" && stage.Kernel == "1803": - index = windows1803 - case stage.OS == "windows" && stage.Kernel == "1809": - index = windows1809 - case stage.OS == "windows": - // default to 1803 when no variant specified - index = windows1809 - case stage.Arch == "arm" && stage.Variant == "v6": - index = linuxArm6 - case stage.Arch == "arm" && stage.Variant == "v7": - index = linuxArm7 - case stage.Arch == "arm" && stage.Variant == "v9": - index = linuxArm9 - case stage.Arch == "arm": - // default to arm7 when no variant specified - index = linuxArm7 - case stage.Arch == "arm64" && stage.Variant == "v8": - index = linuxArm8 - case stage.Arch == "arm64": - // default to arm8 when arm64 - index = linuxArm8 - default: - index = linuxAmd64 - continue - } - - switch stage.Status { - case core.StatusPending: - platforms[index].Pending++ - case core.StatusRunning: - platforms[index].Running++ - } - } -} diff --git a/handler/api/system/stats_test.go b/handler/api/system/stats_test.go deleted file mode 100644 index 03c3cb2243..0000000000 --- a/handler/api/system/stats_test.go +++ /dev/null @@ -1,17 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package system - -import ( - "io/ioutil" - - "github.com/sirupsen/logrus" -) - -func init() { - logrus.SetOutput(ioutil.Discard) -} diff --git a/handler/api/template/all.go b/handler/api/template/all.go deleted file mode 100644 index 837d67d34a..0000000000 --- a/handler/api/template/all.go +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package template - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" -) - -// HandleListAll returns an http.HandlerFunc that writes a json-encoded -// list of templates to the response body. -func HandleListAll(templateStore core.TemplateStore) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - list, err := templateStore.ListAll(r.Context()) - if err != nil { - render.NotFound(w, err) - return - } - render.JSON(w, list, 200) - } -} diff --git a/handler/api/template/all_test.go b/handler/api/template/all_test.go deleted file mode 100644 index c50aae24d3..0000000000 --- a/handler/api/template/all_test.go +++ /dev/null @@ -1,84 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
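// A simplified, self-contained illustration of what aggregatePlatformStats in
// handler/api/system/stats.go (deleted above) computes: a count of pending and
// running stages per platform. This sketch keys a map by os/arch/variant/kernel
// instead of the fixed index array used by the handler, so it is an
// illustrative variant rather than the original algorithm.
package main

import "fmt"

type stage struct {
	OS, Arch, Variant, Kernel, Status string
}

func main() {
	stages := []stage{
		{OS: "linux", Arch: "amd64", Status: "pending"},
		{OS: "linux", Arch: "arm64", Variant: "v8", Status: "running"},
		{OS: "windows", Kernel: "1809", Status: "pending"},
	}

	type counts struct{ Pending, Running int }
	byPlatform := map[string]*counts{}

	for _, s := range stages {
		key := fmt.Sprintf("%s/%s/%s/%s", s.OS, s.Arch, s.Variant, s.Kernel)
		if byPlatform[key] == nil {
			byPlatform[key] = &counts{}
		}
		switch s.Status {
		case "pending":
			byPlatform[key].Pending++
		case "running":
			byPlatform[key].Running++
		}
	}

	for key, c := range byPlatform {
		fmt.Printf("%s pending=%d running=%d\n", key, c.Pending, c.Running)
	}
}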
- -// +build !oss - -package template - -import ( - "context" - "encoding/json" - "net/http" - "net/http/httptest" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/mock" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -var ( - dummyTemplate = &core.Template{ - Name: "my_template.yml", - Data: "my_data", - Created: 1, - Updated: 2, - Namespace: "my_org", - } - dummyTemplateList = []*core.Template{ - dummyTemplate, - } -) - -func TestHandleAll(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - templates := mock.NewMockTemplateStore(controller) - templates.EXPECT().ListAll(gomock.Any()).Return(dummyTemplateList, nil) - - c := new(chi.Context) - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleListAll(templates).ServeHTTP(w, r) - if got, want := w.Code, http.StatusOK; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } -} - -func TestHandleAll_TemplateListErr(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - templates := mock.NewMockTemplateStore(controller) - templates.EXPECT().ListAll(gomock.Any()).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleListAll(templates).ServeHTTP(w, r) - if got, want := w.Code, http.StatusNotFound; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/template/create.go b/handler/api/template/create.go deleted file mode 100644 index 1a5cbaa1da..0000000000 --- a/handler/api/template/create.go +++ /dev/null @@ -1,72 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package template - -import ( - "encoding/json" - "net/http" - "path/filepath" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/handler/api/render" - - "github.com/go-chi/chi" -) - -var ( - errTemplateExtensionInvalid = errors.New("Template extension invalid. Must be yaml, starlark or jsonnet") -) - -type templateInput struct { - Name string `json:"name"` - Data string `json:"data"` -} - -// HandleCreate returns an http.HandlerFunc that processes http -// requests to create a new template. 
-func HandleCreate(templateStore core.TemplateStore) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - namespace := chi.URLParam(r, "namespace") - in := new(templateInput) - err := json.NewDecoder(r.Body).Decode(in) - if err != nil { - render.BadRequest(w, err) - return - } - - // check valid template extension type - switch filepath.Ext(in.Name) { - case ".yml", ".yaml": - case ".star", ".starlark", ".script": - case ".jsonnet": - default: - render.BadRequest(w, errTemplateExtensionInvalid) - return - } - - t := &core.Template{ - Name: in.Name, - Data: in.Data, - Namespace: namespace, - } - - err = t.Validate() - if err != nil { - render.BadRequest(w, err) - return - } - - err = templateStore.Create(r.Context(), t) - if err != nil { - render.InternalError(w, err) - return - } - - render.JSON(w, t, 200) - } -} diff --git a/handler/api/template/create_test.go b/handler/api/template/create_test.go deleted file mode 100644 index 3df7230ba1..0000000000 --- a/handler/api/template/create_test.go +++ /dev/null @@ -1,154 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package template - -import ( - "bytes" - "context" - "encoding/json" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/mock" - - "net/http" - "net/http/httptest" - "testing" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -func TestHandleCreate(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - templates := mock.NewMockTemplateStore(controller) - templates.EXPECT().Create(gomock.Any(), gomock.Any()).Return(nil) - - c := new(chi.Context) - - in := new(bytes.Buffer) - json.NewEncoder(in).Encode(dummyTemplate) - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", in) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleCreate(templates).ServeHTTP(w, r) - if got, want := w.Code, http.StatusOK; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } -} - -func TestHandleCreate_NotValidTemplateExtensionName(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - c := new(chi.Context) - in := new(bytes.Buffer) - json.NewEncoder(in).Encode(&core.Template{Name: "my_template", Data: "my_data"}) - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", in) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleCreate(nil).ServeHTTP(w, r) - if got, want := w.Code, http.StatusBadRequest; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := &errors.Error{}, &errors.Error{Message: "Template extension invalid. 
Must be yaml, starlark or jsonnet"} - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleCreate_ValidationErrorData(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - c := new(chi.Context) - in := new(bytes.Buffer) - json.NewEncoder(in).Encode(&core.Template{Name: "my_template.yml", Data: ""}) - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", in) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleCreate(nil).ServeHTTP(w, r) - if got, want := w.Code, http.StatusBadRequest; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := &errors.Error{}, &errors.Error{Message: "No Template Data Provided"} - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleCreate_BadRequest(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - c := new(chi.Context) - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleCreate(nil).ServeHTTP(w, r) - if got, want := w.Code, http.StatusBadRequest; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := &errors.Error{}, &errors.Error{Message: "EOF"} - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleCreate_CreateError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - templates := mock.NewMockTemplateStore(controller) - templates.EXPECT().Create(gomock.Any(), gomock.Any()).Return(errors.ErrNotFound) - - c := new(chi.Context) - in := new(bytes.Buffer) - json.NewEncoder(in).Encode(dummyTemplate) - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", in) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleCreate(templates).ServeHTTP(w, r) - if got, want := w.Code, http.StatusInternalServerError; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/template/delete.go b/handler/api/template/delete.go deleted file mode 100644 index bca24ca245..0000000000 --- a/handler/api/template/delete.go +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package template - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - - "github.com/go-chi/chi" -) - -// HandleDelete returns an http.HandlerFunc that processes http -// requests to delete a template. 
-func HandleDelete(template core.TemplateStore) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - name = chi.URLParam(r, "name") - namespace = chi.URLParam(r, "namespace") - ) - s, err := template.FindName(r.Context(), name, namespace) - if err != nil { - render.NotFound(w, err) - return - } - err = template.Delete(r.Context(), s) - if err != nil { - render.InternalError(w, err) - return - } - w.WriteHeader(http.StatusNoContent) - } -} diff --git a/handler/api/template/delete_test.go b/handler/api/template/delete_test.go deleted file mode 100644 index aae3c1617a..0000000000 --- a/handler/api/template/delete_test.go +++ /dev/null @@ -1,105 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package template - -import ( - "context" - "encoding/json" - "net/http" - "net/http/httptest" - "testing" - - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/mock" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -func TestHandleDelete(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - template := mock.NewMockTemplateStore(controller) - template.EXPECT().FindName(gomock.Any(), dummyTemplate.Name, dummyTemplate.Namespace).Return(dummyTemplate, nil) - template.EXPECT().Delete(gomock.Any(), dummyTemplate).Return(nil) - - c := new(chi.Context) - c.URLParams.Add("name", "my_template.yml") - c.URLParams.Add("namespace", "my_org") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleDelete(template).ServeHTTP(w, r) - if got, want := w.Code, http.StatusNoContent; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } -} - -func TestHandleDelete_TemplateNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - template := mock.NewMockTemplateStore(controller) - template.EXPECT().FindName(gomock.Any(), dummyTemplate.Name, dummyTemplate.Namespace).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("name", "my_template.yml") - c.URLParams.Add("namespace", "my_org") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleDelete(template).ServeHTTP(w, r) - if got, want := w.Code, http.StatusNotFound; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleDelete_DeleteError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - template := mock.NewMockTemplateStore(controller) - template.EXPECT().FindName(gomock.Any(), dummyTemplate.Name, dummyTemplate.Namespace).Return(dummyTemplate, nil) - template.EXPECT().Delete(gomock.Any(), dummyTemplate).Return(errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("name", "my_template.yml") - c.URLParams.Add("namespace", "my_org") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleDelete(template).ServeHTTP(w, 
r) - if got, want := w.Code, http.StatusInternalServerError; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/template/find.go b/handler/api/template/find.go deleted file mode 100644 index b843e4e2bf..0000000000 --- a/handler/api/template/find.go +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -//go:build !oss -// +build !oss - -package template - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - - "github.com/go-chi/chi" -) - -// HandleFind returns an http.HandlerFunc that writes json-encoded -// template details to the response body. -func HandleFind(templateStore core.TemplateStore) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - name = chi.URLParam(r, "name") - namespace = chi.URLParam(r, "namespace") - ) - template, err := templateStore.FindName(r.Context(), name, namespace) - if err != nil { - render.NotFound(w, err) - return - } - render.JSON(w, template, 200) - } -} diff --git a/handler/api/template/find_test.go b/handler/api/template/find_test.go deleted file mode 100644 index a5687b04af..0000000000 --- a/handler/api/template/find_test.go +++ /dev/null @@ -1,74 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package template - -import ( - "context" - "encoding/json" - "net/http" - "net/http/httptest" - "testing" - - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/mock" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -func TestHandleFind(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - template := mock.NewMockTemplateStore(controller) - template.EXPECT().FindName(gomock.Any(), dummyTemplate.Name, dummyTemplate.Namespace).Return(dummyTemplate, nil) - - c := new(chi.Context) - c.URLParams.Add("name", "my_template.yml") - c.URLParams.Add("namespace", "my_org") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleFind(template).ServeHTTP(w, r) - if got, want := w.Code, http.StatusOK; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } -} - -func TestHandleFind_TemplateNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - template := mock.NewMockTemplateStore(controller) - template.EXPECT().FindName(gomock.Any(), dummyTemplate.Name, dummyTemplate.Namespace).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("name", "my_template.yml") - c.URLParams.Add("namespace", "my_org") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleFind(template).ServeHTTP(w, r) - if got, want := w.Code, http.StatusNotFound; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - 
json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/template/list.go b/handler/api/template/list.go deleted file mode 100644 index c414dba8ee..0000000000 --- a/handler/api/template/list.go +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package template - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - - "github.com/go-chi/chi" -) - -// HandleList returns an http.HandlerFunc that writes a json-encoded -// list of templates to the response body by namespace -func HandleList(templateStore core.TemplateStore) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - namespace := chi.URLParam(r, "namespace") - list, err := templateStore.List(r.Context(), namespace) - if err != nil { - render.NotFound(w, err) - return - } - render.JSON(w, list, 200) - } -} diff --git a/handler/api/template/list_test.go b/handler/api/template/list_test.go deleted file mode 100644 index d066274ef8..0000000000 --- a/handler/api/template/list_test.go +++ /dev/null @@ -1,70 +0,0 @@ -package template - -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -import ( - "context" - "encoding/json" - "net/http" - "net/http/httptest" - "testing" - - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/mock" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -func TestHandleList(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - templates := mock.NewMockTemplateStore(controller) - templates.EXPECT().List(gomock.Any(), dummyTemplate.Namespace).Return(dummyTemplateList, nil) - - c := new(chi.Context) - c.URLParams.Add("namespace", "my_org") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleList(templates).ServeHTTP(w, r) - if got, want := w.Code, http.StatusOK; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } -} - -func TestHandleList_TemplateListErr(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - templates := mock.NewMockTemplateStore(controller) - templates.EXPECT().List(gomock.Any(), dummyTemplate.Namespace).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("namespace", "my_org") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleList(templates).ServeHTTP(w, r) - if got, want := w.Code, http.StatusNotFound; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/template/none.go b/handler/api/template/none.go deleted file mode 100644 index 43c51aa6b3..0000000000 --- a/handler/api/template/none.go +++ /dev/null @@ -1,56 +0,0 @@ -// Copyright 2019 Drone IO, Inc. 
-// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build oss - -package template - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" -) - -var notImplemented = func(w http.ResponseWriter, r *http.Request) { - render.NotImplemented(w, render.ErrNotImplemented) -} - -func HandleCreate(store core.TemplateStore) http.HandlerFunc { - return notImplemented -} - -func HandleUpdate(core.TemplateStore) http.HandlerFunc { - return notImplemented -} - -func HandleDelete(core.TemplateStore) http.HandlerFunc { - return notImplemented -} - -func HandleFind(core.TemplateStore) http.HandlerFunc { - return notImplemented -} - -func HandleList(core.TemplateStore) http.HandlerFunc { - return notImplemented -} - -func HandleListAll(core.TemplateStore) http.HandlerFunc { - return notImplemented -} - -func HandleAll(core.TemplateStore) http.HandlerFunc { - return notImplemented -} diff --git a/handler/api/template/update.go b/handler/api/template/update.go deleted file mode 100644 index 5541244c7b..0000000000 --- a/handler/api/template/update.go +++ /dev/null @@ -1,67 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package template - -import ( - "encoding/json" - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - - "github.com/go-chi/chi" -) - -type templateUpdate struct { - Data *string `json:"data"` - Namespace *string `json:"namespace"` -} - -// HandleUpdate returns an http.HandlerFunc that processes http -// requests to update a template. -func HandleUpdate(templateStore core.TemplateStore) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - name = chi.URLParam(r, "name") - namespace = chi.URLParam(r, "namespace") - ) - - in := new(templateUpdate) - err := json.NewDecoder(r.Body).Decode(in) - if err != nil { - render.BadRequest(w, err) - return - } - - s, err := templateStore.FindName(r.Context(), name, namespace) - if err != nil { - render.NotFound(w, err) - return - } - - if in.Data != nil { - s.Data = *in.Data - } - if in.Namespace != nil { - s.Namespace = *in.Namespace - } - - err = s.Validate() - if err != nil { - render.BadRequest(w, err) - return - } - - err = templateStore.Update(r.Context(), s) - if err != nil { - render.InternalError(w, err) - return - } - - render.JSON(w, s, 200) - } -} diff --git a/handler/api/template/update_test.go b/handler/api/template/update_test.go deleted file mode 100644 index 7ec96bbab9..0000000000 --- a/handler/api/template/update_test.go +++ /dev/null @@ -1,148 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
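// The none.go files in this patch (secrets, system, and the template one just
// above) all follow the same pattern: under the `oss` build tag, every
// enterprise-only route is served by a single shared handler that reports the
// feature as not implemented. A minimal self-contained sketch of that pattern
// without the build-tag split; the routes and the 501 body are illustrative
// assumptions, not the original render package.
package main

import (
	"encoding/json"
	"net/http"
)

// notImplemented mirrors the shared stub used by the deleted handlers.
var notImplemented = http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
	w.Header().Set("Content-Type", "application/json")
	w.WriteHeader(http.StatusNotImplemented)
	json.NewEncoder(w).Encode(map[string]string{"message": "Not Implemented"})
})

func main() {
	mux := http.NewServeMux()
	// in the real code the same swap happens per handler constructor
	// (HandleCreate, HandleUpdate, ...) selected at build time.
	mux.Handle("/api/secrets", notImplemented)
	mux.Handle("/api/templates", notImplemented)
	http.ListenAndServe(":8080", mux)
}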
- -// +build !oss - -package template - -import ( - "bytes" - "context" - "encoding/json" - "net/http" - "net/http/httptest" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/mock" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -func TestHandleUpdate(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - template := mock.NewMockTemplateStore(controller) - template.EXPECT().FindName(gomock.Any(), dummyTemplate.Name, dummyTemplate.Namespace).Return(dummyTemplate, nil) - template.EXPECT().Update(gomock.Any(), gomock.Any()).Return(nil) - - c := new(chi.Context) - c.URLParams.Add("name", "my_template.yml") - c.URLParams.Add("namespace", "my_org") - - in := new(bytes.Buffer) - json.NewEncoder(in).Encode(dummyTemplate) - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", in) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleUpdate(template).ServeHTTP(w, r) - if got, want := w.Code, http.StatusOK; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } -} - -func TestHandleUpdate_ValidationErrorData(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - template := mock.NewMockTemplateStore(controller) - template.EXPECT().FindName(gomock.Any(), dummyTemplate.Name, dummyTemplate.Namespace).Return(&core.Template{Name: "my_template.yml"}, nil) - - c := new(chi.Context) - c.URLParams.Add("name", "my_template.yml") - c.URLParams.Add("namespace", "my_org") - - in := new(bytes.Buffer) - json.NewEncoder(in).Encode(&core.Secret{Data: ""}) - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", in) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleUpdate(template).ServeHTTP(w, r) - if got, want := w.Code, http.StatusBadRequest; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), &errors.Error{Message: "No Template Data Provided"} - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleUpdate_TemplateNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - template := mock.NewMockTemplateStore(controller) - template.EXPECT().FindName(gomock.Any(), dummyTemplate.Name, dummyTemplate.Namespace).Return(nil, errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("name", "my_template.yml") - c.URLParams.Add("namespace", "my_org") - - in := new(bytes.Buffer) - json.NewEncoder(in).Encode(&core.Secret{}) - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", in) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleUpdate(template).ServeHTTP(w, r) - if got, want := w.Code, http.StatusNotFound; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestHandleUpdate_UpdateError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - template := mock.NewMockTemplateStore(controller) - template.EXPECT().FindName(gomock.Any(), dummyTemplate.Name, dummyTemplate.Namespace).Return(&core.Template{Name: "my_template.yml"}, nil) - 
template.EXPECT().Update(gomock.Any(), gomock.Any()).Return(errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("name", "my_template.yml") - c.URLParams.Add("namespace", "my_org") - - in := new(bytes.Buffer) - json.NewEncoder(in).Encode(&core.Template{Data: "my_data"}) - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", in) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleUpdate(template).ServeHTTP(w, r) - if got, want := w.Code, http.StatusInternalServerError; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/user/activity.go b/handler/api/user/activity.go deleted file mode 100644 index cdd668ba88..0000000000 --- a/handler/api/user/activity.go +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package user - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/handler/api/request" - "github.com/drone/drone/logger" -) - -// HandleRecent returns an http.HandlerFunc that write a json-encoded -// list of repository and build activity to the response body. -func HandleRecent(repos core.RepositoryStore) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - viewer, _ := request.UserFrom(r.Context()) - list, err := repos.ListRecent(r.Context(), viewer.ID) - if err != nil { - render.InternalError(w, err) - logger.FromRequest(r).WithError(err). - Warnln("api: cannot list repositories") - } else { - render.JSON(w, list, 200) - } - } -} diff --git a/handler/api/user/find.go b/handler/api/user/find.go deleted file mode 100644 index e8607f1830..0000000000 --- a/handler/api/user/find.go +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package user - -import ( - "net/http" - - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/handler/api/request" -) - -// HandleFind returns an http.HandlerFunc that writes json-encoded -// account information to the http response body. 
-func HandleFind() http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - viewer, _ := request.UserFrom(ctx) - render.JSON(w, viewer, 200) - } -} diff --git a/handler/api/user/find_test.go b/handler/api/user/find_test.go deleted file mode 100644 index 8235b6b1e6..0000000000 --- a/handler/api/user/find_test.go +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package user - -import ( - "encoding/json" - "net/http/httptest" - "testing" - - "github.com/drone/drone/handler/api/request" - "github.com/drone/drone/core" - - "github.com/google/go-cmp/cmp" -) - -func TestFind(t *testing.T) { - mockUser := &core.User{ - ID: 1, - Login: "octocat", - } - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/api/user", nil) - r = r.WithContext( - request.WithUser(r.Context(), mockUser), - ) - - HandleFind()(w, r) - if got, want := w.Code, 200; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := &core.User{}, mockUser - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/user/remote/repo.go b/handler/api/user/remote/repo.go deleted file mode 100644 index a932f7c7e3..0000000000 --- a/handler/api/user/remote/repo.go +++ /dev/null @@ -1,60 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package remote - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/handler/api/request" - "github.com/drone/drone/logger" - "github.com/drone/go-scm/scm" - - "github.com/go-chi/chi" -) - -// HandleRepo returns an http.HandlerFunc that writes a json-encoded -// repository to the response body. -func HandleRepo(repos core.RepositoryService) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - viewer, _ = request.UserFrom(r.Context()) - - owner = chi.URLParam(r, "owner") - name = chi.URLParam(r, "name") - slug = scm.Join(owner, name) - ) - - repo, err := repos.Find(r.Context(), viewer, slug) - if err != nil { - render.InternalError(w, err) - logger.FromRequest(r).WithError(err). - Debugln("api: cannot get remote repository") - return - } - - perms, err := repos.FindPerm(r.Context(), viewer, slug) - if err != nil { - render.InternalError(w, err) - logger.FromRequest(r).WithError(err). - Debugln("api: cannot get remote repository permissions") - } else { - repo.Perms = perms - } - - render.JSON(w, repo, 200) - } -} diff --git a/handler/api/user/remote/repos.go b/handler/api/user/remote/repos.go deleted file mode 100644 index 53193916e8..0000000000 --- a/handler/api/user/remote/repos.go +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright 2019 Drone IO, Inc. 
-// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package remote - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/handler/api/request" - "github.com/drone/drone/logger" -) - -// HandleRepos returns an http.HandlerFunc that write a json-encoded -// list of repositories to the response body. -func HandleRepos(repos core.RepositoryService) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - viewer, _ := request.UserFrom(r.Context()) - - list, err := repos.List(r.Context(), viewer) - if err != nil { - render.InternalError(w, err) - logger.FromRequest(r).WithError(err). - Debugln("api: cannot list remote repositories") - } else { - render.JSON(w, list, 200) - } - } -} diff --git a/handler/api/user/repos.go b/handler/api/user/repos.go deleted file mode 100644 index d79c071bde..0000000000 --- a/handler/api/user/repos.go +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package user - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/handler/api/request" - "github.com/drone/drone/logger" -) - -// HandleRepos returns an http.HandlerFunc that write a json-encoded -// list of repositories to the response body. -func HandleRepos(repos core.RepositoryStore) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - viewer, _ := request.UserFrom(r.Context()) - - var list []*core.Repository - var err error - if r.FormValue("latest") != "true" { - list, err = repos.List(r.Context(), viewer.ID) - } else { - list, err = repos.ListLatest(r.Context(), viewer.ID) - } - if err != nil { - render.InternalError(w, err) - logger.FromRequest(r).WithError(err). - Debugln("api: cannot list repositories") - } else { - render.JSON(w, list, 200) - } - } -} diff --git a/handler/api/user/repos_test.go b/handler/api/user/repos_test.go deleted file mode 100644 index 3ab2ebace7..0000000000 --- a/handler/api/user/repos_test.go +++ /dev/null @@ -1,94 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -package user - -import ( - "encoding/json" - "io/ioutil" - "net/http" - "net/http/httptest" - "testing" - - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/handler/api/request" - "github.com/drone/drone/mock" - "github.com/drone/drone/core" - - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" - "github.com/sirupsen/logrus" -) - -func init() { - logrus.SetOutput(ioutil.Discard) -} - -func TestRepositoryList(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{ - ID: 1, - Login: "octocat", - } - - mockRepos := []*core.Repository{ - { - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - }, - } - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().List(gomock.Any(), mockUser.ID).Return(mockRepos, nil) - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - request.WithUser(r.Context(), mockUser), - ) - - HandleRepos(repos)(w, r) - if got, want := w.Code, http.StatusOK; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := []*core.Repository{}, mockRepos - json.NewDecoder(w.Body).Decode(&got) - if diff := cmp.Diff(got, want); len(diff) > 0 { - t.Errorf(diff) - } -} - -func TestRepositoryListErr(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{ - ID: 1, - Login: "octocat", - } - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().List(gomock.Any(), mockUser.ID).Return(nil, errors.ErrNotFound) - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - request.WithUser(r.Context(), mockUser), - ) - - HandleRepos(repos)(w, r) - if got, want := w.Code, http.StatusInternalServerError; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := &errors.Error{}, errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) > 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/user/sync.go b/handler/api/user/sync.go deleted file mode 100644 index 42512d7379..0000000000 --- a/handler/api/user/sync.go +++ /dev/null @@ -1,65 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package user - -import ( - "context" - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/handler/api/request" - "github.com/drone/drone/logger" -) - -// HandleSync returns an http.HandlerFunc synchronizes and then -// write a json-encoded list of repositories to the response body. -func HandleSync(syncer core.Syncer, repos core.RepositoryStore) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - viewer, _ := request.UserFrom(r.Context()) - - // performs asynchronous account synchronization. - // this requires long polling to determine when the - // sync is complete. 
- if r.FormValue("async") == "true" { - ctx := context.Background() - go func(ctx context.Context, viewer *core.User) { - _, err := syncer.Sync(ctx, viewer) - if err != nil { - logger.FromContext(ctx).WithError(err). - Debugln("api: cannot synchronize account") - } - }(ctx, viewer) - w.WriteHeader(204) - return - } - - _, err := syncer.Sync(r.Context(), viewer) - if err != nil { - render.InternalError(w, err) - logger.FromRequest(r).WithError(err). - Warnln("api: cannot synchronize account") - return - } - list, err := repos.List(r.Context(), viewer.ID) - if err != nil { - render.InternalError(w, err) - logger.FromRequest(r).WithError(err). - Warnln("api: cannot synchronize account") - } else { - render.JSON(w, list, 200) - } - } -} diff --git a/handler/api/user/sync_test.go b/handler/api/user/sync_test.go deleted file mode 100644 index 0c032c9e43..0000000000 --- a/handler/api/user/sync_test.go +++ /dev/null @@ -1,5 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package user diff --git a/handler/api/user/token.go b/handler/api/user/token.go deleted file mode 100644 index dd8d9dbabc..0000000000 --- a/handler/api/user/token.go +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package user - -import ( - "net/http" - - "github.com/dchest/uniuri" - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/handler/api/request" -) - -type userWithToken struct { - *core.User - Token string `json:"token"` -} - -// HandleToken returns an http.HandlerFunc that writes json-encoded -// account information to the http response body with the user token. -func HandleToken(users core.UserStore) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - viewer, _ := request.UserFrom(ctx) - if r.FormValue("rotate") == "true" { - viewer.Hash = uniuri.NewLen(32) - if err := users.Update(ctx, viewer); err != nil { - render.InternalError(w, err) - return - } - } - render.JSON(w, &userWithToken{viewer, viewer.Hash}, 200) - } -} diff --git a/handler/api/user/token_test.go b/handler/api/user/token_test.go deleted file mode 100644 index 6737a6e933..0000000000 --- a/handler/api/user/token_test.go +++ /dev/null @@ -1,124 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -package user - -import ( - "encoding/json" - "net/http/httptest" - "testing" - - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/handler/api/request" - "github.com/drone/drone/mock" - "github.com/drone/drone/core" - - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" - "github.com/google/go-cmp/cmp/cmpopts" -) - -func TestToken(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{ - ID: 1, - Login: "octocat", - Hash: "MjAxOC0wOC0xMVQxNTo1ODowN1o", - } - - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/", nil) - r = r.WithContext( - request.WithUser(r.Context(), mockUser), - ) - - HandleToken(nil)(w, r) - if got, want := w.Code, 200; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := &userWithToken{}, mockUser - json.NewDecoder(w.Body).Decode(got) - - if got, want := got.Token, want.Hash; got != want { - t.Errorf("Expect user secret returned") - } -} - -// the purpose of this unit test is to verify that the token -// is refreshed if the user ?refresh=true query parameter is -// included in the http request. -func TestTokenRotate(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{ - ID: 1, - Login: "octocat", - Hash: "MjAxOC0wOC0xMVQxNTo1ODowN1o", - } - - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/?rotate=true", nil) - r = r.WithContext( - request.WithUser(r.Context(), mockUser), - ) - - users := mock.NewMockUserStore(controller) - users.EXPECT().Update(gomock.Any(), gomock.Any()).Return(nil) - - HandleToken(users)(w, r) - if got, want := w.Code, 200; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := &userWithToken{}, mockUser - json.NewDecoder(w.Body).Decode(got) - - ignore := cmpopts.IgnoreFields(core.User{}, "Hash") - if diff := cmp.Diff(got.User, want, ignore); len(diff) != 0 { - t.Errorf(diff) - } - if got.Token == "" { - t.Errorf("Expect user token returned") - } - if got, want := got.Token, "MjAxOC0wOC0xMVQxNTo1ODowN1o"; got == want { - t.Errorf("Expect user hash updated") - } -} - -// the purpose of this unit test is to verify that an error -// updating the database will result in an internal server -// error returned to the client. -func TestToken_UpdateError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{ - ID: 1, - Login: "octocat", - } - - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/?rotate=true", nil) - r = r.WithContext( - request.WithUser(r.Context(), mockUser), - ) - - users := mock.NewMockUserStore(controller) - users.EXPECT().Update(gomock.Any(), gomock.Any()).Return(errors.ErrNotFound) - - HandleToken(users)(w, r) - if got, want := w.Code, 500; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/user/update.go b/handler/api/user/update.go deleted file mode 100644 index 7d91bea2f6..0000000000 --- a/handler/api/user/update.go +++ /dev/null @@ -1,52 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package user - -import ( - "encoding/json" - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/handler/api/request" - "github.com/drone/drone/logger" -) - -// HandleUpdate returns an http.HandlerFunc that processes an http.Request -// to update the current user account. -func HandleUpdate(users core.UserStore) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - viewer, _ := request.UserFrom(r.Context()) - - in := new(core.User) - err := json.NewDecoder(r.Body).Decode(in) - if err != nil { - render.BadRequest(w, err) - logger.FromRequest(r).WithError(err). - Debugln("api: cannot unmarshal request body") - return - } - - viewer.Email = in.Email - err = users.Update(r.Context(), viewer) - if err != nil { - render.InternalError(w, err) - logger.FromRequest(r).WithError(err). - Warnln("api: cannot update user") - } else { - render.JSON(w, viewer, 200) - } - } -} diff --git a/handler/api/user/update_test.go b/handler/api/user/update_test.go deleted file mode 100644 index f26766f2a8..0000000000 --- a/handler/api/user/update_test.go +++ /dev/null @@ -1,130 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package user - -import ( - "bytes" - "encoding/json" - "net/http/httptest" - "testing" - - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/handler/api/request" - "github.com/drone/drone/mock" - "github.com/drone/drone/core" - - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -func TestUpdate(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - userInput := &core.User{ - Login: "octocat", - Email: "octocat@github.com", - } - user := &core.User{ - Login: "octocat", - Email: "", - } - - users := mock.NewMockUserStore(controller) - users.EXPECT().Update(gomock.Any(), user) - - in := new(bytes.Buffer) - json.NewEncoder(in).Encode(userInput) - w := httptest.NewRecorder() - r := httptest.NewRequest("PATCH", "/api/user", in) - r = r.WithContext( - request.WithUser(r.Context(), user), - ) - - HandleUpdate(users)(w, r) - if got, want := w.Code, 200; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - if got, want := user.Email, "octocat@github.com"; got != want { - t.Errorf("Want user email %v, got %v", want, got) - } - - got, want := new(core.User), user - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -// the purpose of this unit test is to verify that an invalid -// (in this case missing) request body will result in a bad -// request error returned to the client. 
-func TestUpdate_BadRequest(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{ - ID: 1, - Login: "octocat", - } - - in := new(bytes.Buffer) - w := httptest.NewRecorder() - r := httptest.NewRequest("PATCH", "/api/user", in) - r = r.WithContext( - request.WithUser(r.Context(), mockUser), - ) - - HandleUpdate(nil)(w, r) - if got, want := w.Code, 400; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), &errors.Error{Message: "EOF"} - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -// the purpose of this unit test is to verify that an error -// updating the database will result in an internal server -// error returned to the client. -func TestUpdate_ServerError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - userInput := &core.User{ - Login: "octocat", - Email: "octocat@github.com", - } - user := &core.User{ - Login: "octocat", - Email: "", - } - - users := mock.NewMockUserStore(controller) - users.EXPECT().Update(gomock.Any(), user).Return(errors.ErrNotFound) - - in := new(bytes.Buffer) - json.NewEncoder(in).Encode(userInput) - w := httptest.NewRecorder() - r := httptest.NewRequest("PATCH", "/api/user", in) - r = r.WithContext( - request.WithUser(r.Context(), user), - ) - - HandleUpdate(users)(w, r) - if got, want := w.Code, 500; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/users/create.go b/handler/api/users/create.go deleted file mode 100644 index 575680678e..0000000000 --- a/handler/api/users/create.go +++ /dev/null @@ -1,116 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package users - -import ( - "encoding/json" - "net/http" - "time" - - "github.com/dchest/uniuri" - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/handler/api/request" - "github.com/drone/drone/logger" -) - -type userWithToken struct { - *core.User - Token string `json:"token"` -} - -// HandleCreate returns an http.HandlerFunc that processes an http.Request -// to create the named user account in the system. -func HandleCreate(users core.UserStore, service core.UserService, sender core.WebhookSender) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - in := new(userWithToken) - err := json.NewDecoder(r.Body).Decode(in) - if err != nil { - render.BadRequest(w, err) - logger.FromRequest(r).WithError(err). 
- Debugln("api: cannot unmarshal request body") - return - } - - user := &core.User{ - Login: in.Login, - Active: true, - Admin: in.Admin, - Machine: in.Machine, - Created: time.Now().Unix(), - Updated: time.Now().Unix(), - Hash: in.Token, - } - if user.Hash == "" { - user.Hash = uniuri.NewLen(32) - } - - // if the user is not a machine account, we lookup - // the user in the remote system. We can then augment - // the user input with the remote system data. - if !user.Machine { - viewer, _ := request.UserFrom(r.Context()) - remote, err := service.FindLogin(r.Context(), viewer, user.Login) - if err == nil { - if user.Login != remote.Login && remote.Login != "" { - user.Login = remote.Login - } - if user.Email == "" { - user.Email = remote.Email - } - } - } - - err = user.Validate() - if err != nil { - render.ErrorCode(w, err, 400) - logger.FromRequest(r).WithError(err). - Errorln("api: invalid username") - return - } - - err = users.Create(r.Context(), user) - if err == core.ErrUserLimit { - render.ErrorCode(w, err, 402) - logger.FromRequest(r).WithError(err). - Errorln("api: cannot create user") - return - } - if err != nil { - render.InternalError(w, err) - logger.FromRequest(r).WithError(err). - Warnln("api: cannot create user") - return - } - - err = sender.Send(r.Context(), &core.WebhookData{ - Event: core.WebhookEventUser, - Action: core.WebhookActionCreated, - User: user, - }) - if err != nil { - logger.FromRequest(r).WithError(err). - Warnln("api: cannot send webhook") - } - - var out interface{} = user - // if the user is a machine account the api token - // is included in the response. - if user.Machine { - out = &userWithToken{user, user.Hash} - } - render.JSON(w, out, 200) - } -} diff --git a/handler/api/users/create_test.go b/handler/api/users/create_test.go deleted file mode 100644 index 77253edb61..0000000000 --- a/handler/api/users/create_test.go +++ /dev/null @@ -1,222 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -package users - -import ( - "bytes" - "context" - "encoding/json" - "net/http" - "net/http/httptest" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/mock" - - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -func TestCreate(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - users := mock.NewMockUserStore(controller) - users.EXPECT().Create(gomock.Any(), gomock.Any()).Do(func(_ context.Context, in *core.User) error { - if got, want := in.Login, "octocat"; got != want { - t.Errorf("Want user login %s, got %s", want, got) - } - if in.Hash == "" { - t.Errorf("Expect user secret generated") - } - return nil - }) - - webhook := mock.NewMockWebhookSender(controller) - webhook.EXPECT().Send(gomock.Any(), gomock.Any()).Return(nil) - - service := mock.NewMockUserService(controller) - service.EXPECT().FindLogin(gomock.Any(), gomock.Any(), "octocat").Return(nil, errors.New("not found")) - - in := new(bytes.Buffer) - json.NewEncoder(in).Encode(&core.User{Login: "octocat"}) - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/", in) - - HandleCreate(users, service, webhook)(w, r) - if got, want := w.Code, 200; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - out := new(core.User) - json.NewDecoder(w.Body).Decode(out) - if got, want := out.Login, "octocat"; got != want { - t.Errorf("Want user login %s, got %s", want, got) - } - if got, want := out.Active, true; got != want { - t.Errorf("Want user active %v, got %v", want, got) - } - if got := out.Created; got == 0 { - t.Errorf("Want user created set to current unix timestamp, got %v", got) - } - if got := out.Updated; got == 0 { - t.Errorf("Want user updated set to current unix timestamp, got %v", got) - } -} - -func TestCreateWithToken(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - users := mock.NewMockUserStore(controller) - users.EXPECT().Create(gomock.Any(), gomock.Any()).Do(func(_ context.Context, in *core.User) error { - if got, want := in.Login, "octocat"; got != want { - t.Errorf("Want user login %s, got %s", want, got) - } - if got, want := in.Machine, true; got != want { - t.Errorf("Want user machine %v, got %v", want, got) - } - if got, want := in.Hash, "abc123"; got != want { - t.Errorf("Want user hash %s, got %s", want, got) - } - return nil - }) - - webhook := mock.NewMockWebhookSender(controller) - webhook.EXPECT().Send(gomock.Any(), gomock.Any()).Return(nil) - - service := mock.NewMockUserService(controller) - - in := new(bytes.Buffer) - json.NewEncoder(in).Encode(&userWithToken{&core.User{Login: "octocat", Machine: true}, "abc123"}) - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/", in) - - HandleCreate(users, service, webhook)(w, r) - if got, want := w.Code, 200; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - out := new(userWithToken) - json.NewDecoder(w.Body).Decode(out) - if got, want := out.Login, "octocat"; got != want { - t.Errorf("Want user login %s, got %s", want, got) - } - if got, want := out.Active, true; got != want { - t.Errorf("Want user active %v, got %v", want, got) - } - if got := out.Created; got == 0 { - t.Errorf("Want user created set to current unix timestamp, got %v", got) - } - if got := out.Updated; got == 0 { - t.Errorf("Want user updated set to current unix timestamp, got %v", got) - } - if got, want := out.Token, "abc123"; got != want { - 
t.Errorf("Want user token %s, got %s", want, got) - } -} - -func TestCreate_CorrectName(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - users := mock.NewMockUserStore(controller) - users.EXPECT().Create(gomock.Any(), gomock.Any()).Do(func(_ context.Context, in *core.User) error { - if got, want := in.Login, "octocat"; got != want { - t.Errorf("Want user login %s, got %s", want, got) - } - if got, want := in.Email, "octocat@github.com"; got != want { - t.Errorf("Want user email %s, got %s", want, got) - } - if in.Hash == "" { - t.Errorf("Expect user secert generated") - } - return nil - }) - - webhook := mock.NewMockWebhookSender(controller) - webhook.EXPECT().Send(gomock.Any(), gomock.Any()).Return(nil) - - service := mock.NewMockUserService(controller) - service.EXPECT().FindLogin(gomock.Any(), gomock.Any(), "Octocat").Return(&core.User{ - Login: "octocat", - Email: "octocat@github.com", - }, nil) - - in := new(bytes.Buffer) - json.NewEncoder(in).Encode(&core.User{Login: "Octocat"}) - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/", in) - - HandleCreate(users, service, webhook)(w, r) - if got, want := w.Code, 200; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - out := new(core.User) - json.NewDecoder(w.Body).Decode(out) - if got, want := out.Login, "octocat"; got != want { - t.Errorf("Want user login %s, got %s", want, got) - } - if got, want := out.Active, true; got != want { - t.Errorf("Want user active %v, got %v", want, got) - } - if got := out.Created; got == 0 { - t.Errorf("Want user created set to current unix timestamp, got %v", got) - } - if got := out.Updated; got == 0 { - t.Errorf("Want user updated set to current unix timestamp, got %v", got) - } -} - -func TestCreate_BadRequest(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - in := new(bytes.Buffer) - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/", in) - - HandleCreate(nil, nil, nil)(w, r) - if got, want := w.Code, http.StatusBadRequest; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), &errors.Error{Message: "EOF"} - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) > 0 { - t.Errorf(diff) - } -} - -func TestCreateError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - users := mock.NewMockUserStore(controller) - users.EXPECT().Create(gomock.Any(), gomock.Any()).Return(errors.ErrNotFound) - - webhook := mock.NewMockWebhookSender(controller) - - service := mock.NewMockUserService(controller) - service.EXPECT().FindLogin(gomock.Any(), gomock.Any(), "octocat").Return(nil, errors.New("not found")) - - in := new(bytes.Buffer) - json.NewEncoder(in).Encode(&core.User{Login: "octocat"}) - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/", in) - - HandleCreate(users, service, webhook)(w, r) - if got, want := w.Code, http.StatusInternalServerError; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) > 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/users/delete.go b/handler/api/users/delete.go deleted file mode 100644 index d5fd6fdc42..0000000000 --- a/handler/api/users/delete.go +++ /dev/null @@ -1,71 +0,0 @@ -// Copyright 2019 Drone IO, Inc. 
-// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package users - -import ( - "context" - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/logger" - - "github.com/go-chi/chi" -) - -// HandleDelete returns an http.HandlerFunc that processes an http.Request -// to delete the named user account from the system. -func HandleDelete( - users core.UserStore, - transferer core.Transferer, - sender core.WebhookSender, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - login := chi.URLParam(r, "user") - user, err := users.FindLogin(r.Context(), login) - if err != nil { - render.NotFound(w, err) - logger.FromRequest(r).WithError(err). - Debugln("api: cannot find user") - return - } - - err = transferer.Transfer(context.Background(), user) - if err != nil { - logger.FromRequest(r).WithError(err). - Warnln("api: cannot transfer repository ownership") - } - - err = users.Delete(r.Context(), user) - if err != nil { - render.InternalError(w, err) - logger.FromRequest(r).WithError(err). - Warnln("api: cannot delete user") - return - } - - err = sender.Send(r.Context(), &core.WebhookData{ - Event: core.WebhookEventUser, - Action: core.WebhookActionDeleted, - User: user, - }) - if err != nil { - logger.FromRequest(r).WithError(err). - Warnln("api: cannot send webhook") - } - - w.WriteHeader(http.StatusNoContent) - } -} diff --git a/handler/api/users/delete_test.go b/handler/api/users/delete_test.go deleted file mode 100644 index e43b2dee52..0000000000 --- a/handler/api/users/delete_test.go +++ /dev/null @@ -1,102 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -package users - -import ( - "context" - "database/sql" - "net/http" - "net/http/httptest" - "testing" - - "github.com/drone/drone/mock" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" -) - -func TestUserDelete(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - users := mock.NewMockUserStore(controller) - users.EXPECT().FindLogin(gomock.Any(), mockUser.Login).Return(mockUser, nil) - users.EXPECT().Delete(gomock.Any(), mockUser).Return(nil) - - transferer := mock.NewMockTransferer(controller) - transferer.EXPECT().Transfer(gomock.Any(), mockUser).Return(nil) - - webhook := mock.NewMockWebhookSender(controller) - webhook.EXPECT().Send(gomock.Any(), gomock.Any()).Return(nil) - - c := new(chi.Context) - c.URLParams.Add("user", "octocat") - - w := httptest.NewRecorder() - r := httptest.NewRequest("DELETE", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleDelete(users, transferer, webhook)(w, r) - if got, want := w.Body.Len(), 0; want != got { - t.Errorf("Want response body size %d, got %d", want, got) - } - if got, want := w.Code, 204; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } -} - -func TestUserDelete_NotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - users := mock.NewMockUserStore(controller) - users.EXPECT().FindLogin(gomock.Any(), mockUser.Login).Return(nil, sql.ErrNoRows) - - webhook := mock.NewMockWebhookSender(controller) - - c := new(chi.Context) - c.URLParams.Add("user", "octocat") - - w := httptest.NewRecorder() - r := httptest.NewRequest("DELETE", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleDelete(users, nil, webhook)(w, r) - if got, want := w.Code, 404; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } -} - -func TestUserDelete_InternalError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - users := mock.NewMockUserStore(controller) - users.EXPECT().FindLogin(gomock.Any(), mockUser.Login).Return(mockUser, nil) - users.EXPECT().Delete(gomock.Any(), mockUser).Return(sql.ErrConnDone) - - transferer := mock.NewMockTransferer(controller) - transferer.EXPECT().Transfer(gomock.Any(), mockUser).Return(nil) - - webhook := mock.NewMockWebhookSender(controller) - - c := new(chi.Context) - c.URLParams.Add("user", "octocat") - - w := httptest.NewRecorder() - r := httptest.NewRequest("DELETE", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleDelete(users, transferer, webhook)(w, r) - if got, want := w.Code, http.StatusInternalServerError; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } -} diff --git a/handler/api/users/find.go b/handler/api/users/find.go deleted file mode 100644 index dcff1a0aa2..0000000000 --- a/handler/api/users/find.go +++ /dev/null @@ -1,53 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. - -package users - -import ( - "net/http" - "strconv" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/logger" - - "github.com/go-chi/chi" -) - -// HandleFind returns an http.HandlerFunc that writes json-encoded -// user account information to the response body. -func HandleFind(users core.UserStore) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - login := chi.URLParam(r, "user") - - user, err := users.FindLogin(r.Context(), login) - if err != nil { - // the client can make a user request by providing - // the user id as opposed to the username. If a - // numeric user id is provided as input, attempt - // to lookup the user by id. - if id, _ := strconv.ParseInt(login, 10, 64); id != 0 { - user, err = users.Find(r.Context(), id) - if err == nil { - render.JSON(w, user, 200) - return - } - } - render.NotFound(w, err) - logger.FromRequest(r).Debugln("api: cannot find user") - } else { - render.JSON(w, user, 200) - } - } -} diff --git a/handler/api/users/find_test.go b/handler/api/users/find_test.go deleted file mode 100644 index 1df64c9b36..0000000000 --- a/handler/api/users/find_test.go +++ /dev/null @@ -1,129 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package users - -import ( - "context" - "database/sql" - "encoding/json" - "io/ioutil" - "net/http/httptest" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/mock" - "github.com/sirupsen/logrus" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -func init() { - logrus.SetOutput(ioutil.Discard) -} - -// var ( -// mockUser = &core.User{ -// Login: "octocat", -// } - -// mockUsers = []*core.User{ -// { -// Login: "octocat", -// }, -// } - -// // mockNotFound = &Error{ -// // Message: "sql: no rows in result set", -// // } - -// // mockBadRequest = &Error{ -// // Message: "EOF", -// // } - -// // mockInternalError = &Error{ -// // Message: "database/sql: connection is already closed", -// // } -// ) - -func TestUserFind(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - users := mock.NewMockUserStore(controller) - users.EXPECT().FindLogin(gomock.Any(), mockUser.Login).Return(mockUser, nil) - - c := new(chi.Context) - c.URLParams.Add("user", "octocat") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleFind(users)(w, r) - if got, want := w.Code, 200; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := &core.User{}, mockUser - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestUserFindID(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - users := mock.NewMockUserStore(controller) - users.EXPECT().FindLogin(gomock.Any(), "1").Return(nil, sql.ErrNoRows) - users.EXPECT().Find(gomock.Any(), mockUser.ID).Return(mockUser, nil) - - c := new(chi.Context) - c.URLParams.Add("user", "1") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - 
HandleFind(users)(w, r) - if got, want := w.Code, 200; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := &core.User{}, mockUser - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestUserFindErr(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - users := mock.NewMockUserStore(controller) - users.EXPECT().FindLogin(gomock.Any(), mockUser.Login).Return(nil, sql.ErrNoRows) - - c := new(chi.Context) - c.URLParams.Add("user", "octocat") - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleFind(users)(w, r) - if got, want := w.Code, 404; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } -} diff --git a/handler/api/users/list.go b/handler/api/users/list.go deleted file mode 100644 index b2af9372a3..0000000000 --- a/handler/api/users/list.go +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package users - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/logger" -) - -// HandleList returns an http.HandlerFunc that writes a json-encoded -// list of all registered system users to the response body. -func HandleList(users core.UserStore) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - users, err := users.List(r.Context()) - if err != nil { - render.InternalError(w, err) - logger.FromRequest(r).WithError(err). - Warnln("api: cannot list users") - } else { - render.JSON(w, users, 200) - } - } -} diff --git a/handler/api/users/list_test.go b/handler/api/users/list_test.go deleted file mode 100644 index 6e5e4083eb..0000000000 --- a/handler/api/users/list_test.go +++ /dev/null @@ -1,77 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -package users - -import ( - "database/sql" - "encoding/json" - "net/http/httptest" - "testing" - - "github.com/drone/drone/mock" - "github.com/drone/drone/core" - - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -var ( - mockUser = &core.User{ - ID: 1, - Login: "octocat", - Email: "octocat@github.com", - Admin: false, - Active: true, - Avatar: "https://avatars1.githubusercontent.com/u/583231", - } - - mockUserList = []*core.User{ - mockUser, - } -) - -func TestHandleList(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - users := mock.NewMockUserStore(controller) - users.EXPECT().List(gomock.Any()).Return(mockUserList, nil) - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - h := HandleList(users) - - h(w, r) - if got, want := w.Code, 200; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := []*core.User{}, mockUserList - json.NewDecoder(w.Body).Decode(&got) - if diff := cmp.Diff(got, want); len(diff) > 0 { - t.Errorf(diff) - } -} - -func TestUserList_Err(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - users := mock.NewMockUserStore(controller) - users.EXPECT().List(gomock.Any()).Return(nil, sql.ErrNoRows) - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - HandleList(users)(w, r) - if got, want := w.Code, 500; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - // got, want := new(render.Error), &render.Error{Message: "sql: no rows in result set"} - // json.NewDecoder(w.Body).Decode(got) - // if diff := cmp.Diff(got, want); len(diff) > 0 { - // t.Errorf(diff) - // } -} diff --git a/handler/api/users/repos.go b/handler/api/users/repos.go deleted file mode 100644 index b1f99bdfa8..0000000000 --- a/handler/api/users/repos.go +++ /dev/null @@ -1,54 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package users - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/logger" - - "github.com/go-chi/chi" -) - -// HandleRepoList returns an http.HandlerFunc that writes a json-encoded -// list of all user repositories to the response body. -func HandleRepoList(users core.UserStore, repos core.RepositoryStore) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - login := chi.URLParam(r, "user") - - user, err := users.FindLogin(r.Context(), login) - if err != nil { - render.NotFound(w, err) - logger.FromRequest(r). - WithError(err). - WithField("user", login). - Debugln("api: cannot find user") - return - } - - repos, err := repos.List(r.Context(), user.ID) - if err != nil { - render.InternalError(w, err) - logger.FromRequest(r). - WithError(err). - WithField("user", login). 
- Warnln("api: cannot list user repositories") - } else { - render.JSON(w, repos, 200) - } - } -} diff --git a/handler/api/users/token.go b/handler/api/users/token.go deleted file mode 100644 index b277483a40..0000000000 --- a/handler/api/users/token.go +++ /dev/null @@ -1,51 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package users - -import ( - "net/http" - - "github.com/dchest/uniuri" - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/logger" - "github.com/go-chi/chi" -) - -type userWithMessage struct { - *core.User - Message string `json:"message"` -} - -// HandleToken returns an http.HandlerFunc that writes json-encoded -// account information to the http response body with the user token. -func HandleTokenRotation(users core.UserStore) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - login := chi.URLParam(r, "user") - user, err := users.FindLogin(r.Context(), login) - if err != nil { - render.NotFound(w, err) - logger.FromRequest(r).WithError(err). - Debugln("api: cannot find user") - return - } - user.Hash = uniuri.NewLen(32) - if err := users.Update(r.Context(), user); err != nil { - render.InternalError(w, err) - return - } - render.JSON(w, &userWithMessage{user, "Token rotated successfully."}, 200) - } -} diff --git a/handler/api/users/token_test.go b/handler/api/users/token_test.go deleted file mode 100644 index 1d40547ca1..0000000000 --- a/handler/api/users/token_test.go +++ /dev/null @@ -1,131 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package users - -import ( - "context" - "encoding/json" - "net/http/httptest" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/mock" - "github.com/go-chi/chi" - - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" - "github.com/google/go-cmp/cmp/cmpopts" -) - -// The purpose of this test is to make sure admins can rotate someone -// else's token. 
-func TestTokenRotate(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - startingHash := "MjAxOC0wOC0xMVQxNTo1ODowN1o" - mockUser := &core.User{ - ID: 1, - Login: "octocat", - Hash: startingHash, - } - - c := new(chi.Context) - c.URLParams.Add("user", "octocat") - - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - users := mock.NewMockUserStore(controller) - users.EXPECT().FindLogin(gomock.Any(), mockUser.Login).Return(mockUser, nil) - users.EXPECT().Update(gomock.Any(), gomock.Any()).Return(nil) - - HandleTokenRotation(users)(w, r) - if got, want := w.Code, 200; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := &userWithMessage{}, mockUser - json.NewDecoder(w.Body).Decode(got) - - ignore := cmpopts.IgnoreFields(core.User{}, "Hash") - if diff := cmp.Diff(got.User, want, ignore); len(diff) != 0 { - t.Errorf(diff) - } - if got.Message == "" { - t.Errorf("Expect Message returned") - } - if got, want := mockUser.Hash, startingHash; got == want { - t.Errorf("Expect user hash updated") - } -} - -// the purpose of this unit test is to verify we fail safely when a non existing user is provided -func TestToken_UserNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - startingHash := "MjAxOC0wOC0xMVQxNTo1ODowN1o" - mockUser := &core.User{ - ID: 1, - Login: "octocat", - Hash: startingHash, - } - c := new(chi.Context) - c.URLParams.Add("user", "octocat") - - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/?rotate=true", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - users := mock.NewMockUserStore(controller) - users.EXPECT().FindLogin(gomock.Any(), mockUser.Login).Return(mockUser, nil) - users.EXPECT().Update(gomock.Any(), gomock.Any()).Return(errors.ErrNotFound) - - HandleTokenRotation(users)(w, r) - if got, want := w.Code, 500; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -// the purpose of this unit test is to verify we fail safely when a non existing user is provided -func TestToken_UpdateError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - c := new(chi.Context) - c.URLParams.Add("user", "octocat") - - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/?rotate=true", nil) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - users := mock.NewMockUserStore(controller) - users.EXPECT().FindLogin(gomock.Any(), mockUser.Login).Return(nil, errors.ErrNotFound) - - HandleTokenRotation(users)(w, r) - if got, want := w.Code, 404; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/handler/api/users/update.go b/handler/api/users/update.go deleted file mode 100644 index af320c1e66..0000000000 --- a/handler/api/users/update.go +++ /dev/null @@ -1,88 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package users - -import ( - "context" - "encoding/json" - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/logger" - - "github.com/go-chi/chi" -) - -type userInput struct { - Admin *bool `json:"admin"` - Active *bool `json:"active"` -} - -// HandleUpdate returns an http.HandlerFunc that processes an http.Request -// to update a user account. -func HandleUpdate(users core.UserStore, transferer core.Transferer) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - login := chi.URLParam(r, "user") - - in := new(userInput) - err := json.NewDecoder(r.Body).Decode(in) - if err != nil { - render.BadRequest(w, err) - logger.FromRequest(r).WithError(err). - Debugln("api: cannot unmarshal request body") - return - } - - user, err := users.FindLogin(r.Context(), login) - if err != nil { - render.NotFound(w, err) - logger.FromRequest(r).WithError(err). - Debugln("api: cannot find user") - return - } - - if in.Admin != nil { - user.Admin = *in.Admin - } - if in.Active != nil { - user.Active = *in.Active - // if the user is inactive we should always - // disable administrative privileges since - // the user may still have some API access. - if user.Active == false { - user.Admin = false - } - } - err = users.Update(r.Context(), user) - if err != nil { - render.InternalError(w, err) - logger.FromRequest(r).WithError(err). - Warnln("api: cannot update user") - } else { - render.JSON(w, user, 200) - } - - if user.Active { - return - } - - err = transferer.Transfer(context.Background(), user) - if err != nil { - logger.FromRequest(r).WithError(err). - Warnln("api: cannot transfer repository ownership") - } - } -} diff --git a/handler/api/users/update_test.go b/handler/api/users/update_test.go deleted file mode 100644 index c2d848f4be..0000000000 --- a/handler/api/users/update_test.go +++ /dev/null @@ -1,168 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -package users - -import ( - "bytes" - "context" - "database/sql" - "encoding/json" - "net/http" - "net/http/httptest" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/errors" - "github.com/drone/drone/mock" - - "github.com/go-chi/chi" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -func TestUpdate(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - admin := true - userInput := &userInput{ - Admin: &admin, - } - user := &core.User{ - Login: "octocat", - Admin: false, - } - - users := mock.NewMockUserStore(controller) - users.EXPECT().FindLogin(gomock.Any(), user.Login).Return(user, nil) - users.EXPECT().Update(gomock.Any(), user) - - transferer := mock.NewMockTransferer(controller) - transferer.EXPECT().Transfer(gomock.Any(), user).Return(nil) - - c := new(chi.Context) - c.URLParams.Add("user", "octocat") - - in := new(bytes.Buffer) - json.NewEncoder(in).Encode(userInput) - w := httptest.NewRecorder() - r := httptest.NewRequest("PATCH", "/", in) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleUpdate(users, transferer)(w, r) - if got, want := w.Code, 200; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - if got, want := user.Admin, true; got != want { - t.Errorf("Want user admin %v, got %v", want, got) - } - - got, want := new(core.User), user - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) > 0 { - t.Errorf(diff) - } -} - -func TestUpdate_BadRequest(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - users := mock.NewMockUserStore(controller) - - c := new(chi.Context) - c.URLParams.Add("user", "octocat") - - in := new(bytes.Buffer) - w := httptest.NewRecorder() - r := httptest.NewRequest("PATCH", "/", in) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleUpdate(users, nil)(w, r) - if got, want := w.Code, 400; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), &errors.Error{Message: "EOF"} - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) > 0 { - t.Errorf(diff) - } -} - -func TestUpdate_NotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - users := mock.NewMockUserStore(controller) - users.EXPECT().FindLogin(gomock.Any(), mockUser.Login).Return(nil, sql.ErrNoRows) - - c := new(chi.Context) - c.URLParams.Add("user", "octocat") - - in := new(bytes.Buffer) - json.NewEncoder(in).Encode(mockUser) - w := httptest.NewRecorder() - r := httptest.NewRequest("PATCH", "/", in) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleUpdate(users, nil)(w, r) - if got, want := w.Code, 404; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), &errors.Error{Message: "sql: no rows in result set"} - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) > 0 { - t.Errorf(diff) - } -} - -func TestUpdate_UpdateFailed(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - userInput := &core.User{ - Login: "octocat", - Admin: true, - } - user := &core.User{ - Login: "octocat", - Admin: false, - } - - users := mock.NewMockUserStore(controller) - users.EXPECT().FindLogin(gomock.Any(), userInput.Login).Return(user, nil) - 
users.EXPECT().Update(gomock.Any(), user).Return(errors.ErrNotFound) - - c := new(chi.Context) - c.URLParams.Add("user", "octocat") - - in := new(bytes.Buffer) - json.NewEncoder(in).Encode(mockUser) - w := httptest.NewRecorder() - r := httptest.NewRequest("PATCH", "/", in) - r = r.WithContext( - context.WithValue(context.Background(), chi.RouteCtxKey, c), - ) - - HandleUpdate(users, nil)(w, r) - if got, want := w.Code, http.StatusInternalServerError; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := new(errors.Error), errors.ErrNotFound - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); len(diff) > 0 { - t.Errorf(diff) - } -} diff --git a/handler/health/health.go b/handler/health/health.go deleted file mode 100644 index 920728c64c..0000000000 --- a/handler/health/health.go +++ /dev/null @@ -1,43 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package health - -import ( - "io" - "net/http" - - "github.com/go-chi/chi" - "github.com/go-chi/chi/middleware" -) - -// New returns a new health check router. -func New() http.Handler { - r := chi.NewRouter() - r.Use(middleware.Recoverer) - r.Use(middleware.NoCache) - r.Handle("/", Handler()) - return r -} - -// Handler creates an http.HandlerFunc that performs system -// healthchecks and returns 500 if the system is in an unhealthy state. -func Handler() http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(200) - w.Header().Set("Content-Type", "text/plain") - io.WriteString(w, "OK") - } -} - diff --git a/handler/health/health_test.go b/handler/health/health_test.go deleted file mode 100644 index 1b8ec44110..0000000000 --- a/handler/health/health_test.go +++ /dev/null @@ -1,21 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package health - -import ( - "net/http/httptest" - "testing" -) - -func TestHandleHealthz(t *testing.T) { - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/healthz", nil) - - Handler().ServeHTTP(w, r) - - if got, want := w.Code, 200; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } -} diff --git a/handler/web/hook.go b/handler/web/hook.go deleted file mode 100644 index c836995dd0..0000000000 --- a/handler/web/hook.go +++ /dev/null @@ -1,135 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. - -package web - -import ( - "context" - "net/http" - "net/http/httputil" - "os" - "strconv" - "time" - - "github.com/sirupsen/logrus" - - "github.com/drone/drone/core" - "github.com/drone/drone/logger" - "github.com/drone/go-scm/scm" -) - -// this is intended for local testing and instructs the handler -// to print the contents of the hook to stdout. -var debugPrintHook = false - -func init() { - debugPrintHook, _ = strconv.ParseBool( - os.Getenv("DRONE_DEBUG_DUMP_HOOK"), - ) -} - -// HandleHook returns an http.HandlerFunc that handles webhooks -// triggered by source code management. -func HandleHook( - repos core.RepositoryStore, - builds core.BuildStore, - triggerer core.Triggerer, - parser core.HookParser, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - - if debugPrintHook { - // if DRONE_DEBUG_DUMP_HOOK=true print the http.Request - // headers and body to stdout. - out, _ := httputil.DumpRequest(r, true) - os.Stderr.Write(out) - } - - hook, remote, err := parser.Parse(r, func(slug string) string { - namespace, name := scm.Split(slug) - repo, err := repos.FindName(r.Context(), namespace, name) - if err != nil { - logrus.WithFields( - logrus.Fields{ - "namespace": namespace, - "name": name, - }).Debugln("cannot find repository") - return "" - } - return repo.Signer - }) - - if err != nil { - logrus.Debugf("cannot parse webhook: %s", err) - writeBadRequest(w, err) - return - } - - if hook == nil { - logrus.Debugf("webhook ignored") - return - } - - // TODO handle ping requests - // TODO consider using scm.Repository in the function callback. - - log := logrus.WithFields(logrus.Fields{ - "namespace": remote.Namespace, - "name": remote.Name, - "event": hook.Event, - "commit": hook.After, - }) - - log.Debugln("webhook parsed") - - repo, err := repos.FindName(r.Context(), remote.Namespace, remote.Name) - if err != nil { - log = log.WithError(err) - log.Debugln("cannot find repository") - writeNotFound(w, err) - return - } - - if !repo.Active { - log.Debugln("ignore webhook, repository inactive") - w.WriteHeader(200) - return - } - - ctx, cancel := context.WithTimeout(context.Background(), time.Minute*5) - ctx = logger.WithContext(ctx, log) - defer cancel() - - if hook.Event == core.EventPush && hook.Action == core.ActionDelete { - log.WithField("branch", hook.Target).Debugln("branch deleted") - builds.DeleteBranch(ctx, repo.ID, hook.Target) - w.WriteHeader(http.StatusNoContent) - return - } - if hook.Event == core.EventPullRequest && hook.Action == core.ActionClose { - log.WithField("ref", hook.Ref).Debugln("pull request closed") - builds.DeletePull(ctx, repo.ID, scm.ExtractPullRequest(hook.Ref)) - w.WriteHeader(http.StatusNoContent) - return - } - - builds, err := triggerer.Trigger(ctx, repo, hook) - if err != nil { - writeError(w, err) - return - } - - writeJSON(w, builds, 200) - } -} diff --git a/handler/web/link/link.go b/handler/web/link/link.go deleted file mode 100644 index 489b6292c6..0000000000 --- a/handler/web/link/link.go +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package link - -import ( - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/go-scm/scm" - - "github.com/go-chi/chi" -) - -// HandleCommit returns an http.HandlerFunc that redirects the -// user to the git resource in the remote source control -// management system. -func HandleCommit(linker core.Linker) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - ctx = r.Context() - namespace = chi.URLParam(r, "namespace") - name = chi.URLParam(r, "name") - commit = chi.URLParam(r, "commit") - ref = r.FormValue("ref") - ) - repo := scm.Join(namespace, name) - to, err := linker.Link(ctx, repo, ref, commit) - if err != nil { - http.Error(w, "Not Found", http.StatusNotFound) - return - } - http.Redirect(w, r, to, http.StatusSeeOther) - } -} - -// HandleTree returns an http.HandlerFunc that redirects the -// user to the git resource in the remote source control -// management system. -func HandleTree(linker core.Linker) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - var ( - ctx = r.Context() - namespace = chi.URLParam(r, "namespace") - name = chi.URLParam(r, "name") - ref = chi.URLParam(r, "*") - commit = r.FormValue("sha") - ) - repo := scm.Join(namespace, name) - to, err := linker.Link(ctx, repo, ref, commit) - if err != nil { - http.Error(w, "Not Found", http.StatusNotFound) - return - } - http.Redirect(w, r, to, http.StatusSeeOther) - } -} diff --git a/handler/web/link/link_test.go b/handler/web/link/link_test.go deleted file mode 100644 index 2a10f2fe4e..0000000000 --- a/handler/web/link/link_test.go +++ /dev/null @@ -1,15 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package link diff --git a/handler/web/login.go b/handler/web/login.go deleted file mode 100644 index 6b2e660d82..0000000000 --- a/handler/web/login.go +++ /dev/null @@ -1,229 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package web - -import ( - "context" - "database/sql" - "errors" - "fmt" - "net/http" - "time" - - "github.com/drone/drone/core" - "github.com/drone/drone/logger" - "github.com/drone/go-login/login" - - "github.com/dchest/uniuri" - "github.com/sirupsen/logrus" -) - -// period at which the user account is synchronized -// with the remote system. Default is weekly. -var syncPeriod = time.Hour * 24 * 7 - -// period at which the sync should timeout -var syncTimeout = time.Minute * 30 - -// HandleLogin creates and http.HandlerFunc that handles user -// authentication and session initialization. -func HandleLogin( - users core.UserStore, - userz core.UserService, - syncer core.Syncer, - session core.Session, - admission core.AdmissionService, - sender core.WebhookSender, -) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - err := login.ErrorFrom(ctx) - if err != nil { - writeLoginError(w, r, err) - logrus.Debugf("cannot authenticate user: %s", err) - return - } - - // The authorization token is passed from the - // login middleware in the context. - tok := login.TokenFrom(ctx) - - account, err := userz.Find(ctx, tok.Access, tok.Refresh) - if err != nil { - writeLoginError(w, r, err) - logrus.Debugf("cannot find remote user: %s", err) - return - } - - logger := logrus.WithField("login", account.Login) - logger.Debugf("attempting authentication") - - redirect := "/" - user, err := users.FindLogin(ctx, account.Login) - if err == sql.ErrNoRows { - redirect = "/register" - - user = &core.User{ - Login: account.Login, - Avatar: account.Avatar, - Admin: false, - Machine: false, - Active: true, - Syncing: true, - Synced: 0, - LastLogin: time.Now().Unix(), - Created: time.Now().Unix(), - Updated: time.Now().Unix(), - Token: tok.Access, - Refresh: tok.Refresh, - Hash: uniuri.NewLen(32), - } - if !tok.Expires.IsZero() { - user.Expiry = tok.Expires.Unix() - } - - err = admission.Admit(ctx, user) - if err != nil { - writeLoginError(w, r, err) - logger.Errorf("cannot admit user: %s", err) - return - } - - err = users.Create(ctx, user) - if err != nil { - writeLoginError(w, r, err) - logger.Errorf("cannot create user: %s", err) - return - } - - err = sender.Send(ctx, &core.WebhookData{ - Event: core.WebhookEventUser, - Action: core.WebhookActionCreated, - User: user, - }) - if err != nil { - logger.Errorf("cannot send webhook: %s", err) - } else { - logger.Debugf("successfully created user") - } - } else if err != nil { - writeLoginError(w, r, err) - logger.Errorf("cannot find user: %s", err) - return - } else { - err = admission.Admit(ctx, user) - if err != nil { - writeLoginError(w, r, err) - logger.Errorf("cannot admit user: %s", err) - return - } - } - - if user.Machine { - writeLoginErrorStr(w, r, "Machine account login is forbidden") - return - } - - if user.Active == false { - writeLoginErrorStr(w, r, "Account is not active") - return - } - - user.Avatar = account.Avatar - user.Token = tok.Access - user.Refresh = tok.Refresh - user.LastLogin = time.Now().Unix() - if !tok.Expires.IsZero() { - user.Expiry = tok.Expires.Unix() - } - - // If the user account has never been synchronized we - // execute the synchronization logic. - if time.Unix(user.Synced, 0).Add(syncPeriod).Before(time.Now()) { - user.Syncing = true - } - - err = users.Update(ctx, user) - if err != nil { - // if the account update fails we should still - // proceed to create the user session. This is - // considered a non-fatal error. 
- logger.Errorf("cannot update user: %s", err) - } - - // launch the synchronization process in a go-routine, - // since it is a long-running process and can take up - // to a few minutes. - if user.Syncing { - go synchronize(ctx, syncer, user) - } - - // If the user account has not completed registration, - // redirect to the registration form. - if len(user.Email) == 0 && user.Created > 1619841600 { - redirect = "/register" - } - - logger.Debugf("authentication successful") - - session.Create(w, user) - http.Redirect(w, r, redirect, http.StatusSeeOther) - } -} - -func synchronize(ctx context.Context, syncer core.Syncer, user *core.User) { - log := logrus.WithField("login", user.Login) - log.Debugf("begin synchronization") - - timeout, cancel := context.WithTimeout(context.Background(), syncTimeout) - timeout = logger.WithContext(timeout, log) - defer cancel() - _, err := syncer.Sync(timeout, user) - if err != nil { - log.Debugf("synchronization failed: %s", err) - } else { - log.Debugf("synchronization success") - } -} - -func writeLoginError(w http.ResponseWriter, r *http.Request, err error) { - http.Redirect(w, r, "/login/error?message="+err.Error(), http.StatusSeeOther) -} - -func writeLoginErrorStr(w http.ResponseWriter, r *http.Request, s string) { - writeLoginError(w, r, errors.New(s)) -} - -func writeCookie(w http.ResponseWriter, cookie *http.Cookie) { - w.Header().Set("Set-Cookie", cookie.String()+"; SameSite=lax") -} - -// HandleLoginForm creates and http.HandlerFunc that presents the -// user with an Login form for password-based authentication. -func HandleLoginForm() http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - w.Header().Set("Content-Type", "text/html") - fmt.Fprint(w, loginForm) - } -} - -// html page displayed to collect credentials. -var loginForm = ` -
- - - -
-` diff --git a/handler/web/login_test.go b/handler/web/login_test.go deleted file mode 100644 index e988047378..0000000000 --- a/handler/web/login_test.go +++ /dev/null @@ -1,5 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package web diff --git a/handler/web/logout.go b/handler/web/logout.go deleted file mode 100644 index a969a32ef8..0000000000 --- a/handler/web/logout.go +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package web - -import ( - "net/http" - - "github.com/drone/drone-ui/dist" -) - -// HandleLogout creates an http.HandlerFunc that handles -// session termination. -func HandleLogout() http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - w.Header().Add("Set-Cookie", "_session_=deleted; Path=/; Max-Age=0") - w.Header().Set("Content-Type", "text/html; charset=UTF-8") - w.Write( - dist.MustLookup("/index.html"), - ) - } -} diff --git a/handler/web/logout_test.go b/handler/web/logout_test.go deleted file mode 100644 index 1666b27a33..0000000000 --- a/handler/web/logout_test.go +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package web - -import ( - "net/http/httptest" - "testing" -) - -func TestLogout(t *testing.T) { - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/logout", nil) - - HandleLogout().ServeHTTP(w, r) - - if got, want := w.Code, 200; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - if got, want := w.Header().Get("Set-Cookie"), "_session_=deleted; Path=/; Max-Age=0"; want != got { - t.Errorf("Want response code %q, got %q", want, got) - } -} diff --git a/handler/web/pages.go b/handler/web/pages.go deleted file mode 100644 index 4bf347e42c..0000000000 --- a/handler/web/pages.go +++ /dev/null @@ -1,172 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package web - -import ( - "bytes" - "crypto/md5" - "fmt" - "net/http" - "time" - - "github.com/drone/drone-ui/dist" - "github.com/drone/drone/core" -) - -func HandleIndex(host string, session core.Session, license core.LicenseService) http.HandlerFunc { - return func(rw http.ResponseWriter, r *http.Request) { - user, _ := session.Get(r) - if user == nil && r.URL.Path == "/" { - http.Redirect(rw, r, "/welcome", 303) - return - } - - out := dist.MustLookup("/index.html") - ctx := r.Context() - - if ok, _ := license.Exceeded(ctx); ok { - out = bytes.Replace(out, head, exceeded, -1) - } else if license.Expired(ctx) { - out = bytes.Replace(out, head, expired, -1) - } - rw.Header().Set("Content-Type", "text/html; charset=UTF-8") - rw.Write(out) - } -} - -var ( - head = []byte(``) - expired = []byte(``) - exceeded = []byte(``) -) - -func setupCache(h http.Handler) http.Handler { - data := []byte(time.Now().String()) - etag := fmt.Sprintf("%x", md5.Sum(data)) - - return http.HandlerFunc( - func(w http.ResponseWriter, r *http.Request) { - w.Header().Set("Cache-Control", "public, max-age=31536000") - w.Header().Del("Expires") - w.Header().Del("Pragma") - w.Header().Set("ETag", etag) - h.ServeHTTP(w, r) - }, - ) -} - -// func userFromSession(r *http.Request, users core.UserStore, secret string) *core.User { -// cookie, err := r.Cookie("_session_") -// if err != nil { -// return nil -// } -// login := authcookie.Login(cookie.Value, []byte(secret)) -// if login == "" { -// return nil -// } -// user, err := users.FindLogin(r.Context(), login) -// if err != nil { -// return nil -// } -// return user -// } - -// var tmpl = mustCreateTemplate( -// string(dist.MustLookup("/index.html")), -// ) - -// // default func map with json parser. -// var funcMap = template.FuncMap{ -// "json": func(v interface{}) template.JS { -// a, _ := json.Marshal(v) -// return template.JS(a) -// }, -// } - -// // helper function creates a new template from the text string. -// func mustCreateTemplate(text string) *template.Template { -// templ, err := createTemplate(text) -// if err != nil { -// panic(err) -// } -// return templ -// } - -// // helper function creates a new template from the text string. -// func createTemplate(text string) (*template.Template, error) { -// templ, err := template.New("_").Funcs(funcMap).Parse(partials) -// if err != nil { -// return nil, err -// } -// return templ.Parse( -// injectPartials(text), -// ) -// } - -// // helper function that parses the html file and injects -// // named partial templates. -// func injectPartials(s string) string { -// w := new(bytes.Buffer) -// r := bytes.NewBufferString(s) -// t := html.NewTokenizer(r) -// for { -// tt := t.Next() -// if tt == html.ErrorToken { -// break -// } -// if tt == html.CommentToken { -// txt := string(t.Text()) -// txt = strings.TrimSpace(txt) -// seg := strings.Split(txt, ":") -// if len(seg) == 2 && seg[0] == "drone" { -// fmt.Fprintf(w, "{{ template %q . 
}}", seg[1]) -// continue -// } -// } -// w.Write(t.Raw()) -// } -// return w.String() -// } - -// const partials = ` -// {{define "user"}} -// {{ if .user }} -// -// {{ end }} -// {{end}} -// {{define "csrf"}} -// {{ if .csrf -}} -// -// {{- end }} -// {{end}} -// {{define "version"}} -// -// {{end}} -// {{define "docs"}} -// {{ if .docs -}} -// -// {{- end }} -// {{end}} -// ` - -var landingPage = ` -` diff --git a/handler/web/varz.go b/handler/web/varz.go deleted file mode 100644 index 6799bea045..0000000000 --- a/handler/web/varz.go +++ /dev/null @@ -1,75 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package web - -import ( - "net/http" - "time" - - "github.com/drone/drone/core" - "github.com/drone/go-scm/scm" -) - -type varz struct { - SCM *scmInfo `json:"scm"` - License *licenseInfo `json:"license"` -} - -type scmInfo struct { - URL string `json:"url"` - Rate *rateInfo `json:"rate"` -} - -type rateInfo struct { - Limit int `json:"limit"` - Remaining int `json:"remaining"` - Reset int64 `json:"reset"` -} - -type licenseInfo struct { - Kind string `json:"kind"` - Seats int64 `json:"seats"` - SeatsUsed int64 `json:"seats_used,omitempty"` - SeatsAvail int64 `json:"seats_available,omitempty"` - Repos int64 `json:"repos"` - ReposUsed int64 `json:"repos_used,omitempty"` - ReposAvail int64 `json:"repos_available,omitempty"` - Expires time.Time `json:"expire_at,omitempty"` -} - -// HandleVarz creates an http.HandlerFunc that exposes internal system -// information. -func HandleVarz(client *scm.Client, license *core.License) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - rate := client.Rate() - v := &varz{ - License: &licenseInfo{ - Kind: license.Kind, - Seats: license.Users, - Repos: license.Repos, - Expires: license.Expires, - }, - SCM: &scmInfo{ - URL: client.BaseURL.String(), - Rate: &rateInfo{ - Limit: rate.Limit, - Remaining: rate.Remaining, - Reset: rate.Reset, - }, - }, - } - writeJSON(w, v, 200) - } -} diff --git a/handler/web/varz_test.go b/handler/web/varz_test.go deleted file mode 100644 index 345b8a0fc2..0000000000 --- a/handler/web/varz_test.go +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -package web - -import ( - "encoding/json" - "net/http/httptest" - "net/url" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/go-scm/scm" - "github.com/google/go-cmp/cmp" -) - -func TestHandleVarz(t *testing.T) { - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - - client := new(scm.Client) - client.BaseURL, _ = url.Parse("https://github.com") - client.SetRate(scm.Rate{ - Limit: 5000, - Remaining: 875, - Reset: 1523640878, - }) - - license := &core.License{ - Kind: core.LicenseStandard, - Repos: 50, - Users: 100, - } - HandleVarz(client, license).ServeHTTP(w, r) - - if got, want := w.Code, 200; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - got, want := &varz{}, mockVarz - json.NewDecoder(w.Body).Decode(got) - if diff := cmp.Diff(got, want); diff != "" { - t.Errorf(diff) - } -} - -var mockVarz = &varz{ - SCM: &scmInfo{ - URL: "https://github.com", - Rate: &rateInfo{ - Limit: 5000, - Remaining: 875, - Reset: 1523640878, - }, - }, - License: &licenseInfo{ - Kind: "standard", - Seats: 100, - SeatsUsed: 0, - SeatsAvail: 0, - Repos: 50, - ReposUsed: 0, - ReposAvail: 0, - }, -} diff --git a/handler/web/version.go b/handler/web/version.go deleted file mode 100644 index 7c48cf6b57..0000000000 --- a/handler/web/version.go +++ /dev/null @@ -1,36 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package web - -import ( - "net/http" - - "github.com/drone/drone/version" -) - -// HandleVersion creates an http.HandlerFunc that returns the -// version number and build details. -func HandleVersion(w http.ResponseWriter, r *http.Request) { - v := struct { - Source string `json:"source,omitempty"` - Version string `json:"version,omitempty"` - Commit string `json:"commit,omitempty"` - }{ - Source: version.GitRepository, - Commit: version.GitCommit, - Version: version.Version.String(), - } - writeJSON(w, &v, 200) -} diff --git a/handler/web/version_test.go b/handler/web/version_test.go deleted file mode 100644 index 1c7e513e10..0000000000 --- a/handler/web/version_test.go +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -package web - -// func TestHandleVersion(t *testing.T) { -// controller := gomock.NewController(t) -// defer controller.Finish() - -// w := httptest.NewRecorder() -// r := httptest.NewRequest("GET", "/version", nil) - -// mockVersion := &core.Version{ -// Source: "github.com/octocat/hello-world", -// Version: "1.0.0", -// Commit: "ad2aec", -// } - -// h := HandleVersion(mockVersion) -// h.ServeHTTP(w, r) - -// if got, want := w.Code, 200; want != got { -// t.Errorf("Want response code %d, got %d", want, got) -// } - -// got, want := &core.Version{}, mockVersion -// json.NewDecoder(w.Body).Decode(got) -// if !reflect.DeepEqual(got, want) { -// t.Errorf("response body does match expected result") -// pretty.Ldiff(t, got, want) -// } -// } diff --git a/handler/web/web.go b/handler/web/web.go deleted file mode 100644 index c886f6d7ba..0000000000 --- a/handler/web/web.go +++ /dev/null @@ -1,141 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package web - -import ( - "net/http" - - "github.com/drone/drone-ui/dist" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/web/link" - "github.com/drone/drone/logger" - "github.com/drone/go-login/login" - "github.com/drone/go-scm/scm" - - "github.com/go-chi/chi" - "github.com/go-chi/chi/middleware" - "github.com/unrolled/secure" -) - -func New( - admitter core.AdmissionService, - builds core.BuildStore, - client *scm.Client, - hooks core.HookParser, - license *core.License, - licenses core.LicenseService, - linker core.Linker, - login login.Middleware, - repos core.RepositoryStore, - session core.Session, - syncer core.Syncer, - triggerer core.Triggerer, - users core.UserStore, - userz core.UserService, - webhook core.WebhookSender, - options secure.Options, - system *core.System, -) Server { - return Server{ - Admitter: admitter, - Builds: builds, - Client: client, - Hooks: hooks, - License: license, - Licenses: licenses, - Linker: linker, - Login: login, - Repos: repos, - Session: session, - Syncer: syncer, - Triggerer: triggerer, - Users: users, - Userz: userz, - Webhook: webhook, - Options: options, - Host: system.Host, - } -} - -// Server is a http.Handler which exposes drone functionality over HTTP. 
-type Server struct { - Admitter core.AdmissionService - Builds core.BuildStore - Client *scm.Client - Hooks core.HookParser - License *core.License - Licenses core.LicenseService - Linker core.Linker - Login login.Middleware - Repos core.RepositoryStore - Session core.Session - Syncer core.Syncer - Triggerer core.Triggerer - Users core.UserStore - Userz core.UserService - Webhook core.WebhookSender - Options secure.Options - Host string -} - -// Handler returns an http.Handler -func (s Server) Handler() http.Handler { - r := chi.NewRouter() - r.Use(middleware.Recoverer) - r.Use(middleware.NoCache) - r.Use(logger.Middleware) - r.Use(middleware.StripSlashes) - - sec := secure.New(s.Options) - r.Use(sec.Handler) - - r.Route("/hook", func(r chi.Router) { - r.Post("/", HandleHook(s.Repos, s.Builds, s.Triggerer, s.Hooks)) - }) - - r.Get("/link/{namespace}/{name}/tree/*", link.HandleTree(s.Linker)) - r.Get("/link/{namespace}/{name}/src/*", link.HandleTree(s.Linker)) - r.Get("/link/{namespace}/{name}/commit/{commit}", link.HandleCommit(s.Linker)) - r.Get("/version", HandleVersion) - r.Get("/varz", HandleVarz(s.Client, s.License)) - - r.Handle("/login", - s.Login.Handler( - http.HandlerFunc( - HandleLogin( - s.Users, - s.Userz, - s.Syncer, - s.Session, - s.Admitter, - s.Webhook, - ), - ), - ), - ) - r.Get("/logout", HandleLogout()) - r.Post("/logout", HandleLogout()) - - h := http.FileServer(dist.New()) - h = setupCache(h) - r.Handle("/favicon.png", h) - r.Handle("/manifest.json", h) - r.Handle("/asset-manifest.json", h) - r.Handle("/static/*filepath", h) - r.NotFound(HandleIndex(s.Host, s.Session, s.Licenses)) - - return r -} diff --git a/handler/web/writer.go b/handler/web/writer.go deleted file mode 100644 index 55fba239cb..0000000000 --- a/handler/web/writer.go +++ /dev/null @@ -1,98 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package web - -import ( - "encoding/json" - "errors" - "net/http" - "os" - "strconv" -) - -// indent the json-encoded API responses -var indent bool - -func init() { - indent, _ = strconv.ParseBool( - os.Getenv("HTTP_JSON_INDENT"), - ) -} - -var ( - // errInvalidToken is returned when the api request token is invalid. - errInvalidToken = errors.New("Invalid or missing token") - - // errUnauthorized is returned when the user is not authorized. - errUnauthorized = errors.New("Unauthorized") - - // errForbidden is returned when user access is forbidden. - errForbidden = errors.New("Forbidden") - - // errNotFound is returned when a resource is not found. - errNotFound = errors.New("Not Found") -) - -// Error represents a json-encoded API error. -type Error struct { - Message string `json:"message"` -} - -// writeErrorCode writes the json-encoded error message to the response. -func writeErrorCode(w http.ResponseWriter, err error, status int) { - writeJSON(w, &Error{Message: err.Error()}, status) -} - -// writeError writes the json-encoded error message to the response -// with a 500 internal server error. 
-func writeError(w http.ResponseWriter, err error) { - writeErrorCode(w, err, 500) -} - -// writeNotFound writes the json-encoded error message to the response -// with a 404 not found status code. -func writeNotFound(w http.ResponseWriter, err error) { - writeErrorCode(w, err, 404) -} - -// writeUnauthorized writes the json-encoded error message to the response -// with a 401 unauthorized status code. -func writeUnauthorized(w http.ResponseWriter, err error) { - writeErrorCode(w, err, 401) -} - -// writeForbidden writes the json-encoded error message to the response -// with a 403 forbidden status code. -func writeForbidden(w http.ResponseWriter, err error) { - writeErrorCode(w, err, 403) -} - -// writeBadRequest writes the json-encoded error message to the response -// with a 400 bad request status code. -func writeBadRequest(w http.ResponseWriter, err error) { - writeErrorCode(w, err, 400) -} - -// writeJSON writes the json-encoded error message to the response -// with a 400 bad request status code. -func writeJSON(w http.ResponseWriter, v interface{}, status int) { - w.Header().Set("Content-Type", "application/json") - w.WriteHeader(status) - enc := json.NewEncoder(w) - if indent { - enc.SetIndent("", " ") - } - enc.Encode(v) -} diff --git a/handler/web/writer_test.go b/handler/web/writer_test.go deleted file mode 100644 index e5cb8ad985..0000000000 --- a/handler/web/writer_test.go +++ /dev/null @@ -1,144 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package web - -import ( - "encoding/json" - "errors" - "net/http" - "net/http/httptest" - "testing" -) - -func TestWriteError(t *testing.T) { - w := httptest.NewRecorder() - - err := errors.New("pc load letter") - writeError(w, err) - - if got, want := w.Code, 500; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - errjson := &Error{} - json.NewDecoder(w.Body).Decode(errjson) - if got, want := errjson.Message, err.Error(); got != want { - t.Errorf("Want error message %s, got %s", want, got) - } -} - -func TestWriteErrorCode(t *testing.T) { - w := httptest.NewRecorder() - - err := errors.New("pc load letter") - writeErrorCode(w, err, 418) - - if got, want := w.Code, 418; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - errjson := &Error{} - json.NewDecoder(w.Body).Decode(errjson) - if got, want := errjson.Message, err.Error(); got != want { - t.Errorf("Want error message %s, got %s", want, got) - } -} - -func TestWriteNotFound(t *testing.T) { - w := httptest.NewRecorder() - - err := errors.New("pc load letter") - writeNotFound(w, err) - - if got, want := w.Code, 404; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - errjson := &Error{} - json.NewDecoder(w.Body).Decode(errjson) - if got, want := errjson.Message, err.Error(); got != want { - t.Errorf("Want error message %s, got %s", want, got) - } -} - -func TestWriteUnauthorized(t *testing.T) { - w := httptest.NewRecorder() - - err := errors.New("pc load letter") - writeUnauthorized(w, err) - - if got, want := w.Code, 401; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - errjson := &Error{} - json.NewDecoder(w.Body).Decode(errjson) - if got, want := errjson.Message, err.Error(); got != want { - t.Errorf("Want error message %s, got %s", want, got) - } -} - -func TestWriteForbidden(t *testing.T) { - w := httptest.NewRecorder() - - err := 
errors.New("pc load letter") - writeForbidden(w, err) - - if got, want := w.Code, 403; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - errjson := &Error{} - json.NewDecoder(w.Body).Decode(errjson) - if got, want := errjson.Message, err.Error(); got != want { - t.Errorf("Want error message %s, got %s", want, got) - } -} - -func TestWriteBadRequest(t *testing.T) { - w := httptest.NewRecorder() - - err := errors.New("pc load letter") - writeBadRequest(w, err) - - if got, want := w.Code, 400; want != got { - t.Errorf("Want response code %d, got %d", want, got) - } - - errjson := &Error{} - json.NewDecoder(w.Body).Decode(errjson) - if got, want := errjson.Message, err.Error(); got != want { - t.Errorf("Want error message %s, got %s", want, got) - } -} - -func TestWriteJSON(t *testing.T) { - // without indent - { - w := httptest.NewRecorder() - writeJSON(w, map[string]string{"hello": "world"}, http.StatusTeapot) - if got, want := w.Body.String(), "{\"hello\":\"world\"}\n"; got != want { - t.Errorf("Want JSON body %q, got %q", want, got) - } - if got, want := w.HeaderMap.Get("Content-Type"), "application/json"; got != want { - t.Errorf("Want Content-Type %q, got %q", want, got) - } - if got, want := w.Code, http.StatusTeapot; got != want { - t.Errorf("Want status code %d, got %d", want, got) - } - } - // with indent - { - indent = true - defer func() { - indent = false - }() - w := httptest.NewRecorder() - writeJSON(w, map[string]string{"hello": "world"}, http.StatusTeapot) - if got, want := w.Body.String(), "{\n \"hello\": \"world\"\n}\n"; got != want { - t.Errorf("Want JSON body %q, got %q", want, got) - } - } -} diff --git a/livelog/livelog.go b/livelog/livelog.go deleted file mode 100644 index 5e288189b5..0000000000 --- a/livelog/livelog.go +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright 2021 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build !oss - -package livelog - -import ( - "github.com/drone/drone/core" - "github.com/drone/drone/service/redisdb" -) - -// New creates a new log streamer. If Redis client passed as parameter is not nil it uses -// a Redis implementation, otherwise it uses an in-memory implementation. -func New(rdb redisdb.RedisDB) core.LogStream { - if rdb != nil { - return newStreamRedis(rdb) - } - - return newStreamer() -} diff --git a/livelog/livelog_oss.go b/livelog/livelog_oss.go deleted file mode 100644 index c638b25e55..0000000000 --- a/livelog/livelog_oss.go +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright 2021 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. - -// +build oss - -package livelog - -import ( - "github.com/drone/drone/core" - "github.com/drone/drone/service/redisdb" -) - -// New creates a new in-memory log streamer. -func New(r redisdb.RedisDB) core.LogStream { - return newStreamer() -} diff --git a/livelog/stream.go b/livelog/stream.go deleted file mode 100644 index eec557fdc8..0000000000 --- a/livelog/stream.go +++ /dev/null @@ -1,92 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package livelog - -import ( - "context" - "sync" - - "github.com/drone/drone/core" -) - -// this is the amount of items that are stored in memory -// in the buffer. This should result in approximately 10kb -// of memory allocated per-stream and per-subscriber, not -// including any logdata stored in these structures. -const bufferSize = 5000 - -type stream struct { - sync.Mutex - - hist []*core.Line - list map[*subscriber]struct{} -} - -func newStream() *stream { - return &stream{ - list: map[*subscriber]struct{}{}, - } -} - -func (s *stream) write(line *core.Line) error { - s.Lock() - s.hist = append(s.hist, line) - for l := range s.list { - l.publish(line) - } - // the history should not be unbounded. The history - // slice is capped and items are removed in a FIFO - // ordering when capacity is reached. - if size := len(s.hist); size >= bufferSize { - s.hist = s.hist[size-bufferSize:] - } - s.Unlock() - return nil -} - -func (s *stream) subscribe(ctx context.Context) (<-chan *core.Line, <-chan error) { - sub := &subscriber{ - handler: make(chan *core.Line, bufferSize), - closec: make(chan struct{}), - } - err := make(chan error) - - s.Lock() - for _, line := range s.hist { - sub.publish(line) - } - s.list[sub] = struct{}{} - s.Unlock() - - go func() { - defer close(err) - select { - case <-sub.closec: - case <-ctx.Done(): - sub.close() - } - }() - return sub.handler, err -} - -func (s *stream) close() error { - s.Lock() - defer s.Unlock() - for sub := range s.list { - delete(s.list, sub) - sub.close() - } - return nil -} diff --git a/livelog/stream_redis.go b/livelog/stream_redis.go deleted file mode 100644 index 9a72fab79f..0000000000 --- a/livelog/stream_redis.go +++ /dev/null @@ -1,226 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -// +build !oss - -package livelog - -import ( - "context" - "encoding/json" - "fmt" - "strconv" - "time" - - "github.com/drone/drone/core" - "github.com/drone/drone/service/redisdb" - - "github.com/go-redis/redis/v8" -) - -func newStreamRedis(r redisdb.RedisDB) core.LogStream { - return streamRedis{ - rdb: r, - } -} - -const ( - redisKeyExpiryTime = 5 * time.Hour // How long each key exists in redis - redisPollTime = 100 * time.Millisecond // should not be too large to avoid redis clients getting occupied for long - redisTailMaxTime = 1 * time.Hour // maximum duration a tail can last - redisEntryKey = "line" - redisStreamPrefix = "drone-log-" -) - -type streamRedis struct { - rdb redisdb.RedisDB -} - -// Create creates a redis stream and sets an expiry on it. -func (r streamRedis) Create(ctx context.Context, id int64) error { - // Delete if a stream already exists with the same key - _ = r.Delete(ctx, id) - - client := r.rdb.Client() - - key := redisStreamPrefix + strconv.FormatInt(id, 10) - - addResp := client.XAdd(ctx, &redis.XAddArgs{ - Stream: key, - ID: "*", // auto-generate a unique incremental ID - MaxLen: bufferSize, - Approx: true, - Values: map[string]interface{}{redisEntryKey: []byte{}}, - }) - if err := addResp.Err(); err != nil { - return fmt.Errorf("livelog/redis: could not create stream with key %s", key) - } - - res := client.Expire(ctx, key, redisKeyExpiryTime) - if err := res.Err(); err != nil { - return fmt.Errorf("livelog/redis: could not set expiry for key %s", key) - } - - return nil -} - -// Delete deletes a stream -func (r streamRedis) Delete(ctx context.Context, id int64) error { - client := r.rdb.Client() - - key := redisStreamPrefix + strconv.FormatInt(id, 10) - - if err := r._exists(ctx, key); err != nil { - return err - } - - deleteResp := client.Del(ctx, key) - if err := deleteResp.Err(); err != nil { - return fmt.Errorf("livelog/redis: could not delete stream for step %d", id) - } - - return nil -} - -// Write writes information into the Redis stream -func (r streamRedis) Write(ctx context.Context, id int64, line *core.Line) error { - client := r.rdb.Client() - - key := redisStreamPrefix + strconv.FormatInt(id, 10) - - if err := r._exists(ctx, key); err != nil { - return err - } - - lineJsonData, _ := json.Marshal(line) - addResp := client.XAdd(ctx, &redis.XAddArgs{ - Stream: key, - ID: "*", // auto-generate a unique incremental ID - MaxLen: bufferSize, - Approx: true, - Values: map[string]interface{}{redisEntryKey: lineJsonData}, - }) - if err := addResp.Err(); err != nil { - return err - } - - return nil -} - -// Tail returns back all the lines in the stream. 
-func (r streamRedis) Tail(ctx context.Context, id int64) (<-chan *core.Line, <-chan error) { - client := r.rdb.Client() - - key := redisStreamPrefix + strconv.FormatInt(id, 10) - - if err := r._exists(ctx, key); err != nil { - return nil, nil - } - - chLines := make(chan *core.Line, bufferSize) - chErr := make(chan error, 1) - - go func() { - defer close(chErr) - defer close(chLines) - timeout := time.After(redisTailMaxTime) // polling should not last for longer than tailMaxTime - - // Keep reading from the stream and writing to the channel - lastID := "0" - - for { - select { - case <-ctx.Done(): - return - case <-timeout: - return - default: - readResp := client.XRead(ctx, &redis.XReadArgs{ - Streams: append([]string{key}, lastID), - Block: redisPollTime, // periodically check for ctx.Done - }) - if readResp.Err() != nil && readResp.Err() != redis.Nil { // readResp.Err() is sometimes set to "redis: nil" instead of nil - chErr <- readResp.Err() - return - } - - for _, msg := range readResp.Val() { - messages := msg.Messages - if len(messages) > 0 { - lastID = messages[len(messages)-1].ID - } else { // should not happen - return - } - - for _, message := range messages { - values := message.Values - if val, ok := values[redisEntryKey]; ok { - var line *core.Line - if err := json.Unmarshal([]byte(val.(string)), &line); err != nil { - continue // ignore errors in the stream - } - chLines <- line - } - } - } - } - } - }() - - return chLines, chErr -} - -// Info returns info about log streams present in redis -func (r streamRedis) Info(ctx context.Context) (info *core.LogStreamInfo) { - client := r.rdb.Client() - - info = &core.LogStreamInfo{ - Streams: make(map[int64]int), - } - - keysResp := client.Keys(ctx, redisStreamPrefix+"*") - if err := keysResp.Err(); err != nil { - return - } - - for _, key := range keysResp.Val() { - ids := key[len(redisStreamPrefix):] - id, err := strconv.ParseInt(ids, 10, 64) - if err != nil { - continue - } - - lenResp := client.XLen(ctx, key) - if err := lenResp.Err(); err != nil { - continue - } - - size := int(lenResp.Val()) - - info.Streams[id] = size - } - - return -} - -func (r streamRedis) _exists(ctx context.Context, key string) error { - client := r.rdb.Client() - - exists := client.Exists(ctx, key) - if exists.Err() != nil || exists.Val() == 0 { - return fmt.Errorf("livelog/redis: log stream %s not found", key) - } - - return nil -} diff --git a/livelog/stream_test.go b/livelog/stream_test.go deleted file mode 100644 index 3548b5e3f6..0000000000 --- a/livelog/stream_test.go +++ /dev/null @@ -1,119 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package livelog - -import ( - "context" - "sync" - "testing" - "time" - - "github.com/drone/drone/core" -) - -func TestStream(t *testing.T) { - w := sync.WaitGroup{} - - s := newStream() - - // test ability to replay history. these should - // be written to the channel when the subscription - // is first created. - - s.write(&core.Line{Number: 1}) - s.write(&core.Line{Number: 2}) - s.write(&core.Line{Number: 3}) - w.Add(3) - - ctx, cancel := context.WithCancel(context.Background()) - defer cancel() - - stream, errc := s.subscribe(ctx) - - w.Add(4) - go func() { - s.write(&core.Line{Number: 4}) - s.write(&core.Line{Number: 5}) - s.write(&core.Line{Number: 6}) - w.Done() - }() - - // the code above adds 6 lines to the log stream. 
- // the wait group blocks until all 6 items are - // received. - - go func() { - for { - select { - case <-errc: - return - case <-stream: - w.Done() - } - } - }() - - w.Wait() -} - -func TestStream_Close(t *testing.T) { - s := newStream() - s.hist = []*core.Line{ - &core.Line{}, - } - - ctx, cancel := context.WithCancel(context.Background()) - defer cancel() - - s.subscribe(ctx) - if got, want := len(s.list), 1; got != want { - t.Errorf("Want %d subscribers before close, got %d", want, got) - } - - var sub *subscriber - for sub = range s.list { - } - - if got, want := sub.closed, false; got != want { - t.Errorf("Want subscriber open") - } - - if err := s.close(); err != nil { - t.Error(err) - } - - if got, want := len(s.list), 0; got != want { - t.Errorf("Want %d subscribers after close, got %d", want, got) - } - - <-time.After(time.Millisecond) - - if got, want := sub.closed, true; got != want { - t.Errorf("Want subscriber closed") - } -} - -func TestStream_BufferHistory(t *testing.T) { - s := newStream() - - // exceeds the history buffer by +10 - x := new(core.Line) - for i := 0; i < bufferSize+10; i++ { - s.write(x) - } - - if got, want := len(s.hist), bufferSize; got != want { - t.Errorf("Want %d history items, got %d", want, got) - } - - latest := &core.Line{Number: 1} - s.write(latest) - - if got, want := s.hist[len(s.hist)-1], latest; got != want { - t.Errorf("Expect history stored in FIFO order") - } -} diff --git a/livelog/streamer.go b/livelog/streamer.go deleted file mode 100644 index 09f0b166ab..0000000000 --- a/livelog/streamer.go +++ /dev/null @@ -1,94 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package livelog - -import ( - "context" - "errors" - "sync" - - "github.com/drone/drone/core" -) - -// error returned when a stream is not registered with -// the streamer. -var errStreamNotFound = errors.New("stream: not found") - -type streamer struct { - sync.Mutex - - streams map[int64]*stream -} - -// New returns a new in-memory log streamer. 
-func newStreamer() core.LogStream { - return &streamer{ - streams: make(map[int64]*stream), - } -} - -func (s *streamer) Create(ctx context.Context, id int64) error { - s.Lock() - s.streams[id] = newStream() - s.Unlock() - return nil -} - -func (s *streamer) Delete(ctx context.Context, id int64) error { - s.Lock() - stream, ok := s.streams[id] - if ok { - delete(s.streams, id) - } - s.Unlock() - if !ok { - return errStreamNotFound - } - return stream.close() -} - -func (s *streamer) Write(ctx context.Context, id int64, line *core.Line) error { - s.Lock() - stream, ok := s.streams[id] - s.Unlock() - if !ok { - return errStreamNotFound - } - return stream.write(line) -} - -func (s *streamer) Tail(ctx context.Context, id int64) (<-chan *core.Line, <-chan error) { - s.Lock() - stream, ok := s.streams[id] - s.Unlock() - if !ok { - return nil, nil - } - return stream.subscribe(ctx) -} - -func (s *streamer) Info(ctx context.Context) *core.LogStreamInfo { - s.Lock() - defer s.Unlock() - info := &core.LogStreamInfo{ - Streams: map[int64]int{}, - } - for id, stream := range s.streams { - stream.Lock() - info.Streams[id] = len(stream.list) - stream.Unlock() - } - return info -} diff --git a/livelog/streamer_test.go b/livelog/streamer_test.go deleted file mode 100644 index df7f5f84e3..0000000000 --- a/livelog/streamer_test.go +++ /dev/null @@ -1,119 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package livelog - -import ( - "context" - "sync" - "testing" - - "github.com/drone/drone/core" - - "github.com/google/go-cmp/cmp" -) - -func TestStreamer(t *testing.T) { - s := newStreamer().(*streamer) - err := s.Create(context.Background(), 1) - if err != nil { - t.Error(err) - } - if len(s.streams) == 0 { - t.Errorf("Want stream registered") - } - - w := sync.WaitGroup{} - w.Add(4) - go func() { - s.Write(context.Background(), 1, &core.Line{}) - s.Write(context.Background(), 1, &core.Line{}) - s.Write(context.Background(), 1, &core.Line{}) - w.Done() - }() - - ctx, cancel := context.WithCancel(context.Background()) - defer cancel() - - tail, errc := s.Tail(ctx, 1) - - go func() { - for { - select { - case <-errc: - return - case <-ctx.Done(): - return - case <-tail: - w.Done() - } - } - }() - - w.Wait() -} - -func TestStreamerDelete(t *testing.T) { - s := newStreamer().(*streamer) - err := s.Create(context.Background(), 1) - if err != nil { - t.Error(err) - } - if len(s.streams) == 0 { - t.Errorf("Want stream registered") - } - err = s.Delete(context.Background(), 1) - if err != nil { - t.Error(err) - } - if len(s.streams) != 0 { - t.Errorf("Want stream unregistered") - } -} - -func TestStreamerDeleteErr(t *testing.T) { - s := newStreamer() - err := s.Delete(context.Background(), 1) - if err != errStreamNotFound { - t.Errorf("Want errStreamNotFound") - } -} - -func TestStreamerWriteErr(t *testing.T) { - s := newStreamer() - err := s.Write(context.Background(), 1, &core.Line{}) - if err != errStreamNotFound { - t.Errorf("Want errStreamNotFound") - } -} - -func TestStreamTailNotFound(t *testing.T) { - s := newStreamer() - outc, errc := s.Tail(context.Background(), 0) - if outc != nil && errc != nil { - t.Errorf("Expect nil channel when stream not found") - } -} - -func TestStreamerInfo(t *testing.T) { - s := newStreamer().(*streamer) - s.streams[1] = &stream{list: map[*subscriber]struct{}{{}: struct{}{}, {}: struct{}{}}} - s.streams[2] = &stream{list: 
map[*subscriber]struct{}{{}: struct{}{}}} - s.streams[3] = &stream{list: map[*subscriber]struct{}{}} - got := s.Info(context.Background()) - - want := &core.LogStreamInfo{ - Streams: map[int64]int{ - 1: 2, - 2: 1, - 3: 0, - }, - } - - if diff := cmp.Diff(got, want); diff != "" { - t.Errorf(diff) - } -} diff --git a/livelog/sub.go b/livelog/sub.go deleted file mode 100644 index d96460e70f..0000000000 --- a/livelog/sub.go +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package livelog - -import ( - "sync" - - "github.com/drone/drone/core" -) - -type subscriber struct { - sync.Mutex - - handler chan *core.Line - closec chan struct{} - closed bool -} - -func (s *subscriber) publish(line *core.Line) { - select { - case <-s.closec: - case s.handler <- line: - default: - // lines are sent on a buffered channel. If there - // is a slow consumer that is not processing events, - // the buffered channel will fill and newer messages - // are ignored. - } -} - -func (s *subscriber) close() { - s.Lock() - if !s.closed { - close(s.closec) - s.closed = true - } - s.Unlock() -} diff --git a/livelog/sub_test.go b/livelog/sub_test.go deleted file mode 100644 index ab9bc07213..0000000000 --- a/livelog/sub_test.go +++ /dev/null @@ -1,83 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package livelog - -import ( - "testing" - - "github.com/drone/drone/core" -) - -func TestSubscription_publish(t *testing.T) { - s := &subscriber{ - handler: make(chan *core.Line, 5), - closec: make(chan struct{}), - } - - e := new(core.Line) - s.publish(e) - - if got, want := len(s.handler), 1; got != want { - t.Errorf("Want buffered channel size %d, got %d", want, got) - } - if got, want := <-s.handler, e; got != want { - t.Errorf("Want log entry received from channel") - } - if got, want := len(s.handler), 0; got != want { - t.Errorf("Want buffered channel size %d, got %d", want, got) - } -} - -func TestSubscription_buffer(t *testing.T) { - s := &subscriber{ - handler: make(chan *core.Line, 1), - closec: make(chan struct{}), - } - - // the buffer size is 1 to simulate what happens - // if the subscriber cannot keep up with processing - // and the buffer fills up. In this case, lines - // should be ignored until pending lines are - // processed. 
- - e := new(core.Line) - s.publish(e) - s.publish(e) - s.publish(e) - s.publish(e) - s.publish(e) - - if got, want := len(s.handler), 1; got != want { - t.Errorf("Want buffered channel size %d, got %d", want, got) - } -} - -func TestSubscription_stop(t *testing.T) { - s := &subscriber{ - handler: make(chan *core.Line, 1), - closec: make(chan struct{}), - } - - if got, want := s.closed, false; got != want { - t.Errorf("Want subscription open") - } - - s.close() - if got, want := s.closed, true; got != want { - t.Errorf("Want subscription closed") - } - - // if the subscription is closed we should - // ignore any new events being published. - - e := new(core.Line) - s.publish(e) - s.publish(e) - s.publish(e) - s.publish(e) - s.publish(e) -} diff --git a/logger/handler.go b/logger/handler.go deleted file mode 100644 index 54440518b4..0000000000 --- a/logger/handler.go +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package logger - -import ( - "net/http" - "time" - - "github.com/segmentio/ksuid" - "github.com/sirupsen/logrus" -) - -// Middleware provides logging middleware. -func Middleware(next http.Handler) http.Handler { - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - id := r.Header.Get("X-Request-ID") - if id == "" { - id = ksuid.New().String() - } - ctx := r.Context() - log := FromContext(ctx).WithField("request-id", id) - ctx = WithContext(ctx, log) - start := time.Now() - next.ServeHTTP(w, r.WithContext(ctx)) - end := time.Now() - log.WithFields(logrus.Fields{ - "method": r.Method, - "request": r.RequestURI, - "remote": r.RemoteAddr, - "latency": end.Sub(start), - "time": end.Format(time.RFC3339), - "authtype": authType(r), - }).Debug() - }) -} - -func authType(r *http.Request) string { - if r.Header.Get("Authorization") != "" || r.FormValue("access_token") != "" { - return "token" - } - - return "cookie" -} diff --git a/logger/handler_test.go b/logger/handler_test.go deleted file mode 100644 index edddbfb50e..0000000000 --- a/logger/handler_test.go +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -// +build !oss - -package logger - -import ( - "net/http/httptest" - "testing" -) - -func TestMiddleware(t *testing.T) { - t.Skip() -} - -func TestMiddleware_GenerateRequestID(t *testing.T) { - t.Skip() -} - -func TestAuthType(t *testing.T) { - cookieRequest := httptest.NewRequest("GET", "/", nil) - if authType(cookieRequest) != "cookie" { - t.Error("authtype is not cookie") - } - - headerRequest := httptest.NewRequest("GET", "/", nil) - headerRequest.Header.Add("Authorization", "test") - if authType(headerRequest) != "token" { - t.Error("authtype is not token") - } - - formRequest := httptest.NewRequest("GET", "/?access_token=test", nil) - if authType(formRequest) != "token" { - t.Error("authtype is not token") - } -} diff --git a/logger/logger.go b/logger/logger.go deleted file mode 100644 index 469fce4a7d..0000000000 --- a/logger/logger.go +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// Copyright 2016 The containerd Authors. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package logger - -import ( - "context" - "net/http" - - "github.com/sirupsen/logrus" -) - -type loggerKey struct{} - -// L is an alias for the the standard logger. -var L = logrus.NewEntry(logrus.StandardLogger()) - -// WithContext returns a new context with the provided logger. Use in -// combination with logger.WithField(s) for great effect. -func WithContext(ctx context.Context, logger *logrus.Entry) context.Context { - return context.WithValue(ctx, loggerKey{}, logger) -} - -// FromContext retrieves the current logger from the context. If no -// logger is available, the default logger is returned. -func FromContext(ctx context.Context) *logrus.Entry { - logger := ctx.Value(loggerKey{}) - if logger == nil { - return L - } - return logger.(*logrus.Entry) -} - -// FromRequest retrieves the current logger from the request. If no -// logger is available, the default logger is returned. -func FromRequest(r *http.Request) *logrus.Entry { - return FromContext(r.Context()) -} diff --git a/logger/logger_test.go b/logger/logger_test.go deleted file mode 100644 index 37569fc8c6..0000000000 --- a/logger/logger_test.go +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -// +build !oss - -package logger - -import ( - "context" - "net/http" - "testing" - - "github.com/sirupsen/logrus" -) - -func TestContext(t *testing.T) { - entry := logrus.NewEntry(logrus.StandardLogger()) - - ctx := WithContext(context.Background(), entry) - got := FromContext(ctx) - - if got != entry { - t.Errorf("Expected Logger from context") - } -} - -func TestEmptyContext(t *testing.T) { - got := FromContext(context.Background()) - if got != L { - t.Errorf("Expected default Logger from context") - } -} - -func TestRequest(t *testing.T) { - entry := logrus.NewEntry(logrus.StandardLogger()) - - ctx := WithContext(context.Background(), entry) - req := new(http.Request) - req = req.WithContext(ctx) - - got := FromRequest(req) - - if got != entry { - t.Errorf("Expected Logger from http.Request") - } -} diff --git a/metric/builds.go b/metric/builds.go deleted file mode 100644 index dd04622c81..0000000000 --- a/metric/builds.go +++ /dev/null @@ -1,52 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package metric - -import ( - "github.com/drone/drone/core" - - "github.com/prometheus/client_golang/prometheus" -) - -// BuildCount provides metrics for build counts. -func BuildCount(builds core.BuildStore) { - prometheus.MustRegister( - prometheus.NewGaugeFunc(prometheus.GaugeOpts{ - Name: "drone_build_count", - Help: "Total number of builds.", - }, func() float64 { - i, _ := builds.Count(noContext) - return float64(i) - }), - ) -} - -// PendingBuildCount provides metrics for pending build counts. -func PendingBuildCount(builds core.BuildStore) { - prometheus.MustRegister( - prometheus.NewGaugeFunc(prometheus.GaugeOpts{ - Name: "drone_pending_builds", - Help: "Total number of pending builds.", - }, func() float64 { - list, _ := builds.Pending(noContext) - return float64(len(list)) - }), - ) -} - -// RunningBuildCount provides metrics for running build counts. -func RunningBuildCount(builds core.BuildStore) { - prometheus.MustRegister( - prometheus.NewGaugeFunc(prometheus.GaugeOpts{ - Name: "drone_running_builds", - Help: "Total number of running builds.", - }, func() float64 { - list, _ := builds.Running(noContext) - return float64(len(list)) - }), - ) -} diff --git a/metric/builds_test.go b/metric/builds_test.go deleted file mode 100644 index 33d293a6f6..0000000000 --- a/metric/builds_test.go +++ /dev/null @@ -1,137 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package metric - -import ( - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/mock" - - "github.com/golang/mock/gomock" - "github.com/prometheus/client_golang/prometheus" -) - -func TestBuildCount(t *testing.T) { - controller := gomock.NewController(t) - - // restore the default prometheus registerer - // when the unit test is complete. 
- snapshot := prometheus.DefaultRegisterer - defer func() { - prometheus.DefaultRegisterer = snapshot - controller.Finish() - }() - - // creates a blank registry - registry := prometheus.NewRegistry() - prometheus.DefaultRegisterer = registry - - // x2 repository count - count := int64(5) - - builds := mock.NewMockBuildStore(controller) - builds.EXPECT().Count(gomock.Any()).Return(count, nil) - BuildCount(builds) - - metrics, err := registry.Gather() - if err != nil { - t.Error(err) - return - } - if want, got := len(metrics), 1; want != got { - t.Errorf("Expect registered metric") - return - } - metric := metrics[0] - if want, got := metric.GetName(), "drone_build_count"; want != got { - t.Errorf("Expect metric name %s, got %s", want, got) - } - if want, got := metric.Metric[0].Gauge.GetValue(), float64(count); want != got { - t.Errorf("Expect metric value %f, got %f", want, got) - } -} - -func TestBuildPendingCount(t *testing.T) { - controller := gomock.NewController(t) - - // restore the default prometheus registerer - // when the unit test is complete. - snapshot := prometheus.DefaultRegisterer - defer func() { - prometheus.DefaultRegisterer = snapshot - controller.Finish() - }() - - // creates a blank registry - registry := prometheus.NewRegistry() - prometheus.DefaultRegisterer = registry - - // x2 repository count - data := []*core.Build{{}, {}, {}, {}, {}} - - builds := mock.NewMockBuildStore(controller) - builds.EXPECT().Pending(gomock.Any()).Return(data, nil) - PendingBuildCount(builds) - - metrics, err := registry.Gather() - if err != nil { - t.Error(err) - return - } - if want, got := len(metrics), 1; want != got { - t.Errorf("Expect registered metric") - return - } - metric := metrics[0] - if want, got := metric.GetName(), "drone_pending_builds"; want != got { - t.Errorf("Expect metric name %s, got %s", want, got) - } - if want, got := metric.Metric[0].Gauge.GetValue(), float64(len(data)); want != got { - t.Errorf("Expect metric value %f, got %f", want, got) - } -} - -func TestBuildRunningCount(t *testing.T) { - controller := gomock.NewController(t) - - // restore the default prometheus registerer - // when the unit test is complete. - snapshot := prometheus.DefaultRegisterer - defer func() { - prometheus.DefaultRegisterer = snapshot - controller.Finish() - }() - - // creates a blank registry - registry := prometheus.NewRegistry() - prometheus.DefaultRegisterer = registry - - // x2 repository count - data := []*core.Build{{}, {}, {}, {}, {}} - - builds := mock.NewMockBuildStore(controller) - builds.EXPECT().Running(gomock.Any()).Return(data, nil) - RunningBuildCount(builds) - - metrics, err := registry.Gather() - if err != nil { - t.Error(err) - return - } - if want, got := len(metrics), 1; want != got { - t.Errorf("Expect registered metric") - return - } - metric := metrics[0] - if want, got := metric.GetName(), "drone_running_builds"; want != got { - t.Errorf("Expect metric name %s, got %s", want, got) - } - if want, got := metric.Metric[0].Gauge.GetValue(), float64(len(data)); want != got { - t.Errorf("Expect metric value %f, got %f", want, got) - } -} diff --git a/metric/handler.go b/metric/handler.go deleted file mode 100644 index 6f186aa114..0000000000 --- a/metric/handler.go +++ /dev/null @@ -1,53 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -// +build !oss - -package metric - -import ( - "errors" - "net/http" - - "github.com/drone/drone/core" - - "github.com/prometheus/client_golang/prometheus/promhttp" -) - -// errInvalidToken is returned when the prometheus token is invalid. -var errInvalidToken = errors.New("Invalid or missing prometheus token") - -// errAccessDenied is returned when the authorized user does not -// have access to the metrics endpoint. -var errAccessDenied = errors.New("Access denied") - -// Server is an http Metrics server. -type Server struct { - metrics http.Handler - session core.Session - anonymous bool -} - -// NewServer returns a new metrics server. -func NewServer(session core.Session, anonymous bool) *Server { - return &Server{ - metrics: promhttp.Handler(), - session: session, - anonymous: anonymous, - } -} - -// ServeHTTP responds to an http.Request and writes system -// metrics to the response body in plain text format. -func (s *Server) ServeHTTP(w http.ResponseWriter, r *http.Request) { - user, _ := s.session.Get(r) - switch { - case !s.anonymous && user == nil: - http.Error(w, errInvalidToken.Error(), http.StatusUnauthorized) - case !s.anonymous && !user.Admin && !user.Machine: - http.Error(w, errAccessDenied.Error(), http.StatusForbidden) - default: - s.metrics.ServeHTTP(w, r) - } -} diff --git a/metric/handler_oss.go b/metric/handler_oss.go deleted file mode 100644 index 253f3c2891..0000000000 --- a/metric/handler_oss.go +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build oss - -package metric - -import ( - "net/http" - - "github.com/drone/drone/core" -) - -// Server is a no-op http Metrics server. -type Server struct { -} - -// NewServer returns a new metrics server. -func NewServer(session core.Session, anonymous bool) *Server { - return new(Server) -} - -// ServeHTTP is a no-op http handler. -func (s *Server) ServeHTTP(w http.ResponseWriter, r *http.Request) {} diff --git a/metric/handler_test.go b/metric/handler_test.go deleted file mode 100644 index 2931e135b4..0000000000 --- a/metric/handler_test.go +++ /dev/null @@ -1,88 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -// +build !oss - -package metric - -import ( - "net/http/httptest" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/mock" - "github.com/golang/mock/gomock" -) - -func TestHandleMetrics(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - - mockUser := &core.User{Admin: false, Machine: true} - session := mock.NewMockSession(controller) - session.EXPECT().Get(r).Return(mockUser, nil) - - NewServer(session, false).ServeHTTP(w, r) - if got, want := w.Code, 200; got != want { - t.Errorf("Want status code %d, got %d", want, got) - } - - if got, want := w.HeaderMap.Get("Content-Type"), "text/plain; version=0.0.4; charset=utf-8"; got != want { - t.Errorf("Want prometheus header %q, got %q", want, got) - } -} - -func TestHandleMetrics_NoSession(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - - session := mock.NewMockSession(controller) - session.EXPECT().Get(r).Return(nil, nil) - - NewServer(session, false).ServeHTTP(w, r) - - if got, want := w.Code, 401; got != want { - t.Errorf("Want status code %d, got %d", want, got) - } -} - -func TestHandleMetrics_NoSessionButAnonymousAccessEnabled(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - - session := mock.NewMockSession(controller) - session.EXPECT().Get(r).Return(nil, nil) - - NewServer(session, true).ServeHTTP(w, r) - - if got, want := w.Code, 200; got != want { - t.Errorf("Want status code %d, got %d", want, got) - } -} - -func TestHandleMetrics_AccessDenied(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - w := httptest.NewRecorder() - r := httptest.NewRequest("GET", "/", nil) - - mockUser := &core.User{Admin: false, Machine: false} - session := mock.NewMockSession(controller) - session.EXPECT().Get(r).Return(mockUser, nil) - - NewServer(session, false).ServeHTTP(w, r) - if got, want := w.Code, 403; got != want { - t.Errorf("Want status code %d, got %d", want, got) - } -} diff --git a/metric/license.go b/metric/license.go deleted file mode 100644 index a7b0e6929c..0000000000 --- a/metric/license.go +++ /dev/null @@ -1,16 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package metric - -import "github.com/drone/drone/core" - -// License registers the license metrics. -func License(license core.LicenseService) { - // track days until expires - // track user limit - // track repo limit -} diff --git a/metric/license_test.go b/metric/license_test.go deleted file mode 100644 index 66b058b4ec..0000000000 --- a/metric/license_test.go +++ /dev/null @@ -1,7 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package metric diff --git a/metric/metric.go b/metric/metric.go deleted file mode 100644 index 66b058b4ec..0000000000 --- a/metric/metric.go +++ /dev/null @@ -1,7 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -// +build !oss - -package metric diff --git a/metric/metric_oss.go b/metric/metric_oss.go deleted file mode 100644 index fbf94954c2..0000000000 --- a/metric/metric_oss.go +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build oss - -package metric - -import "github.com/drone/drone/core" - -func BuildCount(core.BuildStore) {} -func PendingBuildCount(core.BuildStore) {} -func RunningBuildCount(core.BuildStore) {} -func RunningJobCount(core.StageStore) {} -func PendingJobCount(core.StageStore) {} -func RepoCount(core.RepositoryStore) {} -func UserCount(core.UserStore) {} diff --git a/metric/repos.go b/metric/repos.go deleted file mode 100644 index 430be9fcc8..0000000000 --- a/metric/repos.go +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package metric - -import ( - "github.com/drone/drone/core" - - "github.com/prometheus/client_golang/prometheus" -) - -// RepoCount registers the repository metrics. -func RepoCount(repos core.RepositoryStore) { - prometheus.MustRegister( - prometheus.NewGaugeFunc(prometheus.GaugeOpts{ - Name: "drone_repo_count", - Help: "Total number of registered repositories.", - }, func() float64 { - i, _ := repos.Count(noContext) - return float64(i) - }), - ) -} diff --git a/metric/repos_test.go b/metric/repos_test.go deleted file mode 100644 index dc012b884d..0000000000 --- a/metric/repos_test.go +++ /dev/null @@ -1,56 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package metric - -import ( - "testing" - - "github.com/drone/drone/mock" - - "github.com/golang/mock/gomock" - "github.com/prometheus/client_golang/prometheus" -) - -func TestRepoCount(t *testing.T) { - controller := gomock.NewController(t) - - // restore the default prometheus registerer - // when the unit test is complete. 
- snapshot := prometheus.DefaultRegisterer - defer func() { - prometheus.DefaultRegisterer = snapshot - controller.Finish() - }() - - // creates a blank registry - registry := prometheus.NewRegistry() - prometheus.DefaultRegisterer = registry - - // x2 repository count - count := int64(5) - - store := mock.NewMockRepositoryStore(controller) - store.EXPECT().Count(gomock.Any()).Return(count, nil) - RepoCount(store) - - metrics, err := registry.Gather() - if err != nil { - t.Error(err) - return - } - if want, got := len(metrics), 1; want != got { - t.Errorf("Expect registered metric") - return - } - metric := metrics[0] - if want, got := metric.GetName(), "drone_repo_count"; want != got { - t.Errorf("Expect metric name %s, got %s", want, got) - } - if want, got := metric.Metric[0].Gauge.GetValue(), float64(count); want != got { - t.Errorf("Expect metric value %f, got %f", want, got) - } -} diff --git a/metric/sink/config.go b/metric/sink/config.go deleted file mode 100644 index 49b4dea5cc..0000000000 --- a/metric/sink/config.go +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package sink - -// Config configures a Datadog sink. -type Config struct { - Endpoint string - Token string - - License string - Licensor string - Subscription string - EnableGithub bool - EnableGithubEnt bool - EnableGitlab bool - EnableBitbucket bool - EnableStash bool - EnableGogs bool - EnableGitea bool - EnableGitee bool - EnableAgents bool -} diff --git a/metric/sink/datadog.go b/metric/sink/datadog.go deleted file mode 100644 index ebe01f11ae..0000000000 --- a/metric/sink/datadog.go +++ /dev/null @@ -1,172 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package sink - -import ( - "bytes" - "context" - "encoding/json" - "fmt" - "net/http" - "time" - - "github.com/drone/drone/core" -) - -type payload struct { - Series []series `json:"series"` -} - -type series struct { - Metric string `json:"metric"` - Points [][]int64 `json:"points"` - Host string `json:"host"` - Type string `json:"type"` - Tags []string `json:"tags,omitempty"` -} - -// Datadog defines a no-op sink to datadog. -type Datadog struct { - users core.UserStore - repos core.RepositoryStore - builds core.BuildStore - system core.System - config Config - client *http.Client -} - -// New returns a Datadog sink. 
-func New( - users core.UserStore, - repos core.RepositoryStore, - builds core.BuildStore, - system core.System, - config Config, -) *Datadog { - return &Datadog{ - users: users, - repos: repos, - builds: builds, - system: system, - config: config, - } -} - -// Start starts the sink. -func (d *Datadog) Start(ctx context.Context) error { - for { - diff := midnightDiff() - select { - case <-time.After(diff): - d.do(ctx, time.Now().Unix()) - case <-ctx.Done(): - return nil - } - } -} - -func (d *Datadog) do(ctx context.Context, unix int64) error { - users, err := d.users.Count(ctx) - if err != nil { - return err - } - repos, err := d.repos.Count(ctx) - if err != nil { - return err - } - builds, err := d.builds.Count(ctx) - if err != nil { - return err - } - userList, _ := d.users.ListRange(ctx, core.UserParams{ - Sort: false, - Page: 0, - Size: 5, - }) - tags := createTags(d.config) - data := new(payload) - data.Series = []series{ - { - Metric: "drone.users", - Points: [][]int64{[]int64{unix, users}}, - Type: "gauge", - Host: d.system.Host, - Tags: append(tags, createInstallerTags(userList)...), - }, - { - Metric: "drone.repos", - Points: [][]int64{[]int64{unix, repos}}, - Type: "gauge", - Host: d.system.Host, - Tags: tags, - }, - { - Metric: "drone.builds", - Points: [][]int64{[]int64{unix, builds}}, - Type: "gauge", - Host: d.system.Host, - Tags: tags, - }, - } - - buf := new(bytes.Buffer) - err = json.NewEncoder(buf).Encode(data) - if err != nil { - return err - } - - endpoint := fmt.Sprintf("%s?api_key=%s", d.config.Endpoint, d.config.Token) - req, err := http.NewRequest("POST", endpoint, buf) - if err != nil { - return err - } - req.Header.Add("Accept", "application/json") - req.Header.Add("Content-Type", "application/json") - - res, err := httpClient.Do(req) - if err != nil { - return err - } - - res.Body.Close() - return nil -} - -// Client returns the http client. If no custom -// client is provided, the default client is used. -func (d *Datadog) Client() *http.Client { - if d.client == nil { - return httpClient - } - return d.client -} - -// calculate the differences between now and midnight. -func midnightDiff() time.Duration { - a := time.Now() - b := time.Date(a.Year(), a.Month(), a.Day()+1, 0, 0, 0, 0, a.Location()) - return b.Sub(a) -} - -// httpClient should be used for HTTP requests. It -// is configured with a timeout for reliability. -var httpClient = &http.Client{ - Transport: &http.Transport{ - Proxy: http.ProxyFromEnvironment, - TLSHandshakeTimeout: 30 * time.Second, - DisableKeepAlives: true, - }, - Timeout: 1 * time.Minute, -} diff --git a/metric/sink/datadog_test.go b/metric/sink/datadog_test.go deleted file mode 100644 index e6c5fa1b1d..0000000000 --- a/metric/sink/datadog_test.go +++ /dev/null @@ -1,95 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package sink - -import ( - "context" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/mock" - "github.com/drone/drone/version" - "github.com/golang/mock/gomock" - "github.com/h2non/gock" -) - -var noContext = context.Background() - -func TestDo(t *testing.T) { - controller := gomock.NewController(t) - - gock.InterceptClient(httpClient) - defer func() { - gock.RestoreClient(httpClient) - gock.Off() - controller.Finish() - }() - - users := mock.NewMockUserStore(controller) - users.EXPECT().Count(gomock.Any()).Return(int64(10), nil) - users.EXPECT().ListRange(gomock.Any(), gomock.Any()).Return([]*core.User{{Email: "jane@acme.com", Created: 1257894000}}, nil) - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().Count(gomock.Any()).Return(int64(20), nil) - - builds := mock.NewMockBuildStore(controller) - builds.EXPECT().Count(gomock.Any()).Return(int64(30), nil) - - gock.New("https://api.datadoghq.com"). - Post("/api/v1/series"). - JSON(sample). - Reply(200) - - d := new(Datadog) - d.users = users - d.repos = repos - d.builds = builds - d.system.Host = "test.example.com" - d.config.License = "trial" - d.config.EnableGithub = true - d.config.EnableAgents = true - d.config.Endpoint = "https://api.datadoghq.com/api/v1/series" - d.do(noContext, 915148800) - - if gock.IsPending() { - t.Errorf("Unfinished requests") - } -} - -var sample = `{ - "series" : [ - { - "metric": "drone.users", - "points": [[915148800, 10]], - "type": "gauge", - "host": "test.example.com", - "tags": ["version:` + version.Version.String() + `","remote:github:cloud","scheduler:internal:agents","license:trial","installer:jane@acme.com","installed:2009-11-10T23:00:00Z"] - }, - { - "metric": "drone.repos", - "points": [[915148800, 20]], - "type": "gauge", - "host": "test.example.com", - "tags": ["version:` + version.Version.String() + `","remote:github:cloud","scheduler:internal:agents","license:trial"] - }, - { - "metric": "drone.builds", - "points": [[915148800, 30]], - "type": "gauge", - "host": "test.example.com", - "tags": ["version:` + version.Version.String() + `","remote:github:cloud","scheduler:internal:agents","license:trial"] - } - ] -}` diff --git a/metric/sink/tags.go b/metric/sink/tags.go deleted file mode 100644 index 06d89c7063..0000000000 --- a/metric/sink/tags.go +++ /dev/null @@ -1,93 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package sink - -import ( - "fmt" - "time" - - "github.com/drone/drone/core" - "github.com/drone/drone/version" -) - -func createTags(config Config) []string { - tags := []string{ - fmt.Sprintf("version:%s", version.Version), - } - - switch { - case config.EnableBitbucket: - tags = append(tags, "remote:bitbucket:cloud") - case config.EnableStash: - tags = append(tags, "remote:bitbucket:server") - case config.EnableGithubEnt: - tags = append(tags, "remote:github:enterprise") - case config.EnableGithub: - tags = append(tags, "remote:github:cloud") - case config.EnableGitlab: - tags = append(tags, "remote:gitlab") - case config.EnableGogs: - tags = append(tags, "remote:gogs") - case config.EnableGitea: - tags = append(tags, "remote:gitea") - case config.EnableGitee: - tags = append(tags, "remote:gitee") - default: - tags = append(tags, "remote:undefined") - } - - switch { - case config.EnableAgents: - tags = append(tags, "scheduler:internal:agents") - default: - tags = append(tags, "scheduler:internal:local") - } - - if config.Subscription != "" { - tag := fmt.Sprintf("license:%s:%s:%s", - config.License, - config.Licensor, - config.Subscription, - ) - tags = append(tags, tag) - } else if config.Licensor != "" { - tag := fmt.Sprintf("license:%s:%s", - config.License, - config.Licensor, - ) - tags = append(tags, tag) - } else { - tag := fmt.Sprintf("license:%s", config.License) - tags = append(tags, tag) - } - return tags -} - -func createInstallerTags(users []*core.User) []string { - var tags []string - for _, user := range users { - if user.Machine { - continue - } - if len(user.Email) == 0 { - continue - } - tag1 := fmt.Sprintf("installer:%s", user.Email) - tag2 := fmt.Sprintf("installed:%s", time.Unix(user.Created, 0).UTC().Format(time.RFC3339Nano)) - tags = append(tags, tag1, tag2) - break - } - return tags -} diff --git a/metric/stages.go b/metric/stages.go deleted file mode 100644 index 0cb4a42f42..0000000000 --- a/metric/stages.go +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package metric - -import ( - "github.com/drone/drone/core" - - "github.com/prometheus/client_golang/prometheus" -) - -// RunningJobCount provides metrics for running job counts. -func RunningJobCount(stages core.StageStore) { - prometheus.MustRegister( - prometheus.NewGaugeFunc(prometheus.GaugeOpts{ - Name: "drone_running_jobs", - Help: "Total number of running jobs.", - }, func() float64 { - list, _ := stages.ListState(noContext, core.StatusRunning) - return float64(len(list)) - }), - ) -} - -// PendingJobCount provides metrics for pending job counts. -func PendingJobCount(stages core.StageStore) { - prometheus.MustRegister( - prometheus.NewGaugeFunc(prometheus.GaugeOpts{ - Name: "drone_pending_jobs", - Help: "Total number of pending jobs.", - }, func() float64 { - list, _ := stages.ListState(noContext, core.StatusPending) - return float64(len(list)) - }), - ) -} diff --git a/metric/stages_test.go b/metric/stages_test.go deleted file mode 100644 index d79b6922cd..0000000000 --- a/metric/stages_test.go +++ /dev/null @@ -1,97 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -// +build !oss - -package metric - -import ( - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/mock" - - "github.com/golang/mock/gomock" - "github.com/prometheus/client_golang/prometheus" -) - -func TestStagePendingCount(t *testing.T) { - controller := gomock.NewController(t) - - // restore the default prometheus registerer - // when the unit test is complete. - snapshot := prometheus.DefaultRegisterer - defer func() { - prometheus.DefaultRegisterer = snapshot - controller.Finish() - }() - - // creates a blank registry - registry := prometheus.NewRegistry() - prometheus.DefaultRegisterer = registry - - // x5 stage count - data := []*core.Stage{{}, {}, {}, {}, {}} - - stages := mock.NewMockStageStore(controller) - stages.EXPECT().ListState(gomock.Any(), core.StatusPending).Return(data, nil) - PendingJobCount(stages) - - metrics, err := registry.Gather() - if err != nil { - t.Error(err) - return - } - if want, got := len(metrics), 1; want != got { - t.Errorf("Expect registered metric") - return - } - metric := metrics[0] - if want, got := metric.GetName(), "drone_pending_jobs"; want != got { - t.Errorf("Expect metric name %s, got %s", want, got) - } - if want, got := metric.Metric[0].Gauge.GetValue(), float64(len(data)); want != got { - t.Errorf("Expect metric value %f, got %f", want, got) - } -} - -func TestStageRunningCount(t *testing.T) { - controller := gomock.NewController(t) - - // restore the default prometheus registerer - // when the unit test is complete. - snapshot := prometheus.DefaultRegisterer - defer func() { - prometheus.DefaultRegisterer = snapshot - controller.Finish() - }() - - // creates a blank registry - registry := prometheus.NewRegistry() - prometheus.DefaultRegisterer = registry - - // x5 stage count - data := []*core.Stage{{}, {}, {}, {}, {}} - - stages := mock.NewMockStageStore(controller) - stages.EXPECT().ListState(gomock.Any(), core.StatusRunning).Return(data, nil) - RunningJobCount(stages) - - metrics, err := registry.Gather() - if err != nil { - t.Error(err) - return - } - if want, got := len(metrics), 1; want != got { - t.Errorf("Expect registered metric") - return - } - metric := metrics[0] - if want, got := metric.GetName(), "drone_running_jobs"; want != got { - t.Errorf("Expect metric name %s, got %s", want, got) - } - if want, got := metric.Metric[0].Gauge.GetValue(), float64(len(data)); want != got { - t.Errorf("Expect metric value %f, got %f", want, got) - } -} diff --git a/metric/users.go b/metric/users.go deleted file mode 100644 index fd5162df8e..0000000000 --- a/metric/users.go +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package metric - -import ( - "context" - - "github.com/drone/drone/core" - - "github.com/prometheus/client_golang/prometheus" -) - -var noContext = context.Background() - -// UserCount provides metrics for registered users. -func UserCount(users core.UserStore) { - prometheus.MustRegister( - prometheus.NewGaugeFunc(prometheus.GaugeOpts{ - Name: "drone_user_count", - Help: "Total number of active users.", - }, func() float64 { - i, _ := users.Count(noContext) - return float64(i) - }), - ) -} diff --git a/metric/users_test.go b/metric/users_test.go deleted file mode 100644 index 646d8c2035..0000000000 --- a/metric/users_test.go +++ /dev/null @@ -1,56 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. 
-// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package metric - -import ( - "testing" - - "github.com/drone/drone/mock" - - "github.com/golang/mock/gomock" - "github.com/prometheus/client_golang/prometheus" -) - -func TestUserCount(t *testing.T) { - controller := gomock.NewController(t) - - // restore the default prometheus registerer - // when the unit test is complete. - snapshot := prometheus.DefaultRegisterer - defer func() { - prometheus.DefaultRegisterer = snapshot - controller.Finish() - }() - - // creates a blank registry - registry := prometheus.NewRegistry() - prometheus.DefaultRegisterer = registry - - // x2 repository count - count := int64(5) - - store := mock.NewMockUserStore(controller) - store.EXPECT().Count(gomock.Any()).Return(count, nil) - UserCount(store) - - metrics, err := registry.Gather() - if err != nil { - t.Error(err) - return - } - if want, got := len(metrics), 1; want != got { - t.Errorf("Expect registered metric") - return - } - metric := metrics[0] - if want, got := metric.GetName(), "drone_user_count"; want != got { - t.Errorf("Expect metric name %s, got %s", want, got) - } - if want, got := metric.Metric[0].Gauge.GetValue(), float64(count); want != got { - t.Errorf("Expect metric value %f, got %f", want, got) - } -} diff --git a/mock/mock.go b/mock/mock.go deleted file mode 100644 index f91c1e5d52..0000000000 --- a/mock/mock.go +++ /dev/null @@ -1,9 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package mock - -//go:generate mockgen -package=mock -destination=mock_gen.go github.com/drone/drone/core Pubsub,Canceler,ConvertService,ValidateService,NetrcService,Renewer,HookParser,UserService,RepositoryService,CommitService,StatusService,HookService,FileService,Batcher,BuildStore,CronStore,LogStore,PermStore,SecretStore,GlobalSecretStore,StageStore,StepStore,RepositoryStore,UserStore,Scheduler,Session,OrganizationService,SecretService,RegistryService,ConfigService,Transferer,Triggerer,Syncer,LogStream,WebhookSender,LicenseService,TemplateStore,CardStore diff --git a/mock/mock_gen.go b/mock/mock_gen.go deleted file mode 100644 index 9848282806..0000000000 --- a/mock/mock_gen.go +++ /dev/null @@ -1,2976 +0,0 @@ -// Code generated by MockGen. DO NOT EDIT. -// Source: github.com/drone/drone/core (interfaces: Pubsub,Canceler,ConvertService,ValidateService,NetrcService,Renewer,HookParser,UserService,RepositoryService,CommitService,StatusService,HookService,FileService,Batcher,BuildStore,CronStore,LogStore,PermStore,SecretStore,GlobalSecretStore,StageStore,StepStore,RepositoryStore,UserStore,Scheduler,Session,OrganizationService,SecretService,RegistryService,ConfigService,Transferer,Triggerer,Syncer,LogStream,WebhookSender,LicenseService,TemplateStore,CardStore) - -// Package mock is a generated GoMock package. -package mock - -import ( - context "context" - io "io" - http "net/http" - reflect "reflect" - - core "github.com/drone/drone/core" - gomock "github.com/golang/mock/gomock" -) - -// MockPubsub is a mock of Pubsub interface. -type MockPubsub struct { - ctrl *gomock.Controller - recorder *MockPubsubMockRecorder -} - -// MockPubsubMockRecorder is the mock recorder for MockPubsub. -type MockPubsubMockRecorder struct { - mock *MockPubsub -} - -// NewMockPubsub creates a new mock instance. 
-func NewMockPubsub(ctrl *gomock.Controller) *MockPubsub { - mock := &MockPubsub{ctrl: ctrl} - mock.recorder = &MockPubsubMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockPubsub) EXPECT() *MockPubsubMockRecorder { - return m.recorder -} - -// Publish mocks base method. -func (m *MockPubsub) Publish(arg0 context.Context, arg1 *core.Message) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Publish", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// Publish indicates an expected call of Publish. -func (mr *MockPubsubMockRecorder) Publish(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Publish", reflect.TypeOf((*MockPubsub)(nil).Publish), arg0, arg1) -} - -// Subscribe mocks base method. -func (m *MockPubsub) Subscribe(arg0 context.Context) (<-chan *core.Message, <-chan error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Subscribe", arg0) - ret0, _ := ret[0].(<-chan *core.Message) - ret1, _ := ret[1].(<-chan error) - return ret0, ret1 -} - -// Subscribe indicates an expected call of Subscribe. -func (mr *MockPubsubMockRecorder) Subscribe(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Subscribe", reflect.TypeOf((*MockPubsub)(nil).Subscribe), arg0) -} - -// Subscribers mocks base method. -func (m *MockPubsub) Subscribers() (int, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Subscribers") - ret0, _ := ret[0].(int) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Subscribers indicates an expected call of Subscribers. -func (mr *MockPubsubMockRecorder) Subscribers() *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Subscribers", reflect.TypeOf((*MockPubsub)(nil).Subscribers)) -} - -// MockCanceler is a mock of Canceler interface. -type MockCanceler struct { - ctrl *gomock.Controller - recorder *MockCancelerMockRecorder -} - -// MockCancelerMockRecorder is the mock recorder for MockCanceler. -type MockCancelerMockRecorder struct { - mock *MockCanceler -} - -// NewMockCanceler creates a new mock instance. -func NewMockCanceler(ctrl *gomock.Controller) *MockCanceler { - mock := &MockCanceler{ctrl: ctrl} - mock.recorder = &MockCancelerMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockCanceler) EXPECT() *MockCancelerMockRecorder { - return m.recorder -} - -// Cancel mocks base method. -func (m *MockCanceler) Cancel(arg0 context.Context, arg1 *core.Repository, arg2 *core.Build) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Cancel", arg0, arg1, arg2) - ret0, _ := ret[0].(error) - return ret0 -} - -// Cancel indicates an expected call of Cancel. -func (mr *MockCancelerMockRecorder) Cancel(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Cancel", reflect.TypeOf((*MockCanceler)(nil).Cancel), arg0, arg1, arg2) -} - -// CancelPending mocks base method. -func (m *MockCanceler) CancelPending(arg0 context.Context, arg1 *core.Repository, arg2 *core.Build) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "CancelPending", arg0, arg1, arg2) - ret0, _ := ret[0].(error) - return ret0 -} - -// CancelPending indicates an expected call of CancelPending. 
-func (mr *MockCancelerMockRecorder) CancelPending(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CancelPending", reflect.TypeOf((*MockCanceler)(nil).CancelPending), arg0, arg1, arg2) -} - -// MockConvertService is a mock of ConvertService interface. -type MockConvertService struct { - ctrl *gomock.Controller - recorder *MockConvertServiceMockRecorder -} - -// MockConvertServiceMockRecorder is the mock recorder for MockConvertService. -type MockConvertServiceMockRecorder struct { - mock *MockConvertService -} - -// NewMockConvertService creates a new mock instance. -func NewMockConvertService(ctrl *gomock.Controller) *MockConvertService { - mock := &MockConvertService{ctrl: ctrl} - mock.recorder = &MockConvertServiceMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockConvertService) EXPECT() *MockConvertServiceMockRecorder { - return m.recorder -} - -// Convert mocks base method. -func (m *MockConvertService) Convert(arg0 context.Context, arg1 *core.ConvertArgs) (*core.Config, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Convert", arg0, arg1) - ret0, _ := ret[0].(*core.Config) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Convert indicates an expected call of Convert. -func (mr *MockConvertServiceMockRecorder) Convert(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Convert", reflect.TypeOf((*MockConvertService)(nil).Convert), arg0, arg1) -} - -// MockValidateService is a mock of ValidateService interface. -type MockValidateService struct { - ctrl *gomock.Controller - recorder *MockValidateServiceMockRecorder -} - -// MockValidateServiceMockRecorder is the mock recorder for MockValidateService. -type MockValidateServiceMockRecorder struct { - mock *MockValidateService -} - -// NewMockValidateService creates a new mock instance. -func NewMockValidateService(ctrl *gomock.Controller) *MockValidateService { - mock := &MockValidateService{ctrl: ctrl} - mock.recorder = &MockValidateServiceMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockValidateService) EXPECT() *MockValidateServiceMockRecorder { - return m.recorder -} - -// Validate mocks base method. -func (m *MockValidateService) Validate(arg0 context.Context, arg1 *core.ValidateArgs) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Validate", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// Validate indicates an expected call of Validate. -func (mr *MockValidateServiceMockRecorder) Validate(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Validate", reflect.TypeOf((*MockValidateService)(nil).Validate), arg0, arg1) -} - -// MockNetrcService is a mock of NetrcService interface. -type MockNetrcService struct { - ctrl *gomock.Controller - recorder *MockNetrcServiceMockRecorder -} - -// MockNetrcServiceMockRecorder is the mock recorder for MockNetrcService. -type MockNetrcServiceMockRecorder struct { - mock *MockNetrcService -} - -// NewMockNetrcService creates a new mock instance. 
-func NewMockNetrcService(ctrl *gomock.Controller) *MockNetrcService { - mock := &MockNetrcService{ctrl: ctrl} - mock.recorder = &MockNetrcServiceMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockNetrcService) EXPECT() *MockNetrcServiceMockRecorder { - return m.recorder -} - -// Create mocks base method. -func (m *MockNetrcService) Create(arg0 context.Context, arg1 *core.User, arg2 *core.Repository) (*core.Netrc, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Create", arg0, arg1, arg2) - ret0, _ := ret[0].(*core.Netrc) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Create indicates an expected call of Create. -func (mr *MockNetrcServiceMockRecorder) Create(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Create", reflect.TypeOf((*MockNetrcService)(nil).Create), arg0, arg1, arg2) -} - -// MockRenewer is a mock of Renewer interface. -type MockRenewer struct { - ctrl *gomock.Controller - recorder *MockRenewerMockRecorder -} - -// MockRenewerMockRecorder is the mock recorder for MockRenewer. -type MockRenewerMockRecorder struct { - mock *MockRenewer -} - -// NewMockRenewer creates a new mock instance. -func NewMockRenewer(ctrl *gomock.Controller) *MockRenewer { - mock := &MockRenewer{ctrl: ctrl} - mock.recorder = &MockRenewerMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockRenewer) EXPECT() *MockRenewerMockRecorder { - return m.recorder -} - -// Renew mocks base method. -func (m *MockRenewer) Renew(arg0 context.Context, arg1 *core.User, arg2 bool) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Renew", arg0, arg1, arg2) - ret0, _ := ret[0].(error) - return ret0 -} - -// Renew indicates an expected call of Renew. -func (mr *MockRenewerMockRecorder) Renew(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Renew", reflect.TypeOf((*MockRenewer)(nil).Renew), arg0, arg1, arg2) -} - -// MockHookParser is a mock of HookParser interface. -type MockHookParser struct { - ctrl *gomock.Controller - recorder *MockHookParserMockRecorder -} - -// MockHookParserMockRecorder is the mock recorder for MockHookParser. -type MockHookParserMockRecorder struct { - mock *MockHookParser -} - -// NewMockHookParser creates a new mock instance. -func NewMockHookParser(ctrl *gomock.Controller) *MockHookParser { - mock := &MockHookParser{ctrl: ctrl} - mock.recorder = &MockHookParserMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockHookParser) EXPECT() *MockHookParserMockRecorder { - return m.recorder -} - -// Parse mocks base method. -func (m *MockHookParser) Parse(arg0 *http.Request, arg1 func(string) string) (*core.Hook, *core.Repository, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Parse", arg0, arg1) - ret0, _ := ret[0].(*core.Hook) - ret1, _ := ret[1].(*core.Repository) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 -} - -// Parse indicates an expected call of Parse. -func (mr *MockHookParserMockRecorder) Parse(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Parse", reflect.TypeOf((*MockHookParser)(nil).Parse), arg0, arg1) -} - -// MockUserService is a mock of UserService interface. 
-type MockUserService struct { - ctrl *gomock.Controller - recorder *MockUserServiceMockRecorder -} - -// MockUserServiceMockRecorder is the mock recorder for MockUserService. -type MockUserServiceMockRecorder struct { - mock *MockUserService -} - -// NewMockUserService creates a new mock instance. -func NewMockUserService(ctrl *gomock.Controller) *MockUserService { - mock := &MockUserService{ctrl: ctrl} - mock.recorder = &MockUserServiceMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockUserService) EXPECT() *MockUserServiceMockRecorder { - return m.recorder -} - -// Find mocks base method. -func (m *MockUserService) Find(arg0 context.Context, arg1, arg2 string) (*core.User, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Find", arg0, arg1, arg2) - ret0, _ := ret[0].(*core.User) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Find indicates an expected call of Find. -func (mr *MockUserServiceMockRecorder) Find(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Find", reflect.TypeOf((*MockUserService)(nil).Find), arg0, arg1, arg2) -} - -// FindLogin mocks base method. -func (m *MockUserService) FindLogin(arg0 context.Context, arg1 *core.User, arg2 string) (*core.User, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "FindLogin", arg0, arg1, arg2) - ret0, _ := ret[0].(*core.User) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// FindLogin indicates an expected call of FindLogin. -func (mr *MockUserServiceMockRecorder) FindLogin(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "FindLogin", reflect.TypeOf((*MockUserService)(nil).FindLogin), arg0, arg1, arg2) -} - -// MockRepositoryService is a mock of RepositoryService interface. -type MockRepositoryService struct { - ctrl *gomock.Controller - recorder *MockRepositoryServiceMockRecorder -} - -// MockRepositoryServiceMockRecorder is the mock recorder for MockRepositoryService. -type MockRepositoryServiceMockRecorder struct { - mock *MockRepositoryService -} - -// NewMockRepositoryService creates a new mock instance. -func NewMockRepositoryService(ctrl *gomock.Controller) *MockRepositoryService { - mock := &MockRepositoryService{ctrl: ctrl} - mock.recorder = &MockRepositoryServiceMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockRepositoryService) EXPECT() *MockRepositoryServiceMockRecorder { - return m.recorder -} - -// Find mocks base method. -func (m *MockRepositoryService) Find(arg0 context.Context, arg1 *core.User, arg2 string) (*core.Repository, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Find", arg0, arg1, arg2) - ret0, _ := ret[0].(*core.Repository) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Find indicates an expected call of Find. -func (mr *MockRepositoryServiceMockRecorder) Find(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Find", reflect.TypeOf((*MockRepositoryService)(nil).Find), arg0, arg1, arg2) -} - -// FindPerm mocks base method. 
-func (m *MockRepositoryService) FindPerm(arg0 context.Context, arg1 *core.User, arg2 string) (*core.Perm, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "FindPerm", arg0, arg1, arg2) - ret0, _ := ret[0].(*core.Perm) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// FindPerm indicates an expected call of FindPerm. -func (mr *MockRepositoryServiceMockRecorder) FindPerm(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "FindPerm", reflect.TypeOf((*MockRepositoryService)(nil).FindPerm), arg0, arg1, arg2) -} - -// List mocks base method. -func (m *MockRepositoryService) List(arg0 context.Context, arg1 *core.User) ([]*core.Repository, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "List", arg0, arg1) - ret0, _ := ret[0].([]*core.Repository) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// List indicates an expected call of List. -func (mr *MockRepositoryServiceMockRecorder) List(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "List", reflect.TypeOf((*MockRepositoryService)(nil).List), arg0, arg1) -} - -// MockCommitService is a mock of CommitService interface. -type MockCommitService struct { - ctrl *gomock.Controller - recorder *MockCommitServiceMockRecorder -} - -// MockCommitServiceMockRecorder is the mock recorder for MockCommitService. -type MockCommitServiceMockRecorder struct { - mock *MockCommitService -} - -// NewMockCommitService creates a new mock instance. -func NewMockCommitService(ctrl *gomock.Controller) *MockCommitService { - mock := &MockCommitService{ctrl: ctrl} - mock.recorder = &MockCommitServiceMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockCommitService) EXPECT() *MockCommitServiceMockRecorder { - return m.recorder -} - -// Find mocks base method. -func (m *MockCommitService) Find(arg0 context.Context, arg1 *core.User, arg2, arg3 string) (*core.Commit, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Find", arg0, arg1, arg2, arg3) - ret0, _ := ret[0].(*core.Commit) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Find indicates an expected call of Find. -func (mr *MockCommitServiceMockRecorder) Find(arg0, arg1, arg2, arg3 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Find", reflect.TypeOf((*MockCommitService)(nil).Find), arg0, arg1, arg2, arg3) -} - -// FindRef mocks base method. -func (m *MockCommitService) FindRef(arg0 context.Context, arg1 *core.User, arg2, arg3 string) (*core.Commit, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "FindRef", arg0, arg1, arg2, arg3) - ret0, _ := ret[0].(*core.Commit) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// FindRef indicates an expected call of FindRef. -func (mr *MockCommitServiceMockRecorder) FindRef(arg0, arg1, arg2, arg3 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "FindRef", reflect.TypeOf((*MockCommitService)(nil).FindRef), arg0, arg1, arg2, arg3) -} - -// ListChanges mocks base method. 
-func (m *MockCommitService) ListChanges(arg0 context.Context, arg1 *core.User, arg2, arg3, arg4 string) ([]*core.Change, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ListChanges", arg0, arg1, arg2, arg3, arg4) - ret0, _ := ret[0].([]*core.Change) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// ListChanges indicates an expected call of ListChanges. -func (mr *MockCommitServiceMockRecorder) ListChanges(arg0, arg1, arg2, arg3, arg4 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListChanges", reflect.TypeOf((*MockCommitService)(nil).ListChanges), arg0, arg1, arg2, arg3, arg4) -} - -// MockStatusService is a mock of StatusService interface. -type MockStatusService struct { - ctrl *gomock.Controller - recorder *MockStatusServiceMockRecorder -} - -// MockStatusServiceMockRecorder is the mock recorder for MockStatusService. -type MockStatusServiceMockRecorder struct { - mock *MockStatusService -} - -// NewMockStatusService creates a new mock instance. -func NewMockStatusService(ctrl *gomock.Controller) *MockStatusService { - mock := &MockStatusService{ctrl: ctrl} - mock.recorder = &MockStatusServiceMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockStatusService) EXPECT() *MockStatusServiceMockRecorder { - return m.recorder -} - -// Send mocks base method. -func (m *MockStatusService) Send(arg0 context.Context, arg1 *core.User, arg2 *core.StatusInput) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Send", arg0, arg1, arg2) - ret0, _ := ret[0].(error) - return ret0 -} - -// Send indicates an expected call of Send. -func (mr *MockStatusServiceMockRecorder) Send(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Send", reflect.TypeOf((*MockStatusService)(nil).Send), arg0, arg1, arg2) -} - -// MockHookService is a mock of HookService interface. -type MockHookService struct { - ctrl *gomock.Controller - recorder *MockHookServiceMockRecorder -} - -// MockHookServiceMockRecorder is the mock recorder for MockHookService. -type MockHookServiceMockRecorder struct { - mock *MockHookService -} - -// NewMockHookService creates a new mock instance. -func NewMockHookService(ctrl *gomock.Controller) *MockHookService { - mock := &MockHookService{ctrl: ctrl} - mock.recorder = &MockHookServiceMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockHookService) EXPECT() *MockHookServiceMockRecorder { - return m.recorder -} - -// Create mocks base method. -func (m *MockHookService) Create(arg0 context.Context, arg1 *core.User, arg2 *core.Repository) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Create", arg0, arg1, arg2) - ret0, _ := ret[0].(error) - return ret0 -} - -// Create indicates an expected call of Create. -func (mr *MockHookServiceMockRecorder) Create(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Create", reflect.TypeOf((*MockHookService)(nil).Create), arg0, arg1, arg2) -} - -// Delete mocks base method. -func (m *MockHookService) Delete(arg0 context.Context, arg1 *core.User, arg2 *core.Repository) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Delete", arg0, arg1, arg2) - ret0, _ := ret[0].(error) - return ret0 -} - -// Delete indicates an expected call of Delete. 
-func (mr *MockHookServiceMockRecorder) Delete(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Delete", reflect.TypeOf((*MockHookService)(nil).Delete), arg0, arg1, arg2) -} - -// MockFileService is a mock of FileService interface. -type MockFileService struct { - ctrl *gomock.Controller - recorder *MockFileServiceMockRecorder -} - -// MockFileServiceMockRecorder is the mock recorder for MockFileService. -type MockFileServiceMockRecorder struct { - mock *MockFileService -} - -// NewMockFileService creates a new mock instance. -func NewMockFileService(ctrl *gomock.Controller) *MockFileService { - mock := &MockFileService{ctrl: ctrl} - mock.recorder = &MockFileServiceMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockFileService) EXPECT() *MockFileServiceMockRecorder { - return m.recorder -} - -// Find mocks base method. -func (m *MockFileService) Find(arg0 context.Context, arg1 *core.User, arg2, arg3, arg4, arg5 string) (*core.File, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Find", arg0, arg1, arg2, arg3, arg4, arg5) - ret0, _ := ret[0].(*core.File) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Find indicates an expected call of Find. -func (mr *MockFileServiceMockRecorder) Find(arg0, arg1, arg2, arg3, arg4, arg5 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Find", reflect.TypeOf((*MockFileService)(nil).Find), arg0, arg1, arg2, arg3, arg4, arg5) -} - -// MockBatcher is a mock of Batcher interface. -type MockBatcher struct { - ctrl *gomock.Controller - recorder *MockBatcherMockRecorder -} - -// MockBatcherMockRecorder is the mock recorder for MockBatcher. -type MockBatcherMockRecorder struct { - mock *MockBatcher -} - -// NewMockBatcher creates a new mock instance. -func NewMockBatcher(ctrl *gomock.Controller) *MockBatcher { - mock := &MockBatcher{ctrl: ctrl} - mock.recorder = &MockBatcherMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockBatcher) EXPECT() *MockBatcherMockRecorder { - return m.recorder -} - -// Batch mocks base method. -func (m *MockBatcher) Batch(arg0 context.Context, arg1 *core.User, arg2 *core.Batch) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Batch", arg0, arg1, arg2) - ret0, _ := ret[0].(error) - return ret0 -} - -// Batch indicates an expected call of Batch. -func (mr *MockBatcherMockRecorder) Batch(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Batch", reflect.TypeOf((*MockBatcher)(nil).Batch), arg0, arg1, arg2) -} - -// MockBuildStore is a mock of BuildStore interface. -type MockBuildStore struct { - ctrl *gomock.Controller - recorder *MockBuildStoreMockRecorder -} - -// MockBuildStoreMockRecorder is the mock recorder for MockBuildStore. -type MockBuildStoreMockRecorder struct { - mock *MockBuildStore -} - -// NewMockBuildStore creates a new mock instance. -func NewMockBuildStore(ctrl *gomock.Controller) *MockBuildStore { - mock := &MockBuildStore{ctrl: ctrl} - mock.recorder = &MockBuildStoreMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockBuildStore) EXPECT() *MockBuildStoreMockRecorder { - return m.recorder -} - -// Count mocks base method. 
-func (m *MockBuildStore) Count(arg0 context.Context) (int64, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Count", arg0) - ret0, _ := ret[0].(int64) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Count indicates an expected call of Count. -func (mr *MockBuildStoreMockRecorder) Count(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Count", reflect.TypeOf((*MockBuildStore)(nil).Count), arg0) -} - -// Create mocks base method. -func (m *MockBuildStore) Create(arg0 context.Context, arg1 *core.Build, arg2 []*core.Stage) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Create", arg0, arg1, arg2) - ret0, _ := ret[0].(error) - return ret0 -} - -// Create indicates an expected call of Create. -func (mr *MockBuildStoreMockRecorder) Create(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Create", reflect.TypeOf((*MockBuildStore)(nil).Create), arg0, arg1, arg2) -} - -// Delete mocks base method. -func (m *MockBuildStore) Delete(arg0 context.Context, arg1 *core.Build) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Delete", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// Delete indicates an expected call of Delete. -func (mr *MockBuildStoreMockRecorder) Delete(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Delete", reflect.TypeOf((*MockBuildStore)(nil).Delete), arg0, arg1) -} - -// DeleteBranch mocks base method. -func (m *MockBuildStore) DeleteBranch(arg0 context.Context, arg1 int64, arg2 string) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "DeleteBranch", arg0, arg1, arg2) - ret0, _ := ret[0].(error) - return ret0 -} - -// DeleteBranch indicates an expected call of DeleteBranch. -func (mr *MockBuildStoreMockRecorder) DeleteBranch(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteBranch", reflect.TypeOf((*MockBuildStore)(nil).DeleteBranch), arg0, arg1, arg2) -} - -// DeleteDeploy mocks base method. -func (m *MockBuildStore) DeleteDeploy(arg0 context.Context, arg1 int64, arg2 string) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "DeleteDeploy", arg0, arg1, arg2) - ret0, _ := ret[0].(error) - return ret0 -} - -// DeleteDeploy indicates an expected call of DeleteDeploy. -func (mr *MockBuildStoreMockRecorder) DeleteDeploy(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteDeploy", reflect.TypeOf((*MockBuildStore)(nil).DeleteDeploy), arg0, arg1, arg2) -} - -// DeletePull mocks base method. -func (m *MockBuildStore) DeletePull(arg0 context.Context, arg1 int64, arg2 int) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "DeletePull", arg0, arg1, arg2) - ret0, _ := ret[0].(error) - return ret0 -} - -// DeletePull indicates an expected call of DeletePull. -func (mr *MockBuildStoreMockRecorder) DeletePull(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeletePull", reflect.TypeOf((*MockBuildStore)(nil).DeletePull), arg0, arg1, arg2) -} - -// Find mocks base method. 
-func (m *MockBuildStore) Find(arg0 context.Context, arg1 int64) (*core.Build, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Find", arg0, arg1) - ret0, _ := ret[0].(*core.Build) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Find indicates an expected call of Find. -func (mr *MockBuildStoreMockRecorder) Find(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Find", reflect.TypeOf((*MockBuildStore)(nil).Find), arg0, arg1) -} - -// FindNumber mocks base method. -func (m *MockBuildStore) FindNumber(arg0 context.Context, arg1, arg2 int64) (*core.Build, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "FindNumber", arg0, arg1, arg2) - ret0, _ := ret[0].(*core.Build) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// FindNumber indicates an expected call of FindNumber. -func (mr *MockBuildStoreMockRecorder) FindNumber(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "FindNumber", reflect.TypeOf((*MockBuildStore)(nil).FindNumber), arg0, arg1, arg2) -} - -// FindRef mocks base method. -func (m *MockBuildStore) FindRef(arg0 context.Context, arg1 int64, arg2 string) (*core.Build, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "FindRef", arg0, arg1, arg2) - ret0, _ := ret[0].(*core.Build) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// FindRef indicates an expected call of FindRef. -func (mr *MockBuildStoreMockRecorder) FindRef(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "FindRef", reflect.TypeOf((*MockBuildStore)(nil).FindRef), arg0, arg1, arg2) -} - -// LatestBranches mocks base method. -func (m *MockBuildStore) LatestBranches(arg0 context.Context, arg1 int64) ([]*core.Build, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "LatestBranches", arg0, arg1) - ret0, _ := ret[0].([]*core.Build) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// LatestBranches indicates an expected call of LatestBranches. -func (mr *MockBuildStoreMockRecorder) LatestBranches(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "LatestBranches", reflect.TypeOf((*MockBuildStore)(nil).LatestBranches), arg0, arg1) -} - -// LatestDeploys mocks base method. -func (m *MockBuildStore) LatestDeploys(arg0 context.Context, arg1 int64) ([]*core.Build, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "LatestDeploys", arg0, arg1) - ret0, _ := ret[0].([]*core.Build) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// LatestDeploys indicates an expected call of LatestDeploys. -func (mr *MockBuildStoreMockRecorder) LatestDeploys(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "LatestDeploys", reflect.TypeOf((*MockBuildStore)(nil).LatestDeploys), arg0, arg1) -} - -// LatestPulls mocks base method. -func (m *MockBuildStore) LatestPulls(arg0 context.Context, arg1 int64) ([]*core.Build, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "LatestPulls", arg0, arg1) - ret0, _ := ret[0].([]*core.Build) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// LatestPulls indicates an expected call of LatestPulls. 
-func (mr *MockBuildStoreMockRecorder) LatestPulls(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "LatestPulls", reflect.TypeOf((*MockBuildStore)(nil).LatestPulls), arg0, arg1) -} - -// List mocks base method. -func (m *MockBuildStore) List(arg0 context.Context, arg1 int64, arg2, arg3 int) ([]*core.Build, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "List", arg0, arg1, arg2, arg3) - ret0, _ := ret[0].([]*core.Build) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// List indicates an expected call of List. -func (mr *MockBuildStoreMockRecorder) List(arg0, arg1, arg2, arg3 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "List", reflect.TypeOf((*MockBuildStore)(nil).List), arg0, arg1, arg2, arg3) -} - -// ListRef mocks base method. -func (m *MockBuildStore) ListRef(arg0 context.Context, arg1 int64, arg2 string, arg3, arg4 int) ([]*core.Build, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ListRef", arg0, arg1, arg2, arg3, arg4) - ret0, _ := ret[0].([]*core.Build) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// ListRef indicates an expected call of ListRef. -func (mr *MockBuildStoreMockRecorder) ListRef(arg0, arg1, arg2, arg3, arg4 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListRef", reflect.TypeOf((*MockBuildStore)(nil).ListRef), arg0, arg1, arg2, arg3, arg4) -} - -// Pending mocks base method. -func (m *MockBuildStore) Pending(arg0 context.Context) ([]*core.Build, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Pending", arg0) - ret0, _ := ret[0].([]*core.Build) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Pending indicates an expected call of Pending. -func (mr *MockBuildStoreMockRecorder) Pending(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Pending", reflect.TypeOf((*MockBuildStore)(nil).Pending), arg0) -} - -// Purge mocks base method. -func (m *MockBuildStore) Purge(arg0 context.Context, arg1, arg2 int64) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Purge", arg0, arg1, arg2) - ret0, _ := ret[0].(error) - return ret0 -} - -// Purge indicates an expected call of Purge. -func (mr *MockBuildStoreMockRecorder) Purge(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Purge", reflect.TypeOf((*MockBuildStore)(nil).Purge), arg0, arg1, arg2) -} - -// Running mocks base method. -func (m *MockBuildStore) Running(arg0 context.Context) ([]*core.Build, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Running", arg0) - ret0, _ := ret[0].([]*core.Build) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Running indicates an expected call of Running. -func (mr *MockBuildStoreMockRecorder) Running(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Running", reflect.TypeOf((*MockBuildStore)(nil).Running), arg0) -} - -// Update mocks base method. -func (m *MockBuildStore) Update(arg0 context.Context, arg1 *core.Build) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Update", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// Update indicates an expected call of Update. 
-func (mr *MockBuildStoreMockRecorder) Update(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Update", reflect.TypeOf((*MockBuildStore)(nil).Update), arg0, arg1) -} - -// MockCronStore is a mock of CronStore interface. -type MockCronStore struct { - ctrl *gomock.Controller - recorder *MockCronStoreMockRecorder -} - -// MockCronStoreMockRecorder is the mock recorder for MockCronStore. -type MockCronStoreMockRecorder struct { - mock *MockCronStore -} - -// NewMockCronStore creates a new mock instance. -func NewMockCronStore(ctrl *gomock.Controller) *MockCronStore { - mock := &MockCronStore{ctrl: ctrl} - mock.recorder = &MockCronStoreMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockCronStore) EXPECT() *MockCronStoreMockRecorder { - return m.recorder -} - -// Create mocks base method. -func (m *MockCronStore) Create(arg0 context.Context, arg1 *core.Cron) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Create", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// Create indicates an expected call of Create. -func (mr *MockCronStoreMockRecorder) Create(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Create", reflect.TypeOf((*MockCronStore)(nil).Create), arg0, arg1) -} - -// Delete mocks base method. -func (m *MockCronStore) Delete(arg0 context.Context, arg1 *core.Cron) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Delete", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// Delete indicates an expected call of Delete. -func (mr *MockCronStoreMockRecorder) Delete(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Delete", reflect.TypeOf((*MockCronStore)(nil).Delete), arg0, arg1) -} - -// Find mocks base method. -func (m *MockCronStore) Find(arg0 context.Context, arg1 int64) (*core.Cron, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Find", arg0, arg1) - ret0, _ := ret[0].(*core.Cron) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Find indicates an expected call of Find. -func (mr *MockCronStoreMockRecorder) Find(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Find", reflect.TypeOf((*MockCronStore)(nil).Find), arg0, arg1) -} - -// FindName mocks base method. -func (m *MockCronStore) FindName(arg0 context.Context, arg1 int64, arg2 string) (*core.Cron, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "FindName", arg0, arg1, arg2) - ret0, _ := ret[0].(*core.Cron) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// FindName indicates an expected call of FindName. -func (mr *MockCronStoreMockRecorder) FindName(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "FindName", reflect.TypeOf((*MockCronStore)(nil).FindName), arg0, arg1, arg2) -} - -// List mocks base method. -func (m *MockCronStore) List(arg0 context.Context, arg1 int64) ([]*core.Cron, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "List", arg0, arg1) - ret0, _ := ret[0].([]*core.Cron) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// List indicates an expected call of List. 
-func (mr *MockCronStoreMockRecorder) List(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "List", reflect.TypeOf((*MockCronStore)(nil).List), arg0, arg1) -} - -// Ready mocks base method. -func (m *MockCronStore) Ready(arg0 context.Context, arg1 int64) ([]*core.Cron, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Ready", arg0, arg1) - ret0, _ := ret[0].([]*core.Cron) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Ready indicates an expected call of Ready. -func (mr *MockCronStoreMockRecorder) Ready(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Ready", reflect.TypeOf((*MockCronStore)(nil).Ready), arg0, arg1) -} - -// Update mocks base method. -func (m *MockCronStore) Update(arg0 context.Context, arg1 *core.Cron) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Update", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// Update indicates an expected call of Update. -func (mr *MockCronStoreMockRecorder) Update(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Update", reflect.TypeOf((*MockCronStore)(nil).Update), arg0, arg1) -} - -// MockLogStore is a mock of LogStore interface. -type MockLogStore struct { - ctrl *gomock.Controller - recorder *MockLogStoreMockRecorder -} - -// MockLogStoreMockRecorder is the mock recorder for MockLogStore. -type MockLogStoreMockRecorder struct { - mock *MockLogStore -} - -// NewMockLogStore creates a new mock instance. -func NewMockLogStore(ctrl *gomock.Controller) *MockLogStore { - mock := &MockLogStore{ctrl: ctrl} - mock.recorder = &MockLogStoreMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockLogStore) EXPECT() *MockLogStoreMockRecorder { - return m.recorder -} - -// Create mocks base method. -func (m *MockLogStore) Create(arg0 context.Context, arg1 int64, arg2 io.Reader) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Create", arg0, arg1, arg2) - ret0, _ := ret[0].(error) - return ret0 -} - -// Create indicates an expected call of Create. -func (mr *MockLogStoreMockRecorder) Create(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Create", reflect.TypeOf((*MockLogStore)(nil).Create), arg0, arg1, arg2) -} - -// Delete mocks base method. -func (m *MockLogStore) Delete(arg0 context.Context, arg1 int64) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Delete", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// Delete indicates an expected call of Delete. -func (mr *MockLogStoreMockRecorder) Delete(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Delete", reflect.TypeOf((*MockLogStore)(nil).Delete), arg0, arg1) -} - -// Find mocks base method. -func (m *MockLogStore) Find(arg0 context.Context, arg1 int64) (io.ReadCloser, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Find", arg0, arg1) - ret0, _ := ret[0].(io.ReadCloser) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Find indicates an expected call of Find. 
-func (mr *MockLogStoreMockRecorder) Find(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Find", reflect.TypeOf((*MockLogStore)(nil).Find), arg0, arg1) -} - -// Update mocks base method. -func (m *MockLogStore) Update(arg0 context.Context, arg1 int64, arg2 io.Reader) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Update", arg0, arg1, arg2) - ret0, _ := ret[0].(error) - return ret0 -} - -// Update indicates an expected call of Update. -func (mr *MockLogStoreMockRecorder) Update(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Update", reflect.TypeOf((*MockLogStore)(nil).Update), arg0, arg1, arg2) -} - -// MockPermStore is a mock of PermStore interface. -type MockPermStore struct { - ctrl *gomock.Controller - recorder *MockPermStoreMockRecorder -} - -// MockPermStoreMockRecorder is the mock recorder for MockPermStore. -type MockPermStoreMockRecorder struct { - mock *MockPermStore -} - -// NewMockPermStore creates a new mock instance. -func NewMockPermStore(ctrl *gomock.Controller) *MockPermStore { - mock := &MockPermStore{ctrl: ctrl} - mock.recorder = &MockPermStoreMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockPermStore) EXPECT() *MockPermStoreMockRecorder { - return m.recorder -} - -// Delete mocks base method. -func (m *MockPermStore) Delete(arg0 context.Context, arg1 *core.Perm) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Delete", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// Delete indicates an expected call of Delete. -func (mr *MockPermStoreMockRecorder) Delete(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Delete", reflect.TypeOf((*MockPermStore)(nil).Delete), arg0, arg1) -} - -// Find mocks base method. -func (m *MockPermStore) Find(arg0 context.Context, arg1 string, arg2 int64) (*core.Perm, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Find", arg0, arg1, arg2) - ret0, _ := ret[0].(*core.Perm) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Find indicates an expected call of Find. -func (mr *MockPermStoreMockRecorder) Find(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Find", reflect.TypeOf((*MockPermStore)(nil).Find), arg0, arg1, arg2) -} - -// List mocks base method. -func (m *MockPermStore) List(arg0 context.Context, arg1 string) ([]*core.Collaborator, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "List", arg0, arg1) - ret0, _ := ret[0].([]*core.Collaborator) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// List indicates an expected call of List. -func (mr *MockPermStoreMockRecorder) List(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "List", reflect.TypeOf((*MockPermStore)(nil).List), arg0, arg1) -} - -// Update mocks base method. -func (m *MockPermStore) Update(arg0 context.Context, arg1 *core.Perm) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Update", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// Update indicates an expected call of Update. 
-func (mr *MockPermStoreMockRecorder) Update(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Update", reflect.TypeOf((*MockPermStore)(nil).Update), arg0, arg1) -} - -// MockSecretStore is a mock of SecretStore interface. -type MockSecretStore struct { - ctrl *gomock.Controller - recorder *MockSecretStoreMockRecorder -} - -// MockSecretStoreMockRecorder is the mock recorder for MockSecretStore. -type MockSecretStoreMockRecorder struct { - mock *MockSecretStore -} - -// NewMockSecretStore creates a new mock instance. -func NewMockSecretStore(ctrl *gomock.Controller) *MockSecretStore { - mock := &MockSecretStore{ctrl: ctrl} - mock.recorder = &MockSecretStoreMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockSecretStore) EXPECT() *MockSecretStoreMockRecorder { - return m.recorder -} - -// Create mocks base method. -func (m *MockSecretStore) Create(arg0 context.Context, arg1 *core.Secret) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Create", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// Create indicates an expected call of Create. -func (mr *MockSecretStoreMockRecorder) Create(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Create", reflect.TypeOf((*MockSecretStore)(nil).Create), arg0, arg1) -} - -// Delete mocks base method. -func (m *MockSecretStore) Delete(arg0 context.Context, arg1 *core.Secret) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Delete", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// Delete indicates an expected call of Delete. -func (mr *MockSecretStoreMockRecorder) Delete(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Delete", reflect.TypeOf((*MockSecretStore)(nil).Delete), arg0, arg1) -} - -// Find mocks base method. -func (m *MockSecretStore) Find(arg0 context.Context, arg1 int64) (*core.Secret, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Find", arg0, arg1) - ret0, _ := ret[0].(*core.Secret) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Find indicates an expected call of Find. -func (mr *MockSecretStoreMockRecorder) Find(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Find", reflect.TypeOf((*MockSecretStore)(nil).Find), arg0, arg1) -} - -// FindName mocks base method. -func (m *MockSecretStore) FindName(arg0 context.Context, arg1 int64, arg2 string) (*core.Secret, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "FindName", arg0, arg1, arg2) - ret0, _ := ret[0].(*core.Secret) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// FindName indicates an expected call of FindName. -func (mr *MockSecretStoreMockRecorder) FindName(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "FindName", reflect.TypeOf((*MockSecretStore)(nil).FindName), arg0, arg1, arg2) -} - -// List mocks base method. -func (m *MockSecretStore) List(arg0 context.Context, arg1 int64) ([]*core.Secret, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "List", arg0, arg1) - ret0, _ := ret[0].([]*core.Secret) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// List indicates an expected call of List. 
-func (mr *MockSecretStoreMockRecorder) List(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "List", reflect.TypeOf((*MockSecretStore)(nil).List), arg0, arg1) -} - -// Update mocks base method. -func (m *MockSecretStore) Update(arg0 context.Context, arg1 *core.Secret) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Update", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// Update indicates an expected call of Update. -func (mr *MockSecretStoreMockRecorder) Update(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Update", reflect.TypeOf((*MockSecretStore)(nil).Update), arg0, arg1) -} - -// MockGlobalSecretStore is a mock of GlobalSecretStore interface. -type MockGlobalSecretStore struct { - ctrl *gomock.Controller - recorder *MockGlobalSecretStoreMockRecorder -} - -// MockGlobalSecretStoreMockRecorder is the mock recorder for MockGlobalSecretStore. -type MockGlobalSecretStoreMockRecorder struct { - mock *MockGlobalSecretStore -} - -// NewMockGlobalSecretStore creates a new mock instance. -func NewMockGlobalSecretStore(ctrl *gomock.Controller) *MockGlobalSecretStore { - mock := &MockGlobalSecretStore{ctrl: ctrl} - mock.recorder = &MockGlobalSecretStoreMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockGlobalSecretStore) EXPECT() *MockGlobalSecretStoreMockRecorder { - return m.recorder -} - -// Create mocks base method. -func (m *MockGlobalSecretStore) Create(arg0 context.Context, arg1 *core.Secret) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Create", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// Create indicates an expected call of Create. -func (mr *MockGlobalSecretStoreMockRecorder) Create(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Create", reflect.TypeOf((*MockGlobalSecretStore)(nil).Create), arg0, arg1) -} - -// Delete mocks base method. -func (m *MockGlobalSecretStore) Delete(arg0 context.Context, arg1 *core.Secret) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Delete", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// Delete indicates an expected call of Delete. -func (mr *MockGlobalSecretStoreMockRecorder) Delete(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Delete", reflect.TypeOf((*MockGlobalSecretStore)(nil).Delete), arg0, arg1) -} - -// Find mocks base method. -func (m *MockGlobalSecretStore) Find(arg0 context.Context, arg1 int64) (*core.Secret, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Find", arg0, arg1) - ret0, _ := ret[0].(*core.Secret) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Find indicates an expected call of Find. -func (mr *MockGlobalSecretStoreMockRecorder) Find(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Find", reflect.TypeOf((*MockGlobalSecretStore)(nil).Find), arg0, arg1) -} - -// FindName mocks base method. -func (m *MockGlobalSecretStore) FindName(arg0 context.Context, arg1, arg2 string) (*core.Secret, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "FindName", arg0, arg1, arg2) - ret0, _ := ret[0].(*core.Secret) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// FindName indicates an expected call of FindName. 
-func (mr *MockGlobalSecretStoreMockRecorder) FindName(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "FindName", reflect.TypeOf((*MockGlobalSecretStore)(nil).FindName), arg0, arg1, arg2) -} - -// List mocks base method. -func (m *MockGlobalSecretStore) List(arg0 context.Context, arg1 string) ([]*core.Secret, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "List", arg0, arg1) - ret0, _ := ret[0].([]*core.Secret) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// List indicates an expected call of List. -func (mr *MockGlobalSecretStoreMockRecorder) List(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "List", reflect.TypeOf((*MockGlobalSecretStore)(nil).List), arg0, arg1) -} - -// ListAll mocks base method. -func (m *MockGlobalSecretStore) ListAll(arg0 context.Context) ([]*core.Secret, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ListAll", arg0) - ret0, _ := ret[0].([]*core.Secret) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// ListAll indicates an expected call of ListAll. -func (mr *MockGlobalSecretStoreMockRecorder) ListAll(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListAll", reflect.TypeOf((*MockGlobalSecretStore)(nil).ListAll), arg0) -} - -// Update mocks base method. -func (m *MockGlobalSecretStore) Update(arg0 context.Context, arg1 *core.Secret) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Update", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// Update indicates an expected call of Update. -func (mr *MockGlobalSecretStoreMockRecorder) Update(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Update", reflect.TypeOf((*MockGlobalSecretStore)(nil).Update), arg0, arg1) -} - -// MockStageStore is a mock of StageStore interface. -type MockStageStore struct { - ctrl *gomock.Controller - recorder *MockStageStoreMockRecorder -} - -// MockStageStoreMockRecorder is the mock recorder for MockStageStore. -type MockStageStoreMockRecorder struct { - mock *MockStageStore -} - -// NewMockStageStore creates a new mock instance. -func NewMockStageStore(ctrl *gomock.Controller) *MockStageStore { - mock := &MockStageStore{ctrl: ctrl} - mock.recorder = &MockStageStoreMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockStageStore) EXPECT() *MockStageStoreMockRecorder { - return m.recorder -} - -// Create mocks base method. -func (m *MockStageStore) Create(arg0 context.Context, arg1 *core.Stage) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Create", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// Create indicates an expected call of Create. -func (mr *MockStageStoreMockRecorder) Create(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Create", reflect.TypeOf((*MockStageStore)(nil).Create), arg0, arg1) -} - -// Find mocks base method. -func (m *MockStageStore) Find(arg0 context.Context, arg1 int64) (*core.Stage, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Find", arg0, arg1) - ret0, _ := ret[0].(*core.Stage) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Find indicates an expected call of Find. 
-func (mr *MockStageStoreMockRecorder) Find(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Find", reflect.TypeOf((*MockStageStore)(nil).Find), arg0, arg1) -} - -// FindNumber mocks base method. -func (m *MockStageStore) FindNumber(arg0 context.Context, arg1 int64, arg2 int) (*core.Stage, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "FindNumber", arg0, arg1, arg2) - ret0, _ := ret[0].(*core.Stage) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// FindNumber indicates an expected call of FindNumber. -func (mr *MockStageStoreMockRecorder) FindNumber(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "FindNumber", reflect.TypeOf((*MockStageStore)(nil).FindNumber), arg0, arg1, arg2) -} - -// List mocks base method. -func (m *MockStageStore) List(arg0 context.Context, arg1 int64) ([]*core.Stage, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "List", arg0, arg1) - ret0, _ := ret[0].([]*core.Stage) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// List indicates an expected call of List. -func (mr *MockStageStoreMockRecorder) List(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "List", reflect.TypeOf((*MockStageStore)(nil).List), arg0, arg1) -} - -// ListIncomplete mocks base method. -func (m *MockStageStore) ListIncomplete(arg0 context.Context) ([]*core.Stage, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ListIncomplete", arg0) - ret0, _ := ret[0].([]*core.Stage) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// ListIncomplete indicates an expected call of ListIncomplete. -func (mr *MockStageStoreMockRecorder) ListIncomplete(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListIncomplete", reflect.TypeOf((*MockStageStore)(nil).ListIncomplete), arg0) -} - -// ListState mocks base method. -func (m *MockStageStore) ListState(arg0 context.Context, arg1 string) ([]*core.Stage, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ListState", arg0, arg1) - ret0, _ := ret[0].([]*core.Stage) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// ListState indicates an expected call of ListState. -func (mr *MockStageStoreMockRecorder) ListState(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListState", reflect.TypeOf((*MockStageStore)(nil).ListState), arg0, arg1) -} - -// ListSteps mocks base method. -func (m *MockStageStore) ListSteps(arg0 context.Context, arg1 int64) ([]*core.Stage, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ListSteps", arg0, arg1) - ret0, _ := ret[0].([]*core.Stage) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// ListSteps indicates an expected call of ListSteps. -func (mr *MockStageStoreMockRecorder) ListSteps(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListSteps", reflect.TypeOf((*MockStageStore)(nil).ListSteps), arg0, arg1) -} - -// Update mocks base method. -func (m *MockStageStore) Update(arg0 context.Context, arg1 *core.Stage) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Update", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// Update indicates an expected call of Update. 
-func (mr *MockStageStoreMockRecorder) Update(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Update", reflect.TypeOf((*MockStageStore)(nil).Update), arg0, arg1) -} - -// MockStepStore is a mock of StepStore interface. -type MockStepStore struct { - ctrl *gomock.Controller - recorder *MockStepStoreMockRecorder -} - -// MockStepStoreMockRecorder is the mock recorder for MockStepStore. -type MockStepStoreMockRecorder struct { - mock *MockStepStore -} - -// NewMockStepStore creates a new mock instance. -func NewMockStepStore(ctrl *gomock.Controller) *MockStepStore { - mock := &MockStepStore{ctrl: ctrl} - mock.recorder = &MockStepStoreMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockStepStore) EXPECT() *MockStepStoreMockRecorder { - return m.recorder -} - -// Create mocks base method. -func (m *MockStepStore) Create(arg0 context.Context, arg1 *core.Step) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Create", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// Create indicates an expected call of Create. -func (mr *MockStepStoreMockRecorder) Create(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Create", reflect.TypeOf((*MockStepStore)(nil).Create), arg0, arg1) -} - -// Find mocks base method. -func (m *MockStepStore) Find(arg0 context.Context, arg1 int64) (*core.Step, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Find", arg0, arg1) - ret0, _ := ret[0].(*core.Step) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Find indicates an expected call of Find. -func (mr *MockStepStoreMockRecorder) Find(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Find", reflect.TypeOf((*MockStepStore)(nil).Find), arg0, arg1) -} - -// FindNumber mocks base method. -func (m *MockStepStore) FindNumber(arg0 context.Context, arg1 int64, arg2 int) (*core.Step, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "FindNumber", arg0, arg1, arg2) - ret0, _ := ret[0].(*core.Step) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// FindNumber indicates an expected call of FindNumber. -func (mr *MockStepStoreMockRecorder) FindNumber(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "FindNumber", reflect.TypeOf((*MockStepStore)(nil).FindNumber), arg0, arg1, arg2) -} - -// List mocks base method. -func (m *MockStepStore) List(arg0 context.Context, arg1 int64) ([]*core.Step, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "List", arg0, arg1) - ret0, _ := ret[0].([]*core.Step) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// List indicates an expected call of List. -func (mr *MockStepStoreMockRecorder) List(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "List", reflect.TypeOf((*MockStepStore)(nil).List), arg0, arg1) -} - -// Update mocks base method. -func (m *MockStepStore) Update(arg0 context.Context, arg1 *core.Step) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Update", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// Update indicates an expected call of Update. 
-func (mr *MockStepStoreMockRecorder) Update(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Update", reflect.TypeOf((*MockStepStore)(nil).Update), arg0, arg1) -} - -// MockRepositoryStore is a mock of RepositoryStore interface. -type MockRepositoryStore struct { - ctrl *gomock.Controller - recorder *MockRepositoryStoreMockRecorder -} - -// MockRepositoryStoreMockRecorder is the mock recorder for MockRepositoryStore. -type MockRepositoryStoreMockRecorder struct { - mock *MockRepositoryStore -} - -// NewMockRepositoryStore creates a new mock instance. -func NewMockRepositoryStore(ctrl *gomock.Controller) *MockRepositoryStore { - mock := &MockRepositoryStore{ctrl: ctrl} - mock.recorder = &MockRepositoryStoreMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockRepositoryStore) EXPECT() *MockRepositoryStoreMockRecorder { - return m.recorder -} - -// Activate mocks base method. -func (m *MockRepositoryStore) Activate(arg0 context.Context, arg1 *core.Repository) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Activate", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// Activate indicates an expected call of Activate. -func (mr *MockRepositoryStoreMockRecorder) Activate(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Activate", reflect.TypeOf((*MockRepositoryStore)(nil).Activate), arg0, arg1) -} - -// Count mocks base method. -func (m *MockRepositoryStore) Count(arg0 context.Context) (int64, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Count", arg0) - ret0, _ := ret[0].(int64) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Count indicates an expected call of Count. -func (mr *MockRepositoryStoreMockRecorder) Count(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Count", reflect.TypeOf((*MockRepositoryStore)(nil).Count), arg0) -} - -// Create mocks base method. -func (m *MockRepositoryStore) Create(arg0 context.Context, arg1 *core.Repository) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Create", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// Create indicates an expected call of Create. -func (mr *MockRepositoryStoreMockRecorder) Create(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Create", reflect.TypeOf((*MockRepositoryStore)(nil).Create), arg0, arg1) -} - -// Delete mocks base method. -func (m *MockRepositoryStore) Delete(arg0 context.Context, arg1 *core.Repository) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Delete", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// Delete indicates an expected call of Delete. -func (mr *MockRepositoryStoreMockRecorder) Delete(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Delete", reflect.TypeOf((*MockRepositoryStore)(nil).Delete), arg0, arg1) -} - -// Find mocks base method. -func (m *MockRepositoryStore) Find(arg0 context.Context, arg1 int64) (*core.Repository, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Find", arg0, arg1) - ret0, _ := ret[0].(*core.Repository) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Find indicates an expected call of Find. 
-func (mr *MockRepositoryStoreMockRecorder) Find(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Find", reflect.TypeOf((*MockRepositoryStore)(nil).Find), arg0, arg1) -} - -// FindName mocks base method. -func (m *MockRepositoryStore) FindName(arg0 context.Context, arg1, arg2 string) (*core.Repository, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "FindName", arg0, arg1, arg2) - ret0, _ := ret[0].(*core.Repository) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// FindName indicates an expected call of FindName. -func (mr *MockRepositoryStoreMockRecorder) FindName(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "FindName", reflect.TypeOf((*MockRepositoryStore)(nil).FindName), arg0, arg1, arg2) -} - -// Increment mocks base method. -func (m *MockRepositoryStore) Increment(arg0 context.Context, arg1 *core.Repository) (*core.Repository, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Increment", arg0, arg1) - ret0, _ := ret[0].(*core.Repository) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Increment indicates an expected call of Increment. -func (mr *MockRepositoryStoreMockRecorder) Increment(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Increment", reflect.TypeOf((*MockRepositoryStore)(nil).Increment), arg0, arg1) -} - -// List mocks base method. -func (m *MockRepositoryStore) List(arg0 context.Context, arg1 int64) ([]*core.Repository, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "List", arg0, arg1) - ret0, _ := ret[0].([]*core.Repository) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// List indicates an expected call of List. -func (mr *MockRepositoryStoreMockRecorder) List(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "List", reflect.TypeOf((*MockRepositoryStore)(nil).List), arg0, arg1) -} - -// ListAll mocks base method. -func (m *MockRepositoryStore) ListAll(arg0 context.Context, arg1, arg2 int) ([]*core.Repository, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ListAll", arg0, arg1, arg2) - ret0, _ := ret[0].([]*core.Repository) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// ListAll indicates an expected call of ListAll. -func (mr *MockRepositoryStoreMockRecorder) ListAll(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListAll", reflect.TypeOf((*MockRepositoryStore)(nil).ListAll), arg0, arg1, arg2) -} - -// ListIncomplete mocks base method. -func (m *MockRepositoryStore) ListIncomplete(arg0 context.Context) ([]*core.Repository, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ListIncomplete", arg0) - ret0, _ := ret[0].([]*core.Repository) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// ListIncomplete indicates an expected call of ListIncomplete. -func (mr *MockRepositoryStoreMockRecorder) ListIncomplete(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListIncomplete", reflect.TypeOf((*MockRepositoryStore)(nil).ListIncomplete), arg0) -} - -// ListLatest mocks base method. 
-func (m *MockRepositoryStore) ListLatest(arg0 context.Context, arg1 int64) ([]*core.Repository, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ListLatest", arg0, arg1) - ret0, _ := ret[0].([]*core.Repository) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// ListLatest indicates an expected call of ListLatest. -func (mr *MockRepositoryStoreMockRecorder) ListLatest(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListLatest", reflect.TypeOf((*MockRepositoryStore)(nil).ListLatest), arg0, arg1) -} - -// ListRecent mocks base method. -func (m *MockRepositoryStore) ListRecent(arg0 context.Context, arg1 int64) ([]*core.Repository, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ListRecent", arg0, arg1) - ret0, _ := ret[0].([]*core.Repository) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// ListRecent indicates an expected call of ListRecent. -func (mr *MockRepositoryStoreMockRecorder) ListRecent(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListRecent", reflect.TypeOf((*MockRepositoryStore)(nil).ListRecent), arg0, arg1) -} - -// ListRunningStatus mocks base method. -func (m *MockRepositoryStore) ListRunningStatus(arg0 context.Context) ([]*core.RepoBuildStage, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ListRunningStatus", arg0) - ret0, _ := ret[0].([]*core.RepoBuildStage) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// ListRunningStatus indicates an expected call of ListRunningStatus. -func (mr *MockRepositoryStoreMockRecorder) ListRunningStatus(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListRunningStatus", reflect.TypeOf((*MockRepositoryStore)(nil).ListRunningStatus), arg0) -} - -// Update mocks base method. -func (m *MockRepositoryStore) Update(arg0 context.Context, arg1 *core.Repository) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Update", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// Update indicates an expected call of Update. -func (mr *MockRepositoryStoreMockRecorder) Update(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Update", reflect.TypeOf((*MockRepositoryStore)(nil).Update), arg0, arg1) -} - -// MockUserStore is a mock of UserStore interface. -type MockUserStore struct { - ctrl *gomock.Controller - recorder *MockUserStoreMockRecorder -} - -// MockUserStoreMockRecorder is the mock recorder for MockUserStore. -type MockUserStoreMockRecorder struct { - mock *MockUserStore -} - -// NewMockUserStore creates a new mock instance. -func NewMockUserStore(ctrl *gomock.Controller) *MockUserStore { - mock := &MockUserStore{ctrl: ctrl} - mock.recorder = &MockUserStoreMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockUserStore) EXPECT() *MockUserStoreMockRecorder { - return m.recorder -} - -// Count mocks base method. -func (m *MockUserStore) Count(arg0 context.Context) (int64, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Count", arg0) - ret0, _ := ret[0].(int64) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Count indicates an expected call of Count. 
-func (mr *MockUserStoreMockRecorder) Count(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Count", reflect.TypeOf((*MockUserStore)(nil).Count), arg0) -} - -// CountHuman mocks base method. -func (m *MockUserStore) CountHuman(arg0 context.Context) (int64, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "CountHuman", arg0) - ret0, _ := ret[0].(int64) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// CountHuman indicates an expected call of CountHuman. -func (mr *MockUserStoreMockRecorder) CountHuman(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CountHuman", reflect.TypeOf((*MockUserStore)(nil).CountHuman), arg0) -} - -// Create mocks base method. -func (m *MockUserStore) Create(arg0 context.Context, arg1 *core.User) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Create", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// Create indicates an expected call of Create. -func (mr *MockUserStoreMockRecorder) Create(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Create", reflect.TypeOf((*MockUserStore)(nil).Create), arg0, arg1) -} - -// Delete mocks base method. -func (m *MockUserStore) Delete(arg0 context.Context, arg1 *core.User) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Delete", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// Delete indicates an expected call of Delete. -func (mr *MockUserStoreMockRecorder) Delete(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Delete", reflect.TypeOf((*MockUserStore)(nil).Delete), arg0, arg1) -} - -// Find mocks base method. -func (m *MockUserStore) Find(arg0 context.Context, arg1 int64) (*core.User, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Find", arg0, arg1) - ret0, _ := ret[0].(*core.User) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Find indicates an expected call of Find. -func (mr *MockUserStoreMockRecorder) Find(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Find", reflect.TypeOf((*MockUserStore)(nil).Find), arg0, arg1) -} - -// FindLogin mocks base method. -func (m *MockUserStore) FindLogin(arg0 context.Context, arg1 string) (*core.User, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "FindLogin", arg0, arg1) - ret0, _ := ret[0].(*core.User) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// FindLogin indicates an expected call of FindLogin. -func (mr *MockUserStoreMockRecorder) FindLogin(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "FindLogin", reflect.TypeOf((*MockUserStore)(nil).FindLogin), arg0, arg1) -} - -// FindToken mocks base method. -func (m *MockUserStore) FindToken(arg0 context.Context, arg1 string) (*core.User, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "FindToken", arg0, arg1) - ret0, _ := ret[0].(*core.User) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// FindToken indicates an expected call of FindToken. -func (mr *MockUserStoreMockRecorder) FindToken(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "FindToken", reflect.TypeOf((*MockUserStore)(nil).FindToken), arg0, arg1) -} - -// List mocks base method. 
-func (m *MockUserStore) List(arg0 context.Context) ([]*core.User, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "List", arg0) - ret0, _ := ret[0].([]*core.User) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// List indicates an expected call of List. -func (mr *MockUserStoreMockRecorder) List(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "List", reflect.TypeOf((*MockUserStore)(nil).List), arg0) -} - -// ListRange mocks base method. -func (m *MockUserStore) ListRange(arg0 context.Context, arg1 core.UserParams) ([]*core.User, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ListRange", arg0, arg1) - ret0, _ := ret[0].([]*core.User) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// ListRange indicates an expected call of ListRange. -func (mr *MockUserStoreMockRecorder) ListRange(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListRange", reflect.TypeOf((*MockUserStore)(nil).ListRange), arg0, arg1) -} - -// Update mocks base method. -func (m *MockUserStore) Update(arg0 context.Context, arg1 *core.User) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Update", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// Update indicates an expected call of Update. -func (mr *MockUserStoreMockRecorder) Update(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Update", reflect.TypeOf((*MockUserStore)(nil).Update), arg0, arg1) -} - -// MockScheduler is a mock of Scheduler interface. -type MockScheduler struct { - ctrl *gomock.Controller - recorder *MockSchedulerMockRecorder -} - -// MockSchedulerMockRecorder is the mock recorder for MockScheduler. -type MockSchedulerMockRecorder struct { - mock *MockScheduler -} - -// NewMockScheduler creates a new mock instance. -func NewMockScheduler(ctrl *gomock.Controller) *MockScheduler { - mock := &MockScheduler{ctrl: ctrl} - mock.recorder = &MockSchedulerMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockScheduler) EXPECT() *MockSchedulerMockRecorder { - return m.recorder -} - -// Cancel mocks base method. -func (m *MockScheduler) Cancel(arg0 context.Context, arg1 int64) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Cancel", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// Cancel indicates an expected call of Cancel. -func (mr *MockSchedulerMockRecorder) Cancel(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Cancel", reflect.TypeOf((*MockScheduler)(nil).Cancel), arg0, arg1) -} - -// Cancelled mocks base method. -func (m *MockScheduler) Cancelled(arg0 context.Context, arg1 int64) (bool, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Cancelled", arg0, arg1) - ret0, _ := ret[0].(bool) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Cancelled indicates an expected call of Cancelled. -func (mr *MockSchedulerMockRecorder) Cancelled(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Cancelled", reflect.TypeOf((*MockScheduler)(nil).Cancelled), arg0, arg1) -} - -// Pause mocks base method. 
-func (m *MockScheduler) Pause(arg0 context.Context) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Pause", arg0) - ret0, _ := ret[0].(error) - return ret0 -} - -// Pause indicates an expected call of Pause. -func (mr *MockSchedulerMockRecorder) Pause(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Pause", reflect.TypeOf((*MockScheduler)(nil).Pause), arg0) -} - -// Request mocks base method. -func (m *MockScheduler) Request(arg0 context.Context, arg1 core.Filter) (*core.Stage, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Request", arg0, arg1) - ret0, _ := ret[0].(*core.Stage) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Request indicates an expected call of Request. -func (mr *MockSchedulerMockRecorder) Request(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Request", reflect.TypeOf((*MockScheduler)(nil).Request), arg0, arg1) -} - -// Resume mocks base method. -func (m *MockScheduler) Resume(arg0 context.Context) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Resume", arg0) - ret0, _ := ret[0].(error) - return ret0 -} - -// Resume indicates an expected call of Resume. -func (mr *MockSchedulerMockRecorder) Resume(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Resume", reflect.TypeOf((*MockScheduler)(nil).Resume), arg0) -} - -// Schedule mocks base method. -func (m *MockScheduler) Schedule(arg0 context.Context, arg1 *core.Stage) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Schedule", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// Schedule indicates an expected call of Schedule. -func (mr *MockSchedulerMockRecorder) Schedule(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Schedule", reflect.TypeOf((*MockScheduler)(nil).Schedule), arg0, arg1) -} - -// Stats mocks base method. -func (m *MockScheduler) Stats(arg0 context.Context) (interface{}, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Stats", arg0) - ret0, _ := ret[0].(interface{}) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Stats indicates an expected call of Stats. -func (mr *MockSchedulerMockRecorder) Stats(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Stats", reflect.TypeOf((*MockScheduler)(nil).Stats), arg0) -} - -// MockSession is a mock of Session interface. -type MockSession struct { - ctrl *gomock.Controller - recorder *MockSessionMockRecorder -} - -// MockSessionMockRecorder is the mock recorder for MockSession. -type MockSessionMockRecorder struct { - mock *MockSession -} - -// NewMockSession creates a new mock instance. -func NewMockSession(ctrl *gomock.Controller) *MockSession { - mock := &MockSession{ctrl: ctrl} - mock.recorder = &MockSessionMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockSession) EXPECT() *MockSessionMockRecorder { - return m.recorder -} - -// Create mocks base method. -func (m *MockSession) Create(arg0 http.ResponseWriter, arg1 *core.User) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Create", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// Create indicates an expected call of Create. 
-func (mr *MockSessionMockRecorder) Create(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Create", reflect.TypeOf((*MockSession)(nil).Create), arg0, arg1) -} - -// Delete mocks base method. -func (m *MockSession) Delete(arg0 http.ResponseWriter) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Delete", arg0) - ret0, _ := ret[0].(error) - return ret0 -} - -// Delete indicates an expected call of Delete. -func (mr *MockSessionMockRecorder) Delete(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Delete", reflect.TypeOf((*MockSession)(nil).Delete), arg0) -} - -// Get mocks base method. -func (m *MockSession) Get(arg0 *http.Request) (*core.User, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Get", arg0) - ret0, _ := ret[0].(*core.User) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Get indicates an expected call of Get. -func (mr *MockSessionMockRecorder) Get(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Get", reflect.TypeOf((*MockSession)(nil).Get), arg0) -} - -// MockOrganizationService is a mock of OrganizationService interface. -type MockOrganizationService struct { - ctrl *gomock.Controller - recorder *MockOrganizationServiceMockRecorder -} - -// MockOrganizationServiceMockRecorder is the mock recorder for MockOrganizationService. -type MockOrganizationServiceMockRecorder struct { - mock *MockOrganizationService -} - -// NewMockOrganizationService creates a new mock instance. -func NewMockOrganizationService(ctrl *gomock.Controller) *MockOrganizationService { - mock := &MockOrganizationService{ctrl: ctrl} - mock.recorder = &MockOrganizationServiceMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockOrganizationService) EXPECT() *MockOrganizationServiceMockRecorder { - return m.recorder -} - -// List mocks base method. -func (m *MockOrganizationService) List(arg0 context.Context, arg1 *core.User) ([]*core.Organization, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "List", arg0, arg1) - ret0, _ := ret[0].([]*core.Organization) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// List indicates an expected call of List. -func (mr *MockOrganizationServiceMockRecorder) List(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "List", reflect.TypeOf((*MockOrganizationService)(nil).List), arg0, arg1) -} - -// Membership mocks base method. -func (m *MockOrganizationService) Membership(arg0 context.Context, arg1 *core.User, arg2 string) (bool, bool, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Membership", arg0, arg1, arg2) - ret0, _ := ret[0].(bool) - ret1, _ := ret[1].(bool) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 -} - -// Membership indicates an expected call of Membership. -func (mr *MockOrganizationServiceMockRecorder) Membership(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Membership", reflect.TypeOf((*MockOrganizationService)(nil).Membership), arg0, arg1, arg2) -} - -// MockSecretService is a mock of SecretService interface. -type MockSecretService struct { - ctrl *gomock.Controller - recorder *MockSecretServiceMockRecorder -} - -// MockSecretServiceMockRecorder is the mock recorder for MockSecretService. 
-type MockSecretServiceMockRecorder struct { - mock *MockSecretService -} - -// NewMockSecretService creates a new mock instance. -func NewMockSecretService(ctrl *gomock.Controller) *MockSecretService { - mock := &MockSecretService{ctrl: ctrl} - mock.recorder = &MockSecretServiceMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockSecretService) EXPECT() *MockSecretServiceMockRecorder { - return m.recorder -} - -// Find mocks base method. -func (m *MockSecretService) Find(arg0 context.Context, arg1 *core.SecretArgs) (*core.Secret, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Find", arg0, arg1) - ret0, _ := ret[0].(*core.Secret) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Find indicates an expected call of Find. -func (mr *MockSecretServiceMockRecorder) Find(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Find", reflect.TypeOf((*MockSecretService)(nil).Find), arg0, arg1) -} - -// MockRegistryService is a mock of RegistryService interface. -type MockRegistryService struct { - ctrl *gomock.Controller - recorder *MockRegistryServiceMockRecorder -} - -// MockRegistryServiceMockRecorder is the mock recorder for MockRegistryService. -type MockRegistryServiceMockRecorder struct { - mock *MockRegistryService -} - -// NewMockRegistryService creates a new mock instance. -func NewMockRegistryService(ctrl *gomock.Controller) *MockRegistryService { - mock := &MockRegistryService{ctrl: ctrl} - mock.recorder = &MockRegistryServiceMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockRegistryService) EXPECT() *MockRegistryServiceMockRecorder { - return m.recorder -} - -// List mocks base method. -func (m *MockRegistryService) List(arg0 context.Context, arg1 *core.RegistryArgs) ([]*core.Registry, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "List", arg0, arg1) - ret0, _ := ret[0].([]*core.Registry) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// List indicates an expected call of List. -func (mr *MockRegistryServiceMockRecorder) List(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "List", reflect.TypeOf((*MockRegistryService)(nil).List), arg0, arg1) -} - -// MockConfigService is a mock of ConfigService interface. -type MockConfigService struct { - ctrl *gomock.Controller - recorder *MockConfigServiceMockRecorder -} - -// MockConfigServiceMockRecorder is the mock recorder for MockConfigService. -type MockConfigServiceMockRecorder struct { - mock *MockConfigService -} - -// NewMockConfigService creates a new mock instance. -func NewMockConfigService(ctrl *gomock.Controller) *MockConfigService { - mock := &MockConfigService{ctrl: ctrl} - mock.recorder = &MockConfigServiceMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockConfigService) EXPECT() *MockConfigServiceMockRecorder { - return m.recorder -} - -// Find mocks base method. -func (m *MockConfigService) Find(arg0 context.Context, arg1 *core.ConfigArgs) (*core.Config, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Find", arg0, arg1) - ret0, _ := ret[0].(*core.Config) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Find indicates an expected call of Find. 
-func (mr *MockConfigServiceMockRecorder) Find(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Find", reflect.TypeOf((*MockConfigService)(nil).Find), arg0, arg1) -} - -// MockTransferer is a mock of Transferer interface. -type MockTransferer struct { - ctrl *gomock.Controller - recorder *MockTransfererMockRecorder -} - -// MockTransfererMockRecorder is the mock recorder for MockTransferer. -type MockTransfererMockRecorder struct { - mock *MockTransferer -} - -// NewMockTransferer creates a new mock instance. -func NewMockTransferer(ctrl *gomock.Controller) *MockTransferer { - mock := &MockTransferer{ctrl: ctrl} - mock.recorder = &MockTransfererMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockTransferer) EXPECT() *MockTransfererMockRecorder { - return m.recorder -} - -// Transfer mocks base method. -func (m *MockTransferer) Transfer(arg0 context.Context, arg1 *core.User) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Transfer", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// Transfer indicates an expected call of Transfer. -func (mr *MockTransfererMockRecorder) Transfer(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Transfer", reflect.TypeOf((*MockTransferer)(nil).Transfer), arg0, arg1) -} - -// MockTriggerer is a mock of Triggerer interface. -type MockTriggerer struct { - ctrl *gomock.Controller - recorder *MockTriggererMockRecorder -} - -// MockTriggererMockRecorder is the mock recorder for MockTriggerer. -type MockTriggererMockRecorder struct { - mock *MockTriggerer -} - -// NewMockTriggerer creates a new mock instance. -func NewMockTriggerer(ctrl *gomock.Controller) *MockTriggerer { - mock := &MockTriggerer{ctrl: ctrl} - mock.recorder = &MockTriggererMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockTriggerer) EXPECT() *MockTriggererMockRecorder { - return m.recorder -} - -// Trigger mocks base method. -func (m *MockTriggerer) Trigger(arg0 context.Context, arg1 *core.Repository, arg2 *core.Hook) (*core.Build, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Trigger", arg0, arg1, arg2) - ret0, _ := ret[0].(*core.Build) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Trigger indicates an expected call of Trigger. -func (mr *MockTriggererMockRecorder) Trigger(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Trigger", reflect.TypeOf((*MockTriggerer)(nil).Trigger), arg0, arg1, arg2) -} - -// MockSyncer is a mock of Syncer interface. -type MockSyncer struct { - ctrl *gomock.Controller - recorder *MockSyncerMockRecorder -} - -// MockSyncerMockRecorder is the mock recorder for MockSyncer. -type MockSyncerMockRecorder struct { - mock *MockSyncer -} - -// NewMockSyncer creates a new mock instance. -func NewMockSyncer(ctrl *gomock.Controller) *MockSyncer { - mock := &MockSyncer{ctrl: ctrl} - mock.recorder = &MockSyncerMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockSyncer) EXPECT() *MockSyncerMockRecorder { - return m.recorder -} - -// Sync mocks base method. 
-func (m *MockSyncer) Sync(arg0 context.Context, arg1 *core.User) (*core.Batch, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Sync", arg0, arg1) - ret0, _ := ret[0].(*core.Batch) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Sync indicates an expected call of Sync. -func (mr *MockSyncerMockRecorder) Sync(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Sync", reflect.TypeOf((*MockSyncer)(nil).Sync), arg0, arg1) -} - -// MockLogStream is a mock of LogStream interface. -type MockLogStream struct { - ctrl *gomock.Controller - recorder *MockLogStreamMockRecorder -} - -// MockLogStreamMockRecorder is the mock recorder for MockLogStream. -type MockLogStreamMockRecorder struct { - mock *MockLogStream -} - -// NewMockLogStream creates a new mock instance. -func NewMockLogStream(ctrl *gomock.Controller) *MockLogStream { - mock := &MockLogStream{ctrl: ctrl} - mock.recorder = &MockLogStreamMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockLogStream) EXPECT() *MockLogStreamMockRecorder { - return m.recorder -} - -// Create mocks base method. -func (m *MockLogStream) Create(arg0 context.Context, arg1 int64) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Create", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// Create indicates an expected call of Create. -func (mr *MockLogStreamMockRecorder) Create(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Create", reflect.TypeOf((*MockLogStream)(nil).Create), arg0, arg1) -} - -// Delete mocks base method. -func (m *MockLogStream) Delete(arg0 context.Context, arg1 int64) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Delete", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// Delete indicates an expected call of Delete. -func (mr *MockLogStreamMockRecorder) Delete(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Delete", reflect.TypeOf((*MockLogStream)(nil).Delete), arg0, arg1) -} - -// Info mocks base method. -func (m *MockLogStream) Info(arg0 context.Context) *core.LogStreamInfo { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Info", arg0) - ret0, _ := ret[0].(*core.LogStreamInfo) - return ret0 -} - -// Info indicates an expected call of Info. -func (mr *MockLogStreamMockRecorder) Info(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Info", reflect.TypeOf((*MockLogStream)(nil).Info), arg0) -} - -// Tail mocks base method. -func (m *MockLogStream) Tail(arg0 context.Context, arg1 int64) (<-chan *core.Line, <-chan error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Tail", arg0, arg1) - ret0, _ := ret[0].(<-chan *core.Line) - ret1, _ := ret[1].(<-chan error) - return ret0, ret1 -} - -// Tail indicates an expected call of Tail. -func (mr *MockLogStreamMockRecorder) Tail(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Tail", reflect.TypeOf((*MockLogStream)(nil).Tail), arg0, arg1) -} - -// Write mocks base method. -func (m *MockLogStream) Write(arg0 context.Context, arg1 int64, arg2 *core.Line) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Write", arg0, arg1, arg2) - ret0, _ := ret[0].(error) - return ret0 -} - -// Write indicates an expected call of Write. 
-func (mr *MockLogStreamMockRecorder) Write(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Write", reflect.TypeOf((*MockLogStream)(nil).Write), arg0, arg1, arg2) -} - -// MockWebhookSender is a mock of WebhookSender interface. -type MockWebhookSender struct { - ctrl *gomock.Controller - recorder *MockWebhookSenderMockRecorder -} - -// MockWebhookSenderMockRecorder is the mock recorder for MockWebhookSender. -type MockWebhookSenderMockRecorder struct { - mock *MockWebhookSender -} - -// NewMockWebhookSender creates a new mock instance. -func NewMockWebhookSender(ctrl *gomock.Controller) *MockWebhookSender { - mock := &MockWebhookSender{ctrl: ctrl} - mock.recorder = &MockWebhookSenderMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockWebhookSender) EXPECT() *MockWebhookSenderMockRecorder { - return m.recorder -} - -// Send mocks base method. -func (m *MockWebhookSender) Send(arg0 context.Context, arg1 *core.WebhookData) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Send", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// Send indicates an expected call of Send. -func (mr *MockWebhookSenderMockRecorder) Send(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Send", reflect.TypeOf((*MockWebhookSender)(nil).Send), arg0, arg1) -} - -// MockLicenseService is a mock of LicenseService interface. -type MockLicenseService struct { - ctrl *gomock.Controller - recorder *MockLicenseServiceMockRecorder -} - -// MockLicenseServiceMockRecorder is the mock recorder for MockLicenseService. -type MockLicenseServiceMockRecorder struct { - mock *MockLicenseService -} - -// NewMockLicenseService creates a new mock instance. -func NewMockLicenseService(ctrl *gomock.Controller) *MockLicenseService { - mock := &MockLicenseService{ctrl: ctrl} - mock.recorder = &MockLicenseServiceMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockLicenseService) EXPECT() *MockLicenseServiceMockRecorder { - return m.recorder -} - -// Exceeded mocks base method. -func (m *MockLicenseService) Exceeded(arg0 context.Context) (bool, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Exceeded", arg0) - ret0, _ := ret[0].(bool) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Exceeded indicates an expected call of Exceeded. -func (mr *MockLicenseServiceMockRecorder) Exceeded(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Exceeded", reflect.TypeOf((*MockLicenseService)(nil).Exceeded), arg0) -} - -// Expired mocks base method. -func (m *MockLicenseService) Expired(arg0 context.Context) bool { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Expired", arg0) - ret0, _ := ret[0].(bool) - return ret0 -} - -// Expired indicates an expected call of Expired. -func (mr *MockLicenseServiceMockRecorder) Expired(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Expired", reflect.TypeOf((*MockLicenseService)(nil).Expired), arg0) -} - -// MockTemplateStore is a mock of TemplateStore interface. -type MockTemplateStore struct { - ctrl *gomock.Controller - recorder *MockTemplateStoreMockRecorder -} - -// MockTemplateStoreMockRecorder is the mock recorder for MockTemplateStore. 
-type MockTemplateStoreMockRecorder struct { - mock *MockTemplateStore -} - -// NewMockTemplateStore creates a new mock instance. -func NewMockTemplateStore(ctrl *gomock.Controller) *MockTemplateStore { - mock := &MockTemplateStore{ctrl: ctrl} - mock.recorder = &MockTemplateStoreMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockTemplateStore) EXPECT() *MockTemplateStoreMockRecorder { - return m.recorder -} - -// Create mocks base method. -func (m *MockTemplateStore) Create(arg0 context.Context, arg1 *core.Template) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Create", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// Create indicates an expected call of Create. -func (mr *MockTemplateStoreMockRecorder) Create(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Create", reflect.TypeOf((*MockTemplateStore)(nil).Create), arg0, arg1) -} - -// Delete mocks base method. -func (m *MockTemplateStore) Delete(arg0 context.Context, arg1 *core.Template) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Delete", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// Delete indicates an expected call of Delete. -func (mr *MockTemplateStoreMockRecorder) Delete(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Delete", reflect.TypeOf((*MockTemplateStore)(nil).Delete), arg0, arg1) -} - -// Find mocks base method. -func (m *MockTemplateStore) Find(arg0 context.Context, arg1 int64) (*core.Template, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Find", arg0, arg1) - ret0, _ := ret[0].(*core.Template) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Find indicates an expected call of Find. -func (mr *MockTemplateStoreMockRecorder) Find(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Find", reflect.TypeOf((*MockTemplateStore)(nil).Find), arg0, arg1) -} - -// FindName mocks base method. -func (m *MockTemplateStore) FindName(arg0 context.Context, arg1, arg2 string) (*core.Template, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "FindName", arg0, arg1, arg2) - ret0, _ := ret[0].(*core.Template) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// FindName indicates an expected call of FindName. -func (mr *MockTemplateStoreMockRecorder) FindName(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "FindName", reflect.TypeOf((*MockTemplateStore)(nil).FindName), arg0, arg1, arg2) -} - -// List mocks base method. -func (m *MockTemplateStore) List(arg0 context.Context, arg1 string) ([]*core.Template, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "List", arg0, arg1) - ret0, _ := ret[0].([]*core.Template) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// List indicates an expected call of List. -func (mr *MockTemplateStoreMockRecorder) List(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "List", reflect.TypeOf((*MockTemplateStore)(nil).List), arg0, arg1) -} - -// ListAll mocks base method. 
-func (m *MockTemplateStore) ListAll(arg0 context.Context) ([]*core.Template, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ListAll", arg0) - ret0, _ := ret[0].([]*core.Template) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// ListAll indicates an expected call of ListAll. -func (mr *MockTemplateStoreMockRecorder) ListAll(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListAll", reflect.TypeOf((*MockTemplateStore)(nil).ListAll), arg0) -} - -// Update mocks base method. -func (m *MockTemplateStore) Update(arg0 context.Context, arg1 *core.Template) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Update", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// Update indicates an expected call of Update. -func (mr *MockTemplateStoreMockRecorder) Update(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Update", reflect.TypeOf((*MockTemplateStore)(nil).Update), arg0, arg1) -} - -// MockCardStore is a mock of CardStore interface. -type MockCardStore struct { - ctrl *gomock.Controller - recorder *MockCardStoreMockRecorder -} - -// MockCardStoreMockRecorder is the mock recorder for MockCardStore. -type MockCardStoreMockRecorder struct { - mock *MockCardStore -} - -// NewMockCardStore creates a new mock instance. -func NewMockCardStore(ctrl *gomock.Controller) *MockCardStore { - mock := &MockCardStore{ctrl: ctrl} - mock.recorder = &MockCardStoreMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockCardStore) EXPECT() *MockCardStoreMockRecorder { - return m.recorder -} - -// Create mocks base method. -func (m *MockCardStore) Create(arg0 context.Context, arg1 int64, arg2 io.Reader) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Create", arg0, arg1, arg2) - ret0, _ := ret[0].(error) - return ret0 -} - -// Create indicates an expected call of Create. -func (mr *MockCardStoreMockRecorder) Create(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Create", reflect.TypeOf((*MockCardStore)(nil).Create), arg0, arg1, arg2) -} - -// Delete mocks base method. -func (m *MockCardStore) Delete(arg0 context.Context, arg1 int64) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Delete", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// Delete indicates an expected call of Delete. -func (mr *MockCardStoreMockRecorder) Delete(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Delete", reflect.TypeOf((*MockCardStore)(nil).Delete), arg0, arg1) -} - -// Find mocks base method. -func (m *MockCardStore) Find(arg0 context.Context, arg1 int64) (io.ReadCloser, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Find", arg0, arg1) - ret0, _ := ret[0].(io.ReadCloser) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Find indicates an expected call of Find. -func (mr *MockCardStoreMockRecorder) Find(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Find", reflect.TypeOf((*MockCardStore)(nil).Find), arg0, arg1) -} - -// Update mocks base method. 
-func (m *MockCardStore) Update(arg0 context.Context, arg1 int64, arg2 io.Reader) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Update", arg0, arg1, arg2) - ret0, _ := ret[0].(error) - return ret0 -} - -// Update indicates an expected call of Update. -func (mr *MockCardStoreMockRecorder) Update(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Update", reflect.TypeOf((*MockCardStore)(nil).Update), arg0, arg1, arg2) -} diff --git a/mock/mockscm/mock.go b/mock/mockscm/mock.go deleted file mode 100644 index 3d25749bdd..0000000000 --- a/mock/mockscm/mock.go +++ /dev/null @@ -1,9 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package mockscm - -//go:generate mockgen -package=mockscm -destination=mock_gen.go github.com/drone/go-scm/scm ContentService,GitService,OrganizationService,PullRequestService,RepositoryService,UserService diff --git a/mock/mockscm/mock_gen.go b/mock/mockscm/mock_gen.go deleted file mode 100644 index ab1ce294ea..0000000000 --- a/mock/mockscm/mock_gen.go +++ /dev/null @@ -1,815 +0,0 @@ -// Code generated by MockGen. DO NOT EDIT. -// Source: github.com/drone/go-scm/scm (interfaces: ContentService,GitService,OrganizationService,PullRequestService,RepositoryService,UserService) - -// Package mockscm is a generated GoMock package. -package mockscm - -import ( - context "context" - reflect "reflect" - - scm "github.com/drone/go-scm/scm" - gomock "github.com/golang/mock/gomock" -) - -// MockContentService is a mock of ContentService interface. -type MockContentService struct { - ctrl *gomock.Controller - recorder *MockContentServiceMockRecorder -} - -// MockContentServiceMockRecorder is the mock recorder for MockContentService. -type MockContentServiceMockRecorder struct { - mock *MockContentService -} - -// NewMockContentService creates a new mock instance. -func NewMockContentService(ctrl *gomock.Controller) *MockContentService { - mock := &MockContentService{ctrl: ctrl} - mock.recorder = &MockContentServiceMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockContentService) EXPECT() *MockContentServiceMockRecorder { - return m.recorder -} - -// Create mocks base method. -func (m *MockContentService) Create(arg0 context.Context, arg1, arg2 string, arg3 *scm.ContentParams) (*scm.Response, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Create", arg0, arg1, arg2, arg3) - ret0, _ := ret[0].(*scm.Response) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Create indicates an expected call of Create. -func (mr *MockContentServiceMockRecorder) Create(arg0, arg1, arg2, arg3 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Create", reflect.TypeOf((*MockContentService)(nil).Create), arg0, arg1, arg2, arg3) -} - -// Delete mocks base method. -func (m *MockContentService) Delete(arg0 context.Context, arg1, arg2 string, arg3 *scm.ContentParams) (*scm.Response, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Delete", arg0, arg1, arg2, arg3) - ret0, _ := ret[0].(*scm.Response) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Delete indicates an expected call of Delete. 
-func (mr *MockContentServiceMockRecorder) Delete(arg0, arg1, arg2, arg3 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Delete", reflect.TypeOf((*MockContentService)(nil).Delete), arg0, arg1, arg2, arg3) -} - -// Find mocks base method. -func (m *MockContentService) Find(arg0 context.Context, arg1, arg2, arg3 string) (*scm.Content, *scm.Response, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Find", arg0, arg1, arg2, arg3) - ret0, _ := ret[0].(*scm.Content) - ret1, _ := ret[1].(*scm.Response) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 -} - -// Find indicates an expected call of Find. -func (mr *MockContentServiceMockRecorder) Find(arg0, arg1, arg2, arg3 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Find", reflect.TypeOf((*MockContentService)(nil).Find), arg0, arg1, arg2, arg3) -} - -// List mocks base method. -func (m *MockContentService) List(arg0 context.Context, arg1, arg2, arg3 string, arg4 scm.ListOptions) ([]*scm.ContentInfo, *scm.Response, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "List", arg0, arg1, arg2, arg3, arg4) - ret0, _ := ret[0].([]*scm.ContentInfo) - ret1, _ := ret[1].(*scm.Response) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 -} - -// List indicates an expected call of List. -func (mr *MockContentServiceMockRecorder) List(arg0, arg1, arg2, arg3, arg4 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "List", reflect.TypeOf((*MockContentService)(nil).List), arg0, arg1, arg2, arg3, arg4) -} - -// Update mocks base method. -func (m *MockContentService) Update(arg0 context.Context, arg1, arg2 string, arg3 *scm.ContentParams) (*scm.Response, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Update", arg0, arg1, arg2, arg3) - ret0, _ := ret[0].(*scm.Response) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Update indicates an expected call of Update. -func (mr *MockContentServiceMockRecorder) Update(arg0, arg1, arg2, arg3 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Update", reflect.TypeOf((*MockContentService)(nil).Update), arg0, arg1, arg2, arg3) -} - -// MockGitService is a mock of GitService interface. -type MockGitService struct { - ctrl *gomock.Controller - recorder *MockGitServiceMockRecorder -} - -// MockGitServiceMockRecorder is the mock recorder for MockGitService. -type MockGitServiceMockRecorder struct { - mock *MockGitService -} - -// NewMockGitService creates a new mock instance. -func NewMockGitService(ctrl *gomock.Controller) *MockGitService { - mock := &MockGitService{ctrl: ctrl} - mock.recorder = &MockGitServiceMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockGitService) EXPECT() *MockGitServiceMockRecorder { - return m.recorder -} - -// CompareChanges mocks base method. -func (m *MockGitService) CompareChanges(arg0 context.Context, arg1, arg2, arg3 string, arg4 scm.ListOptions) ([]*scm.Change, *scm.Response, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "CompareChanges", arg0, arg1, arg2, arg3, arg4) - ret0, _ := ret[0].([]*scm.Change) - ret1, _ := ret[1].(*scm.Response) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 -} - -// CompareChanges indicates an expected call of CompareChanges. 
-func (mr *MockGitServiceMockRecorder) CompareChanges(arg0, arg1, arg2, arg3, arg4 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CompareChanges", reflect.TypeOf((*MockGitService)(nil).CompareChanges), arg0, arg1, arg2, arg3, arg4) -} - -// CreateBranch mocks base method. -func (m *MockGitService) CreateBranch(arg0 context.Context, arg1 string, arg2 *scm.CreateBranch) (*scm.Response, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "CreateBranch", arg0, arg1, arg2) - ret0, _ := ret[0].(*scm.Response) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// CreateBranch indicates an expected call of CreateBranch. -func (mr *MockGitServiceMockRecorder) CreateBranch(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateBranch", reflect.TypeOf((*MockGitService)(nil).CreateBranch), arg0, arg1, arg2) -} - -// FindBranch mocks base method. -func (m *MockGitService) FindBranch(arg0 context.Context, arg1, arg2 string) (*scm.Reference, *scm.Response, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "FindBranch", arg0, arg1, arg2) - ret0, _ := ret[0].(*scm.Reference) - ret1, _ := ret[1].(*scm.Response) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 -} - -// FindBranch indicates an expected call of FindBranch. -func (mr *MockGitServiceMockRecorder) FindBranch(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "FindBranch", reflect.TypeOf((*MockGitService)(nil).FindBranch), arg0, arg1, arg2) -} - -// FindCommit mocks base method. -func (m *MockGitService) FindCommit(arg0 context.Context, arg1, arg2 string) (*scm.Commit, *scm.Response, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "FindCommit", arg0, arg1, arg2) - ret0, _ := ret[0].(*scm.Commit) - ret1, _ := ret[1].(*scm.Response) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 -} - -// FindCommit indicates an expected call of FindCommit. -func (mr *MockGitServiceMockRecorder) FindCommit(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "FindCommit", reflect.TypeOf((*MockGitService)(nil).FindCommit), arg0, arg1, arg2) -} - -// FindTag mocks base method. -func (m *MockGitService) FindTag(arg0 context.Context, arg1, arg2 string) (*scm.Reference, *scm.Response, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "FindTag", arg0, arg1, arg2) - ret0, _ := ret[0].(*scm.Reference) - ret1, _ := ret[1].(*scm.Response) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 -} - -// FindTag indicates an expected call of FindTag. -func (mr *MockGitServiceMockRecorder) FindTag(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "FindTag", reflect.TypeOf((*MockGitService)(nil).FindTag), arg0, arg1, arg2) -} - -// ListBranches mocks base method. -func (m *MockGitService) ListBranches(arg0 context.Context, arg1 string, arg2 scm.ListOptions) ([]*scm.Reference, *scm.Response, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ListBranches", arg0, arg1, arg2) - ret0, _ := ret[0].([]*scm.Reference) - ret1, _ := ret[1].(*scm.Response) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 -} - -// ListBranches indicates an expected call of ListBranches. 
-func (mr *MockGitServiceMockRecorder) ListBranches(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListBranches", reflect.TypeOf((*MockGitService)(nil).ListBranches), arg0, arg1, arg2) -} - -// ListChanges mocks base method. -func (m *MockGitService) ListChanges(arg0 context.Context, arg1, arg2 string, arg3 scm.ListOptions) ([]*scm.Change, *scm.Response, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ListChanges", arg0, arg1, arg2, arg3) - ret0, _ := ret[0].([]*scm.Change) - ret1, _ := ret[1].(*scm.Response) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 -} - -// ListChanges indicates an expected call of ListChanges. -func (mr *MockGitServiceMockRecorder) ListChanges(arg0, arg1, arg2, arg3 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListChanges", reflect.TypeOf((*MockGitService)(nil).ListChanges), arg0, arg1, arg2, arg3) -} - -// ListCommits mocks base method. -func (m *MockGitService) ListCommits(arg0 context.Context, arg1 string, arg2 scm.CommitListOptions) ([]*scm.Commit, *scm.Response, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ListCommits", arg0, arg1, arg2) - ret0, _ := ret[0].([]*scm.Commit) - ret1, _ := ret[1].(*scm.Response) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 -} - -// ListCommits indicates an expected call of ListCommits. -func (mr *MockGitServiceMockRecorder) ListCommits(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListCommits", reflect.TypeOf((*MockGitService)(nil).ListCommits), arg0, arg1, arg2) -} - -// ListTags mocks base method. -func (m *MockGitService) ListTags(arg0 context.Context, arg1 string, arg2 scm.ListOptions) ([]*scm.Reference, *scm.Response, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ListTags", arg0, arg1, arg2) - ret0, _ := ret[0].([]*scm.Reference) - ret1, _ := ret[1].(*scm.Response) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 -} - -// ListTags indicates an expected call of ListTags. -func (mr *MockGitServiceMockRecorder) ListTags(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListTags", reflect.TypeOf((*MockGitService)(nil).ListTags), arg0, arg1, arg2) -} - -// MockOrganizationService is a mock of OrganizationService interface. -type MockOrganizationService struct { - ctrl *gomock.Controller - recorder *MockOrganizationServiceMockRecorder -} - -// MockOrganizationServiceMockRecorder is the mock recorder for MockOrganizationService. -type MockOrganizationServiceMockRecorder struct { - mock *MockOrganizationService -} - -// NewMockOrganizationService creates a new mock instance. -func NewMockOrganizationService(ctrl *gomock.Controller) *MockOrganizationService { - mock := &MockOrganizationService{ctrl: ctrl} - mock.recorder = &MockOrganizationServiceMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockOrganizationService) EXPECT() *MockOrganizationServiceMockRecorder { - return m.recorder -} - -// Find mocks base method. 
-func (m *MockOrganizationService) Find(arg0 context.Context, arg1 string) (*scm.Organization, *scm.Response, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Find", arg0, arg1) - ret0, _ := ret[0].(*scm.Organization) - ret1, _ := ret[1].(*scm.Response) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 -} - -// Find indicates an expected call of Find. -func (mr *MockOrganizationServiceMockRecorder) Find(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Find", reflect.TypeOf((*MockOrganizationService)(nil).Find), arg0, arg1) -} - -// FindMembership mocks base method. -func (m *MockOrganizationService) FindMembership(arg0 context.Context, arg1, arg2 string) (*scm.Membership, *scm.Response, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "FindMembership", arg0, arg1, arg2) - ret0, _ := ret[0].(*scm.Membership) - ret1, _ := ret[1].(*scm.Response) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 -} - -// FindMembership indicates an expected call of FindMembership. -func (mr *MockOrganizationServiceMockRecorder) FindMembership(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "FindMembership", reflect.TypeOf((*MockOrganizationService)(nil).FindMembership), arg0, arg1, arg2) -} - -// List mocks base method. -func (m *MockOrganizationService) List(arg0 context.Context, arg1 scm.ListOptions) ([]*scm.Organization, *scm.Response, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "List", arg0, arg1) - ret0, _ := ret[0].([]*scm.Organization) - ret1, _ := ret[1].(*scm.Response) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 -} - -// List indicates an expected call of List. -func (mr *MockOrganizationServiceMockRecorder) List(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "List", reflect.TypeOf((*MockOrganizationService)(nil).List), arg0, arg1) -} - -// MockPullRequestService is a mock of PullRequestService interface. -type MockPullRequestService struct { - ctrl *gomock.Controller - recorder *MockPullRequestServiceMockRecorder -} - -// MockPullRequestServiceMockRecorder is the mock recorder for MockPullRequestService. -type MockPullRequestServiceMockRecorder struct { - mock *MockPullRequestService -} - -// NewMockPullRequestService creates a new mock instance. -func NewMockPullRequestService(ctrl *gomock.Controller) *MockPullRequestService { - mock := &MockPullRequestService{ctrl: ctrl} - mock.recorder = &MockPullRequestServiceMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockPullRequestService) EXPECT() *MockPullRequestServiceMockRecorder { - return m.recorder -} - -// Close mocks base method. -func (m *MockPullRequestService) Close(arg0 context.Context, arg1 string, arg2 int) (*scm.Response, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Close", arg0, arg1, arg2) - ret0, _ := ret[0].(*scm.Response) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Close indicates an expected call of Close. -func (mr *MockPullRequestServiceMockRecorder) Close(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Close", reflect.TypeOf((*MockPullRequestService)(nil).Close), arg0, arg1, arg2) -} - -// Create mocks base method. 
-func (m *MockPullRequestService) Create(arg0 context.Context, arg1 string, arg2 *scm.PullRequestInput) (*scm.PullRequest, *scm.Response, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Create", arg0, arg1, arg2) - ret0, _ := ret[0].(*scm.PullRequest) - ret1, _ := ret[1].(*scm.Response) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 -} - -// Create indicates an expected call of Create. -func (mr *MockPullRequestServiceMockRecorder) Create(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Create", reflect.TypeOf((*MockPullRequestService)(nil).Create), arg0, arg1, arg2) -} - -// CreateComment mocks base method. -func (m *MockPullRequestService) CreateComment(arg0 context.Context, arg1 string, arg2 int, arg3 *scm.CommentInput) (*scm.Comment, *scm.Response, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "CreateComment", arg0, arg1, arg2, arg3) - ret0, _ := ret[0].(*scm.Comment) - ret1, _ := ret[1].(*scm.Response) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 -} - -// CreateComment indicates an expected call of CreateComment. -func (mr *MockPullRequestServiceMockRecorder) CreateComment(arg0, arg1, arg2, arg3 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateComment", reflect.TypeOf((*MockPullRequestService)(nil).CreateComment), arg0, arg1, arg2, arg3) -} - -// DeleteComment mocks base method. -func (m *MockPullRequestService) DeleteComment(arg0 context.Context, arg1 string, arg2, arg3 int) (*scm.Response, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "DeleteComment", arg0, arg1, arg2, arg3) - ret0, _ := ret[0].(*scm.Response) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// DeleteComment indicates an expected call of DeleteComment. -func (mr *MockPullRequestServiceMockRecorder) DeleteComment(arg0, arg1, arg2, arg3 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteComment", reflect.TypeOf((*MockPullRequestService)(nil).DeleteComment), arg0, arg1, arg2, arg3) -} - -// Find mocks base method. -func (m *MockPullRequestService) Find(arg0 context.Context, arg1 string, arg2 int) (*scm.PullRequest, *scm.Response, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Find", arg0, arg1, arg2) - ret0, _ := ret[0].(*scm.PullRequest) - ret1, _ := ret[1].(*scm.Response) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 -} - -// Find indicates an expected call of Find. -func (mr *MockPullRequestServiceMockRecorder) Find(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Find", reflect.TypeOf((*MockPullRequestService)(nil).Find), arg0, arg1, arg2) -} - -// FindComment mocks base method. -func (m *MockPullRequestService) FindComment(arg0 context.Context, arg1 string, arg2, arg3 int) (*scm.Comment, *scm.Response, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "FindComment", arg0, arg1, arg2, arg3) - ret0, _ := ret[0].(*scm.Comment) - ret1, _ := ret[1].(*scm.Response) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 -} - -// FindComment indicates an expected call of FindComment. 
-func (mr *MockPullRequestServiceMockRecorder) FindComment(arg0, arg1, arg2, arg3 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "FindComment", reflect.TypeOf((*MockPullRequestService)(nil).FindComment), arg0, arg1, arg2, arg3) -} - -// List mocks base method. -func (m *MockPullRequestService) List(arg0 context.Context, arg1 string, arg2 scm.PullRequestListOptions) ([]*scm.PullRequest, *scm.Response, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "List", arg0, arg1, arg2) - ret0, _ := ret[0].([]*scm.PullRequest) - ret1, _ := ret[1].(*scm.Response) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 -} - -// List indicates an expected call of List. -func (mr *MockPullRequestServiceMockRecorder) List(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "List", reflect.TypeOf((*MockPullRequestService)(nil).List), arg0, arg1, arg2) -} - -// ListChanges mocks base method. -func (m *MockPullRequestService) ListChanges(arg0 context.Context, arg1 string, arg2 int, arg3 scm.ListOptions) ([]*scm.Change, *scm.Response, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ListChanges", arg0, arg1, arg2, arg3) - ret0, _ := ret[0].([]*scm.Change) - ret1, _ := ret[1].(*scm.Response) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 -} - -// ListChanges indicates an expected call of ListChanges. -func (mr *MockPullRequestServiceMockRecorder) ListChanges(arg0, arg1, arg2, arg3 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListChanges", reflect.TypeOf((*MockPullRequestService)(nil).ListChanges), arg0, arg1, arg2, arg3) -} - -// ListComments mocks base method. -func (m *MockPullRequestService) ListComments(arg0 context.Context, arg1 string, arg2 int, arg3 scm.ListOptions) ([]*scm.Comment, *scm.Response, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ListComments", arg0, arg1, arg2, arg3) - ret0, _ := ret[0].([]*scm.Comment) - ret1, _ := ret[1].(*scm.Response) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 -} - -// ListComments indicates an expected call of ListComments. -func (mr *MockPullRequestServiceMockRecorder) ListComments(arg0, arg1, arg2, arg3 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListComments", reflect.TypeOf((*MockPullRequestService)(nil).ListComments), arg0, arg1, arg2, arg3) -} - -// ListCommits mocks base method. -func (m *MockPullRequestService) ListCommits(arg0 context.Context, arg1 string, arg2 int, arg3 scm.ListOptions) ([]*scm.Commit, *scm.Response, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ListCommits", arg0, arg1, arg2, arg3) - ret0, _ := ret[0].([]*scm.Commit) - ret1, _ := ret[1].(*scm.Response) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 -} - -// ListCommits indicates an expected call of ListCommits. -func (mr *MockPullRequestServiceMockRecorder) ListCommits(arg0, arg1, arg2, arg3 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListCommits", reflect.TypeOf((*MockPullRequestService)(nil).ListCommits), arg0, arg1, arg2, arg3) -} - -// Merge mocks base method. 
-func (m *MockPullRequestService) Merge(arg0 context.Context, arg1 string, arg2 int) (*scm.Response, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Merge", arg0, arg1, arg2) - ret0, _ := ret[0].(*scm.Response) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Merge indicates an expected call of Merge. -func (mr *MockPullRequestServiceMockRecorder) Merge(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Merge", reflect.TypeOf((*MockPullRequestService)(nil).Merge), arg0, arg1, arg2) -} - -// MockRepositoryService is a mock of RepositoryService interface. -type MockRepositoryService struct { - ctrl *gomock.Controller - recorder *MockRepositoryServiceMockRecorder -} - -// MockRepositoryServiceMockRecorder is the mock recorder for MockRepositoryService. -type MockRepositoryServiceMockRecorder struct { - mock *MockRepositoryService -} - -// NewMockRepositoryService creates a new mock instance. -func NewMockRepositoryService(ctrl *gomock.Controller) *MockRepositoryService { - mock := &MockRepositoryService{ctrl: ctrl} - mock.recorder = &MockRepositoryServiceMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockRepositoryService) EXPECT() *MockRepositoryServiceMockRecorder { - return m.recorder -} - -// CreateHook mocks base method. -func (m *MockRepositoryService) CreateHook(arg0 context.Context, arg1 string, arg2 *scm.HookInput) (*scm.Hook, *scm.Response, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "CreateHook", arg0, arg1, arg2) - ret0, _ := ret[0].(*scm.Hook) - ret1, _ := ret[1].(*scm.Response) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 -} - -// CreateHook indicates an expected call of CreateHook. -func (mr *MockRepositoryServiceMockRecorder) CreateHook(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateHook", reflect.TypeOf((*MockRepositoryService)(nil).CreateHook), arg0, arg1, arg2) -} - -// CreateStatus mocks base method. -func (m *MockRepositoryService) CreateStatus(arg0 context.Context, arg1, arg2 string, arg3 *scm.StatusInput) (*scm.Status, *scm.Response, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "CreateStatus", arg0, arg1, arg2, arg3) - ret0, _ := ret[0].(*scm.Status) - ret1, _ := ret[1].(*scm.Response) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 -} - -// CreateStatus indicates an expected call of CreateStatus. -func (mr *MockRepositoryServiceMockRecorder) CreateStatus(arg0, arg1, arg2, arg3 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateStatus", reflect.TypeOf((*MockRepositoryService)(nil).CreateStatus), arg0, arg1, arg2, arg3) -} - -// DeleteHook mocks base method. -func (m *MockRepositoryService) DeleteHook(arg0 context.Context, arg1, arg2 string) (*scm.Response, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "DeleteHook", arg0, arg1, arg2) - ret0, _ := ret[0].(*scm.Response) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// DeleteHook indicates an expected call of DeleteHook. -func (mr *MockRepositoryServiceMockRecorder) DeleteHook(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteHook", reflect.TypeOf((*MockRepositoryService)(nil).DeleteHook), arg0, arg1, arg2) -} - -// Find mocks base method. 
-func (m *MockRepositoryService) Find(arg0 context.Context, arg1 string) (*scm.Repository, *scm.Response, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Find", arg0, arg1) - ret0, _ := ret[0].(*scm.Repository) - ret1, _ := ret[1].(*scm.Response) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 -} - -// Find indicates an expected call of Find. -func (mr *MockRepositoryServiceMockRecorder) Find(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Find", reflect.TypeOf((*MockRepositoryService)(nil).Find), arg0, arg1) -} - -// FindHook mocks base method. -func (m *MockRepositoryService) FindHook(arg0 context.Context, arg1, arg2 string) (*scm.Hook, *scm.Response, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "FindHook", arg0, arg1, arg2) - ret0, _ := ret[0].(*scm.Hook) - ret1, _ := ret[1].(*scm.Response) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 -} - -// FindHook indicates an expected call of FindHook. -func (mr *MockRepositoryServiceMockRecorder) FindHook(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "FindHook", reflect.TypeOf((*MockRepositoryService)(nil).FindHook), arg0, arg1, arg2) -} - -// FindPerms mocks base method. -func (m *MockRepositoryService) FindPerms(arg0 context.Context, arg1 string) (*scm.Perm, *scm.Response, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "FindPerms", arg0, arg1) - ret0, _ := ret[0].(*scm.Perm) - ret1, _ := ret[1].(*scm.Response) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 -} - -// FindPerms indicates an expected call of FindPerms. -func (mr *MockRepositoryServiceMockRecorder) FindPerms(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "FindPerms", reflect.TypeOf((*MockRepositoryService)(nil).FindPerms), arg0, arg1) -} - -// List mocks base method. -func (m *MockRepositoryService) List(arg0 context.Context, arg1 scm.ListOptions) ([]*scm.Repository, *scm.Response, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "List", arg0, arg1) - ret0, _ := ret[0].([]*scm.Repository) - ret1, _ := ret[1].(*scm.Response) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 -} - -// List indicates an expected call of List. -func (mr *MockRepositoryServiceMockRecorder) List(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "List", reflect.TypeOf((*MockRepositoryService)(nil).List), arg0, arg1) -} - -// ListHooks mocks base method. -func (m *MockRepositoryService) ListHooks(arg0 context.Context, arg1 string, arg2 scm.ListOptions) ([]*scm.Hook, *scm.Response, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ListHooks", arg0, arg1, arg2) - ret0, _ := ret[0].([]*scm.Hook) - ret1, _ := ret[1].(*scm.Response) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 -} - -// ListHooks indicates an expected call of ListHooks. -func (mr *MockRepositoryServiceMockRecorder) ListHooks(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListHooks", reflect.TypeOf((*MockRepositoryService)(nil).ListHooks), arg0, arg1, arg2) -} - -// ListStatus mocks base method. 
-func (m *MockRepositoryService) ListStatus(arg0 context.Context, arg1, arg2 string, arg3 scm.ListOptions) ([]*scm.Status, *scm.Response, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ListStatus", arg0, arg1, arg2, arg3) - ret0, _ := ret[0].([]*scm.Status) - ret1, _ := ret[1].(*scm.Response) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 -} - -// ListStatus indicates an expected call of ListStatus. -func (mr *MockRepositoryServiceMockRecorder) ListStatus(arg0, arg1, arg2, arg3 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListStatus", reflect.TypeOf((*MockRepositoryService)(nil).ListStatus), arg0, arg1, arg2, arg3) -} - -// UpdateHook mocks base method. -func (m *MockRepositoryService) UpdateHook(arg0 context.Context, arg1, arg2 string, arg3 *scm.HookInput) (*scm.Hook, *scm.Response, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "UpdateHook", arg0, arg1, arg2, arg3) - ret0, _ := ret[0].(*scm.Hook) - ret1, _ := ret[1].(*scm.Response) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 -} - -// UpdateHook indicates an expected call of UpdateHook. -func (mr *MockRepositoryServiceMockRecorder) UpdateHook(arg0, arg1, arg2, arg3 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateHook", reflect.TypeOf((*MockRepositoryService)(nil).UpdateHook), arg0, arg1, arg2, arg3) -} - -// MockUserService is a mock of UserService interface. -type MockUserService struct { - ctrl *gomock.Controller - recorder *MockUserServiceMockRecorder -} - -// MockUserServiceMockRecorder is the mock recorder for MockUserService. -type MockUserServiceMockRecorder struct { - mock *MockUserService -} - -// NewMockUserService creates a new mock instance. -func NewMockUserService(ctrl *gomock.Controller) *MockUserService { - mock := &MockUserService{ctrl: ctrl} - mock.recorder = &MockUserServiceMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockUserService) EXPECT() *MockUserServiceMockRecorder { - return m.recorder -} - -// Find mocks base method. -func (m *MockUserService) Find(arg0 context.Context) (*scm.User, *scm.Response, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Find", arg0) - ret0, _ := ret[0].(*scm.User) - ret1, _ := ret[1].(*scm.Response) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 -} - -// Find indicates an expected call of Find. -func (mr *MockUserServiceMockRecorder) Find(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Find", reflect.TypeOf((*MockUserService)(nil).Find), arg0) -} - -// FindEmail mocks base method. -func (m *MockUserService) FindEmail(arg0 context.Context) (string, *scm.Response, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "FindEmail", arg0) - ret0, _ := ret[0].(string) - ret1, _ := ret[1].(*scm.Response) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 -} - -// FindEmail indicates an expected call of FindEmail. -func (mr *MockUserServiceMockRecorder) FindEmail(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "FindEmail", reflect.TypeOf((*MockUserService)(nil).FindEmail), arg0) -} - -// FindLogin mocks base method. 
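Every mock above shares this generated shape, which points at mockgen rather than hand-written code. The generate directive itself is not part of this hunk; in reflect mode it would look roughly like the line below, where the destination file and package name are assumptions:

    //go:generate mockgen -package=mockscm -destination=mock_gen.go github.com/drone/go-scm/scm GitService,OrganizationService,PullRequestService,RepositoryService,UserService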
-func (m *MockUserService) FindLogin(arg0 context.Context, arg1 string) (*scm.User, *scm.Response, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "FindLogin", arg0, arg1) - ret0, _ := ret[0].(*scm.User) - ret1, _ := ret[1].(*scm.Response) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 -} - -// FindLogin indicates an expected call of FindLogin. -func (mr *MockUserServiceMockRecorder) FindLogin(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "FindLogin", reflect.TypeOf((*MockUserService)(nil).FindLogin), arg0, arg1) -} - -// ListEmail mocks base method. -func (m *MockUserService) ListEmail(arg0 context.Context, arg1 scm.ListOptions) ([]*scm.Email, *scm.Response, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ListEmail", arg0, arg1) - ret0, _ := ret[0].([]*scm.Email) - ret1, _ := ret[1].(*scm.Response) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 -} - -// ListEmail indicates an expected call of ListEmail. -func (mr *MockUserServiceMockRecorder) ListEmail(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListEmail", reflect.TypeOf((*MockUserService)(nil).ListEmail), arg0, arg1) -} diff --git a/operator/manager/manager.go b/operator/manager/manager.go deleted file mode 100644 index f20e51e7a3..0000000000 --- a/operator/manager/manager.go +++ /dev/null @@ -1,557 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package manager - -import ( - "bytes" - "context" - "io" - "io/ioutil" - "time" - - "github.com/drone/drone-yaml/yaml/converter" - "github.com/drone/drone/core" - "github.com/drone/drone/store/shared/db" - - "github.com/hashicorp/go-multierror" - "github.com/sirupsen/logrus" -) - -var noContext = context.Background() - -var _ BuildManager = (*Manager)(nil) - -type ( - // Context represents the minimum amount of information - // required by the runner to execute a build. - Context struct { - Repo *core.Repository `json:"repository"` - Build *core.Build `json:"build"` - Stage *core.Stage `json:"stage"` - Config *core.File `json:"config"` - Secrets []*core.Secret `json:"secrets"` - System *core.System `json:"system"` - } - - // BuildManager encapsulates complex build operations and provides - // a simplified interface for build runners. - BuildManager interface { - // Request requests the next available build stage for execution. - Request(ctx context.Context, args *Request) (*core.Stage, error) - - // Accept accepts the build stage for execution. - Accept(ctx context.Context, stage int64, machine string) (*core.Stage, error) - - // Netrc returns a valid netrc for execution. - Netrc(ctx context.Context, repo int64) (*core.Netrc, error) - - // Details fetches build details - Details(ctx context.Context, stage int64) (*Context, error) - - // Before signals the build step is about to start. 
- Before(ctx context.Context, step *core.Step) error - - // After signals the build step is complete. - After(ctx context.Context, step *core.Step) error - - // BeforeAll signals the build stage is about to start. - BeforeAll(ctx context.Context, stage *core.Stage) error - - // AfterAll signals the build stage is complete. - AfterAll(ctx context.Context, stage *core.Stage) error - - // Watch watches for build cancellation requests. - Watch(ctx context.Context, stage int64) (bool, error) - - // Write writes a line to the build logs - Write(ctx context.Context, step int64, line *core.Line) error - - // Upload uploads the full logs - Upload(ctx context.Context, step int64, r io.Reader) error - - // UploadBytes uploads the full logs - UploadBytes(ctx context.Context, step int64, b []byte) error - - // UploadCard creates a new card - UploadCard(ctx context.Context, step int64, input *core.CardInput) error - } - - // Request provides filters when requesting a pending - // build from the queue. This allows an agent, for example, - // to request a build that matches its architecture and kernel. - Request struct { - Kind string `json:"kind"` - Type string `json:"type"` - OS string `json:"os"` - Arch string `json:"arch"` - Variant string `json:"variant"` - Kernel string `json:"kernel"` - Labels map[string]string `json:"labels,omitempty"` - } -) - -// New returns a new Manager. -func New( - builds core.BuildStore, - cards core.CardStore, - config core.ConfigService, - converter core.ConvertService, - events core.Pubsub, - logs core.LogStore, - logz core.LogStream, - netrcs core.NetrcService, - repos core.RepositoryStore, - scheduler core.Scheduler, - secrets core.SecretStore, - globals core.GlobalSecretStore, - status core.StatusService, - stages core.StageStore, - steps core.StepStore, - system *core.System, - users core.UserStore, - webhook core.WebhookSender, -) BuildManager { - return &Manager{ - Builds: builds, - Cards: cards, - Config: config, - Converter: converter, - Events: events, - Globals: globals, - Logs: logs, - Logz: logz, - Netrcs: netrcs, - Repos: repos, - Scheduler: scheduler, - Secrets: secrets, - Status: status, - Stages: stages, - Steps: steps, - System: system, - Users: users, - Webhook: webhook, - } -} - -// Manager provides a simplified interface to the build runner so that it -// can more easily interact with the server. -type Manager struct { - Builds core.BuildStore - Cards core.CardStore - Config core.ConfigService - Converter core.ConvertService - Events core.Pubsub - Globals core.GlobalSecretStore - Logs core.LogStore - Logz core.LogStream - Netrcs core.NetrcService - Repos core.RepositoryStore - Scheduler core.Scheduler - Secrets core.SecretStore - Status core.StatusService - Stages core.StageStore - Steps core.StepStore - System *core.System - Users core.UserStore - Webhook core.WebhookSender -} - -// Request requests the next available build stage for execution. 
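The Request filter above is what lets heterogeneous agents share one queue: each runner describes its own platform and only receives matching stages. A short sketch of how a caller might fill it in (runner-side code, not part of this patch; assumes imports of context, the core package, and this manager package):

    // requestStage asks for one pending stage matching this runner's
    // platform. The label value is illustrative only.
    func requestStage(ctx context.Context, m manager.BuildManager) (*core.Stage, error) {
        return m.Request(ctx, &manager.Request{
            OS:     "linux",
            Arch:   "amd64",
            Labels: map[string]string{"region": "us-east-1"},
        })
    }

The Request implementation itself follows.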
-func (m *Manager) Request(ctx context.Context, args *Request) (*core.Stage, error) { - logger := logrus.WithFields( - logrus.Fields{ - "kind": args.Kind, - "type": args.Type, - "os": args.OS, - "arch": args.Arch, - "kernel": args.Kernel, - "variant": args.Variant, - }, - ) - logger.Debugln("manager: request queue item") - - stage, err := m.Scheduler.Request(ctx, core.Filter{ - Kind: args.Kind, - Type: args.Type, - OS: args.OS, - Arch: args.Arch, - Kernel: args.Kernel, - Variant: args.Variant, - Labels: args.Labels, - }) - if err != nil && ctx.Err() != nil { - logger.Debugln("manager: context canceled") - return nil, err - } - if err != nil { - logger = logrus.WithError(err) - logger.Warnln("manager: request queue item error") - return nil, err - } - return stage, nil -} - -// Accept accepts the build stage for execution. It is possible for multiple -// agents to pull the same stage from the queue. The system uses optimistic -// locking at the database-level to prevent multiple agents from executing the -// same stage. -func (m *Manager) Accept(ctx context.Context, id int64, machine string) (*core.Stage, error) { - logger := logrus.WithFields( - logrus.Fields{ - "stage-id": id, - "machine": machine, - }, - ) - logger.Debugln("manager: accept stage") - - stage, err := m.Stages.Find(noContext, id) - if err != nil { - logger = logger.WithError(err) - logger.Warnln("manager: cannot find stage") - return nil, err - } - if stage.Machine != "" { - logger.Debugln("manager: stage already assigned. abort.") - return nil, db.ErrOptimisticLock - } - - stage.Machine = machine - stage.Status = core.StatusPending - stage.Updated = time.Now().Unix() - - err = m.Stages.Update(noContext, stage) - if err == db.ErrOptimisticLock { - logger = logger.WithError(err) - logger.Debugln("manager: stage processed by another agent") - } else if err != nil { - logger = logger.WithError(err) - logger.Debugln("manager: cannot update stage") - } else { - logger.Debugln("manager: stage accepted") - } - return stage, err -} - -// Details fetches build details. 
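Accept leans on database-level optimistic locking, so losing the race is an expected outcome rather than a failure: every agent except the winner gets db.ErrOptimisticLock back. A caller-side sketch, under the assumption of an agent loop that simply moves on to the next request (assumes imports of context, core, manager, and store/shared/db):

    // claim requests a stage and tries to take ownership of it. A nil
    // stage with a nil error means there is currently nothing to run.
    func claim(ctx context.Context, m manager.BuildManager, args *manager.Request, machine string) (*core.Stage, error) {
        stage, err := m.Request(ctx, args)
        if err != nil || stage == nil {
            return nil, err
        }
        accepted, err := m.Accept(ctx, stage.ID, machine)
        if err == db.ErrOptimisticLock {
            // another agent won the race; treat it as "no work"
            // and let the loop request the next stage.
            return nil, nil
        }
        return accepted, err
    }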
-func (m *Manager) Details(ctx context.Context, id int64) (*Context, error) { - logger := logrus.WithField("step-id", id) - logger.Debugln("manager: fetching stage details") - - stage, err := m.Stages.Find(noContext, id) - if err != nil { - logger = logger.WithError(err) - logger.Warnln("manager: cannot find stage") - return nil, err - } - build, err := m.Builds.Find(noContext, stage.BuildID) - if err != nil { - logger = logger.WithError(err) - logger.Warnln("manager: cannot find build") - return nil, err - } - stages, err := m.Stages.List(ctx, stage.BuildID) - if err != nil { - logger = logger.WithError(err) - logger.Warnln("manager: cannot list stages") - return nil, err - } - build.Stages = stages - repo, err := m.Repos.Find(noContext, build.RepoID) - if err != nil { - logger = logger.WithError(err) - logger.Warnln("manager: cannot find repository") - return nil, err - } - logger = logger.WithFields( - logrus.Fields{ - "build": build.Number, - "repo": repo.Slug, - }, - ) - user, err := m.Users.Find(noContext, repo.UserID) - if err != nil { - logger = logger.WithError(err) - logger.Warnln("manager: cannot find repository owner") - return nil, err - } - config, err := m.Config.Find(noContext, &core.ConfigArgs{ - User: user, - Repo: repo, - Build: build, - }) - if err != nil { - logger = logger.WithError(err) - logger.Warnln("manager: cannot find configuration") - return nil, err - } - - // this code is temporarily in place to detect and convert - // the legacy yaml configuration file to the new format. - config.Data, _ = converter.ConvertString(config.Data, converter.Metadata{ - Filename: repo.Config, - URL: repo.Link, - Ref: build.Ref, - }) - - config, err = m.Converter.Convert(noContext, &core.ConvertArgs{ - Build: build, - Config: config, - Repo: repo, - User: user, - }) - if err != nil { - logger = logger.WithError(err) - logger.Warnln("manager: cannot convert configuration") - return nil, err - } - var secrets []*core.Secret - tmpSecrets, err := m.Secrets.List(noContext, repo.ID) - if err != nil { - logger = logger.WithError(err) - logger.Warnln("manager: cannot list secrets") - return nil, err - } - tmpGlobalSecrets, err := m.Globals.List(noContext, repo.Namespace) - if err != nil { - logger = logger.WithError(err) - logger.Warnln("manager: cannot list global secrets") - return nil, err - } - // TODO(bradrydzewski) can we delegate filtering - // secrets to the agent? If not, we should add - // unit tests. - for _, secret := range tmpSecrets { - if secret.PullRequest == false && - build.Event == core.EventPullRequest { - continue - } - secrets = append(secrets, secret) - } - for _, secret := range tmpGlobalSecrets { - if secret.PullRequest == false && - build.Event == core.EventPullRequest { - continue - } - secrets = append(secrets, secret) - } - return &Context{ - Repo: repo, - Build: build, - Stage: stage, - Secrets: secrets, - System: m.System, - Config: &core.File{Data: []byte(config.Data)}, - }, nil -} - -// Before signals the build step is about to start. 
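The TODO above about secret filtering leaves the pull-request check inlined twice, once for repository secrets and once for global ones. Lifting it into a helper would make the behavior easy to table-test; a sketch of that refactor (this helper does not exist in the deleted file):

    // filterSecrets drops secrets that are not exposed to pull requests
    // when the triggering event is a pull request.
    func filterSecrets(event string, in []*core.Secret) []*core.Secret {
        out := make([]*core.Secret, 0, len(in))
        for _, secret := range in {
            if event == core.EventPullRequest && !secret.PullRequest {
                continue
            }
            out = append(out, secret)
        }
        return out
    }

Details would then call it once for repository secrets and once for global secrets before assembling the Context.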
-func (m *Manager) Before(ctx context.Context, step *core.Step) error { - logger := logrus.WithFields( - logrus.Fields{ - "step.status": step.Status, - "step.name": step.Name, - "step.id": step.ID, - }, - ) - logger.Debugln("manager: updating step status") - - err := m.Logz.Create(noContext, step.ID) - if err != nil { - logger = logger.WithError(err) - logger.Warnln("manager: cannot create log stream") - return err - } - updater := &updater{ - Builds: m.Builds, - Events: m.Events, - Repos: m.Repos, - Steps: m.Steps, - Stages: m.Stages, - Webhook: m.Webhook, - } - return updater.do(ctx, step) -} - -// After signals the build step is complete. -func (m *Manager) After(ctx context.Context, step *core.Step) error { - logger := logrus.WithFields( - logrus.Fields{ - "step.status": step.Status, - "step.name": step.Name, - "step.id": step.ID, - }, - ) - logger.Debugln("manager: updating step status") - - var errs error - updater := &updater{ - Builds: m.Builds, - Events: m.Events, - Repos: m.Repos, - Steps: m.Steps, - Stages: m.Stages, - Webhook: m.Webhook, - } - - if err := updater.do(ctx, step); err != nil { - errs = multierror.Append(errs, err) - logger = logger.WithError(err) - logger.Warnln("manager: cannot update step") - } - - if err := m.Logz.Delete(noContext, step.ID); err != nil { - logger = logger.WithError(err) - logger.Warnln("manager: cannot teardown log stream") - } - return errs -} - -// BeforeAll signals the build stage is about to start. -func (m *Manager) BeforeAll(ctx context.Context, stage *core.Stage) error { - s := &setup{ - Builds: m.Builds, - Events: m.Events, - Repos: m.Repos, - Steps: m.Steps, - Stages: m.Stages, - Status: m.Status, - Users: m.Users, - } - return s.do(ctx, stage) -} - -// AfterAll signals the build stage is complete. -func (m *Manager) AfterAll(ctx context.Context, stage *core.Stage) error { - t := &teardown{ - Builds: m.Builds, - Events: m.Events, - Logs: m.Logz, - Repos: m.Repos, - Scheduler: m.Scheduler, - Steps: m.Steps, - Stages: m.Stages, - Status: m.Status, - Users: m.Users, - Webhook: m.Webhook, - } - return t.do(ctx, stage) -} - -// Netrc returns netrc file with a valid, non-expired token -// that can be used to clone the repository. -func (m *Manager) Netrc(ctx context.Context, id int64) (*core.Netrc, error) { - logger := logrus.WithField("repo.id", id) - - repo, err := m.Repos.Find(ctx, id) - if err != nil { - logger = logger.WithError(err) - logger.Warnln("manager: cannot find repository") - return nil, err - } - - user, err := m.Users.Find(ctx, repo.UserID) - if err != nil { - logger = logger.WithError(err) - logger.Warnln("manager: cannot find repository owner") - return nil, err - } - - netrc, err := m.Netrcs.Create(ctx, user, repo) - if err != nil { - logger = logger.WithError(err) - logger = logger.WithField("repo.name", repo.Slug) - logger.Warnln("manager: cannot generate netrc") - } - return netrc, err -} - -// Watch watches for build cancellation requests. -func (m *Manager) Watch(ctx context.Context, id int64) (bool, error) { - ok, err := m.Scheduler.Cancelled(ctx, id) - // we expect a context cancel error here which - // indicates a polling timeout. The subscribing - // client should look for the context cancel error - // and resume polling. - if err != nil { - return ok, err - } - - // // TODO (bradrydzewski) we should be able to return - // // immediately if Cancelled returns true. This requires - // // some more testing but would avoid the extra database - // // call. 
- // if ok { - // return ok, err - // } - - // if no error is returned we should check - // the database to see if the build is complete. If - // complete, return true. - build, err := m.Builds.Find(ctx, id) - if err != nil { - logger := logrus.WithError(err) - logger = logger.WithField("build-id", id) - logger.Warnln("manager: cannot find build") - return ok, err - } - return build.IsDone(), nil -} - -// Write writes a line to the build logs. -func (m *Manager) Write(ctx context.Context, step int64, line *core.Line) error { - err := m.Logz.Write(ctx, step, line) - if err != nil { - logger := logrus.WithError(err) - logger = logger.WithField("step-id", step) - logger.Warnln("manager: cannot write to log stream") - } - return err -} - -// Upload uploads the full logs. -func (m *Manager) Upload(ctx context.Context, step int64, r io.Reader) error { - err := m.Logs.Create(ctx, step, r) - if err != nil { - logger := logrus.WithError(err) - logger = logger.WithField("step-id", step) - logger.Warnln("manager: cannot upload complete logs") - } - return err -} - -// UploadBytes uploads the full logs. -func (m *Manager) UploadBytes(ctx context.Context, step int64, data []byte) error { - buf := bytes.NewBuffer(data) - err := m.Logs.Create(ctx, step, buf) - if err != nil { - logger := logrus.WithError(err) - logger = logger.WithField("step-id", step) - logger.Warnln("manager: cannot upload complete logs") - } - return err -} - -// UploadCard creates card for step. -func (m *Manager) UploadCard(ctx context.Context, stepId int64, input *core.CardInput) error { - data := ioutil.NopCloser( - bytes.NewBuffer(input.Data), - ) - err := m.Cards.Create(ctx, stepId, data) - if err != nil { - logger := logrus.WithError(err) - logger.Warnln("manager: cannot create card") - } - return nil -} diff --git a/operator/manager/manager_test.go b/operator/manager/manager_test.go deleted file mode 100644 index afa0ba2adf..0000000000 --- a/operator/manager/manager_test.go +++ /dev/null @@ -1,15 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package manager - -import ( - "io/ioutil" - - "github.com/sirupsen/logrus" -) - -func init() { - logrus.SetOutput(ioutil.Discard) -} diff --git a/operator/manager/rpc/client.go b/operator/manager/rpc/client.go deleted file mode 100644 index 0cf5f118f7..0000000000 --- a/operator/manager/rpc/client.go +++ /dev/null @@ -1,329 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package rpc - -import ( - "context" - "encoding/json" - "errors" - "fmt" - "io" - "io/ioutil" - "log" - "net/http" - "os" - "strings" - "time" - - "github.com/drone/drone/operator/manager" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/shared/db" - - "github.com/hashicorp/go-retryablehttp" - "github.com/oxtoacart/bpool" -) - -var _ manager.BuildManager = (*Client)(nil) - -var bufpool = bpool.NewBufferPool(64) - -// Client defines an RPC client. -type Client struct { - token string - server string - client *retryablehttp.Client -} - -// NewClient returns a new rpc client that is able to -// interact with a remote build controller using the -// http transport. 
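Watch, a little further up in manager.go, is built for long polling: a context error only means the subscription window closed and the runner should subscribe again. A hedged sketch of the consuming loop (runner-side code, not part of this patch; assumes imports of context and the manager package):

    // waitForCancel polls until the build completes or is canceled.
    func waitForCancel(ctx context.Context, m manager.BuildManager, build int64) error {
        for {
            done, err := m.Watch(ctx, build)
            if done {
                return nil // build finished or a cancellation was requested
            }
            if ctx.Err() != nil {
                return ctx.Err() // the caller stopped watching
            }
            if err != nil && err != context.DeadlineExceeded {
                return err
            }
            // otherwise the polling window expired; subscribe again
        }
    }

The RPC client below maps the server's 524 responses back to context.DeadlineExceeded, which is what keeps a loop like this portable across the in-process and remote implementations.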
-func NewClient(server, token string) *Client { - client := retryablehttp.NewClient() - client.RetryMax = 30 - client.RetryWaitMax = time.Second * 10 - client.RetryWaitMin = time.Second * 1 - client.Logger = nil - return &Client{ - client: client, - server: strings.TrimSuffix(server, "/"), - token: token, - } -} - -// SetDebug enabled debug-level logging within the retryable -// http.Client. This can be useful if you are debugging network -// connectivity issues and want to monitor disconnects, -// reconnects, and retries. -func (s *Client) SetDebug(debug bool) { - if debug == true { - s.client.Logger = log.New(os.Stderr, "", log.LstdFlags) - } else { - s.client.Logger = nil - } -} - -// Request requests the next available build stage for execution. -func (s *Client) Request(ctx context.Context, args *manager.Request) (*core.Stage, error) { - timeout, cancel := context.WithTimeout(ctx, time.Minute) - defer cancel() - - in := &requestRequest{Request: args} - out := &core.Stage{} - err := s.send(timeout, "/rpc/v1/request", in, out) - - // The request is performing long polling and is subject - // to a client-side and server-side timeout. The timeout - // error is therefore expected behavior, and is not - // considered an error by the system. - if err == context.DeadlineExceeded { - return nil, nil // no error - } - return out, err -} - -// Accept accepts the build stage for execution. -func (s *Client) Accept(ctx context.Context, stage int64, machine string) (*core.Stage, error) { - in := &acceptRequest{Stage: stage, Machine: machine} - return nil, s.send(noContext, "/rpc/v1/accept", in, nil) -} - -// Netrc returns a valid netrc for execution. -func (s *Client) Netrc(ctx context.Context, repo int64) (*core.Netrc, error) { - in := &netrcRequest{repo} - out := &core.Netrc{} - err := s.send(noContext, "/rpc/v1/netrc", in, out) - return out, err -} - -// Details fetches build details -func (s *Client) Details(ctx context.Context, stage int64) (*manager.Context, error) { - in := &detailsRequest{Stage: stage} - out := &buildContextToken{} - err := s.send(noContext, "/rpc/v1/details", in, out) - if err != nil { - return nil, err - } - // the repository token is excluded from the json encoding - // by default. this workaround ensures it is available to - // the remote build agent. - out.Context.Repo.Secret = out.Secret - return out.Context, nil -} - -// Before signals the build step is about to start. -func (s *Client) Before(ctx context.Context, step *core.Step) error { - in := &stepRequest{Step: step} - out := &core.Step{} - err := s.send(noContext, "/rpc/v1/before", in, out) - if err != nil { - return err - } - // the step ID and version (optimistic locking) are - // updated when the step is created. Copy the updated - // values back to the original step object. - step.ID = out.ID - step.Version = out.Version - return err -} - -// After signals the build step is complete. -func (s *Client) After(ctx context.Context, step *core.Step) error { - in := &stepRequest{Step: step} - out := &core.Step{} - err := s.send(noContext, "/rpc/v1/after", in, out) - if err != nil { - return err - } - // the step version (optimistic locking) is updated - // when the step is created. Copy the updated values - // back to the original step object. - step.Version = out.Version - return err -} - -// BeforeAll signals the build stage is about to start. 
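NewClient above tunes the retry policy, but this hunk does not show where retryFunc (defined near the bottom of client.go) gets installed. Presumably it is attached as the retryablehttp CheckRetry hook; a sketch under that assumption:

    // newRetryingClient mirrors the settings from NewClient and wires in
    // the custom retry predicate. The wiring is an assumption, not taken
    // from the deleted file.
    func newRetryingClient() *retryablehttp.Client {
        c := retryablehttp.NewClient()
        c.RetryMax = 30
        c.RetryWaitMin = time.Second
        c.RetryWaitMax = 10 * time.Second
        c.Logger = nil
        c.CheckRetry = retryFunc
        return c
    }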
-func (s *Client) BeforeAll(ctx context.Context, stage *core.Stage) error { - in := &stageRequest{Stage: stage} - out := &core.Stage{} - err := s.send(noContext, "/rpc/v1/beforeAll", in, out) - if err != nil { - return err - } - stage.Version = out.Version - stage.Updated = out.Updated - stage.Created = out.Created - // TODO(bradrydzewski) clean this code to prevent possible - // index-out-of-bounds exceptions. - for i, step := range stage.Steps { - step.ID = out.Steps[i].ID - step.Version = out.Steps[i].Version - } - return err -} - -// AfterAll signals the build stage is complete. -func (s *Client) AfterAll(ctx context.Context, stage *core.Stage) error { - in := &stageRequest{Stage: stage} - out := &core.Stage{} - err := s.send(noContext, "/rpc/v1/afterAll", in, out) - if err != nil { - return err - } - // the stage timestamps and version (optimistic locking) - // are updated when the step is created. Copy the updated - // values back to the original step object. - stage.Version = out.Version - stage.Updated = out.Updated - stage.Created = out.Created - return err -} - -func (s *Client) Watch(ctx context.Context, build int64) (bool, error) { - in := &watchRequest{build} - out := &watchResponse{} - err := s.send(ctx, "/rpc/v1/watch", in, out) - return out.Done, err -} - -func (s *Client) Write(ctx context.Context, step int64, line *core.Line) error { - in := writePool.Get().(*writeRequest) - in.Step = step - in.Line = line - err := s.send(noContext, "/rpc/v1/write", in, nil) - writePool.Put(in) - return err -} - -func (s *Client) Upload(ctx context.Context, step int64, r io.Reader) error { - endpoint := "/rpc/v1/upload?id=" + fmt.Sprint(step) - return s.upload(noContext, endpoint, r) -} - -func (s *Client) UploadBytes(ctx context.Context, step int64, data []byte) error { - endpoint := "/rpc/v1/upload?id=" + fmt.Sprint(step) - return s.upload(noContext, endpoint, data) -} - -func (s *Client) UploadCard(ctx context.Context, step int64, input *core.CardInput) error { - return errors.New("rpc upload card not supported") -} - -func (s *Client) send(ctx context.Context, path string, in, out interface{}) error { - // Source a buffer from a pool. The agent may generate a - // large number of small requests for log entries. This will - // help reduce pressure on the garbage collector. - buf := bufpool.Get() - defer bufpool.Put(buf) - - err := json.NewEncoder(buf).Encode(in) - if err != nil { - return err - } - - url := s.server + path - req, err := retryablehttp.NewRequest("POST", url, buf) - if err != nil { - return err - } - req = req.WithContext(ctx) - req.Header.Set("X-Drone-Token", s.token) - - res, err := s.client.Do(req) - if res != nil { - defer res.Body.Close() - } - if err != nil { - return err - } - - // Check the response for a 409 conflict. This indicates an - // optimistic lock error, in which case multiple clients may - // be attempting to update the same record. Convert this error - // code to a proper error. - if res.StatusCode == 409 { - return db.ErrOptimisticLock - } - - // Check the response for a 524 deadline exceeded. This is a - // custom status code that indicates the server canceled the - // request due to an internal polling timeout (this is normal). - if res.StatusCode == 524 { - return context.DeadlineExceeded - } - - if res.StatusCode > 299 { - body, _ := ioutil.ReadAll(res.Body) - return &serverError{ - Status: res.StatusCode, - Message: string(body), - } - } - - // Check the response for a 204 no content. 
This indicates - // the response body is empty and should be discarded. - if res.StatusCode == 204 || out == nil { - return nil - } - - return json.NewDecoder(res.Body).Decode(out) -} - -func (s *Client) upload(ctx context.Context, path string, body interface{}) error { - url := s.server + path - req, err := retryablehttp.NewRequest("POST", url, body) - if err != nil { - return err - } - req = req.WithContext(ctx) - req.Header.Set("X-Drone-Token", s.token) - - res, err := s.client.Do(req) - if err != nil { - return err - } - defer res.Body.Close() - - if res.StatusCode > 299 { - body, _ := ioutil.ReadAll(res.Body) - return &serverError{ - Status: res.StatusCode, - Message: string(body), - } - } - return nil -} - -// helper function returns true if the http.Request should be -// retried based on error and http status code. This function -// is used by the retryablehttp.Client. -func retryFunc(ctx context.Context, resp *http.Response, err error) (bool, error) { - // do not retry on context.Canceled or context.DeadlineExceeded - if ctx.Err() != nil { - return false, ctx.Err() - } - if resp != nil { - // Check the path to prevent retries when writing to the log - // stream. This stream is temporary and ephemeral, and losing - // log lines will not negatively impact the final persisted - // log entries. - if resp.Request.URL.Path == "/rpc/v1/write" { - return false, err - } - // Check the response code. We retry on 500-range responses - // to allow the server time to recover, as 500's are typically - // not permanent errors and may relate to outages on the - // server side. - if resp.StatusCode >= 500 { - return true, nil - } - } - if err != nil { - return true, err - } - return false, nil -} diff --git a/operator/manager/rpc/client_test.go b/operator/manager/rpc/client_test.go deleted file mode 100644 index 3cb4a468da..0000000000 --- a/operator/manager/rpc/client_test.go +++ /dev/null @@ -1,506 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package rpc - -import ( - "bytes" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/operator/manager" - "github.com/drone/drone/store/shared/db" - - "github.com/google/go-cmp/cmp" - "github.com/h2non/gock" -) - -func TestRequest(t *testing.T) { - defer gock.Off() - - gock.New("http://drone.company.com"). - Post("/rpc/v1/request"). - MatchHeader("X-Drone-Token", "correct-horse-battery-staple"). - BodyString(`{"Request":{"kind":"","type":"","os":"linux","arch":"amd64","variant":"","kernel":""}}`). - Reply(200). - Type("application/json"). 
- BodyString(`{"id":1,"build_id":2,"number":3,"name":"build","status":"pending","errignore":false,"exit_code":0,"machine":"localhost","os":"linux","arch":"amd64","started":0,"stopped":0,"created":0,"updated":0,"version":1,"on_success":false,"on_failure":false}`) - - want := &core.Stage{ - ID: 1, - BuildID: 2, - Number: 3, - Name: "build", - Machine: "localhost", - OS: "linux", - Arch: "amd64", - Status: core.StatusPending, - ExitCode: 0, - Version: 1, - } - - client := NewClient("http://drone.company.com", "correct-horse-battery-staple") - gock.InterceptClient(client.client.HTTPClient) - got, err := client.Request(noContext, &manager.Request{OS: "linux", Arch: "amd64"}) - if err != nil { - t.Error(err) - } - - if diff := cmp.Diff(want, got); diff != "" { - t.Errorf(diff) - } - - if gock.IsPending() { - t.Errorf("Unfinished requests") - } -} - -func TestAccept(t *testing.T) { - defer gock.Off() - - gock.New("http://drone.company.com"). - Post("/rpc/v1/accept"). - MatchHeader("X-Drone-Token", "correct-horse-battery-staple"). - BodyString(`{"Stage":1,"Machine":"localhost"}`). - Reply(204) - - client := NewClient("http://drone.company.com", "correct-horse-battery-staple") - gock.InterceptClient(client.client.HTTPClient) - _, err := client.Accept(noContext, 1, "localhost") - if err != nil { - t.Error(err) - } - - if gock.IsPending() { - t.Errorf("Unfinished requests") - } -} - -func TestNetrc(t *testing.T) { - defer gock.Off() - - gock.New("http://drone.company.com"). - Post("/rpc/v1/netrc"). - MatchHeader("X-Drone-Token", "correct-horse-battery-staple"). - BodyString(`{"Repo":1}`). - Reply(200). - Type("application/json"). - BodyString(`{"machine":"github.com","login":"octocat","password":"12345"}`) - - client := NewClient("http://drone.company.com", "correct-horse-battery-staple") - gock.InterceptClient(client.client.HTTPClient) - got, err := client.Netrc(noContext, 1) - if err != nil { - t.Error(err) - } - - want := &core.Netrc{ - Password: "12345", - Login: "octocat", - Machine: "github.com", - } - if diff := cmp.Diff(got, want); diff != "" { - t.Errorf(diff) - } - - if gock.IsPending() { - t.Errorf("Unfinished requests") - } -} - -func TestDetails(t *testing.T) { - defer gock.Off() - - gock.New("http://drone.company.com"). - Post("/rpc/v1/details"). - MatchHeader("X-Drone-Token", "correct-horse-battery-staple"). - BodyString(`{"Stage":1}`). - Reply(200). - Type("application/json"). - BodyString(`{"Secret":"passphrase", "Context":{"repository":{}}}`) - - // TODO(bradrydzewski) return a mock core.BuildContext - // and validate the unmarshaled results. - - client := NewClient("http://drone.company.com", "correct-horse-battery-staple") - gock.InterceptClient(client.client.HTTPClient) - out, err := client.Details(noContext, 1) - if err != nil { - t.Error(err) - return - } - - if out.Repo.Secret != "passphrase" { - t.Errorf("Expect repository passphrase encoded in json response") - } - - if gock.IsPending() { - t.Errorf("Unfinished requests") - } -} - -func TestBefore(t *testing.T) { - defer gock.Off() - - gock.New("http://drone.company.com"). - Post("/rpc/v1/before"). - MatchHeader("X-Drone-Token", "correct-horse-battery-staple"). - BodyString(`{"id":1,"step_id":2,"number":3,"name":"build","status":"pending","exit_code":0,"version":1}`). - Reply(200). - Type("application/json"). 
- BodyString(`{"id":1,"step_id":2,"number":3,"name":"build","status":"pending","exit_code":0,"version":2}`) - - before := &core.Step{ - ID: 1, - StageID: 2, - Number: 3, - Name: "build", - Status: core.StatusPending, - ExitCode: 0, - Version: 1, - } - - client := NewClient("http://drone.company.com", "correct-horse-battery-staple") - gock.InterceptClient(client.client.HTTPClient) - err := client.Before(noContext, before) - if err != nil { - t.Error(err) - } - - after := &core.Step{ - ID: 1, - StageID: 2, - Number: 3, - Name: "build", - Status: core.StatusPending, - ExitCode: 0, - Version: 2, - } - - if diff := cmp.Diff(before, after); diff != "" { - t.Errorf(diff) - } - - if gock.IsPending() { - t.Errorf("Unfinished requests") - } -} - -func TestAfter(t *testing.T) { - defer gock.Off() - - gock.New("http://drone.company.com"). - Post("/rpc/v1/after"). - MatchHeader("X-Drone-Token", "correct-horse-battery-staple"). - BodyString(`{"id":1,"step_id":2,"number":3,"name":"build","status":"failure","exit_code":1,"version":2}`). - Reply(200). - Type("application/json"). - BodyString(`{"id":1,"step_id":2,"number":3,"name":"build","status":"failure","exit_code":1,"version":3}`) - - before := &core.Step{ - ID: 1, - StageID: 2, - Number: 3, - Name: "build", - Status: core.StatusFailing, - ExitCode: 1, - Version: 2, - } - - client := NewClient("http://drone.company.com", "correct-horse-battery-staple") - gock.InterceptClient(client.client.HTTPClient) - err := client.After(noContext, before) - if err != nil { - t.Error(err) - } - - after := &core.Step{ - ID: 1, - StageID: 2, - Number: 3, - Name: "build", - Status: core.StatusFailing, - ExitCode: 1, - Version: 3, - } - - if diff := cmp.Diff(before, after); diff != "" { - t.Errorf(diff) - } - - if gock.IsPending() { - t.Errorf("Unfinished requests") - } -} - -func TestBeforeAll(t *testing.T) { - defer gock.Off() - - gock.New("http://drone.company.com"). - Post("/rpc/v1/beforeAll"). - MatchHeader("X-Drone-Token", "correct-horse-battery-staple"). - BodyString(`{"id":1,"repo_id":0,"build_id":2,"number":3,"name":"build","status":"pending","errignore":false,"exit_code":0,"machine":"localhost","os":"linux","arch":"amd64","started":0,"stopped":0,"created":0,"updated":0,"version":1,"on_success":false,"on_failure":false}`). - Reply(200). - Type("application/json"). - BodyString(`{"id":1,"repo_id":0,"build_id":2,"number":3,"name":"build","status":"pending","errignore":false,"exit_code":0,"machine":"localhost","os":"linux","arch":"amd64","started":0,"stopped":0,"created":0,"updated":0,"version":2,"on_success":false,"on_failure":false}`) - - before := &core.Stage{ - ID: 1, - BuildID: 2, - Number: 3, - Name: "build", - Machine: "localhost", - OS: "linux", - Arch: "amd64", - Status: core.StatusPending, - ExitCode: 0, - Version: 1, - } - - client := NewClient("http://drone.company.com", "correct-horse-battery-staple") - gock.InterceptClient(client.client.HTTPClient) - err := client.BeforeAll(noContext, before) - if err != nil { - t.Error(err) - } - - after := &core.Stage{ - ID: 1, - BuildID: 2, - Number: 3, - Name: "build", - Machine: "localhost", - OS: "linux", - Arch: "amd64", - Status: core.StatusPending, - ExitCode: 0, - Version: 2, - } - - if diff := cmp.Diff(before, after); diff != "" { - t.Errorf(diff) - } - - if gock.IsPending() { - t.Errorf("Unfinished requests") - } -} - -func TestAfterAll(t *testing.T) { - defer gock.Off() - - gock.New("http://drone.company.com"). - Post("/rpc/v1/afterAll"). - MatchHeader("X-Drone-Token", "correct-horse-battery-staple"). 
- BodyString(`{"id":1,"repo_id":0,"build_id":2,"number":3,"name":"build","status":"pending","errignore":false,"exit_code":0,"machine":"localhost","os":"linux","arch":"amd64","started":0,"stopped":0,"created":0,"updated":0,"version":1,"on_success":false,"on_failure":false}`). - Reply(200). - Type("application/json"). - BodyString(`{"id":1,"repo_id":0,"build_id":2,"number":3,"name":"build","status":"pending","errignore":false,"exit_code":0,"machine":"localhost","os":"linux","arch":"amd64","started":0,"stopped":0,"created":0,"updated":0,"version":2,"on_success":false,"on_failure":false}`) - - before := &core.Stage{ - ID: 1, - BuildID: 2, - Number: 3, - Name: "build", - Machine: "localhost", - OS: "linux", - Arch: "amd64", - Status: core.StatusPending, - ExitCode: 0, - Version: 1, - } - - client := NewClient("http://drone.company.com", "correct-horse-battery-staple") - gock.InterceptClient(client.client.HTTPClient) - err := client.AfterAll(noContext, before) - if err != nil { - t.Error(err) - } - - after := &core.Stage{ - ID: 1, - BuildID: 2, - Number: 3, - Name: "build", - Machine: "localhost", - OS: "linux", - Arch: "amd64", - Status: core.StatusPending, - ExitCode: 0, - Version: 2, - } - - if diff := cmp.Diff(before, after); diff != "" { - t.Errorf(diff) - } - - if gock.IsPending() { - t.Errorf("Unfinished requests") - } -} - -func TestBefore_OptimisticLock(t *testing.T) { - defer gock.Off() - - gock.New("http://drone.company.com"). - Post("/rpc/v1/before"). - Reply(409) - - client := NewClient("http://drone.company.com", "correct-horse-battery-staple") - gock.InterceptClient(client.client.HTTPClient) - err := client.Before(noContext, new(core.Step)) - if err != db.ErrOptimisticLock { - t.Errorf("Want optimistic lock error") - } - if gock.IsPending() { - t.Errorf("Unfinished requests") - } -} - -func TestAfter_OptimisticLock(t *testing.T) { - defer gock.Off() - - gock.New("http://drone.company.com"). - Post("/rpc/v1/after"). - Reply(409) - - client := NewClient("http://drone.company.com", "correct-horse-battery-staple") - gock.InterceptClient(client.client.HTTPClient) - err := client.After(noContext, new(core.Step)) - if err != db.ErrOptimisticLock { - t.Errorf("Want optimistic lock error") - } - if gock.IsPending() { - t.Errorf("Unfinished requests") - } -} - -func TestBeforeAll_OptimisticLock(t *testing.T) { - defer gock.Off() - - gock.New("http://drone.company.com"). - Post("/rpc/v1/beforeAll"). - Reply(409) - - client := NewClient("http://drone.company.com", "correct-horse-battery-staple") - gock.InterceptClient(client.client.HTTPClient) - err := client.BeforeAll(noContext, new(core.Stage)) - if err != db.ErrOptimisticLock { - t.Errorf("Want optimistic lock error") - } - if gock.IsPending() { - t.Errorf("Unfinished requests") - } -} - -func TestAfterAll_OptimisticLock(t *testing.T) { - defer gock.Off() - - gock.New("http://drone.company.com"). - Post("/rpc/v1/afterAll"). - Reply(409) - - client := NewClient("http://drone.company.com", "correct-horse-battery-staple") - gock.InterceptClient(client.client.HTTPClient) - err := client.AfterAll(noContext, new(core.Stage)) - if err != db.ErrOptimisticLock { - t.Errorf("Want optimistic lock error") - } - if gock.IsPending() { - t.Errorf("Unfinished requests") - } -} - -func TestWatch(t *testing.T) { - defer gock.Off() - - gock.New("http://drone.company.com"). - Post("/rpc/v1/watch"). - MatchHeader("X-Drone-Token", "correct-horse-battery-staple"). - BodyString(`{"Build":1}`). - Reply(200). - Type("application/json"). 
- BodyString(`{"Done":true}`) - - client := NewClient("http://drone.company.com", "correct-horse-battery-staple") - gock.InterceptClient(client.client.HTTPClient) - done, err := client.Watch(noContext, 1) - if err != nil { - t.Error(err) - } - - if !done { - t.Errorf("Want done=true, got false") - } - - if gock.IsPending() { - t.Errorf("Unfinished requests") - } -} - -func TestWrite(t *testing.T) { - defer gock.Off() - - gock.New("http://drone.company.com"). - Post("/rpc/v1/write"). - MatchHeader("X-Drone-Token", "correct-horse-battery-staple"). - BodyString(`{"pos":1,"out":"whoami","time":0}`). - Reply(204) - - client := NewClient("http://drone.company.com", "correct-horse-battery-staple") - gock.InterceptClient(client.client.HTTPClient) - err := client.Write(noContext, 1, &core.Line{Number: 1, Message: "whoami", Timestamp: 0}) - if err != nil { - t.Error(err) - } - - if gock.IsPending() { - t.Errorf("Unfinished requests") - } -} - -func TestUpload(t *testing.T) { - defer gock.Off() - - buf := bytes.NewBufferString(`[{"pos":1,"out":"whoami","time":0}]`) - - gock.New("http://drone.company.com"). - Post("/rpc/v1/upload"). - MatchParam("id", "1"). - MatchHeader("X-Drone-Token", "correct-horse-battery-staple"). - BodyString(`[{"pos":1,"out":"whoami","time":0}]`). - Reply(200) - - client := NewClient("http://drone.company.com", "correct-horse-battery-staple") - gock.InterceptClient(client.client.HTTPClient) - err := client.Upload(noContext, 1, buf) - if err != nil { - t.Error(err) - } - - if gock.IsPending() { - t.Errorf("Unfinished requests") - } -} - -// func xTestRetrySend(t *testing.T) { -// defer gock.Off() - -// gock.New("http://drone.company.com"). -// Post("/rpc/v1/write"). -// Times(5). -// Reply(http.StatusBadGateway) - -// client := NewClient("http://drone.company.com", "correct-horse-battery-staple").(*Client) -// err := client.retrySend("http://drone.company.com/rpc/v1/write", nil, nil) -// if serr, ok := err.(*serverError); !ok || serr.Status != http.StatusBadGateway { -// t.Errorf("Want bad gateway error, got %d", serr.Status) -// } - -// if gock.IsPending() { -// t.Errorf("Unfinished requests") -// } -// } diff --git a/operator/manager/rpc/error.go b/operator/manager/rpc/error.go deleted file mode 100644 index c742b53165..0000000000 --- a/operator/manager/rpc/error.go +++ /dev/null @@ -1,16 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package rpc - -type serverError struct { - Status int - Message string -} - -func (s *serverError) Error() string { - return s.Message -} diff --git a/operator/manager/rpc/server.go b/operator/manager/rpc/server.go deleted file mode 100644 index d228dcbbb7..0000000000 --- a/operator/manager/rpc/server.go +++ /dev/null @@ -1,286 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package rpc - -import ( - "context" - "encoding/json" - "io" - "net/http" - "strconv" - "time" - - "github.com/drone/drone/operator/manager" - "github.com/drone/drone/store/shared/db" -) - -// default http request timeout -var defaultTimeout = time.Second * 30 - -var noContext = context.Background() - -// Server is an rpc handler that enables remote interaction -// between the server and controller using the http transport. 
-type Server struct { - manager manager.BuildManager - secret string -} - -// NewServer returns a new rpc server that enables remote -// interaction with the build controller using the http transport. -func NewServer(manager manager.BuildManager, secret string) *Server { - return &Server{ - manager: manager, - secret: secret, - } -} - -func (s *Server) ServeHTTP(w http.ResponseWriter, r *http.Request) { - if s.secret == "" { - w.WriteHeader(401) // not found - return - } - if r.Header.Get("X-Drone-Token") != s.secret { - w.WriteHeader(401) // not authorized - return - } - switch r.URL.Path { - case "/rpc/v1/write": - s.handleWrite(w, r) - case "/rpc/v1/request": - s.handleRequest(w, r) - case "/rpc/v1/accept": - s.handleAccept(w, r) - case "/rpc/v1/netrc": - s.handleNetrc(w, r) - case "/rpc/v1/details": - s.handleDetails(w, r) - case "/rpc/v1/before": - s.handleBefore(w, r) - case "/rpc/v1/after": - s.handleAfter(w, r) - case "/rpc/v1/beforeAll": - s.handleBeforeAll(w, r) - case "/rpc/v1/afterAll": - s.handleAfterAll(w, r) - case "/rpc/v1/watch": - s.handleWatch(w, r) - case "/rpc/v1/upload": - s.handleUpload(w, r) - default: - w.WriteHeader(404) - } -} - -func (s *Server) handleRequest(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - ctx, cancel := context.WithTimeout(ctx, defaultTimeout) - defer cancel() - - in := &requestRequest{} - err := json.NewDecoder(r.Body).Decode(in) - if err != nil { - writeBadRequest(w, err) - return - } - stage, err := s.manager.Request(ctx, in.Request) - if err != nil { - writeError(w, err) - return - } - json.NewEncoder(w).Encode(stage) -} - -func (s *Server) handleAccept(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - in := &acceptRequest{} - err := json.NewDecoder(r.Body).Decode(in) - if err != nil { - writeBadRequest(w, err) - return - } - out, err := s.manager.Accept(ctx, in.Stage, in.Machine) - if err != nil { - writeError(w, err) - return - } - json.NewEncoder(w).Encode(out) -} - -func (s *Server) handleNetrc(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - in := &netrcRequest{} - err := json.NewDecoder(r.Body).Decode(in) - if err != nil { - writeBadRequest(w, err) - return - } - netrc, err := s.manager.Netrc(ctx, in.Repo) - if err != nil { - writeError(w, err) - return - } - json.NewEncoder(w).Encode(netrc) -} - -func (s *Server) handleDetails(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - in := &detailsRequest{} - err := json.NewDecoder(r.Body).Decode(in) - if err != nil { - writeBadRequest(w, err) - return - } - build, err := s.manager.Details(ctx, in.Stage) - if err != nil { - writeError(w, err) - return - } - out := &buildContextToken{ - Secret: build.Repo.Secret, - Context: build, - } - json.NewEncoder(w).Encode(out) -} - -func (s *Server) handleBefore(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - in := &stepRequest{} - err := json.NewDecoder(r.Body).Decode(in) - if err != nil { - writeBadRequest(w, err) - return - } - err = s.manager.Before(ctx, in.Step) - if err != nil { - writeError(w, err) - return - } - json.NewEncoder(w).Encode(in.Step) -} - -func (s *Server) handleAfter(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - in := &stepRequest{} - err := json.NewDecoder(r.Body).Decode(in) - if err != nil { - writeBadRequest(w, err) - return - } - err = s.manager.After(ctx, in.Step) - if err != nil { - writeError(w, err) - return - } - json.NewEncoder(w).Encode(in.Step) -} - -func (s *Server) handleBeforeAll(w http.ResponseWriter, r *http.Request) 
{ - ctx := r.Context() - in := &stageRequest{} - err := json.NewDecoder(r.Body).Decode(in) - if err != nil { - writeBadRequest(w, err) - return - } - err = s.manager.BeforeAll(ctx, in.Stage) - if err != nil { - writeError(w, err) - return - } - json.NewEncoder(w).Encode(in.Stage) -} - -func (s *Server) handleAfterAll(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - in := &stageRequest{} - err := json.NewDecoder(r.Body).Decode(in) - if err != nil { - writeBadRequest(w, err) - return - } - err = s.manager.AfterAll(ctx, in.Stage) - if err != nil { - writeError(w, err) - return - } - json.NewEncoder(w).Encode(in.Stage) -} - -func (s *Server) handleWrite(w http.ResponseWriter, r *http.Request) { - in := writePool.Get().(*writeRequest) - in.Line = nil - in.Step = 0 - - err := json.NewDecoder(r.Body).Decode(in) - if err != nil { - writeBadRequest(w, err) - return - } - err = s.manager.Write(noContext, in.Step, in.Line) - if err != nil { - writeError(w, err) - return - } - w.WriteHeader(http.StatusNoContent) - writePool.Put(in) -} - -func (s *Server) handleUpload(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - in := r.FormValue("id") - id, err := strconv.ParseInt(in, 10, 64) - if err != nil { - writeBadRequest(w, err) - return - } - err = s.manager.Upload(ctx, id, r.Body) - if err != nil { - writeError(w, err) - return - } - w.WriteHeader(http.StatusNoContent) -} - -func (s *Server) handleWatch(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - ctx, cancel := context.WithTimeout(ctx, defaultTimeout) - defer cancel() - - in := &watchRequest{} - err := json.NewDecoder(r.Body).Decode(in) - if err != nil { - writeBadRequest(w, err) - return - } - done, err := s.manager.Watch(ctx, in.Build) - if err != nil { - writeError(w, err) - return - } - json.NewEncoder(w).Encode(&watchResponse{ - Done: done, - }) -} - -func writeBadRequest(w http.ResponseWriter, err error) { - w.WriteHeader(500) // should retry - io.WriteString(w, err.Error()) -} - -func writeError(w http.ResponseWriter, err error) { - if err == context.DeadlineExceeded { - w.WriteHeader(524) // should retry - } else if err == context.Canceled { - w.WriteHeader(524) // should retry - } else if err == db.ErrOptimisticLock { - w.WriteHeader(409) // should abort - } else { - w.WriteHeader(400) // should fail - } - io.WriteString(w, err.Error()) -} diff --git a/operator/manager/rpc/server_oss.go b/operator/manager/rpc/server_oss.go deleted file mode 100644 index 6d068b6f46..0000000000 --- a/operator/manager/rpc/server_oss.go +++ /dev/null @@ -1,101 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build oss - -package rpc - -import ( - "context" - "errors" - "io" - "net/http" - - "github.com/drone/drone/core" - "github.com/drone/drone/operator/manager" -) - -// Server is a no-op rpc server. -type Server struct { - manager manager.BuildManager - secret string -} - -// NewServer returns a no-op rpc server. 
-func NewServer(manager.BuildManager, string) *Server { - return &Server{} -} - -// Request requests the next available build stage for execution. -func (Server) Request(ctx context.Context, args *manager.Request) (*core.Stage, error) { - return nil, errors.New("not implemented") -} - -// Accept accepts the build stage for execution. -func (Server) Accept(ctx context.Context, stage int64, machine string) error { - return errors.New("not implemented") -} - -// Netrc returns a valid netrc for execution. -func (Server) Netrc(ctx context.Context, repo int64) (*core.Netrc, error) { - return nil, errors.New("not implemented") -} - -// Details fetches build details -func (Server) Details(ctx context.Context, stage int64) (*manager.Context, error) { - return nil, errors.New("not implemented") -} - -// Before signals the build step is about to start. -func (Server) Before(ctx context.Context, step *core.Step) error { - return errors.New("not implemented") -} - -// After signals the build step is complete. -func (Server) After(ctx context.Context, step *core.Step) error { - return errors.New("not implemented") -} - -// Before signals the build stage is about to start. -func (Server) BeforeAll(ctx context.Context, stage *core.Stage) error { - return errors.New("not implemented") -} - -// After signals the build stage is complete. -func (Server) AfterAll(ctx context.Context, stage *core.Stage) error { - return errors.New("not implemented") -} - -// Watch watches for build cancellation requests. -func (Server) Watch(ctx context.Context, stage int64) (bool, error) { - return false, errors.New("not implemented") -} - -// Write writes a line to the build logs -func (Server) Write(ctx context.Context, step int64, line *core.Line) error { - return errors.New("not implemented") -} - -// Upload uploads the full logs -func (Server) Upload(ctx context.Context, step int64, r io.Reader) error { - return errors.New("not implemented") -} - -// UploadBytes uploads the full logs -func (Server) UploadBytes(ctx context.Context, step int64, b []byte) error { - return errors.New("not implemented") -} - -// ServeHTTP is an empty handler. -func (Server) ServeHTTP(w http.ResponseWriter, r *http.Request) {} diff --git a/operator/manager/rpc/server_test.go b/operator/manager/rpc/server_test.go deleted file mode 100644 index 008899758a..0000000000 --- a/operator/manager/rpc/server_test.go +++ /dev/null @@ -1,7 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package rpc diff --git a/operator/manager/rpc/types.go b/operator/manager/rpc/types.go deleted file mode 100644 index 1ca8d97451..0000000000 --- a/operator/manager/rpc/types.go +++ /dev/null @@ -1,67 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -// +build !oss - -package rpc - -import ( - "sync" - - "github.com/drone/drone/core" - "github.com/drone/drone/operator/manager" -) - -type requestRequest struct { - Request *manager.Request -} - -type acceptRequest struct { - Stage int64 - Machine string -} - -type netrcRequest struct { - Repo int64 -} - -type detailsRequest struct { - Stage int64 -} - -type stageRequest struct { - Stage *core.Stage -} - -type stepRequest struct { - Step *core.Step -} - -type writeRequest struct { - Step int64 - Line *core.Line -} - -type watchRequest struct { - Build int64 -} - -type watchResponse struct { - Done bool -} - -type buildContextToken struct { - Secret string - Context *manager.Context -} - -type errorWrapper struct { - Message string -} - -var writePool = sync.Pool{ - New: func() interface{} { - return &writeRequest{} - }, -} diff --git a/operator/manager/rpc2/client.go b/operator/manager/rpc2/client.go deleted file mode 100644 index cf1892a300..0000000000 --- a/operator/manager/rpc2/client.go +++ /dev/null @@ -1,7 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package rpc2 diff --git a/operator/manager/rpc2/handler.go b/operator/manager/rpc2/handler.go deleted file mode 100644 index e015615d4c..0000000000 --- a/operator/manager/rpc2/handler.go +++ /dev/null @@ -1,321 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -/* - -/rpc/v2/stage POST (request) -/rpc/v2/stage/{stage}?machine= POST (accept, details) -/rpc/v2/stage/{stage} PUT (beforeAll, afterAll) -/rpc/v2/stage/{stage}/steps/{step} PUT (before, after) -/rpc/v2/build/{build}/watch POST (watch) -/rpc/v2/stage/{stage}/logs/batch POST (batch) -/rpc/v2/stage/{stage}/logs/upload POST (upload) - -*/ - -package rpc2 - -import ( - "context" - "encoding/json" - "io" - "net/http" - "strconv" - "time" - - "github.com/go-chi/chi" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/render" - "github.com/drone/drone/operator/manager" - "github.com/drone/drone/store/shared/db" -) - -// default http request timeout -var defaultTimeout = time.Second * 30 - -var noContext = context.Background() - -// HandleJoin returns an http.HandlerFunc that makes an -// http.Request to join the cluster. -// -// POST /rpc/v2/nodes/:machine -func HandleJoin() http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - writeOK(w) // this is a no-op - } -} - -// HandleLeave returns an http.HandlerFunc that makes an -// http.Request to leave the cluster. -// -// DELETE /rpc/v2/nodes/:machine -func HandleLeave() http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - writeOK(w) // this is a no-op - } -} - -// HandlePing returns an http.HandlerFunc that makes an -// http.Request to ping the server and confirm connectivity. -// -// GET /rpc/v2/ping -func HandlePing() http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - writeOK(w) // this is a no-op - } -} - -// HandleRequest returns an http.HandlerFunc that processes an -// http.Request to request a stage from the queue for execution. 
-// -// POST /rpc/v2/stage -func HandleRequest(m manager.BuildManager) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - ctx, cancel := context.WithTimeout(ctx, defaultTimeout) - defer cancel() - - req := new(manager.Request) - err := json.NewDecoder(r.Body).Decode(req) - if err != nil { - writeError(w, err) - return - } - stage, err := m.Request(ctx, req) - if err != nil { - writeError(w, err) - } else { - writeJSON(w, stage) - } - } -} - -// HandleAccept returns an http.HandlerFunc that processes an -// http.Request to accept ownership of the stage. -// -// POST /rpc/v2/stage/{stage}?machine= -func HandleAccept(m manager.BuildManager) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - stage, _ := strconv.ParseInt( - chi.URLParam(r, "stage"), 10, 64) - - out, err := m.Accept(noContext, stage, r.FormValue("machine")) - if err != nil { - writeError(w, err) - } else { - writeJSON(w, out) - } - } -} - -// HandleInfo returns an http.HandlerFunc that processes an -// http.Request to get the build details. -// -// POST /rpc/v2/build/{build} -func HandleInfo(m manager.BuildManager) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - stage, _ := strconv.ParseInt( - chi.URLParam(r, "stage"), 10, 64) - - res, err := m.Details(noContext, stage) - if err != nil { - writeError(w, err) - return - } - - netrc, err := m.Netrc(noContext, res.Repo.ID) - if err != nil { - writeError(w, err) - return - } - - writeJSON(w, &details{ - Context: res, - Netrc: netrc, - Repo: &repository{ - Repository: res.Repo, - Secret: res.Repo.Secret, - }, - }) - } -} - -// HandleUpdateStage returns an http.HandlerFunc that processes -// an http.Request to update a stage. -// -// PUT /rpc/v2/stage/{stage} -func HandleUpdateStage(m manager.BuildManager) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - dst := new(core.Stage) - err := json.NewDecoder(r.Body).Decode(dst) - if err != nil { - writeError(w, err) - return - } - - if dst.Status == core.StatusPending || - dst.Status == core.StatusRunning { - err = m.BeforeAll(noContext, dst) - } else { - err = m.AfterAll(noContext, dst) - } - - if err != nil { - writeError(w, err) - } else { - writeJSON(w, dst) - } - } -} - -// HandleUpdateStep returns an http.HandlerFunc that processes -// an http.Request to update a step. -// -// POST /rpc/v2/step/{step} -func HandleUpdateStep(m manager.BuildManager) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - dst := new(core.Step) - err := json.NewDecoder(r.Body).Decode(dst) - if err != nil { - writeError(w, err) - return - } - - if dst.Status == core.StatusPending || - dst.Status == core.StatusRunning { - err = m.Before(noContext, dst) - } else { - err = m.After(noContext, dst) - } - - if err != nil { - writeError(w, err) - } else { - writeJSON(w, dst) - } - } -} - -// HandleWatch returns an http.HandlerFunc that accepts a -// blocking http.Request that watches a build for cancellation -// events. 
-// -// GET /rpc/v2/build/{build}/watch -func HandleWatch(m manager.BuildManager) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - ctx, cancel := context.WithTimeout(ctx, defaultTimeout) - defer cancel() - - build, _ := strconv.ParseInt( - chi.URLParamFromCtx(ctx, "build"), 10, 64) - - _, err := m.Watch(ctx, build) - if err != nil { - writeError(w, err) - } else { - writeOK(w) - } - } -} - -// HandleLogBatch returns an http.HandlerFunc that accepts an -// http.Request to submit a stream of logs to the system. -// -// POST /rpc/v2/step/{step}/logs/batch -func HandleLogBatch(m manager.BuildManager) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - step, _ := strconv.ParseInt( - chi.URLParam(r, "step"), 10, 64) - - lines := []*core.Line{} - err := json.NewDecoder(r.Body).Decode(&lines) - if err != nil { - writeError(w, err) - return - } - - // TODO(bradrydzewski) modify the write function to - // accept a slice of lines. - for _, line := range lines { - err := m.Write(noContext, step, line) - if err != nil { - writeError(w, err) - return - } - } - - writeOK(w) - } -} - -// HandleLogUpload returns an http.HandlerFunc that accepts an -// http.Request to upload and persist logs for a pipeline stage. -// -// POST /rpc/v2/step/{step}/logs/upload -func HandleLogUpload(m manager.BuildManager) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - step, _ := strconv.ParseInt( - chi.URLParam(r, "step"), 10, 64) - - err := m.Upload(noContext, step, r.Body) - if err != nil { - writeError(w, err) - } else { - writeOK(w) - } - } -} - -// HandleCardUpload returns an http.HandlerFunc that accepts an -// http.Request to upload and persist a card for a pipeline step. -// -// POST /rpc/v2/step/{step}/card -func HandleCardUpload(m manager.BuildManager) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - step, _ := strconv.ParseInt( - chi.URLParam(r, "step"), 10, 64) - - in := new(core.CardInput) - err := json.NewDecoder(r.Body).Decode(in) - if err != nil { - render.BadRequest(w, err) - return - } - - err = m.UploadCard(noContext, step, in) - if err != nil { - writeError(w, err) - } else { - writeOK(w) - } - } -} - -// write a 200 Status OK to the response body. -func writeJSON(w http.ResponseWriter, v interface{}) { - json.NewEncoder(w).Encode(v) -} - -// write a 200 Status OK to the response body. -func writeOK(w http.ResponseWriter) { - w.WriteHeader(http.StatusOK) -} - -// write an error message to the response body. -func writeError(w http.ResponseWriter, err error) { - if err == context.DeadlineExceeded { - w.WriteHeader(204) // should retry - } else if err == context.Canceled { - w.WriteHeader(204) // should retry - } else if err == db.ErrOptimisticLock { - w.WriteHeader(409) // should abort - } else { - w.WriteHeader(500) // should fail - } - io.WriteString(w, err.Error()) -} diff --git a/operator/manager/rpc2/server.go b/operator/manager/rpc2/server.go deleted file mode 100644 index f5d587f346..0000000000 --- a/operator/manager/rpc2/server.go +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -// +build !oss - -package rpc2 - -import ( - "net/http" - - "github.com/drone/drone/operator/manager" - - "github.com/go-chi/chi" - "github.com/go-chi/chi/middleware" -) - -// Server wraps the chi Router in a custom type for wire -// injection purposes. -type Server http.Handler - -// NewServer returns a new rpc server that enables remote -// interaction with the build controller using the http transport. -func NewServer(manager manager.BuildManager, secret string) Server { - r := chi.NewRouter() - r.Use(middleware.Recoverer) - r.Use(middleware.NoCache) - r.Use(authorization(secret)) - r.Post("/nodes/:machine", HandleJoin()) - r.Delete("/nodes/:machine", HandleLeave()) - r.Post("/ping", HandlePing()) - r.Post("/stage", HandleRequest(manager)) - r.Post("/stage/{stage}", HandleAccept(manager)) - r.Get("/stage/{stage}", HandleInfo(manager)) - r.Put("/stage/{stage}", HandleUpdateStage(manager)) - r.Put("/step/{step}", HandleUpdateStep(manager)) - r.Post("/build/{build}/watch", HandleWatch(manager)) - r.Post("/step/{step}/logs/batch", HandleLogBatch(manager)) - r.Post("/step/{step}/logs/upload", HandleLogUpload(manager)) - r.Post("/step/{step}/card", HandleCardUpload(manager)) - return Server(r) -} - -func authorization(token string) func(http.Handler) http.Handler { - return func(next http.Handler) http.Handler { - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - // prevents system administrators from accidentally - // exposing drone without credentials. - if token == "" { - w.WriteHeader(403) - } else if token == r.Header.Get("X-Drone-Token") { - next.ServeHTTP(w, r) - } else { - w.WriteHeader(401) - } - }) - } -} diff --git a/operator/manager/rpc2/server_oss.go b/operator/manager/rpc2/server_oss.go deleted file mode 100644 index 6a225f50ab..0000000000 --- a/operator/manager/rpc2/server_oss.go +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build oss - -package rpc2 - -import ( - "net/http" - - "github.com/drone/drone/operator/manager" -) - -// Server wraps the chi Router in a custom type for wire -// injection purposes. -type Server http.Handler - -// NewServer returns a new rpc server that enables remote -// interaction with the build controller using the http transport. -func NewServer(manager manager.BuildManager, secret string) Server { - return Server(http.NotFoundHandler()) -} diff --git a/operator/manager/rpc2/types.go b/operator/manager/rpc2/types.go deleted file mode 100644 index f06749b766..0000000000 --- a/operator/manager/rpc2/types.go +++ /dev/null @@ -1,27 +0,0 @@ -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package rpc2 - -// Copyright 2019 Drone.IO Inc. All rights reserved. 
-import ( - "github.com/drone/drone/core" - "github.com/drone/drone/operator/manager" -) - -// details provides the runner with the build details and -// includes all environment data required to execute the build. -type details struct { - *manager.Context - Netrc *core.Netrc `json:"netrc"` - Repo *repository `json:"repository"` -} - -// repository wraps a repository object to include the secret -// when the repository is marshaled to json. -type repository struct { - *core.Repository - Secret string `json:"secret"` -} diff --git a/operator/manager/setup.go b/operator/manager/setup.go deleted file mode 100644 index 3e81555b15..0000000000 --- a/operator/manager/setup.go +++ /dev/null @@ -1,184 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package manager - -import ( - "context" - "encoding/json" - "time" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/shared/db" - - "github.com/hashicorp/go-multierror" - "github.com/sirupsen/logrus" -) - -type setup struct { - Builds core.BuildStore - Events core.Pubsub - Repos core.RepositoryStore - Steps core.StepStore - Stages core.StageStore - Status core.StatusService - Users core.UserStore -} - -func (s *setup) do(ctx context.Context, stage *core.Stage) error { - logger := logrus.WithField("stage.id", stage.ID) - - build, err := s.Builds.Find(noContext, stage.BuildID) - if err != nil { - logger.WithError(err).Warnln("manager: cannot find the build") - return err - } - - repo, err := s.Repos.Find(noContext, build.RepoID) - if err != nil { - logger.WithError(err).WithFields( - logrus.Fields{ - "build.number": build.Number, - "build.id": build.ID, - "stage.id": stage.ID, - "repo.id": build.RepoID, - }, - ).Warnln("manager: cannot find the repository") - return err - } - - logger = logger.WithFields( - logrus.Fields{ - "build.number": build.Number, - "build.id": build.ID, - "stage.id": stage.ID, - "repo.id": build.RepoID, - }, - ) - - // // note that if multiple stages run concurrently it will attempt - // // to create the watcher multiple times. The watcher is responsible - // // for handling multiple concurrent requests and preventing duplication. - // err = s.Watcher.Register(noContext, build.ID) - // if err != nil { - // logger.WithError(err).Warnln("manager: cannot create the watcher") - // return err - // } - - if len(stage.Error) > 500 { - stage.Error = stage.Error[:500] - } - stage.Updated = time.Now().Unix() - err = s.Stages.Update(noContext, stage) - if err != nil { - logger.WithError(err). - WithField("stage.status", stage.Status). - Warnln("manager: cannot update the stage") - return err - } - - for _, step := range stage.Steps { - if len(step.Error) > 500 { - step.Error = step.Error[:500] - } - err := s.Steps.Create(noContext, step) - if err != nil { - logger.WithError(err). - WithField("stage.status", stage.Status). - WithField("step.name", step.Name). - WithField("step.id", step.ID). 
- Warnln("manager: cannot persist the step") - return err - } - } - - updated, err := s.updateBuild(ctx, build) - if err != nil { - logger.WithError(err).Warnln("manager: cannot update the build") - return err - } - - stages, err := s.Stages.ListSteps(noContext, build.ID) - if err != nil { - logger.WithError(err).Warnln("manager: cannot query build stages") - return err - } - - repo.Build = build - repo.Build.Stages = stages - data, _ := json.Marshal(repo) - err = s.Events.Publish(noContext, &core.Message{ - Repository: repo.Slug, - Visibility: repo.Visibility, - Data: data, - }) - if err != nil { - logger.Warnln("manager: cannot publish build event") - } - - if updated { - user, err := s.Users.Find(noContext, repo.UserID) - if err != nil { - logger.WithError(err). - Warnln("manager: cannot find repository owner") - return err - } - - req := &core.StatusInput{ - Repo: repo, - Build: build, - } - err = s.Status.Send(noContext, user, req) - if err != nil { - logger.WithError(err). - Warnln("manager: cannot publish status") - } - } - - return nil -} - -// TODO(bradrydzewski) this should really be encapsulated into a single -// function call that internally uses a database transaction so that we -// can rollback if any operations fail. -func (s *setup) createSteps(ctx context.Context, stage *core.Stage) error { - var errs error - for _, step := range stage.Steps { - err := s.Steps.Create(ctx, step) - if err != nil { - errs = multierror.Append(errs, err) - } - } - return errs -} - -// helper function that updates the build status from pending to running. -// This accounts for the fact that another agent may have already updated -// the build status, which may happen if two stages execute concurrently. -func (s *setup) updateBuild(ctx context.Context, build *core.Build) (bool, error) { - if build.Status != core.StatusPending { - return false, nil - } - build.Started = time.Now().Unix() - build.Updated = time.Now().Unix() - build.Status = core.StatusRunning - err := s.Builds.Update(noContext, build) - if err == db.ErrOptimisticLock { - return false, nil - } - if err != nil { - return false, err - } - return true, nil -} diff --git a/operator/manager/setup_test.go b/operator/manager/setup_test.go deleted file mode 100644 index 553d474810..0000000000 --- a/operator/manager/setup_test.go +++ /dev/null @@ -1,5 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package manager diff --git a/operator/manager/teardown.go b/operator/manager/teardown.go deleted file mode 100644 index ea7fd07180..0000000000 --- a/operator/manager/teardown.go +++ /dev/null @@ -1,354 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package manager - -import ( - "context" - "encoding/json" - "time" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/shared/db" - "github.com/drone/go-scm/scm" - - "github.com/hashicorp/go-multierror" - "github.com/sirupsen/logrus" -) - -type teardown struct { - Builds core.BuildStore - Events core.Pubsub - Logs core.LogStream - Scheduler core.Scheduler - Repos core.RepositoryStore - Steps core.StepStore - Status core.StatusService - Stages core.StageStore - Users core.UserStore - Webhook core.WebhookSender -} - -func (t *teardown) do(ctx context.Context, stage *core.Stage) error { - logger := logrus.WithField("stage.id", stage.ID) - logger.Debugln("manager: stage is complete. teardown") - - build, err := t.Builds.Find(noContext, stage.BuildID) - if err != nil { - logger.WithError(err).Warnln("manager: cannot find the build") - return err - } - - logger = logger.WithFields( - logrus.Fields{ - "build.number": build.Number, - "build.id": build.ID, - "repo.id": build.RepoID, - }, - ) - - repo, err := t.Repos.Find(noContext, build.RepoID) - if err != nil { - logger.WithError(err).Warnln("manager: cannot find the repository") - return err - } - - for _, step := range stage.Steps { - if len(step.Error) > 500 { - step.Error = step.Error[:500] - } - err := t.Steps.Update(noContext, step) - if err != nil { - logger.WithError(err). - WithField("stage.status", stage.Status). - WithField("step.name", step.Name). - WithField("step.id", step.ID). - Warnln("manager: cannot persist the step") - return err - } - } - - if len(stage.Error) > 500 { - stage.Error = stage.Error[:500] - } - - stage.Updated = time.Now().Unix() - err = t.Stages.Update(noContext, stage) - if err != nil { - logger.WithError(err). - Warnln("manager: cannot update the stage") - return err - } - - for _, step := range stage.Steps { - t.Logs.Delete(noContext, step.ID) - } - - stages, err := t.Stages.ListSteps(noContext, build.ID) - if err != nil { - logger.WithError(err).Warnln("manager: cannot get stages") - return err - } - - // - // - // - - err = t.cancelDownstream(ctx, stages) - if err != nil { - logger.WithError(err). - Errorln("manager: cannot cancel downstream builds") - return err - } - - err = t.scheduleDownstream(ctx, stage, stages) - if err != nil { - logger.WithError(err). - Errorln("manager: cannot schedule downstream builds") - return err - } - - // - // - // - - if isBuildComplete(stages) == false { - logger.Debugln("manager: build pending completion of additional stages") - return nil - } - - logger.Debugln("manager: build is finished, teardown") - - build.Status = core.StatusPassing - build.Finished = time.Now().Unix() - for _, sibling := range stages { - if sibling.Status == core.StatusKilled { - build.Status = core.StatusKilled - break - } - if sibling.Status == core.StatusFailing { - build.Status = core.StatusFailing - break - } - if sibling.Status == core.StatusError { - build.Status = core.StatusError - break - } - } - if build.Started == 0 { - build.Started = build.Finished - } - - err = t.Builds.Update(noContext, build) - if err == db.ErrOptimisticLock { - logger.WithError(err). - Warnln("manager: build updated by another goroutine") - return nil - } - if err != nil { - logger.WithError(err). 
- Warnln("manager: cannot update the build") - return err - } - - repo.Build = build - repo.Build.Stages = stages - data, _ := json.Marshal(repo) - err = t.Events.Publish(noContext, &core.Message{ - Repository: repo.Slug, - Visibility: repo.Visibility, - Data: data, - }) - if err != nil { - logger.WithError(err). - Warnln("manager: cannot publish build event") - } - - payload := &core.WebhookData{ - Event: core.WebhookEventBuild, - Action: core.WebhookActionUpdated, - Repo: repo, - Build: build, - } - err = t.Webhook.Send(noContext, payload) - if err != nil { - logger.WithError(err).Warnln("manager: cannot send global webhook") - } - - user, err := t.Users.Find(noContext, repo.UserID) - if err != nil { - logger.WithError(err). - Warnln("manager: cannot find repository owner") - - // this error is insufficient to fail the function, - // however, execution of the function should be halted - // to prevent a nil pointer in subsequent operations. - return nil - } - - req := &core.StatusInput{ - Repo: repo, - Build: build, - } - err = t.Status.Send(noContext, user, req) - if err != nil && err != scm.ErrNotSupported { - logger.WithError(err). - Warnln("manager: cannot publish status") - } - return nil -} - -// cancelDownstream is a helper function that tests for -// downstream stages and cancels them based on the overall -// pipeline state. -func (t *teardown) cancelDownstream( - ctx context.Context, - stages []*core.Stage, -) error { - failed := false - for _, s := range stages { - if s.IsFailed() { - failed = true - } - } - - var errs error - for _, s := range stages { - if s.Status != core.StatusWaiting { - continue - } - - var skip bool - if failed == true && s.OnFailure == false { - skip = true - } - if failed == false && s.OnSuccess == false { - skip = true - } - if skip == false { - continue - } - - if areDepsComplete(s, stages) == false { - continue - } - - logger := logrus.WithFields( - logrus.Fields{ - "stage.id": s.ID, - "stage.on_success": s.OnSuccess, - "stage.on_failure": s.OnFailure, - "stage.is_failure": failed, - "stage.depends_on": s.DependsOn, - }, - ) - logger.Debugln("manager: skipping step") - - s.Status = core.StatusSkipped - s.Started = time.Now().Unix() - s.Stopped = time.Now().Unix() - err := t.Stages.Update(noContext, s) - if err == db.ErrOptimisticLock { - t.resync(ctx, s) - continue - } - if err != nil { - logger.WithError(err). - Warnln("manager: cannot update stage status") - errs = multierror.Append(errs, err) - } - } - return errs -} - -// scheduleDownstream is a helper function that tests for -// downstream stages and schedules stages if all dependencies -// and execution requirements are met. -func (t *teardown) scheduleDownstream( - ctx context.Context, - stage *core.Stage, - stages []*core.Stage, -) error { - - var errs error - for _, sibling := range stages { - if sibling.Status == core.StatusWaiting { - if len(sibling.DependsOn) == 0 { - continue - } - - // PROBLEM: isDep only checks the direct parent - // i think .... 
- // if isDep(stage, sibling) == false { - // continue - // } - if areDepsComplete(sibling, stages) == false { - continue - } - // if isLastDep(stage, sibling, stages) == false { - // continue - // } - - logger := logrus.WithFields( - logrus.Fields{ - "stage.id": sibling.ID, - "stage.name": sibling.Name, - "stage.depends_on": sibling.DependsOn, - }, - ) - logger.Debugln("manager: schedule next stage") - - sibling.Status = core.StatusPending - sibling.Updated = time.Now().Unix() - err := t.Stages.Update(noContext, sibling) - if err == db.ErrOptimisticLock { - t.resync(ctx, sibling) - continue - } - if err != nil { - logger.WithError(err). - Warnln("manager: cannot update stage status") - errs = multierror.Append(errs, err) - } - - err = t.Scheduler.Schedule(noContext, sibling) - if err != nil { - logger.WithError(err). - Warnln("manager: cannot schedule stage") - errs = multierror.Append(errs, err) - } - } - } - return errs -} - -// resync updates the stage from the database. Note that it does -// not update the Version field. This is by design. It prevents -// the current go routine from updating a stage that has been -// updated by another go routine. -func (t *teardown) resync(ctx context.Context, stage *core.Stage) error { - updated, err := t.Stages.Find(ctx, stage.ID) - if err != nil { - return err - } - stage.Status = updated.Status - stage.Error = updated.Error - stage.ExitCode = updated.ExitCode - stage.Machine = updated.Machine - stage.Started = updated.Started - stage.Stopped = updated.Stopped - stage.Created = updated.Created - stage.Updated = updated.Updated - return nil -} diff --git a/operator/manager/teardown_test.go b/operator/manager/teardown_test.go deleted file mode 100644 index 553d474810..0000000000 --- a/operator/manager/teardown_test.go +++ /dev/null @@ -1,5 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package manager diff --git a/operator/manager/updater.go b/operator/manager/updater.go deleted file mode 100644 index c6a6dde71c..0000000000 --- a/operator/manager/updater.go +++ /dev/null @@ -1,100 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package manager - -import ( - "context" - "encoding/json" - - "github.com/drone/drone/core" - - "github.com/sirupsen/logrus" -) - -type updater struct { - Builds core.BuildStore - Events core.Pubsub - Repos core.RepositoryStore - Steps core.StepStore - Stages core.StageStore - Webhook core.WebhookSender -} - -func (u *updater) do(ctx context.Context, step *core.Step) error { - logger := logrus.WithFields( - logrus.Fields{ - "step.status": step.Status, - "step.name": step.Name, - "step.id": step.ID, - }, - ) - - if len(step.Error) > 500 { - step.Error = step.Error[:500] - } - err := u.Steps.Update(noContext, step) - if err != nil { - logger.WithError(err).Warnln("manager: cannot update step") - return err - } - - stage, err := u.Stages.Find(noContext, step.StageID) - if err != nil { - logger.WithError(err).Warnln("manager: cannot find stage") - return nil - } - - build, err := u.Builds.Find(noContext, stage.BuildID) - if err != nil { - logger.WithError(err).Warnln("manager: cannot find build") - return nil - } - - repo, err := u.Repos.Find(noContext, build.RepoID) - if err != nil { - logger.WithError(err).Warnln("manager: cannot find repo") - return nil - } - - stages, err := u.Stages.ListSteps(noContext, build.ID) - if err != nil { - logger.WithError(err).Warnln("manager: cannot list stages") - return nil - } - - repo.Build = build - repo.Build.Stages = stages - data, _ := json.Marshal(repo) - err = u.Events.Publish(noContext, &core.Message{ - Repository: repo.Slug, - Visibility: repo.Visibility, - Data: data, - }) - if err != nil { - logger.WithError(err).Warnln("manager: cannot publish build event") - } - - payload := &core.WebhookData{ - Event: core.WebhookEventBuild, - Action: core.WebhookActionUpdated, - Repo: repo, - Build: build, - } - err = u.Webhook.Send(noContext, payload) - if err != nil { - logger.WithError(err).Warnln("manager: cannot send global webhook") - } - return nil -} diff --git a/operator/manager/util.go b/operator/manager/util.go deleted file mode 100644 index 3c17b8db7c..0000000000 --- a/operator/manager/util.go +++ /dev/null @@ -1,112 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package manager - -import ( - "github.com/drone/drone/core" -) - -func isBuildComplete(stages []*core.Stage) bool { - for _, stage := range stages { - switch stage.Status { - case core.StatusPending, - core.StatusRunning, - core.StatusWaiting, - core.StatusDeclined, - core.StatusBlocked: - return false - } - } - return true -} - -func isLastStage(stage *core.Stage, stages []*core.Stage) bool { - for _, sibling := range stages { - if stage.Number == sibling.Number { - continue - } - if sibling.Updated > stage.Updated { - return false - } else if sibling.Updated == stage.Updated && - sibling.Number > stage.Number { - return false - } - } - return true -} - -func isDep(a *core.Stage, b *core.Stage) bool { - for _, name := range b.DependsOn { - if name == a.Name { - return true - } - } - return false -} - -func areDepsComplete(stage *core.Stage, stages []*core.Stage) bool { - deps := map[string]struct{}{} - for _, dep := range stage.DependsOn { - deps[dep] = struct{}{} - } - for _, sibling := range stages { - if _, ok := deps[sibling.Name]; !ok { - continue - } - if !sibling.IsDone() { - return false - } - } - return true -} - -// helper function returns true if the current stage is the last -// dependency in the tree. -func isLastDep(curr, next *core.Stage, stages []*core.Stage) bool { - deps := map[string]struct{}{} - for _, dep := range next.DependsOn { - deps[dep] = struct{}{} - } - for _, sibling := range stages { - if _, ok := deps[sibling.Name]; !ok { - continue - } - if sibling.Updated > curr.Updated { - return false - } else if sibling.Updated == curr.Updated && - sibling.Number > curr.Number { - return false - } - } - return true -} - -// helper function returns true if all dependencies are complete. -func depsComplete(stage *core.Stage, siblings []*core.Stage) bool { - for _, dep := range stage.DependsOn { - found := false - inner: - for _, sibling := range siblings { - if sibling.Name == dep { - found = true - break inner - } - } - if !found { - return false - } - } - return true -} diff --git a/operator/manager/util_test.go b/operator/manager/util_test.go deleted file mode 100644 index 553d474810..0000000000 --- a/operator/manager/util_test.go +++ /dev/null @@ -1,5 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package manager diff --git a/operator/runner/after.go b/operator/runner/after.go deleted file mode 100644 index 25308ea5a3..0000000000 --- a/operator/runner/after.go +++ /dev/null @@ -1,15 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package runner diff --git a/operator/runner/after_test.go b/operator/runner/after_test.go deleted file mode 100644 index f4b8566d05..0000000000 --- a/operator/runner/after_test.go +++ /dev/null @@ -1,5 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. 
-// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package runner diff --git a/operator/runner/before.go b/operator/runner/before.go deleted file mode 100644 index 25308ea5a3..0000000000 --- a/operator/runner/before.go +++ /dev/null @@ -1,15 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package runner diff --git a/operator/runner/before_test.go b/operator/runner/before_test.go deleted file mode 100644 index f4b8566d05..0000000000 --- a/operator/runner/before_test.go +++ /dev/null @@ -1,5 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package runner diff --git a/operator/runner/convert.go b/operator/runner/convert.go deleted file mode 100644 index 918e33749c..0000000000 --- a/operator/runner/convert.go +++ /dev/null @@ -1,77 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package runner - -import ( - "strings" - - "github.com/drone/drone-runtime/engine" - "github.com/drone/drone-runtime/runtime" - "github.com/drone/drone/core" -) - -func convertVolumes(from []string) map[string]string { - to := map[string]string{} - for _, s := range from { - parts := strings.Split(s, ":") - if len(parts) != 2 { - continue - } - key := parts[0] - val := parts[1] - to[key] = val - } - return to -} - -func convertSecrets(from []*core.Secret) map[string]string { - to := map[string]string{} - for _, secret := range from { - to[secret.Name] = secret.Data - } - return to -} - -func convertRegistry(from []*core.Registry) []*engine.DockerAuth { - var to []*engine.DockerAuth - for _, registry := range from { - to = append(to, &engine.DockerAuth{ - Address: registry.Address, - Username: registry.Username, - Password: registry.Password, - }) - } - return to -} - -func convertLines(from []*runtime.Line) []*core.Line { - var to []*core.Line - for _, v := range from { - to = append(to, &core.Line{ - Number: v.Number, - Message: v.Message, - Timestamp: v.Timestamp, - }) - } - return to -} - -func convertLine(from *runtime.Line) *core.Line { - return &core.Line{ - Number: from.Number, - Message: from.Message, - Timestamp: from.Timestamp, - } -} diff --git a/operator/runner/convert_test.go b/operator/runner/convert_test.go deleted file mode 100644 index 2fdcebfdab..0000000000 --- a/operator/runner/convert_test.go +++ /dev/null @@ -1,100 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package runner - -import ( - "testing" - - "github.com/drone/drone-runtime/engine" - "github.com/drone/drone-runtime/runtime" - "github.com/drone/drone/core" - "github.com/google/go-cmp/cmp" -) - -// func Test_convertSecrets(t *testing.T) { -// secrets := []*core.Secret{ -// {Name: "docker_username", Data: "octocat"}, -// {Name: "docker_password", Data: "password"}, -// } -// got := convertSecrets(secrets) - -// want := []compiler.Secret{ -// {Name: "docker_username", Value: "octocat"}, -// {Name: "docker_password", Value: "password"}, -// } - -// if diff := cmp.Diff(got, want); len(diff) != 0 { -// t.Errorf(diff) -// } -// } - -func Test_convertRegistry(t *testing.T) { - list := []*core.Registry{ - { - Address: "docker.io", - Username: "octocat", - Password: "password", - }, - } - got := convertRegistry(list) - want := []*engine.DockerAuth{ - { - Address: "docker.io", - Username: "octocat", - Password: "password", - }, - } - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func Test_convertLines(t *testing.T) { - lines := []*runtime.Line{ - { - Number: 1, - Message: "ping google.com", - Timestamp: 1257894000, - }, - { - Number: 1, - Message: "PING google.com (1.2.3.4): 56 data bytes", - Timestamp: 1257894000, - }, - } - got := convertLines(lines) - want := []*core.Line{ - { - Number: 1, - Message: "ping google.com", - Timestamp: 1257894000, - }, - { - Number: 1, - Message: "PING google.com (1.2.3.4): 56 data bytes", - Timestamp: 1257894000, - }, - } - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func Test_convertLine(t *testing.T) { - line := &runtime.Line{ - Number: 1, - Message: "ping google.com", - Timestamp: 1257894000, - } - got := convertLine(line) - want := &core.Line{ - Number: 1, - Message: "ping google.com", - Timestamp: 1257894000, - } - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } 
-} diff --git a/operator/runner/env.go b/operator/runner/env.go deleted file mode 100644 index 627143a5b1..0000000000 --- a/operator/runner/env.go +++ /dev/null @@ -1,168 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package runner - -import ( - "fmt" - "regexp" - "strings" - - "github.com/drone/drone/core" -) - -func systemEnviron(system *core.System) map[string]string { - return map[string]string{ - "CI": "true", - "DRONE": "true", - "DRONE_SYSTEM_PROTO": system.Proto, - "DRONE_SYSTEM_HOST": system.Host, - "DRONE_SYSTEM_HOSTNAME": system.Host, - "DRONE_SYSTEM_VERSION": fmt.Sprint(system.Version), - } -} - -func agentEnviron(runner *Runner) map[string]string { - return map[string]string{ - "DRONE_MACHINE": runner.Machine, - "DRONE_RUNNER_HOST": runner.Machine, - "DRONE_RUNNER_HOSTNAME": runner.Machine, - "DRONE_RUNNER_PLATFORM": runner.Platform, - } -} - -func repoEnviron(repo *core.Repository) map[string]string { - return map[string]string{ - "DRONE_REPO": repo.Slug, - "DRONE_REPO_SCM": repo.SCM, - "DRONE_REPO_OWNER": repo.Namespace, - "DRONE_REPO_NAMESPACE": repo.Namespace, - "DRONE_REPO_NAME": repo.Name, - "DRONE_REPO_LINK": repo.Link, - "DRONE_REPO_BRANCH": repo.Branch, - "DRONE_REMOTE_URL": repo.HTTPURL, - "DRONE_GIT_HTTP_URL": repo.HTTPURL, - "DRONE_GIT_SSH_URL": repo.SSHURL, - "DRONE_REPO_VISIBILITY": repo.Visibility, - "DRONE_REPO_PRIVATE": fmt.Sprint(repo.Private), - - // - // these are legacy configuration parameters for backward - // compatibility with drone 0.8. 
- // - "CI_REPO": repo.Slug, - "CI_REPO_NAME": repo.Slug, - "CI_REPO_LINK": repo.Link, - "CI_REPO_REMOTE": repo.HTTPURL, - "CI_REMOTE_URL": repo.HTTPURL, - "CI_REPO_PRIVATE": fmt.Sprint(repo.Private), - } -} - -func stageEnviron(stage *core.Stage) map[string]string { - return map[string]string{ - "DRONE_STAGE_KIND": "pipeline", - "DRONE_STAGE_NAME": stage.Name, - "DRONE_STAGE_NUMBER": fmt.Sprint(stage.Number), - "DRONE_STAGE_MACHINE": stage.Machine, - "DRONE_STAGE_OS": stage.OS, - "DRONE_STAGE_ARCH": stage.Arch, - "DRONE_STAGE_VARIANT": stage.Variant, - "DRONE_STAGE_DEPENDS_ON": strings.Join(stage.DependsOn, ","), - } -} - -func buildEnviron(build *core.Build) map[string]string { - env := map[string]string{ - "DRONE_BRANCH": build.Target, - "DRONE_SOURCE_BRANCH": build.Source, - "DRONE_TARGET_BRANCH": build.Target, - "DRONE_COMMIT": build.After, - "DRONE_COMMIT_SHA": build.After, - "DRONE_COMMIT_BEFORE": build.Before, - "DRONE_COMMIT_AFTER": build.After, - "DRONE_COMMIT_REF": build.Ref, - "DRONE_COMMIT_BRANCH": build.Target, - "DRONE_COMMIT_LINK": build.Link, - "DRONE_COMMIT_MESSAGE": build.Message, - "DRONE_COMMIT_AUTHOR": build.Author, - "DRONE_COMMIT_AUTHOR_EMAIL": build.AuthorEmail, - "DRONE_COMMIT_AUTHOR_AVATAR": build.AuthorAvatar, - "DRONE_COMMIT_AUTHOR_NAME": build.AuthorName, - "DRONE_BUILD_NUMBER": fmt.Sprint(build.Number), - "DRONE_BUILD_EVENT": build.Event, - "DRONE_BUILD_ACTION": build.Action, - "DRONE_BUILD_CREATED": fmt.Sprint(build.Created), - "DRONE_BUILD_STARTED": fmt.Sprint(build.Started), - "DRONE_BUILD_FINISHED": fmt.Sprint(build.Finished), - "DRONE_DEPLOY_TO": build.Deploy, - - // - // these are legacy configuration parameters for backward - // compatibility with drone 0.8. - // - "CI_BUILD_NUMBER": fmt.Sprint(build.Number), - "CI_PARENT_BUILD_NUMBER": fmt.Sprint(build.Parent), - "CI_BUILD_CREATED": fmt.Sprint(build.Created), - "CI_BUILD_STARTED": fmt.Sprint(build.Started), - "CI_BUILD_FINISHED": fmt.Sprint(build.Finished), - "CI_BUILD_STATUS": build.Status, - "CI_BUILD_EVENT": build.Event, - "CI_BUILD_LINK": build.Link, - "CI_BUILD_TARGET": build.Deploy, - "CI_COMMIT_SHA": build.After, - "CI_COMMIT_REF": build.Ref, - "CI_COMMIT_BRANCH": build.Target, - "CI_COMMIT_MESSAGE": build.Message, - "CI_COMMIT_AUTHOR": build.Author, - "CI_COMMIT_AUTHOR_NAME": build.AuthorName, - "CI_COMMIT_AUTHOR_EMAIL": build.AuthorEmail, - "CI_COMMIT_AUTHOR_AVATAR": build.AuthorAvatar, - } - if strings.HasPrefix(build.Ref, "refs/tags/") { - env["DRONE_TAG"] = strings.TrimPrefix(build.Ref, "refs/tags/") - } - if build.Event == core.EventPullRequest { - env["DRONE_PULL_REQUEST"] = re.FindString(build.Ref) - } - return env -} - -func linkEnviron(repo *core.Repository, build *core.Build, system *core.System) map[string]string { - return map[string]string{ - "DRONE_BUILD_LINK": fmt.Sprintf( - "%s://%s/%s/%d", - system.Proto, - system.Host, - repo.Slug, - build.Number, - ), - } -} - -// regular expression to extract the pull request number -// from the git ref (e.g. refs/pulls/{d}/head) -var re = regexp.MustCompile("\\d+") - -// helper function combines one or more maps of environment -// variables into a single map. 
-func combineEnviron(env ...map[string]string) map[string]string { - c := map[string]string{} - for _, e := range env { - for k, v := range e { - c[k] = v - } - } - return c -} diff --git a/operator/runner/env_test.go b/operator/runner/env_test.go deleted file mode 100644 index 38ba37ce44..0000000000 --- a/operator/runner/env_test.go +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package runner - -import ( - "testing" - - "github.com/drone/drone/core" - "github.com/google/go-cmp/cmp" -) - -func Test_systemEnviron(t *testing.T) { - system := &core.System{ - Proto: "https", - Host: "meta.drone.io", - Link: "https://meta.drone.io", - Version: "v1.0.0", - } - got := systemEnviron(system) - want := map[string]string{ - "CI": "true", - "DRONE": "true", - "DRONE_SYSTEM_PROTO": "https", - "DRONE_SYSTEM_HOST": "meta.drone.io", - "DRONE_SYSTEM_HOSTNAME": "meta.drone.io", - "DRONE_SYSTEM_VERSION": "v1.0.0", - } - if diff := cmp.Diff(got, want); diff != "" { - t.Errorf(diff) - } -} - -func Test_runnerEnviron(t *testing.T) { - runner := &Runner{ - Machine: "ip-12-34-56-78.us-west-2.compute.internal", - Platform: "linux/amd64", - } - got := agentEnviron(runner) - want := map[string]string{ - "DRONE_MACHINE": "ip-12-34-56-78.us-west-2.compute.internal", - "DRONE_RUNNER_HOST": "ip-12-34-56-78.us-west-2.compute.internal", - "DRONE_RUNNER_HOSTNAME": "ip-12-34-56-78.us-west-2.compute.internal", - "DRONE_RUNNER_PLATFORM": "linux/amd64", - } - if diff := cmp.Diff(got, want); diff != "" { - t.Errorf(diff) - } -} diff --git a/operator/runner/machine/client.go b/operator/runner/machine/client.go deleted file mode 100644 index 5e896f0ebd..0000000000 --- a/operator/runner/machine/client.go +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package machine - -// import ( -// "io/ioutil" -// "net/http" -// "path/filepath" - -// "docker.io/go-docker" -// "docker.io/go-docker/api" -// "github.com/docker/go-connections/tlsconfig" -// ) - -// // Client returns a new Docker client from the -// // machine directory. -// func Client(path string) (docker.APIClient, error) { -// // read the docker-machine configuration file from -// // the local machine directory. -// configPath, err := := filepath.Join(path, "config.json") -// if err != nil { -// return nil, err -// } -// config := - -// options := tlsconfig.Options{ -// CAFile: filepath.Join(path, "ca.pem"), -// CertFile: filepath.Join(path, "cert.pem"), -// KeyFile: filepath.Join(path, "key.pem"), -// InsecureSkipVerify: false, -// } -// tlsc, err := tlsconfig.Client(options) -// if err != nil { -// return nil, err -// } -// client = &http.Client{ -// Transport: &http.Transport{ -// TLSClientConfig: tlsc, -// }, -// CheckRedirect: docker.CheckRedirect, -// } -// return docker.NewClient(host, api.DefaultVersion, client, nil) -// } diff --git a/operator/runner/machine/config.go b/operator/runner/machine/config.go deleted file mode 100644 index 1fbe5fa36e..0000000000 --- a/operator/runner/machine/config.go +++ /dev/null @@ -1,65 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -// +build !oss - -package machine - -import ( - "bytes" - "encoding/json" - "io" - "io/ioutil" - "strings" -) - -// Config provides the Docker machine configuration. -type Config struct { - Name string - Driver struct { - IPAddress string - MachineName string - } - HostOptions struct { - EngineOptions struct { - TLSVerify bool `json:"TlsVerify"` - } - AuthOptions struct { - CertDir string - CaCertPath string - CaPrivateKeyPath string - ServerCertPath string - ServerKeyPath string - ClientKeyPath string - ClientCertPath string - StorePath string - } - } -} - -// helper function reads and unmarshalls the docker-machine -// configuration from a reader. -func parseReader(r io.Reader) (*Config, error) { - out := new(Config) - err := json.NewDecoder(r).Decode(out) - return out, err -} - -// heper function parses the docker-machine configuration -// from a json string. -func parseString(s string) (*Config, error) { - r := strings.NewReader(s) - return parseReader(r) -} - -// heper function parses the docker-machine configuration -// from a json file. -func parseFile(path string) (*Config, error) { - d, err := ioutil.ReadFile(path) - if err != nil { - return nil, err - } - r := bytes.NewReader(d) - return parseReader(r) -} diff --git a/operator/runner/machine/config_test.go b/operator/runner/machine/config_test.go deleted file mode 100644 index cfd1a6686d..0000000000 --- a/operator/runner/machine/config_test.go +++ /dev/null @@ -1,7 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package machine diff --git a/operator/runner/machine/machine.go b/operator/runner/machine/machine.go deleted file mode 100644 index e25faf8404..0000000000 --- a/operator/runner/machine/machine.go +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package machine - -import ( - "errors" - "io/ioutil" - "path/filepath" -) - -// ErrNoMachines is returned when no valid or matching -// docker machines are found in the docker-machine home -// directory. -var ErrNoMachines = errors.New("No Docker Machines found") - -// Load loads the docker-machine runners. -func Load(home, match string) ([]*Config, error) { - path := filepath.Join(home, "machines") - entries, err := ioutil.ReadDir(path) - if err != nil { - return nil, err - } - // loop through the list of docker-machine home - // and capture a list of matching subdirectories. - var machines []*Config - for _, entry := range entries { - if entry.IsDir() == false { - continue - } - name := entry.Name() - confPath := filepath.Join(path, name, "config.json") - conf, err := parseFile(confPath) - if err != nil { - return nil, err - } - // If no match logic is defined, the machine is - // automatically used as a build machine. - if match == "" { - machines = append(machines, conf) - continue - } - // Else verify the machine matches the user-defined - // pattern. 
Use as a build machine if a match exists - match, _ := filepath.Match(match, conf.Name) - if match { - machines = append(machines, conf) - } - } - if len(machines) == 0 { - return nil, ErrNoMachines - } - return machines, nil -} diff --git a/operator/runner/machine/machine_test.go b/operator/runner/machine/machine_test.go deleted file mode 100644 index dbbb1e6fdd..0000000000 --- a/operator/runner/machine/machine_test.go +++ /dev/null @@ -1,15 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package machine - -import ( - "testing" -) - -func TestLoad(t *testing.T) { - t.Skip() -} diff --git a/operator/runner/runner.go b/operator/runner/runner.go deleted file mode 100644 index 25c97a7f18..0000000000 --- a/operator/runner/runner.go +++ /dev/null @@ -1,611 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package runner - -import ( - "context" - "encoding/json" - "errors" - "fmt" - "runtime/debug" - "strconv" - "strings" - "sync" - "time" - - "github.com/drone/drone-runtime/engine" - "github.com/drone/drone-runtime/runtime" - "github.com/drone/drone-yaml/yaml" - "github.com/drone/drone-yaml/yaml/compiler" - "github.com/drone/drone-yaml/yaml/compiler/transform" - "github.com/drone/drone-yaml/yaml/converter" - "github.com/drone/drone-yaml/yaml/linter" - "github.com/drone/drone/core" - "github.com/drone/drone/operator/manager" - "github.com/drone/drone/plugin/registry" - "github.com/drone/drone/plugin/secret" - "github.com/drone/drone/store/shared/db" - "github.com/drone/envsubst" - "golang.org/x/sync/errgroup" - - "github.com/sirupsen/logrus" -) - -// Limits defines runtime container limits. -type Limits struct { - MemSwapLimit int64 - MemLimit int64 - ShmSize int64 - CPUQuota int64 - CPUShares int64 - CPUSet string -} - -// Runner is responsible for retrieving and executing builds, and -// reporting back their status to the central server. 
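Note: the Load function above keeps a docker-machine entry when the user-supplied pattern matches the machine name, and keeps everything when no pattern is set. A small sketch of that selection using filepath.Match, with hypothetical machine names and pattern.

```go
package main

import (
	"fmt"
	"path/filepath"
)

func main() {
	// Hypothetical docker-machine names and selection pattern.
	names := []string{"agent-1", "agent-2", "bastion"}
	pattern := "agent-*"

	for _, name := range names {
		ok, err := filepath.Match(pattern, name)
		if err != nil {
			panic(err) // the pattern itself is malformed
		}
		if ok {
			fmt.Println("build machine:", name)
		}
	}
}
```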
-type Runner struct { - sync.Mutex - - Engine engine.Engine - Manager manager.BuildManager - Registry core.RegistryService - Secrets core.SecretService - Limits Limits - Volumes []string - Networks []string - Devices []string - Privileged []string - Environ map[string]string - Machine string - Labels map[string]string - - Kind string - Type string - Platform string - OS string - Arch string - Kernel string - Variant string -} - -func (r *Runner) handleError(ctx context.Context, stage *core.Stage, err error) error { - switch stage.Status { - case core.StatusPending, - core.StatusRunning: - default: - } - for _, step := range stage.Steps { - if step.Status == core.StatusPending { - step.Status = core.StatusSkipped - } - if step.Status == core.StatusRunning { - step.Status = core.StatusPassing - step.Stopped = time.Now().Unix() - } - } - stage.Status = core.StatusError - stage.Error = err.Error() - stage.Stopped = time.Now().Unix() - switch v := err.(type) { - case *runtime.ExitError: - stage.Error = "" - stage.Status = core.StatusFailing - stage.ExitCode = v.Code - case *runtime.OomError: - stage.Error = "OOM kill signaled by host operating system" - } - return r.Manager.AfterAll(ctx, stage) -} - -// -// this is a quick copy-paste duplicate of above that -// removes some code. this is for testing purposes only. -// - -func (r *Runner) Run(ctx context.Context, id int64) error { - logger := logrus.WithFields( - logrus.Fields{ - "machine": r.Machine, - "os": r.OS, - "arch": r.Arch, - "stage-id": id, - }, - ) - - logger.Debug("runner: get stage details from server") - - defer func() { - // taking the paranoid approach to recover from - // a panic that should absolutely never happen. - if r := recover(); r != nil { - logger.Errorf("runner: unexpected panic: %s", r) - debug.PrintStack() - } - }() - - m, err := r.Manager.Details(ctx, id) - if err != nil { - logger.WithError(err).Warnln("runner: cannot get stage details") - return err - } - - logger = logger.WithFields( - logrus.Fields{ - "repo": m.Repo.Slug, - "build": m.Build.Number, - "stage": m.Stage.Number, - }, - ) - - netrc, err := r.Manager.Netrc(ctx, m.Repo.ID) - if err != nil { - logger = logger.WithError(err) - logger.Warnln("runner: cannot get netrc file") - return r.handleError(ctx, m.Stage, err) - } - if netrc == nil { - netrc = new(core.Netrc) - } - - if m.Build.Status == core.StatusKilled || m.Build.Status == core.StatusSkipped { - logger = logger.WithError(err) - logger.Infoln("runner: cannot run a canceled build") - return nil - } - - environ := combineEnviron( - agentEnviron(r), - buildEnviron(m.Build), - repoEnviron(m.Repo), - stageEnviron(m.Stage), - systemEnviron(m.System), - linkEnviron(m.Repo, m.Build, m.System), - m.Build.Params, - ) - - // - // parse configuration file - // - - // - // TODO extract the yaml document by index - // TODO mutate the yaml - // - - // this code is temporarily in place to detect and convert - // the legacy yaml configuration file to the new format. 
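Note: Runner.Run above installs a deferred recover so that a panic while executing one stage cannot take down the runner process. A minimal sketch of that guard using runtime/debug; the stage body here is a stand-in.

```go
package main

import (
	"fmt"
	"runtime/debug"
)

// runStage stands in for a single stage execution; the deferred recover
// keeps an unexpected panic from crashing the whole runner process.
func runStage() {
	defer func() {
		if r := recover(); r != nil {
			fmt.Printf("runner: unexpected panic: %s\n", r)
			debug.PrintStack()
		}
	}()
	panic("simulated failure")
}

func main() {
	runStage()
	fmt.Println("runner keeps polling")
}
```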
- y, err := converter.ConvertString(string(m.Config.Data), converter.Metadata{ - Filename: m.Repo.Config, - URL: m.Repo.Link, - Ref: m.Build.Ref, - }) - - if err != nil { - return err - } - - y, err = envsubst.Eval(y, func(name string) string { - env := environ[name] - if strings.Contains(env, "\n") { - env = fmt.Sprintf("%q", env) - } - return env - }) - if err != nil { - return r.handleError(ctx, m.Stage, err) - } - - manifest, err := yaml.ParseString(y) - if err != nil { - logger = logger.WithError(err) - logger.Warnln("runner: cannot parse yaml") - return r.handleError(ctx, m.Stage, err) - } - - var pipeline *yaml.Pipeline - for _, resource := range manifest.Resources { - v, ok := resource.(*yaml.Pipeline) - if !ok { - continue - } - if v.Name == m.Stage.Name { - pipeline = v - break - } - } - if pipeline == nil { - logger = logger.WithError(err) - logger.Errorln("runner: cannot find named pipeline") - return r.handleError(ctx, m.Stage, - errors.New("cannot find named pipeline"), - ) - } - - logger = logger.WithField("pipeline", pipeline.Name) - - err = linter.Lint(pipeline, m.Repo.Trusted) - if err != nil { - logger = logger.WithError(err) - logger.Warnln("runner: yaml lint errors") - return r.handleError(ctx, m.Stage, err) - } - - secretService := secret.Combine( - secret.Encrypted(), - secret.Static(m.Secrets), - r.Secrets, - ) - registryService := registry.Combine( - registry.Static(m.Secrets), - r.Registry, - ) - - comp := new(compiler.Compiler) - comp.PrivilegedFunc = compiler.DindFunc( - append( - r.Privileged, - "plugins/docker", - "plugins/acr", - "plugins/ecr", - "plugins/gcr", - "plugins/heroku", - ), - ) - comp.SkipFunc = compiler.SkipFunc( - compiler.SkipData{ - Action: m.Build.Action, - Branch: m.Build.Target, - Cron: m.Build.Cron, - Event: m.Build.Event, - Instance: m.System.Host, - Ref: m.Build.Ref, - Repo: m.Repo.Slug, - Target: m.Build.Deploy, - }, - ) - comp.TransformFunc = transform.Combine( - // transform.Include(), - // transform.Exclude(), - // transform.ResumeAt(), - transform.WithAuthsFunc( - func() []*engine.DockerAuth { - in := &core.RegistryArgs{ - Build: m.Build, - Repo: m.Repo, - Conf: manifest, - Pipeline: pipeline, - } - out, err := registryService.List(ctx, in) - if err != nil { - return nil - } - return convertRegistry(out) - }, - ), - transform.WithEnviron(environ), - transform.WithEnviron(r.Environ), - transform.WithLables( - map[string]string{ - "io.drone": "true", - "io.drone.build.number": fmt.Sprint(m.Build.Number), - "io.drone.repo.namespace": m.Repo.Namespace, - "io.drone.repo.name": m.Repo.Name, - "io.drone.stage.name": m.Stage.Name, - "io.drone.stage.number": fmt.Sprint(m.Stage.Number), - "io.drone.ttl": fmt.Sprint(time.Duration(m.Repo.Timeout) * time.Minute), - "io.drone.expires": fmt.Sprint(time.Now().Add(time.Duration(m.Repo.Timeout)*time.Minute + time.Hour).Unix()), - "io.drone.created": fmt.Sprint(time.Now().Unix()), - "io.drone.protected": "false", - }, - ), // TODO append labels here - transform.WithLimits( - r.Limits.MemLimit, - 0, // no clue how to apply the docker cpu limit - ), - transform.WithNetrc( - netrc.Machine, - netrc.Login, - netrc.Password, - ), - transform.WithNetworks(r.Networks), - transform.WithProxy(), - transform.WithSecretFunc( - func(name string) *engine.Secret { - in := &core.SecretArgs{ - Name: name, - Build: m.Build, - Repo: m.Repo, - Conf: manifest, - } - out, err := secretService.Find(ctx, in) - if err != nil { - return nil - } - if out == nil { - return nil - } - return &engine.Secret{ - Metadata: 
engine.Metadata{Name: name}, - Data: out.Data, - } - }, - ), - transform.WithVolumes( - convertVolumes(r.Volumes), - ), - ) - ir := comp.Compile(pipeline) - - steps := map[string]*core.Step{} - i := 0 - for _, s := range ir.Steps { - if s.RunPolicy == engine.RunNever { - continue - } - i++ - dst := &core.Step{ - Number: i, - Name: s.Metadata.Name, - StageID: m.Stage.ID, - Status: core.StatusPending, - ErrIgnore: s.IgnoreErr, - } - steps[dst.Name] = dst - m.Stage.Steps = append(m.Stage.Steps, dst) - } - - hooks := &runtime.Hook{ - BeforeEach: func(s *runtime.State) error { - r.Lock() - s.Step.Envs["DRONE_MACHINE"] = r.Machine - s.Step.Envs["CI_BUILD_STATUS"] = "success" - s.Step.Envs["CI_BUILD_STARTED"] = strconv.FormatInt(s.Runtime.Time, 10) - s.Step.Envs["CI_BUILD_FINISHED"] = strconv.FormatInt(time.Now().Unix(), 10) - s.Step.Envs["DRONE_BUILD_STATUS"] = "success" - s.Step.Envs["DRONE_BUILD_STARTED"] = strconv.FormatInt(s.Runtime.Time, 10) - s.Step.Envs["DRONE_BUILD_FINISHED"] = strconv.FormatInt(time.Now().Unix(), 10) - - s.Step.Envs["CI_JOB_STATUS"] = "success" - s.Step.Envs["CI_JOB_STARTED"] = strconv.FormatInt(s.Runtime.Time, 10) - s.Step.Envs["CI_JOB_FINISHED"] = strconv.FormatInt(time.Now().Unix(), 10) - s.Step.Envs["DRONE_JOB_STATUS"] = "success" - s.Step.Envs["DRONE_JOB_STARTED"] = strconv.FormatInt(s.Runtime.Time, 10) - s.Step.Envs["DRONE_JOB_FINISHED"] = strconv.FormatInt(time.Now().Unix(), 10) - s.Step.Envs["DRONE_STAGE_STATUS"] = "success" - s.Step.Envs["DRONE_STAGE_STARTED"] = strconv.FormatInt(s.Runtime.Time, 10) - s.Step.Envs["DRONE_STAGE_FINISHED"] = strconv.FormatInt(time.Now().Unix(), 10) - - if s.Runtime.Error != nil { - s.Step.Envs["CI_BUILD_STATUS"] = "failure" - s.Step.Envs["CI_JOB_STATUS"] = "failure" - s.Step.Envs["DRONE_BUILD_STATUS"] = "failure" - s.Step.Envs["DRONE_STAGE_STATUS"] = "failure" - s.Step.Envs["DRONE_JOB_STATUS"] = "failure" - } - for _, stage := range m.Build.Stages { - if stage.IsFailed() { - s.Step.Envs["DRONE_BUILD_STATUS"] = "failure" - break - } - } - - step, ok := steps[s.Step.Metadata.Name] - if ok { - step.Status = core.StatusRunning - step.Started = time.Now().Unix() - - s.Step.Envs["DRONE_STEP_NAME"] = step.Name - s.Step.Envs["DRONE_STEP_NUMBER"] = fmt.Sprint(step.Number) - } - - stepClone := new(core.Step) - *stepClone = *step - r.Unlock() - - err := r.Manager.Before(ctx, stepClone) - if err != nil { - return err - } - - r.Lock() - step.ID = stepClone.ID - step.Version = stepClone.Version - r.Unlock() - return nil - }, - - AfterEach: func(s *runtime.State) error { - r.Lock() - step, ok := steps[s.Step.Metadata.Name] - if ok { - step.Status = core.StatusPassing - step.Stopped = time.Now().Unix() - step.ExitCode = s.State.ExitCode - if s.State.ExitCode != 0 && s.State.ExitCode != 78 { - step.Status = core.StatusFailing - } - } - stepClone := new(core.Step) - *stepClone = *step - r.Unlock() - - err := r.Manager.After(ctx, stepClone) - if err != nil { - return err - } - - r.Lock() - step.Version = stepClone.Version - r.Unlock() - - return nil - }, - - GotLine: func(s *runtime.State, line *runtime.Line) error { - r.Lock() - step, ok := steps[s.Step.Metadata.Name] - r.Unlock() - if !ok { - // TODO log error - return nil - } - return r.Manager.Write(ctx, step.ID, convertLine(line)) - }, - - GotLogs: func(s *runtime.State, lines []*runtime.Line) error { - r.Lock() - step, ok := steps[s.Step.Metadata.Name] - r.Unlock() - if !ok { - // TODO log error - return nil - } - raw, _ := json.Marshal( - convertLines(lines), - ) - return 
r.Manager.UploadBytes(ctx, step.ID, raw) - }, - } - - runner := runtime.New( - runtime.WithEngine(r.Engine), - runtime.WithConfig(ir), - runtime.WithHooks(hooks), - ) - - m.Stage.Status = core.StatusRunning - m.Stage.Started = time.Now().Unix() - m.Stage.Machine = r.Machine - err = r.Manager.BeforeAll(ctx, m.Stage) - if err != nil { - logger = logger.WithError(err) - logger.Warnln("runner: cannot initialize pipeline") - return r.handleError(ctx, m.Stage, err) - } - - timeout, cancel := context.WithTimeout(ctx, time.Duration(m.Repo.Timeout)*time.Minute) - defer cancel() - - logger.Infoln("runner: start execution") - - err = runner.Run(timeout) - if err != nil && err != runtime.ErrInterrupt { - logger = logger.WithError(err) - logger.Infoln("runner: execution failed") - return r.handleError(ctx, m.Stage, err) - } - logger = logger.WithError(err) - logger.Infoln("runner: execution complete") - - m.Stage.Status = core.StatusPassing - m.Stage.Stopped = time.Now().Unix() - for _, step := range m.Stage.Steps { - if step.Status == core.StatusPending { - step.Status = core.StatusSkipped - } - if step.Status == core.StatusRunning { - step.Status = core.StatusPassing - step.Stopped = time.Now().Unix() - } - } - - return r.Manager.AfterAll(ctx, m.Stage) -} - -// Start starts N build runner processes. Each process polls -// the server for pending builds to execute. -func (r *Runner) Start(ctx context.Context, n int) error { - var g errgroup.Group - for i := 0; i < n; i++ { - g.Go(func() error { - return r.start(ctx) - }) - } - return g.Wait() -} - -func (r *Runner) start(ctx context.Context) error { - for { - select { - case <-ctx.Done(): - return nil - default: - // This error is ignored on purpose. The system - // should not exit the runner on error. The run - // function logs all errors, which should be enough - // to surface potential issues to an administrator. - r.poll(ctx) - } - } -} - -func (r *Runner) poll(ctx context.Context) error { - logger := logrus.WithFields( - logrus.Fields{ - "machine": r.Machine, - "os": r.OS, - "arch": r.Arch, - }, - ) - - logger.Debugln("runner: polling queue") - p, err := r.Manager.Request(ctx, &manager.Request{ - Kind: "pipeline", - Type: "docker", - OS: r.OS, - Arch: r.Arch, - Kernel: r.Kernel, - Variant: r.Variant, - Labels: r.Labels, - }) - if err != nil { - logger = logger.WithError(err) - logger.Warnln("runner: cannot get queue item") - return err - } - if p == nil || p.ID == 0 { - return nil - } - - ctx, cancel := context.WithCancel(context.Background()) - defer cancel() - - _, err = r.Manager.Accept(ctx, p.ID, r.Machine) - if err == db.ErrOptimisticLock { - return nil - } else if err != nil { - logger.WithError(err). - WithFields( - logrus.Fields{ - "stage-id": p.ID, - "build-id": p.BuildID, - "repo-id": p.RepoID, - }).Warnln("runner: cannot ack stage") - return err - } - - go func() { - logger.Debugln("runner: watch for cancel signal") - done, _ := r.Manager.Watch(ctx, p.BuildID) - if done { - cancel() - logger.Debugln("runner: received cancel signal") - } else { - logger.Debugln("runner: done listening for cancel signals") - } - }() - - return r.Run(ctx, p.ID) -} diff --git a/operator/runner/runner_test.go b/operator/runner/runner_test.go deleted file mode 100644 index d58755fe99..0000000000 --- a/operator/runner/runner_test.go +++ /dev/null @@ -1,15 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
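Note: Runner.Start above fans out N pollers on a shared errgroup and relies on context cancellation to stop them. A small sketch of that pattern; the poll body here only prints and is not Drone's queue client.

```go
package main

import (
	"context"
	"fmt"
	"time"

	"golang.org/x/sync/errgroup"
)

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), 50*time.Millisecond)
	defer cancel()

	// Three pollers share one errgroup; cancellation of the shared
	// context is what eventually stops the loops.
	var g errgroup.Group
	for i := 0; i < 3; i++ {
		i := i
		g.Go(func() error {
			for {
				select {
				case <-ctx.Done():
					return nil
				default:
					fmt.Println("poller", i, "checking queue")
					time.Sleep(10 * time.Millisecond)
				}
			}
		})
	}
	if err := g.Wait(); err != nil {
		fmt.Println("runner stopped:", err)
	}
}
```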
- -package runner - -import ( - "io/ioutil" - - "github.com/sirupsen/logrus" -) - -func init() { - logrus.SetOutput(ioutil.Discard) -} diff --git a/operator/runner/secrets.go b/operator/runner/secrets.go deleted file mode 100644 index d373ef9bb7..0000000000 --- a/operator/runner/secrets.go +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package runner - -import "github.com/drone/drone/core" - -func toSecretMap(secrets []*core.Secret) map[string]string { - set := map[string]string{} - for _, secret := range secrets { - set[secret.Name] = secret.Data - } - return set -} diff --git a/plugin/admission/account.go b/plugin/admission/account.go deleted file mode 100644 index 0461523499..0000000000 --- a/plugin/admission/account.go +++ /dev/null @@ -1,73 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package admission - -import ( - "context" - "errors" - "strings" - - "github.com/drone/drone/core" -) - -// ErrMembership is returned when attempting to create a new -// user account for a user that is not a member of an approved -// organization. -var ErrMembership = errors.New("User must be a member of an approved organization") - -// Membership limits user access by organization membership. -func Membership(service core.OrganizationService, accounts []string) core.AdmissionService { - lookup := map[string]struct{}{} - for _, account := range accounts { - account = strings.TrimSpace(account) - account = strings.ToLower(account) - lookup[account] = struct{}{} - } - return &membership{service: service, account: lookup} -} - -type membership struct { - service core.OrganizationService - account map[string]struct{} -} - -func (s *membership) Admit(ctx context.Context, user *core.User) error { - // this admission policy is only enforced for - // new users. Existing users are always admitted. - if user.ID != 0 { - return nil - } - - // if the membership whitelist is empty assume the system - // is open admission. - if len(s.account) == 0 { - return nil - } - // if the username is in the whitelist when can admin - // the user without making an API call to fetch the - // organization list. - _, ok := s.account[strings.ToLower(user.Login)] - if ok { - return nil - } - // make an API call to retrive the list of organizations - // to which the user belongs. - orgs, err := s.service.List(ctx, user) - if err != nil { - return err - } - // if the user is a member of an organization in the - // account whitelist we can admit the user. 
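Note: the Membership constructor above trims and lower-cases the configured account names so that later lookups are case-insensitive. A minimal sketch of that normalization with made-up account names.

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	// Hypothetical configured accounts; trimming and lower-casing makes
	// the membership lookup case-insensitive.
	accounts := []string{" Octocat ", "GitHub"}
	lookup := map[string]struct{}{}
	for _, account := range accounts {
		lookup[strings.ToLower(strings.TrimSpace(account))] = struct{}{}
	}

	_, ok := lookup[strings.ToLower("octocat")]
	fmt.Println("admitted:", ok) // admitted: true
}
```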
- for _, org := range orgs { - _, ok := s.account[strings.ToLower(org.Name)] - if ok { - return nil - } - } - // else deny access - return ErrMembership -} diff --git a/plugin/admission/account_oss.go b/plugin/admission/account_oss.go deleted file mode 100644 index ec27eb7d0b..0000000000 --- a/plugin/admission/account_oss.go +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build oss - -package admission - -import "github.com/drone/drone/core" - -// Membership is a no-op admission controller -func Membership(core.OrganizationService, []string) core.AdmissionService { - return new(noop) -} diff --git a/plugin/admission/account_test.go b/plugin/admission/account_test.go deleted file mode 100644 index e80cac3071..0000000000 --- a/plugin/admission/account_test.go +++ /dev/null @@ -1,108 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package admission - -import ( - "context" - "errors" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/mock" - - "github.com/golang/mock/gomock" -) - -var noContext = context.TODO() - -func TestMembership_MatchOrg(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - dummyUser := &core.User{ - Login: "octocat", - } - - orgs := mock.NewMockOrganizationService(controller) - orgs.EXPECT().List(gomock.Any(), dummyUser).Return([]*core.Organization{ - {Name: "bar"}, {Name: "baz"}, {Name: "GiThUb"}, - }, nil) - - service := Membership(orgs, []string{"GithuB"}) - err := service.Admit(noContext, dummyUser) - if err != nil { - t.Error(err) - } -} - -func TestOrganization_MatchUser(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - dummyUser := &core.User{ - Login: "octocat", - } - - service := Membership(nil, []string{"octocat"}) - err := service.Admit(noContext, dummyUser) - if err != nil { - t.Error(err) - } -} - -func TestOrganization_MembershipError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - dummyUser := &core.User{ - Login: "octocat", - } - - orgs := mock.NewMockOrganizationService(controller) - orgs.EXPECT().List(gomock.Any(), dummyUser).Return([]*core.Organization{ - {Name: "foo"}, {Name: "bar"}, - }, nil) - - service := Membership(orgs, []string{"baz"}) - err := service.Admit(noContext, dummyUser) - if err != ErrMembership { - t.Errorf("Expect ErrMembership") - } -} - -func TestOrganization_OrganizationListError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - dummyUser := &core.User{ - Login: "octocat", - } - - orgs := mock.NewMockOrganizationService(controller) - orgs.EXPECT().List(gomock.Any(), dummyUser).Return(nil, errors.New("")) - - service := Membership(orgs, []string{"GithuB"}) - err := service.Admit(noContext, dummyUser) - if err == nil { - 
t.Errorf("Expected error") - } -} - -func TestOrganization_EmptyWhitelist(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - dummyUser := &core.User{ - Login: "octocat", - } - - service := Membership(nil, []string{}) - err := service.Admit(noContext, dummyUser) - if err != nil { - t.Error(err) - } -} diff --git a/plugin/admission/combine.go b/plugin/admission/combine.go deleted file mode 100644 index 608971bb02..0000000000 --- a/plugin/admission/combine.go +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package admission - -import ( - "context" - - "github.com/drone/drone/core" -) - -// Combine combines admission services. -func Combine(service ...core.AdmissionService) core.AdmissionService { - return &combined{services: service} -} - -type combined struct { - services []core.AdmissionService -} - -func (s *combined) Admit(ctx context.Context, user *core.User) error { - for _, service := range s.services { - if err := service.Admit(ctx, user); err != nil { - return err - } - } - return nil -} diff --git a/plugin/admission/combine_test.go b/plugin/admission/combine_test.go deleted file mode 100644 index b043c1c225..0000000000 --- a/plugin/admission/combine_test.go +++ /dev/null @@ -1,43 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package admission - -import ( - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/mock" - "github.com/golang/mock/gomock" -) - -func TestCombineAdmit(t *testing.T) { - user := &core.User{Login: "octocat"} - err := Combine( - Membership(nil, nil), - Membership(nil, nil), - ).Admit(noContext, user) - if err != nil { - t.Error(err) - } -} - -func TestCombineAdmit_Error(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - user := &core.User{Login: "octocat"} - - orgs := mock.NewMockOrganizationService(controller) - orgs.EXPECT().List(gomock.Any(), user).Return(nil, nil) - - service1 := Membership(orgs, nil) - service2 := Membership(orgs, []string{"github"}) - err := Combine(service1, service2).Admit(noContext, user) - if err != ErrMembership { - t.Errorf("expect ErrMembership") - } -} diff --git a/plugin/admission/external.go b/plugin/admission/external.go deleted file mode 100644 index bade9c568f..0000000000 --- a/plugin/admission/external.go +++ /dev/null @@ -1,75 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package admission - -import ( - "context" - "time" - - "github.com/drone/drone-go/drone" - "github.com/drone/drone-go/plugin/admission" - "github.com/drone/drone/core" -) - -// External returns a new external Admission controller. 
-func External(endpoint, secret string, skipVerify bool) core.AdmissionService { - return &external{ - endpoint: endpoint, - secret: secret, - skipVerify: skipVerify, - } -} - -type external struct { - endpoint string - secret string - skipVerify bool -} - -func (c *external) Admit(ctx context.Context, user *core.User) error { - if c.endpoint == "" { - return nil - } - - // include a timeout to prevent an API call from - // hanging the build process indefinitely. The - // external service must return a request within - // one minute. - ctx, cancel := context.WithTimeout(ctx, time.Minute) - defer cancel() - - req := &admission.Request{ - Event: admission.EventLogin, - User: toUser(user), - } - if user.ID == 0 { - req.Event = admission.EventRegister - } - client := admission.Client(c.endpoint, c.secret, c.skipVerify) - result, err := client.Admit(ctx, req) - if result != nil { - user.Admin = result.Admin - } - return err -} - -func toUser(from *core.User) drone.User { - return drone.User{ - ID: from.ID, - Login: from.Login, - Email: from.Email, - Avatar: from.Avatar, - Active: from.Active, - Admin: from.Admin, - Machine: from.Machine, - Syncing: from.Syncing, - Synced: from.Synced, - Created: from.Created, - Updated: from.Updated, - LastLogin: from.LastLogin, - } -} diff --git a/plugin/admission/external_oss.go b/plugin/admission/external_oss.go deleted file mode 100644 index 8497ebb92b..0000000000 --- a/plugin/admission/external_oss.go +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build oss - -package admission - -import "github.com/drone/drone/core" - -// External is a no-op admission controller -func External(string, string, bool) core.AdmissionService { - return new(noop) -} diff --git a/plugin/admission/external_test.go b/plugin/admission/external_test.go deleted file mode 100644 index 5af7309a91..0000000000 --- a/plugin/admission/external_test.go +++ /dev/null @@ -1,7 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package admission diff --git a/plugin/admission/nobot.go b/plugin/admission/nobot.go deleted file mode 100644 index 62eb3636b2..0000000000 --- a/plugin/admission/nobot.go +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package admission - -import ( - "context" - "errors" - "time" - - "github.com/drone/drone/core" -) - -// ErrCannotVerify is returned when attempting to verify the -// user is a human being. -var ErrCannotVerify = errors.New("Cannot verify user authenticity") - -// Nobot enforces an admission policy that restricts access to -// users accounts that were recently created and may be bots. 
-// The policy expects the source control management system will -// identify and remove the bot accounts before they would be -// eligible to use the system. -func Nobot(service core.UserService, age time.Duration) core.AdmissionService { - return &nobot{service: service, age: age} -} - -type nobot struct { - age time.Duration - service core.UserService -} - -func (s *nobot) Admit(ctx context.Context, user *core.User) error { - // this admission policy is only enforced for - // new users. Existing users are always admitted. - if user.ID != 0 { - return nil - } - - // if the minimum required age is not specified the check - // is skipped. - if s.age == 0 { - return nil - } - account, err := s.service.Find(ctx, user.Token, user.Refresh) - if err != nil { - return err - } - if account.Created == 0 { - return nil - } - now := time.Now() - if time.Unix(account.Created, 0).Add(s.age).After(now) { - return ErrCannotVerify - } - return nil -} diff --git a/plugin/admission/nobot_oss.go b/plugin/admission/nobot_oss.go deleted file mode 100644 index 0f06b20e9b..0000000000 --- a/plugin/admission/nobot_oss.go +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build oss - -package admission - -import ( - "time" - - "github.com/drone/drone/core" -) - -// Nobot is a no-op admission controller -func Nobot(core.UserService, time.Duration) core.AdmissionService { - return new(noop) -} diff --git a/plugin/admission/nobot_test.go b/plugin/admission/nobot_test.go deleted file mode 100644 index bb0ac2f3f1..0000000000 --- a/plugin/admission/nobot_test.go +++ /dev/null @@ -1,95 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
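Note: the Nobot policy above rejects accounts created less than the configured minimum age ago, and skips the check when the age is zero or the provider reports no creation date. A standalone sketch of that comparison; the account timestamps are fabricated for illustration.

```go
package main

import (
	"errors"
	"fmt"
	"time"
)

var errTooNew = errors.New("cannot verify user authenticity")

// admit mirrors the age comparison above: an account created less than
// minAge ago is rejected as a possible bot.
func admit(created int64, minAge time.Duration) error {
	if minAge == 0 || created == 0 {
		return nil // check disabled, or no creation date reported
	}
	if time.Unix(created, 0).Add(minAge).After(time.Now()) {
		return errTooNew
	}
	return nil
}

func main() {
	fresh := time.Now().Unix()
	old := time.Now().Add(-48 * time.Hour).Unix()

	fmt.Println(admit(fresh, time.Hour)) // cannot verify user authenticity
	fmt.Println(admit(old, time.Hour))   // <nil>
}
```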
- -// +build !oss - -package admission - -import ( - "errors" - "testing" - "time" - - "github.com/drone/drone/core" - "github.com/drone/drone/mock" - "github.com/golang/mock/gomock" -) - -func TestNobot(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - localUser := &core.User{Login: "octocat"} - remoteUser := &core.User{Login: "octocat", Created: time.Now().Unix() - 120} // 120 seconds - users := mock.NewMockUserService(controller) - users.EXPECT().Find(gomock.Any(), gomock.Any(), gomock.Any()).Return(remoteUser, nil) - - admission := Nobot(users, time.Minute) // 60 seconds - err := admission.Admit(noContext, localUser) - if err != nil { - t.Error(err) - } -} - -func TestNobot_AccountTooNew(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - localUser := &core.User{Login: "octocat"} - remoteUser := &core.User{Login: "octocat", Created: time.Now().Unix()} - users := mock.NewMockUserService(controller) - users.EXPECT().Find(gomock.Any(), gomock.Any(), gomock.Any()).Return(remoteUser, nil) - - admission := Nobot(users, time.Hour) - err := admission.Admit(noContext, localUser) - if err != ErrCannotVerify { - t.Errorf("Expect ErrCannotVerify error") - } -} - -func TestNobot_ZeroDate(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - localUser := &core.User{Login: "octocat"} - remoteUser := &core.User{Login: "octocat", Created: 0} - users := mock.NewMockUserService(controller) - users.EXPECT().Find(gomock.Any(), gomock.Any(), gomock.Any()).Return(remoteUser, nil) - - admission := Nobot(users, time.Minute) - err := admission.Admit(noContext, localUser) - if err != nil { - t.Error(err) - } -} - -func TestNobot_RemoteError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - want := errors.New("") - users := mock.NewMockUserService(controller) - users.EXPECT().Find(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, want) - - admission := Nobot(users, time.Minute) - got := admission.Admit(noContext, new(core.User)) - if got != want { - t.Errorf("Expect error from source control management system returned") - } -} - -func TestNobot_SkipCheck(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - dummyUser := &core.User{ - Login: "octocat", - } - - admission := Nobot(nil, 0) - err := admission.Admit(noContext, dummyUser) - if err != nil { - t.Error(err) - } -} diff --git a/plugin/admission/noop.go b/plugin/admission/noop.go deleted file mode 100644 index cc60c856b1..0000000000 --- a/plugin/admission/noop.go +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package admission - -import ( - "context" - - "github.com/drone/drone/core" -) - -// noop is a stub admission controller. 
-type noop struct{} - -func (noop) Admit(context.Context, *core.User) error { - return nil -} diff --git a/plugin/admission/open.go b/plugin/admission/open.go deleted file mode 100644 index 80acf47893..0000000000 --- a/plugin/admission/open.go +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package admission - -import ( - "context" - "errors" - - "github.com/drone/drone/core" -) - -// ErrClosed is returned when attempting to create a new -// user account and admissions are closed. -var ErrClosed = errors.New("User registration is disabled") - -// Open enforces an open admission policy by default unless -// disabled. -func Open(disabled bool) core.AdmissionService { - return &closed{disabled: disabled} -} - -type closed struct { - disabled bool -} - -func (s *closed) Admit(ctx context.Context, user *core.User) error { - // this admission policy is only enforced for - // new users. Existing users are always admitted. - if user.ID != 0 { - return nil - } - - if s.disabled { - return ErrClosed - } - return nil -} diff --git a/plugin/admission/open_test.go b/plugin/admission/open_test.go deleted file mode 100644 index 1724854476..0000000000 --- a/plugin/admission/open_test.go +++ /dev/null @@ -1,36 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package admission - -import ( - "testing" - - "github.com/drone/drone/core" - "github.com/golang/mock/gomock" -) - -func TestOpen(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - user := &core.User{Login: "octocat"} - err := Open(false).Admit(noContext, user) - if err != nil { - t.Error(err) - } - - err = Open(true).Admit(noContext, user) - if err == nil { - t.Errorf("Expect error when open admission is closed") - } - - user.ID = 1 - err = Open(true).Admit(noContext, user) - if err != nil { - t.Error(err) - } -} diff --git a/plugin/config/combine.go b/plugin/config/combine.go deleted file mode 100644 index da506ee77c..0000000000 --- a/plugin/config/combine.go +++ /dev/null @@ -1,52 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package config - -import ( - "context" - "errors" - - "github.com/drone/drone/core" -) - -// error returned when no configured found. 
-var errNotFound = errors.New("configuration: not found") - -// Combine combines the config services, allowing the system -// to source pipeline configuration from multiple sources. -func Combine(services ...core.ConfigService) core.ConfigService { - return &combined{services} -} - -type combined struct { - sources []core.ConfigService -} - -func (c *combined) Find(ctx context.Context, req *core.ConfigArgs) (*core.Config, error) { - for _, source := range c.sources { - config, err := source.Find(ctx, req) - if err != nil { - return nil, err - } - if config == nil { - continue - } - if config.Data == "" { - continue - } - return config, nil - } - return nil, errNotFound -} diff --git a/plugin/config/combine_test.go b/plugin/config/combine_test.go deleted file mode 100644 index 1d35280476..0000000000 --- a/plugin/config/combine_test.go +++ /dev/null @@ -1,128 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package config - -import ( - "errors" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/mock" - - "github.com/golang/mock/gomock" -) - -func TestCombine(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - args := &core.ConfigArgs{ - User: &core.User{Login: "octocat"}, - Repo: &core.Repository{Slug: "octocat/hello-world", Config: ".drone.yml"}, - Build: &core.Build{After: "6d144de7"}, - } - - resp := &core.Config{Data: string(mockFile)} - - service := mock.NewMockConfigService(controller) - service.EXPECT().Find(noContext, args).Return(resp, nil) - - result, err := Combine(service).Find(noContext, args) - if err != nil { - t.Error(err) - return - } - - if result.Data != string(resp.Data) { - t.Errorf("unexpected file contents") - } -} - -func TestCombineErr(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - resp := errors.New("") - service := mock.NewMockConfigService(controller) - service.EXPECT().Find(noContext, nil).Return(nil, resp) - - _, err := Combine(service).Find(noContext, nil) - if err != resp { - t.Errorf("expected config service error") - } -} - -func TestCombineNoConfig(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - args := &core.ConfigArgs{ - User: &core.User{Login: "octocat"}, - Repo: &core.Repository{Slug: "octocat/hello-world", Config: ".drone.yml"}, - Build: &core.Build{After: "6d144de7"}, - } - - resp := &core.Config{Data: string(mockFile)} - - service1 := mock.NewMockConfigService(controller) - service1.EXPECT().Find(noContext, args).Return(nil, nil) - - service2 := mock.NewMockConfigService(controller) - service2.EXPECT().Find(noContext, args).Return(resp, nil) - - result, err := Combine(service1, service2).Find(noContext, args) - if err != nil { - t.Error(err) - return - } - - if result.Data != string(resp.Data) { - t.Errorf("unexpected file contents") - } -} - -func TestCombineEmptyConfig(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - args := &core.ConfigArgs{ - User: &core.User{Login: "octocat"}, - Repo: &core.Repository{Slug: "octocat/hello-world", Config: ".drone.yml"}, - Build: &core.Build{After: "6d144de7"}, - } - - resp1 := &core.Config{} - resp2 := &core.Config{Data: string(mockFile)} - - service1 := mock.NewMockConfigService(controller) - service1.EXPECT().Find(noContext, args).Return(resp1, nil) - - service2 := mock.NewMockConfigService(controller) - 
service2.EXPECT().Find(noContext, args).Return(resp2, nil) - - result, err := Combine(service1, service2).Find(noContext, args) - if err != nil { - t.Error(err) - return - } - - if result.Data != string(resp2.Data) { - t.Errorf("unexpected file contents") - } -} - -func TestCombineNoConfigErr(t *testing.T) { - // args := &core.ConfigArgs{ - // User: &core.User{Login: "octocat"}, - // Repo: &core.Repository{Slug: "octocat/hello-world", Config: ".drone.yml"}, - // Build: &core.Build{After: "6d144de7"}, - // } - - service := Combine() - _, err := service.Find(noContext, nil) - if err != errNotFound { - t.Errorf("Expect not found error") - } -} diff --git a/plugin/config/global.go b/plugin/config/global.go deleted file mode 100644 index 73d05518d1..0000000000 --- a/plugin/config/global.go +++ /dev/null @@ -1,136 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -//go:build !oss -// +build !oss - -package config - -import ( - "context" - "time" - - "github.com/drone/drone-go/drone" - "github.com/drone/drone-go/plugin/config" - - "github.com/drone/drone/core" -) - -// Global returns a configuration service that fetches the yaml -// configuration from a remote endpoint. -func Global(endpoint, signer string, skipVerify bool, timeout time.Duration) core.ConfigService { - if endpoint == "" { - return new(global) - } - return &global{ - client: config.Client( - endpoint, - signer, - skipVerify, - ), - timeout: timeout, - } -} - -type global struct { - client config.Plugin - timeout time.Duration -} - -func (g *global) Find(ctx context.Context, in *core.ConfigArgs) (*core.Config, error) { - if g.client == nil { - return nil, nil - } - // include a timeout to prevent an API call from - // hanging the build process indefinitely. The - // external service must return a response within - // the configured timeout (default 1m). - ctx, cancel := context.WithTimeout(ctx, g.timeout) - defer cancel() - - req := &config.Request{ - Repo: toRepo(in.Repo), - Build: toBuild(in.Build), - Token: drone.Token{ - Access: in.User.Token, - Refresh: in.User.Refresh, - }, - } - - res, err := g.client.Find(ctx, req) - if err != nil { - return nil, err - } - - // if no error is returned and the secret is empty, - // this indicates the client returned No Content, - // and we should exit with no secret, but no error. 
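Note: the combined config service above tries each source in order, skips empty results (the No Content convention used by the global extension), and falls back to a not-found error. A self-contained analogue with stub sources in place of core.ConfigService.

```go
package main

import (
	"errors"
	"fmt"
)

var errNotFound = errors.New("configuration: not found")

// source is a local stand-in for a config service lookup.
type source func() (string, error)

// find tries each source in order, skipping empty results, and returns
// the first non-empty configuration.
func find(sources ...source) (string, error) {
	for _, s := range sources {
		data, err := s()
		if err != nil {
			return "", err
		}
		if data == "" {
			continue // e.g. the extension answered 204 No Content
		}
		return data, nil
	}
	return "", errNotFound
}

func main() {
	remote := func() (string, error) { return "", nil }
	scm := func() (string, error) { return "kind: pipeline\nname: default", nil }

	data, err := find(remote, scm)
	fmt.Printf("%q %v\n", data, err)
}
```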
- if res.Data == "" { - return nil, nil - } - - return &core.Config{ - Kind: res.Kind, - Data: res.Data, - }, nil -} - -func toRepo(from *core.Repository) drone.Repo { - return drone.Repo{ - ID: from.ID, - UID: from.UID, - UserID: from.UserID, - Namespace: from.Namespace, - Name: from.Name, - Slug: from.Slug, - SCM: from.SCM, - HTTPURL: from.HTTPURL, - SSHURL: from.SSHURL, - Link: from.Link, - Branch: from.Branch, - Private: from.Private, - Visibility: from.Visibility, - Active: from.Active, - Config: from.Config, - Trusted: from.Trusted, - Protected: from.Protected, - Timeout: from.Timeout, - } -} - -func toBuild(from *core.Build) drone.Build { - return drone.Build{ - ID: from.ID, - RepoID: from.RepoID, - Trigger: from.Trigger, - Number: from.Number, - Parent: from.Parent, - Status: from.Status, - Error: from.Error, - Event: from.Event, - Action: from.Action, - Link: from.Link, - Timestamp: from.Timestamp, - Title: from.Title, - Message: from.Message, - Before: from.Before, - After: from.After, - Ref: from.Ref, - Fork: from.Fork, - Source: from.Source, - Target: from.Target, - Author: from.Author, - AuthorName: from.AuthorName, - AuthorEmail: from.AuthorEmail, - AuthorAvatar: from.AuthorAvatar, - Sender: from.Sender, - Params: from.Params, - Deploy: from.Deploy, - Started: from.Started, - Finished: from.Finished, - Created: from.Created, - Updated: from.Updated, - Version: from.Version, - } -} diff --git a/plugin/config/global_oss.go b/plugin/config/global_oss.go deleted file mode 100644 index b75a2b3a92..0000000000 --- a/plugin/config/global_oss.go +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build oss - -package config - -import ( - "context" - "time" - - "github.com/drone/drone/core" -) - -// Global returns a no-op configuration service. -func Global(string, string, bool, time.Duration) core.ConfigService { - return new(noop) -} - -type noop struct{} - -func (noop) Find(context.Context, *core.ConfigArgs) (*core.Config, error) { - return nil, nil -} diff --git a/plugin/config/global_test.go b/plugin/config/global_test.go deleted file mode 100644 index 6b2d858698..0000000000 --- a/plugin/config/global_test.go +++ /dev/null @@ -1,126 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package config - -import ( - "testing" - "time" - - "github.com/drone/drone/core" - "github.com/h2non/gock" -) - -func TestGlobal(t *testing.T) { - defer gock.Off() - - gock.New("https://company.com"). - Post("/config"). - MatchHeader("Accept", "application/vnd.drone.config.v1\\+json"). - MatchHeader("Accept-Encoding", "identity"). - MatchHeader("Content-Type", "application/json"). - Reply(200). - BodyString(`{"data": "{ kind: pipeline, name: default }"}`). 
- Done() - - args := &core.ConfigArgs{ - User: &core.User{Login: "octocat"}, - Repo: &core.Repository{Slug: "octocat/hello-world", Config: ".drone.yml"}, - Build: &core.Build{After: "6d144de7"}, - } - - service := Global("https://company.com/config", "GMEuUHQfmrMRsseWxi9YlIeBtn9lm6im", - false, time.Minute) - result, err := service.Find(noContext, args) - if err != nil { - t.Error(err) - return - } - - if result.Data != "{ kind: pipeline, name: default }" { - t.Errorf("unexpected file contents") - } - - if gock.IsPending() { - t.Errorf("Unfinished requests") - return - } -} - -func TestGlobalErr(t *testing.T) { - defer gock.Off() - - gock.New("https://company.com"). - Post("/config"). - MatchHeader("Accept", "application/vnd.drone.config.v1\\+json"). - MatchHeader("Accept-Encoding", "identity"). - MatchHeader("Content-Type", "application/json"). - Reply(404). - Done() - - args := &core.ConfigArgs{ - User: &core.User{Login: "octocat"}, - Repo: &core.Repository{Slug: "octocat/hello-world", Config: ".drone.yml"}, - Build: &core.Build{After: "6d144de7"}, - } - - service := Global("https://company.com/config", "GMEuUHQfmrMRsseWxi9YlIeBtn9lm6im", - false, time.Minute) - _, err := service.Find(noContext, args) - if err == nil { - t.Errorf("Expect http.Response error") - } else if err.Error() != "Not Found" { - t.Errorf("Expect Not Found error") - } - - if gock.IsPending() { - t.Errorf("Unfinished requests") - } -} - -func TestGlobalEmpty(t *testing.T) { - defer gock.Off() - - gock.New("https://company.com"). - Post("/config"). - MatchHeader("Accept", "application/vnd.drone.config.v1\\+json"). - MatchHeader("Accept-Encoding", "identity"). - MatchHeader("Content-Type", "application/json"). - Reply(204). - Done() - - args := &core.ConfigArgs{ - User: &core.User{Login: "octocat"}, - Repo: &core.Repository{Slug: "octocat/hello-world", Config: ".drone.yml"}, - Build: &core.Build{After: "6d144de7"}, - } - - service := Global("https://company.com/config", "GMEuUHQfmrMRsseWxi9YlIeBtn9lm6im", - false, time.Minute) - result, err := service.Find(noContext, args) - if err != nil { - t.Error(err) - return - } - if result != nil { - t.Errorf("Expect empty data") - } - - if gock.IsPending() { - t.Errorf("Unfinished requests") - return - } -} - -func TestGlobalDisabled(t *testing.T) { - res, err := Global("", "", false, time.Minute).Find(noContext, nil) - if err != nil { - t.Error(err) - } - if res != nil { - t.Errorf("expect nil config when disabled") - } -} diff --git a/plugin/config/jsonnet.go b/plugin/config/jsonnet.go deleted file mode 100644 index b55cf835ac..0000000000 --- a/plugin/config/jsonnet.go +++ /dev/null @@ -1,77 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package config - -import ( - "bytes" - "context" - "strings" - - "github.com/drone/drone/core" - - "github.com/google/go-jsonnet" -) - -// Jsonnet returns a configuration service that fetches the -// jsonnet file directly from the source code management (scm) -// system and converts to a yaml file. 
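Note: the tests above stub the extension endpoint with gock, which by default intercepts http.DefaultTransport. A minimal standalone sketch of that stubbing outside the Drone client; the endpoint and response body are made up.

```go
package main

import (
	"fmt"
	"io"
	"net/http"

	"github.com/h2non/gock"
)

func main() {
	defer gock.Off()

	// Hypothetical endpoint; gock intercepts the default transport.
	gock.New("https://company.com").
		Post("/config").
		Reply(200).
		BodyString(`{"data": "{ kind: pipeline, name: default }"}`)

	res, err := http.Post("https://company.com/config", "application/json", nil)
	if err != nil {
		panic(err)
	}
	defer res.Body.Close()

	body, _ := io.ReadAll(res.Body)
	fmt.Println(res.StatusCode, string(body))
	fmt.Println("pending mocks:", gock.IsPending())
}
```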
-func Jsonnet(service core.FileService, enabled bool) core.ConfigService { - return &jsonnetPlugin{ - enabled: enabled, - repos: &repo{files: service}, - } -} - -type jsonnetPlugin struct { - enabled bool - repos *repo -} - -func (p *jsonnetPlugin) Find(ctx context.Context, req *core.ConfigArgs) (*core.Config, error) { - if p.enabled == false { - return nil, nil - } - - // if the file extension is not jsonnet we can - // skip this plugin by returning zero values. - if strings.HasSuffix(req.Repo.Config, ".jsonnet") == false { - return nil, nil - } - - // get the file contents. - config, err := p.repos.Find(ctx, req) - if err != nil { - return nil, err - } - - // TODO(bradrydzewski) temporarily disable file imports - // TODO(bradrydzewski) handle object vs array output - - // create the jsonnet vm - vm := jsonnet.MakeVM() - vm.MaxStack = 500 - vm.StringOutput = false - vm.ErrorFormatter.SetMaxStackTraceSize(20) - - // convert the jsonnet file to yaml - buf := new(bytes.Buffer) - docs, err := vm.EvaluateSnippetStream(req.Repo.Config, config.Data) - if err != nil { - return nil, err - } - - // the jsonnet vm returns a stream of yaml documents - // that need to be combined into a single yaml file. - for _, doc := range docs { - buf.WriteString("---") - buf.WriteString("\n") - buf.WriteString(doc) - } - - config.Data = buf.String() - return config, nil -} diff --git a/plugin/config/jsonnet_oss.go b/plugin/config/jsonnet_oss.go deleted file mode 100644 index dc38165deb..0000000000 --- a/plugin/config/jsonnet_oss.go +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build oss - -package config - -import "github.com/drone/drone/core" - -// Jsonnet returns a no-op configuration service. -func Jsonnet(service core.FileService, enabled bool) core.ConfigService { - return new(noop) -} diff --git a/plugin/config/jsonnet_test.go b/plugin/config/jsonnet_test.go deleted file mode 100644 index 9d26a590cf..0000000000 --- a/plugin/config/jsonnet_test.go +++ /dev/null @@ -1,7 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package config diff --git a/plugin/config/memoize.go b/plugin/config/memoize.go deleted file mode 100644 index f46520324e..0000000000 --- a/plugin/config/memoize.go +++ /dev/null @@ -1,106 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. - -// +build !oss - -package config - -import ( - "context" - "fmt" - - "github.com/drone/drone/core" - - lru "github.com/hashicorp/golang-lru" - "github.com/sirupsen/logrus" -) - -// cache key pattern used in the cache, comprised of the -// repository slug and commit sha. -const keyf = "%d|%d|%s|%s|%s|%s|%s" - -// Memoize caches the conversion results for subsequent calls. -// This micro-optimization is intended for multi-pipeline -// projects that would otherwise covert the file for each -// pipeline execution. -func Memoize(base core.ConfigService) core.ConfigService { - // simple cache prevents the same yaml file from being - // requested multiple times in a short period. - cache, _ := lru.New(10) - return &memoize{base: base, cache: cache} -} - -type memoize struct { - base core.ConfigService - cache *lru.Cache -} - -func (c *memoize) Find(ctx context.Context, req *core.ConfigArgs) (*core.Config, error) { - // this is a minor optimization that prevents caching if the - // base converter is a global config service and is disabled. - if global, ok := c.base.(*global); ok == true && global.client == nil { - return nil, nil - } - - // generate the key used to cache the converted file. - key := fmt.Sprintf(keyf, - req.Repo.ID, - req.Build.Created, - req.Build.Event, - req.Build.Action, - req.Build.Ref, - req.Build.After, - req.Repo.Config, - ) - - logger := logrus.WithField("repo", req.Repo.Slug). - WithField("build", req.Build.Event). - WithField("action", req.Build.Action). - WithField("ref", req.Build.Ref). - WithField("rev", req.Build.After). - WithField("config", req.Repo.Config) - - logger.Trace("extension: configuration: check cache") - - // check the cache for the file and return if exists. - cached, ok := c.cache.Get(key) - if ok { - logger.Trace("extension: configuration: cache hit") - return cached.(*core.Config), nil - } - - logger.Trace("extension: configuration: cache miss") - - // else find the configuration file. - config, err := c.base.Find(ctx, req) - if err != nil { - return nil, err - } - - if config == nil { - return nil, nil - } - if config.Data == "" { - return nil, nil - } - - // if the configuration file was retrieved - // it is temporarily cached. Note that we do - // not cache if the commit sha is empty (gogs). - if req.Build.After != "" { - c.cache.Add(key, config) - } - - return config, nil -} diff --git a/plugin/config/memoize_oss.go b/plugin/config/memoize_oss.go deleted file mode 100644 index d901b7bc3c..0000000000 --- a/plugin/config/memoize_oss.go +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build oss - -package config - -import ( - "github.com/drone/drone/core" -) - -// Memoize caches the conversion results for subsequent calls. -// This micro-optimization is intended for multi-pipeline -// projects that would otherwise covert the file for each -// pipeline execution. 
-func Memoize(base core.ConfigService) core.ConfigService { - return new(noop) -} diff --git a/plugin/config/memoize_test.go b/plugin/config/memoize_test.go deleted file mode 100644 index 74975776f0..0000000000 --- a/plugin/config/memoize_test.go +++ /dev/null @@ -1,159 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package config - -import ( - "errors" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/mock" - - "github.com/golang/mock/gomock" -) - -func TestMemoize(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - conf := &core.Config{Data: "{kind: pipeline, type: docker, steps: []}"} - args := &core.ConfigArgs{ - Build: &core.Build{After: "3950521325d4744760a96c18e3d0c67d86495af3"}, - Repo: &core.Repository{ID: 42}, - Config: conf, - } - - base := mock.NewMockConfigService(controller) - base.EXPECT().Find(gomock.Any(), gomock.Any()).Return(args.Config, nil) - - service := Memoize(base).(*memoize) - _, err := service.Find(noContext, args) - if err != nil { - t.Error(err) - return - } - - if got, want := service.cache.Len(), 1; got != want { - t.Errorf("Expect %d items in cache, got %d", want, got) - } - - args.Config = nil // set to nil to prove we get the cached value - res, err := service.Find(noContext, args) - if err != nil { - t.Error(err) - return - } - if res != conf { - t.Errorf("Expect result from cache") - } - - if got, want := service.cache.Len(), 1; got != want { - t.Errorf("Expect %d items in cache, got %d", want, got) - } -} - -func TestMemoize_Tag(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - args := &core.ConfigArgs{ - Build: &core.Build{Ref: "refs/tags/v1.0.0"}, - Repo: &core.Repository{ID: 42}, - Config: &core.Config{Data: "{kind: pipeline, type: docker, steps: []}"}, - } - - base := mock.NewMockConfigService(controller) - base.EXPECT().Find(gomock.Any(), gomock.Any()).Return(args.Config, nil) - - service := Memoize(base).(*memoize) - res, err := service.Find(noContext, args) - if err != nil { - t.Error(err) - return - } - if res != args.Config { - t.Errorf("Expect result from cache") - } -} - -func TestMemoize_Empty(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - args := &core.ConfigArgs{ - Build: &core.Build{After: "3950521325d4744760a96c18e3d0c67d86495af3"}, - Repo: &core.Repository{ID: 42}, - Config: &core.Config{Data: ""}, // empty - } - - base := mock.NewMockConfigService(controller) - base.EXPECT().Find(gomock.Any(), gomock.Any()).Return(args.Config, nil) - - service := Memoize(base).(*memoize) - res, err := service.Find(noContext, args) - if err != nil { - t.Error(err) - return - } - if res != nil { - t.Errorf("Expect nil response") - } - if got, want := service.cache.Len(), 0; got != want { - t.Errorf("Expect %d items in cache, got %d", want, got) - } -} - -func TestMemoize_Nil(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - args := &core.ConfigArgs{ - Build: &core.Build{After: "3950521325d4744760a96c18e3d0c67d86495af3"}, - Repo: &core.Repository{ID: 42}, - Config: nil, - } - - base := mock.NewMockConfigService(controller) - base.EXPECT().Find(gomock.Any(), gomock.Any()).Return(args.Config, nil) - - service := Memoize(base).(*memoize) - res, err := service.Find(noContext, args) - if err != nil { - t.Error(err) - return - } - if res 
!= nil { - t.Errorf("Expect nil response") - } - if got, want := service.cache.Len(), 0; got != want { - t.Errorf("Expect %d items in cache, got %d", want, got) - } -} - -func TestMemoize_Error(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - args := &core.ConfigArgs{ - Build: &core.Build{After: "3950521325d4744760a96c18e3d0c67d86495af3"}, - Repo: &core.Repository{ID: 42}, - } - - want := errors.New("not found") - base := mock.NewMockConfigService(controller) - base.EXPECT().Find(gomock.Any(), gomock.Any()).Return(nil, want) - - service := Memoize(base).(*memoize) - _, err := service.Find(noContext, args) - if err == nil { - t.Errorf("Expect error from base returned to caller") - return - } - if got, want := service.cache.Len(), 0; got != want { - t.Errorf("Expect %d items in cache, got %d", want, got) - } -} diff --git a/plugin/config/repo.go b/plugin/config/repo.go deleted file mode 100644 index d6e8eb0b51..0000000000 --- a/plugin/config/repo.go +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package config - -import ( - "context" - - "github.com/drone/drone/core" -) - -// Repository returns a configuration service that fetches the yaml -// directly from the source code management (scm) system. -func Repository(service core.FileService) core.ConfigService { - return &repo{files: service} -} - -type repo struct { - files core.FileService -} - -func (r *repo) Find(ctx context.Context, req *core.ConfigArgs) (*core.Config, error) { - raw, err := r.files.Find(ctx, req.User, req.Repo.Slug, req.Build.After, req.Build.Ref, req.Repo.Config) - if err != nil { - return nil, err - } - return &core.Config{ - Data: string(raw.Data), - }, err -} diff --git a/plugin/config/repo_test.go b/plugin/config/repo_test.go deleted file mode 100644 index 9b89705394..0000000000 --- a/plugin/config/repo_test.go +++ /dev/null @@ -1,75 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -package config - -import ( - "context" - "errors" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/mock" - - "github.com/golang/mock/gomock" -) - -var noContext = context.TODO() - -var mockFile = []byte(` -kind: pipeline -name: default - -steps: [] -`) - -func TestRepository(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - args := &core.ConfigArgs{ - User: &core.User{Login: "octocat"}, - Repo: &core.Repository{Slug: "octocat/hello-world", Config: ".drone.yml"}, - Build: &core.Build{After: "6d144de7"}, - Config: nil, - } - - resp := &core.File{Data: mockFile} - - files := mock.NewMockFileService(controller) - files.EXPECT().Find(noContext, args.User, args.Repo.Slug, args.Build.After, args.Build.Ref, args.Repo.Config).Return(resp, nil) - - service := Repository(files) - result, err := service.Find(noContext, args) - if err != nil { - t.Error(err) - } - - if result.Data != string(resp.Data) { - t.Errorf("unexpected file contents") - } -} - -func TestRepositoryErr(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - args := &core.ConfigArgs{ - User: &core.User{Login: "octocat"}, - Repo: &core.Repository{Slug: "octocat/hello-world", Config: ".drone.yml"}, - Build: &core.Build{After: "6d144de7"}, - Config: nil, - } - - resp := errors.New("") - - files := mock.NewMockFileService(controller) - files.EXPECT().Find(noContext, args.User, args.Repo.Slug, args.Build.After, args.Build.Ref, args.Repo.Config).Return(nil, resp) - - service := Repository(files) - _, err := service.Find(noContext, args) - if err != resp { - t.Errorf("expect error returned from file service") - } -} diff --git a/plugin/converter/combine.go b/plugin/converter/combine.go deleted file mode 100644 index 05144807d8..0000000000 --- a/plugin/converter/combine.go +++ /dev/null @@ -1,56 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package converter - -import ( - "context" - - "github.com/drone/drone/core" -) - -// Combine combines the conversion services, provision support -// for multiple conversion utilities. 
-func Combine(multi bool, services ...core.ConvertService) core.ConvertService { - return &combined{multi: multi, sources: services} -} - -type combined struct { - sources []core.ConvertService - - // this feature flag can be removed once we solve for - // https://github.com/harness/drone/pull/2994#issuecomment-795955312 - multi bool -} - -func (c *combined) Convert(ctx context.Context, req *core.ConvertArgs) (*core.Config, error) { - for _, source := range c.sources { - config, err := source.Convert(ctx, req) - if err != nil { - return nil, err - } - if config == nil { - continue - } - if config.Data == "" { - continue - } - if c.multi { - req.Config = config - } else { - return config, nil - } - } - return req.Config, nil -} diff --git a/plugin/converter/combine_test.go b/plugin/converter/combine_test.go deleted file mode 100644 index 28d839ef8e..0000000000 --- a/plugin/converter/combine_test.go +++ /dev/null @@ -1,122 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package converter - -import ( - "context" - "errors" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/mock" - - "github.com/golang/mock/gomock" -) - -var noContext = context.Background() - -var mockFile = ` -kind: pipeline -type: docker -name: testing -` - -func TestCombine(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - args := &core.ConvertArgs{ - User: &core.User{Login: "octocat"}, - Repo: &core.Repository{Slug: "octocat/hello-world", Config: ".drone.yml"}, - Build: &core.Build{After: "6d144de7"}, - Config: &core.Config{}, - } - - resp := &core.Config{Data: string(mockFile)} - - service := mock.NewMockConvertService(controller) - service.EXPECT().Convert(noContext, args).Return(resp, nil) - - result, err := Combine(false, service).Convert(noContext, args) - if err != nil { - t.Error(err) - return - } - - if result.Data != string(resp.Data) { - t.Errorf("unexpected file contents") - } -} - -func TestCombineErr(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - resp := errors.New("") - service := mock.NewMockConvertService(controller) - service.EXPECT().Convert(noContext, nil).Return(nil, resp) - - _, err := Combine(false, service).Convert(noContext, nil) - if err != resp { - t.Errorf("expected convert service error") - } -} - -func TestCombineNoConfig(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - args := &core.ConvertArgs{ - User: &core.User{Login: "octocat"}, - Repo: &core.Repository{Slug: "octocat/hello-world", Config: ".drone.yml"}, - Build: &core.Build{After: "6d144de7"}, - } - - resp := &core.Config{Data: string(mockFile)} - - service1 := mock.NewMockConvertService(controller) - service1.EXPECT().Convert(noContext, args).Return(nil, nil) - - service2 := mock.NewMockConvertService(controller) - service2.EXPECT().Convert(noContext, args).Return(&core.Config{}, nil) - - service3 := mock.NewMockConvertService(controller) - service3.EXPECT().Convert(noContext, args).Return(resp, nil) - - result, err := Combine(false, service1, service2, service3).Convert(noContext, args) - if err != nil { - t.Error(err) - return - } - - if result.Data != string(resp.Data) { - t.Errorf("unexpected file contents") - } -} - -func TestCombineEmptyConfig(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - args := &core.ConvertArgs{ - 
User: &core.User{Login: "octocat"}, - Repo: &core.Repository{Slug: "octocat/hello-world", Config: ".drone.yml"}, - Build: &core.Build{After: "6d144de7"}, - Config: &core.Config{Data: string(mockFile)}, - } - - service1 := mock.NewMockConvertService(controller) - service1.EXPECT().Convert(noContext, args).Return(nil, nil) - - result, err := Combine(false, service1).Convert(noContext, args) - if err != nil { - t.Error(err) - return - } - - if result != args.Config { - t.Errorf("unexpected file contents") - } -} diff --git a/plugin/converter/jsonnet.go b/plugin/converter/jsonnet.go deleted file mode 100644 index a0060d5ef4..0000000000 --- a/plugin/converter/jsonnet.go +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package converter - -import ( - "context" - "strings" - - "github.com/drone/drone/core" - "github.com/drone/drone/plugin/converter/jsonnet" -) - -// TODO(bradrydzewski) handle jsonnet imports -// TODO(bradrydzewski) handle jsonnet object vs array output - -// Jsonnet returns a conversion service that converts the -// jsonnet file to a yaml file. -func Jsonnet(enabled bool, limit int, fileService core.FileService) core.ConvertService { - return &jsonnetPlugin{ - enabled: enabled, - limit: limit, - fileService: fileService, - } -} - -type jsonnetPlugin struct { - enabled bool - limit int - fileService core.FileService -} - -func (p *jsonnetPlugin) Convert(ctx context.Context, req *core.ConvertArgs) (*core.Config, error) { - if p.enabled == false { - return nil, nil - } - - // if the file extension is not jsonnet we can - // skip this plugin by returning zero values. - if strings.HasSuffix(req.Repo.Config, ".jsonnet") == false { - return nil, nil - } - - file, err := jsonnet.Parse(req, p.fileService, p.limit, nil, nil) - - if err != nil { - return nil, err - } - return &core.Config{ - Data: file, - }, nil -} diff --git a/plugin/converter/jsonnet/jsonnet.go b/plugin/converter/jsonnet/jsonnet.go deleted file mode 100644 index ee6642cd5b..0000000000 --- a/plugin/converter/jsonnet/jsonnet.go +++ /dev/null @@ -1,202 +0,0 @@ -package jsonnet - -import ( - "bytes" - "context" - "fmt" - "path" - "strconv" - "strings" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/errors" - - "github.com/google/go-jsonnet" -) - -const repo = "repo." -const build = "build." -const param = "param." - -var noContext = context.Background() - -type importer struct { - repo *core.Repository - build *core.Build - - // jsonnet does not cache file imports and may request - // the same file multiple times. We cache the files to - // duplicate API calls. - cache map[string]jsonnet.Contents - - // limit the number of outbound requests. github limits - // the number of api requests per hour, so we should - // make sure that a single build does not abuse the api - // by importing dozens of files. - limit int - - // counts the number of outbound requests. if the count - // exceeds the limit, the importer will return errors. - count int - - fileService core.FileService - user *core.User -} - -func (i *importer) Import(importedFrom, importedPath string) (contents jsonnet.Contents, foundAt string, err error) { - if i.cache == nil { - i.cache = map[string]jsonnet.Contents{} - } - - // the import is relative to the imported from path. the - // imported path must resolve to a filepath relative to - // the root of the repository. 
- importedPath = path.Join( - path.Dir(importedFrom), - importedPath, - ) - - if strings.HasPrefix(importedFrom, "../") { - err = fmt.Errorf("jsonnet: cannot resolve import: %s", importedPath) - return contents, foundAt, err - } - - // if the contents exist in the cache, return the - // cached item. - if contents, ok := i.cache[importedPath]; ok { - return contents, importedPath, nil - } - - defer func() { - i.count++ - }() - - // if the import limit is exceeded log an error message. - if i.limit > 0 && i.count >= i.limit { - return contents, foundAt, errors.New("jsonnet: import limit exceeded") - } - - find, err := i.fileService.Find(noContext, i.user, i.repo.Slug, i.build.After, i.build.Ref, importedPath) - - if err != nil { - return contents, foundAt, err - } - - i.cache[importedPath] = jsonnet.MakeContents(string(find.Data)) - - return i.cache[importedPath], importedPath, err -} - -func Parse(req *core.ConvertArgs, fileService core.FileService, limit int, template *core.Template, templateData map[string]interface{}) (string, error) { - vm := jsonnet.MakeVM() - vm.MaxStack = 500 - vm.StringOutput = false - vm.ErrorFormatter.SetMaxStackTraceSize(20) - if fileService != nil && limit > 0 { - vm.Importer( - &importer{ - repo: req.Repo, - build: req.Build, - limit: limit, - user: req.User, - fileService: fileService, - }, - ) - } - - //map build/repo parameters - if req.Build != nil { - mapBuild(req.Build, vm) - } - if req.Repo != nil { - mapRepo(req.Repo, vm) - } - - var jsonnetFile string - var jsonnetFileName string - if template != nil { - jsonnetFile = template.Data - jsonnetFileName = template.Name - } else { - jsonnetFile = req.Config.Data - jsonnetFileName = req.Repo.Config - } - // map external inputs - if len(templateData) != 0 { - for k, v := range templateData { - key := fmt.Sprintf("input." + k) - val := fmt.Sprint(v) - vm.ExtVar(key, val) - } - } - - // convert the jsonnet file to yaml - buf := new(bytes.Buffer) - docs, err := vm.EvaluateAnonymousSnippetStream(jsonnetFileName, jsonnetFile) - if err != nil { - doc, err2 := vm.EvaluateAnonymousSnippet(jsonnetFileName, jsonnetFile) - if err2 != nil { - return "", err - } - docs = append(docs, doc) - } - - // the jsonnet vm returns a stream of yaml documents - // that need to be combined into a single yaml file. 
- for _, doc := range docs { - buf.WriteString("---") - buf.WriteString("\n") - buf.WriteString(doc) - } - - return buf.String(), nil -} - -func mapBuild(v *core.Build, vm *jsonnet.VM) { - vm.ExtVar(build+"event", v.Event) - vm.ExtVar(build+"action", v.Action) - vm.ExtVar(build+"environment", v.Deploy) - vm.ExtVar(build+"link", v.Link) - vm.ExtVar(build+"branch", v.Target) - vm.ExtVar(build+"source", v.Source) - vm.ExtVar(build+"before", v.Before) - vm.ExtVar(build+"after", v.After) - vm.ExtVar(build+"target", v.Target) - vm.ExtVar(build+"ref", v.Ref) - vm.ExtVar(build+"commit", v.After) - vm.ExtVar(build+"ref", v.Ref) - vm.ExtVar(build+"title", v.Title) - vm.ExtVar(build+"message", v.Message) - vm.ExtVar(build+"source_repo", v.Fork) - vm.ExtVar(build+"author_login", v.Author) - vm.ExtVar(build+"author_name", v.AuthorName) - vm.ExtVar(build+"author_email", v.AuthorEmail) - vm.ExtVar(build+"author_avatar", v.AuthorAvatar) - vm.ExtVar(build+"sender", v.Sender) - fromMap(v.Params, vm) -} - -func mapRepo(v *core.Repository, vm *jsonnet.VM) { - vm.ExtVar(repo+"uid", v.UID) - vm.ExtVar(repo+"name", v.Name) - vm.ExtVar(repo+"namespace", v.Namespace) - vm.ExtVar(repo+"slug", v.Slug) - vm.ExtVar(repo+"git_http_url", v.HTTPURL) - vm.ExtVar(repo+"git_ssh_url", v.SSHURL) - vm.ExtVar(repo+"link", v.Link) - vm.ExtVar(repo+"branch", v.Branch) - vm.ExtVar(repo+"config", v.Config) - vm.ExtVar(repo+"private", strconv.FormatBool(v.Private)) - vm.ExtVar(repo+"visibility", v.Visibility) - vm.ExtVar(repo+"active", strconv.FormatBool(v.Active)) - vm.ExtVar(repo+"trusted", strconv.FormatBool(v.Trusted)) - vm.ExtVar(repo+"protected", strconv.FormatBool(v.Protected)) - vm.ExtVar(repo+"ignore_forks", strconv.FormatBool(v.IgnoreForks)) - vm.ExtVar(repo+"ignore_pull_requests", strconv.FormatBool(v.IgnorePulls)) -} - -func fromMap(m map[string]string, vm *jsonnet.VM) { - for k, v := range m { - vm.ExtVar(build+param+k, v) - } -} diff --git a/plugin/converter/jsonnet/jsonnet_test.go b/plugin/converter/jsonnet/jsonnet_test.go deleted file mode 100644 index 94a661bbf1..0000000000 --- a/plugin/converter/jsonnet/jsonnet_test.go +++ /dev/null @@ -1,108 +0,0 @@ -package jsonnet - -import ( - "io/ioutil" - "runtime" - "strings" - "testing" - - "github.com/drone/drone/core" -) - -func TestParse(t *testing.T) { - before, err := ioutil.ReadFile("../testdata/input.jsonnet") - if err != nil { - t.Error(err) - return - } - - after, err := ioutil.ReadFile("../testdata/input.jsonnet.golden") - if err != nil { - t.Error(err) - return - } - - req := &core.ConvertArgs{ - Build: &core.Build{ - After: "3d21ec53a331a6f037a91c368710b99387d012c1", - }, - Repo: &core.Repository{ - Slug: "octocat/hello-world", - Config: ".drone.yml", - }, - Config: &core.Config{}, - } - - template := &core.Template{ - Name: "my_template.jsonnet", - Data: string(before), - } - - templateData := map[string]interface{}{ - "stepName": "my_step", - "image": "my_image", - "commands": "my_command", - } - - req.Config.Data = string(before) - - got, err := Parse(req, nil, 0, template, templateData) - if err != nil { - t.Error(err) - return - } - - want := string(after) - // on windows line endings are \r\n, lets change them to linux for comparison - if runtime.GOOS == "windows" { - want = strings.Replace(want, "\r\n", "\n", -1) - } - - if want != got { - t.Errorf("Want %q got %q", want, got) - } -} - -func TestParseJsonnetNotTemplateFile(t *testing.T) { - before, err := ioutil.ReadFile("../testdata/single.jsonnet") - if err != nil { - t.Error(err) - return - } - - 
after, err := ioutil.ReadFile("../testdata/input.jsonnet.golden") - if err != nil { - t.Error(err) - return - } - - req := &core.ConvertArgs{ - Build: &core.Build{ - After: "3d21ec53a331a6f037a91c368710b99387d012c1", - }, - Repo: &core.Repository{ - Slug: "octocat/hello-world", - Config: ".drone.jsonnet", - }, - Config: &core.Config{}, - } - - req.Repo.Config = "plugin.jsonnet" - req.Config.Data = string(before) - - got, err := Parse(req, nil, 0, nil, nil) - if err != nil { - t.Error(err) - return - } - - want := string(after) - // on windows line endings are \r\n, lets change them to linux for comparison - if runtime.GOOS == "windows" { - want = strings.Replace(want, "\r\n", "\n", -1) - } - - if want != got { - t.Errorf("Want %q got %q", want, got) - } -} diff --git a/plugin/converter/jsonnet_oss.go b/plugin/converter/jsonnet_oss.go deleted file mode 100644 index 67614961fa..0000000000 --- a/plugin/converter/jsonnet_oss.go +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build oss - -package converter - -import ( - "github.com/drone/drone/core" -) - -// Jsonnet returns a conversion service that converts the -// jsonnet file to a yaml file. -func Jsonnet(enabled bool, limit int, fileService core.FileService) core.ConvertService { - return new(noop) -} diff --git a/plugin/converter/jsonnet_test.go b/plugin/converter/jsonnet_test.go deleted file mode 100644 index 243ad4537b..0000000000 --- a/plugin/converter/jsonnet_test.go +++ /dev/null @@ -1,293 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -// +build !oss - -package converter - -import ( - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/mock" - - "github.com/golang/mock/gomock" -) - -const jsonnetFile = `{"foo": "bar"}` -const jsonnetFileAfter = `--- -{ - "foo": "bar" -} -` - -const jsonnetStream = `[{"foo": "bar"}]` -const jsonnetStreamAfter = `--- -{ - "foo": "bar" -} -` - -const jsonnetFileImport = `local step = import '.step.libsonnet'; -{"foo": ["bar"], "steps": [step]}` -const jsonnetFileImportLib = `{"image": "app"}` -const jsonnetFileImportAfter = `--- -{ - "foo": [ - "bar" - ], - "steps": [ - { - "image": "app" - } - ] -} -` - -const jsonnetFileMultipleImports = `local step = import '.step.libsonnet'; -local step2 = import '.step2.jsonnet'; -{"foo": ["bar"], "steps": [step, step2]}` - -func TestJsonnet_Stream(t *testing.T) { - args := &core.ConvertArgs{ - Repo: &core.Repository{Config: ".drone.jsonnet"}, - Config: &core.Config{Data: jsonnetStream}, - } - service := Jsonnet(true, 0, nil) - res, err := service.Convert(noContext, args) - if err != nil { - t.Error(err) - return - } - if res == nil { - t.Errorf("Expected a converted file, got nil") - return - } - if got, want := res.Data, jsonnetStreamAfter; got != want { - t.Errorf("Want converted file %q, got %q", want, got) - } -} - -func TestJsonnet_Snippet(t *testing.T) { - args := &core.ConvertArgs{ - Repo: &core.Repository{Config: ".drone.jsonnet"}, - Config: &core.Config{Data: jsonnetFile}, - } - service := Jsonnet(true, 0, nil) - res, err := service.Convert(noContext, args) - if err != nil { - t.Error(err) - return - } - if res == nil { - t.Errorf("Expected a converted file, got nil") - return - } - if got, want := res.Data, jsonnetFileAfter; got != want { - t.Errorf("Want converted file %q, got %q", want, got) - } -} - -func TestJsonnet_Error(t *testing.T) { - args := &core.ConvertArgs{ - Repo: &core.Repository{Config: ".drone.jsonnet"}, - Config: &core.Config{Data: "\\"}, // invalid jsonnet - } - service := Jsonnet(true, 0, nil) - _, err := service.Convert(noContext, args) - if err == nil { - t.Errorf("Expect jsonnet parsing error, got nil") - } -} - -func TestJsonnet_Disabled(t *testing.T) { - service := Jsonnet(false, 0, nil) - res, err := service.Convert(noContext, nil) - if err != nil { - t.Error(err) - } - if res != nil { - t.Errorf("Expect nil response when disabled") - } -} - -func TestJsonnet_NotJsonnet(t *testing.T) { - args := &core.ConvertArgs{ - Repo: &core.Repository{Config: ".drone.yml"}, - } - service := Jsonnet(true, 0, nil) - res, err := service.Convert(noContext, args) - if err != nil { - t.Error(err) - } - if res != nil { - t.Errorf("Expect nil response when not jsonnet") - } -} - -func TestJsonnet_Import(t *testing.T) { - args := &core.ConvertArgs{ - Build: &core.Build{ - Ref: "a6586b3db244fb6b1198f2b25c213ded5b44f9fa", - After: "542ed565d03dab86f079798f937663ec1f05360b", - }, - Repo: &core.Repository{ - Slug: "octocat/hello-world", - Config: ".drone.jsonnet"}, - Config: &core.Config{Data: jsonnetFileImport}, - User: &core.User{ - Token: "foobar", - }, - } - importedContent := &core.File{ - Data: []byte(jsonnetFileImportLib), - } - controller := gomock.NewController(t) - mockFileService := mock.NewMockFileService(controller) - mockFileService.EXPECT().Find(gomock.Any(), &core.User{Token: "foobar"}, "octocat/hello-world", "542ed565d03dab86f079798f937663ec1f05360b", "a6586b3db244fb6b1198f2b25c213ded5b44f9fa", ".step.libsonnet").Return(importedContent, nil).Times(2) - service := Jsonnet(true, 1, mockFileService) - res, 
err := service.Convert(noContext, args) - if err != nil { - t.Error(err) - } - if got, want := res.Data, jsonnetFileImportAfter; got != want { - t.Errorf("Want converted file:\n%q\ngot\n%q", want, got) - } -} - -func TestJsonnet_ImportLimit(t *testing.T) { - args := &core.ConvertArgs{ - Build: &core.Build{ - Ref: "a6586b3db244fb6b1198f2b25c213ded5b44f9fa", - After: "542ed565d03dab86f079798f937663ec1f05360b", - }, - Repo: &core.Repository{ - Slug: "octocat/hello-world", - Config: ".drone.jsonnet"}, - Config: &core.Config{Data: jsonnetFileMultipleImports}, - User: &core.User{ - Token: "foobar", - }, - } - importedContent := &core.File{ - Data: []byte(jsonnetFileImportLib), - } - controller := gomock.NewController(t) - mockFileService := mock.NewMockFileService(controller) - mockFileService.EXPECT().Find(gomock.Any(), &core.User{Token: "foobar"}, "octocat/hello-world", "542ed565d03dab86f079798f937663ec1f05360b", "a6586b3db244fb6b1198f2b25c213ded5b44f9fa", ".step.libsonnet").Return(importedContent, nil).Times(2) - - service := Jsonnet(true, 1, mockFileService) - _, err := service.Convert(noContext, args) - if err == nil { - t.Errorf("Expect nil response when jsonnet import limit is exceeded") - } -} - -func TestJsonnet_LimitZero(t *testing.T) { - args := &core.ConvertArgs{ - Build: &core.Build{ - Ref: "a6586b3db244fb6b1198f2b25c213ded5b44f9fa", - After: "542ed565d03dab86f079798f937663ec1f05360b", - }, - Repo: &core.Repository{ - Slug: "octocat/hello-world", - Config: ".drone.jsonnet"}, - Config: &core.Config{Data: jsonnetFile}, - User: &core.User{ - Token: "foobar", - }, - } - - controller := gomock.NewController(t) - mockFileService := mock.NewMockFileService(controller) - mockFileService.EXPECT().Find(gomock.Any(), &core.User{Token: "foobar"}, "octocat/hello-world", "542ed565d03dab86f079798f937663ec1f05360b", "a6586b3db244fb6b1198f2b25c213ded5b44f9fa", ".step.libsonnet").Times(0) - - service := Jsonnet(true, 0, mockFileService) - res, err := service.Convert(noContext, args) - - if err != nil { - t.Error(err) - return - } - if got, want := res.Data, jsonnetFileAfter; got != want { - t.Errorf("Want converted file %q, got %q", want, got) - } -} - -func TestJsonnet_ImportLimitZero(t *testing.T) { - args := &core.ConvertArgs{ - Build: &core.Build{ - Ref: "a6586b3db244fb6b1198f2b25c213ded5b44f9fa", - After: "542ed565d03dab86f079798f937663ec1f05360b", - }, - Repo: &core.Repository{ - Slug: "octocat/hello-world", - Config: ".drone.jsonnet"}, - Config: &core.Config{Data: jsonnetFileImport}, - User: &core.User{ - Token: "foobar", - }, - } - importedContent := &core.File{ - Data: []byte(jsonnetFileImportLib), - } - controller := gomock.NewController(t) - mockFileService := mock.NewMockFileService(controller) - mockFileService.EXPECT().Find(gomock.Any(), &core.User{Token: "foobar"}, "octocat/hello-world", "542ed565d03dab86f079798f937663ec1f05360b", "a6586b3db244fb6b1198f2b25c213ded5b44f9fa", ".step.libsonnet").Return(importedContent, nil).Times(2) - - service := Jsonnet(true, 0, mockFileService) - _, err := service.Convert(noContext, args) - if err == nil { - t.Errorf("Expect nil response when jsonnet import limit is exceeded") - } -} - -func TestJsonnet_ImportFileServiceNil(t *testing.T) { - args := &core.ConvertArgs{ - Build: &core.Build{ - Ref: "a6586b3db244fb6b1198f2b25c213ded5b44f9fa", - After: "542ed565d03dab86f079798f937663ec1f05360b", - }, - Repo: &core.Repository{ - Slug: "octocat/hello-world", - Config: ".drone.jsonnet"}, - Config: &core.Config{Data: jsonnetFileMultipleImports}, - User: 
&core.User{ - Token: "foobar", - }, - } - - service := Jsonnet(true, 1, nil) - _, err := service.Convert(noContext, args) - if err == nil { - t.Errorf("Expect nil response when jsonnet import limit is exceeded") - } -} - -func TestJsonnet_FileServiceNil(t *testing.T) { - args := &core.ConvertArgs{ - Build: &core.Build{ - Ref: "a6586b3db244fb6b1198f2b25c213ded5b44f9fa", - After: "542ed565d03dab86f079798f937663ec1f05360b", - }, - Repo: &core.Repository{ - Slug: "octocat/hello-world", - Config: ".drone.jsonnet"}, - Config: &core.Config{Data: jsonnetFile}, - User: &core.User{ - Token: "foobar", - }, - } - - service := Jsonnet(true, 1, nil) - res, err := service.Convert(noContext, args) - - if err != nil { - t.Error(err) - return - } - if got, want := res.Data, jsonnetFileAfter; got != want { - t.Errorf("Want converted file %q, got %q", want, got) - } -} \ No newline at end of file diff --git a/plugin/converter/legacy.go b/plugin/converter/legacy.go deleted file mode 100644 index 271caa8e05..0000000000 --- a/plugin/converter/legacy.go +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package converter - -import ( - "context" - - "github.com/drone/drone/core" -) - -// Legacy returns a conversion service that converts a -// legacy 0.8 yaml file to a yaml file. -func Legacy(enabled bool) core.ConvertService { - return &legacyPlugin{ - enabled: enabled, - } -} - -type legacyPlugin struct { - enabled bool -} - -func (p *legacyPlugin) Convert(ctx context.Context, req *core.ConvertArgs) (*core.Config, error) { - if p.enabled == false { - return nil, nil - } - return &core.Config{ - Data: req.Config.Data, - }, nil -} diff --git a/plugin/converter/legacy_oss.go b/plugin/converter/legacy_oss.go deleted file mode 100644 index e5aa43e948..0000000000 --- a/plugin/converter/legacy_oss.go +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build oss - -package converter - -import ( - "github.com/drone/drone/core" -) - -// Legacy returns a conversion service that converts the -// legacy 0.8 file to a yaml file. -func Legacy(enabled bool) core.ConvertService { - return new(noop) -} diff --git a/plugin/converter/memoize.go b/plugin/converter/memoize.go deleted file mode 100644 index ea78b168a6..0000000000 --- a/plugin/converter/memoize.go +++ /dev/null @@ -1,111 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. - -// +build !oss - -package converter - -import ( - "context" - "fmt" - - "github.com/drone/drone/core" - - lru "github.com/hashicorp/golang-lru" - "github.com/sirupsen/logrus" -) - -// cache key pattern used in the cache, comprised of the -// repository slug and commit sha. -const keyf = "%d|%s|%s|%s|%s|%s" - -// Memoize caches the conversion results for subsequent calls. -// This micro-optimization is intended for multi-pipeline -// projects that would otherwise covert the file for each -// pipeline execution. -func Memoize(base core.ConvertService, size int) core.ConvertService { - // simple cache prevents the same yaml file from being - // requested multiple times in a short period. - cache, _ := lru.New(10) - return &memoize{base: base, cache: cache, size: size} -} - -type memoize struct { - base core.ConvertService - cache *lru.Cache - size int -} - -func (c *memoize) Convert(ctx context.Context, req *core.ConvertArgs) (*core.Config, error) { - // this is a minor optimization that prevents caching if the - // base converter is a remote converter and is disabled. - if remote, ok := c.base.(*remote); ok == true && remote.client == nil { - return nil, nil - } - - // the client can optionally disable cacheing. - if c.size == 0 { - return c.base.Convert(ctx, req) - } - - // generate the key used to cache the converted file. - key := fmt.Sprintf(keyf, - req.Repo.ID, - req.Build.Event, - req.Build.Action, - req.Build.Ref, - req.Build.After, - req.Repo.Config, - ) - - logger := logrus.WithField("repo", req.Repo.Slug). - WithField("build", req.Build.Event). - WithField("action", req.Build.Action). - WithField("ref", req.Build.Ref). - WithField("rev", req.Build.After). - WithField("config", req.Repo.Config) - - logger.Trace("extension: conversion: check cache") - - // check the cache for the file and return if exists. - cached, ok := c.cache.Get(key) - if ok { - logger.Trace("extension: conversion: cache hit") - return cached.(*core.Config), nil - } - - logger.Trace("extension: conversion: cache miss") - - // else convert the configuration file. - config, err := c.base.Convert(ctx, req) - if err != nil { - return nil, err - } - - if config == nil { - return nil, nil - } - if config.Data == "" { - return nil, nil - } - - // if the configuration file was converted - // it is temporarily cached. Note that we do - // not cache if the commit sha is empty (gogs). - if req.Build.After != "" { - c.cache.Add(key, config) - } - - return config, nil -} diff --git a/plugin/converter/memoize_oss.go b/plugin/converter/memoize_oss.go deleted file mode 100644 index 08d4aae53d..0000000000 --- a/plugin/converter/memoize_oss.go +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build oss - -package converter - -import ( - "github.com/drone/drone/core" -) - -// Memoize caches the conversion results for subsequent calls. 
-// This micro-optimization is intended for multi-pipeline -// projects that would otherwise covert the file for each -// pipeline execution. -func Memoize(base core.ConvertService, size int) core.ConvertService { - return new(noop) -} diff --git a/plugin/converter/memoize_test.go b/plugin/converter/memoize_test.go deleted file mode 100644 index f700734d11..0000000000 --- a/plugin/converter/memoize_test.go +++ /dev/null @@ -1,159 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package converter - -import ( - "errors" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/mock" - - "github.com/golang/mock/gomock" -) - -func TestMemoize(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - conf := &core.Config{Data: "{kind: pipeline, type: docker, steps: []}"} - args := &core.ConvertArgs{ - Build: &core.Build{After: "3950521325d4744760a96c18e3d0c67d86495af3"}, - Repo: &core.Repository{ID: 42}, - Config: conf, - } - - base := mock.NewMockConvertService(controller) - base.EXPECT().Convert(gomock.Any(), gomock.Any()).Return(args.Config, nil) - - service := Memoize(base, 10).(*memoize) - _, err := service.Convert(noContext, args) - if err != nil { - t.Error(err) - return - } - - if got, want := service.cache.Len(), 1; got != want { - t.Errorf("Expect %d items in cache, got %d", want, got) - } - - args.Config = nil // set to nil to prove we get the cached value - res, err := service.Convert(noContext, args) - if err != nil { - t.Error(err) - return - } - if res != conf { - t.Errorf("Expect result from cache") - } - - if got, want := service.cache.Len(), 1; got != want { - t.Errorf("Expect %d items in cache, got %d", want, got) - } -} - -func TestMemoize_Tag(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - args := &core.ConvertArgs{ - Build: &core.Build{Ref: "refs/tags/v1.0.0"}, - Repo: &core.Repository{ID: 42}, - Config: &core.Config{Data: "{kind: pipeline, type: docker, steps: []}"}, - } - - base := mock.NewMockConvertService(controller) - base.EXPECT().Convert(gomock.Any(), gomock.Any()).Return(args.Config, nil) - - service := Memoize(base, 10).(*memoize) - res, err := service.Convert(noContext, args) - if err != nil { - t.Error(err) - return - } - if res != args.Config { - t.Errorf("Expect result from cache") - } -} - -func TestMemoize_Empty(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - args := &core.ConvertArgs{ - Build: &core.Build{After: "3950521325d4744760a96c18e3d0c67d86495af3"}, - Repo: &core.Repository{ID: 42}, - Config: &core.Config{Data: ""}, // empty - } - - base := mock.NewMockConvertService(controller) - base.EXPECT().Convert(gomock.Any(), gomock.Any()).Return(args.Config, nil) - - service := Memoize(base, 10).(*memoize) - res, err := service.Convert(noContext, args) - if err != nil { - t.Error(err) - return - } - if res != nil { - t.Errorf("Expect nil response") - } - if got, want := service.cache.Len(), 0; got != want { - t.Errorf("Expect %d items in cache, got %d", want, got) - } -} - -func TestMemoize_Nil(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - args := &core.ConvertArgs{ - Build: &core.Build{After: "3950521325d4744760a96c18e3d0c67d86495af3"}, - Repo: &core.Repository{ID: 42}, - Config: nil, - } - - base := mock.NewMockConvertService(controller) - 
base.EXPECT().Convert(gomock.Any(), gomock.Any()).Return(args.Config, nil) - - service := Memoize(base, 10).(*memoize) - res, err := service.Convert(noContext, args) - if err != nil { - t.Error(err) - return - } - if res != nil { - t.Errorf("Expect nil response") - } - if got, want := service.cache.Len(), 0; got != want { - t.Errorf("Expect %d items in cache, got %d", want, got) - } -} - -func TestMemoize_Error(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - args := &core.ConvertArgs{ - Build: &core.Build{After: "3950521325d4744760a96c18e3d0c67d86495af3"}, - Repo: &core.Repository{ID: 42}, - } - - want := errors.New("not found") - base := mock.NewMockConvertService(controller) - base.EXPECT().Convert(gomock.Any(), gomock.Any()).Return(nil, want) - - service := Memoize(base, 10).(*memoize) - _, err := service.Convert(noContext, args) - if err == nil { - t.Errorf("Expect error from base returned to caller") - return - } - if got, want := service.cache.Len(), 0; got != want { - t.Errorf("Expect %d items in cache, got %d", want, got) - } -} diff --git a/plugin/converter/noop.go b/plugin/converter/noop.go deleted file mode 100644 index 422fb6c1f3..0000000000 --- a/plugin/converter/noop.go +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build oss - -package converter - -import ( - "context" - - "github.com/drone/drone/core" -) - -type noop struct{} - -func (noop) Convert(context.Context, *core.ConvertArgs) (*core.Config, error) { - return nil, nil -} diff --git a/plugin/converter/remote.go b/plugin/converter/remote.go deleted file mode 100644 index 41d59ace64..0000000000 --- a/plugin/converter/remote.go +++ /dev/null @@ -1,149 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -//go:build !oss -// +build !oss - -package converter - -import ( - "context" - "strings" - "time" - - "github.com/drone/drone-go/drone" - "github.com/drone/drone-go/plugin/converter" - "github.com/drone/drone/core" -) - -// Remote returns a conversion service that converts the -// configuration file using a remote http service. -func Remote(endpoint, signer, extension string, skipVerify bool, timeout time.Duration) core.ConvertService { - if endpoint == "" { - return new(remote) - } - return &remote{ - extension: extension, - client: converter.Client( - endpoint, - signer, - skipVerify, - ), - timeout: timeout, - } -} - -type remote struct { - client converter.Plugin - extension string - timeout time.Duration -} - -func (g *remote) Convert(ctx context.Context, in *core.ConvertArgs) (*core.Config, error) { - if g.client == nil { - return nil, nil - } - if g.extension != "" { - if !strings.HasSuffix(in.Repo.Config, g.extension) { - return nil, nil - } - } - // include a timeout to prevent an API call from - // hanging the build process indefinitely. 
The - // external service must return a response within - // the configured timeout (default 1m). - ctx, cancel := context.WithTimeout(ctx, g.timeout) - defer cancel() - - req := &converter.Request{ - Repo: toRepo(in.Repo), - Build: toBuild(in.Build), - Config: drone.Config{ - Data: in.Config.Data, - }, - Token: drone.Token{ - Access: in.User.Token, - Refresh: in.User.Refresh, - }, - } - - res, err := g.client.Convert(ctx, req) - if err != nil { - return nil, err - } - if res == nil { - return nil, nil - } - - // if no error is returned and the secret is empty, - // this indicates the client returned No Content, - // and we should exit with no secret, but no error. - if res.Data == "" { - return nil, nil - } - - return &core.Config{ - Kind: res.Kind, - Data: res.Data, - }, nil -} - -func toRepo(from *core.Repository) drone.Repo { - return drone.Repo{ - ID: from.ID, - UID: from.UID, - UserID: from.UserID, - Namespace: from.Namespace, - Name: from.Name, - Slug: from.Slug, - SCM: from.SCM, - HTTPURL: from.HTTPURL, - SSHURL: from.SSHURL, - Link: from.Link, - Branch: from.Branch, - Private: from.Private, - Visibility: from.Visibility, - Active: from.Active, - Config: from.Config, - Trusted: from.Trusted, - Protected: from.Protected, - Timeout: from.Timeout, - } -} - -func toBuild(from *core.Build) drone.Build { - return drone.Build{ - ID: from.ID, - RepoID: from.RepoID, - Trigger: from.Trigger, - Number: from.Number, - Parent: from.Parent, - Status: from.Status, - Error: from.Error, - Event: from.Event, - Action: from.Action, - Link: from.Link, - Timestamp: from.Timestamp, - Title: from.Title, - Message: from.Message, - Before: from.Before, - After: from.After, - Ref: from.Ref, - Fork: from.Fork, - Source: from.Source, - Target: from.Target, - Author: from.Author, - AuthorName: from.AuthorName, - AuthorEmail: from.AuthorEmail, - AuthorAvatar: from.AuthorAvatar, - Sender: from.Sender, - Params: from.Params, - Deploy: from.Deploy, - Started: from.Started, - Finished: from.Finished, - Created: from.Created, - Updated: from.Updated, - Version: from.Version, - } -} diff --git a/plugin/converter/remote_oss.go b/plugin/converter/remote_oss.go deleted file mode 100644 index 6a9fc9f2fc..0000000000 --- a/plugin/converter/remote_oss.go +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build oss - -package converter - -import ( - "time" - - "github.com/drone/drone/core" -) - -// Remote returns a conversion service that converts the -// configuration file using a remote http service. -func Remote(endpoint, signer, extension string, skipVerify bool, timeout time.Duration) core.ConvertService { - return new(noop) -} diff --git a/plugin/converter/remote_test.go b/plugin/converter/remote_test.go deleted file mode 100644 index b5bf4c99ab..0000000000 --- a/plugin/converter/remote_test.go +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. 
-// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package converter - -import ( - "context" - "testing" - "time" - - "github.com/drone/drone/core" - "github.com/h2non/gock" -) - -func TestConvert(t *testing.T) { - defer gock.Off() - - gock.New("https://company.com"). - Post("/convert"). - MatchHeader("Accept", "application/vnd.drone.convert.v1\\+json"). - MatchHeader("Accept-Encoding", "identity"). - MatchHeader("Content-Type", "application/json"). - Reply(200). - BodyString(`{"data": "{ kind: pipeline, type: docker, name: default }"}`). - Done() - - args := &core.ConvertArgs{ - User: &core.User{Login: "octocat"}, - Repo: &core.Repository{Slug: "octocat/hello-world", Config: ".drone.yml"}, - Build: &core.Build{After: "6d144de7"}, - Config: &core.Config{ - Data: "{ kind: pipeline, name: default }", - }, - } - - service := Remote("https://company.com/convert", "GMEuUHQfmrMRsseWxi9YlIeBtn9lm6im", "", - false, time.Minute) - result, err := service.Convert(context.Background(), args) - if err != nil { - t.Error(err) - return - } - - if result.Data != "{ kind: pipeline, type: docker, name: default }" { - t.Errorf("unexpected file contents") - } - - if gock.IsPending() { - t.Errorf("Unfinished requests") - return - } -} diff --git a/plugin/converter/starlark.go b/plugin/converter/starlark.go deleted file mode 100644 index 67659254be..0000000000 --- a/plugin/converter/starlark.go +++ /dev/null @@ -1,65 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build !oss - -package converter - -import ( - "context" - "strings" - - "github.com/drone/drone/core" - "github.com/drone/drone/plugin/converter/starlark" -) - -// Starlark returns a conversion service that converts the -// starlark file to a yaml file. -func Starlark(enabled bool, stepLimit uint64, sizeLimit uint64) core.ConvertService { - return &starlarkPlugin{ - enabled: enabled, - stepLimit: stepLimit, - sizeLimit: sizeLimit, - } -} - -type starlarkPlugin struct { - enabled bool - stepLimit uint64 - sizeLimit uint64 -} - -func (p *starlarkPlugin) Convert(ctx context.Context, req *core.ConvertArgs) (*core.Config, error) { - if p.enabled == false { - return nil, nil - } - - // if the file extension is not jsonnet we can - // skip this plugin by returning zero values. - switch { - case strings.HasSuffix(req.Repo.Config, ".script"): - case strings.HasSuffix(req.Repo.Config, ".star"): - case strings.HasSuffix(req.Repo.Config, ".starlark"): - default: - return nil, nil - } - - file, err := starlark.Parse(req, nil, nil, p.stepLimit, p.sizeLimit) - if err != nil { - return nil, err - } - return &core.Config{ - Data: file, - }, nil -} diff --git a/plugin/converter/starlark/args.go b/plugin/converter/starlark/args.go deleted file mode 100644 index edc8bf2b95..0000000000 --- a/plugin/converter/starlark/args.go +++ /dev/null @@ -1,179 +0,0 @@ -// Copyright 2019 Drone IO, Inc. 
-// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package starlark - -import ( - "fmt" - "reflect" - - "github.com/drone/drone/core" - - "go.starlark.net/starlark" - "go.starlark.net/starlarkstruct" -) - -// TODO(bradrydzewski) add repository id -// TODO(bradrydzewski) add repository timeout -// TODO(bradrydzewski) add repository counter -// TODO(bradrydzewski) add repository created -// TODO(bradrydzewski) add repository updated -// TODO(bradrydzewski) add repository synced -// TODO(bradrydzewski) add repository version - -// TODO(bradrydzewski) add build id, will always be zero value -// TODO(bradrydzewski) add build number, will always be zero value -// TODO(bradrydzewski) add build started, will always be zero value -// TODO(bradrydzewski) add build finished, will always be zero value -// TODO(bradrydzewski) add build created, will always be zero value -// TODO(bradrydzewski) add build updated, will always be zero value -// TODO(bradrydzewski) add build parent -// TODO(bradrydzewski) add build timestamp - -func createArgs(repo *core.Repository, build *core.Build, input map[string]interface{}) ([]starlark.Value, error) { - inputArgs, err := fromInput(input) - if err != nil { - return nil, err - } - args := []starlark.Value{ - starlarkstruct.FromStringDict( - starlark.String("context"), - starlark.StringDict{ - "repo": starlarkstruct.FromStringDict(starlark.String("repo"), fromRepo(repo)), - "build": starlarkstruct.FromStringDict(starlark.String("build"), fromBuild(build)), - "input": starlarkstruct.FromStringDict(starlark.String("input"), inputArgs), - }, - ), - } - return args, nil -} - -func fromInput(input map[string]interface{}) (starlark.StringDict, error) { - out := map[string]starlark.Value{} - for key, value := range input { - v := reflect.ValueOf(value) - result, err := toValue(v) - if err != nil { - return nil, err - } - out[key] = result - } - return out, nil -} - -func toValue(val reflect.Value) (starlark.Value, error) { - kind := val.Kind() - if kind == reflect.Ptr { - kind = val.Elem().Kind() - } - switch kind { - case reflect.Bool: - return starlark.Bool(val.Bool()), nil - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - return starlark.MakeInt64(val.Int()), nil - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: - return starlark.MakeUint64(val.Uint()), nil - case reflect.Float32, reflect.Float64: - return starlark.Float(val.Float()), nil - case reflect.Map: - dict := new(starlark.Dict) - for _, key := range val.MapKeys() { - value := val.MapIndex(key) - getValue, err := toValue(reflect.ValueOf(value.Interface())) - if err != nil { - return nil, err - } - dict.SetKey( - starlark.String(fmt.Sprint(key)), - getValue, - ) - } - return dict, nil - case reflect.String: - return starlark.String(val.String()), nil - case reflect.Slice, reflect.Array: - list := new(starlark.List) - for i := 0; i < val.Len(); i++ { - keyValue := val.Index(i).Interface() - vOf := reflect.ValueOf(keyValue) - result, err := 
toValue(vOf) - if err != nil { - return nil, err - } - list.Append(result) - } - return list, nil - } - - return nil, fmt.Errorf("type %T is not a supported starlark type", val.Interface()) -} - -func fromBuild(v *core.Build) starlark.StringDict { - return starlark.StringDict{ - "event": starlark.String(v.Event), - "action": starlark.String(v.Action), - "cron": starlark.String(v.Cron), - "environment": starlark.String(v.Deploy), - "link": starlark.String(v.Link), - "branch": starlark.String(v.Target), - "source": starlark.String(v.Source), - "before": starlark.String(v.Before), - "after": starlark.String(v.After), - "target": starlark.String(v.Target), - "ref": starlark.String(v.Ref), - "commit": starlark.String(v.After), - "title": starlark.String(v.Title), - "message": starlark.String(v.Message), - "source_repo": starlark.String(v.Fork), - "author_login": starlark.String(v.Author), - "author_name": starlark.String(v.AuthorName), - "author_email": starlark.String(v.AuthorEmail), - "author_avatar": starlark.String(v.AuthorAvatar), - "sender": starlark.String(v.Sender), - "debug": starlark.Bool(v.Debug), - "params": fromMap(v.Params), - } -} - -func fromRepo(v *core.Repository) starlark.StringDict { - return starlark.StringDict{ - "uid": starlark.String(v.UID), - "name": starlark.String(v.Name), - "namespace": starlark.String(v.Namespace), - "slug": starlark.String(v.Slug), - "git_http_url": starlark.String(v.HTTPURL), - "git_ssh_url": starlark.String(v.SSHURL), - "link": starlark.String(v.Link), - "branch": starlark.String(v.Branch), - "config": starlark.String(v.Config), - "private": starlark.Bool(v.Private), - "visibility": starlark.String(v.Visibility), - "active": starlark.Bool(v.Active), - "trusted": starlark.Bool(v.Trusted), - "protected": starlark.Bool(v.Protected), - "ignore_forks": starlark.Bool(v.IgnoreForks), - "ignore_pull_requests": starlark.Bool(v.IgnorePulls), - } -} - -func fromMap(m map[string]string) *starlark.Dict { - dict := new(starlark.Dict) - for k, v := range m { - dict.SetKey( - starlark.String(k), - starlark.String(v), - ) - } - return dict -} diff --git a/plugin/converter/starlark/starlark.go b/plugin/converter/starlark/starlark.go deleted file mode 100644 index 5e7a79904a..0000000000 --- a/plugin/converter/starlark/starlark.go +++ /dev/null @@ -1,149 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package starlark - -import ( - "bytes" - - "github.com/drone/drone/core" - "github.com/drone/drone/handler/api/errors" - - "github.com/sirupsen/logrus" - "go.starlark.net/starlark" -) - -const ( - separator = "---" - newline = "\n" -) - -// default limit for generated configuration file size. -const defaultSizeLimit = 1000000 - -var ( - // ErrMainMissing indicates the starlark script is missing - // the main method. - ErrMainMissing = errors.New("starlark: missing main function") - - // ErrMainInvalid indicates the starlark script defines a - // global variable named main, however, it is not callable. 
- ErrMainInvalid = errors.New("starlark: main must be a function") - - // ErrMainReturn indicates the starlark script's main method - // returns an invalid or unexpected type. - ErrMainReturn = errors.New("starlark: main returns an invalid type") - - // ErrMaximumSize indicates the starlark script generated a - // file that exceeds the maximum allowed file size. - ErrMaximumSize = errors.New("starlark: maximum file size exceeded") - - // ErrCannotLoad indicates the starlark script is attempting to - // load an external file which is currently restricted. - ErrCannotLoad = errors.New("starlark: cannot load external scripts") -) - -func Parse(req *core.ConvertArgs, template *core.Template, templateData map[string]interface{}, stepLimit uint64, sizeLimit uint64) (string, error) { - thread := &starlark.Thread{ - Name: "drone", - Load: noLoad, - Print: func(_ *starlark.Thread, msg string) { - logrus.WithFields(logrus.Fields{ - "namespace": req.Repo.Namespace, - "name": req.Repo.Name, - }).Traceln(msg) - }, - } - var starlarkFile string - var starlarkFileName string - if template != nil { - starlarkFile = template.Data - starlarkFileName = template.Name - } else { - starlarkFile = req.Config.Data - starlarkFileName = req.Repo.Config - } - - globals, err := starlark.ExecFile(thread, starlarkFileName, starlarkFile, nil) - if err != nil { - return "", err - } - - // find the main method in the starlark script and - // cast to a callable type. If not callable the script - // is invalid. - mainVal, ok := globals["main"] - if !ok { - return "", ErrMainMissing - } - main, ok := mainVal.(starlark.Callable) - if !ok { - return "", ErrMainInvalid - } - - // create the input args and invoke the main method - // using the input args. - args, err := createArgs(req.Repo, req.Build, templateData) - if err != nil { - return "", err - } - - // set the maximum number of operations in the script. this - // mitigates long running scripts. - if stepLimit == 0 { - stepLimit = 50000 - } - thread.SetMaxExecutionSteps(stepLimit) - - // execute the main method in the script. - mainVal, err = starlark.Call(thread, main, args, nil) - if err != nil { - return "", err - } - - buf := new(bytes.Buffer) - switch v := mainVal.(type) { - case *starlark.List: - for i := 0; i < v.Len(); i++ { - item := v.Index(i) - buf.WriteString(separator) - buf.WriteString(newline) - if err := write(buf, item); err != nil { - return "", err - } - buf.WriteString(newline) - } - case *starlark.Dict: - if err := write(buf, v); err != nil { - return "", err - } - default: - return "", ErrMainReturn - } - - if sizeLimit == 0 { - sizeLimit = defaultSizeLimit - } - - // this is a temporary workaround until we - // implement a LimitWriter. - if b := buf.Bytes(); uint64(len(b)) > sizeLimit { - return "", ErrMaximumSize - } - return buf.String(), nil -} - -func noLoad(_ *starlark.Thread, _ string) (starlark.StringDict, error) { - return nil, ErrCannotLoad -} diff --git a/plugin/converter/starlark/starlark_test.go b/plugin/converter/starlark/starlark_test.go deleted file mode 100644 index 8e7871c974..0000000000 --- a/plugin/converter/starlark/starlark_test.go +++ /dev/null @@ -1,107 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package starlark - -import ( - "io/ioutil" - "testing" - - "github.com/drone/drone/core" -) - -func TestParseStarlark(t *testing.T) { - before, err := ioutil.ReadFile("../testdata/starlark.input.star") - if err != nil { - t.Error(err) - return - } - - after, err := ioutil.ReadFile("../testdata/starlark.input.star.golden") - if err != nil { - t.Error(err) - return - } - - req := &core.ConvertArgs{ - Build: &core.Build{ - After: "3d21ec53a331a6f037a91c368710b99387d012c1", - }, - Repo: &core.Repository{ - Slug: "octocat/hello-world", - Config: ".drone.yml", - }, - Config: &core.Config{}, - } - template := &core.Template{ - Name: "my_template.star", - Data: string(before), - } - - templateData := map[string]interface{}{ - "stepName": "my_step", - "image": "my_image", - "commands": "my_command", - } - - req.Config.Data = string(before) - - parsedFile, err := Parse(req, template, templateData, 0, 0) - if err != nil { - t.Error(err) - return - } - - if want, got := parsedFile, string(after); want != got { - t.Errorf("Want %q got %q", want, got) - } -} - -func TestParseStarlarkNotTemplateFile(t *testing.T) { - before, err := ioutil.ReadFile("../testdata/single.star") - if err != nil { - t.Error(err) - return - } - - after, err := ioutil.ReadFile("../testdata/single.star.golden") - if err != nil { - t.Error(err) - return - } - - req := &core.ConvertArgs{ - Build: &core.Build{ - After: "3d21ec53a331a6f037a91c368710b99387d012c1", - }, - Repo: &core.Repository{ - Slug: "octocat/hello-world", - Config: ".drone.star", - }, - Config: &core.Config{}, - } - - req.Repo.Config = "plugin.starlark.star" - req.Config.Data = string(before) - - parsedFile, err := Parse(req, nil, nil, 0, 0) - if err != nil { - t.Error(err) - return - } - - if want, got := parsedFile, string(after); want != got { - t.Errorf("Want %q got %q", want, got) - } -} diff --git a/plugin/converter/starlark/write.go b/plugin/converter/starlark/write.go deleted file mode 100644 index 5b5f5f04f0..0000000000 --- a/plugin/converter/starlark/write.go +++ /dev/null @@ -1,99 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package starlark - -import ( - "encoding/json" - "fmt" - "io" - - "go.starlark.net/starlark" -) - -type writer interface { - io.Writer - io.ByteWriter - io.StringWriter -} - -func write(out writer, v starlark.Value) error { - if marshaler, ok := v.(json.Marshaler); ok { - jsonData, err := marshaler.MarshalJSON() - if err != nil { - return err - } - out.Write(jsonData) - return nil - } - - switch v := v.(type) { - case starlark.NoneType: - out.WriteString("null") - case starlark.Bool: - fmt.Fprintf(out, "%t", v) - case starlark.Int: - out.WriteString(v.String()) - case starlark.Float: - fmt.Fprintf(out, "%g", v) - case starlark.String: - s := string(v) - if isQuoteSafe(s) { - fmt.Fprintf(out, "%q", s) - } else { - data, _ := json.Marshal(s) - out.Write(data) - } - case starlark.Indexable: - out.WriteByte('[') - for i, n := 0, starlark.Len(v); i < n; i++ { - if i > 0 { - out.WriteString(", ") - } - if err := write(out, v.Index(i)); err != nil { - return err - } - } - out.WriteByte(']') - case *starlark.Dict: - out.WriteByte('{') - for i, itemPair := range v.Items() { - key := itemPair[0] - value := itemPair[1] - if i > 0 { - out.WriteString(", ") - } - if err := write(out, key); err != nil { - return err - } - out.WriteString(": ") - if err := write(out, value); err != nil { - return err - } - } - out.WriteByte('}') - default: - return fmt.Errorf("value %s (type `%s') can't be converted to JSON", v.String(), v.Type()) - } - return nil -} - -func isQuoteSafe(s string) bool { - for _, r := range s { - if r < 0x20 || r >= 0x10000 { - return false - } - } - return true -} diff --git a/plugin/converter/starlark_oss.go b/plugin/converter/starlark_oss.go deleted file mode 100644 index 0444be33d5..0000000000 --- a/plugin/converter/starlark_oss.go +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build oss - -package converter - -import "github.com/drone/drone/core" - -func Starlark(enabled bool, stepLimit uint64, sizeLimit uint64) core.ConvertService { - return new(noop) -} diff --git a/plugin/converter/starlark_test.go b/plugin/converter/starlark_test.go deleted file mode 100644 index 6f3c8fd36f..0000000000 --- a/plugin/converter/starlark_test.go +++ /dev/null @@ -1,198 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package converter - -import ( - "io/ioutil" - "runtime" - "strings" - "testing" - - "github.com/drone/drone/core" -) - -func TestStarlarkConvert(t *testing.T) { - plugin := Starlark(true, 0, 0) - - req := &core.ConvertArgs{ - Build: &core.Build{ - After: "3d21ec53a331a6f037a91c368710b99387d012c1", - }, - Repo: &core.Repository{ - Slug: "octocat/hello-world", - Config: ".drone.yml", - }, - Config: &core.Config{}, - } - - config, err := plugin.Convert(noContext, req) - if err != nil { - t.Error(err) - return - } - if config != nil { - t.Error("Want nil config when configuration is not starlark file") - return - } - - before, err := ioutil.ReadFile("testdata/single.star") - if err != nil { - t.Error(err) - return - } - after, err := ioutil.ReadFile("testdata/single.star.golden") - if err != nil { - t.Error(err) - return - } - - req.Repo.Config = "single.star" - req.Config.Data = string(before) - config, err = plugin.Convert(noContext, req) - if err != nil { - t.Error(err) - return - } - if config == nil { - t.Error("Want non-nil configuration") - return - } - - if want, got := config.Data, string(after); want != got { - t.Errorf("Want %q got %q", want, got) - } -} - -// this test verifies the starlark file can generate a multi-document -// yaml file that defines multiple pipelines. -func TestConvert_Multi(t *testing.T) { - before, err := ioutil.ReadFile("testdata/multi.star") - if err != nil { - t.Error(err) - return - } - after, err := ioutil.ReadFile("testdata/multi.star.golden") - if err != nil { - t.Error(err) - return - } - - req := &core.ConvertArgs{ - Build: &core.Build{ - After: "3d21ec53a331a6f037a91c368710b99387d012c1", - }, - Repo: &core.Repository{ - Slug: "octocat/hello-world", - Config: ".drone.star", - }, - Config: &core.Config{ - Data: string(before), - }, - } - - plugin := Starlark(true, 0, 0) - config, err := plugin.Convert(noContext, req) - if err != nil { - t.Error(err) - return - } - - config, err = plugin.Convert(noContext, req) - if err != nil { - t.Error(err) - return - } - if config == nil { - t.Error("Want non-nil configuration") - return - } - - want := string(after) - // on windows line endings are \r\n, lets change them to linux for comparison - if runtime.GOOS == "windows" { - want = strings.Replace(want, "\r\n", "\n", -1) - } - - got := config.Data - if want != got { - t.Errorf("Want %q got %q", want, got) - } -} - -// this test verifies the plugin is skipped when it has -// not been explicitly enabled. -func TestConvert_Skip(t *testing.T) { - plugin := Starlark(false, 0, 0) - config, err := plugin.Convert(noContext, nil) - if err != nil { - t.Error(err) - return - } - if config != nil { - t.Errorf("Expect nil config returned when plugin disabled") - } -} - -// this test verifies the plugin is skipped when the config -// file extension is not a starlark extension. -func TestConvert_SkipYaml(t *testing.T) { - req := &core.ConvertArgs{ - Repo: &core.Repository{ - Config: ".drone.yaml", - }, - } - - plugin := Starlark(true, 0, 0) - config, err := plugin.Convert(noContext, req) - if err != nil { - t.Error(err) - return - } - if config != nil { - t.Errorf("Expect nil config returned for non-starlark files") - } -} - -// this test verifies the plugin returns error -// if the generated file size is exceeded. 
-func TestConvert_SizeLimit(t *testing.T) { - smallFileSizeLimit := uint64(1) - plugin := Starlark(true, 0, smallFileSizeLimit) - - req := &core.ConvertArgs{ - Build: &core.Build{ - After: "3d21ec53a331a6f037a91c368710b99387d012c1", - }, - Repo: &core.Repository{ - Slug: "octocat/hello-world", - Config: ".drone.yml", - }, - Config: &core.Config{}, - } - - before, err := ioutil.ReadFile("testdata/single.star") - if err != nil { - t.Error(err) - return - } - - req.Repo.Config = "single.star" - req.Config.Data = string(before) - _, expectedError := plugin.Convert(noContext, req) - if expectedError == nil { - t.Error("Expected 'starlark: maximum file size exceeded' error") - return - } -} \ No newline at end of file diff --git a/plugin/converter/template.go b/plugin/converter/template.go deleted file mode 100644 index a8d2ab595b..0000000000 --- a/plugin/converter/template.go +++ /dev/null @@ -1,135 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build !oss - -package converter - -import ( - "bytes" - "context" - "database/sql" - "errors" - "path/filepath" - "regexp" - templating "text/template" - - "github.com/drone/drone/core" - "github.com/drone/drone/plugin/converter/jsonnet" - "github.com/drone/drone/plugin/converter/starlark" - "github.com/drone/funcmap" - - "gopkg.in/yaml.v2" -) - -var ( - // templateFileRE regex to verifying kind is template. - templateFileRE = regexp.MustCompilePOSIX("^kind:[[:space:]]+template[[:space:]]?+$") - errTemplateNotFound = errors.New("template converter: template name given not found") - errTemplateSyntaxErrors = errors.New("template converter: there is a problem with the yaml file provided") - errTemplateExtensionInvalid = errors.New("template extension invalid. 
must be yaml, starlark or jsonnet") -) - -func Template(templateStore core.TemplateStore, stepLimit uint64, sizeLimit uint64) core.ConvertService { - return &templatePlugin{ - templateStore: templateStore, - stepLimit: stepLimit, - sizeLimit: sizeLimit, - } -} - -type templatePlugin struct { - templateStore core.TemplateStore - stepLimit uint64 - sizeLimit uint64 -} - -func (p *templatePlugin) Convert(ctx context.Context, req *core.ConvertArgs) (*core.Config, error) { - // check type is yaml - configExt := filepath.Ext(req.Repo.Config) - - if configExt != ".yml" && configExt != ".yaml" { - return nil, nil - } - - // check kind is template - if templateFileRE.MatchString(req.Config.Data) == false { - return nil, nil - } - // map to templateArgs - var templateArgs core.TemplateArgs - err := yaml.Unmarshal([]byte(req.Config.Data), &templateArgs) - if err != nil { - return nil, errTemplateSyntaxErrors - } - // get template from db - template, err := p.templateStore.FindName(ctx, templateArgs.Load, req.Repo.Namespace) - if err == sql.ErrNoRows { - return nil, errTemplateNotFound - } - if err != nil { - return nil, err - } - - switch filepath.Ext(templateArgs.Load) { - case ".yml", ".yaml": - return parseYaml(req, template, templateArgs) - case ".star", ".starlark", ".script": - return parseStarlark(req, template, templateArgs, p.stepLimit, p.sizeLimit) - case ".jsonnet": - return parseJsonnet(req, template, templateArgs) - default: - return nil, errTemplateExtensionInvalid - } -} - -func parseYaml(req *core.ConvertArgs, template *core.Template, templateArgs core.TemplateArgs) (*core.Config, error) { - data := map[string]interface{}{ - "build": toBuild(req.Build), - "repo": toRepo(req.Repo), - "input": templateArgs.Data, - } - tmpl, err := templating.New(template.Name).Funcs(funcmap.SafeFuncs).Parse(template.Data) - if err != nil { - return nil, err - } - var out bytes.Buffer - err = tmpl.Execute(&out, data) - if err != nil { - return nil, err - } - return &core.Config{ - Data: out.String(), - }, nil -} - -func parseJsonnet(req *core.ConvertArgs, template *core.Template, templateArgs core.TemplateArgs) (*core.Config, error) { - file, err := jsonnet.Parse(req, nil, 0, template, templateArgs.Data) - if err != nil { - return nil, err - } - return &core.Config{ - Data: file, - }, nil -} - -func parseStarlark(req *core.ConvertArgs, template *core.Template, templateArgs core.TemplateArgs, stepLimit uint64, sizeLimit uint64) (*core.Config, error) { - file, err := starlark.Parse(req, template, templateArgs.Data, stepLimit, sizeLimit) - if err != nil { - return nil, err - } - return &core.Config{ - Data: file, - }, nil -} diff --git a/plugin/converter/template_oss.go b/plugin/converter/template_oss.go deleted file mode 100644 index 29ca662e87..0000000000 --- a/plugin/converter/template_oss.go +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -// +build oss - -package converter - -import ( - "context" - - "github.com/drone/drone/core" -) - -func Template(templateStore core.TemplateStore, stepLimit uint64, sizeLimit uint64) core.ConvertService { - return &templatePlugin{ - templateStore: templateStore, - } -} - -type templatePlugin struct { - templateStore core.TemplateStore -} - -func (p *templatePlugin) Convert(ctx context.Context, req *core.ConvertArgs) (*core.Config, error) { - return nil, nil -} diff --git a/plugin/converter/template_test.go b/plugin/converter/template_test.go deleted file mode 100644 index f78d2a86ca..0000000000 --- a/plugin/converter/template_test.go +++ /dev/null @@ -1,553 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package converter - -import ( - "encoding/json" - "errors" - "io/ioutil" - "runtime" - "strings" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/mock" - - "github.com/golang/mock/gomock" -) - -func TestTemplatePluginConvertStarlark(t *testing.T) { - templateArgs, err := ioutil.ReadFile("testdata/starlark.template.yml") - if err != nil { - t.Error(err) - return - } - - req := &core.ConvertArgs{ - Build: &core.Build{ - After: "3d21ec53a331a6f037a91c368710b99387d012c1", - }, - Repo: &core.Repository{ - Slug: "octocat/hello-world", - Config: ".drone.yml", - Namespace: "octocat", - }, - Config: &core.Config{ - Data: string(templateArgs), - }, - } - - beforeInput, err := ioutil.ReadFile("testdata/starlark.input.star") - if err != nil { - t.Error(err) - return - } - - after, err := ioutil.ReadFile("testdata/starlark.input.star.golden") - if err != nil { - t.Error(err) - return - } - - template := &core.Template{ - Name: "plugin.starlark", - Data: string(beforeInput), - Namespace: "octocat", - } - - controller := gomock.NewController(t) - defer controller.Finish() - - templates := mock.NewMockTemplateStore(controller) - templates.EXPECT().FindName(gomock.Any(), template.Name, req.Repo.Namespace).Return(template, nil) - - plugin := Template(templates, 0, 0) - config, err := plugin.Convert(noContext, req) - if err != nil { - t.Error(err) - return - } - - if config == nil { - t.Error("Want non-nil configuration") - return - } - - if want, got := config.Data, string(after); want != got { - t.Errorf("Want %q got %q", want, got) - } -} - -func TestTemplatePluginConvertNotYamlFile(t *testing.T) { - - plugin := Template(nil, 0, 0) - req := &core.ConvertArgs{ - Build: &core.Build{ - After: "3d21ec53a331a6f037a91c368710b99387d012c1", - }, - Repo: &core.Repository{ - Slug: "octocat/hello-world", - Config: ".drone.star", - }, - Config: &core.Config{}, - } - - config, err := plugin.Convert(noContext, req) - if err != nil { - t.Error(err) - return - } - if config != nil { - t.Errorf("Expect nil config returned for non-starlark files") - } -} - -func TestTemplatePluginConvertDroneFileTypePipeline(t *testing.T) { - args, err := ioutil.ReadFile("testdata/drone.yml") - if err != nil { - t.Error(err) - return - } - plugin := 
Template(nil, 0, 0) - req := &core.ConvertArgs{ - Build: &core.Build{ - After: "3d21ec53a331a6f037a91c368710b99387d012c1", - }, - Repo: &core.Repository{ - Slug: "octocat/hello-world", - Config: ".drone.yml", - }, - Config: &core.Config{Data: string(args)}, - } - - config, err := plugin.Convert(noContext, req) - if err != nil { - t.Error(err) - return - } - if config != nil { - t.Errorf("Expect nil config returned for non-starlark files") - } -} - -// Test makes sure that we don't skip templating for neither the "yml" or "yaml" extension. -func TestTemplatePluginConvertDroneFileYamlExtensions(t *testing.T) { - extensions := []string{"yml", "yaml"} - dummyErr := errors.New("dummy-error") - - for _, extension := range extensions { - t.Run(extension, func(t *testing.T) { - args, err := ioutil.ReadFile("testdata/yaml.template.yml") - if err != nil { - t.Error(err) - return - } - - controller := gomock.NewController(t) - defer controller.Finish() - - templates := mock.NewMockTemplateStore(controller) - templates.EXPECT().FindName(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, dummyErr) - - plugin := Template(templates, 0, 0) - req := &core.ConvertArgs{ - Build: &core.Build{ - After: "3d21ec53a331a6f037a91c368710b99387d012c1", - }, - Repo: &core.Repository{ - Slug: "octocat/hello-world", - Config: ".drone." + extension, - }, - Config: &core.Config{Data: string(args)}, - } - - _, err = plugin.Convert(noContext, req) - if err != nil && err != dummyErr { - t.Error(err) - } - if err == nil { - t.Errorf("Templating was skipped") - } - }) - } -} - -func TestTemplatePluginConvertTemplateNotFound(t *testing.T) { - templateArgs, err := ioutil.ReadFile("testdata/starlark.template.yml") - if err != nil { - t.Error(err) - return - } - - req := &core.ConvertArgs{ - Build: &core.Build{ - After: "3d21ec53a331a6f037a91c368710b99387d012c1", - }, - Repo: &core.Repository{ - Slug: "octocat/hello-world", - Config: ".drone.yml", - Namespace: "octocat", - }, - Config: &core.Config{Data: string(templateArgs)}, - } - - controller := gomock.NewController(t) - defer controller.Finish() - - template := &core.Template{ - Name: "plugin.starlark", - Data: "", - } - - templates := mock.NewMockTemplateStore(controller) - templates.EXPECT().FindName(gomock.Any(), template.Name, req.Repo.Namespace).Return(nil, nil) - - plugin := Template(templates, 0, 0) - - config, err := plugin.Convert(noContext, req) - if config != nil { - t.Errorf("template converter: template name given not found") - } -} - -func TestTemplatePluginConvertJsonnet(t *testing.T) { - templateArgs, err := ioutil.ReadFile("testdata/jsonnet.template.yml") - if err != nil { - t.Error(err) - return - } - - req := &core.ConvertArgs{ - Build: &core.Build{ - After: "3d21ec53a331a6f037a91c368710b99387d012c1", - }, - Repo: &core.Repository{ - Slug: "octocat/hello-world", - Config: ".drone.yml", - Namespace: "octocat", - }, - Config: &core.Config{ - Data: string(templateArgs), - }, - } - - beforeInput, err := ioutil.ReadFile("testdata/input.jsonnet") - if err != nil { - t.Error(err) - return - } - - after, err := ioutil.ReadFile("testdata/input.jsonnet.golden") - if err != nil { - t.Error(err) - return - } - - template := &core.Template{ - Name: "plugin.jsonnet", - Data: string(beforeInput), - Namespace: "octocat", - } - - controller := gomock.NewController(t) - defer controller.Finish() - - templates := mock.NewMockTemplateStore(controller) - templates.EXPECT().FindName(gomock.Any(), template.Name, req.Repo.Namespace).Return(template, nil) - - plugin := 
Template(templates, 0, 0) - config, err := plugin.Convert(noContext, req) - if err != nil { - t.Error(err) - return - } - - if config == nil { - t.Error("Want non-nil configuration") - return - } - - want := string(after) - // on windows line endings are \r\n, lets change them to linux for comparison - if runtime.GOOS == "windows" { - want = strings.Replace(want, "\r\n", "\n", -1) - } - - got := config.Data - if want != got { - t.Errorf("Want %q got %q", want, got) - } -} - -func TestTemplateNestedValuesPluginConvertStarlark(t *testing.T) { - type Pipeline struct { - Kind string `json:"kind"` - Name string `json:"name"` - Steps []struct { - Name string `json:"name"` - Image string `json:"image"` - Commands []string `json:"commands"` - } `json:"steps"` - } - - templateArgs, err := ioutil.ReadFile("testdata/starlark-nested.template.yml") - if err != nil { - t.Error(err) - return - } - - req := &core.ConvertArgs{ - Build: &core.Build{ - After: "3d21ec53a331a6f037a91c368710b99387d012c1", - }, - Repo: &core.Repository{ - Slug: "octocat/hello-world", - Config: ".drone.yml", - Namespace: "octocat", - }, - Config: &core.Config{ - Data: string(templateArgs), - }, - } - - beforeInput, err := ioutil.ReadFile("testdata/starlark.input-nested.star") - if err != nil { - t.Error(err) - return - } - - after, err := ioutil.ReadFile("testdata/starlark.input-nested.star.golden") - if err != nil { - t.Error(err) - return - } - - template := &core.Template{ - Name: "test.nested.starlark", - Data: string(beforeInput), - Namespace: "octocat", - } - - controller := gomock.NewController(t) - defer controller.Finish() - - templates := mock.NewMockTemplateStore(controller) - templates.EXPECT().FindName(gomock.Any(), template.Name, req.Repo.Namespace).Return(template, nil) - - plugin := Template(templates, 0, 0) - config, err := plugin.Convert(noContext, req) - if err != nil { - t.Error(err) - return - } - - if config == nil { - t.Error("Want non-nil configuration") - return - } - result := Pipeline{} - err = json.Unmarshal(after, &result) - beforeConfig := Pipeline{} - err = json.Unmarshal([]byte(config.Data), &beforeConfig) - - if want, got := beforeConfig.Name, result.Name; want != got { - t.Errorf("Want %q got %q", want, got) - } - if want, got := beforeConfig.Kind, result.Kind; want != got { - t.Errorf("Want %q got %q", want, got) - } - if want, got := beforeConfig.Steps[0].Name, result.Steps[0].Name; want != got { - t.Errorf("Want %q got %q", want, got) - } - if want, got := beforeConfig.Steps[0].Commands[0], result.Steps[0].Commands[0]; want != got { - t.Errorf("Want %q got %q", want, got) - } - if want, got := beforeConfig.Steps[0].Image, result.Steps[0].Image; want != got { - t.Errorf("Want %q got %q", want, got) - } -} - -func TestTemplatePluginConvertYaml(t *testing.T) { - templateArgs, err := ioutil.ReadFile("testdata/yaml.template.yml") - if err != nil { - t.Error(err) - return - } - - req := &core.ConvertArgs{ - Build: &core.Build{ - After: "3d21ec53a331a6f037a91c368710b99387d012c1", - }, - Repo: &core.Repository{ - Slug: "octocat/hello-world", - Config: ".drone.yml", - Namespace: "octocat", - }, - Config: &core.Config{ - Data: string(templateArgs), - }, - } - - beforeInput, err := ioutil.ReadFile("testdata/yaml.input.yml") - if err != nil { - t.Error(err) - return - } - - after, err := ioutil.ReadFile("testdata/yaml.input.golden") - if err != nil { - t.Error(err) - return - } - - template := &core.Template{ - Name: "plugin.yaml", - Data: string(beforeInput), - Namespace: "octocat", - } - - controller := 
gomock.NewController(t) - defer controller.Finish() - - templates := mock.NewMockTemplateStore(controller) - templates.EXPECT().FindName(gomock.Any(), template.Name, req.Repo.Namespace).Return(template, nil) - - plugin := Template(templates, 0, 0) - config, err := plugin.Convert(noContext, req) - if err != nil { - t.Error(err) - return - } - - if config == nil { - t.Error("Want non-nil configuration") - return - } - - if want, got := config.Data, string(after); want != got { - t.Errorf("Want %q got %q", want, got) - } -} - -// tests to check error is thrown if user has already loaded a template file of invalid extension -// and refers to it in the drone.yml file -func TestTemplatePluginConvertInvalidTemplateExtension(t *testing.T) { - // reads yml input file which refers to a template file of invalid extensions - templateArgs, err := ioutil.ReadFile("testdata/yaml.template.invalid.yml") - if err != nil { - t.Error(err) - return - } - - req := &core.ConvertArgs{ - Build: &core.Build{ - After: "3d21ec53a331a6f037a91c368710b99387d012c1", - }, - Repo: &core.Repository{ - Slug: "octocat/hello-world", - Config: ".drone.yml", - Namespace: "octocat", - }, - Config: &core.Config{ - Data: string(templateArgs), - }, - } - // reads the template drone.yml - beforeInput, err := ioutil.ReadFile("testdata/yaml.input.yml") - if err != nil { - t.Error(err) - return - } - - template := &core.Template{ - Name: "plugin.txt", - Data: string(beforeInput), - Namespace: "octocat", - } - - controller := gomock.NewController(t) - defer controller.Finish() - - templates := mock.NewMockTemplateStore(controller) - templates.EXPECT().FindName(gomock.Any(), template.Name, req.Repo.Namespace).Return(template, nil) - - plugin := Template(templates, 0, 0) - config, err := plugin.Convert(noContext, req) - if config != nil { - t.Errorf("template extension invalid. 
must be yaml, starlark or jsonnet") - } -} - -func TestTemplatePluginConvertYamlWithComment(t *testing.T) { - templateArgs, err := ioutil.ReadFile("testdata/yaml.template.comment.yml") - if err != nil { - t.Error(err) - return - } - - req := &core.ConvertArgs{ - Build: &core.Build{ - After: "3d21ec53a331a6f037a91c368710b99387d012c1", - }, - Repo: &core.Repository{ - Slug: "octocat/hello-world", - Config: ".drone.yml", - Namespace: "octocat", - }, - Config: &core.Config{ - Data: string(templateArgs), - }, - } - - beforeInput, err := ioutil.ReadFile("testdata/yaml.input.yml") - if err != nil { - t.Error(err) - return - } - - after, err := ioutil.ReadFile("testdata/yaml.input.golden") - if err != nil { - t.Error(err) - return - } - - template := &core.Template{ - Name: "plugin.yaml", - Data: string(beforeInput), - Namespace: "octocat", - } - - controller := gomock.NewController(t) - defer controller.Finish() - - templates := mock.NewMockTemplateStore(controller) - templates.EXPECT().FindName(gomock.Any(), template.Name, req.Repo.Namespace).Return(template, nil) - - plugin := Template(templates, 0, 0) - config, err := plugin.Convert(noContext, req) - if err != nil { - t.Error(err) - return - } - - if config == nil { - t.Error("Want non-nil configuration") - return - } - - if want, got := config.Data, string(after); want != got { - t.Errorf("Want %q got %q", want, got) - } -} diff --git a/plugin/converter/testdata/drone.yml b/plugin/converter/testdata/drone.yml deleted file mode 100644 index a451f7bb49..0000000000 --- a/plugin/converter/testdata/drone.yml +++ /dev/null @@ -1,6 +0,0 @@ -kind: pipeline -load: plugin.starlark -data: - stepName: my_step - image: my_image - commands: my_command diff --git a/plugin/converter/testdata/input.jsonnet b/plugin/converter/testdata/input.jsonnet deleted file mode 100644 index 25f586354a..0000000000 --- a/plugin/converter/testdata/input.jsonnet +++ /dev/null @@ -1,18 +0,0 @@ -local stepName = std.extVar("input.stepName"); -local image = std.extVar("input.image"); -local commands = std.extVar("input.commands"); - -{ - "kind": "pipeline", - "type": "docker", - "name": "default", - "steps": [ - { - "name": stepName, - "image": image, - "commands": [ - commands - ] - } - ] -} diff --git a/plugin/converter/testdata/input.jsonnet.golden b/plugin/converter/testdata/input.jsonnet.golden deleted file mode 100644 index ba8143b76a..0000000000 --- a/plugin/converter/testdata/input.jsonnet.golden +++ /dev/null @@ -1,15 +0,0 @@ ---- -{ - "kind": "pipeline", - "name": "default", - "steps": [ - { - "commands": [ - "my_command" - ], - "image": "my_image", - "name": "my_step" - } - ], - "type": "docker" -} diff --git a/plugin/converter/testdata/jsonnet.template.yml b/plugin/converter/testdata/jsonnet.template.yml deleted file mode 100644 index f9ab53474c..0000000000 --- a/plugin/converter/testdata/jsonnet.template.yml +++ /dev/null @@ -1,6 +0,0 @@ -kind: template -load: plugin.jsonnet -data: - stepName: my_step - image: my_image - commands: my_command diff --git a/plugin/converter/testdata/multi.star b/plugin/converter/testdata/multi.star deleted file mode 100644 index 0ea00f920d..0000000000 --- a/plugin/converter/testdata/multi.star +++ /dev/null @@ -1,6 +0,0 @@ -def main(ctx): - return [{ - 'kind': 'pipeline', - 'type': 'docker', - 'name': 'default' - }] \ No newline at end of file diff --git a/plugin/converter/testdata/multi.star.golden b/plugin/converter/testdata/multi.star.golden deleted file mode 100644 index 4c81db209e..0000000000 --- 
a/plugin/converter/testdata/multi.star.golden +++ /dev/null @@ -1,2 +0,0 @@ ---- -{"kind": "pipeline", "type": "docker", "name": "default"} diff --git a/plugin/converter/testdata/single.jsonnet b/plugin/converter/testdata/single.jsonnet deleted file mode 100644 index 8640885f8d..0000000000 --- a/plugin/converter/testdata/single.jsonnet +++ /dev/null @@ -1,14 +0,0 @@ -{ - "kind": "pipeline", - "name": "default", - "steps": [ - { - "commands": [ - "my_command" - ], - "image": "my_image", - "name": "my_step" - } - ], - "type": "docker" -} diff --git a/plugin/converter/testdata/single.star b/plugin/converter/testdata/single.star deleted file mode 100644 index 52be305852..0000000000 --- a/plugin/converter/testdata/single.star +++ /dev/null @@ -1,11 +0,0 @@ -def main(ctx): - print(ctx.build) - print(ctx.build.commit) - print(ctx.repo) - print(ctx.repo.namespace) - print(ctx.repo.name) - return { - 'kind': 'pipeline', - 'type': 'docker', - 'name': 'default' - } diff --git a/plugin/converter/testdata/single.star.golden b/plugin/converter/testdata/single.star.golden deleted file mode 100644 index 33a6f44001..0000000000 --- a/plugin/converter/testdata/single.star.golden +++ /dev/null @@ -1 +0,0 @@ -{"kind": "pipeline", "type": "docker", "name": "default"} \ No newline at end of file diff --git a/plugin/converter/testdata/starlark-nested.template.yml b/plugin/converter/testdata/starlark-nested.template.yml deleted file mode 100644 index 401ebab957..0000000000 --- a/plugin/converter/testdata/starlark-nested.template.yml +++ /dev/null @@ -1,8 +0,0 @@ -kind: template -load: test.nested.starlark -data: - builds: - name: build - image: mcr.microsoft.com/dotnet/sdk:5.0 - commands: - - dotnet build \ No newline at end of file diff --git a/plugin/converter/testdata/starlark.input-nested.star b/plugin/converter/testdata/starlark.input-nested.star deleted file mode 100644 index b29fe69d28..0000000000 --- a/plugin/converter/testdata/starlark.input-nested.star +++ /dev/null @@ -1,8 +0,0 @@ -def main(ctx): - return { - "kind": "pipeline", - "name": "default", - "steps": [ - ctx.input.builds - ] - } \ No newline at end of file diff --git a/plugin/converter/testdata/starlark.input-nested.star.golden b/plugin/converter/testdata/starlark.input-nested.star.golden deleted file mode 100644 index 2e8eeeaa9f..0000000000 --- a/plugin/converter/testdata/starlark.input-nested.star.golden +++ /dev/null @@ -1 +0,0 @@ -{"kind": "pipeline", "name": "default", "steps": [{"name": "build", "image": "mcr.microsoft.com/dotnet/sdk:5.0", "commands": ["dotnet build"]}]} \ No newline at end of file diff --git a/plugin/converter/testdata/starlark.input.star b/plugin/converter/testdata/starlark.input.star deleted file mode 100644 index 19ad92b816..0000000000 --- a/plugin/converter/testdata/starlark.input.star +++ /dev/null @@ -1,14 +0,0 @@ -def main(ctx): - return { - "kind": "pipeline", - "name": "build", - "steps": [ - { - "name": ctx.input.stepName, - "image": ctx.input.image, - "commands": [ - ctx.input.commands - ] - } - ] - } diff --git a/plugin/converter/testdata/starlark.input.star.golden b/plugin/converter/testdata/starlark.input.star.golden deleted file mode 100644 index 28a4bbb1bb..0000000000 --- a/plugin/converter/testdata/starlark.input.star.golden +++ /dev/null @@ -1 +0,0 @@ -{"kind": "pipeline", "name": "build", "steps": [{"name": "my_step", "image": "my_image", "commands": ["my_command"]}]} \ No newline at end of file diff --git a/plugin/converter/testdata/starlark.template.yml 
b/plugin/converter/testdata/starlark.template.yml deleted file mode 100644 index d11f5b3a37..0000000000 --- a/plugin/converter/testdata/starlark.template.yml +++ /dev/null @@ -1,7 +0,0 @@ ---- -kind: template -load: plugin.starlark -data: - stepName: my_step - image: my_image - commands: my_command diff --git a/plugin/converter/testdata/yaml.input.golden b/plugin/converter/testdata/yaml.input.golden deleted file mode 100644 index 20b7e9a59b..0000000000 --- a/plugin/converter/testdata/yaml.input.golden +++ /dev/null @@ -1,8 +0,0 @@ -kind: pipeline -type: docker -name: defaults -steps: - - name: MY_STEP - image: my_image - commands: - - my_command \ No newline at end of file diff --git a/plugin/converter/testdata/yaml.input.yml b/plugin/converter/testdata/yaml.input.yml deleted file mode 100644 index 954d25d39e..0000000000 --- a/plugin/converter/testdata/yaml.input.yml +++ /dev/null @@ -1,8 +0,0 @@ -kind: pipeline -type: docker -name: defaults -steps: - - name: {{ upper .input.stepName }} - image: {{ .input.image }} - commands: - - {{ .input.commands }} \ No newline at end of file diff --git a/plugin/converter/testdata/yaml.template.comment.yml b/plugin/converter/testdata/yaml.template.comment.yml deleted file mode 100644 index eba9a589f5..0000000000 --- a/plugin/converter/testdata/yaml.template.comment.yml +++ /dev/null @@ -1,8 +0,0 @@ ---- -# this is a comment -kind: template -load: plugin.yaml -data: - stepName: my_step - image: my_image - commands: my_command diff --git a/plugin/converter/testdata/yaml.template.invalid.yml b/plugin/converter/testdata/yaml.template.invalid.yml deleted file mode 100644 index fe06494688..0000000000 --- a/plugin/converter/testdata/yaml.template.invalid.yml +++ /dev/null @@ -1,6 +0,0 @@ -kind: template -load: plugin.txt -data: - stepName: my_step - image: my_image - commands: my_command diff --git a/plugin/converter/testdata/yaml.template.yml b/plugin/converter/testdata/yaml.template.yml deleted file mode 100644 index eb2cbce0fa..0000000000 --- a/plugin/converter/testdata/yaml.template.yml +++ /dev/null @@ -1,6 +0,0 @@ -kind: template -load: plugin.yaml -data: - stepName: my_step - image: my_image - commands: my_command diff --git a/plugin/registry/auths/auth.go b/plugin/registry/auths/auth.go deleted file mode 100644 index b99bb0ae95..0000000000 --- a/plugin/registry/auths/auth.go +++ /dev/null @@ -1,96 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package auths - -import ( - "bytes" - "encoding/base64" - "encoding/json" - "io" - "os" - "strings" - - "github.com/drone/drone/core" -) - -// config represents the Docker client configuration, -// typically located at ~/.docker/config.json -type config struct { - Auths map[string]struct { - Auth string `json:"auth"` - } `json:"auths"` -} - -// Parse parses the registry credential from the reader. 
-func Parse(r io.Reader) ([]*core.Registry, error) { - c := new(config) - err := json.NewDecoder(r).Decode(c) - if err != nil { - return nil, err - } - var auths []*core.Registry - for k, v := range c.Auths { - username, password := decode(v.Auth) - auths = append(auths, &core.Registry{ - Address: k, - Username: username, - Password: password, - }) - } - return auths, nil -} - -// ParseFile parses the registry credential file. -func ParseFile(filepath string) ([]*core.Registry, error) { - f, err := os.Open(filepath) - if err != nil { - return nil, err - } - defer f.Close() - return Parse(f) -} - -// ParseString parses the registry credential file. -func ParseString(s string) ([]*core.Registry, error) { - return Parse(strings.NewReader(s)) -} - -// ParseBytes parses the registry credential file. -func ParseBytes(b []byte) ([]*core.Registry, error) { - return Parse(bytes.NewReader(b)) -} - -// encode returns the encoded credentials. -func encode(username, password string) string { - return base64.StdEncoding.EncodeToString( - []byte(username + ":" + password), - ) -} - -// decode returns the decoded credentials. -func decode(s string) (username, password string) { - d, err := base64.StdEncoding.DecodeString(s) - if err != nil { - return - } - parts := strings.SplitN(string(d), ":", 2) - if len(parts) > 0 { - username = parts[0] - } - if len(parts) > 1 { - password = parts[1] - } - return -} diff --git a/plugin/registry/auths/auth_test.go b/plugin/registry/auths/auth_test.go deleted file mode 100644 index 7f1467a4a9..0000000000 --- a/plugin/registry/auths/auth_test.go +++ /dev/null @@ -1,112 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -// +build !oss - -package auths - -import ( - "os" - "testing" - - "github.com/drone/drone/core" - "github.com/google/go-cmp/cmp" -) - -func TestParse(t *testing.T) { - got, err := ParseString(sample) - if err != nil { - t.Error(err) - return - } - want := []*core.Registry{ - { - Address: "https://index.docker.io/v1/", - Username: "octocat", - Password: "correct-horse-battery-staple", - }, - } - if diff := cmp.Diff(got, want); diff != "" { - t.Errorf(diff) - } -} - -func TestParseBytes(t *testing.T) { - got, err := ParseBytes([]byte(sample)) - if err != nil { - t.Error(err) - return - } - want := []*core.Registry{ - { - Address: "https://index.docker.io/v1/", - Username: "octocat", - Password: "correct-horse-battery-staple", - }, - } - if diff := cmp.Diff(got, want); diff != "" { - t.Errorf(diff) - } -} - -func TestParseErr(t *testing.T) { - _, err := ParseString("") - if err == nil { - t.Errorf("Expect unmarshal error") - } -} - -func TestParseFile(t *testing.T) { - got, err := ParseFile("./testdata/config.json") - if err != nil { - t.Error(err) - return - } - want := []*core.Registry{ - { - Address: "https://index.docker.io/v1/", - Username: "octocat", - Password: "correct-horse-battery-staple", - }, - } - if diff := cmp.Diff(got, want); diff != "" { - t.Errorf(diff) - } -} - -func TestParseFileErr(t *testing.T) { - _, err := ParseFile("./testdata/x.json") - if _, ok := err.(*os.PathError); !ok { - t.Errorf("Expect error when file does not exist") - } -} - -func TestEncodeDecode(t *testing.T) { - username := "octocat" - password := "correct-horse-battery-staple" - - encoded := encode(username, password) - decodedUsername, decodedPassword := decode(encoded) - if got, want := decodedUsername, username; got != want { - t.Errorf("Want decoded username %s, got %s", want, got) - } - if got, want := decodedPassword, password; got != want { - t.Errorf("Want decoded password %s, got %s", want, got) - } -} - -func TestDecodeInvalid(t *testing.T) { - username, password := decode("b2N0b2NhdDp==") - if username != "" || password != "" { - t.Errorf("Expect decoding error") - } -} - -var sample = `{ - "auths": { - "https://index.docker.io/v1/": { - "auth": "b2N0b2NhdDpjb3JyZWN0LWhvcnNlLWJhdHRlcnktc3RhcGxl" - } - } -}` diff --git a/plugin/registry/auths/testdata/config.json b/plugin/registry/auths/testdata/config.json deleted file mode 100644 index 382c6908b1..0000000000 --- a/plugin/registry/auths/testdata/config.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "auths": { - "https://index.docker.io/v1/": { - "auth": "b2N0b2NhdDpjb3JyZWN0LWhvcnNlLWJhdHRlcnktc3RhcGxl" - } - } -} \ No newline at end of file diff --git a/plugin/registry/auths/testdata/config2.json b/plugin/registry/auths/testdata/config2.json deleted file mode 100644 index 4ce3413de0..0000000000 --- a/plugin/registry/auths/testdata/config2.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "auths": { - "https://gcr.io": { - "auth": "b2N0b2NhdDpjb3JyZWN0LWhvcnNlLWJhdHRlcnktc3RhcGxl" - } - } -} \ No newline at end of file diff --git a/plugin/registry/combine.go b/plugin/registry/combine.go deleted file mode 100644 index bb5da54670..0000000000 --- a/plugin/registry/combine.go +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package registry - -import ( - "context" - - "github.com/drone/drone/core" - "github.com/drone/drone/logger" - - "github.com/sirupsen/logrus" -) - -// Combine combines the registry services, allowing the -// system to source registry credential from multiple sources. -func Combine(services ...core.RegistryService) core.RegistryService { - return &combined{services} -} - -type combined struct { - sources []core.RegistryService -} - -func (c *combined) List(ctx context.Context, req *core.RegistryArgs) ([]*core.Registry, error) { - var all []*core.Registry - for _, source := range c.sources { - list, err := source.List(ctx, req) - if err != nil { - return all, err - } - all = append(all, list...) - } - // if trace level debugging is enabled we print - // all registry credentials retrieved from the - // various registry sources. - logger := logger.FromContext(ctx) - if logrus.IsLevelEnabled(logrus.TraceLevel) { - if len(all) == 0 { - logger.Traceln("registry: no registry credentials loaded") - } - for _, registry := range all { - logger.WithField("address", registry.Address). - Traceln("registry: registry credentials loaded") - } - } - return all, nil -} diff --git a/plugin/registry/combine_test.go b/plugin/registry/combine_test.go deleted file mode 100644 index e7c95f1014..0000000000 --- a/plugin/registry/combine_test.go +++ /dev/null @@ -1,52 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package registry - -import ( - "os" - "testing" - - "github.com/drone/drone/core" - "github.com/google/go-cmp/cmp" -) - -func TestCombineSources(t *testing.T) { - source := Combine( - FileSource("./auths/testdata/config.json"), - FileSource("./auths/testdata/config2.json"), - FileSource(""), // no source file, must not error - ) - got, err := source.List(noContext, &core.RegistryArgs{}) - if err != nil { - t.Error(err) - return - } - want := []*core.Registry{ - { - Address: "https://index.docker.io/v1/", - Username: "octocat", - Password: "correct-horse-battery-staple", - }, - { - Address: "https://gcr.io", - Username: "octocat", - Password: "correct-horse-battery-staple", - }, - } - if diff := cmp.Diff(got, want); diff != "" { - t.Errorf(diff) - } -} - -func TestCombineSources_Err(t *testing.T) { - source := Combine( - FileSource("./auths/testdata/config.json"), - FileSource("./auths/testdata/x.json"), - ) - _, err := source.List(noContext, &core.RegistryArgs{}) - if _, ok := err.(*os.PathError); !ok { - t.Errorf("Expect error when file does not exist") - } -} diff --git a/plugin/registry/encrypted.go b/plugin/registry/encrypted.go deleted file mode 100644 index f7a76a9331..0000000000 --- a/plugin/registry/encrypted.go +++ /dev/null @@ -1,121 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package registry - -import ( - "context" - "crypto/aes" - "crypto/cipher" - "encoding/base64" - "errors" - - "github.com/drone/drone-yaml/yaml" - "github.com/drone/drone/core" - "github.com/drone/drone/logger" - "github.com/drone/drone/plugin/registry/auths" -) - -// Encrypted returns a new encrypted registry credentials -// provider that sources credentials from the encrypted strings -// in the yaml file. -func Encrypted() core.RegistryService { - return new(encrypted) -} - -type encrypted struct { -} - -func (c *encrypted) List(ctx context.Context, in *core.RegistryArgs) ([]*core.Registry, error) { - var results []*core.Registry - - for _, match := range in.Pipeline.PullSecrets { - logger := logger.FromContext(ctx). - WithField("name", match). - WithField("kind", "secret") - logger.Trace("image_pull_secrets: find encrypted secret") - - // lookup the named secret in the manifest. If the - // secret does not exist, return a nil variable, - // allowing the next secret controller in the chain - // to be invoked. - data, ok := getEncrypted(in.Conf, match) - if !ok { - logger.Trace("image_pull_secrets: no matching encrypted secret in yaml") - return nil, nil - } - - decoded, err := base64.StdEncoding.DecodeString(string(data)) - if err != nil { - logger.WithError(err).Trace("image_pull_secrets: cannot decode secret") - return nil, err - } - - decrypted, err := decrypt(decoded, []byte(in.Repo.Secret)) - if err != nil { - logger.WithError(err).Trace("image_pull_secrets: cannot decrypt secret") - return nil, err - } - - parsed, err := auths.ParseBytes(decrypted) - if err != nil { - logger.WithError(err).Trace("image_pull_secrets: cannot parse decrypted secret") - return nil, err - } - - logger.Trace("image_pull_secrets: found encrypted secret") - results = append(results, parsed...) - } - - return results, nil -} - -func getEncrypted(manifest *yaml.Manifest, match string) (data string, ok bool) { - for _, resource := range manifest.Resources { - secret, ok := resource.(*yaml.Secret) - if !ok { - continue - } - if secret.Name != match { - continue - } - if secret.Data == "" { - continue - } - return secret.Data, true - } - return -} - -func decrypt(ciphertext []byte, key []byte) (plaintext []byte, err error) { - block, err := aes.NewCipher(key) - if err != nil { - return nil, err - } - - gcm, err := cipher.NewGCM(block) - if err != nil { - return nil, err - } - - if len(ciphertext) < gcm.NonceSize() { - return nil, errors.New("malformed ciphertext") - } - - return gcm.Open(nil, - ciphertext[:gcm.NonceSize()], - ciphertext[gcm.NonceSize():], - nil, - ) -} diff --git a/plugin/registry/endpoint.go b/plugin/registry/endpoint.go deleted file mode 100644 index f69c9c4112..0000000000 --- a/plugin/registry/endpoint.go +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -// +build !oss - -package registry - -import ( - "context" - - "github.com/drone/drone-go/plugin/registry" - "github.com/drone/drone/core" - "github.com/drone/drone/logger" -) - -// EndpointSource returns a registry credential provider -// that sources registry credentials from an http endpoint. -func EndpointSource(endpoint, secret string, skipVerify bool) core.RegistryService { - return &service{ - endpoint: endpoint, - secret: secret, - skipVerify: skipVerify, - } -} - -type service struct { - endpoint string - secret string - skipVerify bool -} - -func (c *service) List(ctx context.Context, in *core.RegistryArgs) ([]*core.Registry, error) { - if c.endpoint == "" { - return nil, nil - } - logger := logger.FromContext(ctx) - logger.Trace("registry: plugin: get credentials") - - req := ®istry.Request{ - Repo: toRepo(in.Repo), - Build: toBuild(in.Build), - } - client := registry.Client(c.endpoint, c.secret, c.skipVerify) - res, err := client.List(ctx, req) - if err != nil { - logger.WithError(err).Warn("registry: plugin: cannot get credentials") - return nil, err - } - - var registries []*core.Registry - for _, registry := range res { - registries = append(registries, &core.Registry{ - Address: registry.Address, - Username: registry.Username, - Password: registry.Password, - }) - logger.WithField("address", registry.Address). - Trace("registry: plugin: found credentials") - } - return registries, nil -} diff --git a/plugin/registry/endpoint_oss.go b/plugin/registry/endpoint_oss.go deleted file mode 100644 index f94c3892c1..0000000000 --- a/plugin/registry/endpoint_oss.go +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build oss - -package registry - -import "github.com/drone/drone/core" - -// EndpointSource returns a no-op registry credential provider. -func EndpointSource(string, string, bool) core.RegistryService { - return new(noop) -} diff --git a/plugin/registry/endpoint_test.go b/plugin/registry/endpoint_test.go deleted file mode 100644 index e5bbde55e2..0000000000 --- a/plugin/registry/endpoint_test.go +++ /dev/null @@ -1,89 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package registry - -import ( - "context" - "testing" - - "github.com/drone/drone/core" - "github.com/google/go-cmp/cmp" - "github.com/h2non/gock" -) - -var noContext = context.TODO() - -func TestEndpointSource(t *testing.T) { - defer gock.Off() - - gock.New("https://company.com"). - Post("/auths"). - MatchHeader("Accept", "application/vnd.drone.registry.v1\\+json"). - MatchHeader("Accept-Encoding", "identity"). - MatchHeader("Content-Type", "application/json"). - Reply(200). - BodyString(`[{"address":"index.docker.io","username":"octocat","password":"pa55word"}]`). 
- Done() - - service := EndpointSource("https://company.com/auths", "GMEuUHQfmrMRsseWxi9YlIeBtn9lm6im", false) - got, err := service.List(noContext, &core.RegistryArgs{Repo: &core.Repository{}, Build: &core.Build{}}) - if err != nil { - t.Error(err) - return - } - - want := []*core.Registry{ - { - Address: "index.docker.io", - Username: "octocat", - Password: "pa55word", - }, - } - if diff := cmp.Diff(got, want); diff != "" { - t.Errorf(diff) - return - } - - if gock.IsPending() { - t.Errorf("Unfinished requests") - return - } -} - -func TestEndpointSource_Err(t *testing.T) { - defer gock.Off() - - gock.New("https://company.com"). - Post("/auths"). - MatchHeader("Accept", "application/vnd.drone.registry.v1\\+json"). - MatchHeader("Accept-Encoding", "identity"). - MatchHeader("Content-Type", "application/json"). - Reply(404) - - service := EndpointSource("https://company.com/auths", "GMEuUHQfmrMRsseWxi9YlIeBtn9lm6im", false) - _, err := service.List(noContext, &core.RegistryArgs{Repo: &core.Repository{}, Build: &core.Build{}}) - if err == nil { - t.Errorf("Expect http.Response error") - } else if err.Error() != "Not Found" { - t.Errorf("Expect Not Found error") - } - - if gock.IsPending() { - t.Errorf("Unfinished requests") - } -} - -func TestNotConfigured(t *testing.T) { - service := EndpointSource("", "", false) - registry, err := service.List(noContext, &core.RegistryArgs{}) - if err != nil { - t.Error(err) - } - if registry != nil { - t.Errorf("Expect nil registry") - } -} diff --git a/plugin/registry/external.go b/plugin/registry/external.go deleted file mode 100644 index a3d2d51816..0000000000 --- a/plugin/registry/external.go +++ /dev/null @@ -1,183 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package registry - -import ( - "context" - "time" - - "github.com/drone/drone-go/plugin/secret" - "github.com/drone/drone-yaml/yaml" - "github.com/drone/drone/core" - "github.com/drone/drone/logger" - "github.com/drone/drone/plugin/registry/auths" - - droneapi "github.com/drone/drone-go/drone" -) - -// External returns a new external Secret controller. -func External(endpoint, secret string, skipVerify bool) core.RegistryService { - return &externalController{ - endpoint: endpoint, - secret: secret, - skipVerify: skipVerify, - } -} - -type externalController struct { - endpoint string - secret string - skipVerify bool -} - -func (c *externalController) List(ctx context.Context, in *core.RegistryArgs) ([]*core.Registry, error) { - var results []*core.Registry - - for _, match := range in.Pipeline.PullSecrets { - logger := logger.FromContext(ctx). - WithField("name", match). - WithField("kind", "secret"). - WithField("secret", c.endpoint) - logger.Trace("image_pull_secrets: find secret") - - // lookup the named secret in the manifest. If the - // secret does not exist, return a nil variable, - // allowing the next secret controller in the chain - // to be invoked. - path, name, ok := getExternal(in.Conf, match) - if !ok { - logger.Trace("image_pull_secrets: no matching secret resource in yaml") - return nil, nil - } - - logger = logger. - WithField("get.path", path). - WithField("get.name", name) - - // include a timeout to prevent an API call from - // hanging the build process indefinitely. The - // external service must return a request within - // one minute. 
- ctx, cancel := context.WithTimeout(ctx, time.Minute) - defer cancel() - - req := &secret.Request{ - Name: name, - Path: path, - Repo: toRepo(in.Repo), - Build: toBuild(in.Build), - } - client := secret.Client(c.endpoint, c.secret, c.skipVerify) - res, err := client.Find(ctx, req) - if err != nil { - logger.WithError(err).Trace("image_pull_secrets: cannot get secret") - return nil, err - } - - // if no error is returned and the secret is empty, - // this indicates the client returned No Content, - // and we should exit with no secret, but no error. - if res.Data == "" { - return nil, nil - } - - // The secret can be restricted to non-pull request - // events. If the secret is restricted, return - // empty results. - if (res.Pull == false && res.PullRequest == false) && - in.Build.Event == core.EventPullRequest { - logger.WithError(err).Trace("image_pull_secrets: pull_request access denied") - return nil, nil - } - - parsed, err := auths.ParseString(res.Data) - if err != nil { - return nil, err - } - - logger.Trace("image_pull_secrets: found secret") - results = append(results, parsed...) - } - - return results, nil -} - -func getExternal(manifest *yaml.Manifest, match string) (path, name string, ok bool) { - for _, resource := range manifest.Resources { - secret, ok := resource.(*yaml.Secret) - if !ok { - continue - } - if secret.Name != match { - continue - } - if secret.Get.Name == "" && secret.Get.Path == "" { - continue - } - return secret.Get.Path, secret.Get.Name, true - } - return -} - -func toRepo(from *core.Repository) droneapi.Repo { - return droneapi.Repo{ - ID: from.ID, - UID: from.UID, - UserID: from.UserID, - Namespace: from.Namespace, - Name: from.Name, - Slug: from.Slug, - SCM: from.SCM, - HTTPURL: from.HTTPURL, - SSHURL: from.SSHURL, - Link: from.Link, - Branch: from.Branch, - Private: from.Private, - Visibility: from.Visibility, - Active: from.Active, - Config: from.Config, - Trusted: from.Trusted, - Protected: from.Protected, - Timeout: from.Timeout, - } -} - -func toBuild(from *core.Build) droneapi.Build { - return droneapi.Build{ - ID: from.ID, - RepoID: from.RepoID, - Trigger: from.Trigger, - Number: from.Number, - Parent: from.Parent, - Status: from.Status, - Error: from.Error, - Event: from.Event, - Action: from.Action, - Link: from.Link, - Timestamp: from.Timestamp, - Title: from.Title, - Message: from.Message, - Before: from.Before, - After: from.After, - Ref: from.Ref, - Fork: from.Fork, - Source: from.Source, - Target: from.Target, - Author: from.Author, - AuthorName: from.AuthorName, - AuthorEmail: from.AuthorEmail, - AuthorAvatar: from.AuthorAvatar, - Sender: from.Sender, - Params: from.Params, - Deploy: from.Deploy, - Started: from.Started, - Finished: from.Finished, - Created: from.Created, - Updated: from.Updated, - Version: from.Version, - } -} diff --git a/plugin/registry/external_oss.go b/plugin/registry/external_oss.go deleted file mode 100644 index 683782066a..0000000000 --- a/plugin/registry/external_oss.go +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
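The external credential plugin call above is bounded by a one-minute context deadline so a hung endpoint cannot stall the build indefinitely. A minimal sketch of that guard around an ordinary HTTP request; the URL is a placeholder, not a real plugin endpoint:

package main

import (
    "context"
    "fmt"
    "net/http"
    "time"
)

// fetchWithTimeout bounds a single outbound call to one minute,
// matching the budget described in the deleted comment above.
func fetchWithTimeout(parent context.Context, url string) (int, error) {
    ctx, cancel := context.WithTimeout(parent, time.Minute)
    defer cancel()

    req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
    if err != nil {
        return 0, err
    }
    res, err := http.DefaultClient.Do(req)
    if err != nil {
        return 0, err
    }
    defer res.Body.Close()
    return res.StatusCode, nil
}

func main() {
    status, err := fetchWithTimeout(context.Background(), "https://example.com")
    fmt.Println(status, err)
}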
-// See the License for the specific language governing permissions and -// limitations under the License. - -// +build oss - -package registry - -import "github.com/drone/drone/core" - -// External returns a no-op registry credential provider. -func External(string, string, bool) core.RegistryService { - return new(noop) -} diff --git a/plugin/registry/external_test.go b/plugin/registry/external_test.go deleted file mode 100644 index 1bfb397da2..0000000000 --- a/plugin/registry/external_test.go +++ /dev/null @@ -1,7 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package registry diff --git a/plugin/registry/file.go b/plugin/registry/file.go deleted file mode 100644 index 4b7755aa76..0000000000 --- a/plugin/registry/file.go +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package registry - -import ( - "context" - - "github.com/drone/drone/core" - "github.com/drone/drone/plugin/registry/auths" - - "github.com/sirupsen/logrus" -) - -// FileSource returns a registry credential provider that -// sources registry credentials from a .docker/config.json file. -func FileSource(path string) core.RegistryService { - return ®istryConfig{ - path: path, - } -} - -type registryConfig struct { - path string -} - -func (r *registryConfig) List(ctx context.Context, req *core.RegistryArgs) ([]*core.Registry, error) { - // configuration of the .docker/config.json file path - // is optional. Ignore if empty string. - if r.path == "" { - return nil, nil - } - - logger := logrus.WithField("config", r.path) - logger.Traceln("registry: parsing docker config.json file") - - regs, err := auths.ParseFile(r.path) - if err != nil { - logger.WithError(err).Errorln("registry: cannot parse docker config.json file") - return nil, err - } - - return regs, err -} diff --git a/plugin/registry/file_oss.go b/plugin/registry/file_oss.go deleted file mode 100644 index fef39cb084..0000000000 --- a/plugin/registry/file_oss.go +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build oss - -package registry - -import "github.com/drone/drone/core" - -// FileSource returns a no-op registry credential provider. -func FileSource(string) core.RegistryService { - return new(noop) -} diff --git a/plugin/registry/file_test.go b/plugin/registry/file_test.go deleted file mode 100644 index 40f9eb310e..0000000000 --- a/plugin/registry/file_test.go +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
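FileSource above hands the configured path to auths.ParseFile, which is not part of this hunk. The sketch below is a simplified stand-in for that parser, assuming only the common config.json layout in which each auths entry carries a base64 "auth" field of the form user:password (the encoded value is the octocat fixture used by the deleted tests):

package main

import (
    "encoding/base64"
    "encoding/json"
    "fmt"
    "strings"
)

// dockerConfig models the subset of .docker/config.json that carries
// inline credentials.
type dockerConfig struct {
    Auths map[string]struct {
        Auth string `json:"auth"`
    } `json:"auths"`
}

// parseAuths decodes each "auth" entry (base64 of "user:password")
// into an address -> {username, password} map.
func parseAuths(data []byte) (map[string][2]string, error) {
    var cfg dockerConfig
    if err := json.Unmarshal(data, &cfg); err != nil {
        return nil, err
    }
    out := map[string][2]string{}
    for address, entry := range cfg.Auths {
        raw, err := base64.StdEncoding.DecodeString(entry.Auth)
        if err != nil {
            return nil, err
        }
        username, password, ok := strings.Cut(string(raw), ":")
        if !ok {
            continue // skip malformed entries
        }
        out[address] = [2]string{username, password}
    }
    return out, nil
}

func main() {
    config := `{"auths":{"https://index.docker.io/v1/":{"auth":"b2N0b2NhdDpjb3JyZWN0LWhvcnNlLWJhdHRlcnktc3RhcGxl"}}}`
    creds, _ := parseAuths([]byte(config))
    fmt.Println(creds)
}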
- -// +build !oss - -package registry - -import ( - "os" - "testing" - - "github.com/drone/drone/core" - "github.com/google/go-cmp/cmp" -) - -func TestFileSource(t *testing.T) { - source := FileSource("./auths/testdata/config.json") - got, err := source.List(noContext, &core.RegistryArgs{}) - if err != nil { - t.Error(err) - } - want := []*core.Registry{ - { - Address: "https://index.docker.io/v1/", - Username: "octocat", - Password: "correct-horse-battery-staple", - }, - } - if diff := cmp.Diff(got, want); diff != "" { - t.Errorf(diff) - } -} - -func TestFileSourceErr(t *testing.T) { - source := FileSource("./auths/testdata/x.json") - _, err := source.List(noContext, &core.RegistryArgs{}) - if _, ok := err.(*os.PathError); !ok { - t.Errorf("Expect error when file does not exist") - } -} diff --git a/plugin/registry/noop.go b/plugin/registry/noop.go deleted file mode 100644 index 8e2e77e241..0000000000 --- a/plugin/registry/noop.go +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package registry - -import ( - "context" - - "github.com/drone/drone/core" -) - -type noop struct{} - -func (noop) List(context.Context, *core.RegistryArgs) ([]*core.Registry, error) { - return nil, nil -} diff --git a/plugin/registry/static.go b/plugin/registry/static.go deleted file mode 100644 index 5f734fd2f1..0000000000 --- a/plugin/registry/static.go +++ /dev/null @@ -1,70 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package registry - -import ( - "context" - - "github.com/drone/drone/core" - "github.com/drone/drone/logger" - "github.com/drone/drone/plugin/registry/auths" -) - -// Static returns a new static credentials controller. -func Static(secrets []*core.Secret) core.RegistryService { - return &staticController{secrets: secrets} -} - -type staticController struct { - secrets []*core.Secret -} - -func (c *staticController) List(ctx context.Context, in *core.RegistryArgs) ([]*core.Registry, error) { - static := map[string]*core.Secret{} - for _, secret := range c.secrets { - static[secret.Name] = secret - } - - var results []*core.Registry - for _, name := range in.Pipeline.PullSecrets { - logger := logger.FromContext(ctx).WithField("name", name) - logger.Trace("registry: database: find secret") - - secret, ok := static[name] - if !ok { - logger.Trace("registry: database: cannot find secret") - continue - } - - // The secret can be restricted to non-pull request - // events. 
If the secret is restricted, return - // empty results. - if secret.PullRequest == false && - in.Build.Event == core.EventPullRequest { - logger.Trace("registry: database: pull_request access denied") - continue - } - - logger.Trace("registry: database: secret found") - parsed, err := auths.ParseString(secret.Data) - if err != nil { - logger.WithError(err).Error("registry: database: parsing error") - return nil, err - } - - results = append(results, parsed...) - } - return results, nil -} diff --git a/plugin/registry/static_test.go b/plugin/registry/static_test.go deleted file mode 100644 index 2eb8b2cdc6..0000000000 --- a/plugin/registry/static_test.go +++ /dev/null @@ -1,121 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package registry - -import ( - "testing" - - "github.com/drone/drone-yaml/yaml" - "github.com/drone/drone/core" - "github.com/google/go-cmp/cmp" -) - -var mockDockerAuthConfig = `{ - "auths": { - "https://index.docker.io/v1/": { - "auth": "b2N0b2NhdDpjb3JyZWN0LWhvcnNlLWJhdHRlcnktc3RhcGxl" - } - } -}` - -func TestStatic(t *testing.T) { - secrets := []*core.Secret{ - { - Name: "dockerhub", - Data: mockDockerAuthConfig, - }, - } - - manifest, err := yaml.ParseString("kind: pipeline\nimage_pull_secrets: [ dockerhub ]") - if err != nil { - t.Error(err) - return - } - - args := &core.RegistryArgs{ - Build: &core.Build{Event: core.EventPush}, - Conf: manifest, - Pipeline: manifest.Resources[0].(*yaml.Pipeline), - } - service := Static(secrets) - got, err := service.List(noContext, args) - if err != nil { - t.Error(err) - return - } - - want := []*core.Registry{ - { - Address: "https://index.docker.io/v1/", - Username: "octocat", - Password: "correct-horse-battery-staple", - }, - } - if diff := cmp.Diff(got, want); diff != "" { - t.Errorf(diff) - return - } -} - -func TestStatic_NoMatch(t *testing.T) { - secrets := []*core.Secret{ - { - Name: "dockerhub", - Data: mockDockerAuthConfig, - }, - } - - manifest, err := yaml.ParseString("kind: pipeline\nimage_pull_secrets: [ unknown ]") - if err != nil { - t.Error(err) - return - } - - args := &core.RegistryArgs{ - Build: &core.Build{Event: core.EventPush}, - Conf: manifest, - Pipeline: manifest.Resources[0].(*yaml.Pipeline), - } - service := Static(secrets) - got, err := service.List(noContext, args) - if err != nil { - t.Error(err) - return - } - if len(got) != 0 { - t.Errorf("Expect no results") - } -} - -func TestStatic_DisablePullRequest(t *testing.T) { - secrets := []*core.Secret{ - { - Name: "dockerhub", - Data: mockDockerAuthConfig, - PullRequest: false, - }, - } - - manifest, err := yaml.ParseString("kind: pipeline\nimage_pull_secrets: [ dockerhub ]") - if err != nil { - t.Error(err) - return - } - - args := &core.RegistryArgs{ - Build: &core.Build{Event: core.EventPullRequest}, - Conf: manifest, - Pipeline: manifest.Resources[0].(*yaml.Pipeline), - } - service := Static(secrets) - got, err := service.List(noContext, args) - if err != nil { - t.Error(err) - return - } - if len(got) != 0 { - t.Errorf("Expect no results") - } -} diff --git a/plugin/secret/combine.go b/plugin/secret/combine.go deleted file mode 100644 index 9c85ce8017..0000000000 --- a/plugin/secret/combine.go +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package secret - -import ( - "context" - "strings" - - "github.com/drone/drone/core" -) - -// Combine combines the secret services, allowing the system -// to get pipeline secrets from multiple sources. -func Combine(services ...core.SecretService) core.SecretService { - return &combined{services} -} - -type combined struct { - sources []core.SecretService -} - -func (c *combined) Find(ctx context.Context, in *core.SecretArgs) (*core.Secret, error) { - // Ignore any requests for the .docker/config.json file. - // This file is reserved for internal use only, and is - // never exposed to the build environment. - if isDockerConfig(in.Name) { - return nil, nil - } - - for _, source := range c.sources { - secret, err := source.Find(ctx, in) - if err != nil { - return nil, err - } - if secret == nil { - continue - } - // if the secret object is not nil, but is empty - // we should assume the secret service returned a - // 204 no content, and proceed to the next service - // in the chain. - if secret.Data == "" { - continue - } - return secret, nil - } - return nil, nil -} - -// helper function returns true if the build event matches the -// docker_auth_config variable name. -func isDockerConfig(name string) bool { - return strings.EqualFold(name, "docker_auth_config") || - strings.EqualFold(name, ".dockerconfigjson") || - strings.EqualFold(name, ".dockerconfig") -} diff --git a/plugin/secret/combine_test.go b/plugin/secret/combine_test.go deleted file mode 100644 index fb44c68461..0000000000 --- a/plugin/secret/combine_test.go +++ /dev/null @@ -1,5 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package secret diff --git a/plugin/secret/encrypted.go b/plugin/secret/encrypted.go deleted file mode 100644 index fb65feaeae..0000000000 --- a/plugin/secret/encrypted.go +++ /dev/null @@ -1,119 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package secret - -import ( - "context" - "crypto/aes" - "crypto/cipher" - "encoding/base64" - "errors" - - "github.com/drone/drone-yaml/yaml" - "github.com/drone/drone/core" - "github.com/drone/drone/logger" -) - -// Encrypted returns a new encrypted Secret controller. -func Encrypted() core.SecretService { - return new(encrypted) -} - -type encrypted struct { -} - -func (c *encrypted) Find(ctx context.Context, in *core.SecretArgs) (*core.Secret, error) { - logger := logger.FromContext(ctx). - WithField("name", in.Name). 
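The combined secret service above refuses requests for the reserved docker credential names and otherwise returns the first non-empty secret in the chain, treating an empty payload as a no-content reply from that source. A standalone sketch with simplified stand-in types (Secret, SecretSource, mapSource):

package main

import (
    "context"
    "fmt"
    "strings"
)

type Secret struct {
    Name string
    Data string
}

type SecretSource interface {
    Find(ctx context.Context, name string) (*Secret, error)
}

// isDockerConfig reports whether the name refers to the reserved docker
// credential file, which is never exposed to the build environment.
func isDockerConfig(name string) bool {
    return strings.EqualFold(name, "docker_auth_config") ||
        strings.EqualFold(name, ".dockerconfigjson") ||
        strings.EqualFold(name, ".dockerconfig")
}

// find walks the sources in order and returns the first secret with
// non-empty data; a nil or empty secret means "no content here" and the
// next source is consulted.
func find(ctx context.Context, name string, sources ...SecretSource) (*Secret, error) {
    if isDockerConfig(name) {
        return nil, nil
    }
    for _, s := range sources {
        secret, err := s.Find(ctx, name)
        if err != nil {
            return nil, err
        }
        if secret == nil || secret.Data == "" {
            continue
        }
        return secret, nil
    }
    return nil, nil
}

// mapSource is a trivial in-memory source used to exercise find.
type mapSource map[string]string

func (m mapSource) Find(_ context.Context, name string) (*Secret, error) {
    if data, ok := m[name]; ok {
        return &Secret{Name: name, Data: data}, nil
    }
    return nil, nil
}

func main() {
    secret, _ := find(context.Background(), "slack_token",
        mapSource{},                              // first source has no match
        mapSource{"slack_token": "xoxb-example"}, // second source wins
    )
    fmt.Println(secret.Name, secret.Data)
}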
- WithField("kind", "secret") - - // lookup the named secret in the manifest. If the - // secret does not exist, return a nil variable, - // allowing the next secret controller in the chain - // to be invoked. - data, ok := getEncrypted(in.Conf, in.Name) - if !ok { - logger.Trace("secret: encrypted: no matching secret") - return nil, nil - } - - // if the build event is a pull request and the source - // repository is a fork, the secret is not exposed to - // the pipeline, for security reasons. - if in.Repo.Private == false && - in.Build.Event == core.EventPullRequest && - in.Build.Fork != "" { - logger.Trace("secret: encrypted: restricted from forks") - return nil, nil - } - - decoded, err := base64.StdEncoding.DecodeString(string(data)) - if err != nil { - logger.WithError(err).Trace("secret: encrypted: cannot decode") - return nil, err - } - - decrypted, err := decrypt(decoded, []byte(in.Repo.Secret)) - if err != nil { - logger.WithError(err).Trace("secret: encrypted: cannot decrypt") - return nil, err - } - - logger.Trace("secret: encrypted: found matching secret") - - return &core.Secret{ - Name: in.Name, - Data: string(decrypted), - }, nil -} - -func getEncrypted(manifest *yaml.Manifest, match string) (data string, ok bool) { - for _, resource := range manifest.Resources { - secret, ok := resource.(*yaml.Secret) - if !ok { - continue - } - if secret.Name != match { - continue - } - if secret.Data == "" { - continue - } - return secret.Data, true - } - return -} - -func decrypt(ciphertext []byte, key []byte) (plaintext []byte, err error) { - block, err := aes.NewCipher(key) - if err != nil { - return nil, err - } - - gcm, err := cipher.NewGCM(block) - if err != nil { - return nil, err - } - - if len(ciphertext) < gcm.NonceSize() { - return nil, errors.New("malformed ciphertext") - } - - return gcm.Open(nil, - ciphertext[:gcm.NonceSize()], - ciphertext[gcm.NonceSize():], - nil, - ) -} diff --git a/plugin/secret/encrypted_test.go b/plugin/secret/encrypted_test.go deleted file mode 100644 index 69a03023f9..0000000000 --- a/plugin/secret/encrypted_test.go +++ /dev/null @@ -1,15 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package secret diff --git a/plugin/secret/external.go b/plugin/secret/external.go deleted file mode 100644 index e74dbc13d8..0000000000 --- a/plugin/secret/external.go +++ /dev/null @@ -1,175 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package secret - -import ( - "context" - "time" - - "github.com/drone/drone-yaml/yaml" - "github.com/drone/drone/core" - "github.com/drone/drone/logger" - - "github.com/drone/drone-go/drone" - "github.com/drone/drone-go/plugin/secret" -) - -// External returns a new external Secret controller. 
-func External(endpoint, secret string, skipVerify bool) core.SecretService { - return &externalController{ - endpoint: endpoint, - secret: secret, - skipVerify: skipVerify, - } -} - -type externalController struct { - endpoint string - secret string - skipVerify bool -} - -func (c *externalController) Find(ctx context.Context, in *core.SecretArgs) (*core.Secret, error) { - if c.endpoint == "" { - return nil, nil - } - - logger := logger.FromContext(ctx). - WithField("name", in.Name). - WithField("kind", "secret") - - // lookup the named secret in the manifest. If the - // secret does not exist, return a nil variable, - // allowing the next secret controller in the chain - // to be invoked. - path, name, ok := getExternal(in.Conf, in.Name) - if !ok { - logger.Trace("secret: external: no matching secret") - return nil, nil - } - - // include a timeout to prevent an API call from - // hanging the build process indefinitely. The - // external service must return a request within - // one minute. - ctx, cancel := context.WithTimeout(ctx, time.Minute) - defer cancel() - - req := &secret.Request{ - Name: name, - Path: path, - Repo: toRepo(in.Repo), - Build: toBuild(in.Build), - } - client := secret.Client(c.endpoint, c.secret, c.skipVerify) - res, err := client.Find(ctx, req) - if err != nil { - logger.WithError(err).Trace("secret: external: cannot get secret") - return nil, err - } - - // if no error is returned and the secret is empty, - // this indicates the client returned No Content, - // and we should exit with no secret, but no error. - if res.Data == "" { - logger.Trace("secret: external: secret disabled for pull requests") - return nil, nil - } - - // the secret can be restricted to non-pull request - // events. If the secret is restricted, return - // empty results. 
- if (res.Pull == false && res.PullRequest == false) && - in.Build.Event == core.EventPullRequest { - logger.Trace("secret: external: restricted from forks") - return nil, nil - } - - logger.Trace("secret: external: found matching secret") - - return &core.Secret{ - Name: in.Name, - Data: res.Data, - PullRequest: res.Pull, - }, nil -} - -func getExternal(manifest *yaml.Manifest, match string) (path, name string, ok bool) { - for _, resource := range manifest.Resources { - secret, ok := resource.(*yaml.Secret) - if !ok { - continue - } - if secret.Name != match { - continue - } - if secret.Get.Name == "" && secret.Get.Path == "" { - continue - } - return secret.Get.Path, secret.Get.Name, true - } - return -} - -func toRepo(from *core.Repository) drone.Repo { - return drone.Repo{ - ID: from.ID, - UID: from.UID, - UserID: from.UserID, - Namespace: from.Namespace, - Name: from.Name, - Slug: from.Slug, - SCM: from.SCM, - HTTPURL: from.HTTPURL, - SSHURL: from.SSHURL, - Link: from.Link, - Branch: from.Branch, - Private: from.Private, - Visibility: from.Visibility, - Active: from.Active, - Config: from.Config, - Trusted: from.Trusted, - Protected: from.Protected, - Timeout: from.Timeout, - } -} - -func toBuild(from *core.Build) drone.Build { - return drone.Build{ - ID: from.ID, - RepoID: from.RepoID, - Trigger: from.Trigger, - Number: from.Number, - Parent: from.Parent, - Status: from.Status, - Error: from.Error, - Event: from.Event, - Action: from.Action, - Link: from.Link, - Timestamp: from.Timestamp, - Title: from.Title, - Message: from.Message, - Before: from.Before, - After: from.After, - Ref: from.Ref, - Fork: from.Fork, - Source: from.Source, - Target: from.Target, - Author: from.Author, - AuthorName: from.AuthorName, - AuthorEmail: from.AuthorEmail, - AuthorAvatar: from.AuthorAvatar, - Sender: from.Sender, - Params: from.Params, - Deploy: from.Deploy, - Started: from.Started, - Finished: from.Finished, - Created: from.Created, - Updated: from.Updated, - Version: from.Version, - } -} diff --git a/plugin/secret/external_oss.go b/plugin/secret/external_oss.go deleted file mode 100644 index 04500f0275..0000000000 --- a/plugin/secret/external_oss.go +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build oss - -package secret - -import ( - "context" - - "github.com/drone/drone/core" -) - -// External returns a no-op registry secret provider. -func External(string, string, bool) core.SecretService { - return new(noop) -} - -type noop struct{} - -func (noop) Find(context.Context, *core.SecretArgs) (*core.Secret, error) { - return nil, nil -} diff --git a/plugin/secret/external_test.go b/plugin/secret/external_test.go deleted file mode 100644 index fb44c68461..0000000000 --- a/plugin/secret/external_test.go +++ /dev/null @@ -1,5 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -package secret diff --git a/plugin/secret/static.go b/plugin/secret/static.go deleted file mode 100644 index b531e44b08..0000000000 --- a/plugin/secret/static.go +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package secret - -import ( - "context" - "strings" - - "github.com/drone/drone/core" -) - -// Static returns a new static Secret controller. -func Static(secrets []*core.Secret) core.SecretService { - return &staticController{secrets: secrets} -} - -type staticController struct { - secrets []*core.Secret -} - -func (c *staticController) Find(ctx context.Context, in *core.SecretArgs) (*core.Secret, error) { - for _, secret := range c.secrets { - if !strings.EqualFold(secret.Name, in.Name) { - continue - } - // The secret can be restricted to non-pull request - // events. If the secret is restricted, return - // empty results. - if secret.PullRequest == false && - in.Build.Event == core.EventPullRequest { - continue - } - return secret, nil - } - return nil, nil -} diff --git a/plugin/secret/static_test.go b/plugin/secret/static_test.go deleted file mode 100644 index 6c6f430620..0000000000 --- a/plugin/secret/static_test.go +++ /dev/null @@ -1,98 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
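Both the static registry controller and the static secret controller above gate on the PullRequest flag so restricted credentials are never handed to pull request builds. A tiny sketch of that check, using stand-in types and an illustrative event constant:

package main

import "fmt"

// Secret is a simplified stand-in carrying only the fields the check needs.
type Secret struct {
    Name        string
    PullRequest bool // false: do not expose to pull request builds
}

// eventPullRequest is an illustrative stand-in for the core event constant.
const eventPullRequest = "pull_request"

// exposed reports whether a secret may be handed to a build with the
// given event, hiding restricted secrets from pull request events.
func exposed(s *Secret, event string) bool {
    return s.PullRequest || event != eventPullRequest
}

func main() {
    s := &Secret{Name: "docker_password", PullRequest: false}
    fmt.Println(exposed(s, "push"))           // true
    fmt.Println(exposed(s, eventPullRequest)) // false
}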
- -package secret - -import ( - "context" - "testing" - - "github.com/drone/drone/core" -) - -var noContext = context.Background() - -func TestStatic(t *testing.T) { - secrets := []*core.Secret{ - {Name: "docker_username"}, - {Name: "docker_password"}, - } - args := &core.SecretArgs{ - Name: "docker_password", - Build: &core.Build{Event: core.EventPush}, - } - service := Static(secrets) - secret, err := service.Find(noContext, args) - if err != nil { - t.Error(err) - return - } - if secret != secrets[1] { - t.Errorf("expect docker_password") - } -} - -func TestStaticNotFound(t *testing.T) { - secrets := []*core.Secret{ - {Name: "docker_username"}, - {Name: "docker_password"}, - } - args := &core.SecretArgs{ - Name: "slack_token", - Build: &core.Build{Event: core.EventPush}, - } - service := Static(secrets) - secret, err := service.Find(noContext, args) - if err != nil { - t.Error(err) - return - } - if secret != nil { - t.Errorf("Expect secret not found") - } -} - -func TestStaticPullRequestDisabled(t *testing.T) { - secrets := []*core.Secret{ - {Name: "docker_username"}, - {Name: "docker_password", PullRequest: false}, - } - args := &core.SecretArgs{ - Name: "docker_password", - Build: &core.Build{Event: core.EventPullRequest}, - } - service := Static(secrets) - secret, err := service.Find(noContext, args) - if err != nil { - t.Error(err) - return - } - if secret != nil { - t.Errorf("Expect secret not found") - } -} - -func TestStaticPullRequestEnabled(t *testing.T) { - secrets := []*core.Secret{ - {Name: "docker_username"}, - {Name: "docker_password", PullRequest: true}, - } - args := &core.SecretArgs{ - Name: "docker_password", - Build: &core.Build{Event: core.EventPullRequest}, - } - service := Static(secrets) - secret, err := service.Find(noContext, args) - if err != nil { - t.Error(err) - return - } - if err != nil { - t.Error(err) - return - } - if secret != secrets[1] { - t.Errorf("expect docker_username") - } -} diff --git a/plugin/validator/combine.go b/plugin/validator/combine.go deleted file mode 100644 index c771c88eb8..0000000000 --- a/plugin/validator/combine.go +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package validator - -import ( - "context" - - "github.com/drone/drone/core" -) - -// Combine combines the conversion services, provision support -// for multiple conversion utilities. -func Combine(services ...core.ValidateService) core.ValidateService { - return &combined{services} -} - -type combined struct { - sources []core.ValidateService -} - -func (c *combined) Validate(ctx context.Context, req *core.ValidateArgs) error { - for _, source := range c.sources { - if err := source.Validate(ctx, req); err != nil { - return err - } - } - return nil -} diff --git a/plugin/validator/combine_test.go b/plugin/validator/combine_test.go deleted file mode 100644 index 22627c7a38..0000000000 --- a/plugin/validator/combine_test.go +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright 2019 Drone.IO Inc. 
All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package validator - -import ( - "context" - "errors" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/mock" - - "github.com/golang/mock/gomock" -) - -var noContext = context.Background() - -var mockFile = ` -kind: pipeline -type: docker -name: testing -` - -func TestCombine(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - args := &core.ValidateArgs{ - User: &core.User{Login: "octocat"}, - Repo: &core.Repository{Slug: "octocat/hello-world", Config: ".drone.yml"}, - Build: &core.Build{After: "6d144de7"}, - Config: &core.Config{}, - } - - service := mock.NewMockValidateService(controller) - service.EXPECT().Validate(noContext, args).Return(nil) - - err := Combine(service).Validate(noContext, args) - if err != nil { - t.Error(err) - } -} - -func TestCombineErr(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - resp := errors.New("") - service := mock.NewMockValidateService(controller) - service.EXPECT().Validate(noContext, nil).Return(resp) - - err := Combine(service).Validate(noContext, nil) - if err != resp { - t.Errorf("expected convert service error") - } -} diff --git a/plugin/validator/filter.go b/plugin/validator/filter.go deleted file mode 100644 index 0a1090511f..0000000000 --- a/plugin/validator/filter.go +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package validator - -import ( - "context" - "path/filepath" - - "github.com/drone/drone/core" -) - -// Filter returns a validation service that skips -// pipelines that do not match the filter criteria. -func Filter(include, exclude []string) core.ValidateService { - return &filter{ - include: include, - exclude: exclude, - } -} - -type filter struct { - include []string - exclude []string -} - -func (f *filter) Validate(ctx context.Context, in *core.ValidateArgs) error { - if len(f.include) > 0 { - for _, pattern := range f.include { - ok, _ := filepath.Match(pattern, in.Repo.Slug) - if ok { - return nil - } - } - - // if the include list is specified, and the - // repository does not match any patterns in - // the include list, it should be skipped. - return core.ErrValidatorSkip - } - - if len(f.exclude) > 0 { - for _, pattern := range f.exclude { - ok, _ := filepath.Match(pattern, in.Repo.Slug) - if ok { - // if the exclude list is specified, and - // the repository matches a pattern in the - // exclude list, it should be skipped. - return core.ErrValidatorSkip - } - } - } - - return nil -} diff --git a/plugin/validator/filter_test.go b/plugin/validator/filter_test.go deleted file mode 100644 index 990cc9a0c6..0000000000 --- a/plugin/validator/filter_test.go +++ /dev/null @@ -1,70 +0,0 @@ -// Copyright 2019 Drone IO, Inc. 
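The deleted filter.go above skips repositories by matching their slug against optional include and exclude glob lists with filepath.Match: a non-empty include list must match, and any exclude match wins. The same rules as a small standalone function:

package main

import (
    "fmt"
    "path/filepath"
)

// skip reports whether a repository slug should be skipped under the
// include/exclude rules described above.
func skip(slug string, include, exclude []string) bool {
    if len(include) > 0 {
        matched := false
        for _, pattern := range include {
            if ok, _ := filepath.Match(pattern, slug); ok {
                matched = true
                break
            }
        }
        if !matched {
            return true // include list present but nothing matched
        }
    }
    for _, pattern := range exclude {
        if ok, _ := filepath.Match(pattern, slug); ok {
            return true // explicitly excluded
        }
    }
    return false
}

func main() {
    fmt.Println(skip("octocat/hello-world", []string{"octocat/*"}, nil))    // false: included
    fmt.Println(skip("octocat/hello-world", nil, []string{"octocat/*"}))    // true: excluded
    fmt.Println(skip("octocat/hello-world", []string{"spaceghost/*"}, nil)) // true: not in include list
}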
-// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package validator - -import ( - "testing" - - "github.com/drone/drone/core" -) - -func TestFilter_None(t *testing.T) { - f := Filter(nil, nil) - if err := f.Validate(noContext, nil); err != nil { - t.Error(err) - } -} - -func TestFilter_Include(t *testing.T) { - args := &core.ValidateArgs{ - Repo: &core.Repository{Slug: "octocat/hello-world"}, - } - - f := Filter([]string{"octocat/hello-world"}, nil) - if err := f.Validate(noContext, args); err != nil { - t.Error(err) - } - - f = Filter([]string{"octocat/*"}, nil) - if err := f.Validate(noContext, args); err != nil { - t.Error(err) - } - - f = Filter([]string{"spaceghost/*"}, nil) - if err := f.Validate(noContext, args); err != core.ErrValidatorSkip { - t.Errorf("Expect ErrValidatorSkip, got %s", err) - } -} - -func TestFilter_Exclude(t *testing.T) { - args := &core.ValidateArgs{ - Repo: &core.Repository{Slug: "octocat/hello-world"}, - } - - f := Filter(nil, []string{"octocat/hello-world"}) - if err := f.Validate(noContext, args); err != core.ErrValidatorSkip { - t.Errorf("Expect ErrValidatorSkip, got %s", err) - } - - f = Filter(nil, []string{"octocat/*"}) - if err := f.Validate(noContext, args); err != core.ErrValidatorSkip { - t.Errorf("Expect ErrValidatorSkip, got %s", err) - } - - f = Filter(nil, []string{"spaceghost/*"}) - if err := f.Validate(noContext, args); err != nil { - t.Error(err) - } -} diff --git a/plugin/validator/noop.go b/plugin/validator/noop.go deleted file mode 100644 index 5010301051..0000000000 --- a/plugin/validator/noop.go +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build oss - -package validator - -import ( - "context" - - "github.com/drone/drone/core" -) - -type noop struct{} - -func (noop) Validate(context.Context, *core.ValidateArgs) error { return nil } diff --git a/plugin/validator/remote.go b/plugin/validator/remote.go deleted file mode 100644 index 1bdfb43910..0000000000 --- a/plugin/validator/remote.go +++ /dev/null @@ -1,123 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -// +build !oss - -package validator - -import ( - "context" - "time" - - "github.com/drone/drone-go/drone" - "github.com/drone/drone-go/plugin/validator" - "github.com/drone/drone/core" -) - -// Remote returns a conversion service that converts the -// configuration file using a remote http service. -func Remote(endpoint, signer string, skipVerify bool, timeout time.Duration) core.ValidateService { - return &remote{ - endpoint: endpoint, - secret: signer, - skipVerify: skipVerify, - timeout: timeout, - } -} - -type remote struct { - endpoint string - secret string - skipVerify bool - timeout time.Duration -} - -func (g *remote) Validate(ctx context.Context, in *core.ValidateArgs) error { - if g.endpoint == "" { - return nil - } - // include a timeout to prevent an API call from - // hanging the build process indefinitely. The - // external service must return a response within - // the configured timeout (default 1m). - ctx, cancel := context.WithTimeout(ctx, g.timeout) - defer cancel() - - req := &validator.Request{ - Repo: toRepo(in.Repo), - Build: toBuild(in.Build), - Config: drone.Config{ - Data: in.Config.Data, - }, - } - client := validator.Client(g.endpoint, g.secret, g.skipVerify) - err := client.Validate(ctx, req) - switch err { - case validator.ErrBlock: - return core.ErrValidatorBlock - case validator.ErrSkip: - return core.ErrValidatorSkip - default: - return err - } -} - -func toRepo(from *core.Repository) drone.Repo { - return drone.Repo{ - ID: from.ID, - UID: from.UID, - UserID: from.UserID, - Namespace: from.Namespace, - Name: from.Name, - Slug: from.Slug, - SCM: from.SCM, - HTTPURL: from.HTTPURL, - SSHURL: from.SSHURL, - Link: from.Link, - Branch: from.Branch, - Private: from.Private, - Visibility: from.Visibility, - Active: from.Active, - Config: from.Config, - Trusted: from.Trusted, - Protected: from.Protected, - Timeout: from.Timeout, - } -} - -func toBuild(from *core.Build) drone.Build { - return drone.Build{ - ID: from.ID, - RepoID: from.RepoID, - Trigger: from.Trigger, - Number: from.Number, - Parent: from.Parent, - Status: from.Status, - Error: from.Error, - Event: from.Event, - Action: from.Action, - Link: from.Link, - Timestamp: from.Timestamp, - Title: from.Title, - Message: from.Message, - Before: from.Before, - After: from.After, - Ref: from.Ref, - Fork: from.Fork, - Source: from.Source, - Target: from.Target, - Author: from.Author, - AuthorName: from.AuthorName, - AuthorEmail: from.AuthorEmail, - AuthorAvatar: from.AuthorAvatar, - Sender: from.Sender, - Params: from.Params, - Deploy: from.Deploy, - Started: from.Started, - Finished: from.Finished, - Created: from.Created, - Updated: from.Updated, - Version: from.Version, - } -} diff --git a/plugin/validator/remote_oss.go b/plugin/validator/remote_oss.go deleted file mode 100644 index c6a9d4c169..0000000000 --- a/plugin/validator/remote_oss.go +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
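remote.go above forwards the configuration to the validation plugin and then translates the plugin's sentinel errors into the core sentinels, passing every other error through untouched. A sketch of that translation step, using hypothetical local sentinel values in place of the drone-go and core error variables:

package main

import (
    "errors"
    "fmt"
)

// Hypothetical sentinels standing in for the plugin-side and core-side
// error values translated by the deleted Validate method.
var (
    errPluginBlock = errors.New("plugin: block")
    errPluginSkip  = errors.New("plugin: skip")

    errValidatorBlock = errors.New("core: validation blocked")
    errValidatorSkip  = errors.New("core: validation skipped")
)

// translate maps plugin sentinels onto the core sentinels and returns
// anything else (including nil) unchanged.
func translate(err error) error {
    switch {
    case errors.Is(err, errPluginBlock):
        return errValidatorBlock
    case errors.Is(err, errPluginSkip):
        return errValidatorSkip
    default:
        return err
    }
}

func main() {
    fmt.Println(translate(errPluginBlock)) // core: validation blocked
    fmt.Println(translate(errPluginSkip))  // core: validation skipped
    fmt.Println(translate(nil))            // <nil>
}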
- -// +build oss - -package validator - -import ( - "time" - - "github.com/drone/drone/core" -) - -// Remote returns a conversion service that converts the -// configuration file using a remote http service. -func Remote(endpoint, signer string, skipVerify bool, timeout time.Duration) core.ValidateService { - return new(noop) -} diff --git a/plugin/validator/remote_test.go b/plugin/validator/remote_test.go deleted file mode 100644 index 518de4719c..0000000000 --- a/plugin/validator/remote_test.go +++ /dev/null @@ -1,7 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package validator diff --git a/plugin/webhook/config.go b/plugin/webhook/config.go deleted file mode 100644 index b712a041da..0000000000 --- a/plugin/webhook/config.go +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package webhook - -import "github.com/drone/drone/core" - -// Config provides the webhook configuration. -type Config struct { - Events []string - Endpoint []string - Secret string - System *core.System -} diff --git a/plugin/webhook/webhook.go b/plugin/webhook/webhook.go deleted file mode 100644 index 60eff999d3..0000000000 --- a/plugin/webhook/webhook.go +++ /dev/null @@ -1,138 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package webhook - -import ( - "bytes" - "context" - "crypto/sha256" - "encoding/base64" - "encoding/json" - "net/http" - "path/filepath" - "time" - - "github.com/drone/drone/core" - - "github.com/99designs/httpsignatures-go" -) - -// required http headers -var headers = []string{ - "date", - "digest", -} - -var signer = httpsignatures.NewSigner( - httpsignatures.AlgorithmHmacSha256, - headers..., -) - -// New returns a new Webhook sender. -func New(config Config) core.WebhookSender { - return &sender{ - Events: config.Events, - Endpoints: config.Endpoint, - Secret: config.Secret, - System: config.System, - } -} - -type payload struct { - *core.WebhookData - System *core.System `json:"system,omitempty"` -} - -type sender struct { - Client *http.Client - Events []string - Endpoints []string - Secret string - System *core.System -} - -// Send sends the JSON encoded webhook to the global -// HTTP endpoints. 
-func (s *sender) Send(ctx context.Context, in *core.WebhookData) error { - if len(s.Endpoints) == 0 { - return nil - } - if s.match(in.Event, in.Action) == false { - return nil - } - wrapper := payload{ - WebhookData: in, - System: s.System, - } - data, _ := json.Marshal(wrapper) - for _, endpoint := range s.Endpoints { - err := s.send(endpoint, s.Secret, in.Event, data) - if err != nil { - return err - } - } - return nil -} - -func (s *sender) send(endpoint, secret, event string, data []byte) error { - ctx := context.Background() - ctx, cancel := context.WithTimeout(ctx, time.Minute) - defer cancel() - - buf := bytes.NewBuffer(data) - req, err := http.NewRequest("POST", endpoint, buf) - if err != nil { - return err - } - - req = req.WithContext(ctx) - req.Header.Add("X-Drone-Event", event) - req.Header.Add("Content-Type", "application/json") - req.Header.Add("Digest", "SHA-256="+digest(data)) - req.Header.Add("Date", time.Now().UTC().Format(http.TimeFormat)) - err = signer.SignRequest("hmac-key", s.Secret, req) - if err != nil { - return err - } - res, err := s.client().Do(req) - if res != nil { - res.Body.Close() - } - return err -} - -func (s *sender) match(event, action string) bool { - if len(s.Events) == 0 { - return true - } - var name string - switch { - case action == "": - name = event - case action != "": - name = event + ":" + action - } - for _, pattern := range s.Events { - if ok, _ := filepath.Match(pattern, name); ok { - return true - } - } - return false -} - -func (s *sender) client() *http.Client { - if s.Client == nil { - return http.DefaultClient - } - return s.Client -} - -func digest(data []byte) string { - h := sha256.New() - h.Write(data) - return base64.StdEncoding.EncodeToString(h.Sum(nil)) -} diff --git a/plugin/webhook/webhook_oss.go b/plugin/webhook/webhook_oss.go deleted file mode 100644 index 4bba616869..0000000000 --- a/plugin/webhook/webhook_oss.go +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build oss - -package webhook - -import ( - "context" - - "github.com/drone/drone/core" -) - -// New returns a no-op Webhook sender. -func New(Config) core.WebhookSender { - return new(noop) -} - -type noop struct{} - -func (noop) Send(context.Context, *core.WebhookData) error { - return nil -} diff --git a/plugin/webhook/webhook_test.go b/plugin/webhook/webhook_test.go deleted file mode 100644 index 4e4b7d7fcf..0000000000 --- a/plugin/webhook/webhook_test.go +++ /dev/null @@ -1,163 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
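The webhook sender above attaches a Digest header (the base64-encoded SHA-256 of the JSON body) and signs the date and digest headers with HMAC-SHA256 through the httpsignatures library. The sketch below reproduces the digest calculation and illustrates the HMAC step with the standard library only; it does not claim to match that library's exact Signature header format:

package main

import (
    "crypto/hmac"
    "crypto/sha256"
    "encoding/base64"
    "fmt"
    "net/http"
    "time"
)

// digest returns the Digest header value used above: the
// base64-encoded SHA-256 of the request body.
func digest(body []byte) string {
    sum := sha256.Sum256(body)
    return "SHA-256=" + base64.StdEncoding.EncodeToString(sum[:])
}

// sign computes an HMAC-SHA256 over the date and digest values. The
// signing-string layout here is illustrative; the deleted sender
// delegates the real construction to httpsignatures-go.
func sign(secret, date, digestValue string) string {
    mac := hmac.New(sha256.New, []byte(secret))
    fmt.Fprintf(mac, "date: %s\ndigest: %s", date, digestValue)
    return base64.StdEncoding.EncodeToString(mac.Sum(nil))
}

func main() {
    body := []byte(`{"event":"user","action":"created"}`)
    date := time.Now().UTC().Format(http.TimeFormat)
    d := digest(body)
    fmt.Println("Digest:", d)
    fmt.Println("Signature:", sign("correct-horse-battery-staple", date, d))
}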
- -// +build !oss - -package webhook - -import ( - "context" - "net/http" - "testing" - - "github.com/drone/drone/core" - - "github.com/99designs/httpsignatures-go" - "github.com/h2non/gock" -) - -var noContext = context.Background() - -func TestWebhook(t *testing.T) { - defer gock.Off() - - webhook := &core.WebhookData{ - Event: core.WebhookEventUser, - Action: core.WebhookActionCreated, - User: &core.User{Login: "octocat"}, - } - - matchSignature := func(r *http.Request, _ *gock.Request) (bool, error) { - signature, err := httpsignatures.FromRequest(r) - if err != nil { - return false, err - } - return signature.IsValid("GMEuUHQfmrMRsseWxi9YlIeBtn9lm6im", r), nil - } - - gock.New("https://company.com"). - Post("/hooks"). - AddMatcher(matchSignature). - MatchHeader("X-Drone-Event", "user"). - MatchHeader("Content-Type", "application/json"). - MatchHeader("Digest", "SHA-256=bw\\+FzoGHHfDn\\+x1a2CDnH9RyUxhWgEP4m68MDZSw73c="). - JSON(webhook). - Reply(200). - Type("application/json") - - config := Config{ - Endpoint: []string{"https://company.com/hooks"}, - Secret: "GMEuUHQfmrMRsseWxi9YlIeBtn9lm6im", - } - sender := New(config) - err := sender.Send(noContext, webhook) - if err != nil { - t.Error(err) - } - - if gock.IsPending() { - t.Errorf("Unfinished requests") - } -} - -func TestWebhook_CustomClient(t *testing.T) { - sender := new(sender) - if sender.client() != http.DefaultClient { - t.Errorf("Expect default http client") - } - - custom := &http.Client{} - sender.Client = custom - if sender.client() != custom { - t.Errorf("Expect custom http client") - } -} - -func TestWebhook_NoEndpoints(t *testing.T) { - webhook := &core.WebhookData{ - Event: core.WebhookEventUser, - Action: core.WebhookActionCreated, - User: &core.User{Login: "octocat"}, - } - - config := Config{ - Endpoint: []string{}, - Secret: "correct-horse-battery-staple", - } - sender := New(config) - err := sender.Send(noContext, webhook) - if err != nil { - t.Error(err) - } -} - -func TestWebhook_NoMatch(t *testing.T) { - webhook := &core.WebhookData{ - Event: core.WebhookEventUser, - Action: core.WebhookActionCreated, - User: &core.User{Login: "octocat"}, - } - - config := Config{ - Events: []string{"repo:disabled"}, - Endpoint: []string{"https://localhost:1234"}, - Secret: "correct-horse-battery-staple", - } - sender := New(config) - err := sender.Send(noContext, webhook) - if err != nil { - t.Error(err) - } -} - -func TestWebhook_Match(t *testing.T) { - tests := []struct { - events []string - event string - action string - matched bool - }{ - { - event: "repo", - action: "enabled", - matched: true, - }, - { - events: []string{"user", "repo"}, - event: "repo", - matched: true, - }, - { - events: []string{"repo:disabled", "repo:enabled"}, - event: "repo", - action: "enabled", - matched: true, - }, - { - events: []string{"repo:disabled", "repo:*"}, - event: "repo", - action: "enabled", - matched: true, - }, - { - events: []string{"repo:disabled", "user:created"}, - event: "repo", - action: "enabled", - matched: false, - }, - { - events: []string{"repo", "user"}, - event: "repo", - action: "enabled", - matched: false, - }, - } - for i, test := range tests { - s := new(sender) - s.Events = test.events - if s.match(test.event, test.action) != test.matched { - t.Errorf("Expect matched %v at index %d", test.matched, i) - } - } -} diff --git a/pubsub/doc.go b/pubsub/doc.go deleted file mode 100644 index 4253db8daa..0000000000 --- a/pubsub/doc.go +++ /dev/null @@ -1,18 +0,0 @@ -// Copyright 2019 Drone IO, Inc. 
-// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Package pubsub implements a thread safe publish subscriber, -// allowing multiple publishers to distribute messages to -// multiple subscribers. -package pubsub diff --git a/pubsub/hub.go b/pubsub/hub.go deleted file mode 100644 index 42143974d0..0000000000 --- a/pubsub/hub.go +++ /dev/null @@ -1,73 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package pubsub - -import ( - "context" - "sync" - - "github.com/drone/drone/core" -) - -type hub struct { - sync.Mutex - - subs map[*subscriber]struct{} -} - -// newHub creates a new publish subscriber. -func newHub() core.Pubsub { - return &hub{ - subs: map[*subscriber]struct{}{}, - } -} - -func (h *hub) Publish(ctx context.Context, e *core.Message) error { - h.Lock() - for s := range h.subs { - s.publish(e) - } - h.Unlock() - return nil -} - -func (h *hub) Subscribe(ctx context.Context) (<-chan *core.Message, <-chan error) { - h.Lock() - s := &subscriber{ - handler: make(chan *core.Message, 100), - quit: make(chan struct{}), - } - h.subs[s] = struct{}{} - h.Unlock() - errc := make(chan error) - go func() { - defer close(errc) - select { - case <-ctx.Done(): - h.Lock() - delete(h.subs, s) - h.Unlock() - s.close() - } - }() - return s.handler, errc -} - -func (h *hub) Subscribers() (int, error) { - h.Lock() - c := len(h.subs) - h.Unlock() - return c, nil -} diff --git a/pubsub/hub_redis.go b/pubsub/hub_redis.go deleted file mode 100644 index 8b546d6e69..0000000000 --- a/pubsub/hub_redis.go +++ /dev/null @@ -1,124 +0,0 @@ -// Copyright 2021 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -// +build !oss - -package pubsub - -import ( - "context" - "encoding/json" - "fmt" - "os" - "sync" - - "github.com/drone/drone/core" - "github.com/drone/drone/service/redisdb" -) - -const ( - redisPubSubEvents = "drone-events" - redisPubSubCapacity = 100 -) - -func newHubRedis(r redisdb.RedisDB) core.Pubsub { - h := &hubRedis{ - rdb: r, - subscribers: make(map[chan<- *core.Message]struct{}), - } - - go r.Subscribe(context.Background(), redisPubSubEvents, redisPubSubCapacity, h) - - return h -} - -type hubRedis struct { - sync.Mutex - rdb redisdb.RedisDB - subscribers map[chan<- *core.Message]struct{} -} - -// Publish publishes a new message. All subscribers will get it. -func (h *hubRedis) Publish(ctx context.Context, e *core.Message) (err error) { - client := h.rdb.Client() - - data, err := json.Marshal(e) - if err != nil { - return - } - - _, err = client.Publish(ctx, redisPubSubEvents, data).Result() - if err != nil { - return - } - - return -} - -// Subscribe add a new subscriber. The subscriber gets event until its context is not finished. -func (h *hubRedis) Subscribe(ctx context.Context) (<-chan *core.Message, <-chan error) { - chMessage := make(chan *core.Message, redisPubSubCapacity) - chErr := make(chan error) - - h.Lock() - h.subscribers[chMessage] = struct{}{} - h.Unlock() - - go func() { - <-ctx.Done() - - h.Lock() - delete(h.subscribers, chMessage) - h.Unlock() - - close(chMessage) - close(chErr) - }() - - return chMessage, chErr -} - -// Subscribers returns number of subscribers. -func (h *hubRedis) Subscribers() (int, error) { - h.Lock() - n := len(h.subscribers) - h.Unlock() - - return n, nil -} - -// ProcessMessage relays the message to all subscribers listening to drone events. -// It is a part of redisdb.PubSubProcessor implementation and it's called internally by redisdb.Subscribe. -func (h *hubRedis) ProcessMessage(s string) { - message := &core.Message{} - err := json.Unmarshal([]byte(s), message) - if err != nil { - // Ignore invalid messages. This is a "should not happen" situation, - // because messages are encoded as json in Publish(). - _, _ = fmt.Fprintf(os.Stderr, "pubsub/redis: failed to unmarshal a message. %s\n", err) - return - } - - h.Lock() - for ss := range h.subscribers { - select { - case ss <- message: - default: // messages are lost if a subscriber channel reaches its capacity - } - } - h.Unlock() -} - -// ProcessError is a part of redisdb.PubSubProcessor implementation. -func (h *hubRedis) ProcessError(error) {} diff --git a/pubsub/hub_test.go b/pubsub/hub_test.go deleted file mode 100644 index 866eb888de..0000000000 --- a/pubsub/hub_test.go +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -// +build !oss - -package pubsub - -import ( - "context" - "sync" - "testing" - - "github.com/drone/drone/core" -) - -func TestBus(t *testing.T) { - ctx, cancel := context.WithCancel(context.Background()) - defer cancel() - - p := newHub() - events, errc := p.Subscribe(ctx) - - got, err := p.Subscribers() - if err != nil { - t.Errorf("Test failed with an error: %s", err.Error()) - return - } - - if want := 1; got != want { - t.Errorf("Want %d subscribers, got %d", want, got) - } - - w := sync.WaitGroup{} - w.Add(1) - go func() { - p.Publish(ctx, new(core.Message)) - p.Publish(ctx, new(core.Message)) - p.Publish(ctx, new(core.Message)) - w.Done() - }() - w.Wait() - - w.Add(3) - go func() { - for { - select { - case <-errc: - return - case <-events: - w.Done() - } - } - }() - w.Wait() - - cancel() -} diff --git a/pubsub/pubsub.go b/pubsub/pubsub.go deleted file mode 100644 index f4f2db160e..0000000000 --- a/pubsub/pubsub.go +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright 2021 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build !oss - -package pubsub - -import ( - "github.com/drone/drone/core" - "github.com/drone/drone/service/redisdb" -) - -// New creates a new publish subscriber. If Redis client passed as parameter is not nil it uses -// a Redis implementation, otherwise it uses an in-memory implementation. -func New(r redisdb.RedisDB) core.Pubsub { - if r != nil { - return newHubRedis(r) - } - - return newHub() -} diff --git a/pubsub/pubsub_oss.go b/pubsub/pubsub_oss.go deleted file mode 100644 index 85a01fd69f..0000000000 --- a/pubsub/pubsub_oss.go +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright 2021 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build oss - -package pubsub - -import ( - "github.com/drone/drone/core" - "github.com/drone/drone/service/redisdb" -) - -// New creates a new in-memory publish subscriber. -func New(r redisdb.RedisDB) core.Pubsub { - return newHub() -} diff --git a/pubsub/sub.go b/pubsub/sub.go deleted file mode 100644 index 00f835c7b1..0000000000 --- a/pubsub/sub.go +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package pubsub - -import ( - "sync" - - "github.com/drone/drone/core" -) - -type subscriber struct { - sync.Mutex - - handler chan *core.Message - quit chan struct{} - done bool -} - -func (s *subscriber) publish(event *core.Message) { - select { - case <-s.quit: - case s.handler <- event: - default: - // events are sent on a buffered channel. If there - // is a slow consumer that is not processing events, - // the buffered channel will fill and newer messages - // are ignored. - } -} - -func (s *subscriber) close() { - s.Lock() - if s.done == false { - close(s.quit) - s.done = true - } - s.Unlock() -} diff --git a/pubsub/sub_test.go b/pubsub/sub_test.go deleted file mode 100644 index 1ac4f88e71..0000000000 --- a/pubsub/sub_test.go +++ /dev/null @@ -1,85 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package pubsub - -import ( - "testing" - - "github.com/drone/drone/core" -) - -func nop(*core.Message) {} - -func TestSubscription_publish(t *testing.T) { - s := &subscriber{ - handler: make(chan *core.Message, 5), - quit: make(chan struct{}), - } - - e := new(core.Message) - s.publish(e) - - if got, want := len(s.handler), 1; got != want { - t.Errorf("Want buffered channel size %d, got %d", want, got) - } - if got, want := <-s.handler, e; got != want { - t.Errorf("Want event received from channel") - } - if got, want := len(s.handler), 0; got != want { - t.Errorf("Want buffered channel size %d, got %d", want, got) - } -} - -func TestSubscription_buffer(t *testing.T) { - s := &subscriber{ - handler: make(chan *core.Message, 1), - quit: make(chan struct{}), - } - - // the buffer size is 1 to simulate what happens - // if the subscriber cannot keep up with processing - // and the buffer fills up. In this case, events - // should be ignored until pending events are - // processed. - - e := new(core.Message) - s.publish(e) - s.publish(e) - s.publish(e) - s.publish(e) - s.publish(e) - - if got, want := len(s.handler), 1; got != want { - t.Errorf("Want buffered channel size %d, got %d", want, got) - } -} - -func TestSubscription_stop(t *testing.T) { - s := &subscriber{ - handler: make(chan *core.Message, 1), - quit: make(chan struct{}), - } - - if got, want := s.done, false; got != want { - t.Errorf("Want subscription open") - } - - s.close() - if got, want := s.done, true; got != want { - t.Errorf("Want subscription closed") - } - - // if the subscription is closed we should - // ignore any new events being published. - - e := new(core.Message) - s.publish(e) - s.publish(e) - s.publish(e) - s.publish(e) - s.publish(e) -} diff --git a/scheduler/queue/canceller.go b/scheduler/queue/canceller.go deleted file mode 100644 index 458076cdbb..0000000000 --- a/scheduler/queue/canceller.go +++ /dev/null @@ -1,90 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package queue - -import ( - "context" - "sync" - "time" -) - -type canceller struct { - sync.Mutex - - subscribers map[chan struct{}]int64 - cancelled map[int64]time.Time -} - -func newCanceller() *canceller { - return &canceller{ - subscribers: make(map[chan struct{}]int64), - cancelled: make(map[int64]time.Time), - } -} - -func (c *canceller) Cancel(ctx context.Context, id int64) error { - c.Lock() - c.cancelled[id] = time.Now().Add(time.Minute * 5) - for subscriber, build := range c.subscribers { - if id == build { - close(subscriber) - } - } - c.collect() - c.Unlock() - return nil -} - -func (c *canceller) Cancelled(ctx context.Context, id int64) (bool, error) { - subscriber := make(chan struct{}) - c.Lock() - c.subscribers[subscriber] = id - c.Unlock() - - defer func() { - c.Lock() - delete(c.subscribers, subscriber) - c.Unlock() - }() - - for { - select { - case <-ctx.Done(): - return false, ctx.Err() - case <-time.After(time.Minute): - c.Lock() - _, ok := c.cancelled[id] - c.Unlock() - if ok { - return true, nil - } - case <-subscriber: - return true, nil - } - } -} - -func (c *canceller) collect() { - // the list of cancelled builds is stored with a ttl, and - // is not removed until the ttl is reached. This provides - // adequate window for clients with connectivity issues to - // reconnect and receive notification of cancel events. - now := time.Now() - for build, timestamp := range c.cancelled { - if now.After(timestamp) { - delete(c.cancelled, build) - } - } -} diff --git a/scheduler/queue/canceller_redis.go b/scheduler/queue/canceller_redis.go deleted file mode 100644 index d43a976f87..0000000000 --- a/scheduler/queue/canceller_redis.go +++ /dev/null @@ -1,160 +0,0 @@ -// Copyright 2021 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -// +build !oss - -package queue - -import ( - "context" - "fmt" - "os" - "strconv" - "sync" - "time" - - "github.com/drone/drone/service/redisdb" - - "github.com/go-redis/redis/v8" -) - -const ( - redisPubSubCancel = "drone-cancel" - redisCancelValuePrefix = "drone-cancel-" - redisCancelValueTimeout = 5 * time.Minute - redisCancelValue = "canceled" -) - -func newCancellerRedis(r redisdb.RedisDB) *cancellerRedis { - h := &cancellerRedis{ - rdb: r, - subscribers: make(map[*cancelSubscriber]struct{}), - } - - go r.Subscribe(context.Background(), redisPubSubCancel, 1, h) - - return h -} - -type cancellerRedis struct { - rdb redisdb.RedisDB - subscribers map[*cancelSubscriber]struct{} - sync.Mutex -} - -type cancelSubscriber struct { - id int64 - ch chan<- error -} - -// Cancel informs all subscribers that a build with the provided id is cancelled. -func (c *cancellerRedis) Cancel(ctx context.Context, id int64) (err error) { - client := c.rdb.Client() - - ids := strconv.FormatInt(id, 10) - - // publish a cancel event to all subscribers (runners) waiting to - _, err = client.Publish(ctx, redisPubSubCancel, ids).Result() - if err != nil { - return - } - - // put a limited duration value in case a runner isn't listening currently. - _, err = client.Set(ctx, redisCancelValuePrefix+ids, redisCancelValue, redisCancelValueTimeout).Result() - if err != nil { - return - } - - return -} - -// Cancelled waits until it gets info that a build with the provided id is cancelled. -// The waiting is aborted when the provided context is done. -func (c *cancellerRedis) Cancelled(ctx context.Context, id int64) (isCancelled bool, err error) { - client := c.rdb.Client() - - ids := strconv.FormatInt(id, 10) - - // first check if the build is already cancelled - - result, err := client.Get(ctx, redisCancelValuePrefix+ids).Result() - if err != nil && err != redis.Nil { - return - } - - isCancelled = err != redis.Nil && result == redisCancelValue - if isCancelled { - return - } - - // if it is not cancelled, subscribe and listen to cancel build events - // until the context is cancelled or until the build is cancelled. - - ch := make(chan error) - sub := &cancelSubscriber{id: id, ch: ch} - - c.Lock() - c.subscribers[sub] = struct{}{} - c.Unlock() - - select { - case err = <-ch: - // If the build is cancelled or an error happened, - // than the subscriber is removed from the set by other go routine - isCancelled = err != nil - case <-ctx.Done(): - // If the context is cancelled then the subscriber must be be removed here. - c.Lock() - delete(c.subscribers, sub) - c.Unlock() - } - - return -} - -// ProcessMessage informs all subscribers listening to cancellation that the build with this id is cancelled. -// It is a part of redisdb.PubSubProcessor implementation and it's called internally by Subscribe. -func (c *cancellerRedis) ProcessMessage(s string) { - id, err := strconv.ParseInt(s, 10, 64) - if err != nil { - // Ignore invalid messages. This is a "should not happen" situation, - // because all messages are integers as strings in method Cancel(). - _, _ = fmt.Fprintf(os.Stderr, "canceller/redis: message is not an integer: %s\n", s) - return - } - - c.Lock() - for ss := range c.subscribers { - if ss.id == id { - ss.ch <- nil - close(ss.ch) - delete(c.subscribers, ss) - } - } - c.Unlock() -} - -// ProcessError informs all subscribers that an error happened and clears the set of subscribers. 
-// The set of subscribers is cleared because each subscriber receives only one message, -// so an error could cause that the message is missed - it's safer to return an error. -// It is a part of redisdb.PubSubProcessor implementation and it's called internally by Subscribe. -func (c *cancellerRedis) ProcessError(err error) { - c.Lock() - for ss := range c.subscribers { - ss.ch <- err - close(ss.ch) - delete(c.subscribers, ss) - } - c.Unlock() -} diff --git a/scheduler/queue/canceller_test.go b/scheduler/queue/canceller_test.go deleted file mode 100644 index d09df161fe..0000000000 --- a/scheduler/queue/canceller_test.go +++ /dev/null @@ -1,36 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package queue - -import ( - "context" - "testing" - "time" -) - -var noContext = context.Background() - -func TestCollect(t *testing.T) { - c := newCanceller() - c.Cancel(noContext, 1) - c.Cancel(noContext, 2) - c.Cancel(noContext, 3) - c.Cancel(noContext, 4) - c.Cancel(noContext, 5) - c.cancelled[3] = c.cancelled[3].Add(time.Minute * -1) - c.cancelled[4] = time.Now().Add(time.Second * -1) - c.cancelled[5] = time.Now().Add(time.Second * -1) - c.collect() - - if got, want := len(c.cancelled), 3; got != want { - t.Errorf("Want 3 cancelled builds in the cache, got %d", got) - } - if _, ok := c.cancelled[4]; ok { - t.Errorf("Expect build id [4] removed") - } - if _, ok := c.cancelled[5]; ok { - t.Errorf("Expect build id [5] removed") - } -} diff --git a/scheduler/queue/queue.go b/scheduler/queue/queue.go deleted file mode 100644 index 255130c179..0000000000 --- a/scheduler/queue/queue.go +++ /dev/null @@ -1,348 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package queue - -import ( - "context" - "sync" - "time" - - "github.com/drone/drone/core" - "github.com/drone/drone/service/redisdb" - - "github.com/drone/drone-go/drone" -) - -type queue struct { - sync.Mutex - globMx redisdb.LockErr - - ready chan struct{} - paused bool - interval time.Duration - throttle int - store core.StageStore - workers map[*worker]struct{} - ctx context.Context -} - -// newQueue returns a new Queue backed by the build datastore. 
-func newQueue(ctx context.Context, store core.StageStore) *queue { - q := &queue{ - store: store, - globMx: redisdb.LockErrNoOp{}, - ready: make(chan struct{}, 1), - workers: map[*worker]struct{}{}, - interval: time.Minute, - ctx: ctx, - } - go q.start() - return q -} - -func (q *queue) Schedule(ctx context.Context, stage *core.Stage) error { - select { - case q.ready <- struct{}{}: - default: - } - return nil -} - -func (q *queue) Pause(ctx context.Context) error { - q.Lock() - q.paused = true - q.Unlock() - return nil -} - -func (q *queue) Paused(ctx context.Context) (bool, error) { - q.Lock() - paused := q.paused - q.Unlock() - return paused, nil -} - -func (q *queue) Resume(ctx context.Context) error { - q.Lock() - q.paused = false - q.Unlock() - - select { - case q.ready <- struct{}{}: - default: - } - return nil -} - -func (q *queue) Request(ctx context.Context, params core.Filter) (*core.Stage, error) { - ctx, cancel := context.WithCancel(ctx) - defer cancel() - w := &worker{ - kind: params.Kind, - typ: params.Type, - os: params.OS, - arch: params.Arch, - kernel: params.Kernel, - variant: params.Variant, - labels: params.Labels, - channel: make(chan *core.Stage), - done: ctx.Done(), - } - q.Lock() - q.workers[w] = struct{}{} - q.Unlock() - - select { - case q.ready <- struct{}{}: - default: - } - - select { - case <-ctx.Done(): - q.Lock() - delete(q.workers, w) - q.Unlock() - return nil, ctx.Err() - case b := <-w.channel: - return b, nil - } -} - -func (q *queue) signal(ctx context.Context) error { - if err := q.globMx.LockContext(ctx); err != nil { - return err - } - defer q.globMx.UnlockContext(ctx) - - q.Lock() - count := len(q.workers) - pause := q.paused - q.Unlock() - if pause { - return nil - } - if count == 0 { - return nil - } - items, err := q.store.ListIncomplete(ctx) - if err != nil { - return err - } - - q.Lock() - defer q.Unlock() - for _, item := range items { - if item.Status == core.StatusRunning { - continue - } - if item.Machine != "" { - continue - } - - // if the stage defines concurrency limits we - // need to make sure those limits are not exceeded - // before proceeding. - if withinLimits(item, items) == false { - continue - } - - // if the system defines concurrency limits - // per repository we need to make sure those limits - // are not exceeded before proceeding. - if shouldThrottle(item, items, item.LimitRepo) == true { - continue - } - - loop: - for w := range q.workers { - // the worker must match the resource kind and type - if !matchResource(w.kind, w.typ, item.Kind, item.Type) { - continue - } - - if w.os != "" || w.arch != "" || w.variant != "" || w.kernel != "" { - // the worker is platform-specific. check to ensure - // the queue item matches the worker platform. - if w.os != item.OS { - continue - } - if w.arch != item.Arch { - continue - } - // if the pipeline defines a variant it must match - // the worker variant (e.g. arm6, arm7, etc). - if item.Variant != "" && item.Variant != w.variant { - continue - } - // if the pipeline defines a kernel version it must match - // the worker kernel version (e.g. 1709, 1803). - if item.Kernel != "" && item.Kernel != w.kernel { - continue - } - } - - if len(item.Labels) > 0 || len(w.labels) > 0 { - if !checkLabels(item.Labels, w.labels) { - continue - } - } - - // // the queue has 60 seconds to ack the item, otherwise - // // it is eligible for processing by another worker. 
- // // item.Expires = time.Now().Add(time.Minute).Unix() - // err := q.store.Update(ctx, item) - - // if err != nil { - // log.Ctx(ctx).Warn(). - // Err(err). - // Int64("build_id", item.BuildID). - // Int64("stage_id", item.ID). - // Msg("cannot update queue item") - // continue - // } - - // TODO: refactor to its own unexported method - sendWork := func() bool { - select { - case w.channel <- item: - return true - case <-w.done: - // Worker will exit when we call the deferred q.Unlock() - case <-time.After(q.interval): - // Worker failed to ack before timeout - } - return false - } - if sendWork() { - delete(q.workers, w) - break loop - } - } - } - return nil -} - -func (q *queue) start() error { - for { - select { - case <-q.ctx.Done(): - return q.ctx.Err() - case <-q.ready: - q.signal(q.ctx) - case <-time.After(q.interval): - q.signal(q.ctx) - } - } -} - -type worker struct { - kind string - typ string - os string - arch string - kernel string - variant string - labels map[string]string - channel chan *core.Stage - done <-chan struct{} -} - -type counter struct { - counts map[string]int -} - -func checkLabels(a, b map[string]string) bool { - if len(a) != len(b) { - return false - } - for k, v := range a { - if w, ok := b[k]; !ok || v != w { - return false - } - } - return true -} - -func withinLimits(stage *core.Stage, siblings []*core.Stage) bool { - if stage.Limit == 0 { - return true - } - count := 0 - for _, sibling := range siblings { - if sibling.RepoID != stage.RepoID { - continue - } - if sibling.ID == stage.ID { - continue - } - if sibling.Name != stage.Name { - continue - } - if sibling.ID < stage.ID || - sibling.Status == core.StatusRunning { - count++ - } - } - return count < stage.Limit -} - -func shouldThrottle(stage *core.Stage, siblings []*core.Stage, limit int) bool { - // if no throttle limit is defined (default) then - // return false to indicate no throttling is needed. - if limit == 0 { - return false - } - // if the repository is running it is too late - // to skip and we can exit - if stage.Status == drone.StatusRunning { - return false - } - - count := 0 - // loop through running stages to count number of - // running stages for the parent repository. - for _, sibling := range siblings { - // ignore stages from other repository. - if sibling.RepoID != stage.RepoID { - continue - } - // ignore this stage and stages that were - // scheduled after this stage. - if sibling.ID >= stage.ID { - continue - } - count++ - } - // if the count of running stages exceeds the - // throttle limit return true. - return count >= limit -} - -// matchResource is a helper function that returns -func matchResource(kinda, typea, kindb, typeb string) bool { - if kinda == "" { - kinda = "pipeline" - } - if kindb == "" { - kindb = "pipeline" - } - if typea == "" { - typea = "docker" - } - if typeb == "" { - typeb = "docker" - } - return kinda == kindb && typea == typeb -} diff --git a/scheduler/queue/queue_test.go b/scheduler/queue/queue_test.go deleted file mode 100644 index 892434607c..0000000000 --- a/scheduler/queue/queue_test.go +++ /dev/null @@ -1,418 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -package queue - -import ( - "context" - "math/rand" - "sync" - "testing" - "time" - - "github.com/drone/drone-go/drone" - "github.com/drone/drone/core" - "github.com/drone/drone/mock" - - "github.com/golang/mock/gomock" -) - -func TestQueue(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - items := []*core.Stage{ - {ID: 3, OS: "linux", Arch: "amd64"}, - {ID: 2, OS: "linux", Arch: "amd64"}, - {ID: 1, OS: "linux", Arch: "amd64"}, - } - - ctx := context.Background() - store := mock.NewMockStageStore(controller) - store.EXPECT().ListIncomplete(ctx).Return(items, nil).Times(1) - store.EXPECT().ListIncomplete(ctx).Return(items[1:], nil).Times(1) - store.EXPECT().ListIncomplete(ctx).Return(items[2:], nil).Times(1) - - q := newQueue(ctx, store) - for _, item := range items { - next, err := q.Request(ctx, core.Filter{OS: "linux", Arch: "amd64"}) - if err != nil { - t.Error(err) - return - } - if got, want := next, item; got != want { - t.Errorf("Want build %d, got %d", want.ID, got.ID) - } - } -} - -func TestQueueCancel(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - ctx, cancel := context.WithCancel(context.Background()) - store := mock.NewMockStageStore(controller) - store.EXPECT().ListIncomplete(ctx).Return(nil, nil) - - q := newQueue(ctx, store) - - var wg sync.WaitGroup - wg.Add(1) - - go func() { - build, err := q.Request(ctx, core.Filter{OS: "linux/amd64", Arch: "amd64"}) - if err != context.Canceled { - t.Errorf("Expected context.Canceled error, got %s", err) - } - if build != nil { - t.Errorf("Expect nil build when subscribe canceled") - } - wg.Done() - }() - <-time.After(10 * time.Millisecond) - - q.Lock() - count := len(q.workers) - q.Unlock() - - if got, want := count, 1; got != want { - t.Errorf("Want %d listener, got %d", want, got) - } - - cancel() - wg.Wait() -} - -func TestQueuePush(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - item1 := &core.Stage{ - ID: 1, - OS: "linux", - Arch: "amd64", - } - item2 := &core.Stage{ - ID: 2, - OS: "linux", - Arch: "amd64", - } - - ctx := context.Background() - store := mock.NewMockStageStore(controller) - - q := &queue{ - store: store, - ready: make(chan struct{}, 1), - } - q.Schedule(ctx, item1) - q.Schedule(ctx, item2) - select { - case <-q.ready: - case <-time.After(time.Millisecond): - t.Errorf("Expect queue signaled on push") - } -} - -func TestMatchResource(t *testing.T) { - tests := []struct { - kinda, typea, kindb, typeb string - want bool - }{ - // unspecified in yaml, unspecified by agent - {"", "", "", "", true}, - - // unspecified in yaml, specified by agent - {"pipeline", "docker", "", "", true}, - {"pipeline", "", "", "", true}, - {"", "docker", "", "", true}, - - // specified in yaml, unspecified by agent - {"", "", "pipeline", "docker", true}, - {"", "", "pipeline", "", true}, - {"", "", "", "docker", true}, - - // specified in yaml, specified by agent - {"pipeline", "docker", "pipeline", "docker", true}, - {"pipeline", "exec", "pipeline", "docker", false}, - {"approval", "slack", "pipeline", "docker", false}, - - // misc - {"", "docker", "pipeline", "docker", true}, - {"pipeline", "", "pipeline", "docker", true}, - {"pipeline", "docker", "", "docker", true}, - {"pipeline", "docker", "pipeline", "", true}, - } - - for i, test := range tests { - got, want := matchResource(test.kinda, test.typea, test.kindb, test.typeb), test.want - if got != want { - t.Errorf("Unexpected results at index %d", i) - } - } -} - 
-func TestShouldThrottle(t *testing.T) { - tests := []struct { - ID int64 - RepoID int64 - Status string - Limit int - Want bool - }{ - // repo 1: 2 running, 1 pending - {Want: false, ID: 1, RepoID: 1, Status: drone.StatusRunning, Limit: 2}, - {Want: false, ID: 2, RepoID: 1, Status: drone.StatusRunning, Limit: 2}, - {Want: true, ID: 3, RepoID: 1, Status: drone.StatusPending, Limit: 2}, - - // repo 2: 1 running, 1 pending - {Want: false, ID: 4, RepoID: 2, Status: drone.StatusRunning, Limit: 2}, - {Want: false, ID: 5, RepoID: 2, Status: drone.StatusPending, Limit: 2}, - - // repo 3: 3 running, 1 pending - {Want: false, ID: 6, RepoID: 3, Status: drone.StatusRunning, Limit: 2}, - {Want: false, ID: 7, RepoID: 3, Status: drone.StatusRunning, Limit: 2}, - {Want: false, ID: 8, RepoID: 3, Status: drone.StatusRunning, Limit: 2}, - {Want: true, ID: 9, RepoID: 3, Status: drone.StatusPending, Limit: 2}, - - // repo 4: 2 running, 1 pending, no limit - {Want: false, ID: 10, RepoID: 4, Status: drone.StatusRunning, Limit: 0}, - {Want: false, ID: 11, RepoID: 4, Status: drone.StatusRunning, Limit: 0}, - {Want: false, ID: 12, RepoID: 4, Status: drone.StatusPending, Limit: 0}, - } - var stages []*core.Stage - for _, test := range tests { - stages = append(stages, &core.Stage{ - ID: test.ID, - RepoID: test.RepoID, - Status: test.Status, - LimitRepo: test.Limit, - }) - } - for i, test := range tests { - stage := stages[i] - if got, want := shouldThrottle(stage, stages, stage.LimitRepo), test.Want; got != want { - t.Errorf("Unexpected results at index %d", i) - } - } -} - -func TestWithinLimits(t *testing.T) { - tests := []struct { - result bool - stage *core.Stage - stages []*core.Stage - }{ - // multiple stages executing for same repository and with same - // name, but no concurrency limits exist. expect true. - { - result: true, - stage: &core.Stage{ - ID: 3, RepoID: 1, Name: "build", Limit: 0, - }, - stages: []*core.Stage{ - {ID: 1, RepoID: 1, Name: "build", Status: "running"}, - {ID: 2, RepoID: 1, Name: "build", Status: "running"}, - {ID: 3, RepoID: 1, Name: "build", Status: "pending"}, - }, - }, - - // stage with concurrency 1, no existing stages - // exist for same repository id. expect true. - { - result: true, - stage: &core.Stage{ - ID: 3, RepoID: 2, Name: "build", Limit: 0, - }, - stages: []*core.Stage{ - {ID: 1, RepoID: 1, Name: "build", Status: "running"}, - {ID: 2, RepoID: 1, Name: "build", Status: "running"}, - {ID: 3, RepoID: 2, Name: "build", Status: "pending"}, - }, - }, - - // stage with concurrency 1, no existing stages - // exist for same stage name. expect true. - { - result: true, - stage: &core.Stage{ - ID: 3, RepoID: 1, Name: "build", Limit: 0, - }, - stages: []*core.Stage{ - {ID: 1, RepoID: 1, Name: "test", Status: "running"}, - {ID: 2, RepoID: 1, Name: "test", Status: "running"}, - {ID: 3, RepoID: 1, Name: "build", Status: "pending"}, - }, - }, - - // single stage with concurrency 1, no existing stages - // exist. expect true. - { - result: true, - stage: &core.Stage{ - ID: 1, RepoID: 1, Name: "build", Limit: 1, - }, - stages: []*core.Stage{ - {ID: 1, RepoID: 1, Name: "build", Status: "pending"}, - }, - }, - - // stage with concurrency 1, other named stages - // exist in the queue, but they come after this stage. - // expect true. 
- { - result: true, - stage: &core.Stage{ - ID: 1, RepoID: 1, Name: "build", Limit: 1, - }, - stages: []*core.Stage{ - {ID: 1, RepoID: 1, Name: "build", Status: "pending"}, - {ID: 2, RepoID: 1, Name: "build", Status: "pending"}, - }, - }, - - // stage with concurrency 1, however, stage with same - // repository and name is already executing. expect false. - { - result: false, - stage: &core.Stage{ - ID: 2, RepoID: 1, Name: "build", Limit: 1, - }, - stages: []*core.Stage{ - {ID: 1, RepoID: 1, Name: "build", Status: "running"}, - {ID: 2, RepoID: 1, Name: "build", Status: "pending"}, - }, - }, - - // stage with concurrency 2. one existing stage in the - // queue before this stage. expect true. - { - result: true, - stage: &core.Stage{ - ID: 2, RepoID: 1, Name: "build", Limit: 2, - }, - stages: []*core.Stage{ - {ID: 1, RepoID: 1, Name: "build", Status: "running"}, - {ID: 2, RepoID: 1, Name: "build", Status: "pending"}, - {ID: 3, RepoID: 1, Name: "build", Status: "pending"}, - }, - }, - - // stage with concurrency 1. stages start out of order, and the - // second named stage starts before its predecessor. Its predecessor - // should not execute. expect false. - { - result: false, - stage: &core.Stage{ - ID: 1, RepoID: 1, Name: "build", Limit: 1, - }, - stages: []*core.Stage{ - {ID: 1, RepoID: 1, Name: "build", Status: "pending"}, - {ID: 2, RepoID: 1, Name: "build", Status: "running"}, - }, - }, - } - - for i, test := range tests { - if got, want := withinLimits(test.stage, test.stages), test.result; got != want { - t.Errorf("Unexpected results at index %d", i) - } - } -} - -func TestWithinLimits_Old(t *testing.T) { - tests := []struct { - ID int64 - RepoID int64 - Name string - Limit int - Want bool - }{ - {Want: true, ID: 1, RepoID: 1, Name: "foo"}, - {Want: true, ID: 2, RepoID: 2, Name: "bar", Limit: 1}, - {Want: true, ID: 3, RepoID: 1, Name: "bar", Limit: 1}, - {Want: false, ID: 4, RepoID: 1, Name: "bar", Limit: 1}, - {Want: false, ID: 5, RepoID: 1, Name: "bar", Limit: 1}, - {Want: true, ID: 6, RepoID: 1, Name: "baz", Limit: 2}, - {Want: true, ID: 7, RepoID: 1, Name: "baz", Limit: 2}, - {Want: false, ID: 8, RepoID: 1, Name: "baz", Limit: 2}, - {Want: false, ID: 9, RepoID: 1, Name: "baz", Limit: 2}, - {Want: true, ID: 10, RepoID: 1, Name: "baz", Limit: 0}, - } - var stages []*core.Stage - for _, test := range tests { - stages = append(stages, &core.Stage{ - ID: test.ID, - RepoID: test.RepoID, - Name: test.Name, - Limit: test.Limit, - }) - } - for i, test := range tests { - stage := stages[i] - if got, want := withinLimits(stage, stages), test.Want; got != want { - t.Errorf("Unexpected results at index %d", i) - } - } -} - -func incomplete(n int) ([]*core.Stage, error) { - ret := make([]*core.Stage, n) - for i := range ret { - ret[i] = &core.Stage{ - OS: "linux/amd64", - Arch: "amd64", - } - } - return ret, nil -} - -func TestQueueDeadlock(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - n := 10 - donechan := make(chan struct{}, n) - ctx, cancel := context.WithCancel(context.Background()) - defer cancel() - store := mock.NewMockStageStore(controller) - store.EXPECT().ListIncomplete(ctx).Return(incomplete(n)).AnyTimes() - - q := newQueue(ctx, store) - doWork := func(i int) bool { - select { - case <-ctx.Done(): - return false - default: - } - ctx, cancel := context.WithTimeout(ctx, - time.Duration(i+rand.Intn(1000/n))*time.Millisecond) - defer cancel() - if i%3 == 0 { - // Randomly cancel some contexts to simulate timeouts - cancel() - } - _, err := 
q.Request(ctx, core.Filter{OS: "linux/amd64", Arch: "amd64"}) - if err != nil && err != context.Canceled && err != - context.DeadlineExceeded { - t.Errorf("Expected context.Canceled or context.DeadlineExceeded error, got %s", err) - } - select { - case donechan <- struct{}{}: - case <-ctx.Done(): - } - return true - } - for i := 0; i < n; i++ { - go func(i int) { - // Spawn n workers, doing work until the parent context is canceled - for doWork(i) { - } - }(i) - } - // Wait for n * 10 tasks to complete, then exit and cancel all the workers. - for seen := 0; seen < n*10; seen++ { - <-donechan - } -} diff --git a/scheduler/queue/scheduler.go b/scheduler/queue/scheduler.go deleted file mode 100644 index 44620870f9..0000000000 --- a/scheduler/queue/scheduler.go +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright 2021 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package queue - -import ( - "context" - "errors" -) - -type scheduler struct { - *queue - *canceller -} - -func (d scheduler) Stats(context.Context) (interface{}, error) { - return nil, errors.New("not implemented") -} diff --git a/scheduler/queue/scheduler_non_oss.go b/scheduler/queue/scheduler_non_oss.go deleted file mode 100644 index 54830729e4..0000000000 --- a/scheduler/queue/scheduler_non_oss.go +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright 2021 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//go:build !oss -// +build !oss - -package queue - -import ( - "context" - "time" - - "github.com/drone/drone/core" - "github.com/drone/drone/service/redisdb" -) - -// New creates a new scheduler. -func New(store core.StageStore, r redisdb.RedisDB) core.Scheduler { - if r == nil { - return scheduler{ - queue: newQueue(context.Background(), store), - canceller: newCanceller(), - } - } - - sched := schedulerRedis{ - queue: newQueue(context.Background(), store), - cancellerRedis: newCancellerRedis(r), - } - - const globalMutexExpiryTime = 10 * time.Second - sched.globMx = r.NewMutex("drone-scheduler-mx", globalMutexExpiryTime) - - return sched -} diff --git a/scheduler/queue/scheduler_oss.go b/scheduler/queue/scheduler_oss.go deleted file mode 100644 index 88d7d01af4..0000000000 --- a/scheduler/queue/scheduler_oss.go +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2021 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//go:build oss -// +build oss - -package queue - -import ( - "context" - - "github.com/drone/drone/core" - "github.com/drone/drone/service/redisdb" -) - -// New creates a new scheduler. -func New(store core.StageStore, r redisdb.RedisDB) core.Scheduler { - return scheduler{ - queue: newQueue(context.Background(), store), - canceller: newCanceller(), - } -} diff --git a/scheduler/queue/scheduler_redis.go b/scheduler/queue/scheduler_redis.go deleted file mode 100644 index a14cf489f7..0000000000 --- a/scheduler/queue/scheduler_redis.go +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright 2021 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build !oss - -package queue - -import ( - "context" - "errors" -) - -type schedulerRedis struct { - *queue - *cancellerRedis -} - -func (d schedulerRedis) Stats(context.Context) (interface{}, error) { - return nil, errors.New("not implemented") -} diff --git a/scheduler/scheduler.go b/scheduler/scheduler.go deleted file mode 100644 index 4aba165f90..0000000000 --- a/scheduler/scheduler.go +++ /dev/null @@ -1,7 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package scheduler diff --git a/scripts/build.sh b/scripts/build.sh deleted file mode 100755 index 6012d0182a..0000000000 --- a/scripts/build.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/sh - -echo "building docker images for ${GOOS}/${GOARCH} ..." - -REPO="github.com/drone/drone" - -# compile the server using the cgo -go build -ldflags "-extldflags \"-static\"" -o release/linux/${GOARCH}/drone-server ${REPO}/cmd/drone-server diff --git a/server/server.go b/server/server.go deleted file mode 100644 index 93db19d852..0000000000 --- a/server/server.go +++ /dev/null @@ -1,167 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package server - -import ( - "context" - "crypto/tls" - "net/http" - "os" - "path/filepath" - "time" - - "golang.org/x/crypto/acme/autocert" - "golang.org/x/sync/errgroup" -) - -// A Server defines parameters for running an HTTP server. -type Server struct { - Acme bool - Email string - Addr string - Cert string - Key string - Host string - Handler http.Handler -} - -const timeoutGracefulShutdown = 5 * time.Second - -// ListenAndServe initializes a server to respond to HTTP network requests. -func (s Server) ListenAndServe(ctx context.Context) error { - if s.Acme { - return s.listenAndServeAcme(ctx) - } else if s.Key != "" { - return s.listenAndServeTLS(ctx) - } - err := s.listenAndServe(ctx) - if err == http.ErrServerClosed { - err = nil - } - return err -} - -func (s Server) listenAndServe(ctx context.Context) error { - var g errgroup.Group - s1 := &http.Server{ - Addr: s.Addr, - Handler: s.Handler, - } - g.Go(func() error { - <-ctx.Done() - - ctxShutdown, cancelFunc := context.WithTimeout(context.Background(), timeoutGracefulShutdown) - defer cancelFunc() - - return s1.Shutdown(ctxShutdown) - }) - g.Go(s1.ListenAndServe) - return g.Wait() -} - -func (s Server) listenAndServeTLS(ctx context.Context) error { - var g errgroup.Group - s1 := &http.Server{ - Addr: ":http", - Handler: http.HandlerFunc(redirect), - } - s2 := &http.Server{ - Addr: ":https", - Handler: s.Handler, - } - g.Go(s1.ListenAndServe) - g.Go(func() error { - return s2.ListenAndServeTLS( - s.Cert, - s.Key, - ) - }) - g.Go(func() error { - <-ctx.Done() - - var gShutdown errgroup.Group - ctxShutdown, cancelFunc := context.WithTimeout(context.Background(), timeoutGracefulShutdown) - defer cancelFunc() - - gShutdown.Go(func() error { - return s1.Shutdown(ctxShutdown) - }) - gShutdown.Go(func() error { - return s2.Shutdown(ctxShutdown) - }) - - return gShutdown.Wait() - }) - return g.Wait() -} - -func (s Server) listenAndServeAcme(ctx context.Context) error { - var g errgroup.Group - - c := cacheDir() - m := &autocert.Manager{ - Email: s.Email, - Cache: autocert.DirCache(c), - Prompt: autocert.AcceptTOS, - HostPolicy: autocert.HostWhitelist(s.Host), - } - s1 := &http.Server{ - Addr: ":http", - Handler: m.HTTPHandler(s.Handler), - } - s2 := &http.Server{ - Addr: ":https", - Handler: s.Handler, - TLSConfig: &tls.Config{ - GetCertificate: m.GetCertificate, - NextProtos: []string{"h2", "http/1.1"}, - MinVersion: tls.VersionTLS12, - }, - } - g.Go(s1.ListenAndServe) - g.Go(func() error { - return s2.ListenAndServeTLS("", "") - }) - g.Go(func() error { - <-ctx.Done() - - var gShutdown errgroup.Group - ctxShutdown, cancelFunc := context.WithTimeout(context.Background(), timeoutGracefulShutdown) - defer cancelFunc() - - gShutdown.Go(func() error { - return s1.Shutdown(ctxShutdown) - }) - gShutdown.Go(func() error { - return s2.Shutdown(ctxShutdown) - }) - - return gShutdown.Wait() - }) - return g.Wait() -} - -func redirect(w http.ResponseWriter, req *http.Request) { - target := "https://" + req.Host + req.URL.Path - http.Redirect(w, req, target, http.StatusTemporaryRedirect) -} - -func cacheDir() string { - const base = "golang-autocert" - if xdg := os.Getenv("XDG_CACHE_HOME"); xdg != "" { - return filepath.Join(xdg, base) - } - return filepath.Join(os.Getenv("HOME"), ".cache", base) -} diff --git a/service/canceler/canceler.go b/service/canceler/canceler.go deleted file mode 100644 index bfef4d77e6..0000000000 --- a/service/canceler/canceler.go +++ /dev/null @@ -1,269 +0,0 @@ -// Copyright 2019 Drone IO, Inc. 
-// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package canceler - -import ( - "context" - "encoding/json" - "runtime/debug" - "time" - - "github.com/drone/drone/core" - - "github.com/hashicorp/go-multierror" - "github.com/sirupsen/logrus" -) - -var noContext = context.Background() - -type service struct { - builds core.BuildStore - events core.Pubsub - repos core.RepositoryStore - scheduler core.Scheduler - stages core.StageStore - status core.StatusService - steps core.StepStore - users core.UserStore - webhooks core.WebhookSender -} - -// New returns a new cancellation service that encapsulates -// all cancellation operations. -func New( - builds core.BuildStore, - events core.Pubsub, - repos core.RepositoryStore, - scheduler core.Scheduler, - stages core.StageStore, - status core.StatusService, - steps core.StepStore, - users core.UserStore, - webhooks core.WebhookSender, -) core.Canceler { - return &service{ - builds: builds, - events: events, - repos: repos, - scheduler: scheduler, - stages: stages, - status: status, - steps: steps, - users: users, - webhooks: webhooks, - } -} - -// Cancel cancels a build. -func (s *service) Cancel(ctx context.Context, repo *core.Repository, build *core.Build) error { - return s.cancel(ctx, repo, build, core.StatusKilled) -} - -// CancelPending cancels all pending builds of the same event -// and reference with lower build numbers. -func (s *service) CancelPending(ctx context.Context, repo *core.Repository, build *core.Build) error { - defer func() { - if err := recover(); err != nil { - debug.PrintStack() - } - }() - - // switch { - // case repo.CancelPulls && build.Event == core.EventPullRequest: - // case repo.CancelPush && build.Event == core.EventPush: - // default: - // return nil - // } - - switch build.Event { - // on the push and pull request builds can be automatically - // cancelled by the system. - case core.EventPush, core.EventPullRequest: - default: - return nil - } - - // get a list of all incomplete builds from the database - // for all repositories. this will need to be filtered. - incomplete, err := s.repos.ListIncomplete(ctx) - if err != nil { - return err - } - - var result error - for _, item := range incomplete { - // ignore incomplete items in the list that do - // not match the repository or build - if !match(build, item) { - continue - } - - err := s.cancel(ctx, repo, item.Build, core.StatusSkipped) - if err != nil { - result = multierror.Append(result, err) - } - } - - return result -} - -func (s *service) cancel(ctx context.Context, repo *core.Repository, build *core.Build, status string) error { - logger := logrus.WithFields( - logrus.Fields{ - "repo": repo.Slug, - "ref": build.Ref, - "build": build.Number, - "event": build.Event, - "status": build.Status, - }, - ) - - // do not cancel the build if the build status is - // complete. only cancel the build if the status is - // running or pending. 
- switch build.Status { - case core.StatusPending, core.StatusRunning: - default: - return nil - } - - // update the build status to killed. if the update fails - // due to an optimistic lock error it means the build has - // already started, and should now be ignored. - build.Status = status - build.Finished = time.Now().Unix() - if build.Started == 0 { - build.Started = time.Now().Unix() - } - - err := s.builds.Update(ctx, build) - if err != nil { - logger.WithError(err). - Warnln("canceler: cannot update build status to cancelled") - return err - } - - // notify the scheduler to cancel the build. this will - // instruct runners subscribing to the scheduler to - // cancel execution. - err = s.scheduler.Cancel(ctx, build.ID) - if err != nil { - logger.WithError(err). - Warnln("canceler: cannot signal cancelled build is complete") - } - - // update the commit status in the remote source - // control management system. - user, err := s.users.Find(ctx, repo.UserID) - if err == nil { - err := s.status.Send(ctx, user, &core.StatusInput{ - Repo: repo, - Build: build, - }) - if err != nil { - logger.WithError(err). - Debugln("canceler: cannot set status") - } - } - - stages, err := s.stages.ListSteps(ctx, build.ID) - if err != nil { - logger.WithError(err). - Debugln("canceler: cannot list build stages") - } - - // update the status of all steps to indicate they - // were killed or skipped. - for _, stage := range stages { - if stage.IsDone() { - continue - } - if stage.Started != 0 { - stage.Status = core.StatusKilled - } else { - stage.Status = core.StatusSkipped - stage.Started = time.Now().Unix() - } - stage.Stopped = time.Now().Unix() - err := s.stages.Update(ctx, stage) - if err != nil { - logger.WithError(err). - WithField("stage", stage.Number). - Debugln("canceler: cannot update stage status") - } - - // update the status of all steps to indicate they - // were killed or skipped. - for _, step := range stage.Steps { - if step.IsDone() { - continue - } - if step.Started != 0 { - step.Status = core.StatusKilled - } else { - step.Status = core.StatusSkipped - step.Started = time.Now().Unix() - } - step.Stopped = time.Now().Unix() - step.ExitCode = 130 - err := s.steps.Update(ctx, step) - if err != nil { - logger.WithError(err). - WithField("stage", stage.Number). - WithField("step", step.Number). - Debugln("canceler: cannot update step status") - } - } - } - - logger.WithError(err). - Debugln("canceler: successfully cancelled build") - - build.Stages = stages - - // trigger a pubsub event to notify subscribers that - // the build was cancelled. Specifically, this should - // live update the user interface. - repoCopy := new(core.Repository) - *repoCopy = *repo - repoCopy.Build = build - repoCopy.Build.Stages = stages - data, _ := json.Marshal(repoCopy) - err = s.events.Publish(noContext, &core.Message{ - Repository: repo.Slug, - Visibility: repo.Visibility, - Data: data, - }) - if err != nil { - logger.WithError(err). - Warnln("canceler: cannot publish cancel event") - } - - // trigger a webhook to notify subscribing systems that - // the build was cancelled. - payload := &core.WebhookData{ - Event: core.WebhookEventBuild, - Action: core.WebhookActionUpdated, - Repo: repo, - Build: build, - } - err = s.webhooks.Send(ctx, payload) - if err != nil { - logger.WithError(err). 
- Warnln("manager: cannot send global webhook") - } - - return nil -} diff --git a/service/canceler/canceler_test.go b/service/canceler/canceler_test.go deleted file mode 100644 index b04c78ad42..0000000000 --- a/service/canceler/canceler_test.go +++ /dev/null @@ -1,143 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package canceler - -import ( - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/mock" - "github.com/go-chi/chi" - - "github.com/golang/mock/gomock" -) - -func TestCancelPending_IgnoreEvent(t *testing.T) { - ignore := []string{ - core.EventCron, - core.EventCustom, - core.EventPromote, - core.EventRollback, - core.EventTag, - } - for _, event := range ignore { - s := new(service) - err := s.CancelPending(noContext, nil, &core.Build{Event: event}) - if err != nil { - t.Errorf("Expect cancel skipped for event type %s", event) - } - } -} - -func TestCancelRunning_IgnoreEvent(t *testing.T) { - ignore := []string{ - core.EventCron, - core.EventCustom, - core.EventPromote, - core.EventRollback, - core.EventTag, - } - for _, event := range ignore { - s := new(service) - err := s.CancelPending(noContext, nil, &core.Build{Event: event}) - if err != nil { - t.Errorf("Expect cancel skipped for event type %s", event) - } - } -} - -func TestCancel(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockStages := []*core.Stage{ - {Status: core.StatusPassing}, - { - Status: core.StatusPending, - Steps: []*core.Step{ - {Status: core.StatusPassing}, - {Status: core.StatusPending}, - }, - }, - } - - mockBuildCopy := new(core.Build) - *mockBuildCopy = *mockBuild - - repos := mock.NewMockRepositoryStore(controller) - - events := mock.NewMockPubsub(controller) - events.EXPECT().Publish(gomock.Any(), gomock.Any()).Return(nil) - - builds := mock.NewMockBuildStore(controller) - builds.EXPECT().Update(gomock.Any(), mockBuildCopy).Return(nil) - - users := mock.NewMockUserStore(controller) - users.EXPECT().Find(gomock.Any(), mockRepo.UserID).Return(mockUser, nil) - - stages := mock.NewMockStageStore(controller) - stages.EXPECT().ListSteps(gomock.Any(), mockBuild.ID).Return(mockStages, nil) - stages.EXPECT().Update(gomock.Any(), mockStages[1]).Return(nil) - - steps := mock.NewMockStepStore(controller) - steps.EXPECT().Update(gomock.Any(), mockStages[1].Steps[1]).Return(nil) - - status := mock.NewMockStatusService(controller) - status.EXPECT().Send(gomock.Any(), mockUser, gomock.Any()).Return(nil) - - webhook := mock.NewMockWebhookSender(controller) - webhook.EXPECT().Send(gomock.Any(), gomock.Any()).Return(nil) - - scheduler := mock.NewMockScheduler(controller) - scheduler.EXPECT().Cancel(gomock.Any(), mockBuild.ID).Return(nil) - - c := new(chi.Context) - c.URLParams.Add("owner", "octocat") - c.URLParams.Add("name", "hello-world") - c.URLParams.Add("number", "1") - - s := New(builds, events, repos, scheduler, stages, status, steps, users, webhook) - err := s.Cancel(noContext, mockRepo, mockBuildCopy) - if err != nil { - t.Error(err) - } -} - -var ( - mockRepo = &core.Repository{ - ID: 1, - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - Counter: 42, - Branch: "master", - } - - mockBuild = &core.Build{ - ID: 1, - Number: 1, - RepoID: 1, - Status: core.StatusPending, - Event: core.EventPush, - Link: "https://github.com/octocat/Hello-World/commit/7fd1a60b01f91b314f59955a4e4d4e80d8edf11d", 
- Timestamp: 1299283200, - Message: "first commit", - Before: "553c2077f0edc3d5dc5d17262f6aa498e69d6f8e", - After: "7fd1a60b01f91b314f59955a4e4d4e80d8edf11d", - Ref: "refs/heads/master", - Source: "master", - Target: "master", - Author: "octocat", - AuthorName: "The Octocat", - AuthorEmail: "octocat@hello-world.com", - AuthorAvatar: "https://avatars3.githubusercontent.com/u/583231", - Sender: "octocat", - } - - mockUser = &core.User{ - ID: 1, - Login: "octocat", - } -) diff --git a/service/canceler/match.go b/service/canceler/match.go deleted file mode 100644 index d868fa620b..0000000000 --- a/service/canceler/match.go +++ /dev/null @@ -1,52 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package canceler - -import "github.com/drone/drone/core" - -func match(build *core.Build, with *core.Repository) bool { - // filter out existing builds for others - // repositories. - if with.ID != build.RepoID { - return false - } - // filter out builds that are newer than - // the current build. - if with.Build.Number >= build.Number { - return false - } - - if with.CancelRunning == true { - if with.Build.Status != core.StatusRunning && with.Build.Status != core.StatusPending { - return false - } - } else { - if with.Build.Status != core.StatusPending { - return false - } - } - - // filter out builds that do not match - // the same event type. - if with.Build.Event != build.Event { - return false - } - // filter out builds that do not match - // the same reference. - if with.Build.Ref != build.Ref { - return false - } - return true -} diff --git a/service/canceler/match_test.go b/service/canceler/match_test.go deleted file mode 100644 index a35628dfc9..0000000000 --- a/service/canceler/match_test.go +++ /dev/null @@ -1,179 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
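// Editor's note: a minimal sketch of the match function above, not part of the
// original sources. It assumes it sits inside package canceler next to match
// and uses only the core.Build and core.Repository fields shown in this patch.
// With CancelRunning enabled, an older running build for the same ref and event
// is a cancellation candidate; with it disabled, only an older pending build is.
func exampleMatch() (withCancelRunning, pendingOnly bool) {
	incoming := &core.Build{RepoID: 1, Number: 2, Event: core.EventPush, Ref: "refs/heads/master"}
	previous := &core.Repository{
		ID:            1,
		CancelRunning: true,
		Build: &core.Build{
			Number: 1,
			Status: core.StatusRunning,
			Event:  core.EventPush,
			Ref:    "refs/heads/master",
		},
	}
	withCancelRunning = match(incoming, previous) // true: the older running build may be cancelled
	previous.CancelRunning = false
	pendingOnly = match(incoming, previous) // false: only pending builds match in this mode
	return withCancelRunning, pendingOnly
}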
- -package canceler - -import ( - "testing" - - "github.com/drone/drone/core" -) - -func TestMatchPendingBuild(t *testing.T) { - tests := []struct { - build *core.Build - repo *core.Repository - want bool - }{ - // does not match repository id - { - build: &core.Build{RepoID: 2}, - repo: &core.Repository{ID: 1}, - want: false, - }, - // does not match build number requirement that - // must be older than current build - { - build: &core.Build{RepoID: 1, Number: 2}, - repo: &core.Repository{ID: 1, Build: &core.Build{Number: 3}}, - want: false, - }, - { - build: &core.Build{RepoID: 1, Number: 2}, - repo: &core.Repository{ID: 1, Build: &core.Build{Number: 2}}, - want: false, - }, - // does not match required status - { - build: &core.Build{RepoID: 1, Number: 2}, - repo: &core.Repository{ID: 1, Build: &core.Build{Number: 1, Status: core.StatusPassing}}, - want: false, - }, - // does not match (one of) required event types - { - build: &core.Build{RepoID: 1, Number: 2, Event: core.EventPullRequest}, - repo: &core.Repository{ID: 1, Build: &core.Build{ - Number: 1, - Status: core.StatusPending, - Event: core.EventPush, - }}, - want: false, - }, - // does not match ref - { - build: &core.Build{RepoID: 1, Number: 2, Event: core.EventPush, Ref: "refs/heads/master"}, - repo: &core.Repository{ID: 1, Build: &core.Build{ - Number: 1, - Status: core.StatusPending, - Event: core.EventPush, - Ref: "refs/heads/develop", - }}, - want: false, - }, - - // - // successful matches - // - { - build: &core.Build{RepoID: 1, Number: 2, Event: core.EventPush, Ref: "refs/heads/master"}, - repo: &core.Repository{ID: 1, Build: &core.Build{ - Number: 1, - Status: core.StatusPending, - Event: core.EventPush, - Ref: "refs/heads/master", - }, CancelRunning: false}, - want: true, - }, - { - build: &core.Build{RepoID: 1, Number: 2, Event: core.EventPullRequest, Ref: "refs/heads/master"}, - repo: &core.Repository{ID: 1, Build: &core.Build{ - Number: 1, - Status: core.StatusPending, - Event: core.EventPullRequest, - Ref: "refs/heads/master", - }, CancelRunning: false}, - want: true, - }, - } - - for i, test := range tests { - if got, want := match(test.build, test.repo), test.want; got != want { - t.Errorf("Want match %v at index %d, got %v", want, i, got) - } - } -} - -func TestMatchRunningBuilds(t *testing.T) { - tests := []struct { - build *core.Build - repo *core.Repository - want bool - }{ - // does not match repository id - { - build: &core.Build{RepoID: 2}, - repo: &core.Repository{ID: 1}, - want: false, - }, - // does not match build number requirement that - // must be older than current build - { - build: &core.Build{RepoID: 1, Number: 2}, - repo: &core.Repository{ID: 1, Build: &core.Build{Number: 3}}, - want: false, - }, - { - build: &core.Build{RepoID: 1, Number: 2}, - repo: &core.Repository{ID: 1, Build: &core.Build{Number: 2}}, - want: false, - }, - // does not match required status - { - build: &core.Build{RepoID: 1, Number: 2}, - repo: &core.Repository{ID: 1, Build: &core.Build{Number: 1, Status: core.StatusError}}, - want: false, - }, - // does not match (one of) required event types - { - build: &core.Build{RepoID: 1, Number: 2, Event: core.EventPullRequest}, - repo: &core.Repository{ID: 1, Build: &core.Build{ - Number: 1, - Status: core.StatusRunning, - Event: core.EventPush, - }}, - want: false, - }, - // does not match ref - { - build: &core.Build{RepoID: 1, Number: 2, Event: core.EventPush, Ref: "refs/heads/master"}, - repo: &core.Repository{ID: 1, Build: &core.Build{ - Number: 1, - Status: 
core.StatusRunning, - Event: core.EventPush, - Ref: "refs/heads/develop", - }}, - want: false, - }, - - // - // successful matches - // - { - build: &core.Build{RepoID: 1, Number: 2, Event: core.EventPullRequest, Ref: "refs/heads/master"}, - repo: &core.Repository{ID: 1, Build: &core.Build{ - Number: 1, - Status: core.StatusRunning, - Event: core.EventPullRequest, - Ref: "refs/heads/master", - }, CancelRunning: true}, - want: true, - }, - { - build: &core.Build{RepoID: 1, Number: 2, Event: core.EventPush, Ref: "refs/heads/master"}, - repo: &core.Repository{ID: 1, Build: &core.Build{ - Number: 1, - Status: core.StatusRunning, - Event: core.EventPush, - Ref: "refs/heads/master", - }, CancelRunning: true}, - want: true, - }, - } - - for i, test := range tests { - if got, want := match(test.build, test.repo), test.want; got != want { - t.Errorf("Want match %v at index %d, got %v", want, i, got) - } - } -} diff --git a/service/canceler/reaper/reaper.go b/service/canceler/reaper/reaper.go deleted file mode 100644 index 9de39b11d5..0000000000 --- a/service/canceler/reaper/reaper.go +++ /dev/null @@ -1,208 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package reaper - -import ( - "context" - "runtime/debug" - "time" - - "github.com/drone/drone/core" - - "github.com/hashicorp/go-multierror" - "github.com/sirupsen/logrus" -) - -// Reaper finds and kills zombie jobs that are permanently -// stuck in a pending or running state. -type Reaper struct { - Repos core.RepositoryStore - Builds core.BuildStore - Stages core.StageStore - Canceler core.Canceler - Pending time.Duration // Pending is the pending pipeline deadline - Running time.Duration // Running is the running pipeline deadline - - // Buffer is applied when calculating whether or not the timeout - // period is exceeded. The added buffer helps prevent false positives. - Buffer time.Duration -} - -// New returns a new Reaper. -func New( - repos core.RepositoryStore, - builds core.BuildStore, - stages core.StageStore, - canceler core.Canceler, - running time.Duration, - pending time.Duration, - buffer time.Duration, -) *Reaper { - if running == 0 { - running = time.Hour * 24 - } - if pending == 0 { - pending = time.Hour * 24 - } - - if buffer == 0 { - buffer = time.Minute * 30 - } - return &Reaper{ - Repos: repos, - Builds: builds, - Stages: stages, - Canceler: canceler, - Pending: pending, - Running: running, - Buffer: buffer, - } -} - -// Start starts the reaper. -func (r *Reaper) Start(ctx context.Context, dur time.Duration) error { - ticker := time.NewTicker(dur) - defer ticker.Stop() - - for { - select { - case <-ctx.Done(): - return nil - case <-ticker.C: - r.reap(ctx) - } - } -} - -func (r *Reaper) reap(ctx context.Context) error { - defer func() { - // taking the paranoid approach to recover from - // a panic that should absolutely never happen. 
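		// (Editor's note, not part of the original file.) recover only has an
		// effect when called directly inside a deferred function, so this guard
		// must live in the deferred closure rather than inline around the store
		// calls below; a panic anywhere in reap is logged and swallowed, and the
		// ticker loop in Start keeps running.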
- if r := recover(); r != nil { - logrus.Errorf("reaper: unexpected panic: %s", r) - debug.PrintStack() - } - }() - - logrus.Traceln("reaper: finding zombie builds") - - var result error - pending, err := r.Builds.Pending(ctx) - if err != nil { - logrus.WithError(err). - Errorf("reaper: cannot get pending builds") - result = multierror.Append(result, err) - } - for _, build := range pending { - logger := logrus. - WithField("build.id", build.ID). - WithField("build.number", build.Number). - WithField("build.repo_id", build.RepoID). - WithField("build.status", build.Status). - WithField("build.created", build.Created) - - // if a build is pending for longer than the maximum - // pending time limit, the build is maybe cancelled. - if isExceeded(build.Created, r.Pending, r.Buffer) { - logger.Traceln("reaper: cancel build: time limit exceeded") - err = r.reapMaybe(ctx, build) - if err != nil { - logger.WithError(err). - Errorln("reaper: cannot cancel build") - result = multierror.Append(result, err) - } - } else { - logger.Traceln("reaper: ignore build: time limit not exceeded") - } - } - - running, err := r.Builds.Running(ctx) - if err != nil { - logrus.WithError(err). - Errorf("reaper: cannot get running builds") - result = multierror.Append(result, err) - } - for _, build := range running { - logger := logrus. - WithField("build.id", build.ID). - WithField("build.number", build.Number). - WithField("build.repo_id", build.RepoID). - WithField("build.status", build.Status). - WithField("build.created", build.Created) - - // if a build is running for longer than the maximum - // running time limit, the build is maybe cancelled. - if isExceeded(build.Started, r.Running, r.Buffer) { - logger.Traceln("reaper: cancel build: time limit exceeded") - - err = r.reapMaybe(ctx, build) - if err != nil { - logger.WithError(err). - Errorln("reaper: cannot cancel build") - result = multierror.Append(result, err) - } - } else { - logger.Traceln("reaper: ignore build: time limit not exceeded") - } - } - - return result -} - -func (r *Reaper) reapMaybe(ctx context.Context, build *core.Build) error { - repo, err := r.Repos.Find(ctx, build.RepoID) - if err != nil { - return err - } - - // if the build status is pending we can immediately - // cancel the build and all build stages. - if build.Status == core.StatusPending { - // TODO trace log entry - return r.Canceler.Cancel(ctx, repo, build) - } - - stages, err := r.Stages.List(ctx, build.ID) - if err != nil { - return err - } - - var started int64 - for _, stage := range stages { - if stage.IsDone() { - continue - } - if stage.Started > started { - started = stage.Started - } - } - - // if the build stages are all pending we can immediately - // cancel the build. - if started == 0 { - // TODO trace log entry - return r.Canceler.Cancel(ctx, repo, build) - } - - // if the build stage has exceeded the timeout by a reasonable - // margin cancel the build and all build stages, else ignore. - if isExceeded(started, time.Duration(repo.Timeout)*time.Minute, r.Buffer) { - // TODO trace log entry - return r.Canceler.Cancel(ctx, repo, build) - } - - // TODO trace log entry - return nil -} diff --git a/service/canceler/reaper/reaper_test.go b/service/canceler/reaper/reaper_test.go deleted file mode 100644 index b98ea934eb..0000000000 --- a/service/canceler/reaper/reaper_test.go +++ /dev/null @@ -1,396 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. 
-// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package reaper - -import ( - "context" - "testing" - "time" - - "github.com/drone/drone/core" - "github.com/drone/drone/mock" - - "github.com/golang/mock/gomock" -) - -var nocontext = context.Background() - -// -// reap tests -// - -// this test confirms that pending builds that -// exceed the deadline are canceled, and pending -// builds that do not exceed the deadline are -// ignored. -func TestReapPending(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - defer func() { - now = time.Now - }() - now = func() time.Time { - return mustParse("2006-01-02T15:00:00") - } - - mockRepo := &core.Repository{ - ID: 2, - } - mockBuild := &core.Build{ - ID: 1, - RepoID: mockRepo.ID, - Status: core.StatusPending, - Created: mustParse("2006-01-01T00:00:00").Unix(), // expire > 24 hours, must cancel - } - mockPending := []*core.Build{ - mockBuild, - { - ID: 2, - RepoID: mockRepo.ID, - Status: core.StatusPending, - Created: mustParse("2006-01-02T14:30:00").Unix(), // expire < 1 hours, must ignore - }, - } - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().Find(gomock.Any(), mockBuild.RepoID).Return(mockRepo, nil).Times(1) - - builds := mock.NewMockBuildStore(controller) - builds.EXPECT().Pending(gomock.Any()).Return(mockPending, nil) - builds.EXPECT().Running(gomock.Any()).Return(nil, nil) - - canceler := mock.NewMockCanceler(controller) - canceler.EXPECT().Cancel(gomock.Any(), mockRepo, mockBuild) - - r := New( - repos, - builds, - nil, - canceler, - time.Hour*24, - time.Hour*24, - time.Minute*30, - ) - - r.reap(nocontext) -} - -// this test confirms that running builds that -// exceed the deadline are canceled, and running -// builds that do not exceed the deadline are -// ignored. -func TestReapRunning(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - defer func() { - now = time.Now - }() - now = func() time.Time { - return mustParse("2006-01-02T15:00:00") - } - - mockRepo := &core.Repository{ - ID: 2, - Timeout: 60, - } - mockBuild := &core.Build{ - ID: 1, - RepoID: mockRepo.ID, - Status: core.StatusRunning, - Started: mustParse("2006-01-01T00:00:00").Unix(), // expire > 24 hours, must cancel - } - mockRunning := []*core.Build{ - mockBuild, - { - ID: 2, - RepoID: mockRepo.ID, - Status: core.StatusRunning, - Started: mustParse("2006-01-02T14:30:00").Unix(), // expire < 1 hours, must ignore - }, - } - mockStages := []*core.Stage{ - { - BuildID: mockBuild.ID, - Status: core.StatusPending, - Started: 0, - }, - } - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().Find(gomock.Any(), mockBuild.RepoID).Return(mockRepo, nil).Times(1) - - builds := mock.NewMockBuildStore(controller) - builds.EXPECT().Pending(gomock.Any()).Return(nil, nil) - builds.EXPECT().Running(gomock.Any()).Return(mockRunning, nil) - - stages := mock.NewMockStageStore(controller) - stages.EXPECT().List(gomock.Any(), mockBuild.ID).Return(mockStages, nil) - - canceler := mock.NewMockCanceler(controller) - canceler.EXPECT().Cancel(gomock.Any(), mockRepo, mockBuild) - - r := New( - repos, - builds, - stages, - canceler, - time.Hour*24, - time.Hour*24, - time.Minute*30, - ) - - r.reap(nocontext) -} - -// -// reap maybe tests -// - -// this test confirms that the build is cancelled -// if the build status is pending. 
-func TestReapPendingMaybe(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockBuild := &core.Build{ - ID: 1, - RepoID: 2, - Status: core.StatusPending, - } - mockRepo := &core.Repository{ - ID: 2, - } - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().Find(gomock.Any(), mockBuild.RepoID).Return(mockRepo, nil) - - canceler := mock.NewMockCanceler(controller) - canceler.EXPECT().Cancel(gomock.Any(), mockRepo, mockBuild) - - r := &Reaper{ - Repos: repos, - Stages: nil, - Canceler: canceler, - } - - r.reapMaybe(nocontext, mockBuild) -} - -// this test confirms that the build is cancelled -// if the build status is running, and the stage -// started date is greater than the expiry date. -func TestReapRunningMaybe(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - defer func() { - now = time.Now - }() - now = func() time.Time { - return mustParse("2006-01-02T15:00:00") - } - - mockBuild := &core.Build{ - ID: 1, - RepoID: 2, - Status: core.StatusRunning, - } - mockRepo := &core.Repository{ - ID: 2, - Timeout: 60, - } - mockStages := []*core.Stage{ - { - Status: core.StatusRunning, - Started: mustParse("2006-01-02T13:00:00").Unix(), // running 2 hours, 1 hour longer than timeout - }, - } - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().Find(gomock.Any(), mockBuild.RepoID).Return(mockRepo, nil) - - stages := mock.NewMockStageStore(controller) - stages.EXPECT().List(gomock.Any(), mockBuild.ID).Return(mockStages, nil) - - canceler := mock.NewMockCanceler(controller) - canceler.EXPECT().Cancel(gomock.Any(), mockRepo, mockBuild) - - r := &Reaper{ - Repos: repos, - Stages: stages, - Canceler: canceler, - } - - r.reapMaybe(nocontext, mockBuild) -} - -// this test confirms that if the build status is -// running, but all stages have a pending status, -// the build is cancelled (this likely points to some -// sort of race condition, and should not happen). -func TestReapRunningMaybe_AllStagesPending(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - defer func() { - now = time.Now - }() - now = func() time.Time { - return mustParse("2006-01-02T15:00:00") - } - - mockBuild := &core.Build{ - ID: 1, - RepoID: 2, - Status: core.StatusRunning, - } - mockRepo := &core.Repository{ - ID: 2, - Timeout: 60, - } - mockStages := []*core.Stage{ - { - Status: core.StatusPending, - Started: 0, - }, - { - Status: core.StatusPending, - Started: 0, - }, - } - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().Find(gomock.Any(), mockBuild.RepoID).Return(mockRepo, nil) - - stages := mock.NewMockStageStore(controller) - stages.EXPECT().List(gomock.Any(), mockBuild.ID).Return(mockStages, nil) - - canceler := mock.NewMockCanceler(controller) - canceler.EXPECT().Cancel(gomock.Any(), mockRepo, mockBuild) - - r := &Reaper{ - Repos: repos, - Stages: stages, - Canceler: canceler, - } - - r.reapMaybe(nocontext, mockBuild) -} - -// this test confirms that if the build status is -// running, but all stages have a finished status, -// the build is cancelled (this likely points to some -// sort of race condition, and should not happen). 
-func TestReapRunningMaybe_AllStagesFinished(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - defer func() { - now = time.Now - }() - now = func() time.Time { - return mustParse("2006-01-02T15:00:00") - } - - mockBuild := &core.Build{ - ID: 1, - RepoID: 2, - Status: core.StatusRunning, - } - mockRepo := &core.Repository{ - ID: 2, - Timeout: 60, - } - mockStages := []*core.Stage{ - { - Status: core.StatusPassing, - Started: mustParse("2006-01-02T14:40:00").Unix(), - }, - { - Status: core.StatusPassing, - Started: mustParse("2006-01-02T14:50:00").Unix(), - }, - } - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().Find(gomock.Any(), mockBuild.RepoID).Return(mockRepo, nil) - - stages := mock.NewMockStageStore(controller) - stages.EXPECT().List(gomock.Any(), mockBuild.ID).Return(mockStages, nil) - - canceler := mock.NewMockCanceler(controller) - canceler.EXPECT().Cancel(gomock.Any(), mockRepo, mockBuild) - - r := &Reaper{ - Repos: repos, - Stages: stages, - Canceler: canceler, - } - - r.reapMaybe(nocontext, mockBuild) -} - -// this test confirms that if the build status is -// running, but the stage start time has not exceeded -// the timeout period, the build is NOT cancelled. -func TestReapRunningMaybe_NotExpired(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - defer func() { - now = time.Now - }() - now = func() time.Time { - return mustParse("2006-01-02T15:00:00") - } - - mockBuild := &core.Build{ - ID: 1, - RepoID: 2, - Status: core.StatusRunning, - } - mockRepo := &core.Repository{ - ID: 2, - Timeout: 60, - } - mockStages := []*core.Stage{ - { - Status: core.StatusPassing, - Started: mustParse("2006-01-02T14:50:00").Unix(), - }, - { - Status: core.StatusRunning, - Started: mustParse("2006-01-02T14:55:00").Unix(), - }, - } - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().Find(gomock.Any(), mockBuild.RepoID).Return(mockRepo, nil) - - stages := mock.NewMockStageStore(controller) - stages.EXPECT().List(gomock.Any(), mockBuild.ID).Return(mockStages, nil) - - r := &Reaper{ - Repos: repos, - Stages: stages, - Canceler: nil, - } - - r.reapMaybe(nocontext, mockBuild) -} - -// -// Failure Scenarios -// - -func TestReapRunningMaybe_ErrorGetRepo(t *testing.T) { - -} - -func TestReapRunningMaybe_ErrorListStages(t *testing.T) { - -} diff --git a/service/canceler/reaper/util.go b/service/canceler/reaper/util.go deleted file mode 100644 index 5bf710f292..0000000000 --- a/service/canceler/reaper/util.go +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package reaper - -import "time" - -// helper function returns the current time. -var now = time.Now - -// helper function returns true if the time exceeded the -// timeout duration. 
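// (Editor's note with illustrative numbers, not part of the original file.)
// The check below is equivalent to asking whether now() is strictly later than
// time.Unix(unix, 0) + timeout + buffer. For example, with timeout = time.Hour
// and buffer = 30*time.Minute, a build created at 13:00:00 is reported as
// exceeded only once the clock passes 14:30:00.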
-func isExceeded(unix int64, timeout, buffer time.Duration) bool { - return now().After( - time.Unix(unix, 0).Add(timeout).Add(buffer), - ) -} diff --git a/service/canceler/reaper/util_test.go b/service/canceler/reaper/util_test.go deleted file mode 100644 index 342e1585ce..0000000000 --- a/service/canceler/reaper/util_test.go +++ /dev/null @@ -1,75 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package reaper - -import ( - "testing" - "time" -) - -func TestIsExceeded(t *testing.T) { - defer func() { - now = time.Now - }() - now = func() time.Time { - return mustParse("2006-01-02T15:00:00") - } - var tests = []struct { - unix int64 - timeout time.Duration - buffer time.Duration - exceeded bool - }{ - // timestamp equal to current time, not expired - { - unix: mustParse("2006-01-02T15:00:00").Unix(), - timeout: time.Minute * 60, - buffer: time.Minute * 5, - exceeded: false, - }, - // timestamp is not gt current time - timeout, not expired - { - unix: mustParse("2006-01-02T14:00:00").Unix(), - timeout: time.Minute * 60, - buffer: 0, - exceeded: false, - }, - // timestamp is gt current time - timeout, expired - { - unix: mustParse("2006-01-02T13:59:00").Unix(), - timeout: time.Minute * 60, - buffer: 0, - exceeded: true, - }, - // timestamp is not gt current time - timeout - buffer, not expired - { - unix: mustParse("2006-01-02T13:59:00").Unix(), - timeout: time.Minute * 60, - buffer: time.Minute * 5, - exceeded: false, - }, - // timestamp is gt current time - timeout - buffer, expired - { - unix: mustParse("2006-01-02T13:04:05").Unix(), - timeout: time.Minute * 60, - buffer: time.Minute * 5, - exceeded: true, - }, - } - for i, test := range tests { - got, want := isExceeded(test.unix, test.timeout, test.buffer), test.exceeded - if got != want { - t.Errorf("Want exceeded %v, got %v at index %v", want, got, i) - } - } -} - -func mustParse(s string) time.Time { - t, err := time.Parse("2006-01-02T15:04:05", s) - if err != nil { - panic(err) - } - return t -} diff --git a/service/commit/commit.go b/service/commit/commit.go deleted file mode 100644 index c313b3a56a..0000000000 --- a/service/commit/commit.go +++ /dev/null @@ -1,141 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package commit - -import ( - "context" - - "github.com/drone/drone/core" - "github.com/drone/go-scm/scm" -) - -// New returns a new CommitServiceFactory. 
-func New(client *scm.Client, renew core.Renewer) core.CommitService { - return &service{ - client: client, - renew: renew, - } -} - -type service struct { - renew core.Renewer - client *scm.Client -} - -func (s *service) Find(ctx context.Context, user *core.User, repo, sha string) (*core.Commit, error) { - err := s.renew.Renew(ctx, user, false) - if err != nil { - return nil, err - } - ctx = context.WithValue(ctx, scm.TokenKey{}, &scm.Token{ - Token: user.Token, - Refresh: user.Refresh, - }) - commit, _, err := s.client.Git.FindCommit(ctx, repo, sha) - if err != nil { - return nil, err - } - return &core.Commit{ - Sha: commit.Sha, - Message: commit.Message, - Link: commit.Link, - Author: &core.Committer{ - Name: commit.Author.Name, - Email: commit.Author.Email, - Date: commit.Author.Date.Unix(), - Login: commit.Author.Login, - Avatar: commit.Author.Avatar, - }, - Committer: &core.Committer{ - Name: commit.Committer.Name, - Email: commit.Committer.Email, - Date: commit.Committer.Date.Unix(), - Login: commit.Committer.Login, - Avatar: commit.Committer.Avatar, - }, - }, nil -} - -func (s *service) FindRef(ctx context.Context, user *core.User, repo, ref string) (*core.Commit, error) { - err := s.renew.Renew(ctx, user, false) - if err != nil { - return nil, err - } - ctx = context.WithValue(ctx, scm.TokenKey{}, &scm.Token{ - Token: user.Token, - Refresh: user.Refresh, - }) - - switch s.client.Driver { - case scm.DriverBitbucket, - scm.DriverStash: - ref = scm.TrimRef(ref) - branch, _, err := s.client.Git.FindBranch(ctx, repo, ref) // wont work for a Tag - if err != nil { - return nil, err - } - ref = branch.Sha - } - - commit, _, err := s.client.Git.FindCommit(ctx, repo, ref) - if err != nil { - return nil, err - } - return &core.Commit{ - Sha: commit.Sha, - Ref: ref, - Message: commit.Message, - Link: commit.Link, - Author: &core.Committer{ - Name: commit.Author.Name, - Email: commit.Author.Email, - Date: commit.Author.Date.Unix(), - Login: commit.Author.Login, - Avatar: commit.Author.Avatar, - }, - Committer: &core.Committer{ - Name: commit.Committer.Name, - Email: commit.Committer.Email, - Date: commit.Committer.Date.Unix(), - Login: commit.Committer.Login, - Avatar: commit.Committer.Avatar, - }, - }, nil -} - -func (s *service) ListChanges(ctx context.Context, user *core.User, repo, sha, ref string) ([]*core.Change, error) { - err := s.renew.Renew(ctx, user, false) - if err != nil { - return nil, err - } - ctx = context.WithValue(ctx, scm.TokenKey{}, &scm.Token{ - Token: user.Token, - Refresh: user.Refresh, - }) - out, _, err := s.client.Git.ListChanges(ctx, repo, sha, scm.ListOptions{Size: 100}) - if err != nil { - return nil, err - } - var changes []*core.Change - for _, change := range out { - changes = append(changes, &core.Change{ - Path: change.Path, - Added: change.Added, - Renamed: change.Renamed, - Deleted: change.Deleted, - }) - } - return changes, nil -} diff --git a/service/commit/commit_test.go b/service/commit/commit_test.go deleted file mode 100644 index 279f524020..0000000000 --- a/service/commit/commit_test.go +++ /dev/null @@ -1,345 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
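// Editor's sketch (not part of the original sources): typical use of the commit
// service defined above. The client, renewer, ctx, user and sha names are
// placeholders supplied by the caller, and fmt is assumed to be imported.
//
//	commits := commit.New(client, renewer)
//	c, err := commits.Find(ctx, user, "octocat/hello-world", sha)
//	if err != nil {
//		return err
//	}
//	fmt.Println(c.Sha, c.Author.Email, c.Message)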
- -package commit - -import ( - "context" - "testing" - "time" - - "github.com/drone/drone/mock" - "github.com/drone/drone/mock/mockscm" - "github.com/drone/drone/core" - "github.com/drone/go-scm/scm" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -var noContext = context.Background() - -func TestFind(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{} - mockCommit := &scm.Commit{ - Sha: "7fd1a60b01f91b314f59955a4e4d4e80d8edf11d", - Message: "Merge pull request #6 from Spaceghost/patch-1\n\nNew line at end of file.", - Author: scm.Signature{ - Name: "The Octocat", - Email: "octocat@nowhere.com", - Date: time.Unix(1532303087, 0), - Login: "octocat", - Avatar: "https://avatars3.githubusercontent.com/u/583231?v=4", - }, - Committer: scm.Signature{ - Name: "The Octocat", - Email: "octocat@nowhere.com", - Date: time.Unix(1532303087, 0), - Login: "octocat", - Avatar: "https://avatars3.githubusercontent.com/u/583231?v=4", - }, - Link: "https://github.com/octocat/Hello-World/commit/7fd1a60b01f91b314f59955a4e4d4e80d8edf11d", - } - - mockRenewer := mock.NewMockRenewer(controller) - mockRenewer.EXPECT().Renew(gomock.Any(), mockUser, false).Return(nil) - - mockGit := mockscm.NewMockGitService(controller) - mockGit.EXPECT().FindCommit(gomock.Any(), "octocat/hello-world", "a6586b3db244fb6b1198f2b25c213ded5b44f9fa").Return(mockCommit, nil, nil) - - client := new(scm.Client) - client.Git = mockGit - - want := &core.Commit{ - Sha: "7fd1a60b01f91b314f59955a4e4d4e80d8edf11d", - Ref: "", - Message: "Merge pull request #6 from Spaceghost/patch-1\n\nNew line at end of file.", - Author: &core.Committer{ - Name: "The Octocat", - Email: "octocat@nowhere.com", - Date: 1532303087, - Login: "octocat", - Avatar: "https://avatars3.githubusercontent.com/u/583231?v=4", - }, - Committer: &core.Committer{ - Name: "The Octocat", - Email: "octocat@nowhere.com", - Date: 1532303087, - Login: "octocat", - Avatar: "https://avatars3.githubusercontent.com/u/583231?v=4", - }, - Link: "https://github.com/octocat/Hello-World/commit/7fd1a60b01f91b314f59955a4e4d4e80d8edf11d", - } - - service := New(client, mockRenewer) - got, err := service.Find(noContext, mockUser, "octocat/hello-world", "a6586b3db244fb6b1198f2b25c213ded5b44f9fa") - if err != nil { - t.Error(err) - } - - if diff := cmp.Diff(got, want); diff != "" { - t.Errorf(diff) - } -} - -func TestFind_Err(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{} - - mockRenewer := mock.NewMockRenewer(controller) - mockRenewer.EXPECT().Renew(gomock.Any(), mockUser, false).Return(nil) - - mockGit := mockscm.NewMockGitService(controller) - mockGit.EXPECT().FindCommit(gomock.Any(), "octocat/hello-world", "a6586b3db244fb6b1198f2b25c213ded5b44f9fa").Return(nil, nil, scm.ErrNotFound) - - client := new(scm.Client) - client.Git = mockGit - - service := New(client, mockRenewer) - _, err := service.Find(noContext, mockUser, "octocat/hello-world", "a6586b3db244fb6b1198f2b25c213ded5b44f9fa") - if err != scm.ErrNotFound { - t.Errorf("Want not found error, got %v", err) - } -} - -func TestFind_ErrRenew(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{} - - mockRenewer := mock.NewMockRenewer(controller) - mockRenewer.EXPECT().Renew(gomock.Any(), mockUser, false).Return(scm.ErrNotAuthorized) - - service := New(nil, mockRenewer) - _, err := service.Find(noContext, mockUser, "octocat/hello-world", 
"a6586b3db244fb6b1198f2b25c213ded5b44f9fa") - if err != scm.ErrNotAuthorized { - t.Errorf("Want not authorized error, got %v", err) - } -} - -func TestFindRef(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{} - mockCommit := &scm.Commit{ - Sha: "7fd1a60b01f91b314f59955a4e4d4e80d8edf11d", - Message: "Merge pull request #6 from Spaceghost/patch-1\n\nNew line at end of file.", - Author: scm.Signature{ - Name: "The Octocat", - Email: "octocat@nowhere.com", - Date: time.Unix(1532303087, 0), - Login: "octocat", - Avatar: "https://avatars3.githubusercontent.com/u/583231?v=4", - }, - Committer: scm.Signature{ - Name: "The Octocat", - Email: "octocat@nowhere.com", - Date: time.Unix(1532303087, 0), - Login: "octocat", - Avatar: "https://avatars3.githubusercontent.com/u/583231?v=4", - }, - Link: "https://github.com/octocat/Hello-World/commit/7fd1a60b01f91b314f59955a4e4d4e80d8edf11d", - } - - mockRenewer := mock.NewMockRenewer(controller) - mockRenewer.EXPECT().Renew(gomock.Any(), mockUser, false).Return(nil) - - mockGit := mockscm.NewMockGitService(controller) - mockGit.EXPECT().FindCommit(gomock.Any(), "octocat/hello-world", "master").Return(mockCommit, nil, nil) - - client := new(scm.Client) - client.Git = mockGit - - want := &core.Commit{ - Sha: "7fd1a60b01f91b314f59955a4e4d4e80d8edf11d", - Ref: "master", - Message: "Merge pull request #6 from Spaceghost/patch-1\n\nNew line at end of file.", - Author: &core.Committer{ - Name: "The Octocat", - Email: "octocat@nowhere.com", - Date: 1532303087, - Login: "octocat", - Avatar: "https://avatars3.githubusercontent.com/u/583231?v=4", - }, - Committer: &core.Committer{ - Name: "The Octocat", - Email: "octocat@nowhere.com", - Date: 1532303087, - Login: "octocat", - Avatar: "https://avatars3.githubusercontent.com/u/583231?v=4", - }, - Link: "https://github.com/octocat/Hello-World/commit/7fd1a60b01f91b314f59955a4e4d4e80d8edf11d", - } - - service := New(client, mockRenewer) - got, err := service.FindRef(noContext, mockUser, "octocat/hello-world", "master") - if err != nil { - t.Error(err) - } - - if diff := cmp.Diff(got, want); diff != "" { - t.Errorf(diff) - } -} - -func TestFindRef_Err(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{} - - mockRenewer := mock.NewMockRenewer(controller) - mockRenewer.EXPECT().Renew(gomock.Any(), mockUser, false).Return(nil) - - mockGit := mockscm.NewMockGitService(controller) - mockGit.EXPECT().FindCommit(gomock.Any(), "octocat/hello-world", "master").Return(nil, nil, scm.ErrNotFound) - - client := new(scm.Client) - client.Git = mockGit - - service := New(client, mockRenewer) - _, err := service.FindRef(noContext, mockUser, "octocat/hello-world", "master") - if err != scm.ErrNotFound { - t.Errorf("Want not found error, got %v", err) - } -} - -func TestFindRef_ErrRenew(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{} - - mockRenewer := mock.NewMockRenewer(controller) - mockRenewer.EXPECT().Renew(gomock.Any(), mockUser, false).Return(scm.ErrNotAuthorized) - - service := New(nil, mockRenewer) - _, err := service.FindRef(noContext, mockUser, "octocat/hello-world", "master") - if err != scm.ErrNotAuthorized { - t.Errorf("Want not authorized error, got %v", err) - } -} - -func TestListChanges(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{} - mockChanges := []*scm.Change{ - { - Path: 
"file1", - Added: false, - Deleted: false, - Renamed: false, - }, - { - Path: "file2", - Added: true, - Deleted: false, - Renamed: false, - }, - { - Path: "file2", - Added: false, - Deleted: true, - Renamed: false, - }, - { - Path: "file3", - Added: false, - Deleted: false, - Renamed: true, - }, - } - - mockRenewer := mock.NewMockRenewer(controller) - mockRenewer.EXPECT().Renew(gomock.Any(), mockUser, false).Return(nil) - - mockGit := mockscm.NewMockGitService(controller) - mockGit.EXPECT().ListChanges(gomock.Any(), "octocat/hello-world", "a6586b3db244fb6b1198f2b25c213ded5b44f9fa", gomock.Any()).Return(mockChanges, nil, nil) - - client := new(scm.Client) - client.Git = mockGit - - want := []*core.Change{ - { - Path: "file1", - Added: false, - Deleted: false, - Renamed: false, - }, - { - Path: "file2", - Added: true, - Deleted: false, - Renamed: false, - }, - { - Path: "file2", - Added: false, - Deleted: true, - Renamed: false, - }, - { - Path: "file3", - Added: false, - Deleted: false, - Renamed: true, - }, - } - - service := New(client, mockRenewer) - got, err := service.ListChanges(noContext, mockUser, "octocat/hello-world", "a6586b3db244fb6b1198f2b25c213ded5b44f9fa", "master") - if err != nil { - t.Error(err) - } - - if diff := cmp.Diff(got, want); diff != "" { - t.Errorf(diff) - } -} - -func TestListChanges_Err(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{} - - mockRenewer := mock.NewMockRenewer(controller) - mockRenewer.EXPECT().Renew(gomock.Any(), mockUser, false).Return(nil) - - mockGit := mockscm.NewMockGitService(controller) - mockGit.EXPECT().ListChanges(gomock.Any(), "octocat/hello-world", "a6586b3db244fb6b1198f2b25c213ded5b44f9fa", gomock.Any()).Return(nil, nil, scm.ErrNotFound) - - client := new(scm.Client) - client.Git = mockGit - - service := New(client, mockRenewer) - _, err := service.ListChanges(noContext, mockUser, "octocat/hello-world", "a6586b3db244fb6b1198f2b25c213ded5b44f9fa", "master") - if err != scm.ErrNotFound { - t.Errorf("Want not found error, got %v", err) - } -} - -func TestListChanges_ErrRenew(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{} - - mockRenewer := mock.NewMockRenewer(controller) - mockRenewer.EXPECT().Renew(gomock.Any(), mockUser, false).Return(scm.ErrNotAuthorized) - - service := New(nil, mockRenewer) - _, err := service.ListChanges(noContext, mockUser, "octocat/hello-world", "a6586b3db244fb6b1198f2b25c213ded5b44f9fa", "master") - if err != scm.ErrNotAuthorized { - t.Errorf("Want not authorized error, got %v", err) - } -} diff --git a/service/content/cache/contents.go b/service/content/cache/contents.go deleted file mode 100644 index e10ce49a52..0000000000 --- a/service/content/cache/contents.go +++ /dev/null @@ -1,52 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package cache - -import ( - "context" - "fmt" - - "github.com/drone/drone/core" - - "github.com/hashicorp/golang-lru" -) - -// content key pattern used in the cache, comprised of the -// repository slug, commit and path. -const contentKey = "%s/%s/%s" - -// Contents returns a new FileService that is wrapped -// with an in-memory cache. -func Contents(base core.FileService) core.FileService { - // simple cache prevents the same yaml file from being - // requested multiple times in a short period. 
- cache, _ := lru.New(25) - return &service{ - service: base, - cache: cache, - } -} - -type service struct { - cache *lru.Cache - service core.FileService - user *core.User -} - -func (s *service) Find(ctx context.Context, user *core.User, repo, commit, ref, path string) (*core.File, error) { - key := fmt.Sprintf(contentKey, repo, commit, path) - cached, ok := s.cache.Get(key) - if ok { - return cached.(*core.File), nil - } - file, err := s.service.Find(ctx, user, repo, commit, ref, path) - if err != nil { - return nil, err - } - s.cache.Add(key, file) - return file, nil -} diff --git a/service/content/cache/contents_oss.go b/service/content/cache/contents_oss.go deleted file mode 100644 index 4fa8734990..0000000000 --- a/service/content/cache/contents_oss.go +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build oss - -package cache - -import "github.com/drone/drone/core" - -// Contents returns the default FileService with no caching -// enabled. -func Contents(base core.FileService) core.FileService { - return base -} diff --git a/service/content/cache/contents_test.go b/service/content/cache/contents_test.go deleted file mode 100644 index b623c1be60..0000000000 --- a/service/content/cache/contents_test.go +++ /dev/null @@ -1,100 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
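// Editor's sketch (not part of the original sources): the cache above is a
// plain decorator over any core.FileService, so it composes with the contents
// service elsewhere in this patch; client, renewer, ctx, user, sha and ref are
// placeholders.
//
//	base := contents.New(client, renewer)
//	cached := cache.Contents(base)
//	file, err := cached.Find(ctx, user, "octocat/hello-world", sha, ref, ".drone.yml")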
- -// +build !oss - -package cache - -import ( - "context" - "fmt" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/mock" - "github.com/drone/go-scm/scm" - - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -var noContext = context.Background() - -func TestFind(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{} - mockFile := &core.File{ - Data: []byte("hello world"), - Hash: []byte(""), - } - - mockContents := mock.NewMockFileService(controller) - mockContents.EXPECT().Find(noContext, mockUser, "octocat/hello-world", "a6586b3db244fb6b1198f2b25c213ded5b44f9fa", "master", ".drone.yml").Return(mockFile, nil) - - service := Contents(mockContents).(*service) - - want := &core.File{ - Data: []byte("hello world"), - Hash: []byte(""), - } - - got, err := service.Find(noContext, mockUser, "octocat/hello-world", "a6586b3db244fb6b1198f2b25c213ded5b44f9fa", "master", ".drone.yml") - if err != nil { - t.Error(err) - } - if diff := cmp.Diff(got, want); diff != "" { - t.Errorf(diff) - } - - if len(service.cache.Keys()) == 0 { - t.Errorf("Expect item added to cache") - } -} - -func TestFindError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{} - - mockContents := mock.NewMockFileService(controller) - mockContents.EXPECT().Find(noContext, mockUser, "octocat/hello-world", "a6586b3db244fb6b1198f2b25c213ded5b44f9fa", "master", ".drone.yml").Return(nil, scm.ErrNotFound) - - service := Contents(mockContents).(*service) - - _, err := service.Find(noContext, mockUser, "octocat/hello-world", "a6586b3db244fb6b1198f2b25c213ded5b44f9fa", "master", ".drone.yml") - if err != scm.ErrNotFound { - t.Errorf("Expect not found error") - } -} - -func TestFindCache(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{} - mockFile := &core.File{ - Data: []byte("hello world"), - Hash: []byte(""), - } - - key := fmt.Sprintf(contentKey, "octocat/hello-world", "a6586b3db244fb6b1198f2b25c213ded5b44f9fa", ".drone.yml") - service := Contents(nil).(*service) - service.cache.Add(key, mockFile) - - want := &core.File{ - Data: []byte("hello world"), - Hash: []byte(""), - } - - got, err := service.Find(noContext, mockUser, "octocat/hello-world", "a6586b3db244fb6b1198f2b25c213ded5b44f9fa", "master", ".drone.yml") - if err != nil { - t.Error(err) - } - if diff := cmp.Diff(got, want); diff != "" { - t.Errorf(diff) - } -} diff --git a/service/content/content.go b/service/content/content.go deleted file mode 100644 index 9eeda4a79e..0000000000 --- a/service/content/content.go +++ /dev/null @@ -1,100 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package contents - -import ( - "context" - "strings" - "time" - - "github.com/drone/drone/core" - "github.com/drone/go-scm/scm" -) - -// default number of backoff attempts. 
-var attempts = 3 - -// default time to wait after failed attempt. -var wait = time.Second * 15 - -// New returns a new FileService. -func New(client *scm.Client, renewer core.Renewer) core.FileService { - return &service{ - client: client, - renewer: renewer, - attempts: attempts, - wait: wait, - } -} - -type service struct { - renewer core.Renewer - client *scm.Client - attempts int - wait time.Duration -} - -func (s *service) Find(ctx context.Context, user *core.User, repo, commit, ref, path string) (*core.File, error) { - // TODO(gogs) ability to fetch a yaml by pull request ref. - // it is not currently possible to fetch the yaml - // configuration file from a pull request sha. This - // workaround defaults to master. - if s.client.Driver == scm.DriverGogs && - strings.HasPrefix(ref, "refs/pull") { - commit = "master" - } - // TODO(gogs) ability to fetch a file in tag from commit sha. - // this is a workaround for gogs which does not allow - // fetching a file by commit sha for a tag. This forces - // fetching a file by reference instead. - if s.client.Driver == scm.DriverGogs && - strings.HasPrefix(ref, "refs/tag") { - commit = ref - } - err := s.renewer.Renew(ctx, user, false) - if err != nil { - return nil, err - } - ctx = context.WithValue(ctx, scm.TokenKey{}, &scm.Token{ - Token: user.Token, - Refresh: user.Refresh, - }) - content, err := s.findRetry(ctx, repo, path, commit) - if err != nil { - return nil, err - } - return &core.File{ - Data: content.Data, - Hash: []byte{}, - }, nil -} - -// helper function attempts to get the yaml configuration file -// with backoff on failure. This may be required due to eventual -// consistency issues with the github datastore. -func (s *service) findRetry(ctx context.Context, repo, path, commit string) (content *scm.Content, err error) { - for i := 0; i < s.attempts; i++ { - content, _, err = s.client.Contents.Find(ctx, repo, path, commit) - // if no error is returned we can exit immediately. - if err == nil { - return - } - // wait a few seconds before retry. according to github - // support 30 seconds total should be enough time. we - // try 3 x 15 seconds, giving a total of 45 seconds. - time.Sleep(s.wait) - } - return -} diff --git a/service/content/content_test.go b/service/content/content_test.go deleted file mode 100644 index 157d319f75..0000000000 --- a/service/content/content_test.go +++ /dev/null @@ -1,96 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
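// Editor's note (not part of the original sources): the retry knobs on the
// service above are plain struct fields, so code inside this package can
// shorten the backoff, as the error-path test below does:
//
//	s := New(client, mockRenewer)
//	s.(*service).attempts = 1
//	s.(*service).wait = 0
//
// Outside the package the defaults apply: up to 3 attempts with a 15 second
// pause after each failed attempt, roughly 45 seconds in the worst case.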
- -package contents - -import ( - "context" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/mock" - "github.com/drone/drone/mock/mockscm" - "github.com/drone/go-scm/scm" - "github.com/google/go-cmp/cmp" - - "github.com/golang/mock/gomock" -) - -var noContext = context.Background() - -func TestFind(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{} - mockFile := &scm.Content{ - Path: ".drone.yml", - Data: []byte("hello world"), - } - - mockContents := mockscm.NewMockContentService(controller) - mockContents.EXPECT().Find(gomock.Any(), "octocat/hello-world", ".drone.yml", "a6586b3db244fb6b1198f2b25c213ded5b44f9fa").Return(mockFile, nil, nil) - - mockRenewer := mock.NewMockRenewer(controller) - mockRenewer.EXPECT().Renew(gomock.Any(), mockUser, false) - - client := new(scm.Client) - client.Contents = mockContents - - want := &core.File{ - Data: []byte("hello world"), - Hash: []byte(""), - } - - service := New(client, mockRenewer) - got, err := service.Find(noContext, mockUser, "octocat/hello-world", "a6586b3db244fb6b1198f2b25c213ded5b44f9fa", "master", ".drone.yml") - if err != nil { - t.Error(err) - } - if diff := cmp.Diff(got, want); diff != "" { - t.Errorf(diff) - } -} - -func TestFind_Error(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{} - - mockContents := mockscm.NewMockContentService(controller) - mockContents.EXPECT().Find(gomock.Any(), "octocat/hello-world", ".drone.yml", "a6586b3db244fb6b1198f2b25c213ded5b44f9fa").Return(nil, nil, scm.ErrNotFound) - - mockRenewer := mock.NewMockRenewer(controller) - mockRenewer.EXPECT().Renew(gomock.Any(), mockUser, false) - - client := new(scm.Client) - client.Contents = mockContents - - s := New(client, mockRenewer) - s.(*service).attempts = 1 - s.(*service).wait = 0 - _, err := s.Find(noContext, mockUser, "octocat/hello-world", "a6586b3db244fb6b1198f2b25c213ded5b44f9fa", "master", ".drone.yml") - if err != scm.ErrNotFound { - t.Errorf("Expect not found error, got %s", err) - } -} - -func TestFind_RenewalError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{} - - mockRenewer := mock.NewMockRenewer(controller) - mockRenewer.EXPECT().Renew(gomock.Any(), mockUser, false).Return(scm.ErrNotAuthorized) - - client := new(scm.Client) - - service := New(client, mockRenewer) - _, err := service.Find(noContext, mockUser, "octocat/hello-world", "a6586b3db244fb6b1198f2b25c213ded5b44f9fa", "master", ".drone.yml") - if err == nil { - t.Errorf("Expect error refreshing token") - } -} diff --git a/service/hook/hook.go b/service/hook/hook.go deleted file mode 100644 index eb46419164..0000000000 --- a/service/hook/hook.go +++ /dev/null @@ -1,72 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package hook - -import ( - "context" - "time" - - "github.com/drone/drone/core" - "github.com/drone/go-scm/scm" -) - -// New returns a new HookService. -func New(client *scm.Client, addr string, renew core.Renewer) core.HookService { - return &service{client: client, addr: addr, renew: renew} -} - -type service struct { - renew core.Renewer - client *scm.Client - addr string -} - -func (s *service) Create(ctx context.Context, user *core.User, repo *core.Repository) error { - err := s.renew.Renew(ctx, user, false) - if err != nil { - return err - } - ctx = context.WithValue(ctx, scm.TokenKey{}, &scm.Token{ - Token: user.Token, - Refresh: user.Refresh, - Expires: time.Unix(user.Expiry, 0), - }) - hook := &scm.HookInput{ - Name: "drone", - Target: s.addr + "/hook", - Secret: repo.Signer, - Events: scm.HookEvents{ - Branch: true, - Deployment: true, - PullRequest: true, - Push: true, - Tag: true, - }, - } - return replaceHook(ctx, s.client, repo.Slug, hook) -} - -func (s *service) Delete(ctx context.Context, user *core.User, repo *core.Repository) error { - err := s.renew.Renew(ctx, user, false) - if err != nil { - return err - } - ctx = context.WithValue(ctx, scm.TokenKey{}, &scm.Token{ - Token: user.Token, - Refresh: user.Refresh, - Expires: time.Unix(user.Expiry, 0), - }) - return deleteHook(ctx, s.client, repo.Slug, s.addr) -} diff --git a/service/hook/hook_test.go b/service/hook/hook_test.go deleted file mode 100644 index 10bf069bef..0000000000 --- a/service/hook/hook_test.go +++ /dev/null @@ -1,130 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package hook - -import ( - "context" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/mock" - "github.com/drone/drone/mock/mockscm" - "github.com/drone/go-scm/scm" - - "github.com/golang/mock/gomock" -) - -var noContext = context.Background() - -func TestCreate(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{} - mockHooks := []*scm.Hook{} - mockRepo := &core.Repository{ - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - Signer: "abc123", - } - - hook := &scm.HookInput{ - Name: "drone", - Target: "https://drone.company.com/hook", - Secret: "abc123", - Events: scm.HookEvents{ - Branch: true, - Deployment: true, - PullRequest: true, - Push: true, - Tag: true, - }, - } - - mockRenewer := mock.NewMockRenewer(controller) - mockRenewer.EXPECT().Renew(gomock.Any(), mockUser, false).Return(nil) - - mockRepos := mockscm.NewMockRepositoryService(controller) - mockRepos.EXPECT().ListHooks(gomock.Any(), "octocat/hello-world", gomock.Any()).Return(mockHooks, nil, nil) - mockRepos.EXPECT().CreateHook(gomock.Any(), "octocat/hello-world", hook).Return(nil, nil, nil) - - client := new(scm.Client) - client.Repositories = mockRepos - - service := New(client, "https://drone.company.com", mockRenewer) - err := service.Create(noContext, mockUser, mockRepo) - if err != nil { - t.Error(err) - } -} - -func TestCreate_RenewErr(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{} - - mockRenewer := mock.NewMockRenewer(controller) - mockRenewer.EXPECT().Renew(gomock.Any(), mockUser, false).Return(scm.ErrNotAuthorized) - - service := New(nil, "https://drone.company.com", mockRenewer) - err := service.Create(noContext, mockUser, nil) - if err != scm.ErrNotAuthorized { 
- t.Errorf("Want not authorized error, got %v", err) - } -} - -func TestDelete(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{} - mockHooks := []*scm.Hook{ - { - ID: "1", - Name: "drone", - Target: "https://drone.company.com/hook", - }, - } - mockRepo := &core.Repository{ - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - Signer: "abc123", - } - - mockRenewer := mock.NewMockRenewer(controller) - mockRenewer.EXPECT().Renew(gomock.Any(), mockUser, false).Return(nil) - - mockRepos := mockscm.NewMockRepositoryService(controller) - mockRepos.EXPECT().ListHooks(gomock.Any(), "octocat/hello-world", gomock.Any()).Return(mockHooks, nil, nil) - mockRepos.EXPECT().DeleteHook(gomock.Any(), "octocat/hello-world", "1").Return(nil, nil) - - client := new(scm.Client) - client.Repositories = mockRepos - - service := New(client, "https://drone.company.com", mockRenewer) - err := service.Delete(noContext, mockUser, mockRepo) - if err != nil { - t.Error(err) - } -} - -func TestDelete_RenewErr(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{} - - mockRenewer := mock.NewMockRenewer(controller) - mockRenewer.EXPECT().Renew(gomock.Any(), mockUser, false).Return(scm.ErrNotAuthorized) - - service := New(nil, "https://drone.company.com", mockRenewer) - err := service.Delete(noContext, mockUser, nil) - if err != scm.ErrNotAuthorized { - t.Errorf("Want not authorized error, got %v", err) - } -} diff --git a/service/hook/parser/parse.go b/service/hook/parser/parse.go deleted file mode 100644 index 91b409b06c..0000000000 --- a/service/hook/parser/parse.go +++ /dev/null @@ -1,406 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package parser - -import ( - "errors" - "fmt" - "net/http" - "net/http/httputil" - "os" - "strconv" - "strings" - "time" - - "github.com/drone/drone/core" - "github.com/drone/go-scm/scm" -) - -// TODO(bradrydzewski): stash, push hook missing link -// TODO(bradrydzewski): stash, tag hook missing timestamp -// TODO(bradrydzewski): stash, tag hook missing commit message -// TODO(bradrydzewski): stash, tag hook missing link -// TODO(bradrydzewski): stash, pull request hook missing link -// TODO(bradrydzewski): stash, hooks missing repository clone http url -// TODO(bradrydzewski): stash, hooks missing repository clone ssh url -// TODO(bradrydzewski): stash, hooks missing repository html link - -// TODO(bradrydzewski): gogs, push hook missing author avatar, using sender instead. -// TODO(bradrydzewski): gogs, pull request hook missing commit sha. -// TODO(bradrydzewski): gogs, tag hook missing commit sha. -// TODO(bradrydzewski): gogs, sender missing Name field. -// TODO(bradrydzewski): gogs, push hook missing repository html url - -// TODO(bradrydzewski): gitea, push hook missing author avatar, using sender instead. -// TODO(bradrydzewski): gitea, tag hook missing commit sha. 
-// TODO(bradrydzewski): gitea, sender missing Name field. -// TODO(bradrydzewski): gitea, push hook missing repository html url - -// TODO(bradrydzewski): bitbucket, pull request hook missing author email. -// TODO(bradrydzewski): bitbucket, hooks missing default repository branch. - -// TODO(bradrydzewski): github, push hook timestamp is negative value. -// TODO(bradrydzewski): github, pull request message is empty - -// represents a deleted ref in the github webhook. -const emptyCommit = "0000000000000000000000000000000000000000" - -// this is intended for local testing and instructs the handler -// to print the contents of the hook to stdout. -var debugPrintHook = false - -func init() { - debugPrintHook, _ = strconv.ParseBool( - os.Getenv("DRONE_DEBUG_DUMP_HOOK"), - ) -} - -// New returns a new HookParser. -func New(client *scm.Client) core.HookParser { - return &parser{client} -} - -type parser struct { - client *scm.Client -} - -func (p *parser) Parse(req *http.Request, secretFunc func(string) string) (*core.Hook, *core.Repository, error) { - if debugPrintHook { - // if DRONE_DEBUG_DUMP_HOOK=true print the http.Request - // headers and body to stdout. - out, _ := httputil.DumpRequest(req, true) - os.Stderr.Write(out) - } - - // callback function provides the webhook parser with - // a per-repository secret key used to verify the webhook - // payload signature for authenticity. - fn := func(webhook scm.Webhook) (string, error) { - if webhook == nil { - // HACK(bradrydzewski) if the incoming webhook is nil - // we assume it is an unknown event or action. A more - // permanent fix is to update go-scm to return an - // scm.ErrUnknownAction error. - return "", scm.ErrUnknownEvent - } - repo := webhook.Repository() - slug := scm.Join(repo.Namespace, repo.Name) - secret := secretFunc(slug) - if secret == "" { - return secret, errors.New("Cannot find repository") - } - return secret, nil - } - - payload, err := p.client.Webhooks.Parse(req, fn) - if err == scm.ErrUnknownEvent { - return nil, nil, nil - } - if err != nil { - return nil, nil, err - } - - var repo *core.Repository - var hook *core.Hook - - switch v := payload.(type) { - case *scm.PushHook: - // github sends push hooks when tags and branches are - // deleted. These hooks should be ignored. - if v.Commit.Sha == emptyCommit { - return nil, nil, nil - } - // github sends push hooks when tags are created. The - // push hook contains more information than the tag hook, - // so we choose to use the push hook for tags. 
- if strings.HasPrefix(v.Ref, "refs/tags/") { - hook = &core.Hook{ - Trigger: core.TriggerHook, // core.TriggerHook - Event: core.EventTag, - Action: core.ActionCreate, - Link: v.Commit.Link, - Timestamp: v.Commit.Author.Date.Unix(), - Message: v.Commit.Message, - Before: v.Before, - After: v.Commit.Sha, - Source: scm.TrimRef(v.BaseRef), - Target: scm.TrimRef(v.BaseRef), - Ref: v.Ref, - Author: v.Commit.Author.Login, - AuthorName: v.Commit.Author.Name, - AuthorEmail: v.Commit.Author.Email, - AuthorAvatar: v.Commit.Author.Avatar, - Sender: v.Sender.Login, - } - } else { - hook = &core.Hook{ - Trigger: core.TriggerHook, //core.TriggerHook, - Event: core.EventPush, - Link: v.Commit.Link, - Timestamp: v.Commit.Author.Date.Unix(), - Message: v.Commit.Message, - Before: v.Before, - After: v.Commit.Sha, - Ref: v.Ref, - Source: strings.TrimPrefix(v.Ref, "refs/heads/"), - Target: strings.TrimPrefix(v.Ref, "refs/heads/"), - Author: v.Commit.Author.Login, - AuthorName: v.Commit.Author.Name, - AuthorEmail: v.Commit.Author.Email, - AuthorAvatar: v.Commit.Author.Avatar, - Sender: v.Sender.Login, - } - } - repo = &core.Repository{ - UID: v.Repo.ID, - Namespace: v.Repo.Namespace, - Name: v.Repo.Name, - Slug: scm.Join(v.Repo.Namespace, v.Repo.Name), - Link: v.Repo.Link, - Branch: v.Repo.Branch, - Private: v.Repo.Private, - HTTPURL: v.Repo.Clone, - SSHURL: v.Repo.CloneSSH, - } - // gogs and gitea do not include the author avatar in - // the webhook, but they do include the sender avatar. - // use the sender avatar when necessary. - if hook.AuthorAvatar == "" { - hook.AuthorAvatar = v.Sender.Avatar - } - return hook, repo, nil - case *scm.TagHook: - if v.Action != scm.ActionCreate { - return nil, nil, nil - } - // when a tag is created github sends both a push hook - // and a tag create hook. The push hook contains more - // information, so we choose to use the push hook and - // ignore the native tag hook. - if p.client.Driver == scm.DriverGithub || - p.client.Driver == scm.DriverGitea || - p.client.Driver == scm.DriverGitlab { - return nil, nil, nil - } - - // the tag hook does not include the commit link, message - // or timestamp. In some cases it does not event include - // the sha (gogs). Note that we may need to fetch additional - // details to augment the webhook. - hook = &core.Hook{ - Trigger: core.TriggerHook, // core.TriggerHook, - Event: core.EventTag, - Action: core.ActionCreate, - Link: "", - Timestamp: 0, - Message: "", - After: v.Ref.Sha, - Ref: v.Ref.Name, - Source: v.Ref.Name, - Target: v.Ref.Name, - Author: v.Sender.Login, - AuthorName: v.Sender.Name, - AuthorEmail: v.Sender.Email, - AuthorAvatar: v.Sender.Avatar, - Sender: v.Sender.Login, - } - repo = &core.Repository{ - UID: v.Repo.ID, - Namespace: v.Repo.Namespace, - Name: v.Repo.Name, - Slug: scm.Join(v.Repo.Namespace, v.Repo.Name), - Link: v.Repo.Link, - Branch: v.Repo.Branch, - Private: v.Repo.Private, - HTTPURL: v.Repo.Clone, - SSHURL: v.Repo.CloneSSH, - } - // TODO(bradrydzewski) can we use scm.ExpandRef here? - if !strings.HasPrefix(hook.Ref, "refs/tags/") { - hook.Ref = fmt.Sprintf("refs/tags/%s", hook.Ref) - } - if hook.AuthorAvatar == "" { - hook.AuthorAvatar = v.Sender.Avatar - } - return hook, repo, nil - case *scm.PullRequestHook: - - // TODO(bradrydzewski) cleanup the pr close hook code. 
- if v.Action == scm.ActionClose { - return &core.Hook{ - Trigger: core.TriggerHook, - Event: core.EventPullRequest, - Action: core.ActionClose, - After: v.PullRequest.Sha, - Ref: v.PullRequest.Ref, - }, &core.Repository{ - UID: v.Repo.ID, - Namespace: v.Repo.Namespace, - Name: v.Repo.Name, - Slug: scm.Join(v.Repo.Namespace, v.Repo.Name), - }, nil - } - - if v.Action != scm.ActionOpen && v.Action != scm.ActionSync { - return nil, nil, nil - } - // Pull Requests are not supported for Bitbucket due - // to lack of refs (e.g. refs/pull-requests/42/from). - // Please contact Bitbucket Support if you would like to - // see this feature enabled: - // https://bitbucket.org/site/master/issues/5814/repository-refs-for-pull-requests - if p.client.Driver == scm.DriverBitbucket { - return nil, nil, nil - } - hook = &core.Hook{ - Trigger: core.TriggerHook, // core.TriggerHook, - Event: core.EventPullRequest, - Action: v.Action.String(), - Link: v.PullRequest.Link, - Timestamp: v.PullRequest.Created.Unix(), - Title: v.PullRequest.Title, - Message: v.PullRequest.Body, - Before: v.PullRequest.Base.Sha, - After: v.PullRequest.Sha, - Ref: v.PullRequest.Ref, - Fork: v.PullRequest.Fork, - Source: v.PullRequest.Source, - Target: v.PullRequest.Target, - Author: v.PullRequest.Author.Login, - AuthorName: v.PullRequest.Author.Name, - AuthorEmail: v.PullRequest.Author.Email, - AuthorAvatar: v.PullRequest.Author.Avatar, - Sender: v.Sender.Login, - } - // HACK this is a workaround for github. The pull - // request title is populated, but not the message. - if hook.Message == "" { - hook.Message = hook.Title - } - repo = &core.Repository{ - UID: v.Repo.ID, - Namespace: v.Repo.Namespace, - Name: v.Repo.Name, - Slug: scm.Join(v.Repo.Namespace, v.Repo.Name), - Link: v.Repo.Link, - Branch: v.Repo.Branch, - Private: v.Repo.Private, - HTTPURL: v.Repo.Clone, - SSHURL: v.Repo.CloneSSH, - } - if hook.AuthorAvatar == "" { - hook.AuthorAvatar = v.Sender.Avatar - } - return hook, repo, nil - case *scm.BranchHook: - - // TODO(bradrydzewski) cleanup the branch hook code. 
- if v.Action == scm.ActionDelete { - return &core.Hook{ - Trigger: core.TriggerHook, - Event: core.EventPush, - After: v.Ref.Sha, - Action: core.ActionDelete, - Target: scm.TrimRef(v.Ref.Name), - }, &core.Repository{ - UID: v.Repo.ID, - Namespace: v.Repo.Namespace, - Name: v.Repo.Name, - Slug: scm.Join(v.Repo.Namespace, v.Repo.Name), - }, nil - } - - if v.Action != scm.ActionCreate { - return nil, nil, nil - } - if p.client.Driver != scm.DriverStash { - return nil, nil, nil - } - hook = &core.Hook{ - Trigger: core.TriggerHook, // core.TriggerHook, - Event: core.EventPush, - Link: "", - Timestamp: 0, - Message: "", - After: v.Ref.Sha, - Ref: v.Ref.Name, - Source: v.Ref.Name, - Target: v.Ref.Name, - Author: v.Sender.Login, - AuthorName: v.Sender.Name, - AuthorEmail: v.Sender.Email, - AuthorAvatar: v.Sender.Avatar, - Sender: v.Sender.Login, - } - repo = &core.Repository{ - UID: v.Repo.ID, - Namespace: v.Repo.Namespace, - Name: v.Repo.Name, - Slug: scm.Join(v.Repo.Namespace, v.Repo.Name), - Link: v.Repo.Link, - Branch: v.Repo.Branch, - Private: v.Repo.Private, - HTTPURL: v.Repo.Clone, - SSHURL: v.Repo.CloneSSH, - } - return hook, repo, nil - case *scm.DeployHook: - hook = &core.Hook{ - Trigger: core.TriggerHook, - Event: core.EventPromote, - Link: v.TargetURL, - Timestamp: time.Now().Unix(), - Message: v.Desc, - After: v.Ref.Sha, - Ref: v.Ref.Path, - Source: v.Ref.Name, - Target: v.Ref.Name, - Author: v.Sender.Login, - AuthorName: v.Sender.Name, - AuthorEmail: v.Sender.Email, - AuthorAvatar: v.Sender.Avatar, - Sender: v.Sender.Login, - Deployment: v.Target, - DeploymentID: v.Number, - Params: toMap(v.Data), - } - repo = &core.Repository{ - UID: v.Repo.ID, - Namespace: v.Repo.Namespace, - Name: v.Repo.Name, - Slug: scm.Join(v.Repo.Namespace, v.Repo.Name), - Link: v.Repo.Link, - Branch: v.Repo.Branch, - Private: v.Repo.Private, - HTTPURL: v.Repo.Clone, - SSHURL: v.Repo.CloneSSH, - } - return hook, repo, nil - default: - return nil, nil, nil - } -} - -func toMap(src interface{}) map[string]string { - set, ok := src.(map[string]interface{}) - if !ok { - return nil - } - dst := map[string]string{} - for k, v := range set { - dst[k] = fmt.Sprint(v) - } - return dst -} diff --git a/service/hook/parser/parse_test.go b/service/hook/parser/parse_test.go deleted file mode 100644 index effeab0ef0..0000000000 --- a/service/hook/parser/parse_test.go +++ /dev/null @@ -1,5 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -package parser diff --git a/service/hook/parser/testdata/bitbucket_pull_open.json b/service/hook/parser/testdata/bitbucket_pull_open.json deleted file mode 100644 index 00825c69a4..0000000000 --- a/service/hook/parser/testdata/bitbucket_pull_open.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "Action": "opened", - "Repo": { - "ID": "{bc771cbf-829e-4c4b-b71f-a0eb3ac2b860}", - "Namespace": "brydzewski", - "Name": "foo", - "Perm": null, - "Branch": "", - "Private": true, - "Clone": "https://bitbucket.org/brydzewski/foo.git", - "CloneSSH": "git@bitbucket.org:brydzewski/foo.git", - "Link": "https://bitbucket.org/brydzewski/foo", - "Created": "0001-01-01T00:00:00Z", - "Updated": "0001-01-01T00:00:00Z" - }, - "PullRequest": { - "Number": 1, - "Title": "Awesome new feature", - "Body": "made some changes", - "Sha": "507a576e59b3", - "Ref": "refs/pull-requests/1/from", - "Source": "develop", - "Target": "master", - "Fork": "brydzewski/foo", - "Link": "https://bitbucket.org/brydzewski/foo/pull-requests/1", - "Closed": false, - "Merged": false, - "Author": { - "Login": "brydzewski", - "Name": "Brad Rydzewski", - "Email": "", - "Avatar": "https://bitbucket.org/account/brydzewski/avatar/32/" - }, - "Created": "2018-07-02T21:51:39.492248Z", - "Updated": "2018-07-02T21:51:39.532546Z" - }, - "Sender": { - "Login": "brydzewski", - "Name": "Brad Rydzewski", - "Email": "", - "Avatar": "https://bitbucket.org/account/brydzewski/avatar/32/" - } -} \ No newline at end of file diff --git a/service/hook/parser/testdata/bitbucket_pull_open.json.golden b/service/hook/parser/testdata/bitbucket_pull_open.json.golden deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/service/hook/parser/testdata/bitbucket_push.json b/service/hook/parser/testdata/bitbucket_push.json deleted file mode 100644 index 655904fa17..0000000000 --- a/service/hook/parser/testdata/bitbucket_push.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "Ref": "refs/heads/master", - "Repo": { - "ID": "{bc771cbf-829e-4c4b-b71f-a0eb3ac2b860}", - "Namespace": "brydzewski", - "Name": "foo", - "Perm": null, - "Branch": "", - "Private": true, - "Clone": "https://bitbucket.org/brydzewski/foo.git", - "CloneSSH": "git@bitbucket.org:brydzewski/foo.git", - "Link": "https://bitbucket.org/brydzewski/foo", - "Created": "0001-01-01T00:00:00Z", - "Updated": "0001-01-01T00:00:00Z" - }, - "Commit": { - "Sha": "141977fedf5cf35aa290ac87d4b5177ac4cd9de1", - "Message": "Update README\n", - "Author": { - "Name": "Brad Rydzewski", - "Email": "brad.rydzewski@gmail.com", - "Date": "2018-07-02T20:26:56Z", - "Login": "brydzewski", - "Avatar": "https://bitbucket.org/account/brydzewski/avatar/32/" - }, - "Committer": { - "Name": "Brad Rydzewski", - "Email": "brad.rydzewski@gmail.com", - "Date": "2018-07-02T20:26:56Z", - "Login": "brydzewski", - "Avatar": "https://bitbucket.org/account/brydzewski/avatar/32/" - }, - "Link": "https://bitbucket.org/brydzewski/foo/commits/141977fedf5cf35aa290ac87d4b5177ac4cd9de1" - }, - "Sender": { - "Login": "brydzewski", - "Name": "Brad Rydzewski", - "Email": "", - "Avatar": "https://bitbucket.org/account/brydzewski/avatar/32/" - } -} \ No newline at end of file diff --git a/service/hook/parser/testdata/bitbucket_push.json.golden b/service/hook/parser/testdata/bitbucket_push.json.golden deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/service/hook/parser/testdata/bitbucket_tag.json b/service/hook/parser/testdata/bitbucket_tag.json deleted file mode 100644 index e69de29bb2..0000000000 diff --git 
a/service/hook/parser/testdata/bitbucket_tag.json.golden b/service/hook/parser/testdata/bitbucket_tag.json.golden deleted file mode 100644 index 0f965056db..0000000000 --- a/service/hook/parser/testdata/bitbucket_tag.json.golden +++ /dev/null @@ -1,26 +0,0 @@ -{ - "Ref": { - "Name": "feature/x", - "Sha": "141977fedf5cf35aa290ac87d4b5177ac4cd9de1" - }, - "Repo": { - "ID": "{bc771cbf-829e-4c4b-b71f-a0eb3ac2b860}", - "Namespace": "brydzewski", - "Name": "foo", - "Perm": null, - "Branch": "", - "Private": true, - "Clone": "https://bitbucket.org/brydzewski/foo.git", - "CloneSSH": "git@bitbucket.org:brydzewski/foo.git", - "Link": "https://bitbucket.org/brydzewski/foo", - "Created": "0001-01-01T00:00:00Z", - "Updated": "0001-01-01T00:00:00Z" - }, - "Action": "created", - "Sender": { - "Login": "brydzewski", - "Name": "Brad Rydzewski", - "Email": "", - "Avatar": "https://bitbucket.org/account/brydzewski/avatar/32/" - } -} \ No newline at end of file diff --git a/service/hook/parser/testdata/gitea_pull_open.json b/service/hook/parser/testdata/gitea_pull_open.json deleted file mode 100644 index 3226f79720..0000000000 --- a/service/hook/parser/testdata/gitea_pull_open.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "Action": "opened", - "Repo": { - "ID": "6589", - "Namespace": "jcitizen", - "Name": "my-repo", - "Perm": { - "Pull": false, - "Push": false, - "Admin": false - }, - "Branch": "master", - "Private": false, - "Clone": "https://try.gitea.io/jcitizen/my-repo.git", - "CloneSSH": "git@try.gitea.io:jcitizen/my-repo.git", - "Link": "", - "Created": "0001-01-01T00:00:00Z", - "Updated": "0001-01-01T00:00:00Z" - }, - "PullRequest": { - "Number": 1, - "Title": "Add License File", - "Body": "Using a BSD License", - "Sha": "2eba238e33607c1fa49253182e9fff42baafa1eb", - "Ref": "refs/pull/1/head", - "Source": "feature", - "Target": "master", - "Fork": "jcitizen/my-repo", - "Link": "https://try.gitea.io/jcitizen/my-repo/pulls/1", - "Closed": false, - "Merged": false, - "Author": { - "Login": "jcitizen", - "Name": "", - "Email": "jane@example.com", - "Avatar": "https://secure.gravatar.com/avatar/66f07ff48e6a9cb393de7a34e03bb52a?d=identicon" - }, - "Created": "0001-01-01T00:00:00Z", - "Updated": "0001-01-01T00:00:00Z" - }, - "Sender": { - "Login": "jcitizen", - "Name": "", - "Email": "jane@example.com", - "Avatar": "https://secure.gravatar.com/avatar/66f07ff48e6a9cb393de7a34e03bb52a?d=identicon" - } -} \ No newline at end of file diff --git a/service/hook/parser/testdata/gitea_pull_open.json.golden b/service/hook/parser/testdata/gitea_pull_open.json.golden deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/service/hook/parser/testdata/gitea_push.json b/service/hook/parser/testdata/gitea_push.json deleted file mode 100644 index 2f9b3ca4b4..0000000000 --- a/service/hook/parser/testdata/gitea_push.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "Ref": "refs/heads/master", - "Repo": { - "ID": "61", - "Namespace": "gogits", - "Name": "hello-world", - "Perm": {}, - "Branch": "master", - "Private": true, - "Clone": "http://try.gitea.io/gogits/hello-world.git", - "CloneSSH": "git@localhost:gogits/hello-world.git", - "Link": "", - "Created": "0001-01-01T00:00:00Z", - "Updated": "0001-01-01T00:00:00Z" - }, - "Commit": { - "Sha": "4522cbcefc20728a5b72b3a86af35e608622c514", - "Message": "Updated readme\n", - "Author": { - "Name": "Unknwon", - "Email": "noreply@gogs.io", - "Date": "2017-12-09T01:35:07Z", - "Login": "unknwon", - "Avatar": "" - }, - "Committer": { - "Name": "Unknwon", - "Email": "noreply@gogs.io", - 
"Date": "2017-12-09T01:35:07Z", - "Login": "unknwon", - "Avatar": "" - }, - "Link": "http://try.gitea.io/gogits/hello-world/compare/9836a96a253cce25d17988fcf41b8c4205cf779f...4522cbcefc20728a5b72b3a86af35e608622c514" - }, - "Sender": { - "Login": "unknwon", - "Name": "", - "Email": "noreply@gogs.io", - "Avatar": "https://secure.gravatar.com/avatar/8c58a0be77ee441bb8f8595b7f1b4e87" - } -} diff --git a/service/hook/parser/testdata/gitea_push.json.golden b/service/hook/parser/testdata/gitea_push.json.golden deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/service/hook/parser/testdata/gitea_tag.json b/service/hook/parser/testdata/gitea_tag.json deleted file mode 100644 index dacc2ac34e..0000000000 --- a/service/hook/parser/testdata/gitea_tag.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "Ref": { - "Name": "v1.0.0", - "Sha": "" - }, - "Repo": { - "ID": "61", - "Namespace": "gogits", - "Name": "hello-world", - "Perm": {}, - "Branch": "master", - "Private": true, - "Clone": "http://try.gitea.io/gogits/hello-world.git", - "CloneSSH": "git@localhost:gogits/hello-world.git", - "Link": "", - "Created": "0001-01-01T00:00:00Z", - "Updated": "0001-01-01T00:00:00Z" - }, - "Action": "created", - "Sender": { - "Login": "unknwon", - "Name": "", - "Email": "noreply@gogs.io", - "Avatar": "https://secure.gravatar.com/avatar/8c58a0be77ee441bb8f8595b7f1b4e87" - } -} diff --git a/service/hook/parser/testdata/gitea_tag.json.golden b/service/hook/parser/testdata/gitea_tag.json.golden deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/service/hook/parser/testdata/github_pull_create.json b/service/hook/parser/testdata/github_pull_create.json deleted file mode 100644 index 100ff8162b..0000000000 --- a/service/hook/parser/testdata/github_pull_create.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "Action": "opened", - "Repo": { - "ID": "13933572", - "Namespace": "bradrydzewski", - "Name": "drone-test-go", - "Perm": null, - "Branch": "master", - "Private": true, - "Clone": "https://github.com/bradrydzewski/drone-test-go.git", - "CloneSSH": "git@github.com:bradrydzewski/drone-test-go.git", - "Link": "https://github.com/bradrydzewski/drone-test-go", - "Created": "0001-01-01T00:00:00Z", - "Updated": "0001-01-01T00:00:00Z" - }, - "PullRequest": { - "Number": 1, - "Title": "Update .drone.yml", - "Body": "", - "Sha": "d2b75aa7797ec26b088fa2dd527e9d2c052fcedd", - "Ref": "refs/pull/1/head", - "Source": "master", - "Target": "bradrydzewski-patch-1", - "Fork": "bradrydzewski/drone-test-go", - "Link": "https://github.com/bradrydzewski/drone-test-go/pull/1.diff", - "Closed": false, - "Merged": false, - "Author": { - "Login": "bradrydzewski", - "Name": "", - "Email": "", - "Avatar": "https://avatars1.githubusercontent.com/u/817538?v=4" - }, - "Created": "2018-06-22T23:54:09Z", - "Updated": "2018-06-22T23:54:09Z" - }, - "Sender": { - "Login": "bradrydzewski", - "Name": "", - "Email": "", - "Avatar": "https://avatars1.githubusercontent.com/u/817538?v=4" - } -} \ No newline at end of file diff --git a/service/hook/parser/testdata/github_pull_create.json.golden b/service/hook/parser/testdata/github_pull_create.json.golden deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/service/hook/parser/testdata/github_pull_sync.json b/service/hook/parser/testdata/github_pull_sync.json deleted file mode 100644 index 44f1398cc8..0000000000 --- a/service/hook/parser/testdata/github_pull_sync.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "Action": "synchronized", - "Repo": { - "ID": "13933572", - "Namespace": 
"bradrydzewski", - "Name": "drone-test-go", - "Perm": null, - "Branch": "master", - "Private": true, - "Clone": "https://github.com/bradrydzewski/drone-test-go.git", - "CloneSSH": "git@github.com:bradrydzewski/drone-test-go.git", - "Link": "https://github.com/bradrydzewski/drone-test-go", - "Created": "0001-01-01T00:00:00Z", - "Updated": "0001-01-01T00:00:00Z" - }, - "PullRequest": { - "Number": 1, - "Title": "Update .drone.yml", - "Body": "this is an edit", - "Sha": "8102e371cd01cf668893cb2d04a04d52331b1dc9", - "Ref": "refs/pull/1/head", - "Source": "master", - "Target": "bradrydzewski-patch-1", - "Fork": "bradrydzewski/drone-test-go", - "Link": "https://github.com/bradrydzewski/drone-test-go/pull/1.diff", - "Closed": false, - "Merged": false, - "Author": { - "Login": "bradrydzewski", - "Name": "", - "Email": "", - "Avatar": "https://avatars1.githubusercontent.com/u/817538?v=4" - }, - "Created": "2018-06-22T23:54:09Z", - "Updated": "2018-06-25T19:21:45Z" - }, - "Sender": { - "Login": "bradrydzewski", - "Name": "", - "Email": "", - "Avatar": "https://avatars1.githubusercontent.com/u/817538?v=4" - } -} \ No newline at end of file diff --git a/service/hook/parser/testdata/github_pull_sync.json.golden b/service/hook/parser/testdata/github_pull_sync.json.golden deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/service/hook/parser/testdata/github_push.json b/service/hook/parser/testdata/github_push.json deleted file mode 100644 index 067a0c4ebb..0000000000 --- a/service/hook/parser/testdata/github_push.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "Ref": "refs/heads/master", - "Repo": { - "ID": "135493233", - "Namespace": "Codertocat", - "Name": "Hello-World", - "Perm": null, - "Branch": "master", - "Private": false, - "Clone": "https://github.com/Codertocat/Hello-World.git", - "CloneSSH": "git@github.com:Codertocat/Hello-World.git", - "Link": "https://github.com/Codertocat/Hello-World", - "Created": "0001-01-01T00:00:00Z", - "Updated": "0001-01-01T00:00:00Z" - }, - "Commit": { - "Sha": "199eddf46df50de8d02e99bf1c5fdb4101338224", - "Message": "Update README", - "Author": { - "Name": "Codertocat", - "Email": "21031067+Codertocat@users.noreply.github.com", - "Date": "0001-01-01T00:00:00Z", - "Login": "Codertocat", - "Avatar": "" - }, - "Committer": { - "Name": "GitHub", - "Email": "noreply@github.com", - "Date": "0001-01-01T00:00:00Z", - "Login": "web-flow", - "Avatar": "" - }, - "Link": "https://github.com/Codertocat/Hello-World/compare/a10867b14bb7...000000000000" - }, - "Sender": { - "Login": "Codertocat", - "Name": "", - "Email": "", - "Avatar": "https://avatars1.githubusercontent.com/u/21031067?v=4" - } -} \ No newline at end of file diff --git a/service/hook/parser/testdata/github_push.json.golden b/service/hook/parser/testdata/github_push.json.golden deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/service/hook/parser/testdata/github_push_tag.json b/service/hook/parser/testdata/github_push_tag.json deleted file mode 100644 index 247c48c0fc..0000000000 --- a/service/hook/parser/testdata/github_push_tag.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "Ref": "refs/tags/v0.0.1", - "Repo": { - "ID": "13933572", - "Namespace": "bradrydzewski", - "Name": "drone-test-go", - "Perm": null, - "Branch": "master", - "Private": true, - "Clone": "https://github.com/bradrydzewski/drone-test-go.git", - "CloneSSH": "git@github.com:bradrydzewski/drone-test-go.git", - "Link": "https://github.com/bradrydzewski/drone-test-go", - "Created": "0001-01-01T00:00:00Z", - "Updated": 
"0001-01-01T00:00:00Z" - }, - "Commit": { - "Sha": "d2b75aa7797ec26b088fa2dd527e9d2c052fcedd", - "Message": "Update .drone.yml", - "Author": { - "Name": "Brad Rydzewski", - "Email": "brad.rydzewski@gmail.com", - "Date": "0001-01-01T00:00:00Z", - "Login": "bradrydzewski", - "Avatar": "" - }, - "Committer": { - "Name": "GitHub", - "Email": "noreply@github.com", - "Date": "0001-01-01T00:00:00Z", - "Login": "web-flow", - "Avatar": "" - }, - "Link": "https://github.com/bradrydzewski/drone-test-go/compare/v0.0.1" - }, - "Sender": { - "Login": "bradrydzewski", - "Name": "", - "Email": "", - "Avatar": "https://avatars1.githubusercontent.com/u/817538?v=4" - } -} \ No newline at end of file diff --git a/service/hook/parser/testdata/github_push_tag.json.golden b/service/hook/parser/testdata/github_push_tag.json.golden deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/service/hook/parser/testdata/gitlab_pull_open.json b/service/hook/parser/testdata/gitlab_pull_open.json deleted file mode 100644 index 6558c1cd22..0000000000 --- a/service/hook/parser/testdata/gitlab_pull_open.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "Action": "opened", - "Repo": { - "ID": "4861503", - "Namespace": "sytses", - "Name": "hello-world", - "Perm": null, - "Branch": "master", - "Private": false, - "Clone": "https://gitlab.com/gitlab-org/hello-world.git", - "CloneSSH": "git@gitlab.com:gitlab-org/hello-world.git", - "Link": "https://gitlab.com/gitlab-org/hello-world", - "Created": "0001-01-01T00:00:00Z", - "Updated": "0001-01-01T00:00:00Z" - }, - "PullRequest": { - "Number": 1, - "Title": "update readme", - "Body": "adding build instructions to readme", - "Sha": "c4c79227ed610f1151f05bbc5be33b4f340d39c8", - "Ref": "refs/merge-requests/1/head", - "Source": "feature", - "Target": "master", - "Fork": "sytses/hello-world", - "Link": "https://gitlab.com/gitlab-org/hello-world/merge_requests/1", - "Closed": false, - "Merged": false, - "Author": { - "Login": "sytses", - "Name": "Sid Sijbrandij", - "Email": "", - "Avatar": "https://secure.gravatar.com/avatar/8c58a0be77ee441bb8f8595b7f1b4e87?s=80\u0026d=identicon" - }, - "Created": "0001-01-01T00:00:00Z", - "Updated": "0001-01-01T00:00:00Z" - }, - "Sender": { - "Login": "sytses", - "Name": "Sid Sijbrandij", - "Email": "", - "Avatar": "https://secure.gravatar.com/avatar/8c58a0be77ee441bb8f8595b7f1b4e87?s=80\u0026d=identicon" - } -} \ No newline at end of file diff --git a/service/hook/parser/testdata/gitlab_pull_open.json.golden b/service/hook/parser/testdata/gitlab_pull_open.json.golden deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/service/hook/parser/testdata/gitlab_push.json b/service/hook/parser/testdata/gitlab_push.json deleted file mode 100644 index 93716b63da..0000000000 --- a/service/hook/parser/testdata/gitlab_push.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "Ref": "refs/heads/master", - "Repo": { - "ID": "4861503", - "Namespace": "sytses", - "Name": "hello-world", - "Perm": null, - "Branch": "master", - "Private": false, - "Clone": "https://gitlab.com/gitlab-org/hello-world.git", - "CloneSSH": "git@gitlab.com:gitlab-org/hello-world.git", - "Link": "https://gitlab.com/gitlab-org/hello-world", - "Created": "0001-01-01T00:00:00Z", - "Updated": "0001-01-01T00:00:00Z" - }, - "Commit": { - "Sha": "2adc9465c4edfc33834e173fe89436a7cb899a1d", - "Message": "added readme\n", - "Author": { - "Name": "Sid Sijbrandij", - "Email": "noreply@gitlab.com", - "Date": "0001-01-01T00:00:00Z", - "Login": "sytses", - "Avatar": 
"https://secure.gravatar.com/avatar/8c58a0be77ee441bb8f8595b7f1b4e87?s=80\u0026d=identicon" - }, - "Committer": { - "Name": "Sid Sijbrandij", - "Email": "noreply@gitlab.com", - "Date": "0001-01-01T00:00:00Z", - "Login": "sytses", - "Avatar": "https://secure.gravatar.com/avatar/8c58a0be77ee441bb8f8595b7f1b4e87?s=80\u0026d=identicon" - }, - "Link": "https://gitlab.com/gitlab-org/hello-world/commit/2adc9465c4edfc33834e173fe89436a7cb899a1d" - }, - "Sender": { - "Login": "sytses", - "Name": "Sid Sijbrandij", - "Email": "noreply@gitlab.com", - "Avatar": "https://secure.gravatar.com/avatar/8c58a0be77ee441bb8f8595b7f1b4e87?s=80\u0026d=identicon" - } -} \ No newline at end of file diff --git a/service/hook/parser/testdata/gitlab_push.json.golden b/service/hook/parser/testdata/gitlab_push.json.golden deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/service/hook/parser/testdata/gitlab_tag.json b/service/hook/parser/testdata/gitlab_tag.json deleted file mode 100644 index c3c09576fb..0000000000 --- a/service/hook/parser/testdata/gitlab_tag.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "Ref": { - "Name": "v1.0.0", - "Sha": "2adc9465c4edfc33834e173fe89436a7cb899a1d" - }, - "Repo": { - "ID": "4861503", - "Namespace": "sytses", - "Name": "hello-world", - "Perm": null, - "Branch": "master", - "Private": false, - "Clone": "https://gitlab.com/gitlab-org/hello-world.git", - "CloneSSH": "git@gitlab.com:gitlab-org/hello-world.git", - "Link": "https://gitlab.com/gitlab-org/hello-world", - "Created": "0001-01-01T00:00:00Z", - "Updated": "0001-01-01T00:00:00Z" - }, - "Action": "created", - "Sender": { - "Login": "sytses", - "Name": "Sid Sijbrandij", - "Email": "noreply@gitlab.com", - "Avatar": "https://secure.gravatar.com/avatar/8c58a0be77ee441bb8f8595b7f1b4e87?s=80\u0026d=identicon" - } -} \ No newline at end of file diff --git a/service/hook/parser/testdata/gitlab_tag.json.golden b/service/hook/parser/testdata/gitlab_tag.json.golden deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/service/hook/parser/testdata/gogs_pull_create.json b/service/hook/parser/testdata/gogs_pull_create.json deleted file mode 100644 index eb79219f58..0000000000 --- a/service/hook/parser/testdata/gogs_pull_create.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "Action": "opened", - "Repo": { - "ID": "61", - "Namespace": "gogits", - "Name": "hello-world", - "Perm": {}, - "Branch": "master", - "Private": true, - "Clone": "http://try.gogs.io/gogits/hello-world.git", - "CloneSSH": "git@localhost:gogits/hello-world.git", - "Link": "", - "Created": "0001-01-01T00:00:00Z", - "Updated": "0001-01-01T00:00:00Z" - }, - "PullRequest": { - "Number": 2, - "Title": "huge improvements", - "Body": "", - "Sha": "", - "Ref": "refs/pull/2/head", - "Source": "feature", - "Target": "master", - "Fork": "gogits/hello-world", - "Link": "http://try.gogs.io/gogits/hello-world/pulls/2", - "Closed": false, - "Merged": false, - "Author": { - "Login": "unknwon", - "Name": "", - "Email": "noreply@gogs.io", - "Avatar": "https://secure.gravatar.com/avatar/8c58a0be77ee441bb8f8595b7f1b4e87" - }, - "Created": "0001-01-01T00:00:00Z", - "Updated": "0001-01-01T00:00:00Z" - }, - "Sender": { - "Login": "unknwon", - "Name": "", - "Email": "noreply@gogs.io", - "Avatar": "https://secure.gravatar.com/avatar/8c58a0be77ee441bb8f8595b7f1b4e87" - } -} diff --git a/service/hook/parser/testdata/gogs_push.json b/service/hook/parser/testdata/gogs_push.json deleted file mode 100644 index 15bb470f9d..0000000000 --- a/service/hook/parser/testdata/gogs_push.json +++ 
/dev/null @@ -1,41 +0,0 @@ -{ - "Ref": "refs/heads/master", - "Repo": { - "ID": "61", - "Namespace": "gogits", - "Name": "hello-world", - "Perm": {}, - "Branch": "master", - "Private": true, - "Clone": "http://try.gogs.io/gogits/hello-world.git", - "CloneSSH": "git@localhost:gogits/hello-world.git", - "Link": "", - "Created": "0001-01-01T00:00:00Z", - "Updated": "0001-01-01T00:00:00Z" - }, - "Commit": { - "Sha": "4522cbcefc20728a5b72b3a86af35e608622c514", - "Message": "Updated readme\n", - "Author": { - "Name": "Unknwon", - "Email": "noreply@gogs.io", - "Date": "2017-12-09T01:35:07Z", - "Login": "unknwon", - "Avatar": "" - }, - "Committer": { - "Name": "Unknwon", - "Email": "noreply@gogs.io", - "Date": "2017-12-09T01:35:07Z", - "Login": "unknwon", - "Avatar": "" - }, - "Link": "http://try.gogs.io/gogits/hello-world/compare/9836a96a253cce25d17988fcf41b8c4205cf779f...4522cbcefc20728a5b72b3a86af35e608622c514" - }, - "Sender": { - "Login": "unknwon", - "Name": "", - "Email": "noreply@gogs.io", - "Avatar": "https://secure.gravatar.com/avatar/8c58a0be77ee441bb8f8595b7f1b4e87" - } -} diff --git a/service/hook/parser/testdata/gogs_push.json.golden b/service/hook/parser/testdata/gogs_push.json.golden deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/service/hook/parser/testdata/gogs_tag.json b/service/hook/parser/testdata/gogs_tag.json deleted file mode 100644 index 4f9ee41e13..0000000000 --- a/service/hook/parser/testdata/gogs_tag.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "Ref": { - "Name": "v1.0.0", - "Sha": "" - }, - "Repo": { - "ID": "61", - "Namespace": "gogits", - "Name": "hello-world", - "Perm": {}, - "Branch": "master", - "Private": true, - "Clone": "http://try.gogs.io/gogits/hello-world.git", - "CloneSSH": "git@localhost:gogits/hello-world.git", - "Link": "", - "Created": "0001-01-01T00:00:00Z", - "Updated": "0001-01-01T00:00:00Z" - }, - "Action": "created", - "Sender": { - "Login": "unknwon", - "Name": "", - "Email": "noreply@gogs.io", - "Avatar": "https://secure.gravatar.com/avatar/8c58a0be77ee441bb8f8595b7f1b4e87" - } -} diff --git a/service/hook/parser/testdata/gogs_tag.json.golden b/service/hook/parser/testdata/gogs_tag.json.golden deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/service/hook/parser/testdata/stash_pull_create.json b/service/hook/parser/testdata/stash_pull_create.json deleted file mode 100644 index 5d29c8f493..0000000000 --- a/service/hook/parser/testdata/stash_pull_create.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "Action": "opened", - "Repo": { - "ID": "1", - "Namespace": "PRJ", - "Name": "my-repo", - "Perm": null, - "Branch": "master", - "Private": true, - "Clone": "", - "CloneSSH": "", - "Link": "", - "Created": "0001-01-01T00:00:00Z", - "Updated": "0001-01-01T00:00:00Z" - }, - "PullRequest": { - "Number": 2, - "Title": "added LICENSE", - "Body": "added BSD license text", - "Sha": "208b0a5c05eddadad01f2aed8802fe0c3b3eaf5e", - "Ref": "refs/pull-requests/2/from", - "Source": "develop", - "Target": "master", - "Fork": "PRJ/my-repo", - "Link": "", - "Closed": false, - "Merged": false, - "Author": { - "Login": "jcitizen", - "Name": "Jane Citizen", - "Email": "jane@example.com", - "Avatar": "https://www.gravatar.com/avatar/9e26471d35a78862c17e467d87cddedf.jpg" - }, - "Created": "2018-07-05T12:21:30-07:00", - "Updated": "2018-07-05T12:21:30-07:00" - }, - "Sender": { - "Login": "jcitizen", - "Name": "Jane Citizen", - "Email": "jane@example.com", - "Avatar": "https://www.gravatar.com/avatar/9e26471d35a78862c17e467d87cddedf.jpg" - } -} \ No 
newline at end of file diff --git a/service/hook/parser/testdata/stash_pull_create.json.golden b/service/hook/parser/testdata/stash_pull_create.json.golden deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/service/hook/parser/testdata/stash_push.json b/service/hook/parser/testdata/stash_push.json deleted file mode 100644 index 989746d4a0..0000000000 --- a/service/hook/parser/testdata/stash_push.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "Ref": "refs/heads/master", - "Repo": { - "ID": "1", - "Namespace": "PRJ", - "Name": "my-repo", - "Perm": null, - "Branch": "master", - "Private": true, - "Clone": "", - "CloneSSH": "", - "Link": "", - "Created": "0001-01-01T00:00:00Z", - "Updated": "0001-01-01T00:00:00Z" - }, - "Commit": { - "Sha": "823b2230a56056231c9425d63758fa87078a66b4", - "Message": "", - "Author": { - "Name": "Jane Citizen", - "Email": "jane@example.com", - "Date": "2018-07-05T18:22:00Z", - "Login": "jcitizen", - "Avatar": "https://www.gravatar.com/avatar/9e26471d35a78862c17e467d87cddedf.jpg" - }, - "Committer": { - "Name": "Jane Citizen", - "Email": "jane@example.com", - "Date": "2018-07-05T18:22:00Z", - "Login": "jcitizen", - "Avatar": "https://www.gravatar.com/avatar/9e26471d35a78862c17e467d87cddedf.jpg" - }, - "Link": "" - }, - "Sender": { - "Login": "jcitizen", - "Name": "Jane Citizen", - "Email": "jane@example.com", - "Avatar": "https://www.gravatar.com/avatar/9e26471d35a78862c17e467d87cddedf.jpg" - } -} \ No newline at end of file diff --git a/service/hook/parser/testdata/stash_push.json.golden b/service/hook/parser/testdata/stash_push.json.golden deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/service/hook/parser/testdata/stash_push_tag.json b/service/hook/parser/testdata/stash_push_tag.json deleted file mode 100644 index 63af99d1ff..0000000000 --- a/service/hook/parser/testdata/stash_push_tag.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "Ref": { - "Name": "v1.1.0", - "Sha": "823b2230a56056231c9425d63758fa87078a66b4" - }, - "Repo": { - "ID": "1", - "Namespace": "PRJ", - "Name": "my-repo", - "Perm": null, - "Branch": "master", - "Private": true, - "Clone": "", - "CloneSSH": "", - "Link": "", - "Created": "0001-01-01T00:00:00Z", - "Updated": "0001-01-01T00:00:00Z" - }, - "Action": "created", - "Sender": { - "Login": "jcitizen", - "Name": "Jane Citizen", - "Email": "jane@example.com", - "Avatar": "https://www.gravatar.com/avatar/9e26471d35a78862c17e467d87cddedf.jpg" - } -} \ No newline at end of file diff --git a/service/hook/parser/testdata/stash_push_tag.json.golden b/service/hook/parser/testdata/stash_push_tag.json.golden deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/service/hook/util.go b/service/hook/util.go deleted file mode 100644 index da689c8cc1..0000000000 --- a/service/hook/util.go +++ /dev/null @@ -1,60 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package hook - -import ( - "context" - "net/url" - - "github.com/drone/go-scm/scm" -) - -func replaceHook(ctx context.Context, client *scm.Client, repo string, hook *scm.HookInput) error { - if err := deleteHook(ctx, client, repo, hook.Target); err != nil { - return err - } - _, _, err := client.Repositories.CreateHook(ctx, repo, hook) - return err -} - -func deleteHook(ctx context.Context, client *scm.Client, repo, target string) error { - u, _ := url.Parse(target) - h, err := findHook(ctx, client, repo, u.Host) - if err != nil { - return err - } - if h == nil { - return nil - } - _, err = client.Repositories.DeleteHook(ctx, repo, h.ID) - return err -} - -func findHook(ctx context.Context, client *scm.Client, repo, host string) (*scm.Hook, error) { - hooks, _, err := client.Repositories.ListHooks(ctx, repo, scm.ListOptions{Size: 100}) - if err != nil { - return nil, err - } - for _, hook := range hooks { - u, err := url.Parse(hook.Target) - if err != nil { - continue - } - if u.Host == host { - return hook, nil - } - } - return nil, nil -} diff --git a/service/hook/util_test.go b/service/hook/util_test.go deleted file mode 100644 index 0515452a5b..0000000000 --- a/service/hook/util_test.go +++ /dev/null @@ -1,176 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package hook - -import ( - "context" - "io" - "testing" - - "github.com/drone/drone/mock/mockscm" - "github.com/drone/go-scm/scm" - - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -func TestFindHook(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - hooks := []*scm.Hook{ - {Target: "http://192.168.0.%31/hook"}, - {Target: "https://drone.company.com/hook"}, - } - remote := mockscm.NewMockRepositoryService(controller) - remote.EXPECT().ListHooks(gomock.Any(), "octocat/hello-world", gomock.Any()).Return(hooks, nil, nil) - - client := new(scm.Client) - client.Repositories = remote - - hook, err := findHook(context.Background(), client, "octocat/hello-world", "drone.company.com") - if err != nil { - t.Error(err) - } - - if diff := cmp.Diff(hook, hooks[1]); len(diff) > 0 { - t.Errorf(diff) - } -} - -func TestFindHook_ListError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - remote := mockscm.NewMockRepositoryService(controller) - remote.EXPECT().ListHooks(gomock.Any(), "octocat/hello-world", gomock.Any()).Return(nil, nil, io.EOF) - - client := new(scm.Client) - client.Repositories = remote - - _, err := findHook(context.Background(), client, "octocat/hello-world", "core.company.com") - if err == nil { - t.Errorf("Want hook request failure to return error") - } -} - -func TestReplaceHook_CreateHook(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - hooks := []*scm.Hook{} - hookInput := &scm.HookInput{ - Target: "https://drone.company.com/hook", - } - - remote := mockscm.NewMockRepositoryService(controller) - remote.EXPECT().ListHooks(gomock.Any(), "octocat/hello-world", gomock.Any()).Return(hooks, nil, nil) - remote.EXPECT().CreateHook(gomock.Any(), "octocat/hello-world", hookInput).Return(nil, nil, nil) - - client := new(scm.Client) - client.Repositories = remote - - err := replaceHook(context.Background(), client, "octocat/hello-world", hookInput) - if err != nil { - t.Error(err) - } -} - -func TestReplaceHook_UpdateHook(t *testing.T) { - controller := 
gomock.NewController(t) - defer controller.Finish() - - hooks := []*scm.Hook{ - { - ID: "1", - Target: "https://drone.company.com/hook", - }, - } - hookInput := &scm.HookInput{ - Target: "https://drone.company.com/hook", - } - - remote := mockscm.NewMockRepositoryService(controller) - remote.EXPECT().ListHooks(gomock.Any(), "octocat/hello-world", gomock.Any()).Return(hooks, nil, nil) - remote.EXPECT().DeleteHook(gomock.Any(), "octocat/hello-world", "1").Return(nil, nil) - remote.EXPECT().CreateHook(gomock.Any(), "octocat/hello-world", hookInput).Return(nil, nil, nil) - - client := new(scm.Client) - client.Repositories = remote - - err := replaceHook(context.Background(), client, "octocat/hello-world", hookInput) - if err != nil { - t.Error(err) - } -} - -func TestReplaceHook_DeleteError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - hooks := []*scm.Hook{ - { - ID: "1", - Target: "https://drone.company.com/hook", - }, - } - hookInput := &scm.HookInput{ - Target: "https://drone.company.com/hook", - } - - remote := mockscm.NewMockRepositoryService(controller) - remote.EXPECT().ListHooks(gomock.Any(), "octocat/hello-world", gomock.Any()).Return(hooks, nil, nil) - remote.EXPECT().DeleteHook(gomock.Any(), "octocat/hello-world", "1").Return(nil, io.EOF) - - client := new(scm.Client) - client.Repositories = remote - - err := replaceHook(context.Background(), client, "octocat/hello-world", hookInput) - if err == nil { - t.Errorf("Expect error if hook deletion fails") - } -} - -func TestReplaceHook_DeleteFindError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - hookInput := &scm.HookInput{ - Target: "https://drone.company.com/hook", - } - - remote := mockscm.NewMockRepositoryService(controller) - remote.EXPECT().ListHooks(gomock.Any(), "octocat/hello-world", gomock.Any()).Return(nil, nil, io.EOF) - - client := new(scm.Client) - client.Repositories = remote - - err := replaceHook(context.Background(), client, "octocat/hello-world", hookInput) - if err == nil { - t.Errorf("Expect error if hook deletion fails") - } -} - -func TestReplaceHook_CreateError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - hooks := []*scm.Hook{} - hookInput := &scm.HookInput{ - Target: "https://drone.company.com/hook", - } - - remote := mockscm.NewMockRepositoryService(controller) - remote.EXPECT().ListHooks(gomock.Any(), "octocat/hello-world", gomock.Any()).Return(hooks, nil, nil) - remote.EXPECT().CreateHook(gomock.Any(), "octocat/hello-world", hookInput).Return(nil, nil, io.EOF) - - client := new(scm.Client) - client.Repositories = remote - - err := replaceHook(context.Background(), client, "octocat/hello-world", hookInput) - if err == nil { - t.Errorf("Expect error if hook creation fails") - } -} diff --git a/service/license/load.go b/service/license/load.go deleted file mode 100644 index 8163dd31f0..0000000000 --- a/service/license/load.go +++ /dev/null @@ -1,89 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. - -// +build !nolimit -// +build !oss - -package license - -import ( - "encoding/json" - "strings" - - "github.com/drone/drone/core" - "github.com/drone/go-license/license" - "github.com/drone/go-license/license/licenseutil" -) - -// embedded public key used to verify license signatures. -var publicKey = []byte("GB/hFnXEg63vDZ2W6mKFhLxZTuxMrlN/C/0iVZ2LfPQ=") - -// License renewal endpoint. -const licenseEndpoint = "https://license.drone.io/api/v1/license/renew" - -// Trial returns a default license with trial terms based -// on the source code management system. -func Trial(provider string) *core.License { - switch provider { - case "gitea", "gogs": - return &core.License{ - Kind: core.LicenseTrial, - Repos: 0, - Users: 0, - Builds: 0, - Nodes: 0, - } - default: - return &core.License{ - Kind: core.LicenseTrial, - Repos: 0, - Users: 0, - Builds: 5000, - Nodes: 0, - } - } -} - -// Load loads the license from file. -func Load(path string) (*core.License, error) { - pub, err := licenseutil.DecodePublicKey(publicKey) - if err != nil { - return nil, err - } - - var decoded *license.License - if strings.HasPrefix(path, "-----BEGIN LICENSE KEY-----") { - decoded, err = license.Decode([]byte(path), pub) - } else { - decoded, err = license.DecodeFile(path, pub) - } - - if err != nil { - return nil, err - } - - license := new(core.License) - license.Expires = decoded.Exp - license.Licensor = decoded.Cus - license.Subscription = decoded.Sub - license.Users = int64(decoded.Lim) - - if decoded.Dat != nil { - dat := new(core.License) - json.Unmarshal(decoded.Dat, dat) - license.Repos = dat.Repos - } - - return license, err -} diff --git a/service/license/load_test.go b/service/license/load_test.go deleted file mode 100644 index 27b50732fb..0000000000 --- a/service/license/load_test.go +++ /dev/null @@ -1,18 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build !nolimit -// +build !oss - -package license diff --git a/service/license/nolimit.go b/service/license/nolimit.go deleted file mode 100644 index 631ce1dcf4..0000000000 --- a/service/license/nolimit.go +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build nolimit -// +build !oss - -package license - -import ( - "github.com/drone/drone/core" -) - -// DefaultLicense is an empty license with no restrictions. 
-var DefaultLicense = &core.License{Kind: core.LicenseFree} - -func Trial(string) *core.License { return DefaultLicense } -func Load(string) (*core.License, error) { return DefaultLicense, nil } diff --git a/service/license/nolimit_oss.go b/service/license/nolimit_oss.go deleted file mode 100644 index a118caf0e9..0000000000 --- a/service/license/nolimit_oss.go +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build nolimit -// +build oss - -package license - -import ( - "github.com/drone/drone/core" -) - -// DefaultLicense is an empty license with no restrictions. -var DefaultLicense = &core.License{Kind: core.LicenseFoss} - -func Trial(string) *core.License { return DefaultLicense } -func Load(string) (*core.License, error) { return DefaultLicense, nil } diff --git a/service/license/service.go b/service/license/service.go deleted file mode 100644 index ca4fa001bc..0000000000 --- a/service/license/service.go +++ /dev/null @@ -1,74 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package license - -import ( - "context" - "time" - - "github.com/drone/drone/core" -) - -// NewService returns a new License service. -func NewService( - users core.UserStore, - repos core.RepositoryStore, - builds core.BuildStore, - license *core.License, -) core.LicenseService { - return &service{ - users: users, - repos: repos, - builds: builds, - license: license, - } -} - -type service struct { - users core.UserStore - repos core.RepositoryStore - builds core.BuildStore - license *core.License -} - -func (s *service) Exceeded(ctx context.Context) (bool, error) { - if limit := s.license.Builds; limit > 0 { - count, _ := s.builds.Count(ctx) - if count > limit { - return true, core.ErrBuildLimit - } - } - if limit := s.license.Users; limit > 0 { - count, _ := s.users.Count(ctx) - if count > limit { - return true, core.ErrUserLimit - } - } - if limit := s.license.Repos; limit > 0 { - count, _ := s.repos.Count(ctx) - if count > limit { - return true, core.ErrRepoLimit - } - } - return false, nil -} - -func (s *service) Expired(ctx context.Context) bool { - return s.license.Expired() -} - -func (s *service) Expires(ctx context.Context) time.Time { - return s.license.Expires -} diff --git a/service/linker/linker.go b/service/linker/linker.go deleted file mode 100644 index 1adcd7e082..0000000000 --- a/service/linker/linker.go +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright 2019 Drone IO, Inc. 
-// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package linker - -import ( - "context" - - "github.com/drone/drone/core" - "github.com/drone/go-scm/scm" -) - -// New returns a new Linker server. -func New(client *scm.Client) core.Linker { - return &service{ - client: client, - } -} - -type service struct { - client *scm.Client -} - -func (s *service) Link(ctx context.Context, repo, ref, sha string) (string, error) { - return s.client.Linker.Resource(ctx, repo, scm.Reference{ - Path: ref, - Sha: sha, - }) -} diff --git a/service/linker/linker_test.go b/service/linker/linker_test.go deleted file mode 100644 index 5dd85e24e8..0000000000 --- a/service/linker/linker_test.go +++ /dev/null @@ -1,15 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package linker diff --git a/service/netrc/netrc.go b/service/netrc/netrc.go deleted file mode 100644 index 9a0603fb1c..0000000000 --- a/service/netrc/netrc.go +++ /dev/null @@ -1,91 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package netrc - -import ( - "context" - - "github.com/drone/drone/core" - "github.com/drone/go-scm/scm" -) - -var _ core.NetrcService = (*Service)(nil) - -// Service implements a netrc file generation service. -type Service struct { - client *scm.Client - renewer core.Renewer - private bool - username string - password string -} - -// New returns a new Netrc service. -func New( - client *scm.Client, - renewer core.Renewer, - private bool, - username string, - password string, -) core.NetrcService { - return &Service{ - client: client, - renewer: renewer, - private: private, - username: username, - password: password, - } -} - -// Create creates a netrc file for the user and repository. -func (s *Service) Create(ctx context.Context, user *core.User, repo *core.Repository) (*core.Netrc, error) { - // if the repository is public and private mode is disabled, - // authentication is not required. 
- if repo.Private == false && s.private == false { - return nil, nil - } - - netrc := new(core.Netrc) - err := netrc.SetMachine(repo.HTTPURL) - if err != nil { - return nil, err - } - - if s.username != "" && s.password != "" { - netrc.Password = s.password - netrc.Login = s.username - return netrc, nil - } - - // force refresh the authorization token to prevent - // it from expiring during pipeline execution. - err = s.renewer.Renew(ctx, user, true) - if err != nil { - return nil, err - } - - switch s.client.Driver { - case scm.DriverGitlab, scm.DriverGitee: - netrc.Login = "oauth2" - netrc.Password = user.Token - case scm.DriverBitbucket: - netrc.Login = "x-token-auth" - netrc.Password = user.Token - case scm.DriverGithub, scm.DriverGogs, scm.DriverGitea: - netrc.Password = "x-oauth-basic" - netrc.Login = user.Token - } - return netrc, nil -} diff --git a/service/netrc/netrc_test.go b/service/netrc/netrc_test.go deleted file mode 100644 index 176379f98e..0000000000 --- a/service/netrc/netrc_test.go +++ /dev/null @@ -1,236 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package netrc - -import ( - "context" - "net/url" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/mock" - "github.com/drone/go-scm/scm" - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" -) - -var noContext = context.Background() - -func TestNetrc(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockRepo := &core.Repository{Private: true, HTTPURL: "https://github.com/octocat/hello-world"} - mockUser := &core.User{ - Token: "755bb80e5b", - Refresh: "e08f3fa43e", - } - mockRenewer := mock.NewMockRenewer(controller) - mockRenewer.EXPECT().Renew(gomock.Any(), mockUser, true) - - mockClient := &scm.Client{Driver: scm.DriverGithub} - - s := New(mockClient, mockRenewer, false, "", "") - got, err := s.Create(noContext, mockUser, mockRepo) - if err != nil { - t.Error(err) - } - - want := &core.Netrc{ - Machine: "github.com", - Login: "755bb80e5b", - Password: "x-oauth-basic", - } - if diff := cmp.Diff(got, want); diff != "" { - t.Errorf(diff) - } -} - -func TestNetrc_Gitlab(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockRepo := &core.Repository{Private: true, HTTPURL: "https://gitlab.com/octocat/hello-world"} - mockUser := &core.User{ - Token: "755bb80e5b", - Refresh: "e08f3fa43e", - } - mockRenewer := mock.NewMockRenewer(controller) - mockRenewer.EXPECT().Renew(gomock.Any(), mockUser, true) - - s := Service{ - renewer: mockRenewer, - client: &scm.Client{Driver: scm.DriverGitlab}, - } - got, err := s.Create(noContext, mockUser, mockRepo) - if err != nil { - t.Error(err) - } - - want := &core.Netrc{ - Machine: "gitlab.com", - Login: "oauth2", - Password: "755bb80e5b", - } - if diff := cmp.Diff(got, want); diff != "" { - t.Errorf(diff) - } -} - -func TestNetrc_Gogs(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockRepo := &core.Repository{Private: true, HTTPURL: "https://try.gogs.io/octocat/hello-world"} - mockUser := &core.User{ - Token: "755bb80e5b", - Refresh: "e08f3fa43e", - } - mockRenewer := mock.NewMockRenewer(controller) - mockRenewer.EXPECT().Renew(gomock.Any(), mockUser, true) - - s := Service{ - renewer: mockRenewer, - client: &scm.Client{Driver: scm.DriverGogs}, - } - got, err := s.Create(noContext, mockUser, mockRepo) - if 
err != nil { - t.Error(err) - } - - want := &core.Netrc{ - Machine: "try.gogs.io", - Login: "755bb80e5b", - Password: "x-oauth-basic", - } - if diff := cmp.Diff(got, want); diff != "" { - t.Errorf(diff) - } -} - -func TestNetrc_Bitbucket(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockRepo := &core.Repository{Private: true, HTTPURL: "https://bitbucket.org/octocat/hello-world"} - mockUser := &core.User{ - Token: "755bb80e5b", - Refresh: "e08f3fa43e", - } - mockRenewer := mock.NewMockRenewer(controller) - mockRenewer.EXPECT().Renew(gomock.Any(), mockUser, true) - - s := Service{ - renewer: mockRenewer, - client: &scm.Client{Driver: scm.DriverBitbucket}, - } - got, err := s.Create(noContext, mockUser, mockRepo) - if err != nil { - t.Error(err) - } - - want := &core.Netrc{ - Machine: "bitbucket.org", - Login: "x-token-auth", - Password: "755bb80e5b", - } - if diff := cmp.Diff(got, want); diff != "" { - t.Errorf(diff) - } -} - -func TestNetrc_Gitee(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockRepo := &core.Repository{Private: true, HTTPURL: "https://gitee.com/kit101/drone-yml-test"} - mockUser := &core.User{ - Token: "755bb80e5b", - Refresh: "e08f3fa43e", - } - mockRenewer := mock.NewMockRenewer(controller) - mockRenewer.EXPECT().Renew(gomock.Any(), mockUser, true) - - s := Service{ - renewer: mockRenewer, - client: &scm.Client{Driver: scm.DriverGitee}, - } - got, err := s.Create(noContext, mockUser, mockRepo) - if err != nil { - t.Error(err) - } - - want := &core.Netrc{ - Machine: "gitee.com", - Login: "oauth2", - Password: "755bb80e5b", - } - if diff := cmp.Diff(got, want); diff != "" { - t.Errorf(diff) - } -} - -func TestNetrc_Nil(t *testing.T) { - s := Service{ - private: false, - } - netrc, _ := s.Create(noContext, &core.User{}, &core.Repository{Private: false}) - if netrc != nil { - t.Errorf("Expect nil netrc file when public repository") - } -} - -func TestNetrc_MalformedURL(t *testing.T) { - s := Service{ - private: true, - } - _, err := s.Create(noContext, &core.User{}, &core.Repository{HTTPURL: ":::"}) - if _, ok := err.(*url.Error); !ok { - t.Errorf("Expect error when URL malformed") - } -} - -func TestNetrc_StaticLogin(t *testing.T) { - s := Service{ - private: true, - username: "octocat", - password: "password", - } - got, err := s.Create(noContext, &core.User{}, &core.Repository{HTTPURL: "https://github.com/octocat/hello-world"}) - if err != nil { - t.Error(err) - } - - want := &core.Netrc{ - Machine: "github.com", - Login: "octocat", - Password: "password", - } - if diff := cmp.Diff(got, want); diff != "" { - t.Errorf(diff) - } -} - -func TestNetrc_RenewErr(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockRepo := &core.Repository{Private: true, HTTPURL: "https://github.com/octocat/hello-world"} - mockUser := &core.User{ - Token: "755bb80e5b", - Refresh: "e08f3fa43e", - } - mockRenewer := mock.NewMockRenewer(controller) - mockRenewer.EXPECT().Renew(gomock.Any(), mockUser, true).Return(scm.ErrNotAuthorized) - - s := Service{ - renewer: mockRenewer, - client: &scm.Client{Driver: scm.DriverGithub}, - } - _, err := s.Create(noContext, mockUser, mockRepo) - if err != scm.ErrNotAuthorized { - t.Errorf("Want not authorized error, got %v", err) - } -} diff --git a/service/org/cache.go b/service/org/cache.go deleted file mode 100644 index fac539ed44..0000000000 --- a/service/org/cache.go +++ /dev/null @@ -1,99 +0,0 @@ -// Copyright 2019 Drone IO, Inc. 
-// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package orgs - -import ( - "context" - "fmt" - "sync" - "time" - - "github.com/drone/drone/core" - - lru "github.com/hashicorp/golang-lru" -) - -// content key pattern used in the cache, comprised of the -// organization name and username. -const contentKey = "%s/%s" - -// NewCache wraps the service with a simple cache to store -// organization membership. -func NewCache(base core.OrganizationService, size int, ttl time.Duration) core.OrganizationService { - // simple cache prevents the same yaml file from being - // requested multiple times in a short period. - cache, _ := lru.New(25) - - return &cacher{ - cache: cache, - base: base, - size: size, - ttl: ttl, - } -} - -type cacher struct { - mu sync.Mutex - - base core.OrganizationService - size int - ttl time.Duration - - cache *lru.Cache -} - -type item struct { - expiry time.Time - member bool - admin bool -} - -func (c *cacher) List(ctx context.Context, user *core.User) ([]*core.Organization, error) { - return c.base.List(ctx, user) -} - -func (c *cacher) Membership(ctx context.Context, user *core.User, name string) (bool, bool, error) { - key := fmt.Sprintf(contentKey, user.Login, name) - now := time.Now() - - // get the membership details from the cache. - cached, ok := c.cache.Get(key) - if ok { - item := cached.(*item) - // if the item is expired it can be ejected - // from the cache, else if not expired we return - // the cached results. - if now.After(item.expiry) { - c.cache.Remove(cached) - } else { - return item.member, item.admin, nil - } - } - - // get up-to-date membership details due to a cache - // miss or expired cache item. - member, admin, err := c.base.Membership(ctx, user, name) - if err != nil { - return false, false, err - } - - c.cache.Add(key, &item{ - expiry: now.Add(c.ttl), - member: member, - admin: admin, - }) - - return member, admin, nil -} diff --git a/service/org/cache_test.go b/service/org/cache_test.go deleted file mode 100644 index 7923aa7b44..0000000000 --- a/service/org/cache_test.go +++ /dev/null @@ -1,90 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -package orgs - -import ( - "testing" - "time" - - "github.com/drone/drone/core" - "github.com/drone/drone/mock" - - "github.com/golang/mock/gomock" -) - -func TestCache(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{ - Login: "octocat", - } - - mockOrgService := mock.NewMockOrganizationService(controller) - mockOrgService.EXPECT().Membership(gomock.Any(), gomock.Any(), "github").Return(true, true, nil).Times(1) - - service := NewCache(mockOrgService, 10, time.Minute).(*cacher) - admin, member, err := service.Membership(noContext, mockUser, "github") - if err != nil { - t.Error(err) - } - - if got, want := service.cache.Len(), 1; got != want { - t.Errorf("Expect cache size %d, got %d", want, got) - } - if admin == false { - t.Errorf("Expect admin true, got false") - } - if member == false { - t.Errorf("Expect member true, got false") - } - - admin, member, err = service.Membership(noContext, mockUser, "github") - if err != nil { - t.Error(err) - } - if got, want := service.cache.Len(), 1; got != want { - t.Errorf("Expect cache size still %d, got %d", want, got) - } - if admin == false { - t.Errorf("Expect cached admin true, got false") - } - if member == false { - t.Errorf("Expect cached member true, got false") - } -} - -func TestCache_Expired(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{ - Login: "octocat", - } - - mockOrgService := mock.NewMockOrganizationService(controller) - mockOrgService.EXPECT().Membership(gomock.Any(), gomock.Any(), "github").Return(true, true, nil).Times(1) - - service := NewCache(mockOrgService, 10, time.Minute).(*cacher) - service.cache.Add("octocat/github", &item{ - expiry: time.Now().Add(time.Hour * -1), - member: true, - admin: true, - }) - admin, member, err := service.Membership(noContext, mockUser, "github") - if err != nil { - t.Error(err) - } - - if got, want := service.cache.Len(), 1; got != want { - t.Errorf("Expect cache size still %d, got %d", want, got) - } - if admin == false { - t.Errorf("Expect cached admin true, got false") - } - if member == false { - t.Errorf("Expect cached member true, got false") - } -} diff --git a/service/org/org.go b/service/org/org.go deleted file mode 100644 index 37a5b7c95b..0000000000 --- a/service/org/org.go +++ /dev/null @@ -1,92 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package orgs - -import ( - "context" - "time" - - "github.com/drone/drone/core" - "github.com/drone/go-scm/scm" -) - -// New returns a new OrganizationService. 
-func New(client *scm.Client, renewer core.Renewer) core.OrganizationService { - return &service{ - client: client, - renewer: renewer, - } -} - -type service struct { - renewer core.Renewer - client *scm.Client -} - -func (s *service) List(ctx context.Context, user *core.User) ([]*core.Organization, error) { - err := s.renewer.Renew(ctx, user, false) - if err != nil { - return nil, err - } - token := &scm.Token{ - Token: user.Token, - Refresh: user.Refresh, - } - if user.Expiry != 0 { - token.Expires = time.Unix(user.Expiry, 0) - } - ctx = context.WithValue(ctx, scm.TokenKey{}, token) - out, _, err := s.client.Organizations.List(ctx, scm.ListOptions{Size: 100}) - if err != nil { - return nil, err - } - var orgs []*core.Organization - for _, org := range out { - orgs = append(orgs, &core.Organization{ - Name: org.Name, - Avatar: org.Avatar, - }) - } - return orgs, nil -} - -func (s *service) Membership(ctx context.Context, user *core.User, name string) (bool, bool, error) { - err := s.renewer.Renew(ctx, user, false) - if err != nil { - return false, false, err - } - token := &scm.Token{ - Token: user.Token, - Refresh: user.Refresh, - } - if user.Expiry != 0 { - token.Expires = time.Unix(user.Expiry, 0) - } - ctx = context.WithValue(ctx, scm.TokenKey{}, token) - out, _, err := s.client.Organizations.FindMembership(ctx, name, user.Login) - if err != nil { - return false, false, err - } - switch { - case out.Active == false: - return false, false, nil - case out.Role == scm.RoleUndefined: - return false, false, nil - case out.Role == scm.RoleAdmin: - return true, true, nil - default: - return true, false, nil - } -} diff --git a/service/org/org_test.go b/service/org/org_test.go deleted file mode 100644 index 34861e6dda..0000000000 --- a/service/org/org_test.go +++ /dev/null @@ -1,126 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -package orgs - -import ( - "context" - "testing" - "time" - - "github.com/drone/drone/mock" - "github.com/drone/drone/mock/mockscm" - "github.com/drone/drone/core" - "github.com/drone/go-scm/scm" - "github.com/google/go-cmp/cmp" - - "github.com/golang/mock/gomock" -) - -var noContext = context.Background() - -func TestList(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - checkToken := func(ctx context.Context, opts scm.ListOptions) { - got, ok := ctx.Value(scm.TokenKey{}).(*scm.Token) - if !ok { - t.Errorf("Expect token stored in context") - return - } - want := &scm.Token{ - Token: "755bb80e5b", - Refresh: "e08f3fa43e", - Expires: time.Unix(1532292869, 0), - } - if diff := cmp.Diff(got, want); diff != "" { - t.Errorf(diff) - } - if got, want := opts.Size, 100; got != want { - t.Errorf("Want page size %d, got %d", want, got) - } - if got, want := opts.Page, 0; got != want { - t.Errorf("Want page number %d, got %d", want, got) - } - } - - mockUser := &core.User{ - Login: "octocat", - Token: "755bb80e5b", - Refresh: "e08f3fa43e", - Expiry: 1532292869, - } - mockOrgs := []*scm.Organization{ - { - Name: "github", - Avatar: "https://secure.gravatar.com/avatar/8c58a0be77ee441bb8f8595b7f1b4e87", - }, - } - mockOrgService := mockscm.NewMockOrganizationService(controller) - mockOrgService.EXPECT().List(gomock.Any(), gomock.Any()).Do(checkToken).Return(mockOrgs, nil, nil) - - mockRenewer := mock.NewMockRenewer(controller) - mockRenewer.EXPECT().Renew(gomock.Any(), mockUser, false) - - client := new(scm.Client) - client.Organizations = mockOrgService - - want := []*core.Organization{ - { - Name: "github", - Avatar: "https://secure.gravatar.com/avatar/8c58a0be77ee441bb8f8595b7f1b4e87", - }, - } - service := New(client, mockRenewer) - got, err := service.List(noContext, mockUser) - if err != nil { - t.Error(err) - } - - if diff := cmp.Diff(got, want); diff != "" { - t.Errorf(diff) - } -} - -func TestList_Error(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{} - - mockOrgs := mockscm.NewMockOrganizationService(controller) - mockOrgs.EXPECT().List(gomock.Any(), gomock.Any()).Return(nil, nil, scm.ErrNotAuthorized) - - mockRenewer := mock.NewMockRenewer(controller) - mockRenewer.EXPECT().Renew(gomock.Any(), mockUser, false) - - client := new(scm.Client) - client.Organizations = mockOrgs - - service := New(client, mockRenewer) - got, err := service.List(noContext, mockUser) - if err == nil { - t.Errorf("Expect error finding user") - } - if got != nil { - t.Errorf("Expect nil user on error") - } -} - -func TestList_RenewalError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{} - - mockRenewer := mock.NewMockRenewer(controller) - mockRenewer.EXPECT().Renew(gomock.Any(), mockUser, false).Return(scm.ErrNotAuthorized) - - service := New(nil, mockRenewer) - _, err := service.List(noContext, mockUser) - if err == nil { - t.Errorf("Expect error refreshing token") - } -} diff --git a/service/redisdb/lockerr.go b/service/redisdb/lockerr.go deleted file mode 100644 index a94d1cdb54..0000000000 --- a/service/redisdb/lockerr.go +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright 2021 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package redisdb - -import ( - "context" -) - -// LockErr is an interface with lock and unlock functions that return an error. -// Method names are chosen so that redsync.Mutex implements the interface. -type LockErr interface { - LockContext(context.Context) error - UnlockContext(context.Context) (bool, error) -} - -// LockErrNoOp is a dummy no-op locker -type LockErrNoOp struct{} - -func (l LockErrNoOp) LockContext(context.Context) error { return nil } -func (l LockErrNoOp) UnlockContext(context.Context) (bool, error) { return false, nil } diff --git a/service/redisdb/redisdb.go b/service/redisdb/redisdb.go deleted file mode 100644 index bca1011a95..0000000000 --- a/service/redisdb/redisdb.go +++ /dev/null @@ -1,186 +0,0 @@ -// Copyright 2021 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package redisdb - -import ( - "context" - "fmt" - "time" - - "github.com/go-redsync/redsync/v4" - "github.com/go-redsync/redsync/v4/redis/goredis/v8" - - "github.com/drone/drone/cmd/drone-server/config" - - "github.com/go-redis/redis/v8" - "github.com/sirupsen/logrus" -) - -func New(config config.Config) (srv RedisDB, err error) { - var options *redis.Options - - if config.Redis.ConnectionString != "" { - options, err = redis.ParseURL(config.Redis.ConnectionString) - if err != nil { - return - } - } else if config.Redis.Addr != "" { - options = &redis.Options{ - Addr: config.Redis.Addr, - Password: config.Redis.Password, - DB: config.Redis.DB, - } - } else { - return - } - - rdb := redis.NewClient(options) - - _, err = rdb.Ping(context.Background()).Result() - if err != nil { - err = fmt.Errorf("redis not accessibe: %w", err) - return - } - - rs := redsync.New(goredis.NewPool(rdb)) - - srv = redisService{ - rdb: rdb, - mutexGen: rs, - } - - return -} - -type RedisDB interface { - Client() redis.Cmdable - Subscribe(ctx context.Context, channelName string, channelSize int, proc PubSubProcessor) - NewMutex(name string, expiry time.Duration) LockErr -} - -type redisService struct { - rdb *redis.Client - mutexGen *redsync.Redsync -} - -// Client exposes redis.Cmdable interface -func (r redisService) Client() redis.Cmdable { - return r.rdb -} - -type PubSubProcessor interface { - ProcessMessage(s string) - ProcessError(err error) -} - -var backoffDurations = []time.Duration{ - 0, time.Second, 3 * time.Second, 5 * time.Second, 10 * time.Second, 20 * time.Second, -} - -// Subscribe subscribes to a redis pub-sub channel. The messages are processed with the supplied PubSubProcessor. 
-// In case of an error the function will automatically reconnect with an increasing back-off delay. -// The only way to exit this function is to terminate or expire the supplied context. -func (r redisService) Subscribe(ctx context.Context, channelName string, channelSize int, proc PubSubProcessor) { - var connectTry int - for { - err := func() (err error) { - defer func() { - // panic recovery because external PubSubProcessor methods might cause panics. - if p := recover(); p != nil { - err = fmt.Errorf("redis pubsub: panic: %v", p) - } - }() - - var options []redis.ChannelOption - - if channelSize > 1 { - options = append(options, redis.WithChannelSize(channelSize)) - } - - pubsub := r.rdb.Subscribe(ctx, channelName) - ch := pubsub.Channel(options...) - - defer func() { - _ = pubsub.Close() - }() - - // make sure the connection is successful - err = pubsub.Ping(ctx) - if err != nil { - return - } - - connectTry = 0 // successfully connected, reset the counter - - logrus. - WithField("try", connectTry+1). - WithField("channel", channelName). - Trace("redis pubsub: subscribed") - - for { - select { - case m, ok := <-ch: - if !ok { - err = fmt.Errorf("redis pubsub: channel=%s closed", channelName) - return - } - - proc.ProcessMessage(m.Payload) - - case <-ctx.Done(): - err = ctx.Err() - return - } - } - }() - if err == nil { - // should not happen, the function should always exit with an error - continue - } - - proc.ProcessError(err) - - if err == context.Canceled || err == context.DeadlineExceeded { - logrus. - WithField("channel", channelName). - Trace("redis pubsub: finished") - return - } - - dur := backoffDurations[connectTry] - - logrus. - WithError(err). - WithField("try", connectTry+1). - WithField("pause", dur.String()). - WithField("channel", channelName). - Error("redis pubsub: connection failed, reconnecting") - - time.Sleep(dur) - - if connectTry < len(backoffDurations)-1 { - connectTry++ - } - } -} - -func (r redisService) NewMutex(name string, expiry time.Duration) LockErr { - var options []redsync.Option - if expiry > 0 { - options = append(options, redsync.WithExpiry(expiry)) - } - - return r.mutexGen.NewMutex(name, options...) -} diff --git a/service/repo/repo.go b/service/repo/repo.go deleted file mode 100644 index 9a578c5127..0000000000 --- a/service/repo/repo.go +++ /dev/null @@ -1,110 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package repo - -import ( - "context" - - "github.com/drone/drone/core" - "github.com/drone/go-scm/scm" -) - -type service struct { - renew core.Renewer - client *scm.Client - visibility string - trusted bool -} - -// New returns a new Repository service, providing access to the -// repository information from the source code management system.
-func New(client *scm.Client, renewer core.Renewer, visibility string, trusted bool) core.RepositoryService { - return &service{ - renew: renewer, - client: client, - visibility: visibility, - trusted: trusted, - } -} - -func (s *service) List(ctx context.Context, user *core.User) ([]*core.Repository, error) { - err := s.renew.Renew(ctx, user, false) - if err != nil { - return nil, err - } - - ctx = context.WithValue(ctx, scm.TokenKey{}, &scm.Token{ - Token: user.Token, - Refresh: user.Refresh, - }) - repos := []*core.Repository{} - opts := scm.ListOptions{Size: 100} - for { - result, meta, err := s.client.Repositories.List(ctx, opts) - if err != nil { - return nil, err - } - for _, src := range result { - if src != nil { - repos = append(repos, convertRepository(src, s.visibility, s.trusted)) - } - } - opts.Page = meta.Page.Next - opts.URL = meta.Page.NextURL - - if opts.Page == 0 && opts.URL == "" { - break - } - } - return repos, nil -} - -func (s *service) Find(ctx context.Context, user *core.User, repo string) (*core.Repository, error) { - err := s.renew.Renew(ctx, user, false) - if err != nil { - return nil, err - } - - ctx = context.WithValue(ctx, scm.TokenKey{}, &scm.Token{ - Token: user.Token, - Refresh: user.Refresh, - }) - result, _, err := s.client.Repositories.Find(ctx, repo) - if err != nil { - return nil, err - } - return convertRepository(result, s.visibility, s.trusted), nil -} - -func (s *service) FindPerm(ctx context.Context, user *core.User, repo string) (*core.Perm, error) { - err := s.renew.Renew(ctx, user, false) - if err != nil { - return nil, err - } - - ctx = context.WithValue(ctx, scm.TokenKey{}, &scm.Token{ - Token: user.Token, - Refresh: user.Refresh, - }) - result, _, err := s.client.Repositories.FindPerms(ctx, repo) - if err != nil { - return nil, err - } - return &core.Perm{ - Read: result.Pull, - Write: result.Push, - Admin: result.Admin, - }, nil -} diff --git a/service/repo/repo_test.go b/service/repo/repo_test.go deleted file mode 100644 index 299c79a175..0000000000 --- a/service/repo/repo_test.go +++ /dev/null @@ -1,289 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -package repo - -import ( - "context" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/mock" - "github.com/drone/drone/mock/mockscm" - "github.com/drone/go-scm/scm" - "github.com/google/go-cmp/cmp" - - "github.com/golang/mock/gomock" -) - -var noContext = context.Background() - -func TestFind(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{} - mockRepo := &scm.Repository{ - Namespace: "octocat", - Name: "hello-world", - } - - mockRepoService := mockscm.NewMockRepositoryService(controller) - mockRepoService.EXPECT().Find(gomock.Any(), "octocat/hello-world").Return(mockRepo, nil, nil) - - mockRenewer := mock.NewMockRenewer(controller) - mockRenewer.EXPECT().Renew(gomock.Any(), mockUser, false) - - client := new(scm.Client) - client.Repositories = mockRepoService - - service := New(client, mockRenewer, "", false) - - want := &core.Repository{ - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - Visibility: "public", - } - - got, err := service.Find(noContext, mockUser, "octocat/hello-world") - if err != nil { - t.Error(err) - } - if diff := cmp.Diff(got, want); diff != "" { - t.Errorf(diff) - } -} - -func TestFind_Err(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{} - - mockRepoService := mockscm.NewMockRepositoryService(controller) - mockRepoService.EXPECT().Find(gomock.Any(), "octocat/hello-world").Return(nil, nil, scm.ErrNotFound) - - mockRenewer := mock.NewMockRenewer(controller) - mockRenewer.EXPECT().Renew(gomock.Any(), mockUser, false) - - client := new(scm.Client) - client.Repositories = mockRepoService - - service := New(client, mockRenewer, "", false) - _, err := service.Find(noContext, mockUser, "octocat/hello-world") - if err != scm.ErrNotFound { - t.Errorf("Expect not found error, got %v", err) - } -} - -func TestFind_RefreshErr(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{} - - mockRenewer := mock.NewMockRenewer(controller) - mockRenewer.EXPECT().Renew(gomock.Any(), mockUser, false).Return(scm.ErrNotAuthorized) - - service := New(nil, mockRenewer, "", false) - _, err := service.Find(noContext, mockUser, "octocat/hello-world") - if err == nil { - t.Errorf("Expect error refreshing token") - } -} - -func TestFindPerm(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{} - mockPerm := &scm.Perm{ - Pull: true, - Push: true, - Admin: true, - } - - mockRepoService := mockscm.NewMockRepositoryService(controller) - mockRepoService.EXPECT().FindPerms(gomock.Any(), "octocat/hello-world").Return(mockPerm, nil, nil) - - mockRenewer := mock.NewMockRenewer(controller) - mockRenewer.EXPECT().Renew(gomock.Any(), mockUser, false) - - client := new(scm.Client) - client.Repositories = mockRepoService - - service := New(client, mockRenewer, "", false) - - want := &core.Perm{ - Read: true, - Write: true, - Admin: true, - } - - got, err := service.FindPerm(noContext, mockUser, "octocat/hello-world") - if err != nil { - t.Error(err) - } - if diff := cmp.Diff(got, want); diff != "" { - t.Errorf(diff) - } -} - -func TestFindPerm_Err(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{} - - mockRepoService := mockscm.NewMockRepositoryService(controller) - mockRepoService.EXPECT().FindPerms(gomock.Any(), "octocat/hello-world").Return(nil, nil, 
scm.ErrNotFound) - - mockRenewer := mock.NewMockRenewer(controller) - mockRenewer.EXPECT().Renew(gomock.Any(), mockUser, false) - - client := new(scm.Client) - client.Repositories = mockRepoService - - service := New(client, mockRenewer, "", false) - _, err := service.FindPerm(noContext, mockUser, "octocat/hello-world") - if err != scm.ErrNotFound { - t.Errorf("Expect not found error, got %v", err) - } -} - -func TestFindPerm_RefreshErr(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{} - - mockRenewer := mock.NewMockRenewer(controller) - mockRenewer.EXPECT().Renew(gomock.Any(), mockUser, false).Return(scm.ErrNotAuthorized) - - service := New(nil, mockRenewer, "", false) - _, err := service.FindPerm(noContext, mockUser, "octocat/hello-world") - if err == nil { - t.Errorf("Expect error refreshing token") - } -} - -func TestList(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{} - mockRepos := []*scm.Repository{ - { - Namespace: "octocat", - Name: "hello-world", - }, - } - - mockRepoService := mockscm.NewMockRepositoryService(controller) - mockRepoService.EXPECT().List(gomock.Any(), gomock.Any()).Return(mockRepos, &scm.Response{}, nil) - - mockRenewer := mock.NewMockRenewer(controller) - mockRenewer.EXPECT().Renew(gomock.Any(), mockUser, false) - - client := new(scm.Client) - client.Repositories = mockRepoService - - want := []*core.Repository{ - { - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - Visibility: "public", - }, - } - - service := New(client, mockRenewer, "", false) - got, err := service.List(noContext, mockUser) - if err != nil { - t.Error(err) - } - if diff := cmp.Diff(got, want); diff != "" { - t.Errorf(diff) - } -} - -func TestList_Err(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{} - - mockRepoService := mockscm.NewMockRepositoryService(controller) - mockRepoService.EXPECT().List(gomock.Any(), gomock.Any()).Return(nil, &scm.Response{}, scm.ErrNotAuthorized) - - mockRenewer := mock.NewMockRenewer(controller) - mockRenewer.EXPECT().Renew(gomock.Any(), mockUser, false) - - client := new(scm.Client) - client.Repositories = mockRepoService - - service := New(client, mockRenewer, "", false) - _, err := service.List(noContext, mockUser) - if err != scm.ErrNotAuthorized { - t.Errorf("Want not authorized error, got %v", err) - } -} - -func TestList_RefreshErr(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{} - - mockRenewer := mock.NewMockRenewer(controller) - mockRenewer.EXPECT().Renew(gomock.Any(), mockUser, false).Return(scm.ErrNotAuthorized) - - service := New(nil, mockRenewer, "", false) - _, err := service.List(noContext, mockUser) - if err == nil { - t.Errorf("Expect error refreshing token") - } -} - -func TestListWithNilRepo(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{} - mockRepos := []*scm.Repository{ - { - Namespace: "octocat", - Name: "hello-world", - }, - nil, - } - - mockRepoService := mockscm.NewMockRepositoryService(controller) - mockRepoService.EXPECT().List(gomock.Any(), gomock.Any()).Return(mockRepos, &scm.Response{}, nil) - - mockRenewer := mock.NewMockRenewer(controller) - mockRenewer.EXPECT().Renew(gomock.Any(), mockUser, false) - - client := new(scm.Client) - client.Repositories = mockRepoService - - want := 
[]*core.Repository{ - { - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - Visibility: "public", - }, - } - - service := New(client, mockRenewer, "", false) - got, err := service.List(noContext, mockUser) - if err != nil { - t.Error(err) - } - if diff := cmp.Diff(got, want); diff != "" { - t.Errorf(diff) - } -} diff --git a/service/repo/util.go b/service/repo/util.go deleted file mode 100644 index ecb62ebcea..0000000000 --- a/service/repo/util.go +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package repo - -import ( - "github.com/drone/drone/core" - "github.com/drone/go-scm/scm" -) - -// convertRepository is a helper function that converts a -// repository from the source code management system to the -// local datastructure. -func convertRepository(src *scm.Repository, visibility string, trusted bool) *core.Repository { - return &core.Repository{ - UID: src.ID, - Namespace: src.Namespace, - Name: src.Name, - Slug: scm.Join(src.Namespace, src.Name), - HTTPURL: src.Clone, - SSHURL: src.CloneSSH, - Link: src.Link, - Private: src.Private, - Visibility: convertVisibility(src, visibility), - Branch: src.Branch, - Trusted: trusted, - Archived: src.Archived, - } -} - -// convertVisibility is a helper function that returns the -// repository visibility based on the privacy flag. -func convertVisibility(src *scm.Repository, visibility string) string { - // if the visibility is set to internal (github enterprise and gitlab) - // and the global visibility is empty, automatically set to internal. - if visibility == "" && src.Visibility == scm.VisibilityInternal { - return core.VisibilityInternal - } - - switch { - case src.Private == true: - return core.VisibilityPrivate - case visibility == core.VisibilityInternal: - return core.VisibilityInternal - default: - return core.VisibilityPublic - } -} diff --git a/service/repo/util_test.go b/service/repo/util_test.go deleted file mode 100644 index d4cfd66ce2..0000000000 --- a/service/repo/util_test.go +++ /dev/null @@ -1,98 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -package repo - -import ( - "testing" - - "github.com/drone/drone/core" - "github.com/drone/go-scm/scm" - - "github.com/google/go-cmp/cmp" -) - -func TestConvertRepository(t *testing.T) { - from := &scm.Repository{ - ID: "42", - Namespace: "octocat", - Name: "hello-world", - Branch: "master", - Private: true, - Clone: "https://github.com/octocat/hello-world.git", - CloneSSH: "git@github.com:octocat/hello-world.git", - Link: "https://github.com/octocat/hello-world", - } - want := &core.Repository{ - UID: "42", - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - HTTPURL: "https://github.com/octocat/hello-world.git", - SSHURL: "git@github.com:octocat/hello-world.git", - Link: "https://github.com/octocat/hello-world", - Private: true, - Branch: "master", - Visibility: core.VisibilityPrivate, - } - got := convertRepository(from, "", false) - if diff := cmp.Diff(want, got); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestConvertVisibility(t *testing.T) { - tests := []struct { - r *scm.Repository - v string - }{ - { - r: &scm.Repository{Private: false}, - v: core.VisibilityPublic, - }, - { - r: &scm.Repository{Private: true}, - v: core.VisibilityPrivate, - }, - { - r: &scm.Repository{Private: true, Visibility: scm.VisibilityInternal}, - v: core.VisibilityInternal, - }, - } - - for i, test := range tests { - if got, want := convertVisibility(test.r, ""), test.v; got != want { - t.Errorf("Want visibility %s, got %s for index %d", got, want, i) - } - } -} - -func TestDefinedVisibility(t *testing.T) { - from := &scm.Repository{ - ID: "42", - Namespace: "octocat", - Name: "hello-world", - Branch: "master", - Private: false, - Clone: "https://github.com/octocat/hello-world.git", - CloneSSH: "git@github.com:octocat/hello-world.git", - Link: "https://github.com/octocat/hello-world", - } - want := &core.Repository{ - UID: "42", - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - HTTPURL: "https://github.com/octocat/hello-world.git", - SSHURL: "git@github.com:octocat/hello-world.git", - Link: "https://github.com/octocat/hello-world", - Private: false, - Branch: "master", - Visibility: core.VisibilityInternal, - } - got := convertRepository(from, "internal", false) - if diff := cmp.Diff(want, got); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/service/status/status.go b/service/status/status.go deleted file mode 100644 index 3adbdb1f52..0000000000 --- a/service/status/status.go +++ /dev/null @@ -1,95 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package status - -import ( - "context" - "fmt" - - "github.com/drone/drone/core" - "github.com/drone/go-scm/scm" - "github.com/drone/go-scm/scm/driver/github" -) - -// Config configures the Status service. 
-type Config struct { - Base string - Name string - Disabled bool -} - -// New returns a new StatusService -func New(client *scm.Client, renew core.Renewer, config Config) core.StatusService { - return &service{ - client: client, - renew: renew, - base: config.Base, - name: config.Name, - disabled: config.Disabled, - } -} - -type service struct { - renew core.Renewer - client *scm.Client - base string - name string - disabled bool -} - -func (s *service) Send(ctx context.Context, user *core.User, req *core.StatusInput) error { - if s.disabled || req.Build.Event == core.EventCron { - return nil - } - - err := s.renew.Renew(ctx, user, false) - if err != nil { - return err - } - - ctx = context.WithValue(ctx, scm.TokenKey{}, &scm.Token{ - Token: user.Token, - Refresh: user.Refresh, - }) - - // HACK(bradrydzewski) provides support for the github deployment API - if req.Build.DeployID != 0 && s.client.Driver == scm.DriverGithub { - // TODO(bradrydzewski) only update the deployment status when the - // build completes. - if req.Build.Finished == 0 { - return nil - } - _, _, err = s.client.Repositories.(*github.RepositoryService).CreateDeployStatus(ctx, req.Repo.Slug, &scm.DeployStatus{ - Number: req.Build.DeployID, - Desc: createDesc(req.Build.Status), - State: convertStatus(req.Build.Status), - Target: fmt.Sprintf("%s/%s/%d", s.base, req.Repo.Slug, req.Build.Number), - Environment: req.Build.Deploy, - }) - return err - } - - _, _, err = s.client.Repositories.CreateStatus(ctx, req.Repo.Slug, req.Build.After, &scm.StatusInput{ - Title: fmt.Sprintf("Build #%d", req.Build.Number), - Desc: createDesc(req.Build.Status), - Label: createLabel(s.name, req.Build.Event, req.Build.Deploy), - State: convertStatus(req.Build.Status), - Target: fmt.Sprintf("%s/%s/%d", s.base, req.Repo.Slug, req.Build.Number), - }) - if err == scm.ErrNotSupported { - return nil - } - return err -} diff --git a/service/status/status_test.go b/service/status/status_test.go deleted file mode 100644 index 70d7426ac8..0000000000 --- a/service/status/status_test.go +++ /dev/null @@ -1,119 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -package status - -import ( - "context" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/mock" - "github.com/drone/drone/mock/mockscm" - "github.com/drone/go-scm/scm" - - "github.com/golang/mock/gomock" -) - -var noContext = context.Background() - -func TestStatus(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{} - - mockRenewer := mock.NewMockRenewer(controller) - mockRenewer.EXPECT().Renew(gomock.Any(), mockUser, false).Return(nil) - - statusInput := &scm.StatusInput{ - Title: "Build #1", - State: scm.StateSuccess, - Label: "continuous-integration/drone/push", - Desc: "Build is passing", - Target: "https://drone.company.com/octocat/hello-world/1", - } - - mockRepos := mockscm.NewMockRepositoryService(controller) - mockRepos.EXPECT().CreateStatus(gomock.Any(), "octocat/hello-world", "a6586b3db244fb6b1198f2b25c213ded5b44f9fa", statusInput).Return(nil, nil, nil) - - client := new(scm.Client) - client.Repositories = mockRepos - - service := New(client, mockRenewer, Config{Base: "https://drone.company.com"}) - err := service.Send(noContext, mockUser, &core.StatusInput{ - Repo: &core.Repository{Slug: "octocat/hello-world"}, - Build: &core.Build{ - Number: 1, - Event: core.EventPush, - Status: core.StatusPassing, - After: "a6586b3db244fb6b1198f2b25c213ded5b44f9fa", - }, - }) - if err != nil { - t.Error(err) - } -} - -func TestStatus_ErrNotSupported(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{} - - mockRenewer := mock.NewMockRenewer(controller) - mockRenewer.EXPECT().Renew(gomock.Any(), mockUser, false).Return(nil) - - statusInput := &scm.StatusInput{ - Title: "Build #1", - State: scm.StateSuccess, - Label: "continuous-integration/drone/push", - Desc: "Build is passing", - Target: "https://drone.company.com/octocat/hello-world/1", - } - - mockRepos := mockscm.NewMockRepositoryService(controller) - mockRepos.EXPECT().CreateStatus(gomock.Any(), "octocat/hello-world", "a6586b3db244fb6b1198f2b25c213ded5b44f9fa", statusInput).Return(nil, nil, scm.ErrNotSupported) - - client := new(scm.Client) - client.Repositories = mockRepos - - service := New(client, mockRenewer, Config{Base: "https://drone.company.com"}) - err := service.Send(noContext, mockUser, &core.StatusInput{ - Repo: &core.Repository{Slug: "octocat/hello-world"}, - Build: &core.Build{ - Number: 1, - Event: core.EventPush, - Status: core.StatusPassing, - After: "a6586b3db244fb6b1198f2b25c213ded5b44f9fa", - }, - }) - if err != nil { - t.Error(err) - } -} - -func TestStatus_RenewalError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{} - - mockRenewer := mock.NewMockRenewer(controller) - mockRenewer.EXPECT().Renew(gomock.Any(), mockUser, false).Return(scm.ErrNotAuthorized) - - service := New(nil, mockRenewer, Config{Base: "https://drone.company.com"}) - err := service.Send(noContext, mockUser, &core.StatusInput{Build: &core.Build{}}) - if err == nil { - t.Errorf("Expect error refreshing token") - } -} - -func TestStatus_Disabled(t *testing.T) { - service := New(nil, nil, Config{Disabled: true}) - err := service.Send(noContext, nil, nil) - if err != nil { - t.Error(err) - } -} diff --git a/service/status/util.go b/service/status/util.go deleted file mode 100644 index eb6d93026b..0000000000 --- a/service/status/util.go +++ /dev/null @@ -1,93 +0,0 @@ -// Copyright 2019 Drone IO, Inc. 
-// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package status - -import ( - "fmt" - - "github.com/drone/drone/core" - "github.com/drone/go-scm/scm" - "github.com/gosimple/slug" -) - -func createLabel(name, event, deployTo string) string { - if name == "" { - name = "continuous-integration/drone" - } - switch event { - case core.EventPush: - return fmt.Sprintf("%s/push", name) - case core.EventPullRequest: - return fmt.Sprintf("%s/pr", name) - case core.EventTag: - return fmt.Sprintf("%s/tag", name) - case core.EventPromote: - return fmt.Sprintf("%s/promote/%s", name, slug.Make(deployTo)) - default: - return name - } -} - -func createDesc(state string) string { - switch state { - case core.StatusBlocked: - return "Build is pending approval" - case core.StatusDeclined: - return "Build was declined" - case core.StatusError: - return "Build encountered an error" - case core.StatusFailing: - return "Build is failing" - case core.StatusKilled: - return "Build was killed" - case core.StatusPassing: - return "Build is passing" - case core.StatusWaiting: - return "Build is pending" - case core.StatusPending: - return "Build is pending" - case core.StatusRunning: - return "Build is running" - case core.StatusSkipped: - return "Build was skipped" - default: - return "Build is in an unknown state" - } -} - -func convertStatus(state string) scm.State { - switch state { - case core.StatusBlocked: - return scm.StatePending - case core.StatusDeclined: - return scm.StateCanceled - case core.StatusError: - return scm.StateError - case core.StatusFailing: - return scm.StateFailure - case core.StatusKilled: - return scm.StateCanceled - case core.StatusPassing: - return scm.StateSuccess - case core.StatusPending: - return scm.StatePending - case core.StatusRunning: - return scm.StatePending - case core.StatusSkipped: - return scm.StateUnknown - default: - return scm.StateUnknown - } -} diff --git a/service/status/util_test.go b/service/status/util_test.go deleted file mode 100644 index 40aa2ff0e3..0000000000 --- a/service/status/util_test.go +++ /dev/null @@ -1,169 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -package status - -import ( - "testing" - - "github.com/drone/drone/core" - "github.com/drone/go-scm/scm" -) - -func TestCreateLabel(t *testing.T) { - tests := []struct { - name string - event string - label string - deployTo string - }{ - { - event: core.EventPullRequest, - label: "continuous-integration/drone/pr", - }, - { - event: core.EventPush, - label: "continuous-integration/drone/push", - }, - { - event: core.EventTag, - label: "continuous-integration/drone/tag", - }, - { - event: core.EventPromote, - deployTo: "production", - label: "continuous-integration/drone/promote/production", - }, - { - event: core.EventPromote, - deployTo: "$production%", - label: "continuous-integration/drone/promote/production", - }, - { - event: "unknown", - label: "continuous-integration/drone", - }, - { - name: "drone", - event: core.EventPush, - label: "drone/push", - }, - } - for _, test := range tests { - if got, want := createLabel(test.name, test.event, test.deployTo), test.label; got != want { - t.Errorf("Want label %q, got %q", want, got) - } - } -} - -func TestCreateDesc(t *testing.T) { - tests := []struct { - status string - desc string - }{ - - { - status: core.StatusBlocked, - desc: "Build is pending approval", - }, - { - status: core.StatusDeclined, - desc: "Build was declined", - }, - { - status: core.StatusError, - desc: "Build encountered an error", - }, - { - status: core.StatusFailing, - desc: "Build is failing", - }, - { - status: core.StatusKilled, - desc: "Build was killed", - }, - { - status: core.StatusPassing, - desc: "Build is passing", - }, - { - status: core.StatusWaiting, - desc: "Build is pending", - }, - { - status: core.StatusPending, - desc: "Build is pending", - }, - { - status: core.StatusRunning, - desc: "Build is running", - }, - { - status: core.StatusSkipped, - desc: "Build was skipped", - }, - { - status: "unknown", - desc: "Build is in an unknown state", - }, - } - for _, test := range tests { - if got, want := createDesc(test.status), test.desc; got != want { - t.Errorf("Want dest %q, got %q", want, got) - } - } -} - -func TestConvertStatus(t *testing.T) { - tests := []struct { - from string - to scm.State - }{ - { - from: core.StatusBlocked, - to: scm.StatePending, - }, - { - from: core.StatusDeclined, - to: scm.StateCanceled, - }, - { - from: core.StatusError, - to: scm.StateError, - }, - { - from: core.StatusFailing, - to: scm.StateFailure, - }, - { - from: core.StatusKilled, - to: scm.StateCanceled, - }, - { - from: core.StatusPassing, - to: scm.StateSuccess, - }, - { - from: core.StatusPending, - to: scm.StatePending, - }, - { - from: core.StatusRunning, - to: scm.StatePending, - }, - { - from: core.StatusSkipped, - to: scm.StateUnknown, - }, - { - from: "unknown", - to: scm.StateUnknown, - }, - } - for _, test := range tests { - if got, want := convertStatus(test.from), test.to; got != want { - t.Errorf("Want status %v, got %v", want, got) - } - } -} diff --git a/service/syncer/filter.go b/service/syncer/filter.go deleted file mode 100644 index 2a403e85cd..0000000000 --- a/service/syncer/filter.go +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package syncer - -import ( - "strings" - - "github.com/drone/drone/core" -) - -// FilterFunc can be used to filter which repositories are -// synchronized with the local datastore. 
-type FilterFunc func(*core.Repository) bool - -// NamespaceFilter is a filter function that returns true -// if the repository namespace matches a provided namespace -// in the list. -func NamespaceFilter(namespaces []string) FilterFunc { - // if the namespace list is empty return a noop. - if len(namespaces) == 0 { - return noopFilter - } - return func(r *core.Repository) bool { - for _, namespace := range namespaces { - if strings.EqualFold(namespace, r.Namespace) { - return true - } - } - return false - } -} - -// noopFilter is a filter function that always returns true. -func noopFilter(*core.Repository) bool { - return true -} diff --git a/service/syncer/filter_oss.go b/service/syncer/filter_oss.go deleted file mode 100644 index db3f08ff02..0000000000 --- a/service/syncer/filter_oss.go +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build oss - -package syncer - -import "github.com/drone/drone/core" - -// FilterFunc can be used to filter which repositories are -// synchronized with the local datastore. -type FilterFunc func(*core.Repository) bool - -// NamespaceFilter is a no-op filter. -func NamespaceFilter(namespaces []string) FilterFunc { - return noopFilter -} - -// noopFilter is a filter function that always returns true. -func noopFilter(*core.Repository) bool { - return true -} diff --git a/service/syncer/filter_test.go b/service/syncer/filter_test.go deleted file mode 100644 index 67345bd7e0..0000000000 --- a/service/syncer/filter_test.go +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package syncer - -import ( - "testing" - - "github.com/drone/drone/core" -) - -func TestNamespaceFilter(t *testing.T) { - tests := []struct { - namespace string - namespaces []string - match bool - }{ - { - namespace: "octocat", - namespaces: []string{"octocat"}, - match: true, - }, - { - namespace: "OCTocat", - namespaces: []string{"octOCAT"}, - match: true, - }, - { - namespace: "spaceghost", - namespaces: []string{"octocat"}, - match: false, - }, - { - namespace: "spaceghost", - namespaces: []string{}, - match: true, // no-op filter - }, - } - for _, test := range tests { - r := &core.Repository{Namespace: test.namespace} - f := NamespaceFilter(test.namespaces) - if got, want := f(r), test.match; got != want { - t.Errorf("Want match %v for namespace %q and namespaces %v", want, test.namespace, test.namespaces) - } - } -} diff --git a/service/syncer/syncer.go b/service/syncer/syncer.go deleted file mode 100644 index 78194d3d39..0000000000 --- a/service/syncer/syncer.go +++ /dev/null @@ -1,239 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package syncer - -import ( - "context" - "runtime/debug" - "strings" - "time" - - "github.com/drone/drone/core" - - "github.com/sirupsen/logrus" -) - -// New returns a new Synchronizer. -func New( - repoz core.RepositoryService, - repos core.RepositoryStore, - users core.UserStore, - batch core.Batcher, -) *Synchronizer { - return &Synchronizer{ - repoz: repoz, - repos: repos, - users: users, - batch: batch, - match: noopFilter, - } -} - -// Synchronizer synchronizes user repositories and permissions -// between a remote source code management system and the local -// data store. -type Synchronizer struct { - repoz core.RepositoryService - repos core.RepositoryStore - users core.UserStore - batch core.Batcher - match FilterFunc -} - -// SetFilter sets the filter function. -func (s *Synchronizer) SetFilter(fn FilterFunc) { - s.match = fn -} - -// Sync synchronizes the user repository list in 6 easy steps. -func (s *Synchronizer) Sync(ctx context.Context, user *core.User) (*core.Batch, error) { - logger := logrus.WithField("login", user.Login) - logger.Debugln("syncer: begin repository sync") - - defer func() { - // taking the paranoid approach to recover from - // a panic that should absolutely never happen. - if err := recover(); err != nil { - logger = logger.WithField("error", err) - logger.Errorf("syncer: unexpected panic\n%s\n", debug.Stack()) - } - - // when the synchronization process is complete - // be sure to update the user sync date. - user.Syncing = false - user.Synced = time.Now().Unix() - s.users.Update(context.Background(), user) - }() - - if user.Syncing == false { - user.Syncing = true - err := s.users.Update(ctx, user) - if err != nil { - logger = logger.WithError(err) - logger.Warnln("syncer: cannot update user") - return nil, err - } - } - - batch := &core.Batch{} - remote := map[string]*core.Repository{} - local := map[string]*core.Repository{} - - // - // STEP1: get the list of repositories from the remote - // source code management system (e.g. GitHub). - // - - { - repos, err := s.repoz.List(ctx, user) - if err != nil { - logger = logger.WithError(err) - logger.Warnln("syncer: cannot get remote repository list") - return nil, err - } - for _, repo := range repos { - if strings.Count(repo.Slug, "/") > 1 { - if logrus.GetLevel() == logrus.TraceLevel { - logger.WithField("namespace", repo.Namespace). - WithField("name", repo.Name). - WithField("uid", repo.UID). - Traceln("syncer: skipping subrepositories") - } - } else if repo.Archived { - if logrus.GetLevel() == logrus.TraceLevel { - logger.WithField("namespace", repo.Namespace). - WithField("name", repo.Name). - WithField("uid", repo.UID). - Traceln("syncer: skipping archived repositories") - } - } else if s.match(repo) { - remote[repo.UID] = repo - if logrus.GetLevel() == logrus.TraceLevel { - logger.WithField("namespace", repo.Namespace). - WithField("name", repo.Name). - WithField("uid", repo.UID). - Traceln("syncer: remote repository matches filter") - } - } else { - if logrus.GetLevel() == logrus.TraceLevel { - logger.WithField("namespace", repo.Namespace). - WithField("name", repo.Name). 
- WithField("uid", repo.UID). - Traceln("syncer: remote repository does not match filter") - } - } - } - } - - // - // STEP2: get the list of repositories stored in the - // local database. - // - - { - repos, err := s.repos.List(ctx, user.ID) - if err != nil { - logger = logger.WithError(err) - logger.Warnln("syncer: cannot get cached repository list") - return nil, err - } - - for _, repo := range repos { - local[repo.UID] = repo - } - } - - // - // STEP3 find repos that exist in the remote system, - // but do not exist locally. Insert. - // - - for k, v := range remote { - _, ok := local[k] - if ok { - continue - } - v.Synced = time.Now().Unix() - v.Created = time.Now().Unix() - v.Updated = time.Now().Unix() - v.Version = 1 - batch.Insert = append(batch.Insert, v) - - if logrus.GetLevel() == logrus.TraceLevel { - logger.WithField("namespace", v.Namespace). - WithField("name", v.Name). - WithField("uid", v.UID). - Traceln("syncer: remote repository not in database") - } - } - - // - // STEP4 find repos that exist in the remote system and - // in the local system, but with incorrect data. Update. - // - - for k, v := range local { - vv, ok := remote[k] - if !ok { - continue - } - if diff(v, vv) { - merge(v, vv) - v.Synced = time.Now().Unix() - v.Updated = time.Now().Unix() - batch.Update = append(batch.Update, v) - - if logrus.GetLevel() == logrus.TraceLevel { - logger.WithField("namespace", v.Namespace). - WithField("name", v.Name). - WithField("uid", v.UID). - Traceln("syncer: repository requires update") - } - } - } - - // - // STEP5 find repos that exist in the local system, - // but not in the remote system. Revoke permissions. - // - - for k, v := range local { - _, ok := remote[k] - if ok { - continue - } - batch.Revoke = append(batch.Revoke, v) - - if logrus.GetLevel() == logrus.TraceLevel { - logger.WithField("namespace", v.Namespace). - WithField("name", v.Name). - WithField("uid", v.UID). - Traceln("syncer: repository in database not in remote repository list") - } - } - - // - // STEP6 update the database. - // - - if err := s.batch.Batch(ctx, user, batch); err != nil { - logger = logger.WithError(err) - logger.Warnln("syncer: cannot batch update") - return nil, err - } - - logger.Debugln("syncer: finished repository sync") - return batch, nil -} diff --git a/service/syncer/syncer_test.go b/service/syncer/syncer_test.go deleted file mode 100644 index 0d7ae7aaf0..0000000000 --- a/service/syncer/syncer_test.go +++ /dev/null @@ -1,485 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -package syncer - -import ( - "context" - "database/sql" - "io/ioutil" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/mock" - "github.com/drone/go-scm/scm" - "github.com/sirupsen/logrus" - - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" - "github.com/google/go-cmp/cmp/cmpopts" -) - -// TODO(bradrydzewski) test failure to update user -// TODO(bradrydzewski) test recover from unexpected panic - -var noContext = context.Background() - -func init() { - logrus.SetOutput(ioutil.Discard) - logrus.SetLevel(logrus.TraceLevel) -} - -func TestSync(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - user := &core.User{ID: 1} - - userStore := mock.NewMockUserStore(controller) - userStore.EXPECT().Update(gomock.Any(), user).Return(nil) - userStore.EXPECT().Update(gomock.Any(), user).Return(nil) - - batcher := mock.NewMockBatcher(controller) - batcher.EXPECT().Batch(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil) - - repoStore := mock.NewMockRepositoryStore(controller) - repoStore.EXPECT().List(gomock.Any(), gomock.Any()).Return([]*core.Repository{}, nil) - - repoService := mock.NewMockRepositoryService(controller) - repoService.EXPECT().List(gomock.Any(), user).Return([]*core.Repository{ - { - UID: "1", - Slug: "octocat/hello-world", - Namespace: "octocat", - Name: "hello-world", - Private: false, - Visibility: core.VisibilityPublic, - }, - }, nil) - - s := New( - repoService, - repoStore, - userStore, - batcher, - ) - got, err := s.Sync(context.Background(), user) - if err != nil { - t.Error(err) - } - - want := &core.Batch{ - Insert: []*core.Repository{ - { - UID: "1", - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - Visibility: core.VisibilityPublic, - Version: 1, - }, - }, - } - - ignore := cmpopts.IgnoreFields(core.Repository{}, - "Synced", "Created", "Updated") - if diff := cmp.Diff(got, want, ignore); len(diff) != 0 { - t.Errorf(diff) - } -} - -// this test verifies that we are able to recognize when -// a repository has been updated. 
-func TestSync_Update(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - user := &core.User{ID: 1} - userStore := mock.NewMockUserStore(controller) - userStore.EXPECT().Update(gomock.Any(), user).Return(nil) - userStore.EXPECT().Update(gomock.Any(), user).Return(nil) - - batcher := mock.NewMockBatcher(controller) - batcher.EXPECT().Batch(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil) - - repoStore := mock.NewMockRepositoryStore(controller) - repoStore.EXPECT().List(gomock.Any(), gomock.Any()).Return([]*core.Repository{ - {UID: "1", Namespace: "octocat", Name: "hello-world"}, - {UID: "2", Namespace: "octocat", Name: "Spoon-Knife", Private: false}, - }, nil) - - repoService := mock.NewMockRepositoryService(controller) - repoService.EXPECT().List(gomock.Any(), user).Return([]*core.Repository{ - { - UID: "1", - Slug: "octocat/hello-world", - Namespace: "octocat", - Name: "hello-world", - }, - { - UID: "2", - Slug: "octocat/Spoon-Knife", - Namespace: "octocat", - Name: "Spoon-Knife", - Private: true, - }, - }, nil) - - s := New( - repoService, - repoStore, - userStore, - batcher, - ) - got, err := s.Sync(context.Background(), user) - if err != nil { - t.Error(err) - } - - want := &core.Batch{ - Update: []*core.Repository{ - { - UID: "2", - Namespace: "octocat", - Name: "Spoon-Knife", - Slug: "octocat/Spoon-Knife", - Private: true, - }, - }, - } - - ignore := cmpopts.IgnoreFields(core.Repository{}, - "Synced", "Created", "Updated") - if diff := cmp.Diff(got, want, ignore); len(diff) != 0 { - t.Errorf(diff) - } -} - -// this test verifies that we are able to recognize when -// a repository has been renamed. -func TestSync_Rename(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - user := &core.User{ID: 1} - userStore := mock.NewMockUserStore(controller) - userStore.EXPECT().Update(gomock.Any(), user).Return(nil) - userStore.EXPECT().Update(gomock.Any(), user).Return(nil) - - batcher := mock.NewMockBatcher(controller) - batcher.EXPECT().Batch(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil) - - repoStore := mock.NewMockRepositoryStore(controller) - repoStore.EXPECT().List(gomock.Any(), gomock.Any()).Return([]*core.Repository{ - {ID: 101, UID: "1", Namespace: "octocat", Name: "hello-world"}, - {ID: 102, UID: "2", Namespace: "octocat", Name: "Spoon-Knife"}, - }, nil) - - repoService := mock.NewMockRepositoryService(controller) - repoService.EXPECT().List(gomock.Any(), user).Return([]*core.Repository{ - { - UID: "1", - Slug: "octocat/hello-world", - Namespace: "octocat", - Name: "hello-world", - }, - { - UID: "2", - Slug: "octocat/Spoon-Knife", - Namespace: "octocat", - Name: "Spork-Knife", - }, - }, nil) - - s := New( - repoService, - repoStore, - userStore, - batcher, - ) - got, err := s.Sync(context.Background(), user) - if err != nil { - t.Error(err) - } - want := &core.Batch{ - Update: []*core.Repository{ - {ID: 102, UID: "2", Namespace: "octocat", Name: "Spork-Knife", Slug: "octocat/Spork-Knife"}, - }, - } - ignore := cmpopts.IgnoreFields(core.Repository{}, - "Synced", "Created", "Updated") - if diff := cmp.Diff(got, want, ignore); len(diff) != 0 { - t.Errorf(diff) - } -} - -// this test verifies that we are able to recognize when -// the user permission to the repository have been revoked. 
-func TestSync_Revoke(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - user := &core.User{ID: 1} - userStore := mock.NewMockUserStore(controller) - userStore.EXPECT().Update(gomock.Any(), user).Return(nil) - userStore.EXPECT().Update(gomock.Any(), user).Return(nil) - - batcher := mock.NewMockBatcher(controller) - batcher.EXPECT().Batch(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil) - - repoStore := mock.NewMockRepositoryStore(controller) - repoStore.EXPECT().List(gomock.Any(), gomock.Any()).Return([]*core.Repository{ - {UID: "1", Namespace: "octocat", Name: "hello-world"}, - }, nil) - - repoService := mock.NewMockRepositoryService(controller) - repoService.EXPECT().List(gomock.Any(), user).Return([]*core.Repository{}, nil) - - s := New( - repoService, - repoStore, - userStore, - batcher, - ) - got, err := s.Sync(context.Background(), user) - if err != nil { - t.Error(err) - } - want := &core.Batch{ - Revoke: []*core.Repository{ - {UID: "1", Namespace: "octocat", Name: "hello-world"}, - }, - } - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -// this test verifies that we invoke the batch update even -// if there are no batch updates to make. This is important -// because the batcher resets permissions and forces Drone -// to re-synchronize. -func TestSync_EmptyBatch(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - user := &core.User{ID: 1} - userStore := mock.NewMockUserStore(controller) - userStore.EXPECT().Update(gomock.Any(), user).Return(nil) - userStore.EXPECT().Update(gomock.Any(), user).Return(nil) - - batcher := mock.NewMockBatcher(controller) - batcher.EXPECT().Batch(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil) - - repoService := mock.NewMockRepositoryService(controller) - repoService.EXPECT().List(gomock.Any(), user).Return(nil, nil) - - repoStore := mock.NewMockRepositoryStore(controller) - repoStore.EXPECT().List(gomock.Any(), gomock.Any()).Return(nil, nil) - - s := New( - repoService, - repoStore, - userStore, - batcher, - ) - batch, err := s.Sync(context.Background(), user) - if err != nil { - t.Error(err) - } - if want, got := len(batch.Insert), 0; got != want { - t.Errorf("Want %d batch inserts, got %d", want, got) - } - if want, got := len(batch.Update), 0; got != want { - t.Errorf("Want %d batch updates, got %d", want, got) - } - if want, got := len(batch.Revoke), 0; got != want { - t.Errorf("Want %d batch revokes, got %d", want, got) - } -} - -// this test verifies that an error returned by the source -// code management system causes the synchronization process to -// exit and is returned to the caller. -func TestSync_RemoteError(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - user := &core.User{ID: 1} - userStore := mock.NewMockUserStore(controller) - userStore.EXPECT().Update(gomock.Any(), user).Return(nil) - userStore.EXPECT().Update(gomock.Any(), user).Return(nil) - - repoService := mock.NewMockRepositoryService(controller) - repoService.EXPECT().List(gomock.Any(), user).Return(nil, scm.ErrNotFound) - - s := New( - repoService, - nil, - userStore, - nil, - ) - _, err := s.Sync(context.Background(), user) - if got, want := err, scm.ErrNotFound; got != want { - t.Errorf("Want error %s, got %s", want, got) - } -} - -// this test verifies that an error returned by the internal -// repository datastore causes the synchronization process to -// exit and is returned to the caller. 
-func TestSync_StoreError(t *testing.T) {
-	controller := gomock.NewController(t)
-	defer controller.Finish()
-
-	user := &core.User{ID: 1}
-	userStore := mock.NewMockUserStore(controller)
-	userStore.EXPECT().Update(gomock.Any(), user).Return(nil)
-	userStore.EXPECT().Update(gomock.Any(), user).Return(nil)
-
-	repoService := mock.NewMockRepositoryService(controller)
-	repoService.EXPECT().List(gomock.Any(), user).Return([]*core.Repository{}, nil)
-
-	repoStore := mock.NewMockRepositoryStore(controller)
-	repoStore.EXPECT().List(gomock.Any(), gomock.Any()).Return(nil, sql.ErrNoRows)
-
-	s := Synchronizer{
-		repoz: repoService,
-		users: userStore,
-		repos: repoStore,
-	}
-	_, err := s.Sync(context.Background(), user)
-	if got, want := err, sql.ErrNoRows; got != want {
-		t.Errorf("Want error %s, got %s", want, got)
-	}
-}
-
-// this test verifies that an error returned by the batcher
-// causes the synchronization process to exit and is returned
-// to the caller.
-func TestSync_BatchError(t *testing.T) {
-	controller := gomock.NewController(t)
-	defer controller.Finish()
-
-	user := &core.User{ID: 1}
-	userStore := mock.NewMockUserStore(controller)
-	userStore.EXPECT().Update(gomock.Any(), user).Return(nil)
-	userStore.EXPECT().Update(gomock.Any(), user).Return(nil)
-
-	repoService := mock.NewMockRepositoryService(controller)
-	repoService.EXPECT().List(gomock.Any(), user).Return([]*core.Repository{}, nil)
-
-	repoStore := mock.NewMockRepositoryStore(controller)
-	repoStore.EXPECT().List(gomock.Any(), gomock.Any()).Return(nil, nil)
-
-	batcher := mock.NewMockBatcher(controller)
-	batcher.EXPECT().Batch(gomock.Any(), gomock.Any(), gomock.Any()).Return(sql.ErrNoRows)
-
-	s := New(
-		repoService,
-		repoStore,
-		userStore,
-		batcher,
-	)
-	_, err := s.Sync(context.Background(), user)
-	if got, want := err, sql.ErrNoRows; got != want {
-		t.Errorf("Want error %s, got %s", want, got)
-	}
-}
-
-// this test verifies that sub-repositories are skipped. They
-// are unsupported by Drone and should be ignored.
-func TestSync_SkipSubrepo(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - user := &core.User{ID: 1} - - userStore := mock.NewMockUserStore(controller) - userStore.EXPECT().Update(gomock.Any(), user).Return(nil) - userStore.EXPECT().Update(gomock.Any(), user).Return(nil) - - batcher := mock.NewMockBatcher(controller) - batcher.EXPECT().Batch(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil) - - repoStore := mock.NewMockRepositoryStore(controller) - repoStore.EXPECT().List(gomock.Any(), gomock.Any()).Return([]*core.Repository{}, nil) - - repoService := mock.NewMockRepositoryService(controller) - repoService.EXPECT().List(gomock.Any(), user).Return([]*core.Repository{ - { - UID: "1", - Slug: "octocat/hello/world", - Namespace: "octocat", - Name: "hello-world", - Private: false, - Visibility: core.VisibilityPublic, - }, - }, nil) - - s := New( - repoService, - repoStore, - userStore, - batcher, - ) - got, err := s.Sync(context.Background(), user) - if err != nil { - t.Error(err) - } - - want := &core.Batch{} - if diff := cmp.Diff(got, want); len(diff) != 0 { - t.Errorf(diff) - } -} - -func TestSyncArchive(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - user := &core.User{ID: 1} - - userStore := mock.NewMockUserStore(controller) - userStore.EXPECT().Update(gomock.Any(), user).Return(nil) - userStore.EXPECT().Update(gomock.Any(), user).Return(nil) - - batcher := mock.NewMockBatcher(controller) - batcher.EXPECT().Batch(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil) - - repoStore := mock.NewMockRepositoryStore(controller) - repoStore.EXPECT().List(gomock.Any(), gomock.Any()).Return([]*core.Repository{}, nil) - - repoService := mock.NewMockRepositoryService(controller) - repoService.EXPECT().List(gomock.Any(), user).Return([]*core.Repository{ - { - UID: "1", - Slug: "octocat/hello-world", - Namespace: "octocat", - Name: "hello-world", - Private: false, - Visibility: core.VisibilityPublic, - Archived: true, - }, - }, nil) - - s := New( - repoService, - repoStore, - userStore, - batcher, - ) - got, err := s.Sync(context.Background(), user) - if err != nil { - t.Error(err) - } - - want := &core.Batch{} - - ignore := cmpopts.IgnoreFields(core.Repository{}, - "Synced", "Created", "Updated") - if diff := cmp.Diff(got, want, ignore); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/service/syncer/util.go b/service/syncer/util.go deleted file mode 100644 index 82386e03a6..0000000000 --- a/service/syncer/util.go +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package syncer - -import ( - "github.com/drone/drone/core" - "github.com/drone/go-scm/scm" -) - -// merge is a helper function that merges a subset of -// values from the source to the destination repository. 
-func merge(dst, src *core.Repository) { - dst.Namespace = src.Namespace - dst.Name = src.Name - dst.HTTPURL = src.HTTPURL - dst.SSHURL = src.SSHURL - dst.Private = src.Private - dst.Branch = src.Branch - dst.Slug = scm.Join(src.Namespace, src.Name) - - // the gitea and gogs repository endpoints do not - // return the html url, so we need to ensure we do - // not replace the existing value with a zero value. - if src.Link != "" { - dst.Link = src.Link - } -} - -// diff is a helper function that compares two repositories -// and returns true if a subset of values are different. -func diff(a, b *core.Repository) bool { - switch { - case a.Namespace != b.Namespace: - return true - case a.Name != b.Name: - return true - case a.HTTPURL != b.HTTPURL: - return true - case a.SSHURL != b.SSHURL: - return true - case a.Private != b.Private: - return true - case a.Branch != b.Branch: - return true - case a.Link != b.Link: - return true - default: - return false - } -} diff --git a/service/syncer/util_test.go b/service/syncer/util_test.go deleted file mode 100644 index a59d4b1c10..0000000000 --- a/service/syncer/util_test.go +++ /dev/null @@ -1,195 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package syncer - -import ( - "testing" - - "github.com/drone/drone/core" - "github.com/google/go-cmp/cmp" -) - -// import ( -// "testing" - -// "github.com/drone/drone/core" -// "github.com/drone/go-scm/scm" - -// "github.com/google/go-cmp/cmp" -// ) - -// func TestConvertRepository(t *testing.T) { -// from := &scm.Repository{ -// ID: "42", -// Namespace: "octocat", -// Name: "hello-world", -// Branch: "master", -// Private: true, -// Clone: "https://github.com/octocat/hello-world.git", -// CloneSSH: "git@github.com:octocat/hello-world.git", -// Link: "https://github.com/octocat/hello-world", -// } -// want := &core.Repository{ -// UID: "42", -// Namespace: "octocat", -// Name: "hello-world", -// Slug: "octocat/hello-world", -// HTTPURL: "https://github.com/octocat/hello-world.git", -// SSHURL: "git@github.com:octocat/hello-world.git", -// Link: "https://github.com/octocat/hello-world", -// Private: true, -// Branch: "master", -// Visibility: core.VisibilityPrivate, -// } -// got := convertRepository(from) -// if diff := cmp.Diff(want, got); len(diff) != 0 { -// t.Errorf(diff) -// } -// } - -// func TestConvertVisibility(t *testing.T) { -// tests := []struct { -// r *scm.Repository -// v string -// }{ -// { -// r: &scm.Repository{Private: false}, -// v: core.VisibilityPublic, -// }, -// { -// r: &scm.Repository{Private: true}, -// v: core.VisibilityPrivate, -// }, -// } - -// for i, test := range tests { -// if got, want := convertVisibility(test.r), test.v; got != want { -// t.Errorf("Want visibility %s, got %s for index %d", got, want, i) -// } -// } -// } - -func TestDiff(t *testing.T) { - tests := []struct { - a *core.Repository - b *core.Repository - r bool - }{ - { - a: &core.Repository{ - Namespace: "octocat", - Name: "hello-world", - HTTPURL: "https://github.com/octocat/hello-world.git", - SSHURL: "git@github.com:octocat/hello-world.git", - Link: "https://github.com/octocat/hello-world", - Private: true, - Branch: "master", - }, - b: &core.Repository{ - Namespace: "octocat", - Name: "hello-world", - HTTPURL: "https://github.com/octocat/hello-world.git", - SSHURL: "git@github.com:octocat/hello-world.git", - Link: "https://github.com/octocat/hello-world", - Private: 
true, - Branch: "master", - }, - r: false, - }, - { - a: &core.Repository{Namespace: "octocat"}, - b: &core.Repository{Namespace: "spaceghost"}, - r: true, - }, - { - a: &core.Repository{Name: "hello-world"}, - b: &core.Repository{Name: "hola-mundo"}, - r: true, - }, - { - a: &core.Repository{HTTPURL: "https://github.com/octocat/hello-world.git"}, - b: &core.Repository{HTTPURL: "https://github.com/octocat/hola-mundo.git"}, - r: true, - }, - { - a: &core.Repository{SSHURL: "git@github.com:octocat/hello-world.git"}, - b: &core.Repository{SSHURL: "git@github.com:octocat/hola-mundo.git"}, - r: true, - }, - { - a: &core.Repository{Link: "https://github.com/octocat/hello-world"}, - b: &core.Repository{Link: "https://github.com/octocat/hola-mundo"}, - r: true, - }, - { - a: &core.Repository{Private: false}, - b: &core.Repository{Private: true}, - r: true, - }, - { - a: &core.Repository{Branch: "master"}, - b: &core.Repository{Branch: "develop"}, - r: true, - }, - } - - for i, test := range tests { - if got, want := diff(test.a, test.b), test.r; got != want { - t.Errorf("Want diff %v, got %v for index %d", got, want, i) - } - } -} - -func TestMerge(t *testing.T) { - dst := &core.Repository{ - ID: 1, - UID: "42", - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - HTTPURL: "https://github.com/octocat/hello-world.git", - SSHURL: "git@github.com:octocat/hello-world.git", - Link: "https://github.com/octocat/hello-world", - Private: true, - Branch: "master", - Visibility: core.VisibilityPublic, - Active: true, - Counter: 99, - Version: 2, - Signer: "DRONESIGNER", - Secret: "DRONESECRET", - } - src := &core.Repository{ - Namespace: "spaceghost", - Name: "hola-mundo", - HTTPURL: "https://github.com/spaceghost/hola-mundo.git", - SSHURL: "git@github.com:spaceghost/hola-mundo.git", - Link: "https://github.com/spaceghost/hola-mundo", - Private: false, - Branch: "develop", - } - merged := &core.Repository{ - ID: 1, - UID: "42", - Namespace: "spaceghost", - Name: "hola-mundo", - Slug: "spaceghost/hola-mundo", - HTTPURL: "https://github.com/spaceghost/hola-mundo.git", - SSHURL: "git@github.com:spaceghost/hola-mundo.git", - Link: "https://github.com/spaceghost/hola-mundo", - Private: false, - Branch: "develop", - Visibility: core.VisibilityPublic, - Active: true, - Counter: 99, - Version: 2, - Signer: "DRONESIGNER", - Secret: "DRONESECRET", - } - merge(dst, src) - if diff := cmp.Diff(merged, dst); len(diff) != 0 { - t.Errorf(diff) - } -} diff --git a/service/token/renew.go b/service/token/renew.go deleted file mode 100644 index 76d8ab03ac..0000000000 --- a/service/token/renew.go +++ /dev/null @@ -1,77 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package token - -import ( - "context" - "time" - - "github.com/drone/drone/core" - - "github.com/drone/go-scm/scm" - "github.com/drone/go-scm/scm/transport/oauth2" -) - -// expiryDelta determines how earlier a token should be considered -// expired than its actual expiration time. 
It is used to avoid late -// expirations due to client-server time mismatches. -const expiryDelta = time.Minute - -type renewer struct { - refresh *oauth2.Refresher - users core.UserStore -} - -// Renewer returns a new Renewer. -func Renewer(refresh *oauth2.Refresher, store core.UserStore) core.Renewer { - return &renewer{ - refresh: refresh, - users: store, - } -} - -func (r *renewer) Renew(ctx context.Context, user *core.User, force bool) error { - if r.refresh == nil { - return nil - } - t := &scm.Token{ - Token: user.Token, - Refresh: user.Refresh, - Expires: time.Unix(user.Expiry, 0), - } - if expired(t) == false && force == false { - return nil - } - err := r.refresh.Refresh(t) - if err != nil { - return err - } - user.Token = t.Token - user.Refresh = t.Refresh - user.Expiry = t.Expires.Unix() - return r.users.Update(ctx, user) -} - -// expired reports whether the token is expired. -func expired(token *scm.Token) bool { - if len(token.Refresh) == 0 { - return false - } - if token.Expires.IsZero() && len(token.Token) != 0 { - return false - } - return token.Expires.Add(-expiryDelta). - Before(time.Now()) -} diff --git a/service/token/renew_test.go b/service/token/renew_test.go deleted file mode 100644 index 5ea7064789..0000000000 --- a/service/token/renew_test.go +++ /dev/null @@ -1,7 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package token diff --git a/service/transfer/transfer.go b/service/transfer/transfer.go deleted file mode 100644 index 31eb5a87aa..0000000000 --- a/service/transfer/transfer.go +++ /dev/null @@ -1,113 +0,0 @@ -// Copyright 2020 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package transfer - -import ( - "context" - "runtime/debug" - - "github.com/drone/drone/core" - - "github.com/hashicorp/go-multierror" - "github.com/sirupsen/logrus" -) - -// Transferer handles transfering repository ownership from one -// user to another user account. -type Transferer struct { - Repos core.RepositoryStore - Perms core.PermStore -} - -// New returns a new repository transfer service. -func New(repos core.RepositoryStore, perms core.PermStore) core.Transferer { - return &Transferer{ - Repos: repos, - Perms: perms, - } -} - -// Transfer transfers all repositories owned by the specified user -// to an alternate account with sufficient admin permissions. -func (t *Transferer) Transfer(ctx context.Context, user *core.User) error { - defer func() { - // taking the paranoid approach to recover from - // a panic that should absolutely never happen. - if r := recover(); r != nil { - logrus.Errorf("transferer: unexpected panic: %s", r) - debug.PrintStack() - } - }() - - repos, err := t.Repos.List(ctx, user.ID) - if err != nil { - return err - } - - var result error - for _, repo := range repos { - // only transfer repository ownership if the deactivated - // user owns the repository. 
- if repo.UserID != user.ID { - continue - } - - members, err := t.Perms.List(ctx, repo.UID) - if err != nil { - result = multierror.Append(result, err) - continue - } - - var admin int64 - for _, member := range members { - // only transfer the repository to an admin user - // that is not equal to the deactivated user. - if repo.UserID == member.UserID { - continue - } - if member.Admin { - admin = member.UserID - break - } - } - - if admin == 0 { - logrus. - WithField("repo.id", repo.ID). - WithField("repo.namespace", repo.Namespace). - WithField("repo.name", repo.Name). - Traceln("repository disabled") - } else { - logrus. - WithField("repo.id", repo.ID). - WithField("repo.namespace", repo.Namespace). - WithField("repo.name", repo.Name). - WithField("old.user.id", repo.UserID). - WithField("new.user.id", admin). - Traceln("repository owner re-assigned") - } - - // if no alternate user was found the repository id - // is reset to the zero value, indicating the repository - // has no owner. - repo.UserID = admin - err = t.Repos.Update(ctx, repo) - if err != nil { - result = multierror.Append(result, err) - } - } - - return result -} diff --git a/service/transfer/transfer_test.go b/service/transfer/transfer_test.go deleted file mode 100644 index e6d8095bae..0000000000 --- a/service/transfer/transfer_test.go +++ /dev/null @@ -1,119 +0,0 @@ -// Copyright 2020 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package transfer - -import ( - "context" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/mock" - - "github.com/golang/mock/gomock" -) - -var nocontext = context.Background() - -func TestTransfer(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockRepo := &core.Repository{ - ID: 1, - UserID: 2, - UID: "123", - } - mockRepos := []*core.Repository{ - mockRepo, - } - mockCollabs := []*core.Collaborator{ - { - UserID: 1, // do not match non-admin - Admin: false, - }, - { - UserID: 2, // do not match existing owner - Admin: true, - }, - { - UserID: 3, - Admin: true, - }, - } - mockUser := &core.User{ - ID: 2, - } - - checkRepo := func(ctx context.Context, updated *core.Repository) error { - if updated.UserID != 3 { - t.Errorf("Expect repository owner id assigned to user id 3") - } - return nil - } - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().List(gomock.Any(), mockUser.ID).Return(mockRepos, nil).Times(1) - repos.EXPECT().Update(gomock.Any(), mockRepo).Do(checkRepo).Times(1) - - perms := mock.NewMockPermStore(controller) - perms.EXPECT().List(gomock.Any(), mockRepo.UID).Return(mockCollabs, nil).Times(1) - - r := New( - repos, - perms, - ) - - err := r.Transfer(nocontext, mockUser) - if err != nil { - t.Error(err) - } -} - -func TestTransfer_NoOwner(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockRepo := &core.Repository{ - ID: 1, - UserID: 2, - UID: "123", - } - mockRepos := []*core.Repository{ - mockRepo, - } - mockCollabs := []*core.Collaborator{ - { - UserID: 2, // same user id - Admin: true, - }, - } - mockUser := &core.User{ - ID: 2, - } - - checkRepo := func(ctx context.Context, updated *core.Repository) error { - if updated.UserID != 0 { - t.Errorf("Expect repository owner id reset to zero value") - } - return nil - } - - repos := mock.NewMockRepositoryStore(controller) - repos.EXPECT().List(gomock.Any(), mockUser.ID).Return(mockRepos, nil).Times(1) - 
repos.EXPECT().Update(gomock.Any(), mockRepo).Do(checkRepo).Times(1) - - perms := mock.NewMockPermStore(controller) - perms.EXPECT().List(gomock.Any(), mockRepo.UID).Return(mockCollabs, nil).Times(1) - - r := New( - repos, - perms, - ) - - err := r.Transfer(nocontext, mockUser) - if err != nil { - t.Error(err) - } -} diff --git a/service/user/user.go b/service/user/user.go deleted file mode 100644 index 9b0efcbb80..0000000000 --- a/service/user/user.go +++ /dev/null @@ -1,77 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package user - -import ( - "context" - - "github.com/drone/drone/core" - "github.com/drone/go-scm/scm" -) - -type service struct { - client *scm.Client - renew core.Renewer -} - -// New returns a new User service that provides access to -// user data from the source code management system. -func New(client *scm.Client, renew core.Renewer) core.UserService { - return &service{client: client, renew: renew} -} - -func (s *service) Find(ctx context.Context, access, refresh string) (*core.User, error) { - ctx = context.WithValue(ctx, scm.TokenKey{}, &scm.Token{ - Token: access, - Refresh: refresh, - }) - src, _, err := s.client.Users.Find(ctx) - if err != nil { - return nil, err - } - return convert(src), nil -} - -func (s *service) FindLogin(ctx context.Context, user *core.User, login string) (*core.User, error) { - err := s.renew.Renew(ctx, user, false) - if err != nil { - return nil, err - } - - ctx = context.WithValue(ctx, scm.TokenKey{}, &scm.Token{ - Token: user.Token, - Refresh: user.Refresh, - }) - src, _, err := s.client.Users.FindLogin(ctx, login) - if err != nil { - return nil, err - } - return convert(src), nil -} - -func convert(src *scm.User) *core.User { - dst := &core.User{ - Login: src.Login, - Email: src.Email, - Avatar: src.Avatar, - } - if !src.Created.IsZero() { - dst.Created = src.Created.Unix() - } - if !src.Updated.IsZero() { - dst.Updated = src.Updated.Unix() - } - return dst -} diff --git a/service/user/user_test.go b/service/user/user_test.go deleted file mode 100644 index 79e8570e10..0000000000 --- a/service/user/user_test.go +++ /dev/null @@ -1,89 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
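
The ownership hand-off in the Transferer removed above reduces to choosing the first collaborator with admin rights who is not the deactivated user, and keeping the zero value (an ownerless repository) when no candidate exists. A standalone sketch of that selection rule, with a simplified collaborator type in place of core.Collaborator; it is illustrative only and not part of this patch.

package main

import "fmt"

// collaborator is a simplified stand-in for core.Collaborator.
type collaborator struct {
	UserID int64
	Admin  bool
}

// pickNewOwner returns the user id of the first admin collaborator other
// than oldOwner, or 0 if the repository has no eligible new owner.
func pickNewOwner(oldOwner int64, members []collaborator) int64 {
	for _, m := range members {
		if m.UserID == oldOwner {
			continue // skip the deactivated owner
		}
		if m.Admin {
			return m.UserID
		}
	}
	return 0 // no alternate admin: repository becomes ownerless
}

func main() {
	members := []collaborator{
		{UserID: 1, Admin: false}, // non-admin, skipped
		{UserID: 2, Admin: true},  // current owner, skipped
		{UserID: 3, Admin: true},  // selected
	}
	fmt.Println(pickNewOwner(2, members)) // prints: 3
}
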
- -package user - -import ( - "context" - "testing" - "time" - - "github.com/drone/drone/core" - "github.com/drone/drone/mock/mockscm" - "github.com/drone/go-scm/scm" - "github.com/google/go-cmp/cmp" - - "github.com/golang/mock/gomock" -) - -var noContext = context.Background() - -func TestFind(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - checkToken := func(ctx context.Context) { - got, ok := ctx.Value(scm.TokenKey{}).(*scm.Token) - if !ok { - t.Errorf("Expect token stored in context") - return - } - want := &scm.Token{ - Token: "755bb80e5b", - Refresh: "e08f3fa43e", - } - if diff := cmp.Diff(got, want); diff != "" { - t.Errorf(diff) - } - } - - now := time.Now() - mockUser := &scm.User{ - Login: "octocat", - Email: "octocat@github.com", - Avatar: "https://secure.gravatar.com/avatar/8c58a0be77ee441bb8f8595b7f1b4e87", - Created: now, - Updated: now, - } - mockUsers := mockscm.NewMockUserService(controller) - mockUsers.EXPECT().Find(gomock.Any()).Do(checkToken).Return(mockUser, nil, nil) - - client := new(scm.Client) - client.Users = mockUsers - - want := &core.User{ - Login: "octocat", - Email: "octocat@github.com", - Avatar: "https://secure.gravatar.com/avatar/8c58a0be77ee441bb8f8595b7f1b4e87", - Created: now.Unix(), - Updated: now.Unix(), - } - got, err := New(client, nil).Find(noContext, "755bb80e5b", "e08f3fa43e") - if err != nil { - t.Error(err) - } - - if diff := cmp.Diff(got, want); diff != "" { - t.Errorf(diff) - } -} - -func TestFind_Error(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUsers := mockscm.NewMockUserService(controller) - mockUsers.EXPECT().Find(gomock.Any()).Return(nil, nil, scm.ErrNotFound) - - client := new(scm.Client) - client.Users = mockUsers - - got, err := New(client, nil).Find(noContext, "755bb80e5b", "e08f3fa43e") - if err == nil { - t.Errorf("Expect error finding user") - } - if got != nil { - t.Errorf("Expect nil user on error") - } -} diff --git a/session/config.go b/session/config.go deleted file mode 100644 index bb7191e031..0000000000 --- a/session/config.go +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package session - -import "time" - -// Config provides the session configuration. -type Config struct { - Secure bool - Secret string - Timeout time.Duration -} - -// NewConfig returns a new session configuration. -func NewConfig(secret string, timeout time.Duration, secure bool) Config { - return Config{ - Secure: secure, - Secret: secret, - Timeout: timeout, - } -} diff --git a/session/session.go b/session/session.go deleted file mode 100644 index 86e2d96d57..0000000000 --- a/session/session.go +++ /dev/null @@ -1,113 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package session - -import ( - "net/http" - "strings" - "time" - - "github.com/drone/drone/core" - - "github.com/dchest/authcookie" -) - -// New returns a new cookie-based session management. -func New(users core.UserStore, config Config) core.Session { - return &session{ - secret: []byte(config.Secret), - secure: config.Secure, - timeout: config.Timeout, - users: users, - } -} - -type session struct { - users core.UserStore - secret []byte - secure bool - timeout time.Duration - - administrator string // administrator account - prometheus string // prometheus account - autoscaler string // autoscaler account -} - -func (s *session) Create(w http.ResponseWriter, user *core.User) error { - cookie := &http.Cookie{ - Name: "_session_", - Path: "/", - MaxAge: 2147483647, - HttpOnly: true, - Secure: s.secure, - Value: authcookie.NewSinceNow( - user.Login, - s.timeout, - s.secret, - ), - } - w.Header().Add("Set-Cookie", cookie.String()+"; SameSite=lax") - return nil -} - -func (s *session) Delete(w http.ResponseWriter) error { - w.Header().Add("Set-Cookie", "_session_=deleted; Path=/; Max-Age=0") - return nil -} - -func (s *session) Get(r *http.Request) (*core.User, error) { - switch { - case isAuthorizationToken(r): - return s.fromToken(r) - case isAuthorizationParameter(r): - return s.fromToken(r) - default: - return s.fromSession(r) - } -} - -func (s *session) fromSession(r *http.Request) (*core.User, error) { - cookie, err := r.Cookie("_session_") - if err != nil { - return nil, nil - } - login := authcookie.Login(cookie.Value, s.secret) - if login == "" { - return nil, nil - } - return s.users.FindLogin(r.Context(), login) -} - -func (s *session) fromToken(r *http.Request) (*core.User, error) { - return s.users.FindToken(r.Context(), - extractToken(r), - ) -} - -func isAuthorizationToken(r *http.Request) bool { - return r.Header.Get("Authorization") != "" -} - -func isAuthorizationParameter(r *http.Request) bool { - return r.FormValue("access_token") != "" -} - -func extractToken(r *http.Request) string { - bearer := r.Header.Get("Authorization") - if bearer == "" { - bearer = r.FormValue("access_token") - } - return strings.TrimPrefix(bearer, "Bearer ") -} diff --git a/session/session_test.go b/session/session_test.go deleted file mode 100644 index 9cada0bda8..0000000000 --- a/session/session_test.go +++ /dev/null @@ -1,217 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package session - -import ( - "database/sql" - "net/http" - "net/http/httptest" - "regexp" - "testing" - "time" - - "github.com/drone/drone/core" - "github.com/drone/drone/mock" - - "github.com/dchest/authcookie" - "github.com/golang/mock/gomock" -) - -// This test verifies that a user is returned when a valid -// authorization token included in the http.Request access_token -// query parameter. 
-func TestGet_Token_QueryParam(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{ - Login: "octocat", - Hash: "ulSxuA0FKjNiOFIchk18NNvC6ygSxdtKjiOAS", - } - - users := mock.NewMockUserStore(controller) - users.EXPECT().FindToken(gomock.Any(), mockUser.Hash).Return(mockUser, nil) - - session := New(users, NewConfig("correct-horse-battery-staple", time.Hour, false)) - r := httptest.NewRequest("GET", "/?access_token=ulSxuA0FKjNiOFIchk18NNvC6ygSxdtKjiOAS", nil) - user, _ := session.Get(r) - if user != mockUser { - t.Errorf("Want authenticated user") - } -} - -// This test verifies that a user is returned when a valid -// authorization token included in the Authorization header. -func TestGet_Token_Header(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{ - Login: "octocat", - Hash: "ulSxuA0FKjNiOFIchk18NNvC6ygSxdtKjiOAS", - } - - users := mock.NewMockUserStore(controller) - users.EXPECT().FindToken(gomock.Any(), mockUser.Hash).Return(mockUser, nil) - - session := New(users, NewConfig("correct-horse-battery-staple", time.Hour, false)) - r := httptest.NewRequest("GET", "/", nil) - r.Header.Set("Authorization", "Bearer ulSxuA0FKjNiOFIchk18NNvC6ygSxdtKjiOAS") - user, _ := session.Get(r) - if user != mockUser { - t.Errorf("Want authenticated user") - } -} - -func TestGet_Token_NoSession(t *testing.T) { - r := httptest.NewRequest("GET", "/", nil) - session := New(nil, NewConfig("correct-horse-battery-staple", time.Hour, false)) - user, _ := session.Get(r) - if user != nil { - t.Errorf("Expect empty session") - } -} - -func TestGet_Token_UserNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - users := mock.NewMockUserStore(controller) - users.EXPECT().FindToken(gomock.Any(), gomock.Any()).Return(nil, sql.ErrNoRows) - - r := httptest.NewRequest("GET", "/?access_token=ulSxuA0FKjNiOFIchk18NNvC6ygSxdtKjiOAS", nil) - session := New(users, NewConfig("correct-horse-battery-staple", time.Hour, false)) - user, _ := session.Get(r) - if user != nil { - t.Errorf("Expect empty session") - } -} - -func TestGet_Cookie(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUser := &core.User{ - Login: "octocat", - Admin: true, - Hash: "$2a$04$wD3oI7rqUlVy7xNh0B0FqOnNlw0bkVhxCi.XZNi2BTMnqIODIT4Xa", - } - - users := mock.NewMockUserStore(controller) - users.EXPECT().FindLogin(gomock.Any(), gomock.Any()).Return(mockUser, nil) - - secret := "correct-horse-battery-staple" - s := authcookie.New("octocat", time.Now().Add(time.Hour), []byte(secret)) - r := httptest.NewRequest("GET", "/", nil) - r.AddCookie(&http.Cookie{ - Name: "_session_", - Value: s, - }) - session := New(users, Config{Secure: false, Secret: secret, Timeout: time.Hour}) - user, err := session.Get(r) - if err != nil { - t.Error(err) - return - } - if user != mockUser { - t.Errorf("Want authenticated user") - } -} - -func TestGet_Cookie_NoCookie(t *testing.T) { - r := httptest.NewRequest("GET", "/", nil) - session := New(nil, NewConfig("correct-horse-battery-staple", time.Hour, false)) - user, _ := session.Get(r) - if user != nil { - t.Errorf("Expect nil user when no cookie") - } -} - -func TestGet_Cookie_Expired(t *testing.T) { - secret := "correct-horse-battery-staple" - s := authcookie.New("octocat", time.Now().Add(-1*time.Hour), []byte(secret)) - r := httptest.NewRequest("GET", "/", nil) - r.AddCookie(&http.Cookie{ - Name: "_session_", - Value: s, - }) 
- - session := New(nil, NewConfig("correct-horse-battery-staple", time.Hour, false)) - user, _ := session.Get(r) - if user != nil { - t.Errorf("Expect nil user when no cookie") - } -} - -func TestGet_Cookie_UserNotFound(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - users := mock.NewMockUserStore(controller) - users.EXPECT().FindLogin(gomock.Any(), gomock.Any()).Return(nil, sql.ErrNoRows) - - secret := "correct-horse-battery-staple" - s := authcookie.New("octocat", time.Now().Add(time.Hour), []byte(secret)) - r := httptest.NewRequest("GET", "/", nil) - r.AddCookie(&http.Cookie{ - Name: "_session_", - Value: s, - }) - - session := New(users, Config{Secure: false, Secret: secret, Timeout: time.Hour}) - user, _ := session.Get(r) - if user != nil { - t.Errorf("Expect empty session") - } -} - -func TestDelete(t *testing.T) { - w := httptest.NewRecorder() - - s := new(session) - err := s.Delete(w) - if err != nil { - t.Error(err) - } - - want := "_session_=deleted; Path=/; Max-Age=0" - got := w.Header().Get("Set-Cookie") - if got != want { - t.Errorf("Want header %q, got %q", want, got) - } -} - -func TestCreate(t *testing.T) { - w := httptest.NewRecorder() - - user := &core.User{ - ID: 1, - Login: "octocat", - } - s := &session{ - timeout: time.Minute, - secret: []byte("correct-horse-battery-staple"), - } - err := s.Create(w, user) - if err != nil { - t.Error(err) - } - - // TODO(bradrydzewski) improve this test to check the individual - // header parts, including the session string, to ensure the - // authcookie is set correctly and can be parsed. - - got := w.Header().Get("Set-Cookie") - want := "_session_=(.+); Path=/; Max-Age=2147483647; HttpOnly; SameSite=lax" - matched, err := regexp.MatchString(want, got) - if err != nil { - t.Error(err) - } - if !matched { - t.Error("Unexpected Set-Cookie header value") - } -} diff --git a/session/testdata/mapping.json b/session/testdata/mapping.json deleted file mode 100644 index d755fe63ed..0000000000 --- a/session/testdata/mapping.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "octocat": "this-is-a-test-secret", - "spaceghost": "this-is-an-invalid-secret" -} \ No newline at end of file diff --git a/store/batch/batch.go b/store/batch/batch.go deleted file mode 100644 index 3bfd04e945..0000000000 --- a/store/batch/batch.go +++ /dev/null @@ -1,360 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package batch - -import ( - "context" - "fmt" - "time" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/repos" - "github.com/drone/drone/store/shared/db" -) - -// New returns a new Batcher. 
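
The session package removed above signs and verifies its cookie value with github.com/dchest/authcookie: NewSinceNow embeds the login and an expiry under an HMAC, and Login returns the login only while the signature and expiry are valid. A small round-trip sketch of that behaviour follows; the secret and login values are placeholders, not taken from this patch.

package main

import (
	"fmt"
	"time"

	"github.com/dchest/authcookie"
)

func main() {
	secret := []byte("correct-horse-battery-staple")

	// issue a cookie value valid for one hour, as session.Create does.
	value := authcookie.NewSinceNow("octocat", time.Hour, secret)

	// verify it the way session.fromSession does.
	fmt.Println(authcookie.Login(value, secret)) // prints: octocat

	// an expired cookie validates to the empty string.
	expired := authcookie.New("octocat", time.Now().Add(-time.Hour), secret)
	fmt.Println(authcookie.Login(expired, secret) == "") // prints: true
}
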
-func New(db *db.DB) core.Batcher { - return &batchUpdater{db} -} - -type batchUpdater struct { - db *db.DB -} - -func (b *batchUpdater) Batch(ctx context.Context, user *core.User, batch *core.Batch) error { - return b.db.Update(func(execer db.Execer, binder db.Binder) error { - now := time.Now().Unix() - - // - // the repository list API does not return permissions, which means we have - // no way of knowing if permissions are current or not. We therefore mark all - // permissions stale in the database, so that each one must be individually - // verified at runtime. - // - - stmt := permResetStmt - switch b.db.Driver() { - case db.Postgres: - stmt = permResetStmtPostgres - } - - _, err := execer.Exec(stmt, now, user.ID) - if err != nil { - return fmt.Errorf("Error resetting permissions: %s", err) - } - - for _, repo := range batch.Insert { - - // - // insert repository - // TODO: group inserts in batches of N - // - - stmt := repoInsertIgnoreStmt - switch b.db.Driver() { - case db.Mysql: - stmt = repoInsertIgnoreStmtMysql - case db.Postgres: - stmt = repoInsertIgnoreStmtPostgres - } - - params := repos.ToParams(repo) - stmt, args, err := binder.BindNamed(stmt, params) - if err != nil { - return err - } - _, err = execer.Exec(stmt, args...) - if err != nil { - return fmt.Errorf("Error inserting repository: %s: %s: %s", repo.Slug, repo.UID, err) - } - - // - // insert permissions - // TODO: group inserts in batches of N - // - - stmt = permInsertIgnoreStmt - switch b.db.Driver() { - case db.Mysql: - stmt = permInsertIgnoreStmtMysql - case db.Postgres: - stmt = permInsertIgnoreStmtPostgres - } - - _, err = execer.Exec(stmt, - user.ID, - repo.UID, - now, - now, - ) - if err != nil { - return fmt.Errorf("Error inserting permissions: %s: %s: %s", repo.Slug, repo.UID, err) - } - } - - // - // update existing repositories - // TODO: group updates in batches of N - // - - for _, repo := range batch.Update { - params := repos.ToParams(repo) - stmt, args, err := binder.BindNamed(repoUpdateRemoteStmt, params) - if err != nil { - return err - } - _, err = execer.Exec(stmt, args...) 
- if err != nil { - return fmt.Errorf("Error updating repository: %s: %s: %s", repo.Slug, repo.UID, err) - } - - stmt = permInsertIgnoreStmt - switch b.db.Driver() { - case db.Mysql: - stmt = permInsertIgnoreStmtMysql - case db.Postgres: - stmt = permInsertIgnoreStmtPostgres - } - - _, err = execer.Exec(stmt, - user.ID, - repo.UID, - now, - now, - ) - if err != nil { - return fmt.Errorf("Error inserting permissions: %s: %s: %s", repo.Slug, repo.UID, err) - } - } - - // - // revoke permissions - // TODO: group deletes in batches of N - // - - for _, repo := range batch.Revoke { - stmt := permRevokeStmt - switch b.db.Driver() { - case db.Postgres: - stmt = permRevokeStmtPostgres - } - - _, err = execer.Exec(stmt, user.ID, repo.UID) - if err != nil { - return fmt.Errorf("Error revoking permissions: %s: %s: %s", repo.Slug, repo.UID, err) - } - } - - return nil - }) -} - -const stmtInsertBase = ` -( - repo_uid -,repo_user_id -,repo_namespace -,repo_name -,repo_slug -,repo_scm -,repo_clone_url -,repo_ssh_url -,repo_html_url -,repo_active -,repo_private -,repo_visibility -,repo_branch -,repo_counter -,repo_config -,repo_timeout -,repo_throttle -,repo_trusted -,repo_protected -,repo_no_forks -,repo_no_pulls -,repo_cancel_pulls -,repo_cancel_push -,repo_cancel_running -,repo_synced -,repo_created -,repo_updated -,repo_version -,repo_signer -,repo_secret -) VALUES ( - :repo_uid -,:repo_user_id -,:repo_namespace -,:repo_name -,:repo_slug -,:repo_scm -,:repo_clone_url -,:repo_ssh_url -,:repo_html_url -,:repo_active -,:repo_private -,:repo_visibility -,:repo_branch -,:repo_counter -,:repo_config -,:repo_timeout -,:repo_throttle -,:repo_trusted -,:repo_protected -,:repo_no_forks -,:repo_no_pulls -,:repo_cancel_pulls -,:repo_cancel_push -,:repo_cancel_running -,:repo_synced -,:repo_created -,:repo_updated -,:repo_version -,:repo_signer -,:repo_secret -) -` - -const repoInsertIgnoreStmt = ` -INSERT OR IGNORE INTO repos ` + stmtInsertBase - -const repoInsertIgnoreStmtMysql = ` -INSERT IGNORE INTO repos ` + stmtInsertBase - -const repoInsertIgnoreStmtPostgres = ` -INSERT INTO repos ` + stmtInsertBase + ` ON CONFLICT DO NOTHING` - -const repoUpdateRemoteStmt = ` -UPDATE repos SET - repo_namespace=:repo_namespace -,repo_name=:repo_name -,repo_slug=:repo_slug -,repo_clone_url=:repo_clone_url -,repo_ssh_url=:repo_ssh_url -,repo_html_url=:repo_html_url -,repo_private=:repo_private -,repo_branch=:repo_branch -,repo_updated=:repo_updated -WHERE repo_id=:repo_id -` - -const repoUpdateRemoteStmtPostgres = ` -UPDATE repos SET - repo_namespace=$1 -,repo_name=$2 -,repo_slug=$3 -,repo_clone_url=$4 -,repo_ssh_url=$5 -,repo_html_url=$6 -,repo_private=$7 -,repo_branch=$8 -,repo_updated=$9 -WHERE repo_id=$10 -` - -const permInsertIgnoreStmt = ` -INSERT OR IGNORE INTO perms ( - perm_user_id -,perm_repo_uid -,perm_read -,perm_write -,perm_admin -,perm_synced -,perm_created -,perm_updated -) values ( - ? -,? -,1 -,0 -,0 -,0 -,? -,? -) -` - -const permInsertIgnoreStmtMysql = ` -INSERT IGNORE INTO perms ( - perm_user_id -,perm_repo_uid -,perm_read -,perm_write -,perm_admin -,perm_synced -,perm_created -,perm_updated -) values ( - ? -,? -,1 -,0 -,0 -,0 -,? -,? 
-) -` - -const permInsertIgnoreStmtPostgres = ` -INSERT INTO perms ( - perm_user_id -,perm_repo_uid -,perm_read -,perm_write -,perm_admin -,perm_synced -,perm_created -,perm_updated -) values ( - $1 -,$2 -,true -,false -,false -,0 -,$3 -,$4 -) ON CONFLICT DO NOTHING -` - -// this resets the synced date indicating that -// the system should refresh the permissions next -// time the user attempts to access the resource -const permResetStmt = ` -UPDATE perms SET - perm_updated = ? -,perm_synced = 0 -WHERE perm_user_id = ? -` - -const permResetStmtPostgres = ` -UPDATE perms SET - perm_updated = $1 -,perm_synced = 0 -WHERE perm_user_id = $2 -` - -const permRevokeStmt = ` -DELETE FROM perms -WHERE perm_user_id = ? -AND perm_repo_uid = ? -` - -const permRevokeStmtPostgres = ` -DELETE FROM perms -WHERE perm_user_id = $1 -AND perm_repo_uid = $2 -` diff --git a/store/batch/batch_test.go b/store/batch/batch_test.go deleted file mode 100644 index 45d731f186..0000000000 --- a/store/batch/batch_test.go +++ /dev/null @@ -1,338 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package batch - -import ( - "context" - "database/sql" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/perm" - "github.com/drone/drone/store/repos" - "github.com/drone/drone/store/shared/db" - "github.com/drone/drone/store/shared/db/dbtest" - "github.com/drone/drone/store/shared/encrypt" - "github.com/drone/drone/store/user" -) - -var noContext = context.TODO() - -func TestBatch(t *testing.T) { - conn, err := dbtest.Connect() - if err != nil { - t.Error(err) - return - } - defer func() { - dbtest.Reset(conn) - dbtest.Disconnect(conn) - }() - - batcher := New(conn).(*batchUpdater) - repos := repos.New(conn) - perms := perm.New(conn) - - user, err := seedUser(batcher.db) - if err != nil { - t.Error(err) - } - - t.Run("Insert", testBatchInsert(batcher, repos, perms, user)) - t.Run("Update", testBatchUpdate(batcher, repos, perms, user)) - t.Run("Delete", testBatchDelete(batcher, repos, perms, user)) - t.Run("DuplicateID", testBatchDuplicateID(batcher, repos, perms, user)) - t.Run("DuplicateSlug", testBatchDuplicateSlug(batcher, repos, perms, user)) - t.Run("DuplicateRename", testBatchDuplicateRename(batcher, repos, perms, user)) -} - -func testBatchInsert( - batcher core.Batcher, - repos core.RepositoryStore, - perms core.PermStore, - user *core.User, -) func(t *testing.T) { - return func(t *testing.T) { - batch := &core.Batch{ - Insert: []*core.Repository{ - { - UserID: 1, - UID: "42", - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - Private: false, - Visibility: "public", - }, - }, - } - err := batcher.Batch(noContext, user, batch) - if err != nil { - t.Error(err) - } - - repo, err := repos.FindName(noContext, "octocat", "hello-world") - if err != nil { - t.Errorf("Want repository, got error %q", err) - } - - _, err = perms.Find(noContext, repo.UID, user.ID) - if err != nil { - t.Errorf("Want permissions, got error %q", err) - } - } -} - -func testBatchUpdate( - batcher core.Batcher, - repos core.RepositoryStore, - perms core.PermStore, - user *core.User, -) func(t *testing.T) { - return func(t *testing.T) { - before, err := repos.FindName(noContext, "octocat", "hello-world") - if err != nil { - t.Errorf("Want repository, got error %q", err) - } - - batch := &core.Batch{ - Update: []*core.Repository{ - { - ID: before.ID, - UserID: 1, - UID: 
"42", - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - Private: true, - }, - }, - } - - err = batcher.Batch(noContext, user, batch) - if err != nil { - t.Error(err) - } - - after, err := repos.FindName(noContext, "octocat", "hello-world") - if err != nil { - t.Errorf("Want repository, got error %q", err) - } - - if got, want := after.Private, true; got != want { - t.Errorf("Want repository Private %v, got %v", want, got) - } - } -} - -func testBatchDelete( - batcher core.Batcher, - repos core.RepositoryStore, - perms core.PermStore, - user *core.User, -) func(t *testing.T) { - return func(t *testing.T) { - repo, err := repos.FindName(noContext, "octocat", "hello-world") - if err != nil { - t.Errorf("Want repository, got error %q", err) - } - - _, err = perms.Find(noContext, repo.UID, user.ID) - if err != nil { - t.Errorf("Want permissions, got error %q", err) - } - - batch := &core.Batch{ - Revoke: []*core.Repository{ - { - ID: repo.ID, - UserID: 1, - UID: "42", - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - Private: true, - }, - }, - } - - err = batcher.Batch(noContext, user, batch) - if err != nil { - t.Error(err) - } - - _, err = perms.Find(noContext, repo.UID, user.ID) - if err != sql.ErrNoRows { - t.Errorf("Want sql.ErrNoRows got %v", err) - } - } -} - -func testBatchDuplicateID( - batcher core.Batcher, - repos core.RepositoryStore, - perms core.PermStore, - user *core.User, -) func(t *testing.T) { - return func(t *testing.T) { - before, err := repos.FindName(noContext, "octocat", "hello-world") - if err != nil { - t.Errorf("Want repository, got error %q", err) - } - - batch := &core.Batch{ - Insert: []*core.Repository{ - { - ID: 0, - UserID: 1, - UID: "43", // Updated ID - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - }, - { - ID: 0, - UserID: 1, - UID: "43", // Updated ID - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - }, - { - ID: 0, - UserID: 1, - UID: "64778136", - Namespace: "octocat", - Name: "linguist", - Slug: "octocat/linguist", - }, - }, - Update: []*core.Repository{ - { - ID: before.ID, - UserID: 1, - UID: "44", // Updated ID - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - Private: true, - }, - }, - } - - err = batcher.Batch(noContext, user, batch) - if err != nil { - t.Error(err) - } - - added, err := repos.FindName(noContext, "octocat", "linguist") - if err != nil { - t.Errorf("Want repository, got error %q", err) - } - - if got, want := added.UID, "64778136"; got != want { - t.Errorf("Want added repository UID %v, got %v", want, got) - } - } -} - -// the purpose of this unit test is to understand what happens -// when a repository is deleted, re-created with the same name, -// but has a different unique identifier. 
-func testBatchDuplicateSlug( - batcher core.Batcher, - repos core.RepositoryStore, - perms core.PermStore, - user *core.User, -) func(t *testing.T) { - return func(t *testing.T) { - _, err := repos.FindName(noContext, "octocat", "hello-world") - if err != nil { - t.Errorf("Want repository, got error %q", err) - } - - batch := &core.Batch{ - Insert: []*core.Repository{ - { - ID: 0, - UserID: 1, - UID: "99", // Updated ID - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - }, - }, - } - err = batcher.Batch(noContext, user, batch) - if err != nil { - t.Error(err) - } - } -} - -// the purpose of this unit test is to understand what happens -// when a repository is deleted, re-created with a new name, and -// then updated back to the old name. -// -// TODO(bradrydzewski) for sqlite consider UPDATE OR REPLACE. -// TODO(bradrydzewski) for mysql consider UPDATE IGNORE. -// TODO(bradrydzewski) consider breaking rename into a separate set of logic that checks for existing records. -func testBatchDuplicateRename( - batcher core.Batcher, - repos core.RepositoryStore, - perms core.PermStore, - user *core.User, -) func(t *testing.T) { - return func(t *testing.T) { - batch := &core.Batch{ - Insert: []*core.Repository{ - { - ID: 0, - UserID: 1, - UID: "200", - Namespace: "octocat", - Name: "test-1", - Slug: "octocat/test-1", - }, - { - ID: 0, - UserID: 1, - UID: "201", - Namespace: "octocat", - Name: "test-2", - Slug: "octocat/test-2", - }, - }, - } - err := batcher.Batch(noContext, user, batch) - if err != nil { - t.Error(err) - return - } - - before, err := repos.FindName(noContext, "octocat", "test-2") - if err != nil { - t.Errorf("Want repository, got error %q", err) - return - } - before.Name = "test-1" - before.Slug = "octocat/test-1" - - batch = &core.Batch{ - Update: []*core.Repository{before}, - } - err = batcher.Batch(noContext, user, batch) - if err != nil { - t.Skip(err) - } - } -} - -func seedUser(db *db.DB) (*core.User, error) { - enc, _ := encrypt.New("") - out := &core.User{Login: "octocat"} - err := user.New(db, enc).Create(noContext, out) - return out, err -} diff --git a/store/batch2/batch.go b/store/batch2/batch.go deleted file mode 100644 index e51a61d1b6..0000000000 --- a/store/batch2/batch.go +++ /dev/null @@ -1,428 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package batch2 - -import ( - "context" - "fmt" - "time" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/repos" - "github.com/drone/drone/store/shared/db" -) - -// New returns a new Batcher. 
-func New(db *db.DB) core.Batcher { - return &batchUpdater{db} -} - -type batchUpdater struct { - db *db.DB -} - -func (b *batchUpdater) Batch(ctx context.Context, user *core.User, batch *core.Batch) error { - return b.db.Update(func(execer db.Execer, binder db.Binder) error { - now := time.Now().Unix() - - // - // the repository list API does not return permissions, which means we have - // no way of knowing if permissions are current or not. We therefore mark all - // permissions stale in the database, so that each one must be individually - // verified at runtime. - // - - stmt := permResetStmt - switch b.db.Driver() { - case db.Postgres: - stmt = permResetStmtPostgres - } - - _, err := execer.Exec(stmt, now, user.ID) - if err != nil { - return fmt.Errorf("batch: cannot reset permissions: %s", err) - } - - // if the repository exists with the same name, - // but a different unique identifier, attempt to - // delete the previous entry. - var insert []*core.Repository - var update []*core.Repository - for _, repo := range append(batch.Insert, batch.Update...) { - params := repos.ToParams(repo) - stmt, args, err := binder.BindNamed(repoDeleteDeleted, params) - if err != nil { - return err - } - res, err := execer.Exec(stmt, args...) - if err != nil { - return fmt.Errorf("batch: cannot remove duplicate repository: %s: %s: %s", repo.Slug, repo.UID, err) - } - rows, _ := res.RowsAffected() - if rows > 0 { - insert = append(insert, repo) - } else if repo.ID > 0 { - update = append(update, repo) - } else { - insert = append(insert, repo) - } - } - - for _, repo := range insert { - - // - // insert repository - // TODO: group inserts in batches of N - // - - stmt := repoInsertIgnoreStmt - switch b.db.Driver() { - case db.Mysql: - stmt = repoInsertIgnoreStmtMysql - case db.Postgres: - stmt = repoInsertIgnoreStmtPostgres - } - - params := repos.ToParams(repo) - stmt, args, err := binder.BindNamed(stmt, params) - if err != nil { - return err - } - _, err = execer.Exec(stmt, args...) - if err != nil { - return fmt.Errorf("batch: cannot insert repository: %s: %s: %s", repo.Slug, repo.UID, err) - } - - // - // insert permissions - // TODO: group inserts in batches of N - // - - stmt = permInsertIgnoreStmt - switch b.db.Driver() { - case db.Mysql: - stmt = permInsertIgnoreStmtMysql - case db.Postgres: - stmt = permInsertIgnoreStmtPostgres - } - - _, err = execer.Exec(stmt, - user.ID, - repo.UID, - now, - now, - ) - if err != nil { - return fmt.Errorf("batch: cannot insert permissions: %s: %s: %s", repo.Slug, repo.UID, err) - } - } - - // - // update existing repositories - // TODO: group updates in batches of N - // - - for _, repo := range update { - params := repos.ToParams(repo) - - // // if the repository exists with the same name, - // // but a different unique identifier, attempt to - // // delete the previous entry. - // stmt, args, err := binder.BindNamed(repoDeleteDeleted, params) - // if err != nil { - // return err - // } - // res, err := execer.Exec(stmt, args...) 
- // if err != nil { - // return fmt.Errorf("batch: cannot remove duplicate repository: %s: %s: %s", repo.Slug, repo.UID, err) - // } - // rows, _ := res.RowsAffected() - // if rows > 0 { - // stmt := repoInsertIgnoreStmt - // switch b.db.Driver() { - // case db.Mysql: - // stmt = repoInsertIgnoreStmtMysql - // case db.Postgres: - // stmt = repoInsertIgnoreStmtPostgres - // } - - // params := repos.ToParams(repo) - // stmt, args, err := binder.BindNamed(stmt, params) - // if err != nil { - // return err - // } - // _, err = execer.Exec(stmt, args...) - // if err != nil { - // return fmt.Errorf("batch: cannot insert repository: %s: %s: %s", repo.Slug, repo.UID, err) - // } - // } else { - stmt, args, err := binder.BindNamed(repoUpdateRemoteStmt, params) - if err != nil { - return err - } - _, err = execer.Exec(stmt, args...) - if err != nil { - return fmt.Errorf("batch: cannot update repository: %s: %s: %s", repo.Slug, repo.UID, err) - } - // } - - stmt = permInsertIgnoreStmt - switch b.db.Driver() { - case db.Mysql: - stmt = permInsertIgnoreStmtMysql - case db.Postgres: - stmt = permInsertIgnoreStmtPostgres - } - - _, err = execer.Exec(stmt, - user.ID, - repo.UID, - now, - now, - ) - if err != nil { - return fmt.Errorf("batch: cannot insert permissions: %s: %s: %s", repo.Slug, repo.UID, err) - } - } - - // - // revoke permissions - // TODO: group deletes in batches of N - // - - for _, repo := range batch.Revoke { - stmt := permRevokeStmt - switch b.db.Driver() { - case db.Postgres: - stmt = permRevokeStmtPostgres - } - - _, err = execer.Exec(stmt, user.ID, repo.UID) - if err != nil { - return fmt.Errorf("batch: cannot revoking permissions: %s: %s: %s", repo.Slug, repo.UID, err) - } - } - - return nil - }) -} - -const stmtInsertBase = ` -( - repo_uid -,repo_user_id -,repo_namespace -,repo_name -,repo_slug -,repo_scm -,repo_clone_url -,repo_ssh_url -,repo_html_url -,repo_active -,repo_private -,repo_visibility -,repo_branch -,repo_counter -,repo_config -,repo_timeout -,repo_throttle -,repo_trusted -,repo_protected -,repo_no_forks -,repo_no_pulls -,repo_cancel_pulls -,repo_cancel_push -,repo_cancel_running -,repo_synced -,repo_created -,repo_updated -,repo_version -,repo_signer -,repo_secret -) VALUES ( - :repo_uid -,:repo_user_id -,:repo_namespace -,:repo_name -,:repo_slug -,:repo_scm -,:repo_clone_url -,:repo_ssh_url -,:repo_html_url -,:repo_active -,:repo_private -,:repo_visibility -,:repo_branch -,:repo_counter -,:repo_config -,:repo_timeout -,:repo_throttle -,:repo_trusted -,:repo_protected -,:repo_no_forks -,:repo_no_pulls -,:repo_cancel_pulls -,:repo_cancel_push -,:repo_cancel_running -,:repo_synced -,:repo_created -,:repo_updated -,:repo_version -,:repo_signer -,:repo_secret -) -` - -const repoInsertIgnoreStmt = ` -INSERT OR IGNORE INTO repos ` + stmtInsertBase - -const repoInsertIgnoreStmtMysql = ` -INSERT IGNORE INTO repos ` + stmtInsertBase - -const repoInsertIgnoreStmtPostgres = ` -INSERT INTO repos ` + stmtInsertBase + ` ON CONFLICT DO NOTHING` - -const repoUpdateRemoteStmt = ` -UPDATE repos SET - repo_namespace=:repo_namespace -,repo_name=:repo_name -,repo_slug=:repo_slug -,repo_clone_url=:repo_clone_url -,repo_ssh_url=:repo_ssh_url -,repo_html_url=:repo_html_url -,repo_private=:repo_private -,repo_branch=:repo_branch -,repo_updated=:repo_updated -WHERE repo_id=:repo_id -` - -const repoUpdateRemoteStmtPostgres = ` -UPDATE repos SET - repo_namespace=$1 -,repo_name=$2 -,repo_slug=$3 -,repo_clone_url=$4 -,repo_ssh_url=$5 -,repo_html_url=$6 -,repo_private=$7 -,repo_branch=$8 
-,repo_updated=$9 -WHERE repo_id=$10 -` - -const permInsertIgnoreStmt = ` -INSERT OR IGNORE INTO perms ( - perm_user_id -,perm_repo_uid -,perm_read -,perm_write -,perm_admin -,perm_synced -,perm_created -,perm_updated -) values ( - ? -,? -,1 -,0 -,0 -,0 -,? -,? -) -` - -const permInsertIgnoreStmtMysql = ` -INSERT IGNORE INTO perms ( - perm_user_id -,perm_repo_uid -,perm_read -,perm_write -,perm_admin -,perm_synced -,perm_created -,perm_updated -) values ( - ? -,? -,1 -,0 -,0 -,0 -,? -,? -) -` - -const permInsertIgnoreStmtPostgres = ` -INSERT INTO perms ( - perm_user_id -,perm_repo_uid -,perm_read -,perm_write -,perm_admin -,perm_synced -,perm_created -,perm_updated -) values ( - $1 -,$2 -,true -,false -,false -,0 -,$3 -,$4 -) ON CONFLICT DO NOTHING -` - -// this statement deletes a repository that was -// deleted in version control and then re-created -// with the same name (and thus has a different -// unique identifier) -const repoDeleteDeleted = ` -DELETE FROM repos -WHERE repo_slug = :repo_slug - AND repo_uid != :repo_uid -` - -// this resets the synced date indicating that -// the system should refresh the permissions next -// time the user attempts to access the resource -const permResetStmt = ` -UPDATE perms SET - perm_updated = ? -,perm_synced = 0 -WHERE perm_user_id = ? -` - -const permResetStmtPostgres = ` -UPDATE perms SET - perm_updated = $1 -,perm_synced = 0 -WHERE perm_user_id = $2 -` - -const permRevokeStmt = ` -DELETE FROM perms -WHERE perm_user_id = ? -AND perm_repo_uid = ? -` - -const permRevokeStmtPostgres = ` -DELETE FROM perms -WHERE perm_user_id = $1 -AND perm_repo_uid = $2 -` diff --git a/store/batch2/batch_test.go b/store/batch2/batch_test.go deleted file mode 100644 index 72a8bc3bb7..0000000000 --- a/store/batch2/batch_test.go +++ /dev/null @@ -1,397 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
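The statements above rely on :name placeholders that db.Binder.BindNamed resolves against a params map before execution, in the style of sqlx's named-query support (the sqlx types package already appears in imports later in this patch). A standalone sketch of that binding step, using sqlx directly only as an analogy; the slug and uid values are made up:

package main

import (
	"fmt"

	"github.com/jmoiron/sqlx"
)

func main() {
	// Same shape as repoDeleteDeleted above: named placeholders bound from a map.
	query := `DELETE FROM repos WHERE repo_slug = :repo_slug AND repo_uid != :repo_uid`
	q, args, err := sqlx.Named(query, map[string]interface{}{
		"repo_slug": "octocat/hello-world",
		"repo_uid":  "99",
	})
	if err != nil {
		panic(err)
	}
	// q now reads "... repo_slug = ? AND repo_uid != ?" and args carries the two
	// values in placeholder order, ready to be rebound to the driver's bindvar style.
	fmt.Println(q, args)
}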
- -package batch2 - -import ( - "context" - "database/sql" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/perm" - "github.com/drone/drone/store/repos" - "github.com/drone/drone/store/shared/db" - "github.com/drone/drone/store/shared/db/dbtest" - "github.com/drone/drone/store/shared/encrypt" - "github.com/drone/drone/store/user" -) - -var noContext = context.TODO() - -func TestBatch(t *testing.T) { - conn, err := dbtest.Connect() - if err != nil { - t.Error(err) - return - } - defer func() { - dbtest.Reset(conn) - dbtest.Disconnect(conn) - }() - - batcher := New(conn).(*batchUpdater) - repos := repos.New(conn) - perms := perm.New(conn) - - user, err := seedUser(batcher.db) - if err != nil { - t.Error(err) - } - - t.Run("Insert", testBatchInsert(batcher, repos, perms, user)) - t.Run("Update", testBatchUpdate(batcher, repos, perms, user)) - t.Run("Delete", testBatchDelete(batcher, repos, perms, user)) - t.Run("DuplicateID", testBatchDuplicateID(batcher, repos, perms, user)) - t.Run("DuplicateSlug", testBatchDuplicateSlug(batcher, repos, perms, user)) - t.Run("DuplicateRename", testBatchDuplicateRename(batcher, repos, perms, user)) - t.Run("DuplicateRecreateRename", testBatchDuplicateRecreateRename(batcher, repos, perms, user)) - -} - -func testBatchInsert( - batcher core.Batcher, - repos core.RepositoryStore, - perms core.PermStore, - user *core.User, -) func(t *testing.T) { - return func(t *testing.T) { - batch := &core.Batch{ - Insert: []*core.Repository{ - { - UserID: 1, - UID: "42", - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - Private: false, - Visibility: "public", - }, - }, - } - err := batcher.Batch(noContext, user, batch) - if err != nil { - t.Error(err) - } - - repo, err := repos.FindName(noContext, "octocat", "hello-world") - if err != nil { - t.Errorf("Want repository, got error %q", err) - } - - _, err = perms.Find(noContext, repo.UID, user.ID) - if err != nil { - t.Errorf("Want permissions, got error %q", err) - } - } -} - -func testBatchUpdate( - batcher core.Batcher, - repos core.RepositoryStore, - perms core.PermStore, - user *core.User, -) func(t *testing.T) { - return func(t *testing.T) { - before, err := repos.FindName(noContext, "octocat", "hello-world") - if err != nil { - t.Errorf("Want repository, got error %q", err) - } - - batch := &core.Batch{ - Update: []*core.Repository{ - { - ID: before.ID, - UserID: 1, - UID: "42", - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - Private: true, - }, - }, - } - - err = batcher.Batch(noContext, user, batch) - if err != nil { - t.Error(err) - } - - after, err := repos.FindName(noContext, "octocat", "hello-world") - if err != nil { - t.Errorf("Want repository, got error %q", err) - } - - if got, want := after.Private, true; got != want { - t.Errorf("Want repository Private %v, got %v", want, got) - } - } -} - -func testBatchDelete( - batcher core.Batcher, - repos core.RepositoryStore, - perms core.PermStore, - user *core.User, -) func(t *testing.T) { - return func(t *testing.T) { - repo, err := repos.FindName(noContext, "octocat", "hello-world") - if err != nil { - t.Errorf("Want repository, got error %q", err) - } - - _, err = perms.Find(noContext, repo.UID, user.ID) - if err != nil { - t.Errorf("Want permissions, got error %q", err) - } - - batch := &core.Batch{ - Revoke: []*core.Repository{ - { - ID: repo.ID, - UserID: 1, - UID: "42", - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - Private: true, - }, - 
}, - } - - err = batcher.Batch(noContext, user, batch) - if err != nil { - t.Error(err) - } - - _, err = perms.Find(noContext, repo.UID, user.ID) - if err != sql.ErrNoRows { - t.Errorf("Want sql.ErrNoRows got %v", err) - } - } -} - -func testBatchDuplicateID( - batcher core.Batcher, - repos core.RepositoryStore, - perms core.PermStore, - user *core.User, -) func(t *testing.T) { - return func(t *testing.T) { - before, err := repos.FindName(noContext, "octocat", "hello-world") - if err != nil { - t.Errorf("Want repository, got error %q", err) - } - - batchDuplicate := &core.Batch{ - Insert: []*core.Repository{ - { - ID: 0, - UserID: 1, - UID: "43", // Updated ID - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - }, - { - ID: 0, - UserID: 1, - UID: "43", // Updated ID - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - }, - }, - } - - err = batcher.Batch(noContext, user, batchDuplicate) - if err != nil { - t.Error(err) - return - } - - batch := &core.Batch{ - Insert: []*core.Repository{ - { - ID: 0, - UserID: 1, - UID: "64778136", - Namespace: "octocat", - Name: "linguist", - Slug: "octocat/linguist", - }, - }, - Update: []*core.Repository{ - { - ID: before.ID, - UserID: 1, - UID: "44", // Updated ID - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - Private: true, - }, - }, - } - - err = batcher.Batch(noContext, user, batch) - if err != nil { - t.Error(err) - return - } - - added, err := repos.FindName(noContext, "octocat", "linguist") - if err != nil { - t.Errorf("Want inserted repository, got error %q", err) - } - - if got, want := added.UID, "64778136"; got != want { - t.Errorf("Want inserted repository UID %v, got %v", want, got) - } - - renamed, err := repos.FindName(noContext, "octocat", "hello-world") - if err != nil { - t.Errorf("Want renamed repository, got error %q", err) - } - - if got, want := renamed.UID, "44"; got != want { - t.Errorf("Want renamed repository UID %v, got %v", want, got) - } - } -} - -// the purpose of this unit test is to understand what happens -// when a repository is deleted, re-created with the same name, -// but has a different unique identifier. -func testBatchDuplicateSlug( - batcher core.Batcher, - repos core.RepositoryStore, - perms core.PermStore, - user *core.User, -) func(t *testing.T) { - return func(t *testing.T) { - _, err := repos.FindName(noContext, "octocat", "hello-world") - if err != nil { - t.Errorf("Want repository, got error %q", err) - return - } - - batch := &core.Batch{ - Insert: []*core.Repository{ - { - ID: 0, - UserID: 1, - UID: "99", // Updated ID - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - }, - }, - } - err = batcher.Batch(noContext, user, batch) - if err != nil { - t.Error(err) - } - } -} - -// the purpose of this unit test is to understand what happens -// when a repository is deleted, re-created with a different name, -// renamed to the original name, but has a different unique identifier. 
-func testBatchDuplicateRecreateRename( - batcher core.Batcher, - repos core.RepositoryStore, - perms core.PermStore, - user *core.User, -) func(t *testing.T) { - return func(t *testing.T) { - _, err := repos.FindName(noContext, "octocat", "hello-world") - if err != nil { - t.Errorf("Want repository, got error %q", err) - return - } - - batch := &core.Batch{ - Update: []*core.Repository{ - { - ID: 0, - UserID: 1, - UID: "8888", // Updated ID - Namespace: "octocat", - Name: "hello-world", - Slug: "octocat/hello-world", - }, - }, - } - err = batcher.Batch(noContext, user, batch) - if err != nil { - t.Error(err) - } - } -} - -// the purpose of this unit test is to understand what happens -// when a repository is deleted, re-created with a new name, and -// then updated back to the old name. -// -// TODO(bradrydzewski) for sqlite consider UPDATE OR REPLACE. -// TODO(bradrydzewski) for mysql consider UPDATE IGNORE. -// TODO(bradrydzewski) consider breaking rename into a separate set of logic that checks for existing records. -func testBatchDuplicateRename( - batcher core.Batcher, - repos core.RepositoryStore, - perms core.PermStore, - user *core.User, -) func(t *testing.T) { - return func(t *testing.T) { - batch := &core.Batch{ - Insert: []*core.Repository{ - { - ID: 0, - UserID: 1, - UID: "200", - Namespace: "octocat", - Name: "test-1", - Slug: "octocat/test-1", - }, - { - ID: 0, - UserID: 1, - UID: "201", - Namespace: "octocat", - Name: "test-2", - Slug: "octocat/test-2", - }, - }, - } - err := batcher.Batch(noContext, user, batch) - if err != nil { - t.Error(err) - return - } - - before, err := repos.FindName(noContext, "octocat", "test-2") - if err != nil { - t.Errorf("Want repository, got error %q", err) - return - } - before.Name = "test-1" - before.Slug = "octocat/test-1" - - batch = &core.Batch{ - Update: []*core.Repository{before}, - } - err = batcher.Batch(noContext, user, batch) - if err != nil { - t.Skip(err) - } - } -} - -func seedUser(db *db.DB) (*core.User, error) { - enc, _ := encrypt.New("") - out := &core.User{Login: "octocat"} - err := user.New(db, enc).Create(noContext, out) - return out, err -} diff --git a/store/build/build.go b/store/build/build.go deleted file mode 100644 index 0f4869ba5b..0000000000 --- a/store/build/build.go +++ /dev/null @@ -1,843 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package build - -import ( - "context" - "fmt" - "regexp" - "time" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/shared/db" -) - -// regular expression to extract the pull request number -// from the git ref (e.g. refs/pulls/{d}/head) -var pr = regexp.MustCompile("\\d+") - -// New returns a new Buildcore. -func New(db *db.DB) core.BuildStore { - return &buildStore{db} -} - -type buildStore struct { - db *db.DB -} - -// Find returns a build from the datacore. 
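The pr pattern declared above recovers the pull request number from a git ref by taking the first run of digits; a small self-contained check of that behaviour (the ref value is only an example):

package main

import (
	"fmt"
	"regexp"
)

// same expression as the package-level pr variable above
var pr = regexp.MustCompile("\\d+")

func main() {
	// the first digit run in refs/pulls/{d}/head is the pull request number
	fmt.Println(pr.FindString("refs/pulls/42/head")) // prints 42
}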
-func (s *buildStore) Find(ctx context.Context, id int64) (*core.Build, error) { - out := &core.Build{ID: id} - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - params := toParams(out) - query, args, err := binder.BindNamed(queryKey, params) - if err != nil { - return err - } - row := queryer.QueryRow(query, args...) - return scanRow(row, out) - }) - return out, err -} - -// FindNumber returns a build from the datastore by build number. -func (s *buildStore) FindNumber(ctx context.Context, repo, number int64) (*core.Build, error) { - out := &core.Build{Number: number, RepoID: repo} - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - params := toParams(out) - query, args, err := binder.BindNamed(queryNumber, params) - if err != nil { - return err - } - row := queryer.QueryRow(query, args...) - return scanRow(row, out) - }) - return out, err -} - -// FindLast returns the last build from the datastore by ref. -func (s *buildStore) FindRef(ctx context.Context, repo int64, ref string) (*core.Build, error) { - out := &core.Build{RepoID: repo, Ref: ref} - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - params := toParams(out) - query, args, err := binder.BindNamed(queryRowRef, params) - if err != nil { - return err - } - row := queryer.QueryRow(query, args...) - return scanRow(row, out) - }) - return out, err -} - -// List returns a list of builds from the datastore by repository id. -func (s *buildStore) List(ctx context.Context, repo int64, limit, offset int) ([]*core.Build, error) { - var out []*core.Build - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - params := map[string]interface{}{ - "build_repo_id": repo, - "limit": limit, - "offset": offset, - } - stmt, args, err := binder.BindNamed(queryRepo, params) - if err != nil { - return err - } - rows, err := queryer.Query(stmt, args...) - if err != nil { - return err - } - out, err = scanRows(rows) - return err - }) - return out, err -} - -// ListRef returns a list of builds from the datastore by ref. -func (s *buildStore) ListRef(ctx context.Context, repo int64, ref string, limit, offset int) ([]*core.Build, error) { - var out []*core.Build - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - params := map[string]interface{}{ - "build_repo_id": repo, - "build_ref": ref, - "limit": limit, - "offset": offset, - } - stmt, args, err := binder.BindNamed(queryRef, params) - if err != nil { - return err - } - rows, err := queryer.Query(stmt, args...) - if err != nil { - return err - } - out, err = scanRows(rows) - return err - }) - return out, err -} - -// LatestBranches returns a list of the latest build by branch. -func (s *buildStore) LatestBranches(ctx context.Context, repo int64) ([]*core.Build, error) { - return s.latest(ctx, repo, "branch") -} - -// LatestPulls returns a list of the latest builds by pull requests. -func (s *buildStore) LatestPulls(ctx context.Context, repo int64) ([]*core.Build, error) { - return s.latest(ctx, repo, "pull_request") -} - -// LatestDeploys returns a list of the latest builds by target deploy. 
-func (s *buildStore) LatestDeploys(ctx context.Context, repo int64) ([]*core.Build, error) { - return s.latest(ctx, repo, "deployment") -} - -func (s *buildStore) latest(ctx context.Context, repo int64, event string) ([]*core.Build, error) { - var out []*core.Build - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - params := map[string]interface{}{ - "latest_repo_id": repo, - "latest_type": event, - } - stmt, args, err := binder.BindNamed(queryLatestList, params) - if err != nil { - return err - } - rows, err := queryer.Query(stmt, args...) - if err != nil { - return err - } - out, err = scanRows(rows) - return err - }) - return out, err -} - -// Pending returns a list of pending builds from the datastore by repository id. -func (s *buildStore) Pending(ctx context.Context) ([]*core.Build, error) { - var out []*core.Build - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - rows, err := queryer.Query(queryPending) - if err != nil { - return err - } - out, err = scanRows(rows) - return err - }) - return out, err -} - -// Running returns a list of running builds from the datastore by repository id. -func (s *buildStore) Running(ctx context.Context) ([]*core.Build, error) { - var out []*core.Build - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - rows, err := queryer.Query(queryRunning) - if err != nil { - return err - } - out, err = scanRows(rows) - return err - }) - return out, err -} - -// Create persists a build to the datacore. -func (s *buildStore) Create(ctx context.Context, build *core.Build, stages []*core.Stage) error { - var err error - switch s.db.Driver() { - case db.Postgres: - err = s.createPostgres(ctx, build, stages) - default: - err = s.create(ctx, build, stages) - } - if err != nil { - return err - } - var event, name string - switch build.Event { - case core.EventPullRequest: - event = "pull_request" - name = pr.FindString(build.Ref) - case core.EventPush: - event = "branch" - name = build.Target - case core.EventPromote, core.EventRollback: - event = "deployment" - name = build.Deploy - default: - return nil - } - return s.index(ctx, build.ID, build.RepoID, event, name) -} - -func (s *buildStore) create(ctx context.Context, build *core.Build, stages []*core.Stage) error { - build.Version = 1 - return s.db.Update(func(execer db.Execer, binder db.Binder) error { - params := toParams(build) - stmt, args, err := binder.BindNamed(stmtInsert, params) - if err != nil { - return err - } - res, err := execer.Exec(stmt, args...) - if err != nil { - return err - } - build.ID, err = res.LastInsertId() - if err != nil { - return err - } - - for _, stage := range stages { - stage.Version = 1 - stage.BuildID = build.ID - params := toStageParams(stage) - stmt, args, err := binder.BindNamed(stmtStageInsert, params) - if err != nil { - return err - } - res, err := execer.Exec(stmt, args...) 
- if err != nil { - return err - } - stage.ID, err = res.LastInsertId() - } - return err - }) -} - -func (s *buildStore) createPostgres(ctx context.Context, build *core.Build, stages []*core.Stage) error { - build.Version = 1 - return s.db.Update(func(execer db.Execer, binder db.Binder) error { - params := toParams(build) - stmt, args, err := binder.BindNamed(stmtInsertPg, params) - if err != nil { - return err - } - err = execer.QueryRow(stmt, args...).Scan(&build.ID) - if err != nil { - return err - } - - for _, stage := range stages { - stage.Version = 1 - stage.BuildID = build.ID - params := toStageParams(stage) - stmt, args, err := binder.BindNamed(stmtStageInsertPg, params) - if err != nil { - return err - } - err = execer.QueryRow(stmt, args...).Scan(&stage.ID) - if err != nil { - return err - } - } - return err - }) -} - -// Update updates a build in the datacore. -func (s *buildStore) Update(ctx context.Context, build *core.Build) error { - versionNew := build.Version + 1 - versionOld := build.Version - - err := s.db.Lock(func(execer db.Execer, binder db.Binder) error { - params := toParams(build) - params["build_version_old"] = versionOld - params["build_version_new"] = versionNew - stmt, args, err := binder.BindNamed(stmtUpdate, params) - if err != nil { - return err - } - res, err := execer.Exec(stmt, args...) - if err != nil { - return err - } - effected, err := res.RowsAffected() - if err != nil { - return err - } - if effected == 0 { - return db.ErrOptimisticLock - } - return nil - }) - if err == nil { - build.Version = versionNew - } - return err -} - -func (s *buildStore) index(ctx context.Context, build, repo int64, event, name string) error { - return s.db.Lock(func(execer db.Execer, binder db.Binder) error { - params := map[string]interface{}{ - "latest_repo_id": repo, - "latest_build_id": build, - "latest_type": event, - "latest_name": name, - "latest_created": time.Now().Unix(), - "latest_updated": time.Now().Unix(), - "latest_deleted": time.Now().Unix(), - } - stmtInsert := stmtInsertLatest - switch s.db.Driver() { - case db.Postgres: - stmtInsert = stmtInsertLatestPg - case db.Mysql: - stmtInsert = stmtInsertLatestMysql - } - stmt, args, err := binder.BindNamed(stmtInsert, params) - if err != nil { - return err - } - _, err = execer.Exec(stmt, args...) - return err - }) -} - -// Delete deletes a build from the datacore. -func (s *buildStore) Delete(ctx context.Context, build *core.Build) error { - return s.db.Lock(func(execer db.Execer, binder db.Binder) error { - params := toParams(build) - stmt, args, err := binder.BindNamed(stmtDelete, params) - if err != nil { - return err - } - _, err = execer.Exec(stmt, args...) - return err - }) -} - -// DeletePull deletes a pull request index from the datastore. -func (s *buildStore) DeletePull(ctx context.Context, repo int64, number int) error { - return s.db.Lock(func(execer db.Execer, binder db.Binder) error { - params := map[string]interface{}{ - "latest_repo_id": repo, - "latest_name": fmt.Sprint(number), - "latest_type": "pull_request", - } - stmt, args, err := binder.BindNamed(stmtDeleteLatest, params) - if err != nil { - return err - } - _, err = execer.Exec(stmt, args...) - return err - }) -} - -// DeleteBranch deletes a branch index from the datastore. 
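The Update method above implements optimistic locking on the build_version column: the row is only written when the stored version still matches, and a mismatch surfaces as db.ErrOptimisticLock. A sketch of the retry loop a caller might wrap around it; the helper name, mutate callback, and retry limit are illustrative, and the usual context/core/db imports from this tree are assumed:

// updateWithRetry re-reads the build and reapplies the change when the
// version check fails, giving up after a few attempts.
func updateWithRetry(ctx context.Context, builds core.BuildStore, id int64, mutate func(*core.Build)) error {
	for attempt := 0; attempt < 3; attempt++ {
		b, err := builds.Find(ctx, id)
		if err != nil {
			return err
		}
		mutate(b)
		err = builds.Update(ctx, b)
		if err == nil {
			return nil
		}
		if err != db.ErrOptimisticLock {
			return err
		}
		// another writer bumped build_version; loop and try again
	}
	return db.ErrOptimisticLock
}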
-func (s *buildStore) DeleteBranch(ctx context.Context, repo int64, branch string) error { - return s.db.Lock(func(execer db.Execer, binder db.Binder) error { - params := map[string]interface{}{ - "latest_repo_id": repo, - "latest_name": branch, - "latest_type": "branch", - } - stmt, args, err := binder.BindNamed(stmtDeleteLatest, params) - if err != nil { - return err - } - _, err = execer.Exec(stmt, args...) - return err - }) -} - -// DeleteDeploy deletes a deploy index from the datastore. -func (s *buildStore) DeleteDeploy(ctx context.Context, repo int64, environment string) error { - return s.db.Lock(func(execer db.Execer, binder db.Binder) error { - params := map[string]interface{}{ - "latest_repo_id": repo, - "latest_name": environment, - "latest_type": "deployment", - } - stmt, args, err := binder.BindNamed(stmtDeleteLatest, params) - if err != nil { - return err - } - _, err = execer.Exec(stmt, args...) - return err - }) -} - -// Purge deletes builds from the database where the build number is less than n. -func (s *buildStore) Purge(ctx context.Context, repo, number int64) error { - build := &core.Build{ - RepoID: repo, - Number: number, - } - stageErr := s.db.Lock(func(execer db.Execer, binder db.Binder) error { - params := toParams(build) - stmt, args, err := binder.BindNamed(stmtPurge, params) - if err != nil { - return err - } - _, err = execer.Exec(stmt, args...) - return err - }) - if stageErr != nil { - return stageErr - } - if s.db.Driver() == db.Postgres || s.db.Driver() == db.Mysql { - // purge orphaned stages - err := s.db.Update(func(execer db.Execer, binder db.Binder) error { - _, err := execer.Exec(stmtStagePurge) - return err - }) - if err != nil { - return err - } - // purge orphaned steps - err = s.db.Update(func(execer db.Execer, binder db.Binder) error { - _, err := execer.Exec(stmtStepPurge) - return err - }) - return err - } - return nil -} - -// Count returns a count of builds. 
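Purge above drops builds whose number falls below the given threshold and, on MySQL and Postgres, additionally sweeps up the stages and steps left without a parent build. A caller enforcing a keep-the-last-N policy might invoke it roughly as follows; the retention count and variable names are placeholders:

// keep only the most recent 100 builds of the repository
const retain = 100
if latest.Number > retain {
	if err := builds.Purge(ctx, repo.ID, latest.Number-retain+1); err != nil {
		return err
	}
}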
-func (s *buildStore) Count(ctx context.Context) (i int64, err error) { - err = s.db.View(func(queryer db.Queryer, binder db.Binder) error { - return queryer.QueryRow(queryCount).Scan(&i) - }) - return -} - -const queryCount = ` -SELECT COUNT(*) -FROM builds -` - -const queryBase = ` -SELECT - build_id -,build_repo_id -,build_trigger -,build_number -,build_parent -,build_status -,build_error -,build_event -,build_action -,build_link -,build_timestamp -,build_title -,build_message -,build_before -,build_after -,build_ref -,build_source_repo -,build_source -,build_target -,build_author -,build_author_name -,build_author_email -,build_author_avatar -,build_sender -,build_params -,build_cron -,build_deploy -,build_deploy_id -,build_debug -,build_started -,build_finished -,build_created -,build_updated -,build_version -` - -const queryKey = queryBase + ` -FROM builds -WHERE build_id = :build_id -` - -const queryNumber = queryBase + ` -FROM builds -WHERE build_repo_id = :build_repo_id - AND build_number = :build_number -` - -const queryRef = queryBase + ` -FROM builds -WHERE build_repo_id = :build_repo_id - AND build_ref = :build_ref -ORDER BY build_id DESC -LIMIT :limit OFFSET :offset -` - -const queryRowRef = queryBase + ` -FROM builds -WHERE build_repo_id = :build_repo_id - AND build_ref = :build_ref -ORDER BY build_id DESC -LIMIT 1 -` - -const queryRepo = queryBase + ` -FROM builds -WHERE build_repo_id = :build_repo_id -ORDER BY build_id DESC -LIMIT :limit OFFSET :offset -` - -const queryPending = queryBase + ` -FROM builds -WHERE EXISTS ( - SELECT stage_id - FROM stages - WHERE stages.stage_build_id = builds.build_id - AND stages.stage_status = 'pending' -) -ORDER BY build_id ASC -` - -const queryRunning = queryBase + ` -FROM builds -WHERE EXISTS ( - SELECT stage_id - FROM stages - WHERE stages.stage_build_id = builds.build_id - AND stages.stage_status = 'running' -) -ORDER BY build_id ASC -` - -// const queryRunningOLD = queryBase + ` -// FROM builds -// WHERE build_status = 'running' -// ORDER BY build_id ASC -// ` - -const queryAll = queryBase + ` -FROM builds -WHERE build_id > :build_id -LIMIT :limit OFFSET :offset -` - -const stmtUpdate = ` -UPDATE builds SET - build_parent = :build_parent -,build_status = :build_status -,build_error = :build_error -,build_event = :build_event -,build_action = :build_action -,build_link = :build_link -,build_timestamp = :build_timestamp -,build_title = :build_title -,build_message = :build_message -,build_before = :build_before -,build_after = :build_after -,build_ref = :build_ref -,build_source_repo = :build_source_repo -,build_source = :build_source -,build_target = :build_target -,build_author = :build_author -,build_author_name = :build_author_name -,build_author_email = :build_author_email -,build_author_avatar = :build_author_avatar -,build_sender = :build_sender -,build_params = :build_params -,build_cron = :build_cron -,build_deploy = :build_deploy -,build_started = :build_started -,build_finished = :build_finished -,build_updated = :build_updated -,build_version = :build_version_new -WHERE build_id = :build_id - AND build_version = :build_version_old -` - -const stmtInsert = ` -INSERT INTO builds ( - build_repo_id -,build_trigger -,build_number -,build_parent -,build_status -,build_error -,build_event -,build_action -,build_link -,build_timestamp -,build_title -,build_message -,build_before -,build_after -,build_ref -,build_source_repo -,build_source -,build_target -,build_author -,build_author_name -,build_author_email -,build_author_avatar 
-,build_sender -,build_params -,build_cron -,build_deploy -,build_deploy_id -,build_debug -,build_started -,build_finished -,build_created -,build_updated -,build_version -) VALUES ( - :build_repo_id -,:build_trigger -,:build_number -,:build_parent -,:build_status -,:build_error -,:build_event -,:build_action -,:build_link -,:build_timestamp -,:build_title -,:build_message -,:build_before -,:build_after -,:build_ref -,:build_source_repo -,:build_source -,:build_target -,:build_author -,:build_author_name -,:build_author_email -,:build_author_avatar -,:build_sender -,:build_params -,:build_cron -,:build_deploy -,:build_deploy_id -,:build_debug -,:build_started -,:build_finished -,:build_created -,:build_updated -,:build_version -) -` - -const stmtInsertPg = stmtInsert + ` -RETURNING build_id -` - -const stmtStageInsert = ` -INSERT INTO stages ( - stage_repo_id -,stage_build_id -,stage_number -,stage_name -,stage_kind -,stage_type -,stage_status -,stage_error -,stage_errignore -,stage_exit_code -,stage_limit -,stage_limit_repo -,stage_os -,stage_arch -,stage_variant -,stage_kernel -,stage_machine -,stage_started -,stage_stopped -,stage_created -,stage_updated -,stage_version -,stage_on_success -,stage_on_failure -,stage_depends_on -,stage_labels -) VALUES ( - :stage_repo_id -,:stage_build_id -,:stage_number -,:stage_name -,:stage_kind -,:stage_type -,:stage_status -,:stage_error -,:stage_errignore -,:stage_exit_code -,:stage_limit -,:stage_limit_repo -,:stage_os -,:stage_arch -,:stage_variant -,:stage_kernel -,:stage_machine -,:stage_started -,:stage_stopped -,:stage_created -,:stage_updated -,:stage_version -,:stage_on_success -,:stage_on_failure -,:stage_depends_on -,:stage_labels -) -` - -const stmtStageInsertPg = stmtStageInsert + ` -RETURNING stage_id -` - -const stmtDelete = ` -DELETE FROM builds -WHERE build_id = :build_id -` - -const stmtPurge = ` -DELETE FROM builds -WHERE build_repo_id = :build_repo_id -AND build_number < :build_number -` -const stmtStagePurge = ` -DELETE FROM stages -WHERE stage_build_id NOT IN ( - SELECT build_id FROM builds -)` - -const stmtStepPurge = ` -DELETE FROM steps -WHERE step_stage_id NOT IN ( - SELECT stage_id FROM stages -)` - -// -// latest builds index -// - -const stmtInsertLatest = ` -INSERT INTO latest ( - latest_repo_id -,latest_build_id -,latest_type -,latest_name -,latest_created -,latest_updated -,latest_deleted -) VALUES ( - :latest_repo_id -,:latest_build_id -,:latest_type -,:latest_name -,:latest_created -,:latest_updated -,:latest_deleted -) ON CONFLICT (latest_repo_id, latest_type, latest_name) -DO UPDATE SET latest_build_id = EXCLUDED.latest_build_id -` - -const stmtInsertLatestPg = ` -INSERT INTO latest ( - latest_repo_id -,latest_build_id -,latest_type -,latest_name -,latest_created -,latest_updated -,latest_deleted -) VALUES ( - :latest_repo_id -,:latest_build_id -,:latest_type -,:latest_name -,:latest_created -,:latest_updated -,:latest_deleted -) ON CONFLICT (latest_repo_id, latest_type, latest_name) -DO UPDATE SET latest_build_id = EXCLUDED.latest_build_id -` - -const stmtInsertLatestMysql = ` -INSERT INTO latest ( - latest_repo_id -,latest_build_id -,latest_type -,latest_name -,latest_created -,latest_updated -,latest_deleted -) VALUES ( - :latest_repo_id -,:latest_build_id -,:latest_type -,:latest_name -,:latest_created -,:latest_updated -,:latest_deleted -) ON DUPLICATE KEY UPDATE latest_build_id = :latest_build_id -` - -const stmtDeleteLatest = ` -DELETE FROM latest -WHERE latest_repo_id = :latest_repo_id - AND latest_type = 
:latest_type - AND latest_name = :latest_name -` - -const queryLatestList = queryBase + ` -FROM builds -WHERE build_id IN ( - SELECT latest_build_id - FROM latest - WHERE latest_repo_id = :latest_repo_id - AND latest_type = :latest_type -) -` diff --git a/store/build/build_test.go b/store/build/build_test.go deleted file mode 100644 index d7d8d0f542..0000000000 --- a/store/build/build_test.go +++ /dev/null @@ -1,482 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package build - -import ( - "context" - "database/sql" - "fmt" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/shared/db" - - "github.com/drone/drone/store/shared/db/dbtest" -) - -var noContext = context.TODO() - -func TestBuild(t *testing.T) { - conn, err := dbtest.Connect() - if err != nil { - t.Error(err) - return - } - defer func() { - dbtest.Reset(conn) - _ = dbtest.Disconnect(conn) - }() - - store := New(conn).(*buildStore) - t.Run("Create", testBuildCreate(store)) - t.Run("Purge", testBuildPurge(store)) - t.Run("Count", testBuildCount(store)) - t.Run("Pending", testBuildPending(store)) - t.Run("Running", testBuildRunning(store)) - t.Run("Latest", testBuildLatest(store)) -} - -func testBuildCreate(store *buildStore) func(t *testing.T) { - return func(t *testing.T) { - build := &core.Build{ - RepoID: 1, - Number: 99, - Event: core.EventPush, - Ref: "refs/heads/master", - Target: "master", - } - stage := &core.Stage{ - RepoID: 42, - Number: 1, - } - err := store.Create(noContext, build, []*core.Stage{stage}) - if err != nil { - t.Error(err) - } - if build.ID == 0 { - t.Errorf("Want build ID assigned, got %d", build.ID) - } - if got, want := build.Version, int64(1); got != want { - t.Errorf("Want build Version %d, got %d", want, got) - } - t.Run("Find", testBuildFind(store, build)) - t.Run("FindNumber", testBuildFindNumber(store, build)) - t.Run("FindRef", testBuildFindRef(store, build)) - t.Run("List", testBuildList(store, build)) - t.Run("ListRef", testBuildListRef(store, build)) - t.Run("Update", testBuildUpdate(store, build)) - t.Run("Locking", testBuildLocking(store, build)) - t.Run("Delete", testBuildDelete(store, build)) - } -} - -func testBuildFind(store *buildStore, build *core.Build) func(t *testing.T) { - return func(t *testing.T) { - result, err := store.Find(noContext, build.ID) - if err != nil { - t.Error(err) - } else { - t.Run("Fields", testBuild(result)) - } - } -} - -func testBuildFindNumber(store *buildStore, build *core.Build) func(t *testing.T) { - return func(t *testing.T) { - item, err := store.FindNumber(noContext, build.RepoID, build.Number) - if err != nil { - t.Error(err) - } else { - t.Run("Fields", testBuild(item)) - } - } -} - -func testBuildFindRef(store *buildStore, build *core.Build) func(t *testing.T) { - return func(t *testing.T) { - item, err := store.FindRef(noContext, build.RepoID, build.Ref) - if err != nil { - t.Error(err) - } else { - t.Run("Fields", testBuild(item)) - } - } -} - -func testBuildList(store *buildStore, build *core.Build) func(t *testing.T) { - return func(t *testing.T) { - list, err := store.List(noContext, build.RepoID, 10, 0) - if err != nil { - t.Error(err) - return - } - if got, want := len(list), 1; got != want { - t.Errorf("Want list count %d, got %d", want, got) - } else { - t.Run("Fields", testBuild(list[0])) - } - } -} - -func testBuildListRef(store *buildStore, build *core.Build) func(t *testing.T) 
{ - return func(t *testing.T) { - list, err := store.ListRef(noContext, build.RepoID, build.Ref, 10, 0) - if err != nil { - t.Error(err) - return - } - if got, want := len(list), 1; got != want { - t.Errorf("Want list count %d, got %d", want, got) - } else { - t.Run("Fields", testBuild(list[0])) - } - } -} - -func testBuildUpdate(store *buildStore, build *core.Build) func(t *testing.T) { - return func(t *testing.T) { - before := &core.Build{ - ID: build.ID, - RepoID: build.RepoID, - Number: build.Number, - Status: core.StatusFailing, - Version: build.Version, - } - err := store.Update(noContext, before) - if err != nil { - t.Error(err) - return - } - if got, want := before.Version, build.Version+1; got != want { - t.Errorf("Want incremented version %d, got %d", want, got) - } - after, err := store.Find(noContext, before.ID) - if err != nil { - t.Error(err) - return - } - if got, want := after.Version, build.Version+1; got != want { - t.Errorf("Want incremented version %d, got %d", want, got) - } - if got, want := after.Status, before.Status; got != want { - t.Errorf("Want updated build status %v, got %v", want, got) - } - } -} - -func testBuildLocking(store *buildStore, build *core.Build) func(t *testing.T) { - return func(t *testing.T) { - item, err := store.Find(noContext, build.ID) - if err != nil { - t.Error(err) - return - } - item.Version = 1 - err = store.Update(noContext, item) - if err == nil { - t.Errorf("Want Optimistic Lock Error, got nil") - } else if err != db.ErrOptimisticLock { - t.Errorf("Want Optimistic Lock Error") - } - } -} - -func testBuildDelete(store *buildStore, build *core.Build) func(t *testing.T) { - return func(t *testing.T) { - item, err := store.Find(noContext, build.ID) - if err != nil { - t.Error(err) - } - err = store.Delete(noContext, item) - if err != nil { - t.Error(err) - } - _, err = store.Find(noContext, item.ID) - if want, got := sql.ErrNoRows, err; got != want { - t.Errorf("Want %q, got %q", want, got) - } - } -} - -func testBuildPurge(store *buildStore) func(t *testing.T) { - return func(t *testing.T) { - _ = store.db.Update(func(execer db.Execer, binder db.Binder) error { - _, _ = execer.Exec("DELETE FROM builds") - _, _ = execer.Exec("DELETE FROM stages") - _, _ = execer.Exec("DELETE FROM steps") - return nil - }) - _ = store.Create(noContext, &core.Build{RepoID: 1, Number: 98}, []*core.Stage{{RepoID: 1, Number: 1, Status: core.StatusPending}}) - _ = store.Create(noContext, &core.Build{RepoID: 1, Number: 99}, []*core.Stage{{RepoID: 1, Number: 1, Status: core.StatusPending}}) - _ = store.Create(noContext, &core.Build{RepoID: 1, Number: 100}, []*core.Stage{{RepoID: 1, Number: 1, Status: core.StatusPending}}) - _ = store.Create(noContext, &core.Build{RepoID: 1, Number: 101}, []*core.Stage{{RepoID: 1, Number: 1, Status: core.StatusPending}}) - // get the first stageid - var startingStageID int64 - _ = store.db.View(func(queryer db.Queryer, binder db.Binder) error { - return queryer.QueryRow("SELECT stage_id FROM stages limit 1").Scan(&startingStageID) - }) - // lets add steps to the builds - _ = store.db.Update(func(execer db.Execer, binder db.Binder) error { - _, _ = execer.Exec(fmt.Sprintf("INSERT INTO steps (step_stage_id, step_number, step_status) VALUES (%d, 1, 'pending')", startingStageID)) - _, _ = execer.Exec(fmt.Sprintf("INSERT INTO steps (step_stage_id, step_number, step_status) VALUES (%d, 1, 'pending')", startingStageID+1)) - _, _ = execer.Exec(fmt.Sprintf("INSERT INTO steps (step_stage_id, step_number, step_status) VALUES (%d, 1, 
'pending')", startingStageID+2)) - _, _ = execer.Exec(fmt.Sprintf("INSERT INTO steps (step_stage_id, step_number, step_status) VALUES (%d, 1, 'pending')", startingStageID+3)) - return nil - }) - - before, err := store.List(noContext, 1, 100, 0) - if err != nil { - t.Error(err) - } - if got, want := len(before), 4; got != want { - t.Errorf("Want build count %d, got %d", want, got) - } - // count the number of stages - countOfStages := 4 - _ = store.db.View(func(queryer db.Queryer, binder db.Binder) error { - return queryer.QueryRow("SELECT count(*) FROM stages").Scan(&countOfStages) - }) - want := 4 - if want != countOfStages { - t.Errorf("Want stage count %d, got %d", want, countOfStages) - } - // count the number of steps - countOfSteps := 4 - _ = store.db.View(func(queryer db.Queryer, binder db.Binder) error { - return queryer.QueryRow("SELECT count(*) FROM steps").Scan(&countOfSteps) - }) - want = 4 - if want != countOfSteps { - t.Errorf("Want step count %d, got %d", want, countOfSteps) - } - // purge the builds - err = store.Purge(noContext, 1, 100) - if err != nil { - t.Error(err) - } - after, err := store.List(noContext, 1, 100, 0) - if err != nil { - t.Error(err) - } - // we want 2 builds - want = 2 - got := len(after) - if got != want { - t.Errorf("Want build count %d, got %d", want, got) - } - for _, build := range after { - if build.Number < 100 { - t.Errorf("Expect purge if build number is less than 100") - } - } - // check that orphaned stages are deleted - _ = store.db.View(func(queryer db.Queryer, binder db.Binder) error { - return queryer.QueryRow("SELECT count(*) FROM stages").Scan(&countOfStages) - }) - want = 2 - if want != countOfStages { - t.Errorf("Want stage count %d, got %d", want, countOfStages) - } - // check that orphaned steps are deleted - // count the number of steps - countOfSteps = 2 - _ = store.db.View(func(queryer db.Queryer, binder db.Binder) error { - return queryer.QueryRow("SELECT count(*) FROM steps").Scan(&countOfSteps) - }) - want = 2 - if want != countOfSteps { - t.Errorf("Want step count %d, got %d", want, countOfSteps) - } - } -} - -func testBuildCount(store *buildStore) func(t *testing.T) { - return func(t *testing.T) { - _ = store.db.Update(func(execer db.Execer, binder db.Binder) error { - _, err := execer.Exec("DELETE FROM builds") - return err - }) - _ = store.Create(noContext, &core.Build{RepoID: 1, Number: 98}, nil) - _ = store.Create(noContext, &core.Build{RepoID: 1, Number: 99}, nil) - _ = store.Create(noContext, &core.Build{RepoID: 1, Number: 100}, nil) - _ = store.Create(noContext, &core.Build{RepoID: 1, Number: 101}, nil) - - count, err := store.Count(noContext) - if err != nil { - t.Error(err) - } else if got, want := count, int64(4); got != want { - t.Errorf("Want build count %d, got %d", want, got) - } - } -} - -func testBuildPending(store *buildStore) func(t *testing.T) { - return func(t *testing.T) { - _ = store.db.Update(func(execer db.Execer, binder db.Binder) error { - _, _ = execer.Exec("DELETE FROM builds") - _, _ = execer.Exec("DELETE FROM stages") - return nil - }) - _ = store.Create(noContext, &core.Build{RepoID: 1, Number: 98, Status: core.StatusPending}, []*core.Stage{{RepoID: 1, Number: 1, Status: core.StatusPending}}) - _ = store.Create(noContext, &core.Build{RepoID: 1, Number: 99, Status: core.StatusPending}, []*core.Stage{{RepoID: 1, Number: 1, Status: core.StatusPending}}) - _ = store.Create(noContext, &core.Build{RepoID: 1, Number: 100, Status: core.StatusRunning}, []*core.Stage{{RepoID: 1, Number: 1, Status: 
core.StatusRunning}}) - _ = store.Create(noContext, &core.Build{RepoID: 1, Number: 101, Status: core.StatusPassing}, []*core.Stage{{RepoID: 1, Number: 1, Status: core.StatusPassing}}) - - count, err := store.Count(noContext) - if err != nil { - t.Error(err) - } else if got, want := count, int64(4); got != want { - t.Errorf("Want build count %d, got %d", want, got) - } - list, err := store.Pending(noContext) - if err != nil { - t.Error(err) - } else if got, want := len(list), 2; got != want { - t.Errorf("Want list count %d, got %d", want, got) - } - } -} - -func testBuildRunning(store *buildStore) func(t *testing.T) { - return func(t *testing.T) { - _ = store.db.Update(func(execer db.Execer, binder db.Binder) error { - _, _ = execer.Exec("DELETE FROM builds") - _, _ = execer.Exec("DELETE FROM stages") - return nil - }) - _ = store.Create(noContext, &core.Build{RepoID: 1, Number: 98, Status: core.StatusRunning}, []*core.Stage{{RepoID: 1, Number: 1, Status: core.StatusRunning}}) - _ = store.Create(noContext, &core.Build{RepoID: 1, Number: 99, Status: core.StatusRunning}, []*core.Stage{{RepoID: 1, Number: 1, Status: core.StatusRunning}}) - _ = store.Create(noContext, &core.Build{RepoID: 1, Number: 100, Status: core.StatusBlocked}, []*core.Stage{{RepoID: 1, Number: 1, Status: core.StatusBlocked}}) - _ = store.Create(noContext, &core.Build{RepoID: 1, Number: 101, Status: core.StatusPassing}, []*core.Stage{{RepoID: 1, Number: 1, Status: core.StatusPassing}}) - - count, err := store.Count(noContext) - if err != nil { - t.Error(err) - } else if got, want := count, int64(4); got != want { - t.Errorf("Want build count %d, got %d", want, got) - } - list, err := store.Running(noContext) - if err != nil { - t.Error(err) - } else if got, want := len(list), 2; got != want { - t.Errorf("Want list count %d, got %d", want, got) - } - } -} - -func testBuildLatest(store *buildStore) func(t *testing.T) { - return func(t *testing.T) { - _ = store.db.Update(func(execer db.Execer, binder db.Binder) error { - _, _ = execer.Exec("DELETE FROM stages") - _, _ = execer.Exec("DELETE FROM latest") - _, _ = execer.Exec("DELETE FROM builds") - return nil - }) - - // step 1: insert the initial builds - build := &core.Build{ - RepoID: 1, - Number: 99, - Event: core.EventPush, - Ref: "refs/heads/master", - Target: "master", - } - - err := store.Create(noContext, build, []*core.Stage{}) - if err != nil { - t.Error(err) - return - } - - develop := &core.Build{ - RepoID: 1, - Number: 100, - Event: core.EventPush, - Ref: "refs/heads/develop", - Target: "develop", - } - err = store.Create(noContext, develop, []*core.Stage{}) - if err != nil { - t.Error(err) - return - } - - err = store.Create(noContext, &core.Build{ - RepoID: 1, - Number: 999, - Event: core.EventPullRequest, - Ref: "refs/pulls/10/head", - Source: "develop", - Target: "master", - }, []*core.Stage{}) - if err != nil { - t.Error(err) - return - } - - // step 2: verify the latest build number was captured - latest, _ := store.LatestBranches(noContext, build.RepoID) - if len(latest) != 2 { - t.Errorf("Expect latest branch list == 1, got %d", len(latest)) - return - } - if got, want := latest[0].Number, build.Number; got != want { - t.Errorf("Expected latest master build number %d, got %d", want, got) - } - if got, want := latest[1].Number, develop.Number; got != want { - t.Errorf("Expected latest develop build number %d, got %d", want, got) - return - } - - build = &core.Build{ - RepoID: 1, - Number: 101, - Event: core.EventPush, - Ref: "refs/heads/master", - Target: 
"master", - } - err = store.Create(noContext, build, []*core.Stage{}) - if err != nil { - t.Error(err) - return - } - - latest, _ = store.LatestBranches(noContext, build.RepoID) - if len(latest) != 2 { - t.Errorf("Expect latest branch list == 1") - return - } - if got, want := latest[1].Number, build.Number; got != want { - t.Errorf("Expected latest build number %d, got %d", want, got) - return - } - - err = store.DeleteBranch(noContext, build.RepoID, build.Target) - if err != nil { - t.Error(err) - return - } - - latest, _ = store.LatestBranches(noContext, build.RepoID) - if len(latest) != 1 { - t.Errorf("Expect latest branch list == 1 after delete") - return - } - } -} - -func testBuild(item *core.Build) func(t *testing.T) { - return func(t *testing.T) { - if got, want := item.RepoID, int64(1); got != want { - t.Errorf("Want build repo ID %d, got %d", want, got) - } - if got, want := item.Number, int64(99); got != want { - t.Errorf("Want build number %d, got %d", want, got) - } - if got, want := item.Ref, "refs/heads/master"; got != want { - t.Errorf("Want build ref %q, got %q", want, got) - } - } -} diff --git a/store/build/scan.go b/store/build/scan.go deleted file mode 100644 index 4377ed28e8..0000000000 --- a/store/build/scan.go +++ /dev/null @@ -1,172 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package build - -import ( - "database/sql" - "encoding/json" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/shared/db" - - "github.com/jmoiron/sqlx/types" -) - -// helper function converts the Build structure to a set -// of named query parameters. -func toParams(build *core.Build) map[string]interface{} { - return map[string]interface{}{ - "build_id": build.ID, - "build_repo_id": build.RepoID, - "build_trigger": build.Trigger, - "build_number": build.Number, - "build_parent": build.Parent, - "build_status": build.Status, - "build_error": build.Error, - "build_event": build.Event, - "build_action": build.Action, - "build_link": build.Link, - "build_timestamp": build.Timestamp, - "build_title": build.Title, - "build_message": build.Message, - "build_before": build.Before, - "build_after": build.After, - "build_ref": build.Ref, - "build_source_repo": build.Fork, - "build_source": build.Source, - "build_target": build.Target, - "build_author": build.Author, - "build_author_name": build.AuthorName, - "build_author_email": build.AuthorEmail, - "build_author_avatar": build.AuthorAvatar, - "build_sender": build.Sender, - "build_params": encodeParams(build.Params), - "build_cron": build.Cron, - "build_deploy": build.Deploy, - "build_deploy_id": build.DeployID, - "build_debug": build.Debug, - "build_started": build.Started, - "build_finished": build.Finished, - "build_created": build.Created, - "build_updated": build.Updated, - "build_version": build.Version, - } -} - -// helper function converts the Stage structure to a set -// of named query parameters. 
-func toStageParams(stage *core.Stage) map[string]interface{} { - return map[string]interface{}{ - "stage_id": stage.ID, - "stage_repo_id": stage.RepoID, - "stage_build_id": stage.BuildID, - "stage_number": stage.Number, - "stage_name": stage.Name, - "stage_kind": stage.Kind, - "stage_type": stage.Type, - "stage_status": stage.Status, - "stage_error": stage.Error, - "stage_errignore": stage.ErrIgnore, - "stage_exit_code": stage.ExitCode, - "stage_limit": stage.Limit, - "stage_limit_repo": stage.LimitRepo, - "stage_os": stage.OS, - "stage_arch": stage.Arch, - "stage_variant": stage.Variant, - "stage_kernel": stage.Kernel, - "stage_machine": stage.Machine, - "stage_started": stage.Started, - "stage_stopped": stage.Stopped, - "stage_created": stage.Created, - "stage_updated": stage.Updated, - "stage_version": stage.Version, - "stage_on_success": stage.OnSuccess, - "stage_on_failure": stage.OnFailure, - "stage_depends_on": encodeSlice(stage.DependsOn), - "stage_labels": encodeParams(stage.Labels), - } -} - -func encodeParams(v map[string]string) types.JSONText { - raw, _ := json.Marshal(v) - return types.JSONText(raw) -} - -func encodeSlice(v []string) types.JSONText { - raw, _ := json.Marshal(v) - return types.JSONText(raw) -} - -// helper function scans the sql.Row and copies the column -// values to the destination object. -func scanRow(scanner db.Scanner, dest *core.Build) error { - paramsJSON := types.JSONText{} - err := scanner.Scan( - &dest.ID, - &dest.RepoID, - &dest.Trigger, - &dest.Number, - &dest.Parent, - &dest.Status, - &dest.Error, - &dest.Event, - &dest.Action, - &dest.Link, - &dest.Timestamp, - &dest.Title, - &dest.Message, - &dest.Before, - &dest.After, - &dest.Ref, - &dest.Fork, - &dest.Source, - &dest.Target, - &dest.Author, - &dest.AuthorName, - &dest.AuthorEmail, - &dest.AuthorAvatar, - &dest.Sender, - ¶msJSON, - &dest.Cron, - &dest.Deploy, - &dest.DeployID, - &dest.Debug, - &dest.Started, - &dest.Finished, - &dest.Created, - &dest.Updated, - &dest.Version, - ) - dest.Params = map[string]string{} - json.Unmarshal(paramsJSON, &dest.Params) - return err -} - -// helper function scans the sql.Row and copies the column -// values to the destination object. -func scanRows(rows *sql.Rows) ([]*core.Build, error) { - defer rows.Close() - - builds := []*core.Build{} - for rows.Next() { - build := new(core.Build) - err := scanRow(rows, build) - if err != nil { - return nil, err - } - builds = append(builds, build) - } - return builds, nil -} diff --git a/store/card/card.go b/store/card/card.go deleted file mode 100644 index 4918920fe3..0000000000 --- a/store/card/card.go +++ /dev/null @@ -1,149 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. -// +build !oss - -package card - -import ( - "bytes" - "context" - "io" - "io/ioutil" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/shared/db" -) - -// New returns a new card database store. 
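Cards are stored as raw bytes keyed by the step id, so callers write and read them as streams. A sketch of the round trip through the store returned by New, mirroring the tests further down; the cards, ctx, and stepID variables and the JSON body are placeholders, and the bytes and io/ioutil imports are assumed:

payload := bytes.NewBufferString(`{"type": "AdaptiveCard"}`)
if err := cards.Create(ctx, stepID, payload); err != nil {
	return err
}

rc, err := cards.Find(ctx, stepID)
if err != nil {
	return err
}
defer rc.Close()
data, err := ioutil.ReadAll(rc) // data holds the stored card JSON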
-func New(db *db.DB) core.CardStore { - return &cardStore{ - db: db, - } -} - -type cardStore struct { - db *db.DB -} - -type card struct { - Id int64 `json:"id,omitempty"` - Data []byte `json:"card_data"` -} - -func (c cardStore) Find(ctx context.Context, step int64) (io.ReadCloser, error) { - out := &card{Id: step} - err := c.db.View(func(queryer db.Queryer, binder db.Binder) error { - params, err := toParams(out) - if err != nil { - return err - } - query, args, err := binder.BindNamed(queryKey, params) - if err != nil { - return err - } - row := queryer.QueryRow(query, args...) - return scanRow(row, out) - }) - - return ioutil.NopCloser( - bytes.NewBuffer(out.Data), - ), err -} - -func (c cardStore) Create(ctx context.Context, step int64, r io.Reader) error { - data, err := ioutil.ReadAll(r) - if err != nil { - return err - } - return c.db.Lock(func(execer db.Execer, binder db.Binder) error { - in := &card{ - Id: step, - Data: data, - } - params, err := toParams(in) - if err != nil { - return err - } - stmt, args, err := binder.BindNamed(stmtInsert, params) - if err != nil { - return err - } - _, err = execer.Exec(stmt, args...) - return err - }) -} - -func (c *cardStore) Update(ctx context.Context, step int64, r io.Reader) error { - data, err := ioutil.ReadAll(r) - if err != nil { - return err - } - return c.db.Lock(func(execer db.Execer, binder db.Binder) error { - card := &card{ - Id: step, - Data: data, - } - params, err := toParams(card) - if err != nil { - return err - } - stmt, args, err := binder.BindNamed(stmtUpdate, params) - if err != nil { - return err - } - _, err = execer.Exec(stmt, args...) - return err - }) -} - -func (c cardStore) Delete(ctx context.Context, step int64) error { - return c.db.Lock(func(execer db.Execer, binder db.Binder) error { - params := &card{ - Id: step, - } - stmt, args, err := binder.BindNamed(stmtDelete, params) - if err != nil { - return err - } - _, err = execer.Exec(stmt, args...) - return err - }) -} - -const queryBase = ` -SELECT - card_id -,card_data -` - -const queryKey = queryBase + ` -FROM cards -WHERE card_id = :card_id -LIMIT 1 -` - -const stmtInsert = ` -INSERT INTO cards ( - card_id -,card_data -) VALUES ( - :card_id -,:card_data -) -` - -const stmtUpdate = ` -UPDATE cards -SET card_data = :card_data -WHERE card_id = :card_id -` - -const stmtDelete = ` -DELETE FROM cards -WHERE card_id = :card_id -` - -const stmtInsertPostgres = stmtInsert + ` -RETURNING card_id -` diff --git a/store/card/card_oss.go b/store/card/card_oss.go deleted file mode 100644 index 0c3d3c2fd6..0000000000 --- a/store/card/card_oss.go +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
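The card store above follows the data-access pattern shared by the stores removed in this patch: reads run inside db.View, writes inside db.Lock, and SQL is written with :named parameters that binder.BindNamed rebinds for the active driver. A minimal sketch of a read in that style, assuming a hypothetical labels table that is not part of Drone:

package example

import (
	"github.com/drone/drone/store/shared/db"
)

// findLabelName sketches the read path used by the removed stores: build the
// named-parameter map, rebind the query for the current driver, then scan the
// single row inside a db.View transaction. Table and column names are invented.
func findLabelName(conn *db.DB, id int64) (string, error) {
	var name string
	err := conn.View(func(queryer db.Queryer, binder db.Binder) error {
		params := map[string]interface{}{"label_id": id}
		query, args, err := binder.BindNamed(
			"SELECT label_name FROM labels WHERE label_id = :label_id", params)
		if err != nil {
			return err
		}
		return queryer.QueryRow(query, args...).Scan(&name)
	})
	return name, err
}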
- -// +build oss - -package card - -import ( - "context" - "io" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/shared/db" -) - -func New(db *db.DB) core.CardStore { - return new(noop) -} - -type noop struct{} - -func (noop) Find(ctx context.Context, step int64) (io.ReadCloser, error) { - return nil, nil -} - -func (noop) Create(ctx context.Context, step int64, r io.Reader) error { - return nil -} - -func (noop) Update(ctx context.Context, step int64, r io.Reader) error { - return nil -} - -func (noop) Delete(ctx context.Context, step int64) error { - return nil -} diff --git a/store/card/card_test.go b/store/card/card_test.go deleted file mode 100644 index 89bd424e86..0000000000 --- a/store/card/card_test.go +++ /dev/null @@ -1,122 +0,0 @@ -package card - -import ( - "bytes" - "context" - "database/sql" - "io/ioutil" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/build" - "github.com/drone/drone/store/repos" - "github.com/drone/drone/store/shared/db/dbtest" - "github.com/drone/drone/store/step" -) - -var noContext = context.TODO() - -func TestCard(t *testing.T) { - conn, err := dbtest.Connect() - if err != nil { - t.Error(err) - return - } - defer func() { - dbtest.Reset(conn) - dbtest.Disconnect(conn) - }() - - // seed with a dummy repository - dummyRepo := &core.Repository{UID: "1", Slug: "octocat/hello-world"} - repos := repos.New(conn) - repos.Create(noContext, dummyRepo) - - // seed with a dummy stage - stage := &core.Stage{Number: 1} - stages := []*core.Stage{stage} - - // seed with a dummy build - dummyBuild := &core.Build{Number: 1, RepoID: dummyRepo.ID} - builds := build.New(conn) - builds.Create(noContext, dummyBuild, stages) - - // seed with a dummy step - dummyStep := &core.Step{Number: 1, StageID: stage.ID} - steps := step.New(conn) - steps.Create(noContext, dummyStep) - - store := New(conn).(*cardStore) - t.Run("Create", testCardCreate(store, dummyStep)) - t.Run("Find", testFindCard(store, dummyStep)) - t.Run("Update", testLogsUpdate(store, dummyStep)) -} - -func testCardCreate(store *cardStore, step *core.Step) func(t *testing.T) { - return func(t *testing.T) { - buf := ioutil.NopCloser( - bytes.NewBuffer([]byte("{\"type\": \"AdaptiveCard\"}")), - ) - err := store.Create(noContext, step.ID, buf) - if err != nil { - t.Error(err) - } - } -} - -func testFindCard(card *cardStore, step *core.Step) func(t *testing.T) { - return func(t *testing.T) { - r, err := card.Find(noContext, step.ID) - if err != nil { - t.Error(err) - } else { - data, err := ioutil.ReadAll(r) - if err != nil { - t.Error(err) - return - } - if got, want := string(data), "{\"type\": \"AdaptiveCard\"}"; got != want { - t.Errorf("Want card data output stream %q, got %q", want, got) - } - } - } -} - -func testLogsUpdate(store *cardStore, step *core.Step) func(t *testing.T) { - return func(t *testing.T) { - buf := bytes.NewBufferString("hola mundo") - err := store.Update(noContext, step.ID, buf) - if err != nil { - t.Error(err) - return - } - r, err := store.Find(noContext, step.ID) - if err != nil { - t.Error(err) - return - } - data, err := ioutil.ReadAll(r) - if err != nil { - t.Error(err) - return - } - if got, want := string(data), "hola mundo"; got != want { - t.Errorf("Want updated log output stream %q, got %q", want, got) - } - } -} - -func testLogsDelete(store *cardStore, step *core.Step) func(t *testing.T) { - return func(t *testing.T) { - err := store.Delete(noContext, step.ID) - if err != nil { - t.Error(err) - return - } - _, err = 
store.Find(noContext, step.ID) - if got, want := sql.ErrNoRows, err; got != want { - t.Errorf("Want sql.ErrNoRows, got %v", got) - return - } - } -} diff --git a/store/card/scan.go b/store/card/scan.go deleted file mode 100644 index 495ee1ca4d..0000000000 --- a/store/card/scan.go +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package card - -import ( - "github.com/drone/drone/store/shared/db" -) - -// helper function converts the card structure to a set -// of named query parameters. -func toParams(card *card) (map[string]interface{}, error) { - return map[string]interface{}{ - "card_id": card.Id, - "card_data": card.Data, - }, nil -} - -// helper function scans the sql.Row and copies the column -// values to the destination object. -func scanRow(scanner db.Scanner, dst *card) error { - err := scanner.Scan( - &dst.Id, - &dst.Data, - ) - if err != nil { - return err - } - return nil -} diff --git a/store/cron/cron.go b/store/cron/cron.go deleted file mode 100644 index b4abe4f821..0000000000 --- a/store/cron/cron.go +++ /dev/null @@ -1,244 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package cron - -// NewCronStore returns a new CronStore. -import ( - "context" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/shared/db" -) - -// New returns a new Cron database store. -func New(db *db.DB) core.CronStore { - return &cronStore{db} -} - -type cronStore struct { - db *db.DB -} - -func (s *cronStore) List(ctx context.Context, id int64) ([]*core.Cron, error) { - var out []*core.Cron - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - params := map[string]interface{}{"cron_repo_id": id} - stmt, args, err := binder.BindNamed(queryRepo, params) - if err != nil { - return err - } - rows, err := queryer.Query(stmt, args...) - if err != nil { - return err - } - out, err = scanRows(rows) - return err - }) - return out, err -} - -func (s *cronStore) Ready(ctx context.Context, before int64) ([]*core.Cron, error) { - var out []*core.Cron - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - params := map[string]interface{}{"cron_next": before} - stmt, args, err := binder.BindNamed(queryReady, params) - if err != nil { - return err - } - rows, err := queryer.Query(stmt, args...) - if err != nil { - return err - } - out, err = scanRows(rows) - return err - }) - return out, err -} - -func (s *cronStore) Find(ctx context.Context, id int64) (*core.Cron, error) { - out := &core.Cron{ID: id} - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - params := toParams(out) - query, args, err := binder.BindNamed(queryKey, params) - if err != nil { - return err - } - row := queryer.QueryRow(query, args...) - return scanRow(row, out) - }) - return out, err -} - -func (s *cronStore) FindName(ctx context.Context, id int64, name string) (*core.Cron, error) { - out := &core.Cron{Name: name, RepoID: id} - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - params := toParams(out) - query, args, err := binder.BindNamed(queryName, params) - if err != nil { - return err - } - row := queryer.QueryRow(query, args...) 
- return scanRow(row, out) - }) - return out, err -} - -func (s *cronStore) Create(ctx context.Context, cron *core.Cron) error { - if s.db.Driver() == db.Postgres { - return s.createPostgres(ctx, cron) - } - return s.create(ctx, cron) -} - -func (s *cronStore) create(ctx context.Context, cron *core.Cron) error { - return s.db.Lock(func(execer db.Execer, binder db.Binder) error { - params := toParams(cron) - stmt, args, err := binder.BindNamed(stmtInsert, params) - if err != nil { - return err - } - res, err := execer.Exec(stmt, args...) - if err != nil { - return err - } - cron.ID, err = res.LastInsertId() - return err - }) -} - -func (s *cronStore) createPostgres(ctx context.Context, cron *core.Cron) error { - return s.db.Lock(func(execer db.Execer, binder db.Binder) error { - params := toParams(cron) - stmt, args, err := binder.BindNamed(stmtInsertPg, params) - if err != nil { - return err - } - return execer.QueryRow(stmt, args...).Scan(&cron.ID) - }) -} - -func (s *cronStore) Update(ctx context.Context, cron *core.Cron) error { - return s.db.Lock(func(execer db.Execer, binder db.Binder) error { - params := toParams(cron) - stmt, args, err := binder.BindNamed(stmtUpdate, params) - if err != nil { - return err - } - _, err = execer.Exec(stmt, args...) - return err - }) -} - -func (s *cronStore) Delete(ctx context.Context, cron *core.Cron) error { - return s.db.Lock(func(execer db.Execer, binder db.Binder) error { - params := toParams(cron) - stmt, args, err := binder.BindNamed(stmtDelete, params) - if err != nil { - return err - } - _, err = execer.Exec(stmt, args...) - return err - }) -} - -const queryBase = ` -SELECT - cron_id -,cron_repo_id -,cron_name -,cron_expr -,cron_next -,cron_prev -,cron_event -,cron_branch -,cron_target -,cron_disabled -,cron_created -,cron_updated -,cron_version -` - -const queryKey = queryBase + ` -FROM cron -WHERE cron_id = :cron_id -LIMIT 1 -` - -const queryName = queryBase + ` -FROM cron -WHERE cron_name = :cron_name - AND cron_repo_id = :cron_repo_id -LIMIT 1 -` - -const queryRepo = queryBase + ` -FROM cron -WHERE cron_repo_id = :cron_repo_id -ORDER BY cron_name -` - -const queryReady = queryBase + ` -FROM cron -WHERE cron_next < :cron_next -ORDER BY cron_name -` - -const stmtUpdate = ` -UPDATE cron SET - cron_repo_id = :cron_repo_id -,cron_name = :cron_name -,cron_expr = :cron_expr -,cron_next = :cron_next -,cron_prev = :cron_prev -,cron_event = :cron_event -,cron_branch = :cron_branch -,cron_target = :cron_target -,cron_disabled = :cron_disabled -,cron_created = :cron_created -,cron_updated = :cron_updated -,cron_version = :cron_version -WHERE cron_id = :cron_id -` - -const stmtDelete = ` -DELETE FROM cron -WHERE cron_id = :cron_id -` - -const stmtInsert = ` -INSERT INTO cron ( - cron_repo_id -,cron_name -,cron_expr -,cron_next -,cron_prev -,cron_event -,cron_branch -,cron_target -,cron_disabled -,cron_created -,cron_updated -,cron_version -) VALUES ( - :cron_repo_id -,:cron_name -,:cron_expr -,:cron_next -,:cron_prev -,:cron_event -,:cron_branch -,:cron_target -,:cron_disabled -,:cron_created -,:cron_updated -,:cron_version -) -` - -const stmtInsertPg = stmtInsert + ` -RETURNING cron_id -` diff --git a/store/cron/cron_oss.go b/store/cron/cron_oss.go deleted file mode 100644 index aff1fb76c5..0000000000 --- a/store/cron/cron_oss.go +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build oss - -package cron - -import ( - "context" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/shared/db" -) - -// New returns a new Secret database store. -func New(db *db.DB) core.CronStore { - return new(noop) -} - -type noop struct{} - -func (noop) List(ctx context.Context, id int64) ([]*core.Cron, error) { - return nil, nil -} - -func (noop) Ready(ctx context.Context, id int64) ([]*core.Cron, error) { - return nil, nil -} - -func (noop) Find(ctx context.Context, id int64) (*core.Cron, error) { - return nil, nil -} - -func (noop) FindName(ctx context.Context, id int64, name string) (*core.Cron, error) { - return nil, nil -} - -func (noop) Create(ctx context.Context, secret *core.Cron) error { - return nil -} - -func (noop) Update(context.Context, *core.Cron) error { - return nil -} - -func (noop) Delete(context.Context, *core.Cron) error { - return nil -} diff --git a/store/cron/cron_test.go b/store/cron/cron_test.go deleted file mode 100644 index bb185c040f..0000000000 --- a/store/cron/cron_test.go +++ /dev/null @@ -1,210 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package cron - -import ( - "context" - "database/sql" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/repos" - "github.com/drone/drone/store/shared/db/dbtest" -) - -var noContext = context.TODO() - -func TestCron(t *testing.T) { - conn, err := dbtest.Connect() - if err != nil { - t.Error(err) - return - } - defer func() { - dbtest.Reset(conn) - dbtest.Disconnect(conn) - }() - - // seeds the database with a dummy repository. 
- repo := &core.Repository{UID: "1", Slug: "octocat/hello-world"} - repos := repos.New(conn) - if err := repos.Create(noContext, repo); err != nil { - t.Error(err) - } - - store := New(conn).(*cronStore) - t.Run("Create", testCronCreate(store, repos, repo)) -} - -func testCronCreate(store *cronStore, repos core.RepositoryStore, repo *core.Repository) func(t *testing.T) { - return func(t *testing.T) { - item := &core.Cron{ - RepoID: repo.ID, - Name: "nightly", - Expr: "00 00 * * *", - Next: 1000000000, - } - err := store.Create(noContext, item) - if err != nil { - t.Error(err) - } - if item.ID == 0 { - t.Errorf("Want cron ID assigned, got %d", item.ID) - } - - t.Run("Find", testCronFind(store, item)) - t.Run("FindName", testCronFindName(store, repo)) - t.Run("List", testCronList(store, repo)) - t.Run("Read", testCronReady(store, repo)) - t.Run("Update", testCronUpdate(store, repo)) - t.Run("Delete", testCronDelete(store, repo)) - t.Run("Fkey", testCronForeignKey(store, repos, repo)) - } -} - -func testCronFind(store *cronStore, cron *core.Cron) func(t *testing.T) { - return func(t *testing.T) { - item, err := store.Find(noContext, cron.ID) - if err != nil { - t.Error(err) - } else { - t.Run("Fields", testCron(item)) - } - } -} - -func testCronFindName(store *cronStore, repo *core.Repository) func(t *testing.T) { - return func(t *testing.T) { - item, err := store.FindName(noContext, repo.ID, "nightly") - if err != nil { - t.Error(err) - } else { - t.Run("Fields", testCron(item)) - } - } -} - -func testCronList(store *cronStore, repo *core.Repository) func(t *testing.T) { - return func(t *testing.T) { - list, err := store.List(noContext, repo.ID) - if err != nil { - t.Error(err) - return - } - if got, want := len(list), 1; got != want { - t.Errorf("Want count %d, got %d", want, got) - } else { - t.Run("Fields", testCron(list[0])) - } - } -} - -func testCronReady(store *cronStore, repo *core.Repository) func(t *testing.T) { - return func(t *testing.T) { - item := &core.Cron{ - RepoID: repo.ID, - Name: "daily", - Expr: "00 00 * * *", - Next: 1000000002, // ignored (1 second too late) - } - err := store.Create(noContext, item) - if err != nil { - t.Error(err) - return - } - list, err := store.Ready(noContext, 1000000001) - if err != nil { - t.Error(err) - return - } - if got, want := len(list), 1; got != want { - t.Errorf("Want count %d, got %d", want, got) - } else { - t.Run("Fields", testCron(list[0])) - } - } -} - -func testCronUpdate(store *cronStore, repo *core.Repository) func(t *testing.T) { - return func(t *testing.T) { - before, err := store.FindName(noContext, repo.ID, "nightly") - if err != nil { - t.Error(err) - return - } - err = store.Update(noContext, before) - if err != nil { - t.Error(err) - return - } - after, err := store.Find(noContext, before.ID) - if err != nil { - t.Error(err) - return - } - if after == nil { - t.Fail() - } - } -} - -func testCronDelete(store *cronStore, repo *core.Repository) func(t *testing.T) { - return func(t *testing.T) { - cron, err := store.FindName(noContext, repo.ID, "nightly") - if err != nil { - t.Error(err) - return - } - err = store.Delete(noContext, cron) - if err != nil { - t.Error(err) - return - } - _, err = store.Find(noContext, cron.ID) - if got, want := sql.ErrNoRows, err; got != want { - t.Errorf("Want sql.ErrNoRows, got %v", got) - return - } - } -} - -func testCronForeignKey(store *cronStore, repos core.RepositoryStore, repo *core.Repository) func(t *testing.T) { - return func(t *testing.T) { - item := &core.Cron{ - RepoID: repo.ID, 
- Name: "nightly", - Expr: "00 00 * * *", - } - store.Create(noContext, item) - before, _ := store.List(noContext, repo.ID) - if len(before) == 0 { - t.Errorf("Want non-empty cron list") - return - } - - err := repos.Delete(noContext, repo) - if err != nil { - t.Error(err) - return - } - after, _ := store.List(noContext, repo.ID) - if len(after) != 0 { - t.Errorf("Want empty cron list") - } - } -} - -func testCron(item *core.Cron) func(t *testing.T) { - return func(t *testing.T) { - if got, want := item.Name, "nightly"; got != want { - t.Errorf("Want cron name %q, got %q", want, got) - } - if got, want := item.Expr, "00 00 * * *"; got != want { - t.Errorf("Want cron name %q, got %q", want, got) - } - } -} diff --git a/store/cron/scan.go b/store/cron/scan.go deleted file mode 100644 index 68a73b20ba..0000000000 --- a/store/cron/scan.go +++ /dev/null @@ -1,71 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package cron - -import ( - "database/sql" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/shared/db" -) - -// helper function converts the User structure to a set -// of named query parameters. -func toParams(cron *core.Cron) map[string]interface{} { - return map[string]interface{}{ - "cron_id": cron.ID, - "cron_repo_id": cron.RepoID, - "cron_name": cron.Name, - "cron_expr": cron.Expr, - "cron_next": cron.Next, - "cron_prev": cron.Prev, - "cron_event": cron.Event, - "cron_branch": cron.Branch, - "cron_target": cron.Target, - "cron_disabled": cron.Disabled, - "cron_created": cron.Created, - "cron_updated": cron.Updated, - "cron_version": cron.Version, - } -} - -// helper function scans the sql.Row and copies the column -// values to the destination object. -func scanRow(scanner db.Scanner, dst *core.Cron) error { - return scanner.Scan( - &dst.ID, - &dst.RepoID, - &dst.Name, - &dst.Expr, - &dst.Next, - &dst.Prev, - &dst.Event, - &dst.Branch, - &dst.Target, - &dst.Disabled, - &dst.Created, - &dst.Updated, - &dst.Version, - ) -} - -// helper function scans the sql.Row and copies the column -// values to the destination object. -func scanRows(rows *sql.Rows) ([]*core.Cron, error) { - defer rows.Close() - - crons := []*core.Cron{} - for rows.Next() { - cron := new(core.Cron) - err := scanRow(rows, cron) - if err != nil { - return nil, err - } - crons = append(crons, cron) - } - return crons, nil -} diff --git a/store/logs/azureblob.go b/store/logs/azureblob.go deleted file mode 100644 index a0f10b15c1..0000000000 --- a/store/logs/azureblob.go +++ /dev/null @@ -1,100 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package logs - -import ( - "context" - "fmt" - "io" - "net/url" - - "github.com/Azure/azure-storage-blob-go/azblob" - "github.com/drone/drone/core" -) - -// NewAzureBlobEnv returns a new Azure blob log store. 
-func NewAzureBlobEnv(containerName, storageAccountName, storageAccessKey string) core.LogStore { - return &azureBlobStore{ - containerName: containerName, - storageAccountName: storageAccountName, - storageAccessKey: storageAccessKey, - containerURL: nil, - } -} - -type azureBlobStore struct { - containerName string - storageAccountName string - storageAccessKey string - containerURL *azblob.ContainerURL -} - -func (az *azureBlobStore) Find(ctx context.Context, step int64) (io.ReadCloser, error) { - err := az.getContainerURL() - if err != nil { - return nil, err - } - blobURL := az.containerURL.NewBlockBlobURL(fmt.Sprintf("%d", step)) - out, err := blobURL.Download(ctx, 0, azblob.CountToEnd, azblob.BlobAccessConditions{}, false) - if err != nil { - return nil, err - } - return out.Body(azblob.RetryReaderOptions{}), nil -} - -func (az *azureBlobStore) Create(ctx context.Context, step int64, r io.Reader) error { - err := az.getContainerURL() - if err != nil { - return err - } - opts := &azblob.UploadStreamToBlockBlobOptions{ - BufferSize: 4 * 1024 * 1024, - MaxBuffers: 5, - } - blobURL := az.containerURL.NewBlockBlobURL(fmt.Sprintf("%d", step)) - _, err = azblob.UploadStreamToBlockBlob(ctx, r, blobURL, *opts) - return err -} - -func (az *azureBlobStore) Update(ctx context.Context, step int64, r io.Reader) error { - return az.Create(ctx, step, r) -} - -func (az *azureBlobStore) Delete(ctx context.Context, step int64) error { - err := az.getContainerURL() - if err != nil { - return err - } - blobURL := az.containerURL.NewBlockBlobURL(fmt.Sprintf("%d", step)) - _, err = blobURL.Delete(ctx, azblob.DeleteSnapshotsOptionInclude, azblob.BlobAccessConditions{}) - return err -} - -func (az *azureBlobStore) getContainerURL() error { - if az.containerURL != nil { - return nil - } - if len(az.storageAccountName) == 0 || len(az.storageAccessKey) == 0 { - return fmt.Errorf("Either the storage account or storage access key environment variable is not set") - } - credential, err := azblob.NewSharedKeyCredential(az.storageAccountName, az.storageAccessKey) - - if err != nil { - return err - } - - p := azblob.NewPipeline(credential, azblob.PipelineOptions{}) - URL, err := url.Parse(fmt.Sprintf("https://%s.blob.core.windows.net/%s", az.storageAccountName, az.containerName)) - - if err != nil { - return err - } - - containerURL := azblob.NewContainerURL(*URL, p) - az.containerURL = &containerURL - return nil -} diff --git a/store/logs/azureblob_oss.go b/store/logs/azureblob_oss.go deleted file mode 100644 index 3438078f77..0000000000 --- a/store/logs/azureblob_oss.go +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build oss - -package logs - -import "github.com/drone/drone/core" - -// New returns a zero value LogStore. 
-func NewAzureBlobEnv(containerName, storageAccountName, storageAccessKey string) core.LogStore { - return nil -} diff --git a/store/logs/combine.go b/store/logs/combine.go deleted file mode 100644 index fe7a15cec9..0000000000 --- a/store/logs/combine.go +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package logs - -import ( - "context" - "io" - - "github.com/drone/drone/core" -) - -// NewCombined returns a new combined log store that will fallback -// to a secondary log store when necessary. This can be useful when -// migrating from database logs to s3, where logs for older builds -// are still being stored in the database, and newer logs in s3. -func NewCombined(primary, secondary core.LogStore) core.LogStore { - return &combined{ - primary: primary, - secondary: secondary, - } -} - -type combined struct { - primary, secondary core.LogStore -} - -func (s *combined) Find(ctx context.Context, step int64) (io.ReadCloser, error) { - rc, err := s.primary.Find(ctx, step) - if err == nil { - return rc, err - } - return s.secondary.Find(ctx, step) -} - -func (s *combined) Create(ctx context.Context, step int64, r io.Reader) error { - return s.primary.Create(ctx, step, r) -} - -func (s *combined) Update(ctx context.Context, step int64, r io.Reader) error { - return s.primary.Update(ctx, step, r) -} - -func (s *combined) Delete(ctx context.Context, step int64) error { - err := s.primary.Delete(ctx, step) - if err != nil { - err = s.secondary.Delete(ctx, step) - } - return err -} diff --git a/store/logs/logs.go b/store/logs/logs.go deleted file mode 100644 index 9b9c78535f..0000000000 --- a/store/logs/logs.go +++ /dev/null @@ -1,135 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package logs - -import ( - "bytes" - "context" - "io" - "io/ioutil" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/shared/db" -) - -// New returns a new LogStore. -func New(db *db.DB) core.LogStore { - return &logStore{db} -} - -type logStore struct { - db *db.DB -} - -func (s *logStore) Find(ctx context.Context, step int64) (io.ReadCloser, error) { - out := &logs{ID: step} - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - query, args, err := binder.BindNamed(queryKey, out) - if err != nil { - return err - } - row := queryer.QueryRow(query, args...) 
- return scanRow(row, out) - }) - return ioutil.NopCloser( - bytes.NewBuffer(out.Data), - ), err -} - -func (s *logStore) Create(ctx context.Context, step int64, r io.Reader) error { - data, err := ioutil.ReadAll(r) - if err != nil { - return err - } - return s.db.Lock(func(execer db.Execer, binder db.Binder) error { - params := &logs{ - ID: step, - Data: data, - } - stmt, args, err := binder.BindNamed(stmtInsert, params) - if err != nil { - return err - } - _, err = execer.Exec(stmt, args...) - return err - }) -} - -func (s *logStore) Update(ctx context.Context, step int64, r io.Reader) error { - data, err := ioutil.ReadAll(r) - if err != nil { - return err - } - return s.db.Lock(func(execer db.Execer, binder db.Binder) error { - params := &logs{ - ID: step, - Data: data, - } - stmt, args, err := binder.BindNamed(stmtUpdate, params) - if err != nil { - return err - } - _, err = execer.Exec(stmt, args...) - return err - }) -} - -func (s *logStore) Delete(ctx context.Context, step int64) error { - return s.db.Lock(func(execer db.Execer, binder db.Binder) error { - params := &logs{ - ID: step, - } - stmt, args, err := binder.BindNamed(stmtDelete, params) - if err != nil { - return err - } - _, err = execer.Exec(stmt, args...) - return err - }) -} - -type logs struct { - ID int64 `db:"log_id"` - Data []byte `db:"log_data"` -} - -const queryKey = ` -SELECT - log_id -,log_data -FROM logs -WHERE log_id = :log_id -` - -const stmtInsert = ` -INSERT INTO logs ( - log_id -,log_data -) VALUES ( - :log_id -,:log_data -) -` - -const stmtUpdate = ` -UPDATE logs -SET log_data = :log_data -WHERE log_id = :log_id -` - -const stmtDelete = ` -DELETE FROM logs -WHERE log_id = :log_id -` diff --git a/store/logs/logs_test.go b/store/logs/logs_test.go deleted file mode 100644 index 2495e245ca..0000000000 --- a/store/logs/logs_test.go +++ /dev/null @@ -1,125 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
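The database-backed log store above is the default; NewCombined (from combine.go, also removed here) lets it coexist with a blob store during a migration. A sketch of the intended wiring, assuming an already-configured AWS session and placeholder bucket and prefix names:

package example

import (
	"github.com/aws/aws-sdk-go/aws/session"

	"github.com/drone/drone/core"
	"github.com/drone/drone/store/logs"
	"github.com/drone/drone/store/shared/db"
)

// buildLogStore wires the removed constructors together: new logs are written
// to S3, and logs created before the migration stay readable because the
// combined store's Find falls back to the database when the S3 lookup fails.
func buildLogStore(conn *db.DB, sess *session.Session) core.LogStore {
	primary := logs.NewS3(sess, "drone-logs", "logs") // bucket and prefix are placeholders
	secondary := logs.New(conn)                       // legacy database-backed logs
	return logs.NewCombined(primary, secondary)
}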
- -package logs - -import ( - "bytes" - "context" - "database/sql" - "io/ioutil" - "testing" - - "github.com/drone/drone/store/shared/db/dbtest" - "github.com/drone/drone/core" - "github.com/drone/drone/store/build" - "github.com/drone/drone/store/repos" - "github.com/drone/drone/store/step" -) - -var noContext = context.TODO() - -func TestLogs(t *testing.T) { - conn, err := dbtest.Connect() - if err != nil { - t.Error(err) - return - } - defer func() { - dbtest.Reset(conn) - dbtest.Disconnect(conn) - }() - - // seed with a dummy repository - arepo := &core.Repository{UID: "1", Slug: "octocat/hello-world"} - repos := repos.New(conn) - repos.Create(noContext, arepo) - - // seed with a dummy stage - stage := &core.Stage{Number: 1} - stages := []*core.Stage{stage} - - // seed with a dummy build - abuild := &core.Build{Number: 1, RepoID: arepo.ID} - builds := build.New(conn) - builds.Create(noContext, abuild, stages) - - // seed with a dummy step - astep := &core.Step{Number: 1, StageID: stage.ID} - steps := step.New(conn) - steps.Create(noContext, astep) - - store := New(conn).(*logStore) - t.Run("Create", testLogsCreate(store, astep)) - t.Run("Find", testLogsFind(store, astep)) - t.Run("Update", testLogsUpdate(store, astep)) - t.Run("Delete", testLogsDelete(store, astep)) -} - -func testLogsCreate(store *logStore, step *core.Step) func(t *testing.T) { - return func(t *testing.T) { - buf := bytes.NewBufferString("hello world") - err := store.Create(noContext, step.ID, buf) - if err != nil { - t.Error(err) - } - } -} - -func testLogsFind(store *logStore, step *core.Step) func(t *testing.T) { - return func(t *testing.T) { - r, err := store.Find(noContext, step.ID) - if err != nil { - t.Error(err) - return - } - data, err := ioutil.ReadAll(r) - if err != nil { - t.Error(err) - return - } - if got, want := string(data), "hello world"; got != want { - t.Errorf("Want log output stream %q, got %q", want, got) - } - } -} - -func testLogsUpdate(store *logStore, step *core.Step) func(t *testing.T) { - return func(t *testing.T) { - buf := bytes.NewBufferString("hola mundo") - err := store.Update(noContext, step.ID, buf) - if err != nil { - t.Error(err) - return - } - r, err := store.Find(noContext, step.ID) - if err != nil { - t.Error(err) - return - } - data, err := ioutil.ReadAll(r) - if err != nil { - t.Error(err) - return - } - if got, want := string(data), "hola mundo"; got != want { - t.Errorf("Want updated log output stream %q, got %q", want, got) - } - } -} - -func testLogsDelete(store *logStore, step *core.Step) func(t *testing.T) { - return func(t *testing.T) { - err := store.Delete(noContext, step.ID) - if err != nil { - t.Error(err) - return - } - _, err = store.Find(noContext, step.ID) - if got, want := sql.ErrNoRows, err; got != want { - t.Errorf("Want sql.ErrNoRows, got %v", got) - return - } - } -} diff --git a/store/logs/s3.go b/store/logs/s3.go deleted file mode 100644 index 6f5e92d083..0000000000 --- a/store/logs/s3.go +++ /dev/null @@ -1,99 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package logs - -import ( - "context" - "fmt" - "io" - "path" - "strings" - - "github.com/aws/aws-sdk-go/aws" - "github.com/aws/aws-sdk-go/aws/session" - "github.com/aws/aws-sdk-go/service/s3" - "github.com/aws/aws-sdk-go/service/s3/s3manager" - - "github.com/drone/drone/core" -) - -// NewS3Env returns a new S3 log store. 
-func NewS3Env(bucket, prefix, endpoint string, pathStyle bool) core.LogStore { - disableSSL := false - - if endpoint != "" { - disableSSL = !strings.HasPrefix(endpoint, "https://") - } - - return &s3store{ - bucket: bucket, - prefix: prefix, - session: session.Must( - session.NewSession(&aws.Config{ - Endpoint: aws.String(endpoint), - DisableSSL: aws.Bool(disableSSL), - S3ForcePathStyle: aws.Bool(pathStyle), - }), - ), - } -} - -// NewS3 returns a new S3 log store. -func NewS3(session *session.Session, bucket, prefix string) core.LogStore { - return &s3store{ - bucket: bucket, - prefix: prefix, - session: session, - } -} - -type s3store struct { - bucket string - prefix string - session *session.Session -} - -func (s *s3store) Find(ctx context.Context, step int64) (io.ReadCloser, error) { - svc := s3.New(s.session) - out, err := svc.GetObject(&s3.GetObjectInput{ - Bucket: aws.String(s.bucket), - Key: aws.String(s.key(step)), - }) - if err != nil { - return nil, err - } - return out.Body, nil -} - -func (s *s3store) Create(ctx context.Context, step int64, r io.Reader) error { - uploader := s3manager.NewUploader(s.session) - input := &s3manager.UploadInput{ - ACL: aws.String("private"), - Bucket: aws.String(s.bucket), - Key: aws.String(s.key(step)), - Body: r, - } - _, err := uploader.Upload(input) - return err -} - -func (s *s3store) Update(ctx context.Context, step int64, r io.Reader) error { - return s.Create(ctx, step, r) -} - -func (s *s3store) Delete(ctx context.Context, step int64) error { - svc := s3.New(s.session) - _, err := svc.DeleteObject(&s3.DeleteObjectInput{ - Bucket: aws.String(s.bucket), - Key: aws.String(s.key(step)), - }) - return err -} - -func (s *s3store) key(step int64) string { - return path.Join("/", s.prefix, fmt.Sprint(step)) -} diff --git a/store/logs/s3_oss.go b/store/logs/s3_oss.go deleted file mode 100644 index 9cb958468b..0000000000 --- a/store/logs/s3_oss.go +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build oss - -package logs - -import "github.com/drone/drone/core" - -// New returns a zero value LogStore. -func NewS3Env(bucket, prefix, endpoint string, pathStyle bool) core.LogStore { - return nil -} diff --git a/store/logs/s3_test.go b/store/logs/s3_test.go deleted file mode 100644 index a7530834a6..0000000000 --- a/store/logs/s3_test.go +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
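NewS3Env above derives its TLS setting from the endpoint scheme, which only matters for S3-compatible services. A short usage sketch for a self-hosted MinIO-style deployment; the endpoint and bucket names are placeholders:

package example

import (
	"github.com/drone/drone/core"
	"github.com/drone/drone/store/logs"
)

// minioLogStore shows how the removed NewS3Env constructor is meant to be
// called for an S3-compatible endpoint: SSL is disabled automatically because
// the endpoint is plain http, and path-style addressing is forced because the
// bucket cannot be addressed as a subdomain.
func minioLogStore() core.LogStore {
	return logs.NewS3Env("drone-logs", "logs", "http://minio:9000", true)
}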
- -// +build !oss - -package logs - -import "testing" - -func TestKey(t *testing.T) { - tests := []struct { - bucket string - prefix string - result string - }{ - { - bucket: "test-bucket", - prefix: "drone/logs", - result: "/drone/logs/1", - }, - { - bucket: "test-bucket", - prefix: "/drone/logs", - result: "/drone/logs/1", - }, - } - for _, test := range tests { - s := &s3store{ - bucket: test.bucket, - prefix: test.prefix, - } - if got, want := s.key(1), test.result; got != want { - t.Errorf("Want key %s, got %s", want, got) - } - } -} diff --git a/store/logs/scan.go b/store/logs/scan.go deleted file mode 100644 index 651b966248..0000000000 --- a/store/logs/scan.go +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package logs - -import "github.com/drone/drone/store/shared/db" - -// helper function scans the sql.Row and copies the column -// values to the destination object. -func scanRow(scanner db.Scanner, dst *logs) error { - return scanner.Scan( - &dst.ID, - &dst.Data, - ) -} diff --git a/store/perm/perm.go b/store/perm/perm.go deleted file mode 100644 index b2bca074b2..0000000000 --- a/store/perm/perm.go +++ /dev/null @@ -1,177 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package perm - -import ( - "context" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/shared/db" -) - -// New returns a new PermStore. -func New(db *db.DB) core.PermStore { - return &permStore{db} -} - -type permStore struct { - db *db.DB -} - -// Find returns a project member from the datastore. -func (s *permStore) Find(ctx context.Context, repo string, user int64) (*core.Perm, error) { - out := &core.Perm{RepoUID: repo, UserID: user} - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - params := toParams(out) - query, args, err := binder.BindNamed(queryKey, params) - if err != nil { - return err - } - row := queryer.QueryRow(query, args...) - return scanRow(row, out) - }) - return out, err -} - -// List returns a list of project members from the datastore. -func (s *permStore) List(ctx context.Context, repo string) ([]*core.Collaborator, error) { - var out []*core.Collaborator - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - params := map[string]interface{}{"repo_uid": repo} - stmt, args, err := binder.BindNamed(queryCollabs, params) - if err != nil { - return err - } - rows, err := queryer.Query(stmt, args...) 
- if err != nil { - return err - } - out, err = scanCollabRows(rows) - return err - }) - return out, err -} - -// Create persists a project member to the datastore. -func (s *permStore) Create(ctx context.Context, perm *core.Perm) error { - return s.db.Lock(func(execer db.Execer, binder db.Binder) error { - params := toParams(perm) - stmt, args, err := binder.BindNamed(stmtInsert, params) - if err != nil { - return err - } - _, err = execer.Exec(stmt, args...) - return err - }) -} - -// Update persists an updated project member to the datastore. -func (s *permStore) Update(ctx context.Context, perm *core.Perm) error { - return s.db.Lock(func(execer db.Execer, binder db.Binder) error { - params := toParams(perm) - stmt, args, err := binder.BindNamed(stmtUpdate, params) - if err != nil { - return err - } - _, err = execer.Exec(stmt, args...) - return err - }) -} - -// Delete deletes a project member from the datastore. -func (s *permStore) Delete(ctx context.Context, perm *core.Perm) error { - return s.db.Lock(func(execer db.Execer, binder db.Binder) error { - params := toParams(perm) - stmt, args, err := binder.BindNamed(stmtDelete, params) - if err != nil { - return err - } - _, err = execer.Exec(stmt, args...) - return err - }) -} - -const queryKey = ` -SELECT - perm_user_id -,perm_repo_uid -,perm_read -,perm_write -,perm_admin -,perm_synced -,perm_created -,perm_updated -FROM perms -WHERE perm_user_id = :perm_user_id - AND perm_repo_uid = :perm_repo_uid -` - -const queryCollabs = ` -SELECT - perm_user_id -,perm_repo_uid -,user_login -,user_avatar -,perm_read -,perm_write -,perm_admin -,perm_synced -,perm_created -,perm_updated -FROM users -INNER JOIN perms ON perms.perm_user_id = users.user_id -WHERE perms.perm_repo_uid = :repo_uid -ORDER BY user_login ASC -` - -const stmtDelete = ` -DELETE FROM perms -WHERE perm_user_id = :perm_user_id - AND perm_repo_uid = :perm_repo_uid -` - -const stmtUpdate = ` -UPDATE perms -SET - perm_read = :perm_read -,perm_write = :perm_write -,perm_admin = :perm_admin -,perm_synced = :perm_synced -,perm_updated = :perm_updated -WHERE perm_user_id = :perm_user_id - AND perm_repo_uid = :perm_repo_uid -` - -const stmtInsert = ` -INSERT INTO perms ( - perm_user_id -,perm_repo_uid -,perm_read -,perm_write -,perm_admin -,perm_synced -,perm_created -,perm_updated -) VALUES ( - :perm_user_id -,:perm_repo_uid -,:perm_read -,:perm_write -,:perm_admin -,:perm_synced -,:perm_created -,:perm_updated -) -` diff --git a/store/perm/perm_test.go b/store/perm/perm_test.go deleted file mode 100644 index b8f2d5c4cf..0000000000 --- a/store/perm/perm_test.go +++ /dev/null @@ -1,165 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package perm - -import ( - "context" - "database/sql" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/repos" - "github.com/drone/drone/store/shared/db/dbtest" - "github.com/drone/drone/store/shared/encrypt" - "github.com/drone/drone/store/user" -) - -var noContext = context.TODO() - -func TestPerms(t *testing.T) { - conn, err := dbtest.Connect() - if err != nil { - t.Error(err) - return - } - defer func() { - dbtest.Reset(conn) - dbtest.Disconnect(conn) - }() - - // no-op encrypter - enc, _ := encrypt.New("") - - // seeds the database with a dummy user account. 
- auser := &core.User{Login: "spaceghost"} - users := user.New(conn, enc) - err = users.Create(noContext, auser) - if err != nil { - t.Error(err) - } - - // seeds the database with a dummy repository. - arepo := &core.Repository{UID: "1", Slug: "octocat/hello-world"} - repos := repos.New(conn) - err = repos.Create(noContext, arepo) - if err != nil { - t.Error(err) - } - if err != nil { - t.Error(err) - } - - store := New(conn).(*permStore) - t.Run("Create", testPermCreate(store, auser, arepo)) - t.Run("Find", testPermFind(store, auser, arepo)) - t.Run("List", testPermList(store, auser, arepo)) - t.Run("Update", testPermUpdate(store, auser, arepo)) - t.Run("Delete", testPermDelete(store, auser, arepo)) -} - -func testPermCreate(store *permStore, user *core.User, repo *core.Repository) func(t *testing.T) { - return func(t *testing.T) { - item := &core.Perm{ - UserID: user.ID, - RepoUID: repo.UID, - Read: true, - Write: true, - Admin: false, - } - err := store.Create(noContext, item) - if err != nil { - t.Error(err) - } - } -} - -func testPermFind(store *permStore, user *core.User, repo *core.Repository) func(t *testing.T) { - return func(t *testing.T) { - item, err := store.Find(noContext, repo.UID, user.ID) - if err != nil { - t.Error(err) - } else { - t.Run("Fields", testPerm(item)) - } - } -} - -func testPermList(store *permStore, user *core.User, repo *core.Repository) func(t *testing.T) { - return func(t *testing.T) { - list, err := store.List(noContext, repo.UID) - if err != nil { - t.Error(err) - return - } - if got, want := len(list), 1; got != want { - t.Errorf("Want collaborator count %d, got %d", want, got) - return - } - if got, want := list[0].Login, user.Login; got != want { - t.Errorf("Want username %q, got %q", want, got) - } - t.Run("Fields", testPerm( - &core.Perm{ - Read: list[0].Read, - Write: list[0].Write, - Admin: list[0].Admin, - }, - )) - } -} - -func testPermUpdate(store *permStore, user *core.User, repo *core.Repository) func(t *testing.T) { - return func(t *testing.T) { - before := &core.Perm{ - UserID: user.ID, - RepoUID: repo.UID, - Read: true, - Write: true, - Admin: true, - } - err := store.Update(noContext, before) - if err != nil { - t.Error(err) - return - } - after, err := store.Find(noContext, before.RepoUID, before.UserID) - if err != nil { - t.Error(err) - return - } - if got, want := after.Admin, before.Admin; got != want { - t.Errorf("Want updated Admin %v, got %v", want, got) - } - } -} - -func testPermDelete(store *permStore, user *core.User, repo *core.Repository) func(t *testing.T) { - return func(t *testing.T) { - err := store.Delete(noContext, &core.Perm{UserID: user.ID, RepoUID: repo.UID}) - if err != nil { - t.Error(err) - return - } - _, err = store.Find(noContext, "3", user.ID) - if got, want := sql.ErrNoRows, err; got != want { - t.Errorf("Want sql.ErrNoRows, got %v", got) - return - } - } -} - -func testPerm(item *core.Perm) func(t *testing.T) { - return func(t *testing.T) { - if got, want := item.Read, true; got != want { - t.Errorf("Want Read %v, got %v", want, got) - } - if got, want := item.Write, true; got != want { - t.Errorf("Want Write %v, got %v", want, got) - } - if got, want := item.Admin, false; got != want { - t.Errorf("Want Admin %v, got %v", want, got) - } - } -} diff --git a/store/perm/scan.go b/store/perm/scan.go deleted file mode 100644 index d56c09d20f..0000000000 --- a/store/perm/scan.go +++ /dev/null @@ -1,86 +0,0 @@ -// Copyright 2019 Drone IO, Inc. 
-// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package perm - -import ( - "database/sql" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/shared/db" -) - -// helper function converts the Perm structure to a set -// of named query parameters. -func toParams(perm *core.Perm) map[string]interface{} { - return map[string]interface{}{ - "perm_user_id": perm.UserID, - "perm_repo_uid": perm.RepoUID, - "perm_read": perm.Read, - "perm_write": perm.Write, - "perm_admin": perm.Admin, - "perm_synced": perm.Synced, - "perm_created": perm.Created, - "perm_updated": perm.Updated, - } -} - -// helper function scans the sql.Row and copies the column -// values to the destination object. -func scanRow(scanner db.Scanner, dst *core.Perm) error { - return scanner.Scan( - &dst.UserID, - &dst.RepoUID, - &dst.Read, - &dst.Write, - &dst.Admin, - &dst.Synced, - &dst.Created, - &dst.Updated, - ) -} - -// helper function scans the sql.Row and copies the column -// values to the destination object. -func scanCollabRow(scanner db.Scanner, dst *core.Collaborator) error { - return scanner.Scan( - &dst.UserID, - &dst.RepoUID, - &dst.Login, - &dst.Avatar, - &dst.Read, - &dst.Write, - &dst.Admin, - &dst.Synced, - &dst.Created, - &dst.Updated, - ) -} - -// helper function scans the sql.Row and copies the column -// values to the destination object. -func scanCollabRows(rows *sql.Rows) ([]*core.Collaborator, error) { - defer rows.Close() - - collabs := []*core.Collaborator{} - for rows.Next() { - collab := new(core.Collaborator) - err := scanCollabRow(rows, collab) - if err != nil { - return nil, err - } - collabs = append(collabs, collab) - } - return collabs, nil -} diff --git a/store/repos/repos.go b/store/repos/repos.go deleted file mode 100644 index ac883bf692..0000000000 --- a/store/repos/repos.go +++ /dev/null @@ -1,585 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package repos - -import ( - "context" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/shared/db" -) - -// New returns a new RepositoryStore. 
-func New(db *db.DB) core.RepositoryStore { - return &repoStore{db} -} - -type repoStore struct { - db *db.DB -} - -func (s *repoStore) List(ctx context.Context, id int64) ([]*core.Repository, error) { - var out []*core.Repository - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - params := map[string]interface{}{"user_id": id} - query, args, err := binder.BindNamed(queryPerms, params) - if err != nil { - return err - } - rows, err := queryer.Query(query, args...) - if err != nil { - return err - } - out, err = scanRows(rows) - return err - }) - return out, err -} - -func (s *repoStore) ListLatest(ctx context.Context, id int64) ([]*core.Repository, error) { - var out []*core.Repository - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - params := map[string]interface{}{ - "user_id": id, - "repo_active": true, - } - stmt := queryRepoWithBuild - if s.db.Driver() == db.Postgres { - stmt = queryRepoWithBuildPostgres - } - query, args, err := binder.BindNamed(stmt, params) - if err != nil { - return err - } - rows, err := queryer.Query(query, args...) - if err != nil { - return err - } - out, err = scanRowsBuild(rows) - return err - }) - return out, err -} - -func (s *repoStore) ListRecent(ctx context.Context, id int64) ([]*core.Repository, error) { - var out []*core.Repository - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - params := map[string]interface{}{"user_id": id} - query, args, err := binder.BindNamed(queryRepoWithBuildAll, params) - if err != nil { - return err - } - rows, err := queryer.Query(query, args...) - if err != nil { - return err - } - out, err = scanRowsBuild(rows) - return err - }) - return out, err -} - -func (s *repoStore) ListIncomplete(ctx context.Context) ([]*core.Repository, error) { - var out []*core.Repository - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - rows, err := queryer.Query(queryRepoWithBuildIncomplete) - if err != nil { - return err - } - out, err = scanRowsBuild(rows) - return err - }) - return out, err -} - -func (s *repoStore) ListRunningStatus(ctx context.Context) ([]*core.RepoBuildStage, error) { - var out []*core.RepoBuildStage - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - rows, err := queryer.Query(queryReposRunningStatus) - if err != nil { - return err - } - out, err = repoBuildStageRowsBuild(rows) - return err - }) - return out, err -} - -func (s *repoStore) ListAll(ctx context.Context, limit, offset int) ([]*core.Repository, error) { - var out []*core.Repository - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - params := map[string]interface{}{ - "limit": limit, - "offset": offset, - } - query, args, err := binder.BindNamed(queryAll, params) - if err != nil { - return err - } - rows, err := queryer.Query(query, args...) - if err != nil { - return err - } - out, err = scanRows(rows) - return err - }) - return out, err -} - -func (s *repoStore) Find(ctx context.Context, id int64) (*core.Repository, error) { - out := &core.Repository{ID: id} - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - params := ToParams(out) - query, args, err := binder.BindNamed(queryKey, params) - if err != nil { - return err - } - row := queryer.QueryRow(query, args...) 
- return scanRow(row, out) - }) - return out, err -} - -func (s *repoStore) FindName(ctx context.Context, namespace, name string) (*core.Repository, error) { - out := &core.Repository{Slug: namespace + "/" + name} - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - params := ToParams(out) - query, args, err := binder.BindNamed(querySlug, params) - if err != nil { - return err - } - row := queryer.QueryRow(query, args...) - return scanRow(row, out) - }) - return out, err -} - -func (s *repoStore) Create(ctx context.Context, repo *core.Repository) error { - if s.db.Driver() == db.Postgres { - return s.createPostgres(ctx, repo) - } - return s.create(ctx, repo) -} - -func (s *repoStore) create(ctx context.Context, repo *core.Repository) error { - return s.db.Lock(func(execer db.Execer, binder db.Binder) error { - repo.Version = 1 // set the initial record version - params := ToParams(repo) - stmt, args, err := binder.BindNamed(stmtInsert, params) - if err != nil { - return err - } - res, err := execer.Exec(stmt, args...) - if err != nil { - return err - } - repo.ID, err = res.LastInsertId() - return err - }) -} - -func (s *repoStore) createPostgres(ctx context.Context, repo *core.Repository) error { - return s.db.Lock(func(execer db.Execer, binder db.Binder) error { - repo.Version = 1 // set the initial record version - params := ToParams(repo) - stmt, args, err := binder.BindNamed(stmtInsertPg, params) - if err != nil { - return err - } - return execer.QueryRow(stmt, args...).Scan(&repo.ID) - }) -} - -func (s *repoStore) Activate(ctx context.Context, repo *core.Repository) error { - return s.Update(ctx, repo) -} - -func (s *repoStore) Update(ctx context.Context, repo *core.Repository) error { - versionNew := repo.Version + 1 - versionOld := repo.Version - err := s.db.Lock(func(execer db.Execer, binder db.Binder) error { - params := ToParams(repo) - params["repo_version_old"] = versionOld - params["repo_version_new"] = versionNew - stmt, args, err := binder.BindNamed(stmtUpdate, params) - if err != nil { - return err - } - res, err := execer.Exec(stmt, args...) - if err != nil { - return err - } - effected, err := res.RowsAffected() - if err != nil { - return err - } - if effected == 0 { - return db.ErrOptimisticLock - } - return nil - }) - if err == nil { - repo.Version = versionNew - } - return err -} - -func (s *repoStore) Delete(ctx context.Context, repo *core.Repository) error { - return s.db.Lock(func(execer db.Execer, binder db.Binder) error { - params := ToParams(repo) - stmt, args, _ := binder.BindNamed(stmtDelete, params) - _, err := execer.Exec(stmt, args...) 
- return err - }) -} - -func (s *repoStore) Count(ctx context.Context) (i int64, err error) { - err = s.db.View(func(queryer db.Queryer, binder db.Binder) error { - params := map[string]interface{}{"repo_active": true} - query, args, _ := binder.BindNamed(queryCount, params) - return queryer.QueryRow(query, args...).Scan(&i) - }) - return -} - -func (s *repoStore) Increment(ctx context.Context, repo *core.Repository) (*core.Repository, error) { - for { - repo.Counter++ - err := s.Update(ctx, repo) - if err == nil { - return repo, nil - } - if err != nil && err != db.ErrOptimisticLock { - return repo, err - } - repo, err = s.Find(ctx, repo.ID) - if err != nil { - return nil, err - } - } -} - -const queryCount = ` -SELECT count(*) -FROM repos -WHERE repo_active = :repo_active -` - -const queryCols = ` -SELECT - repo_id -,repo_uid -,repo_user_id -,repo_namespace -,repo_name -,repo_slug -,repo_scm -,repo_clone_url -,repo_ssh_url -,repo_html_url -,repo_active -,repo_private -,repo_visibility -,repo_branch -,repo_counter -,repo_config -,repo_timeout -,repo_throttle -,repo_trusted -,repo_protected -,repo_no_forks -,repo_no_pulls -,repo_cancel_pulls -,repo_cancel_push -,repo_cancel_running -,repo_synced -,repo_created -,repo_updated -,repo_version -,repo_signer -,repo_secret -` - -const queryColsBuilds = queryCols + ` -,build_id -,build_repo_id -,build_trigger -,build_number -,build_parent -,build_status -,build_error -,build_event -,build_action -,build_link -,build_timestamp -,build_title -,build_message -,build_before -,build_after -,build_ref -,build_source_repo -,build_source -,build_target -,build_author -,build_author_name -,build_author_email -,build_author_avatar -,build_sender -,build_params -,build_cron -,build_deploy -,build_deploy_id -,build_debug -,build_started -,build_finished -,build_created -,build_updated -,build_version -` - -const queryKey = queryCols + ` -FROM repos -WHERE repo_id = :repo_id -` - -const querySlug = queryCols + ` -FROM repos -WHERE repo_slug = :repo_slug -` - -const queryPerms = queryCols + ` -FROM repos -INNER JOIN perms ON perms.perm_repo_uid = repos.repo_uid -WHERE perms.perm_user_id = :user_id -ORDER BY repo_slug ASC -` - -const queryAll = queryCols + ` -FROM repos -LIMIT :limit OFFSET :offset -` - -const stmtDelete = ` -DELETE FROM repos WHERE repo_id = :repo_id -` - -const stmtInsert = ` -INSERT INTO repos ( - repo_uid -,repo_user_id -,repo_namespace -,repo_name -,repo_slug -,repo_scm -,repo_clone_url -,repo_ssh_url -,repo_html_url -,repo_active -,repo_private -,repo_visibility -,repo_branch -,repo_counter -,repo_config -,repo_timeout -,repo_throttle -,repo_trusted -,repo_protected -,repo_no_forks -,repo_no_pulls -,repo_cancel_pulls -,repo_cancel_push -,repo_cancel_running -,repo_synced -,repo_created -,repo_updated -,repo_version -,repo_signer -,repo_secret -) VALUES ( - :repo_uid -,:repo_user_id -,:repo_namespace -,:repo_name -,:repo_slug -,:repo_scm -,:repo_clone_url -,:repo_ssh_url -,:repo_html_url -,:repo_active -,:repo_private -,:repo_visibility -,:repo_branch -,:repo_counter -,:repo_config -,:repo_timeout -,:repo_throttle -,:repo_trusted -,:repo_protected -,:repo_no_forks -,:repo_no_pulls -,:repo_cancel_pulls -,:repo_cancel_push -,:repo_cancel_running -,:repo_synced -,:repo_created -,:repo_updated -,:repo_version -,:repo_signer -,:repo_secret -) -` - -const stmtInsertPg = stmtInsert + ` -RETURNING repo_id -` - -const stmtPermInsert = ` -INSERT INTO perms VALUES ( - :perm_user_id -,:perm_repo_uid -,:perm_read -,:perm_write -,:perm_admin 
-,:perm_synced -,:perm_created -,:perm_updated -) -` - -const stmtUpdate = ` -UPDATE repos SET - repo_user_id = :repo_user_id -,repo_namespace = :repo_namespace -,repo_name = :repo_name -,repo_slug = :repo_slug -,repo_scm = :repo_scm -,repo_clone_url = :repo_clone_url -,repo_ssh_url = :repo_ssh_url -,repo_html_url = :repo_html_url -,repo_branch = :repo_branch -,repo_private = :repo_private -,repo_visibility = :repo_visibility -,repo_active = :repo_active -,repo_config = :repo_config -,repo_trusted = :repo_trusted -,repo_protected = :repo_protected -,repo_no_forks = :repo_no_forks -,repo_no_pulls = :repo_no_pulls -,repo_cancel_pulls = :repo_cancel_pulls -,repo_cancel_push = :repo_cancel_push -,repo_cancel_running = :repo_cancel_running -,repo_timeout = :repo_timeout -,repo_throttle = :repo_throttle -,repo_counter = :repo_counter -,repo_synced = :repo_synced -,repo_created = :repo_created -,repo_updated = :repo_updated -,repo_version = :repo_version_new -,repo_signer = :repo_signer -,repo_secret = :repo_secret -WHERE repo_id = :repo_id - AND repo_version = :repo_version_old -` - -// TODO(bradrydzewski) this query needs performance tuning. -// one approach that is promising is the ability to use the -// repo_counter (latest build number) to join on the build -// table. -// -// FROM repos LEFT OUTER JOIN builds ON ( -// repos.repo_id = builds.build_repo_id AND -// builds.build_number = repos.repo_counter -// ) -// INNER JOIN perms ON perms.perm_repo_uid = repos.repo_uid -// - -const queryRepoWithBuild = queryColsBuilds + ` -FROM repos LEFT OUTER JOIN builds ON build_id = ( - SELECT build_id FROM builds - WHERE builds.build_repo_id = repos.repo_id - ORDER BY build_id DESC - LIMIT 1 -) -INNER JOIN perms ON perms.perm_repo_uid = repos.repo_uid -WHERE perms.perm_user_id = :user_id -ORDER BY repo_slug ASC -` - -const queryRepoWithBuildPostgres = queryColsBuilds + ` -FROM repos LEFT OUTER JOIN builds ON build_id = ( - SELECT DISTINCT ON (build_repo_id) build_id FROM builds - WHERE builds.build_repo_id = repos.repo_id - ORDER BY build_repo_id, build_id DESC -) -INNER JOIN perms ON perms.perm_repo_uid = repos.repo_uid -WHERE perms.perm_user_id = :user_id -ORDER BY repo_slug ASC -` - -const queryRepoWithBuildAll = queryColsBuilds + ` -FROM repos -INNER JOIN perms ON perms.perm_repo_uid = repos.repo_uid -INNER JOIN builds ON builds.build_repo_id = repos.repo_id -WHERE perms.perm_user_id = :user_id -ORDER BY build_id DESC -LIMIT 25; -` - -const queryRepoWithBuildIncomplete = queryColsBuilds + ` -FROM repos -INNER JOIN builds ON builds.build_repo_id = repos.repo_id -WHERE EXISTS ( - SELECT stage_id - FROM stages - WHERE stages.stage_build_id = builds.build_id - AND stages.stage_status IN ('pending', 'running') -) -ORDER BY build_id DESC -LIMIT 50; -` -const queryReposRunningStatus = ` -SELECT -repo_namespace -,repo_name -,repo_slug -,build_number -,build_author -,build_author_name -,build_author_email -,build_author_avatar -,build_sender -,build_started -,build_finished -,build_created -,build_updated -,stage_name -,stage_kind -,stage_type -,stage_status -,stage_machine -,stage_os -,stage_arch -,stage_variant -,stage_kernel -,stage_limit -,stage_limit_repo -,stage_started -,stage_stopped -FROM repos -INNER JOIN builds ON builds.build_repo_id = repos.repo_id -inner join stages on stages.stage_build_id = builds.build_id -where stages.stage_status IN ('pending', 'running') -ORDER BY build_id DESC; -` diff --git a/store/repos/repos_test.go b/store/repos/repos_test.go deleted file mode 100644 index 
2506a7a6c4..0000000000 --- a/store/repos/repos_test.go +++ /dev/null @@ -1,352 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package repos - -import ( - "context" - "encoding/json" - "io/ioutil" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/shared/db" - "github.com/drone/drone/store/shared/db/dbtest" - - "github.com/google/go-cmp/cmp" - "github.com/google/go-cmp/cmp/cmpopts" -) - -var noContext = context.TODO() - -func TestRepo(t *testing.T) { - conn, err := dbtest.Connect() - if err != nil { - t.Error(err) - return - } - defer func() { - dbtest.Reset(conn) - dbtest.Disconnect(conn) - }() - - store := New(conn).(*repoStore) - t.Run("Create", testRepoCreate(store)) - t.Run("Count", testRepoCount(store)) - t.Run("Find", testRepoFind(store)) - t.Run("FindName", testRepoFindName(store)) - t.Run("List", testRepoList(store)) - t.Run("ListLatest", testRepoListLatest(store)) - t.Run("Update", testRepoUpdate(store)) - t.Run("Activate", testRepoActivate(store)) - t.Run("Locking", testRepoLocking(store)) - t.Run("Increment", testRepoIncrement(store)) - t.Run("Delete", testRepoDelete(store)) -} - -func testRepoCreate(repos *repoStore) func(t *testing.T) { - return func(t *testing.T) { - out, err := ioutil.ReadFile("testdata/repo.json") - if err != nil { - t.Error(err) - return - } - repo := &core.Repository{} - err = json.Unmarshal(out, repo) - if err != nil { - t.Error(err) - return - } - err = repos.Create(noContext, repo) - if err != nil { - t.Error(err) - } - if got := repo.ID; got == 0 { - t.Errorf("Want non-zero ID") - } - if got, want := repo.Version, int64(1); got != want { - t.Errorf("Want Version %d, got %d", want, got) - } - - err = repos.db.Update(func(execer db.Execer, binder db.Binder) error { - query, args, _ := binder.BindNamed(stmtPermInsert, map[string]interface{}{ - "perm_user_id": 1, - "perm_repo_uid": repo.UID, - "perm_read": true, - "perm_write": true, - "perm_admin": true, - "perm_synced": 0, - "perm_created": 0, - "perm_updated": 0, - }) - _, err = execer.Exec(query, args...) 
- return err - }) - if err != nil { - t.Error(err) - } - } -} - -func testRepoCount(repos *repoStore) func(t *testing.T) { - return func(t *testing.T) { - count, err := repos.Count(noContext) - if err != nil { - t.Error(err) - } - if got, want := count, int64(1); got != want { - t.Errorf("Want count %d, got %d", want, got) - } - } -} - -func testRepoFind(repos *repoStore) func(t *testing.T) { - return func(t *testing.T) { - named, err := repos.FindName(noContext, "octocat", "hello-world") - if err != nil { - t.Error(err) - return - } - got, err := repos.Find(noContext, named.ID) - if err != nil { - t.Error(err) - return - } - - want := &core.Repository{} - raw, err := ioutil.ReadFile("testdata/repo.json.golden") - if err != nil { - t.Error(err) - return - } - err = json.Unmarshal(raw, want) - if err != nil { - t.Error(err) - return - } - - ignore := cmpopts.IgnoreFields(core.Repository{}, "ID") - if diff := cmp.Diff(got, want, ignore); len(diff) != 0 { - t.Errorf(diff) - } - } -} - -func testRepoFindName(repos *repoStore) func(t *testing.T) { - return func(t *testing.T) { - got, err := repos.FindName(noContext, "octocat", "hello-world") - if err != nil { - t.Error(err) - return - } - - want := &core.Repository{} - raw, err := ioutil.ReadFile("testdata/repo.json.golden") - if err != nil { - t.Error(err) - return - } - err = json.Unmarshal(raw, want) - if err != nil { - t.Error(err) - return - } - - ignore := cmpopts.IgnoreFields(core.Repository{}, "ID") - if diff := cmp.Diff(got, want, ignore); len(diff) != 0 { - t.Errorf(diff) - } - } -} - -func testRepoList(repos *repoStore) func(t *testing.T) { - return func(t *testing.T) { - repos, err := repos.List(noContext, 1) - if err != nil { - t.Error(err) - return - } - if got, want := len(repos), 1; got != want { - t.Errorf("Want Repo count %d, got %d", want, got) - return - } - - if err != nil { - t.Error(err) - return - } - - got, want := repos[0], &core.Repository{} - raw, err := ioutil.ReadFile("testdata/repo.json.golden") - if err != nil { - t.Error(err) - return - } - err = json.Unmarshal(raw, want) - if err != nil { - t.Error(err) - return - } - - ignore := cmpopts.IgnoreFields(core.Repository{}, "ID") - if diff := cmp.Diff(got, want, ignore); len(diff) != 0 { - t.Errorf(diff) - } - } -} - -func testRepoListLatest(repos *repoStore) func(t *testing.T) { - return func(t *testing.T) { - repos, err := repos.ListLatest(noContext, 1) - if err != nil { - t.Error(err) - return - } - if got, want := len(repos), 1; got != want { - t.Errorf("Want Repo count %d, got %d", want, got) - } else if repos[0].Build != nil { - t.Errorf("Expect nil build") - } else { - t.Run("Fields", testRepo(repos[0])) - } - } -} - -func testRepoUpdate(repos *repoStore) func(t *testing.T) { - return func(t *testing.T) { - before, err := repos.FindName(noContext, "octocat", "hello-world") - if err != nil { - t.Error(err) - return - } - - version := before.Version - before.Private = true - err = repos.Update(noContext, before) - if err != nil { - t.Error(err) - return - } - after, err := repos.Find(noContext, before.ID) - if err != nil { - t.Error(err) - return - } - if got, want := after.Version, version+1; got != want { - t.Errorf("Want version incremented on update") - } - if got, want := before.Private, after.Private; got != want { - t.Errorf("Want updated Repo private %v, got %v", want, got) - } - } -} - -func testRepoActivate(repos *repoStore) func(t *testing.T) { - return func(t *testing.T) { - before, err := repos.FindName(noContext, "octocat", "hello-world") - if err 
!= nil { - t.Error(err) - return - } - before.Active = true - err = repos.Activate(noContext, before) - if err != nil { - t.Error(err) - return - } - after, err := repos.Find(noContext, before.ID) - if err != nil { - t.Error(err) - return - } - if got, want := before.Active, after.Active; got != want { - t.Errorf("Want updated Repo Active %v, got %v", want, got) - } - } -} - -func testRepoLocking(repos *repoStore) func(t *testing.T) { - return func(t *testing.T) { - repo, err := repos.FindName(noContext, "octocat", "hello-world") - if err != nil { - t.Error(err) - return - } - repo.Version = 1 - err = repos.Update(noContext, repo) - if err == nil { - t.Errorf("Want Optimistic Lock Error, got nil") - } else if err != db.ErrOptimisticLock { - t.Errorf("Want Optimistic Lock Error") - } - } -} - -func testRepoIncrement(repos *repoStore) func(t *testing.T) { - return func(t *testing.T) { - repo, err := repos.FindName(noContext, "octocat", "hello-world") - if err != nil { - t.Error(err) - return - } - before := repo.Counter - repo.Version-- - repo, err = repos.Increment(noContext, repo) - if err != nil { - t.Error(err) - return - } - if got, want := repo.Counter, before+1; got != want { - t.Errorf("Want count incremented to %d, got %d", want, got) - } - } -} - -func testRepoDelete(repos *repoStore) func(t *testing.T) { - return func(t *testing.T) { - count, _ := repos.Count(noContext) - if got, want := count, int64(1); got != want { - t.Errorf("Want Repo table count %d, got %d", want, got) - return - } - - repo, err := repos.FindName(noContext, "octocat", "hello-world") - if err != nil { - return - } - - err = repos.Delete(noContext, repo) - if err != nil { - t.Error(err) - } - - count, _ = repos.Count(noContext) - if got, want := count, int64(0); got != want { - t.Errorf("Want Repo table count %d, got %d", want, got) - return - } - } -} - -func testRepo(repo *core.Repository) func(t *testing.T) { - return func(t *testing.T) { - if got, want := repo.UserID, int64(1); got != want { - t.Errorf("Want UserID %d, got %d", want, got) - } - if got, want := repo.Namespace, "octocat"; got != want { - t.Errorf("Want Namespace %q, got %q", want, got) - } - if got, want := repo.Name, "hello-world"; got != want { - t.Errorf("Want Name %q, got %q", want, got) - } - if got, want := repo.Slug, "octocat/hello-world"; got != want { - t.Errorf("Want Slug %q, got %q", want, got) - } - if got, want := repo.UID, "42"; got != want { - t.Errorf("Want UID %q, got %q", want, got) - } - } -} diff --git a/store/repos/scan.go b/store/repos/scan.go deleted file mode 100644 index 9ddbd13202..0000000000 --- a/store/repos/scan.go +++ /dev/null @@ -1,259 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package repos - -import ( - "database/sql" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/shared/db" -) - -// ToParams converts the Repository structure to a set -// of named query parameters. 
-func ToParams(v *core.Repository) map[string]interface{} { - return map[string]interface{}{ - "repo_id": v.ID, - "repo_uid": v.UID, - "repo_user_id": v.UserID, - "repo_namespace": v.Namespace, - "repo_name": v.Name, - "repo_slug": v.Slug, - "repo_scm": v.SCM, - "repo_clone_url": v.HTTPURL, - "repo_ssh_url": v.SSHURL, - "repo_html_url": v.Link, - "repo_branch": v.Branch, - "repo_private": v.Private, - "repo_visibility": v.Visibility, - "repo_active": v.Active, - "repo_config": v.Config, - "repo_trusted": v.Trusted, - "repo_protected": v.Protected, - "repo_no_forks": v.IgnoreForks, - "repo_no_pulls": v.IgnorePulls, - "repo_cancel_pulls": v.CancelPulls, - "repo_cancel_push": v.CancelPush, - "repo_cancel_running": v.CancelRunning, - "repo_timeout": v.Timeout, - "repo_throttle": v.Throttle, - "repo_counter": v.Counter, - "repo_synced": v.Synced, - "repo_created": v.Created, - "repo_updated": v.Updated, - "repo_version": v.Version, - "repo_signer": v.Signer, - "repo_secret": v.Secret, - } -} - -// helper function scans the sql.Row and copies the column -// values to the destination object. -func scanRow(scanner db.Scanner, dest *core.Repository) error { - return scanner.Scan( - &dest.ID, - &dest.UID, - &dest.UserID, - &dest.Namespace, - &dest.Name, - &dest.Slug, - &dest.SCM, - &dest.HTTPURL, - &dest.SSHURL, - &dest.Link, - &dest.Active, - &dest.Private, - &dest.Visibility, - &dest.Branch, - &dest.Counter, - &dest.Config, - &dest.Timeout, - &dest.Throttle, - &dest.Trusted, - &dest.Protected, - &dest.IgnoreForks, - &dest.IgnorePulls, - &dest.CancelPulls, - &dest.CancelPush, - &dest.CancelRunning, - &dest.Synced, - &dest.Created, - &dest.Updated, - &dest.Version, - &dest.Signer, - &dest.Secret, - ) -} - -// helper function scans the sql.Row and copies the column -// values to the destination object. -func scanRows(rows *sql.Rows) ([]*core.Repository, error) { - defer rows.Close() - - repos := []*core.Repository{} - for rows.Next() { - repo := new(core.Repository) - err := scanRow(rows, repo) - if err != nil { - return nil, err - } - repos = append(repos, repo) - } - return repos, nil -} - -// helper function scans the sql.Row and copies the column -// values to the destination object. 
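Note on the convention the removed stores rely on: ToParams produces a map whose keys match the :named placeholders in the query constants, the binder (sqlx) rebinds those placeholders for the active driver, and scanRow's argument order mirrors the column order in queryCols. A minimal standalone sketch of the rebinding step, using a hypothetical query and value rather than anything from the removed files:

    package main

    import (
        "fmt"

        "github.com/jmoiron/sqlx"
    )

    func main() {
        // keys here must match the :named placeholders in the query text.
        params := map[string]interface{}{"repo_slug": "octocat/hello-world"}

        // sqlx.Named rewrites :repo_slug to a positional placeholder and
        // returns the arguments in matching order.
        query, args, err := sqlx.Named(
            "SELECT repo_id FROM repos WHERE repo_slug = :repo_slug", params)
        if err != nil {
            panic(err)
        }
        fmt.Println(query) // SELECT repo_id FROM repos WHERE repo_slug = ?
        fmt.Println(args)  // [octocat/hello-world]
    }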
-func scanRowBuild(scanner db.Scanner, dest *core.Repository) error { - build := new(nullBuild) - err := scanner.Scan( - &dest.ID, - &dest.UID, - &dest.UserID, - &dest.Namespace, - &dest.Name, - &dest.Slug, - &dest.SCM, - &dest.HTTPURL, - &dest.SSHURL, - &dest.Link, - &dest.Active, - &dest.Private, - &dest.Visibility, - &dest.Branch, - &dest.Counter, - &dest.Config, - &dest.Timeout, - &dest.Throttle, - &dest.Trusted, - &dest.Protected, - &dest.IgnoreForks, - &dest.IgnorePulls, - &dest.CancelPulls, - &dest.CancelPush, - &dest.CancelRunning, - &dest.Synced, - &dest.Created, - &dest.Updated, - &dest.Version, - &dest.Signer, - &dest.Secret, - // build parameters - &build.ID, - &build.RepoID, - &build.Trigger, - &build.Number, - &build.Parent, - &build.Status, - &build.Error, - &build.Event, - &build.Action, - &build.Link, - &build.Timestamp, - &build.Title, - &build.Message, - &build.Before, - &build.After, - &build.Ref, - &build.Fork, - &build.Source, - &build.Target, - &build.Author, - &build.AuthorName, - &build.AuthorEmail, - &build.AuthorAvatar, - &build.Sender, - &build.Params, - &build.Cron, - &build.Deploy, - &build.DeployID, - &build.Debug, - &build.Started, - &build.Finished, - &build.Created, - &build.Updated, - &build.Version, - ) - if build.ID.Int64 != 0 { - dest.Build = build.value() - } - return err -} - -// helper function scans the sql.Row and copies the column -// values to the destination object. -func scanRowsBuild(rows *sql.Rows) ([]*core.Repository, error) { - defer rows.Close() - - repos := []*core.Repository{} - for rows.Next() { - repo := new(core.Repository) - err := scanRowBuild(rows, repo) - if err != nil { - return nil, err - } - repos = append(repos, repo) - } - return repos, nil -} - -// helper function scans the sql.Row and copies the column values to the destination object. -func repoBuildStageRowBuild(scanner db.Scanner, dest *core.RepoBuildStage) error { - err := scanner.Scan( - &dest.RepoNamespace, - &dest.RepoName, - &dest.RepoSlug, - &dest.BuildNumber, - &dest.BuildAuthor, - &dest.BuildAuthorName, - &dest.BuildAuthorEmail, - &dest.BuildAuthorAvatar, - &dest.BuildSender, - &dest.BuildStarted, - &dest.BuildFinished, - &dest.BuildCreated, - &dest.BuildUpdated, - &dest.StageName, - &dest.StageKind, - &dest.StageType, - &dest.StageStatus, - &dest.StageMachine, - &dest.StageOS, - &dest.StageArch, - &dest.StageVariant, - &dest.StageKernel, - &dest.StageLimit, - &dest.StageLimitRepo, - &dest.StageStarted, - &dest.StageStopped, - ) - return err -} - -// helper function scans the sql.Row and copies the column values to the destination object. -func repoBuildStageRowsBuild(rows *sql.Rows) ([]*core.RepoBuildStage, error) { - defer rows.Close() - - slices := []*core.RepoBuildStage{} - for rows.Next() { - row := new(core.RepoBuildStage) - err := repoBuildStageRowBuild(rows, row) - if err != nil { - return nil, err - } - slices = append(slices, row) - } - return slices, nil -} diff --git a/store/repos/scan_test.go b/store/repos/scan_test.go deleted file mode 100644 index 5b8603a32f..0000000000 --- a/store/repos/scan_test.go +++ /dev/null @@ -1,5 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -package repos diff --git a/store/repos/testdata/repo.json b/store/repos/testdata/repo.json deleted file mode 100644 index 524770448e..0000000000 --- a/store/repos/testdata/repo.json +++ /dev/null @@ -1,53 +0,0 @@ -{ - "id": 0, - "uid": "42", - "user_id": 1, - "namespace": "octocat", - "name": "hello-world", - "slug": "octocat/hello-world", - "scm": "git", - "git_http_url": "https://github.com/octocat/hello-world.git", - "git_ssh_url": "git@github.com:octocat/hello-world.git", - "link": "https://github.com/octocat/hello-world", - "default_branch": "master", - "private": true, - "visibility": "internal", - "active": true, - "config_path": ".drone.yml", - "trusted": true, - "timeout": 900, - "counter": 22, - "synced": 0, - "created": 0, - "updated": 0, - "version": 0, - "endpoints": { - "approvals": { - "endpoint": "https://requestb.in/912ec803", - "signer": "22cf869033dc0b1a0da65d2c478e4bf82ad9470d4c34e1d9b583af0b0840380d", - "skip_verify": true - }, - "config": { - "endpoint": "https://requestb.in/b2ce49e4", - "signer": "0860baa72c42a0e438f9361b077235858bfb0e526a14c609d28b695e212cd9e1", - "skip_verify": true - }, - "registry": { - "endpoint": "https://requestb.in/a541068d", - "secret": "236ce83561fae0a93f25777e2e60bb59", - "signer": "0860baa72c42a0e438f9361b077235858bfb0e526a14c609d28b695e212cd9e1", - "skip_verify": true - }, - "secrets": { - "endpoint": "https://requestb.in/4c64bfa5", - "secret": "2cb65bd9607673c3b6ab7b103b451e54", - "signer": "5e21d95b66146f1ea3008be4b6d89ef5a898c1489f449353102fb43bb3f11595", - "skip_verify": true - }, - "webhook": { - "endpoint": "https://requestb.in/0043cf7d", - "signer": "d1b9f36dec2de09a2a7f7cc9ed3619b50de5fee8af8ad739ecf1fa53356984e0", - "skip_verify": true - } - } -} \ No newline at end of file diff --git a/store/repos/testdata/repo.json.golden b/store/repos/testdata/repo.json.golden deleted file mode 100644 index 9ddf8735ef..0000000000 --- a/store/repos/testdata/repo.json.golden +++ /dev/null @@ -1,53 +0,0 @@ -{ - "id": 0, - "uid": "42", - "user_id": 1, - "namespace": "octocat", - "name": "hello-world", - "slug": "octocat/hello-world", - "scm": "git", - "git_http_url": "https://github.com/octocat/hello-world.git", - "git_ssh_url": "git@github.com:octocat/hello-world.git", - "link": "https://github.com/octocat/hello-world", - "default_branch": "master", - "private": true, - "visibility": "internal", - "active": true, - "config_path": ".drone.yml", - "trusted": true, - "timeout": 900, - "counter": 22, - "synced": 0, - "created": 0, - "updated": 0, - "version": 1, - "endpoints": { - "approvals": { - "endpoint": "https://requestb.in/912ec803", - "signer": "22cf869033dc0b1a0da65d2c478e4bf82ad9470d4c34e1d9b583af0b0840380d", - "skip_verify": true - }, - "config": { - "endpoint": "https://requestb.in/b2ce49e4", - "signer": "0860baa72c42a0e438f9361b077235858bfb0e526a14c609d28b695e212cd9e1", - "skip_verify": true - }, - "registry": { - "endpoint": "https://requestb.in/a541068d", - "secret": "236ce83561fae0a93f25777e2e60bb59", - "signer": "0860baa72c42a0e438f9361b077235858bfb0e526a14c609d28b695e212cd9e1", - "skip_verify": true - }, - "secrets": { - "endpoint": "https://requestb.in/4c64bfa5", - "secret": "2cb65bd9607673c3b6ab7b103b451e54", - "signer": "5e21d95b66146f1ea3008be4b6d89ef5a898c1489f449353102fb43bb3f11595", - "skip_verify": true - }, - "webhook": { - "endpoint": "https://requestb.in/0043cf7d", - "signer": "d1b9f36dec2de09a2a7f7cc9ed3619b50de5fee8af8ad739ecf1fa53356984e0", - "skip_verify": true - } - } -} \ No newline at end of file diff --git 
a/store/repos/type.go b/store/repos/type.go deleted file mode 100644 index f66cb64f0b..0000000000 --- a/store/repos/type.go +++ /dev/null @@ -1,105 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package repos - -import ( - "database/sql" - "encoding/json" - - "github.com/drone/drone/core" - - "github.com/jmoiron/sqlx/types" -) - -type nullBuild struct { - ID sql.NullInt64 - RepoID sql.NullInt64 - ConfigID sql.NullInt64 - Trigger sql.NullString - Number sql.NullInt64 - Parent sql.NullInt64 - Status sql.NullString - Error sql.NullString - Event sql.NullString - Action sql.NullString - Link sql.NullString - Timestamp sql.NullInt64 - Title sql.NullString - Message sql.NullString - Before sql.NullString - After sql.NullString - Ref sql.NullString - Fork sql.NullString - Source sql.NullString - Target sql.NullString - Author sql.NullString - AuthorName sql.NullString - AuthorEmail sql.NullString - AuthorAvatar sql.NullString - Sender sql.NullString - Params types.JSONText - Cron sql.NullString - Deploy sql.NullString - DeployID sql.NullInt64 - Debug sql.NullBool - Started sql.NullInt64 - Finished sql.NullInt64 - Created sql.NullInt64 - Updated sql.NullInt64 - Version sql.NullInt64 -} - -func (b *nullBuild) value() *core.Build { - params := map[string]string{} - json.Unmarshal(b.Params, ¶ms) - - build := &core.Build{ - ID: b.ID.Int64, - RepoID: b.RepoID.Int64, - Trigger: b.Trigger.String, - Number: b.Number.Int64, - Parent: b.Parent.Int64, - Status: b.Status.String, - Error: b.Error.String, - Event: b.Event.String, - Action: b.Action.String, - Link: b.Link.String, - Timestamp: b.Timestamp.Int64, - Title: b.Title.String, - Message: b.Message.String, - Before: b.Before.String, - After: b.After.String, - Ref: b.Ref.String, - Fork: b.Fork.String, - Source: b.Source.String, - Target: b.Target.String, - Author: b.Author.String, - AuthorName: b.AuthorName.String, - AuthorEmail: b.AuthorEmail.String, - AuthorAvatar: b.AuthorAvatar.String, - Sender: b.Sender.String, - Params: params, - Cron: b.Cron.String, - Deploy: b.Deploy.String, - DeployID: b.DeployID.Int64, - Debug: b.Debug.Bool, - Started: b.Started.Int64, - Finished: b.Finished.Int64, - Created: b.Created.Int64, - Updated: b.Updated.Int64, - Version: b.Version.Int64, - } - return build -} diff --git a/store/secret/global/scan.go b/store/secret/global/scan.go deleted file mode 100644 index 8b975aac91..0000000000 --- a/store/secret/global/scan.go +++ /dev/null @@ -1,74 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package global - -import ( - "database/sql" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/shared/db" - "github.com/drone/drone/store/shared/encrypt" -) - -// helper function converts the User structure to a set -// of named query parameters. 
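The nullBuild wrapper above exists because the LEFT OUTER JOIN in queryRepoWithBuild can produce repository rows with no build at all; every joined column is scanned into a sql.Null* field and only converted to a concrete build when a joined row was actually present. A compressed, hypothetical illustration of the same pattern (not the removed code, just the idea):

    package example

    import "database/sql"

    // Build is a trimmed-down, hypothetical stand-in for core.Build.
    type Build struct {
        ID     int64
        Status string
    }

    // scanRepoWithBuild scans one row from a query of the form
    // "SELECT repo_id, build_id, build_status FROM repos LEFT OUTER JOIN builds ...".
    func scanRepoWithBuild(rows *sql.Rows) (repoID int64, build *Build, err error) {
        var (
            buildID     sql.NullInt64
            buildStatus sql.NullString
        )
        if err = rows.Scan(&repoID, &buildID, &buildStatus); err != nil {
            return 0, nil, err
        }
        // A repository with no builds scans NULLs into the Null* wrappers;
        // only construct a value when the joined row was really there.
        if buildID.Valid {
            build = &Build{ID: buildID.Int64, Status: buildStatus.String}
        }
        return repoID, build, nil
    }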
-func toParams(encrypt encrypt.Encrypter, secret *core.Secret) (map[string]interface{}, error) { - ciphertext, err := encrypt.Encrypt(secret.Data) - if err != nil { - return nil, err - } - return map[string]interface{}{ - "secret_id": secret.ID, - "secret_namespace": secret.Namespace, - "secret_name": secret.Name, - "secret_type": secret.Type, - "secret_data": ciphertext, - "secret_pull_request": secret.PullRequest, - "secret_pull_request_push": secret.PullRequestPush, - }, nil -} - -// helper function scans the sql.Row and copies the column -// values to the destination object. -func scanRow(encrypt encrypt.Encrypter, scanner db.Scanner, dst *core.Secret) error { - var ciphertext []byte - err := scanner.Scan( - &dst.ID, - &dst.Namespace, - &dst.Name, - &dst.Type, - &ciphertext, - &dst.PullRequest, - &dst.PullRequestPush, - ) - if err != nil { - return err - } - plaintext, err := encrypt.Decrypt(ciphertext) - if err != nil { - return err - } - dst.Data = plaintext - return nil -} - -// helper function scans the sql.Row and copies the column -// values to the destination object. -func scanRows(encrypt encrypt.Encrypter, rows *sql.Rows) ([]*core.Secret, error) { - defer rows.Close() - - secrets := []*core.Secret{} - for rows.Next() { - sec := new(core.Secret) - err := scanRow(encrypt, rows, sec) - if err != nil { - return nil, err - } - secrets = append(secrets, sec) - } - return secrets, nil -} diff --git a/store/secret/global/secret.go b/store/secret/global/secret.go deleted file mode 100644 index 1f37e56f97..0000000000 --- a/store/secret/global/secret.go +++ /dev/null @@ -1,233 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package global - -import ( - "context" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/shared/db" - "github.com/drone/drone/store/shared/encrypt" -) - -// New returns a new global Secret database store. -func New(db *db.DB, enc encrypt.Encrypter) core.GlobalSecretStore { - return &secretStore{ - db: db, - enc: enc, - } -} - -type secretStore struct { - db *db.DB - enc encrypt.Encrypter -} - -func (s *secretStore) List(ctx context.Context, namespace string) ([]*core.Secret, error) { - var out []*core.Secret - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - params := map[string]interface{}{"secret_namespace": namespace} - stmt, args, err := binder.BindNamed(queryNamespace, params) - if err != nil { - return err - } - rows, err := queryer.Query(stmt, args...) - if err != nil { - return err - } - out, err = scanRows(s.enc, rows) - return err - }) - return out, err -} - -func (s *secretStore) ListAll(ctx context.Context) ([]*core.Secret, error) { - var out []*core.Secret - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - rows, err := queryer.Query(queryAll) - if err != nil { - return err - } - out, err = scanRows(s.enc, rows) - return err - }) - return out, err -} - -func (s *secretStore) Find(ctx context.Context, id int64) (*core.Secret, error) { - out := &core.Secret{ID: id} - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - params, err := toParams(s.enc, out) - if err != nil { - return err - } - query, args, err := binder.BindNamed(queryKey, params) - if err != nil { - return err - } - row := queryer.QueryRow(query, args...) 
- return scanRow(s.enc, row, out) - }) - return out, err -} - -func (s *secretStore) FindName(ctx context.Context, namespace, name string) (*core.Secret, error) { - out := &core.Secret{Name: name, Namespace: namespace} - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - params, err := toParams(s.enc, out) - if err != nil { - return err - } - query, args, err := binder.BindNamed(queryName, params) - if err != nil { - return err - } - row := queryer.QueryRow(query, args...) - return scanRow(s.enc, row, out) - }) - return out, err -} - -func (s *secretStore) Create(ctx context.Context, secret *core.Secret) error { - if s.db.Driver() == db.Postgres { - return s.createPostgres(ctx, secret) - } - return s.create(ctx, secret) -} - -func (s *secretStore) create(ctx context.Context, secret *core.Secret) error { - return s.db.Lock(func(execer db.Execer, binder db.Binder) error { - params, err := toParams(s.enc, secret) - if err != nil { - return err - } - stmt, args, err := binder.BindNamed(stmtInsert, params) - if err != nil { - return err - } - res, err := execer.Exec(stmt, args...) - if err != nil { - return err - } - secret.ID, err = res.LastInsertId() - return err - }) -} - -func (s *secretStore) createPostgres(ctx context.Context, secret *core.Secret) error { - return s.db.Lock(func(execer db.Execer, binder db.Binder) error { - params, err := toParams(s.enc, secret) - if err != nil { - return err - } - stmt, args, err := binder.BindNamed(stmtInsertPg, params) - if err != nil { - return err - } - return execer.QueryRow(stmt, args...).Scan(&secret.ID) - }) -} - -func (s *secretStore) Update(ctx context.Context, secret *core.Secret) error { - return s.db.Lock(func(execer db.Execer, binder db.Binder) error { - params, err := toParams(s.enc, secret) - if err != nil { - return err - } - stmt, args, err := binder.BindNamed(stmtUpdate, params) - if err != nil { - return err - } - _, err = execer.Exec(stmt, args...) - return err - }) -} - -func (s *secretStore) Delete(ctx context.Context, secret *core.Secret) error { - return s.db.Lock(func(execer db.Execer, binder db.Binder) error { - params, err := toParams(s.enc, secret) - if err != nil { - return err - } - stmt, args, err := binder.BindNamed(stmtDelete, params) - if err != nil { - return err - } - _, err = execer.Exec(stmt, args...) 
- return err - }) -} - -const queryBase = ` -SELECT - secret_id -,secret_namespace -,secret_name -,secret_type -,secret_data -,secret_pull_request -,secret_pull_request_push -` - -const queryKey = queryBase + ` -FROM orgsecrets -WHERE secret_id = :secret_id -LIMIT 1 -` - -const queryAll = queryBase + ` -FROM orgsecrets -ORDER BY secret_name -` - -const queryName = queryBase + ` -FROM orgsecrets -WHERE secret_name = :secret_name - AND secret_namespace = :secret_namespace -LIMIT 1 -` - -const queryNamespace = queryBase + ` -FROM orgsecrets -WHERE secret_namespace = :secret_namespace -ORDER BY secret_name -` - -const stmtUpdate = ` -UPDATE orgsecrets SET - secret_data = :secret_data -,secret_pull_request = :secret_pull_request -,secret_pull_request_push = :secret_pull_request_push -WHERE secret_id = :secret_id -` - -const stmtDelete = ` -DELETE FROM orgsecrets -WHERE secret_id = :secret_id -` - -const stmtInsert = ` -INSERT INTO orgsecrets ( - secret_namespace -,secret_name -,secret_type -,secret_data -,secret_pull_request -,secret_pull_request_push -) VALUES ( - :secret_namespace -,:secret_name -,:secret_type -,:secret_data -,:secret_pull_request -,:secret_pull_request_push -) -` - -const stmtInsertPg = stmtInsert + ` -RETURNING secret_id -` diff --git a/store/secret/global/secret_oss.go b/store/secret/global/secret_oss.go deleted file mode 100644 index 1ed7270324..0000000000 --- a/store/secret/global/secret_oss.go +++ /dev/null @@ -1,60 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build oss - -package global - -import ( - "context" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/shared/db" - "github.com/drone/drone/store/shared/encrypt" -) - -// New returns a new Secret database store. -func New(db *db.DB, enc encrypt.Encrypter) core.GlobalSecretStore { - return new(noop) -} - -type noop struct{} - -func (noop) List(context.Context, string) ([]*core.Secret, error) { - return nil, nil -} - -func (noop) ListAll(context.Context) ([]*core.Secret, error) { - return nil, nil -} - -func (noop) Find(context.Context, int64) (*core.Secret, error) { - return nil, nil -} - -func (noop) FindName(context.Context, string, string) (*core.Secret, error) { - return nil, nil -} - -func (noop) Create(context.Context, *core.Secret) error { - return nil -} - -func (noop) Update(context.Context, *core.Secret) error { - return nil -} - -func (noop) Delete(context.Context, *core.Secret) error { - return nil -} diff --git a/store/secret/global/secret_test.go b/store/secret/global/secret_test.go deleted file mode 100644 index fbbb8f45fb..0000000000 --- a/store/secret/global/secret_test.go +++ /dev/null @@ -1,165 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
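The global secret store above delegates encryption at rest to the store/shared/encrypt package: toParams stores ciphertext in secret_data and scanRow decrypts it on the way out. For orientation only, the standard-library AES-GCM pattern such an encrypter typically wraps looks roughly like this (a generic sketch, not the drone encrypt implementation):

    package example

    import (
        "crypto/aes"
        "crypto/cipher"
        "crypto/rand"
        "io"
    )

    // seal encrypts plaintext with AES-GCM; the random nonce is prepended
    // to the ciphertext so open can recover it later.
    func seal(key []byte, plaintext string) ([]byte, error) {
        block, err := aes.NewCipher(key) // key must be 16, 24, or 32 bytes
        if err != nil {
            return nil, err
        }
        gcm, err := cipher.NewGCM(block)
        if err != nil {
            return nil, err
        }
        nonce := make([]byte, gcm.NonceSize())
        if _, err := io.ReadFull(rand.Reader, nonce); err != nil {
            return nil, err
        }
        return gcm.Seal(nonce, nonce, []byte(plaintext), nil), nil
    }

    // open reverses seal.
    func open(key, ciphertext []byte) (string, error) {
        block, err := aes.NewCipher(key)
        if err != nil {
            return "", err
        }
        gcm, err := cipher.NewGCM(block)
        if err != nil {
            return "", err
        }
        if len(ciphertext) < gcm.NonceSize() {
            return "", io.ErrUnexpectedEOF
        }
        nonce, body := ciphertext[:gcm.NonceSize()], ciphertext[gcm.NonceSize():]
        out, err := gcm.Open(nil, nonce, body, nil)
        return string(out), err
    }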
- -// +build !oss - -package global - -import ( - "context" - "database/sql" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/shared/db/dbtest" - "github.com/drone/drone/store/shared/encrypt" -) - -var noContext = context.TODO() - -func TestSecret(t *testing.T) { - conn, err := dbtest.Connect() - if err != nil { - t.Error(err) - return - } - defer func() { - dbtest.Reset(conn) - dbtest.Disconnect(conn) - }() - - store := New(conn, nil).(*secretStore) - store.enc, _ = encrypt.New("fb4b4d6267c8a5ce8231f8b186dbca92") - t.Run("Create", testSecretCreate(store)) -} - -func testSecretCreate(store *secretStore) func(t *testing.T) { - return func(t *testing.T) { - item := &core.Secret{ - Namespace: "octocat", - Name: "password", - Data: "correct-horse-battery-staple", - } - err := store.Create(noContext, item) - if err != nil { - t.Error(err) - } - if item.ID == 0 { - t.Errorf("Want secret ID assigned, got %d", item.ID) - } - - t.Run("Find", testSecretFind(store, item)) - t.Run("FindName", testSecretFindName(store)) - t.Run("List", testSecretList(store)) - t.Run("ListAll", testSecretListAll(store)) - t.Run("Update", testSecretUpdate(store)) - t.Run("Delete", testSecretDelete(store)) - } -} - -func testSecretFind(store *secretStore, secret *core.Secret) func(t *testing.T) { - return func(t *testing.T) { - item, err := store.Find(noContext, secret.ID) - if err != nil { - t.Error(err) - } else { - t.Run("Fields", testSecret(item)) - } - } -} - -func testSecretFindName(store *secretStore) func(t *testing.T) { - return func(t *testing.T) { - item, err := store.FindName(noContext, "octocat", "password") - if err != nil { - t.Error(err) - } else { - t.Run("Fields", testSecret(item)) - } - } -} - -func testSecretList(store *secretStore) func(t *testing.T) { - return func(t *testing.T) { - list, err := store.List(noContext, "octocat") - if err != nil { - t.Error(err) - return - } - if got, want := len(list), 1; got != want { - t.Errorf("Want count %d, got %d", want, got) - } else { - t.Run("Fields", testSecret(list[0])) - } - } -} - -func testSecretListAll(store *secretStore) func(t *testing.T) { - return func(t *testing.T) { - list, err := store.ListAll(noContext) - if err != nil { - t.Error(err) - return - } - if got, want := len(list), 1; got != want { - t.Errorf("Want count %d, got %d", want, got) - } else { - t.Run("Fields", testSecret(list[0])) - } - } -} - -func testSecretUpdate(store *secretStore) func(t *testing.T) { - return func(t *testing.T) { - before, err := store.FindName(noContext, "octocat", "password") - if err != nil { - t.Error(err) - return - } - err = store.Update(noContext, before) - if err != nil { - t.Error(err) - return - } - after, err := store.Find(noContext, before.ID) - if err != nil { - t.Error(err) - return - } - if after == nil { - t.Fail() - } - } -} - -func testSecretDelete(store *secretStore) func(t *testing.T) { - return func(t *testing.T) { - secret, err := store.FindName(noContext, "octocat", "password") - if err != nil { - t.Error(err) - return - } - err = store.Delete(noContext, secret) - if err != nil { - t.Error(err) - return - } - _, err = store.Find(noContext, secret.ID) - if got, want := sql.ErrNoRows, err; got != want { - t.Errorf("Want sql.ErrNoRows, got %v", got) - return - } - } -} - -func testSecret(item *core.Secret) func(t *testing.T) { - return func(t *testing.T) { - if got, want := item.Name, "password"; got != want { - t.Errorf("Want secret name %q, got %q", want, got) - } - if got, want := item.Data, 
"correct-horse-battery-staple"; got != want { - t.Errorf("Want secret data %q, got %q", want, got) - } - } -} diff --git a/store/secret/scan.go b/store/secret/scan.go deleted file mode 100644 index f83df2ac8e..0000000000 --- a/store/secret/scan.go +++ /dev/null @@ -1,72 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package secret - -import ( - "database/sql" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/shared/db" - "github.com/drone/drone/store/shared/encrypt" -) - -// helper function converts the User structure to a set -// of named query parameters. -func toParams(encrypt encrypt.Encrypter, secret *core.Secret) (map[string]interface{}, error) { - ciphertext, err := encrypt.Encrypt(secret.Data) - if err != nil { - return nil, err - } - return map[string]interface{}{ - "secret_id": secret.ID, - "secret_repo_id": secret.RepoID, - "secret_name": secret.Name, - "secret_data": ciphertext, - "secret_pull_request": secret.PullRequest, - "secret_pull_request_push": secret.PullRequestPush, - }, nil -} - -// helper function scans the sql.Row and copies the column -// values to the destination object. -func scanRow(encrypt encrypt.Encrypter, scanner db.Scanner, dst *core.Secret) error { - var ciphertext []byte - err := scanner.Scan( - &dst.ID, - &dst.RepoID, - &dst.Name, - &ciphertext, - &dst.PullRequest, - &dst.PullRequestPush, - ) - if err != nil { - return err - } - plaintext, err := encrypt.Decrypt(ciphertext) - if err != nil { - return err - } - dst.Data = plaintext - return nil -} - -// helper function scans the sql.Row and copies the column -// values to the destination object. -func scanRows(encrypt encrypt.Encrypter, rows *sql.Rows) ([]*core.Secret, error) { - defer rows.Close() - - secrets := []*core.Secret{} - for rows.Next() { - sec := new(core.Secret) - err := scanRow(encrypt, rows, sec) - if err != nil { - return nil, err - } - secrets = append(secrets, sec) - } - return secrets, nil -} diff --git a/store/secret/secret.go b/store/secret/secret.go deleted file mode 100644 index 062eb3c47d..0000000000 --- a/store/secret/secret.go +++ /dev/null @@ -1,212 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package secret - -import ( - "context" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/shared/db" - "github.com/drone/drone/store/shared/encrypt" -) - -// New returns a new Secret database store. -func New(db *db.DB, enc encrypt.Encrypter) core.SecretStore { - return &secretStore{ - db: db, - enc: enc, - } -} - -type secretStore struct { - db *db.DB - enc encrypt.Encrypter -} - -func (s *secretStore) List(ctx context.Context, id int64) ([]*core.Secret, error) { - var out []*core.Secret - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - params := map[string]interface{}{"secret_repo_id": id} - stmt, args, err := binder.BindNamed(queryRepo, params) - if err != nil { - return err - } - rows, err := queryer.Query(stmt, args...) 
- if err != nil { - return err - } - out, err = scanRows(s.enc, rows) - return err - }) - return out, err -} - -func (s *secretStore) Find(ctx context.Context, id int64) (*core.Secret, error) { - out := &core.Secret{ID: id} - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - params, err := toParams(s.enc, out) - if err != nil { - return err - } - query, args, err := binder.BindNamed(queryKey, params) - if err != nil { - return err - } - row := queryer.QueryRow(query, args...) - return scanRow(s.enc, row, out) - }) - return out, err -} - -func (s *secretStore) FindName(ctx context.Context, id int64, name string) (*core.Secret, error) { - out := &core.Secret{Name: name, RepoID: id} - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - params, err := toParams(s.enc, out) - if err != nil { - return err - } - query, args, err := binder.BindNamed(queryName, params) - if err != nil { - return err - } - row := queryer.QueryRow(query, args...) - return scanRow(s.enc, row, out) - }) - return out, err -} - -func (s *secretStore) Create(ctx context.Context, secret *core.Secret) error { - if s.db.Driver() == db.Postgres { - return s.createPostgres(ctx, secret) - } - return s.create(ctx, secret) -} - -func (s *secretStore) create(ctx context.Context, secret *core.Secret) error { - return s.db.Lock(func(execer db.Execer, binder db.Binder) error { - params, err := toParams(s.enc, secret) - if err != nil { - return err - } - stmt, args, err := binder.BindNamed(stmtInsert, params) - if err != nil { - return err - } - res, err := execer.Exec(stmt, args...) - if err != nil { - return err - } - secret.ID, err = res.LastInsertId() - return err - }) -} - -func (s *secretStore) createPostgres(ctx context.Context, secret *core.Secret) error { - return s.db.Lock(func(execer db.Execer, binder db.Binder) error { - params, err := toParams(s.enc, secret) - if err != nil { - return err - } - stmt, args, err := binder.BindNamed(stmtInsertPg, params) - if err != nil { - return err - } - return execer.QueryRow(stmt, args...).Scan(&secret.ID) - }) -} - -func (s *secretStore) Update(ctx context.Context, secret *core.Secret) error { - return s.db.Lock(func(execer db.Execer, binder db.Binder) error { - params, err := toParams(s.enc, secret) - if err != nil { - return err - } - stmt, args, err := binder.BindNamed(stmtUpdate, params) - if err != nil { - return err - } - _, err = execer.Exec(stmt, args...) - return err - }) -} - -func (s *secretStore) Delete(ctx context.Context, secret *core.Secret) error { - return s.db.Lock(func(execer db.Execer, binder db.Binder) error { - params, err := toParams(s.enc, secret) - if err != nil { - return err - } - stmt, args, err := binder.BindNamed(stmtDelete, params) - if err != nil { - return err - } - _, err = execer.Exec(stmt, args...) 
- return err - }) -} - -const queryBase = ` -SELECT - secret_id -,secret_repo_id -,secret_name -,secret_data -,secret_pull_request -,secret_pull_request_push -` - -const queryKey = queryBase + ` -FROM secrets -WHERE secret_id = :secret_id -LIMIT 1 -` - -const queryName = queryBase + ` -FROM secrets -WHERE secret_name = :secret_name - AND secret_repo_id = :secret_repo_id -LIMIT 1 -` - -const queryRepo = queryBase + ` -FROM secrets -WHERE secret_repo_id = :secret_repo_id -ORDER BY secret_name -` - -const stmtUpdate = ` -UPDATE secrets SET - secret_data = :secret_data -,secret_pull_request = :secret_pull_request -,secret_pull_request_push = :secret_pull_request_push -WHERE secret_id = :secret_id -` - -const stmtDelete = ` -DELETE FROM secrets -WHERE secret_id = :secret_id -` - -const stmtInsert = ` -INSERT INTO secrets ( - secret_repo_id -,secret_name -,secret_data -,secret_pull_request -,secret_pull_request_push -) VALUES ( - :secret_repo_id -,:secret_name -,:secret_data -,:secret_pull_request -,:secret_pull_request_push -) -` - -const stmtInsertPg = stmtInsert + ` -RETURNING secret_id -` diff --git a/store/secret/secret_oss.go b/store/secret/secret_oss.go deleted file mode 100644 index dff3486433..0000000000 --- a/store/secret/secret_oss.go +++ /dev/null @@ -1,56 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build oss - -package secret - -import ( - "context" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/shared/db" - "github.com/drone/drone/store/shared/encrypt" -) - -// New returns a new Secret database store. -func New(db *db.DB, enc encrypt.Encrypter) core.SecretStore { - return new(noop) -} - -type noop struct{} - -func (noop) List(ctx context.Context, id int64) ([]*core.Secret, error) { - return nil, nil -} - -func (noop) Find(ctx context.Context, id int64) (*core.Secret, error) { - return nil, nil -} - -func (noop) FindName(ctx context.Context, id int64, name string) (*core.Secret, error) { - return nil, nil -} - -func (noop) Create(ctx context.Context, secret *core.Secret) error { - return nil -} - -func (noop) Update(context.Context, *core.Secret) error { - return nil -} - -func (noop) Delete(context.Context, *core.Secret) error { - return nil -} diff --git a/store/secret/secret_test.go b/store/secret/secret_test.go deleted file mode 100644 index 08fcaaf2ea..0000000000 --- a/store/secret/secret_test.go +++ /dev/null @@ -1,238 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
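As in the repository store, Create branches on the driver because the Postgres driver does not support LastInsertId: Postgres gets an INSERT ... RETURNING and the other drivers read the auto-increment ID from the result. A stripped-down sketch of that branch against plain database/sql, with a simplified two-column table used purely for illustration:

    package example

    import "database/sql"

    // insertSecret shows the driver split only; the real store binds the full
    // column list through named parameters.
    func insertSecret(conn *sql.DB, driver, name string, data []byte) (int64, error) {
        if driver == "postgres" {
            var id int64
            err := conn.QueryRow(
                "INSERT INTO secrets (secret_name, secret_data) VALUES ($1, $2) RETURNING secret_id",
                name, data).Scan(&id)
            return id, err
        }
        res, err := conn.Exec(
            "INSERT INTO secrets (secret_name, secret_data) VALUES (?, ?)", name, data)
        if err != nil {
            return 0, err
        }
        return res.LastInsertId()
    }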
- -// +build !oss - -package secret - -import ( - "context" - "database/sql" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/repos" - "github.com/drone/drone/store/shared/db/dbtest" - "github.com/drone/drone/store/shared/encrypt" -) - -var noContext = context.TODO() - -func TestSecret(t *testing.T) { - conn, err := dbtest.Connect() - if err != nil { - t.Error(err) - return - } - defer func() { - dbtest.Reset(conn) - dbtest.Disconnect(conn) - }() - - // seeds the database with a dummy repository. - repo := &core.Repository{UID: "1", Slug: "octocat/hello-world"} - repos := repos.New(conn) - if err := repos.Create(noContext, repo); err != nil { - t.Error(err) - } - - store := New(conn, nil).(*secretStore) - store.enc, _ = encrypt.New("fb4b4d6267c8a5ce8231f8b186dbca92") - t.Run("Create", testSecretCreate(store, repos, repo)) -} - -func testSecretCreate(store *secretStore, repos core.RepositoryStore, repo *core.Repository) func(t *testing.T) { - return func(t *testing.T) { - item := &core.Secret{ - RepoID: repo.ID, - Name: "password", - Data: "correct-horse-battery-staple", - } - err := store.Create(noContext, item) - if err != nil { - t.Error(err) - } - if item.ID == 0 { - t.Errorf("Want secret ID assigned, got %d", item.ID) - } - - t.Run("Find", testSecretFind(store, item)) - t.Run("FindName", testSecretFindName(store, repo)) - t.Run("List", testSecretList(store, repo)) - t.Run("Update", testSecretUpdate(store, repo)) - t.Run("Delete", testSecretDelete(store, repo)) - t.Run("Fkey", testSecretForeignKey(store, repos, repo)) - } -} - -func testSecretFind(store *secretStore, secret *core.Secret) func(t *testing.T) { - return func(t *testing.T) { - item, err := store.Find(noContext, secret.ID) - if err != nil { - t.Error(err) - } else { - t.Run("Fields", testSecret(item)) - } - } -} - -func testSecretFindName(store *secretStore, repo *core.Repository) func(t *testing.T) { - return func(t *testing.T) { - item, err := store.FindName(noContext, repo.ID, "password") - if err != nil { - t.Error(err) - } else { - t.Run("Fields", testSecret(item)) - } - } -} - -func testSecretList(store *secretStore, repo *core.Repository) func(t *testing.T) { - return func(t *testing.T) { - list, err := store.List(noContext, repo.ID) - if err != nil { - t.Error(err) - return - } - if got, want := len(list), 1; got != want { - t.Errorf("Want count %d, got %d", want, got) - } else { - t.Run("Fields", testSecret(list[0])) - } - } -} - -func testSecretUpdate(store *secretStore, repo *core.Repository) func(t *testing.T) { - return func(t *testing.T) { - before, err := store.FindName(noContext, repo.ID, "password") - if err != nil { - t.Error(err) - return - } - err = store.Update(noContext, before) - if err != nil { - t.Error(err) - return - } - after, err := store.Find(noContext, before.ID) - if err != nil { - t.Error(err) - return - } - if after == nil { - t.Fail() - } - } -} - -func testSecretDelete(store *secretStore, repo *core.Repository) func(t *testing.T) { - return func(t *testing.T) { - secret, err := store.FindName(noContext, repo.ID, "password") - if err != nil { - t.Error(err) - return - } - err = store.Delete(noContext, secret) - if err != nil { - t.Error(err) - return - } - _, err = store.Find(noContext, secret.ID) - if got, want := sql.ErrNoRows, err; got != want { - t.Errorf("Want sql.ErrNoRows, got %v", got) - return - } - } -} - -func testSecretForeignKey(store *secretStore, repos core.RepositoryStore, repo *core.Repository) func(t *testing.T) { - return func(t *testing.T) { 
- item := &core.Secret{ - RepoID: repo.ID, - Name: "password", - Data: "correct-horse-battery-staple", - } - store.Create(noContext, item) - before, _ := store.List(noContext, repo.ID) - if len(before) == 0 { - t.Errorf("Want non-empty secret list") - return - } - - err := repos.Delete(noContext, repo) - if err != nil { - t.Error(err) - return - } - after, _ := store.List(noContext, repo.ID) - if len(after) != 0 { - t.Errorf("Want empty secret list") - } - } -} - -func testSecret(item *core.Secret) func(t *testing.T) { - return func(t *testing.T) { - if got, want := item.Name, "password"; got != want { - t.Errorf("Want secret name %q, got %q", want, got) - } - if got, want := item.Data, "correct-horse-battery-staple"; got != want { - t.Errorf("Want secret data %q, got %q", want, got) - } - } -} - -// The purpose of this unit test is to ensure that plaintext -// data can still be read from the database if encryption is -// added at a later time. -func TestSecretCryptoChange(t *testing.T) { - conn, err := dbtest.Connect() - if err != nil { - t.Error(err) - return - } - defer func() { - dbtest.Reset(conn) - dbtest.Disconnect(conn) - }() - - // seeds the database with a dummy repository. - repo := &core.Repository{UID: "1", Slug: "octocat/hello-world"} - repos := repos.New(conn) - if err := repos.Create(noContext, repo); err != nil { - t.Error(err) - } - - store := New(conn, nil).(*secretStore) - store.enc, _ = encrypt.New("") - - item := &core.Secret{ - RepoID: repo.ID, - Name: "password", - Data: "correct-horse-battery-staple", - } - - // create the secret with the secret value stored as plaintext - err = store.Create(noContext, item) - if err != nil { - t.Error(err) - return - } - if item.ID == 0 { - t.Errorf("Want secret ID assigned, got %d", item.ID) - return - } - - // update the store to use encryption - store.enc, _ = encrypt.New("fb4b4d6267c8a5ce8231f8b186dbca92") - store.enc.(*encrypt.Aesgcm).Compat = true - - // fetch the secret from the database - got, err := store.Find(noContext, item.ID) - if err != nil { - t.Error(err) - } else { - t.Run("Fields", testSecret(got)) - } -} diff --git a/store/shared/db/conn.go b/store/shared/db/conn.go deleted file mode 100644 index c637b829f3..0000000000 --- a/store/shared/db/conn.go +++ /dev/null @@ -1,86 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package db - -import ( - "database/sql" - "sync" - "time" - - "github.com/jmoiron/sqlx" - - "github.com/drone/drone/store/shared/migrate/mysql" - "github.com/drone/drone/store/shared/migrate/postgres" - "github.com/drone/drone/store/shared/migrate/sqlite" -) - -// Connect to a database and verify with a ping. 
-func Connect(driver, datasource string, maxOpenConnections int) (*DB, error) { - db, err := sql.Open(driver, datasource) - if err != nil { - return nil, err - } - switch driver { - case "mysql": - db.SetMaxIdleConns(0) - } - if err := pingDatabase(db); err != nil { - return nil, err - } - if err := setupDatabase(db, driver); err != nil { - return nil, err - } - // generally set to 0, user configured for larger installs - db.SetMaxOpenConns(maxOpenConnections) - - var engine Driver - var locker Locker - switch driver { - case "mysql": - engine = Mysql - locker = &nopLocker{} - case "postgres": - engine = Postgres - locker = &nopLocker{} - default: - engine = Sqlite - locker = &sync.RWMutex{} - } - - return &DB{ - conn: sqlx.NewDb(db, driver), - lock: locker, - driver: engine, - }, nil -} - -// helper function to ping the database with backoff to ensure -// a connection can be established before we proceed with the -// database setup and migration. -func pingDatabase(db *sql.DB) (err error) { - for i := 0; i < 30; i++ { - err = db.Ping() - if err == nil { - return - } - time.Sleep(time.Second) - } - return -} - -// helper function to setup the database by performing automated -// database migration steps. -func setupDatabase(db *sql.DB, driver string) error { - switch driver { - case "mysql": - return mysql.Migrate(db) - case "postgres": - return postgres.Migrate(db) - default: - return sqlite.Migrate(db) - } -} diff --git a/store/shared/db/conn_oss.go b/store/shared/db/conn_oss.go deleted file mode 100644 index 79529766b6..0000000000 --- a/store/shared/db/conn_oss.go +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build oss - -package db - -import ( - "database/sql" - "sync" - - "github.com/jmoiron/sqlx" - - "github.com/drone/drone/store/shared/migrate/sqlite" -) - -// Connect to an embedded sqlite database. -func Connect(driver, datasource string, maxOpenConnections int) (*DB, error) { - db, err := sql.Open(driver, datasource) - if err != nil { - return nil, err - } - - db.SetMaxOpenConns(maxOpenConnections) - - if err := sqlite.Migrate(db); err != nil { - return nil, err - } - return &DB{ - conn: sqlx.NewDb(db, driver), - lock: &sync.RWMutex{}, - driver: Sqlite, - }, nil -} diff --git a/store/shared/db/conn_test.go b/store/shared/db/conn_test.go deleted file mode 100644 index 9355b27059..0000000000 --- a/store/shared/db/conn_test.go +++ /dev/null @@ -1,5 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package db diff --git a/store/shared/db/db.go b/store/shared/db/db.go deleted file mode 100644 index eb54fadf02..0000000000 --- a/store/shared/db/db.go +++ /dev/null @@ -1,134 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package db - -import ( - "database/sql" - "runtime/debug" - - "github.com/jmoiron/sqlx" -) - -// Driver defines the database driver. -type Driver int - -// Database driver enums. -const ( - Sqlite = iota + 1 - Mysql - Postgres -) - -type ( - // A Scanner represents an object that can be scanned - // for values. - Scanner interface { - Scan(dest ...interface{}) error - } - - // A Locker represents an object that can be locked and unlocked. - Locker interface { - Lock() - Unlock() - RLock() - RUnlock() - } - - // Binder interface defines database field bindings. - Binder interface { - BindNamed(query string, arg interface{}) (string, []interface{}, error) - } - - // Queryer interface defines a set of methods for - // querying the database. - Queryer interface { - Query(query string, args ...interface{}) (*sql.Rows, error) - QueryRow(query string, args ...interface{}) *sql.Row - } - - // Execer interface defines a set of methods for executing - // read and write commands against the database. - Execer interface { - Queryer - Exec(query string, args ...interface{}) (sql.Result, error) - } - - // DB is a pool of zero or more underlying connections to - // the drone database. - DB struct { - conn *sqlx.DB - lock Locker - driver Driver - } -) - -// View executes a function within the context of a managed read-only -// transaction. Any error that is returned from the function is returned -// from the View() method. -func (db *DB) View(fn func(Queryer, Binder) error) error { - db.lock.RLock() - err := fn(db.conn, db.conn) - db.lock.RUnlock() - return err -} - -// Lock obtains a write lock to the database (sqlite only) and executes -// a function. Any error that is returned from the function is returned -// from the Lock() method. -func (db *DB) Lock(fn func(Execer, Binder) error) error { - db.lock.Lock() - err := fn(db.conn, db.conn) - db.lock.Unlock() - return err -} - -// Update executes a function within the context of a read-write managed -// transaction. If no error is returned from the function then the -// transaction is committed. If an error is returned then the entire -// transaction is rolled back. Any error that is returned from the function -// or returned from the commit is returned from the Update() method. -func (db *DB) Update(fn func(Execer, Binder) error) (err error) { - db.lock.Lock() - defer db.lock.Unlock() - - tx, err := db.conn.Begin() - if err != nil { - return err - } - - defer func() { - if p := recover(); p != nil { - err = tx.Rollback() - debug.PrintStack() - } else if err != nil { - tx.Rollback() - } else { - err = tx.Commit() - } - }() - - err = fn(tx, db.conn) - return err -} - -// Driver returns the name of the SQL driver. -func (db *DB) Driver() Driver { - return db.driver -} - -// Close closes the database connection. -func (db *DB) Close() error { - return db.conn.Close() -} diff --git a/store/shared/db/db_test.go b/store/shared/db/db_test.go deleted file mode 100644 index 9355b27059..0000000000 --- a/store/shared/db/db_test.go +++ /dev/null @@ -1,5 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. 
-// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package db diff --git a/store/shared/db/dbtest/dbtest.go b/store/shared/db/dbtest/dbtest.go deleted file mode 100644 index 21a578e2f6..0000000000 --- a/store/shared/db/dbtest/dbtest.go +++ /dev/null @@ -1,69 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package dbtest - -import ( - "os" - "strconv" - - "github.com/drone/drone/store/shared/db" - - // blank imports are used to load database drivers - // for unit tests. Only unit tests should be importing - // this package. - _ "github.com/go-sql-driver/mysql" - _ "github.com/lib/pq" - _ "github.com/mattn/go-sqlite3" -) - -// Connect opens a new test database connection. -func Connect() (*db.DB, error) { - var ( - driver = "sqlite3" - config = ":memory:?_foreign_keys=1" - maxConnections = 0 - ) - if os.Getenv("DRONE_DATABASE_DRIVER") != "" { - driver = os.Getenv("DRONE_DATABASE_DRIVER") - config = os.Getenv("DRONE_DATABASE_DATASOURCE") - maxConnectionsString := os.Getenv("DRONE_DATABASE_MAX_CONNECTIONS") - maxConnections, _ = strconv.Atoi(maxConnectionsString) - } - return db.Connect(driver, config, maxConnections) -} - -// Reset resets the database state. -func Reset(d *db.DB) { - d.Lock(func(tx db.Execer, _ db.Binder) error { - tx.Exec("DELETE FROM cron") - tx.Exec("DELETE FROM cards") - tx.Exec("DELETE FROM logs") - tx.Exec("DELETE FROM steps") - tx.Exec("DELETE FROM stages") - tx.Exec("DELETE FROM latest") - tx.Exec("DELETE FROM builds") - tx.Exec("DELETE FROM perms") - tx.Exec("DELETE FROM repos") - tx.Exec("DELETE FROM users") - tx.Exec("DELETE FROM templates") - tx.Exec("DELETE FROM orgsecrets") - return nil - }) -} - -// Disconnect closes the database connection. -func Disconnect(d *db.DB) error { - return d.Close() -} diff --git a/store/shared/db/error.go b/store/shared/db/error.go deleted file mode 100644 index 91308e393b..0000000000 --- a/store/shared/db/error.go +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package db - -import "errors" - -// ErrOptimisticLock is returned by if the struct being -// modified has a Version field and the value is not equal -// to the current value in the database -var ErrOptimisticLock = errors.New("Optimistic Lock Error") diff --git a/store/shared/db/nop.go b/store/shared/db/nop.go deleted file mode 100644 index 85f60eb7c4..0000000000 --- a/store/shared/db/nop.go +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package db - -type nopLocker struct{} - -func (nopLocker) Lock() {} -func (nopLocker) Unlock() {} -func (nopLocker) RLock() {} -func (nopLocker) RUnlock() {} diff --git a/store/shared/encrypt/aesgcm.go b/store/shared/encrypt/aesgcm.go deleted file mode 100644 index 831799394d..0000000000 --- a/store/shared/encrypt/aesgcm.go +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package encrypt - -import ( - "crypto/cipher" - "crypto/rand" - "errors" - "io" -) - -// Aesgcm provides an encrypter that uses the aesgcm encryption -// algorithm. -type Aesgcm struct { - block cipher.Block - Compat bool -} - -// Encrypt encrypts the plaintext using aesgcm. -func (e *Aesgcm) Encrypt(plaintext string) ([]byte, error) { - gcm, err := cipher.NewGCM(e.block) - if err != nil { - return nil, err - } - - nonce := make([]byte, gcm.NonceSize()) - _, err = io.ReadFull(rand.Reader, nonce) - if err != nil { - return nil, err - } - - return gcm.Seal(nonce, nonce, []byte(plaintext), nil), nil -} - -// Decrypt decrypts the ciphertext using aesgcm. -func (e *Aesgcm) Decrypt(ciphertext []byte) (string, error) { - gcm, err := cipher.NewGCM(e.block) - if err != nil { - return "", err - } - - if len(ciphertext) < gcm.NonceSize() { - // if the decryption utility is running in compatibility - // mode, it will return the ciphertext as plain text if - // decryption fails. This should be used when running the - // database in mixed-mode, where there is a mix of encrypted - // and unencrypted content. - if e.Compat { - return string(ciphertext), nil - } - return "", errors.New("malformed ciphertext") - } - - plaintext, err := gcm.Open(nil, - ciphertext[:gcm.NonceSize()], - ciphertext[gcm.NonceSize():], - nil, - ) - // if the decryption utility is running in compatibility - // mode, it will return the ciphertext as plain text if - // decryption fails. 
This should be used when running the - // database in mixed-mode, where there is a mix of encrypted - // and unencrypted content. - if err != nil && e.Compat { - return string(ciphertext), nil - } - return string(plaintext), err -} diff --git a/store/shared/encrypt/aesgcm_test.go b/store/shared/encrypt/aesgcm_test.go deleted file mode 100644 index 7e917c64a8..0000000000 --- a/store/shared/encrypt/aesgcm_test.go +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package encrypt - -import "testing" - -func TestAesgcm(t *testing.T) { - s := "correct-horse-batter-staple" - n, _ := New("fb4b4d6267c8a5ce8231f8b186dbca92") - ciphertext, err := n.Encrypt(s) - if err != nil { - t.Error(err) - } - plaintext, err := n.Decrypt(ciphertext) - if err != nil { - t.Error(err) - } - if want, got := plaintext, s; got != want { - t.Errorf("Want plaintext %q, got %q", want, got) - } -} - -func TestAesgcmFail(t *testing.T) { - s := "correct-horse-batter-staple" - n, _ := New("ea1c5a9145c8a5ce8231f8b186dbcabc") - ciphertext, err := n.Encrypt(s) - if err != nil { - t.Error(err) - } - n, _ = New("fb4b4d6267c8a5ce8231f8b186dbca92") - _, err = n.Decrypt(ciphertext) - if err == nil { - t.Error("Expect error when encryption and decryption keys mismatch") - } -} - -func TestAesgcmCompat(t *testing.T) { - s := "correct-horse-batter-staple" - n, _ := New("") - ciphertext, err := n.Encrypt(s) - if err != nil { - t.Error(err) - } - n, _ = New("ea1c5a9145c8a5ce8231f8b186dbcabc") - n.(*Aesgcm).Compat = true - plaintext, err := n.Decrypt(ciphertext) - if err != nil { - t.Error(err) - } - if want, got := plaintext, s; got != want { - t.Errorf("Want plaintext %q, got %q", want, got) - } -} diff --git a/store/shared/encrypt/encrypt.go b/store/shared/encrypt/encrypt.go deleted file mode 100644 index c552bc6929..0000000000 --- a/store/shared/encrypt/encrypt.go +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package encrypt - -import ( - "crypto/aes" - "errors" -) - -// indicates key size is too small. -var errKeySize = errors.New("encryption key must be 32 bytes") - -// Encrypter provides database field encryption and decryption. -// Encrypted values are currently limited to strings, which is -// reflected in the interface design. -type Encrypter interface { - Encrypt(plaintext string) ([]byte, error) - Decrypt(ciphertext []byte) (string, error) -} - -// New provides a new database field encrypter. 
-func New(key string) (Encrypter, error) { - if key == "" { - return &none{}, nil - } - if len(key) != 32 { - return nil, errKeySize - } - b := []byte(key) - block, err := aes.NewCipher(b) - if err != nil { - return nil, err - } - return &Aesgcm{block: block}, nil -}
diff --git a/store/shared/encrypt/none.go b/store/shared/encrypt/none.go deleted file mode 100644 index b780d6cf4b..0000000000 --- a/store/shared/encrypt/none.go +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package encrypt - -// none is an encryption strategy that stores secret -// values in plain text. This is the default strategy -// when no key is specified. -type none struct { -} - -func (*none) Encrypt(plaintext string) ([]byte, error) { - return []byte(plaintext), nil -} - -func (*none) Decrypt(ciphertext []byte) (string, error) { - return string(ciphertext), nil -}
diff --git a/store/shared/encrypt/none_test.go b/store/shared/encrypt/none_test.go deleted file mode 100644 index b645dcbecc..0000000000 --- a/store/shared/encrypt/none_test.go +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -package encrypt - -import "testing" - -func TestNone(t *testing.T) { - n, _ := New("") - ciphertext, err := n.Encrypt("correct-horse-batter-staple") - if err != nil { - t.Error(err) - } - plaintext, err := n.Decrypt(ciphertext) - if err != nil { - t.Error(err) - } - if want, got := plaintext, "correct-horse-batter-staple"; got != want { - t.Errorf("Want plaintext %q, got %q", want, got) - } -}
diff --git a/store/shared/migrate/README.md b/store/shared/migrate/README.md deleted file mode 100644 index a31b79963f..0000000000 --- a/store/shared/migrate/README.md +++ /dev/null @@ -1,32 +0,0 @@ -# Building SQL DDL into Drone - -These folders contain the code for the different databases that Drone can use. They contain the SQL needed to create the required tables and migrate between versions (i.e. the DDL). This SQL is generated into a Go file and compiled into the Drone binary. - -## Making changes to the database DDL - -Any new change to the database structure always goes into a new SQL file. Follow the naming scheme of the SQL files in `store/shared/migrate//files`: increment the numeric prefix of the file name and give the file a short description of the change being made. - -Changes need to be implemented for all supported databases, i.e. similar changes are required for MySQL, Postgres and SQLite. - -**NB** Any change to the database structure also needs to be reflected in the relevant `struct` in the `core` directory, in the objects in the `store` directory used by the ORM, and possibly in the repositories github.com/drone/drone-go and github.com/drone/runner-go.
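As a concrete illustration of the workflow described in the README above, here is a minimal sketch of what a new migration file could look like. The file name (`019_add_column_repos_example.sql`), the `repo_example` column, and the sequence number are hypothetical and not part of this patch; the `-- name:` label and the `ALTER TABLE ... ADD COLUMN` pattern mirror the existing files under the per-database `files` directories (for example `store/shared/migrate/mysql/files`).

``` sql
-- name: alter-table-repos-add-column-example

-- Hypothetical migration (MySQL dialect): adds an illustrative boolean column
-- to the repos table. Equivalent files with dialect-specific syntax would be
-- added for Postgres and SQLite, and the matching field would be added to the
-- repository struct in the core package.
ALTER TABLE repos ADD COLUMN repo_example BOOLEAN NOT NULL DEFAULT false;
```

Running `go generate` in that database's folder (which invokes togo via the `//go:generate` directive in `ddl.go`) would then regenerate `ddl_gen.go`, appending the statement to the `migrations` slice so that `Migrate` executes it once and records its name in the `migrations` table.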
- -## Generating Go from the SQL files - -To generate the go files you will need to install the golang command line tool `Togo` so it is on your users PATH. - -### Steps to install Togo - -``` bash -# in your workspace -git clone git@github.com:bradrydzewski/togo.git -cd togo -go get github.com/bradrydzewski/togo -``` - -### Generating go DDL - -Enter the desired database's implementation folder, and run the following. It will update the `ddl_gen.go` file. - -``` bash -go generate -``` diff --git a/store/shared/migrate/mysql/ddl.go b/store/shared/migrate/mysql/ddl.go deleted file mode 100644 index 41f71516a4..0000000000 --- a/store/shared/migrate/mysql/ddl.go +++ /dev/null @@ -1,9 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package mysql - -//go:generate togo ddl -package mysql -dialect mysql diff --git a/store/shared/migrate/mysql/ddl_gen.go b/store/shared/migrate/mysql/ddl_gen.go deleted file mode 100644 index 156aa4563d..0000000000 --- a/store/shared/migrate/mysql/ddl_gen.go +++ /dev/null @@ -1,793 +0,0 @@ -package mysql - -import ( - "database/sql" -) - -var migrations = []struct { - name string - stmt string -}{ - { - name: "create-table-users", - stmt: createTableUsers, - }, - { - name: "create-table-repos", - stmt: createTableRepos, - }, - { - name: "alter-table-repos-add-column-no-fork", - stmt: alterTableReposAddColumnNoFork, - }, - { - name: "alter-table-repos-add-column-no-pulls", - stmt: alterTableReposAddColumnNoPulls, - }, - { - name: "alter-table-repos-add-column-cancel-pulls", - stmt: alterTableReposAddColumnCancelPulls, - }, - { - name: "alter-table-repos-add-column-cancel-push", - stmt: alterTableReposAddColumnCancelPush, - }, - { - name: "alter-table-repos-add-column-throttle", - stmt: alterTableReposAddColumnThrottle, - }, - { - name: "alter-table-repos-add-column-cancel-running", - stmt: alterTableReposAddColumnCancelRunning, - }, - { - name: "create-table-perms", - stmt: createTablePerms, - }, - { - name: "create-index-perms-user", - stmt: createIndexPermsUser, - }, - { - name: "create-index-perms-repo", - stmt: createIndexPermsRepo, - }, - { - name: "create-table-builds", - stmt: createTableBuilds, - }, - { - name: "create-index-builds-repo", - stmt: createIndexBuildsRepo, - }, - { - name: "create-index-builds-author", - stmt: createIndexBuildsAuthor, - }, - { - name: "create-index-builds-sender", - stmt: createIndexBuildsSender, - }, - { - name: "create-index-builds-ref", - stmt: createIndexBuildsRef, - }, - { - name: "alter-table-builds-add-column-debug", - stmt: alterTableBuildsAddColumnDebug, - }, - { - name: "create-table-stages", - stmt: createTableStages, - }, - { - name: "create-index-stages-build", - stmt: createIndexStagesBuild, - }, - { - name: "create-table-unfinished", - stmt: createTableUnfinished, - }, - { - name: "create-trigger-stage-insert", - stmt: createTriggerStageInsert, - }, - { - name: "create-trigger-stage-update", - stmt: createTriggerStageUpdate, - }, - { - name: "alter-table-stages-add-column-limit-repos", - stmt: alterTableStagesAddColumnLimitRepos, - }, - { - name: "create-table-steps", - stmt: createTableSteps, - }, - { - name: "create-index-steps-stage", - stmt: createIndexStepsStage, - }, - { - name: "create-table-logs", - stmt: createTableLogs, - }, - { - name: "create-table-cron", - stmt: createTableCron, - }, - { - name: "create-index-cron-repo", - stmt: createIndexCronRepo, - 
}, - { - name: "create-index-cron-next", - stmt: createIndexCronNext, - }, - { - name: "create-table-secrets", - stmt: createTableSecrets, - }, - { - name: "create-index-secrets-repo", - stmt: createIndexSecretsRepo, - }, - { - name: "create-index-secrets-repo-name", - stmt: createIndexSecretsRepoName, - }, - { - name: "create-table-nodes", - stmt: createTableNodes, - }, - { - name: "alter-table-builds-add-column-cron", - stmt: alterTableBuildsAddColumnCron, - }, - { - name: "create-table-org-secrets", - stmt: createTableOrgSecrets, - }, - { - name: "alter-table-builds-add-column-deploy-id", - stmt: alterTableBuildsAddColumnDeployId, - }, - { - name: "create-table-latest", - stmt: createTableLatest, - }, - { - name: "create-index-latest-repo", - stmt: createIndexLatestRepo, - }, - { - name: "create-table-template", - stmt: createTableTemplate, - }, - { - name: "create-index-template-namespace", - stmt: createIndexTemplateNamespace, - }, - { - name: "alter-table-steps-add-column-step-depends-on", - stmt: alterTableStepsAddColumnStepDependsOn, - }, - { - name: "alter-table-steps-add-column-step-image", - stmt: alterTableStepsAddColumnStepImage, - }, - { - name: "alter-table-steps-add-column-step-detached", - stmt: alterTableStepsAddColumnStepDetached, - }, - { - name: "create-table-cards", - stmt: createTableCards, - }, - { - name: "create-index-cards-card-build", - stmt: createIndexCardsCardBuild, - }, - { - name: "create-index-cards-card_step", - stmt: createIndexCardsCardstep, - }, - { - name: "drop-table-cards", - stmt: dropTableCards, - }, - { - name: "alter-table-steps-add-column-step_schema", - stmt: alterTableStepsAddColumnStepschema, - }, - { - name: "create-new-table-cards", - stmt: createNewTableCards, - }, -} - -// Migrate performs the database migration. If the migration fails -// and error is returned. -func Migrate(db *sql.DB) error { - if err := createTable(db); err != nil { - return err - } - completed, err := selectCompleted(db) - if err != nil && err != sql.ErrNoRows { - return err - } - for _, migration := range migrations { - if _, ok := completed[migration.name]; ok { - - continue - } - - if _, err := db.Exec(migration.stmt); err != nil { - return err - } - if err := insertMigration(db, migration.name); err != nil { - return err - } - - } - return nil -} - -func createTable(db *sql.DB) error { - _, err := db.Exec(migrationTableCreate) - return err -} - -func insertMigration(db *sql.DB, name string) error { - _, err := db.Exec(migrationInsert, name) - return err -} - -func selectCompleted(db *sql.DB) (map[string]struct{}, error) { - migrations := map[string]struct{}{} - rows, err := db.Query(migrationSelect) - if err != nil { - return nil, err - } - defer rows.Close() - for rows.Next() { - var name string - if err := rows.Scan(&name); err != nil { - return nil, err - } - migrations[name] = struct{}{} - } - return migrations, nil -} - -// -// migration table ddl and sql -// - -var migrationTableCreate = ` -CREATE TABLE IF NOT EXISTS migrations ( - name VARCHAR(255) -,UNIQUE(name) -) -` - -var migrationInsert = ` -INSERT INTO migrations (name) VALUES (?) 
-` - -var migrationSelect = ` -SELECT name FROM migrations -` - -// -// 001_create_table_user.sql -// - -var createTableUsers = ` -CREATE TABLE IF NOT EXISTS users ( - user_id INTEGER PRIMARY KEY AUTO_INCREMENT -,user_login VARCHAR(250) -,user_email VARCHAR(500) -,user_admin BOOLEAN -,user_machine BOOLEAN -,user_active BOOLEAN -,user_avatar VARCHAR(2000) -,user_syncing BOOLEAN -,user_synced INTEGER -,user_created INTEGER -,user_updated INTEGER -,user_last_login INTEGER -,user_oauth_token BLOB -,user_oauth_refresh BLOB -,user_oauth_expiry INTEGER -,user_hash VARCHAR(500) -,UNIQUE(user_login) -,UNIQUE(user_hash) -); -` - -// -// 002_create_table_repos.sql -// - -var createTableRepos = ` -CREATE TABLE IF NOT EXISTS repos ( - repo_id INTEGER PRIMARY KEY AUTO_INCREMENT -,repo_uid VARCHAR(250) -,repo_user_id INTEGER -,repo_namespace VARCHAR(250) -,repo_name VARCHAR(250) -,repo_slug VARCHAR(250) -,repo_scm VARCHAR(50) -,repo_clone_url VARCHAR(2000) -,repo_ssh_url VARCHAR(2000) -,repo_html_url VARCHAR(2000) -,repo_active BOOLEAN -,repo_private BOOLEAN -,repo_visibility VARCHAR(50) -,repo_branch VARCHAR(250) -,repo_counter INTEGER -,repo_config VARCHAR(500) -,repo_timeout INTEGER -,repo_trusted BOOLEAN -,repo_protected BOOLEAN -,repo_synced INTEGER -,repo_created INTEGER -,repo_updated INTEGER -,repo_version INTEGER -,repo_signer VARCHAR(50) -,repo_secret VARCHAR(50) -,UNIQUE(repo_slug) -,UNIQUE(repo_uid) -); -` - -var alterTableReposAddColumnNoFork = ` -ALTER TABLE repos ADD COLUMN repo_no_forks BOOLEAN NOT NULL DEFAULT false; -` - -var alterTableReposAddColumnNoPulls = ` -ALTER TABLE repos ADD COLUMN repo_no_pulls BOOLEAN NOT NULL DEFAULT false; -` - -var alterTableReposAddColumnCancelPulls = ` -ALTER TABLE repos ADD COLUMN repo_cancel_pulls BOOLEAN NOT NULL DEFAULT false; -` - -var alterTableReposAddColumnCancelPush = ` -ALTER TABLE repos ADD COLUMN repo_cancel_push BOOLEAN NOT NULL DEFAULT false; -` - -var alterTableReposAddColumnThrottle = ` -ALTER TABLE repos ADD COLUMN repo_throttle INTEGER NOT NULL DEFAULT 0; -` - -var alterTableReposAddColumnCancelRunning = ` -ALTER TABLE repos ADD COLUMN repo_cancel_running BOOLEAN NOT NULL DEFAULT false; -` - -// -// 003_create_table_perms.sql -// - -var createTablePerms = ` -CREATE TABLE IF NOT EXISTS perms ( - perm_user_id INTEGER -,perm_repo_uid VARCHAR(250) -,perm_read BOOLEAN -,perm_write BOOLEAN -,perm_admin BOOLEAN -,perm_synced INTEGER -,perm_created INTEGER -,perm_updated INTEGER -,PRIMARY KEY(perm_user_id, perm_repo_uid) -); -` - -var createIndexPermsUser = ` -CREATE INDEX ix_perms_user ON perms (perm_user_id); -` - -var createIndexPermsRepo = ` -CREATE INDEX ix_perms_repo ON perms (perm_repo_uid); -` - -// -// 004_create_table_builds.sql -// - -var createTableBuilds = ` -CREATE TABLE IF NOT EXISTS builds ( - build_id INTEGER PRIMARY KEY AUTO_INCREMENT -,build_repo_id INTEGER -,build_config_id INTEGER -,build_trigger VARCHAR(250) -,build_number INTEGER -,build_parent INTEGER -,build_status VARCHAR(50) -,build_error VARCHAR(500) -,build_event VARCHAR(50) -,build_action VARCHAR(50) -,build_link VARCHAR(1000) -,build_timestamp INTEGER -,build_title VARCHAR(2000) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci -,build_message VARCHAR(2000) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci -,build_before VARCHAR(50) -,build_after VARCHAR(50) -,build_ref VARCHAR(500) -,build_source_repo VARCHAR(250) -,build_source VARCHAR(500) -,build_target VARCHAR(500) -,build_author VARCHAR(500) -,build_author_name VARCHAR(500) CHARACTER SET utf8mb4 COLLATE 
utf8mb4_unicode_ci -,build_author_email VARCHAR(500) -,build_author_avatar VARCHAR(1000) -,build_sender VARCHAR(500) -,build_deploy VARCHAR(500) -,build_params VARCHAR(2000) -,build_started INTEGER -,build_finished INTEGER -,build_created INTEGER -,build_updated INTEGER -,build_version INTEGER -,UNIQUE(build_repo_id, build_number) -); -` - -var createIndexBuildsRepo = ` -CREATE INDEX ix_build_repo ON builds (build_repo_id); -` - -var createIndexBuildsAuthor = ` -CREATE INDEX ix_build_author ON builds (build_author); -` - -var createIndexBuildsSender = ` -CREATE INDEX ix_build_sender ON builds (build_sender); -` - -var createIndexBuildsRef = ` -CREATE INDEX ix_build_ref ON builds (build_repo_id, build_ref); -` - -var alterTableBuildsAddColumnDebug = ` -ALTER TABLE builds ADD COLUMN build_debug BOOLEAN NOT NULL DEFAULT false; -` - -// -// 005_create_table_stages.sql -// - -var createTableStages = ` -CREATE TABLE IF NOT EXISTS stages ( - stage_id INTEGER PRIMARY KEY AUTO_INCREMENT -,stage_repo_id INTEGER -,stage_build_id INTEGER -,stage_number INTEGER -,stage_name VARCHAR(100) -,stage_kind VARCHAR(50) -,stage_type VARCHAR(50) -,stage_status VARCHAR(50) -,stage_error VARCHAR(500) -,stage_errignore BOOLEAN -,stage_exit_code INTEGER -,stage_limit INTEGER -,stage_os VARCHAR(50) -,stage_arch VARCHAR(50) -,stage_variant VARCHAR(10) -,stage_kernel VARCHAR(50) -,stage_machine VARCHAR(500) -,stage_started INTEGER -,stage_stopped INTEGER -,stage_created INTEGER -,stage_updated INTEGER -,stage_version INTEGER -,stage_on_success BOOLEAN -,stage_on_failure BOOLEAN -,stage_depends_on TEXT -,stage_labels TEXT -,UNIQUE(stage_build_id, stage_number) -); -` - -var createIndexStagesBuild = ` -CREATE INDEX ix_stages_build ON stages (stage_build_id); -` - -var createTableUnfinished = ` -CREATE TABLE IF NOT EXISTS stages_unfinished ( -stage_id INTEGER PRIMARY KEY -); -` - -var createTriggerStageInsert = ` -CREATE TRIGGER stage_insert AFTER INSERT ON stages -FOR EACH ROW -BEGIN - IF NEW.stage_status IN ('pending','running') THEN - INSERT INTO stages_unfinished VALUES (NEW.stage_id); - END IF; -END; -` - -var createTriggerStageUpdate = ` -CREATE TRIGGER stage_update AFTER UPDATE ON stages -FOR EACH ROW -BEGIN - IF NEW.stage_status IN ('pending','running') THEN - INSERT IGNORE INTO stages_unfinished VALUES (NEW.stage_id); - ELSEIF OLD.stage_status IN ('pending','running') THEN - DELETE FROM stages_unfinished WHERE stage_id = OLD.stage_id; - END IF; -END; -` - -var alterTableStagesAddColumnLimitRepos = ` -ALTER TABLE stages ADD COLUMN stage_limit_repo INTEGER NOT NULL DEFAULT 0; -` - -// -// 006_create_table_steps.sql -// - -var createTableSteps = ` -CREATE TABLE IF NOT EXISTS steps ( - step_id INTEGER PRIMARY KEY AUTO_INCREMENT -,step_stage_id INTEGER -,step_number INTEGER -,step_name VARCHAR(100) -,step_status VARCHAR(50) -,step_error VARCHAR(500) -,step_errignore BOOLEAN -,step_exit_code INTEGER -,step_started INTEGER -,step_stopped INTEGER -,step_version INTEGER -,UNIQUE(step_stage_id, step_number) -); -` - -var createIndexStepsStage = ` -CREATE INDEX ix_steps_stage ON steps (step_stage_id); -` - -// -// 007_create_table_logs.sql -// - -var createTableLogs = ` -CREATE TABLE IF NOT EXISTS logs ( - log_id INTEGER PRIMARY KEY -,log_data MEDIUMBLOB -); -` - -// -// 008_create_table_cron.sql -// - -var createTableCron = ` -CREATE TABLE IF NOT EXISTS cron ( - cron_id INTEGER PRIMARY KEY AUTO_INCREMENT -,cron_repo_id INTEGER -,cron_name VARCHAR(50) -,cron_expr VARCHAR(50) -,cron_next INTEGER -,cron_prev INTEGER 
-,cron_event VARCHAR(50) -,cron_branch VARCHAR(250) -,cron_target VARCHAR(250) -,cron_disabled BOOLEAN -,cron_created INTEGER -,cron_updated INTEGER -,cron_version INTEGER -,UNIQUE(cron_repo_id, cron_name) -,FOREIGN KEY(cron_repo_id) REFERENCES repos(repo_id) ON DELETE CASCADE -); -` - -var createIndexCronRepo = ` -CREATE INDEX ix_cron_repo ON cron (cron_repo_id); -` - -var createIndexCronNext = ` -CREATE INDEX ix_cron_next ON cron (cron_next); -` - -// -// 009_create_table_secrets.sql -// - -var createTableSecrets = ` -CREATE TABLE IF NOT EXISTS secrets ( - secret_id INTEGER PRIMARY KEY AUTO_INCREMENT -,secret_repo_id INTEGER -,secret_name VARCHAR(500) -,secret_data BLOB -,secret_pull_request BOOLEAN -,secret_pull_request_push BOOLEAN -,UNIQUE(secret_repo_id, secret_name) -,FOREIGN KEY(secret_repo_id) REFERENCES repos(repo_id) ON DELETE CASCADE -); -` - -var createIndexSecretsRepo = ` -CREATE INDEX ix_secret_repo ON secrets (secret_repo_id); -` - -var createIndexSecretsRepoName = ` -CREATE INDEX ix_secret_repo_name ON secrets (secret_repo_id, secret_name); -` - -// -// 010_create_table_nodes.sql -// - -var createTableNodes = ` -CREATE TABLE IF NOT EXISTS nodes ( - node_id INTEGER PRIMARY KEY AUTO_INCREMENT -,node_uid VARCHAR(500) -,node_provider VARCHAR(50) -,node_state VARCHAR(50) -,node_name VARCHAR(50) -,node_image VARCHAR(500) -,node_region VARCHAR(100) -,node_size VARCHAR(100) -,node_os VARCHAR(50) -,node_arch VARCHAR(50) -,node_kernel VARCHAR(50) -,node_variant VARCHAR(50) -,node_address VARCHAR(500) -,node_capacity INTEGER -,node_filter VARCHAR(2000) -,node_labels VARCHAR(2000) -,node_error VARCHAR(2000) -,node_ca_key BLOB -,node_ca_cert BLOB -,node_tls_key BLOB -,node_tls_cert BLOB -,node_tls_name VARCHAR(500) -,node_paused BOOLEAN -,node_protected BOOLEAN -,node_created INTEGER -,node_updated INTEGER -,node_pulled INTEGER - -,UNIQUE(node_name) -); -` - -// -// 011_add_column_builds_cron.sql -// - -var alterTableBuildsAddColumnCron = ` -ALTER TABLE builds ADD COLUMN build_cron VARCHAR(50) NOT NULL DEFAULT ''; -` - -// -// 012_create_table_global_secrets.sql -// - -var createTableOrgSecrets = ` -CREATE TABLE IF NOT EXISTS orgsecrets ( - secret_id INTEGER PRIMARY KEY AUTO_INCREMENT -,secret_namespace VARCHAR(50) -,secret_name VARCHAR(200) -,secret_type VARCHAR(50) -,secret_data BLOB -,secret_pull_request BOOLEAN -,secret_pull_request_push BOOLEAN -,UNIQUE(secret_namespace, secret_name) -); -` - -// -// 013_add_column_builds_deploy_id.sql -// - -var alterTableBuildsAddColumnDeployId = ` -ALTER TABLE builds ADD COLUMN build_deploy_id INTEGER NOT NULL DEFAULT 0; -` - -// -// 014_create_table_refs.sql -// - -var createTableLatest = ` -CREATE TABLE IF NOT EXISTS latest ( - latest_repo_id INTEGER -,latest_build_id INTEGER -,latest_type VARCHAR(50) -,latest_name VARCHAR(500) -,latest_created INTEGER -,latest_updated INTEGER -,latest_deleted INTEGER -,PRIMARY KEY(latest_repo_id, latest_type, latest_name) -); -` - -var createIndexLatestRepo = ` -CREATE INDEX ix_latest_repo ON latest (latest_repo_id); -` - -// -// 015_create_table_templates.sql -// - -var createTableTemplate = ` -CREATE TABLE IF NOT EXISTS templates ( - template_id INTEGER PRIMARY KEY AUTO_INCREMENT - ,template_name VARCHAR(500) - ,template_namespace VARCHAR(50) - ,template_data BLOB - ,template_created INTEGER - ,template_updated INTEGER - ,UNIQUE(template_name, template_namespace) - ); -` - -var createIndexTemplateNamespace = ` -CREATE INDEX ix_template_namespace ON templates (template_namespace); -` - -// -// 
016_add_columns_steps.sql -// - -var alterTableStepsAddColumnStepDependsOn = ` -ALTER TABLE steps ADD COLUMN step_depends_on TEXT NULL; -` - -var alterTableStepsAddColumnStepImage = ` -ALTER TABLE steps ADD COLUMN step_image VARCHAR(1000) NOT NULL DEFAULT ''; -` - -var alterTableStepsAddColumnStepDetached = ` -ALTER TABLE steps ADD COLUMN step_detached BOOLEAN NOT NULL DEFAULT FALSE; -` - -// -// 017_create_table_cards.sql -// - -var createTableCards = ` -CREATE TABLE IF NOT EXISTS cards ( - card_id INTEGER PRIMARY KEY AUTO_INCREMENT - ,card_build INTEGER - ,card_stage INTEGER - ,card_step INTEGER - ,card_schema TEXT - ,card_data TEXT -); -` - -var createIndexCardsCardBuild = ` -CREATE INDEX ix_cards_build ON cards (card_build); -` - -var createIndexCardsCardstep = ` -CREATE UNIQUE INDEX ix_cards_step ON cards (card_step); -` - -// -// 018_amend_table_cards.sql -// - -var dropTableCards = ` -DROP TABLE IF EXISTS cards; -` - -var alterTableStepsAddColumnStepschema = ` -ALTER TABLE steps - ADD COLUMN step_schema VARCHAR(2000) NOT NULL DEFAULT ''; -` - -var createNewTableCards = ` -CREATE TABLE IF NOT EXISTS cards -( - card_id INTEGER PRIMARY KEY, - card_data BLOB, - FOREIGN KEY (card_id) REFERENCES steps (step_id) ON DELETE CASCADE -); -` diff --git a/store/shared/migrate/mysql/files/001_create_table_user.sql b/store/shared/migrate/mysql/files/001_create_table_user.sql deleted file mode 100644 index b90f6695fc..0000000000 --- a/store/shared/migrate/mysql/files/001_create_table_user.sql +++ /dev/null @@ -1,22 +0,0 @@ --- name: create-table-users - -CREATE TABLE IF NOT EXISTS users ( - user_id INTEGER PRIMARY KEY AUTO_INCREMENT -,user_login VARCHAR(250) -,user_email VARCHAR(500) -,user_admin BOOLEAN -,user_machine BOOLEAN -,user_active BOOLEAN -,user_avatar VARCHAR(2000) -,user_syncing BOOLEAN -,user_synced INTEGER -,user_created INTEGER -,user_updated INTEGER -,user_last_login INTEGER -,user_oauth_token BLOB -,user_oauth_refresh BLOB -,user_oauth_expiry INTEGER -,user_hash VARCHAR(500) -,UNIQUE(user_login) -,UNIQUE(user_hash) -); diff --git a/store/shared/migrate/mysql/files/002_create_table_repos.sql b/store/shared/migrate/mysql/files/002_create_table_repos.sql deleted file mode 100644 index feabda2ff5..0000000000 --- a/store/shared/migrate/mysql/files/002_create_table_repos.sql +++ /dev/null @@ -1,55 +0,0 @@ --- name: create-table-repos - -CREATE TABLE IF NOT EXISTS repos ( - repo_id INTEGER PRIMARY KEY AUTO_INCREMENT -,repo_uid VARCHAR(250) -,repo_user_id INTEGER -,repo_namespace VARCHAR(250) -,repo_name VARCHAR(250) -,repo_slug VARCHAR(250) -,repo_scm VARCHAR(50) -,repo_clone_url VARCHAR(2000) -,repo_ssh_url VARCHAR(2000) -,repo_html_url VARCHAR(2000) -,repo_active BOOLEAN -,repo_private BOOLEAN -,repo_visibility VARCHAR(50) -,repo_branch VARCHAR(250) -,repo_counter INTEGER -,repo_config VARCHAR(500) -,repo_timeout INTEGER -,repo_trusted BOOLEAN -,repo_protected BOOLEAN -,repo_synced INTEGER -,repo_created INTEGER -,repo_updated INTEGER -,repo_version INTEGER -,repo_signer VARCHAR(50) -,repo_secret VARCHAR(50) -,UNIQUE(repo_slug) -,UNIQUE(repo_uid) -); - --- name: alter-table-repos-add-column-no-fork - -ALTER TABLE repos ADD COLUMN repo_no_forks BOOLEAN NOT NULL DEFAULT false; - --- name: alter-table-repos-add-column-no-pulls - -ALTER TABLE repos ADD COLUMN repo_no_pulls BOOLEAN NOT NULL DEFAULT false; - --- name: alter-table-repos-add-column-cancel-pulls - -ALTER TABLE repos ADD COLUMN repo_cancel_pulls BOOLEAN NOT NULL DEFAULT false; - --- name: alter-table-repos-add-column-cancel-push 
- -ALTER TABLE repos ADD COLUMN repo_cancel_push BOOLEAN NOT NULL DEFAULT false; - --- name: alter-table-repos-add-column-throttle - -ALTER TABLE repos ADD COLUMN repo_throttle INTEGER NOT NULL DEFAULT 0; - --- name: alter-table-repos-add-column-cancel-running - -ALTER TABLE repos ADD COLUMN repo_cancel_running BOOLEAN NOT NULL DEFAULT false; diff --git a/store/shared/migrate/mysql/files/003_create_table_perms.sql b/store/shared/migrate/mysql/files/003_create_table_perms.sql deleted file mode 100644 index 4622536c8f..0000000000 --- a/store/shared/migrate/mysql/files/003_create_table_perms.sql +++ /dev/null @@ -1,23 +0,0 @@ --- name: create-table-perms - -CREATE TABLE IF NOT EXISTS perms ( - perm_user_id INTEGER -,perm_repo_uid VARCHAR(250) -,perm_read BOOLEAN -,perm_write BOOLEAN -,perm_admin BOOLEAN -,perm_synced INTEGER -,perm_created INTEGER -,perm_updated INTEGER -,PRIMARY KEY(perm_user_id, perm_repo_uid) ---,FOREIGN KEY(perm_user_id) REFERENCES users(user_id) ON DELETE CASCADE ---,FOREIGN KEY(perm_repo_id) REFERENCES repos(repo_id) ON DELETE CASCADE -); - --- name: create-index-perms-user - -CREATE INDEX ix_perms_user ON perms (perm_user_id); - --- name: create-index-perms-repo - -CREATE INDEX ix_perms_repo ON perms (perm_repo_uid); diff --git a/store/shared/migrate/mysql/files/004_create_table_builds.sql b/store/shared/migrate/mysql/files/004_create_table_builds.sql deleted file mode 100644 index b914e07c56..0000000000 --- a/store/shared/migrate/mysql/files/004_create_table_builds.sql +++ /dev/null @@ -1,58 +0,0 @@ --- name: create-table-builds - -CREATE TABLE IF NOT EXISTS builds ( - build_id INTEGER PRIMARY KEY AUTO_INCREMENT -,build_repo_id INTEGER -,build_config_id INTEGER -,build_trigger VARCHAR(250) -,build_number INTEGER -,build_parent INTEGER -,build_status VARCHAR(50) -,build_error VARCHAR(500) -,build_event VARCHAR(50) -,build_action VARCHAR(50) -,build_link VARCHAR(1000) -,build_timestamp INTEGER -,build_title VARCHAR(2000) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci -,build_message VARCHAR(2000) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci -,build_before VARCHAR(50) -,build_after VARCHAR(50) -,build_ref VARCHAR(500) -,build_source_repo VARCHAR(250) -,build_source VARCHAR(500) -,build_target VARCHAR(500) -,build_author VARCHAR(500) -,build_author_name VARCHAR(500) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci -,build_author_email VARCHAR(500) -,build_author_avatar VARCHAR(1000) -,build_sender VARCHAR(500) -,build_deploy VARCHAR(500) -,build_params VARCHAR(2000) -,build_started INTEGER -,build_finished INTEGER -,build_created INTEGER -,build_updated INTEGER -,build_version INTEGER -,UNIQUE(build_repo_id, build_number) ---,FOREIGN KEY(build_repo_id) REFERENCES repos(repo_id) ON DELETE CASCADE -); - --- name: create-index-builds-repo - -CREATE INDEX ix_build_repo ON builds (build_repo_id); - --- name: create-index-builds-author - -CREATE INDEX ix_build_author ON builds (build_author); - --- name: create-index-builds-sender - -CREATE INDEX ix_build_sender ON builds (build_sender); - --- name: create-index-builds-ref - -CREATE INDEX ix_build_ref ON builds (build_repo_id, build_ref); - --- name: alter-table-builds-add-column-debug - -ALTER TABLE builds ADD COLUMN build_debug BOOLEAN NOT NULL DEFAULT false; diff --git a/store/shared/migrate/mysql/files/005_create_table_stages.sql b/store/shared/migrate/mysql/files/005_create_table_stages.sql deleted file mode 100644 index f835f02ca1..0000000000 --- a/store/shared/migrate/mysql/files/005_create_table_stages.sql +++ 
/dev/null @@ -1,67 +0,0 @@ --- name: create-table-stages - -CREATE TABLE IF NOT EXISTS stages ( - stage_id INTEGER PRIMARY KEY AUTO_INCREMENT -,stage_repo_id INTEGER -,stage_build_id INTEGER -,stage_number INTEGER -,stage_name VARCHAR(100) -,stage_kind VARCHAR(50) -,stage_type VARCHAR(50) -,stage_status VARCHAR(50) -,stage_error VARCHAR(500) -,stage_errignore BOOLEAN -,stage_exit_code INTEGER -,stage_limit INTEGER -,stage_os VARCHAR(50) -,stage_arch VARCHAR(50) -,stage_variant VARCHAR(10) -,stage_kernel VARCHAR(50) -,stage_machine VARCHAR(500) -,stage_started INTEGER -,stage_stopped INTEGER -,stage_created INTEGER -,stage_updated INTEGER -,stage_version INTEGER -,stage_on_success BOOLEAN -,stage_on_failure BOOLEAN -,stage_depends_on TEXT -,stage_labels TEXT -,UNIQUE(stage_build_id, stage_number) -); - --- name: create-index-stages-build - -CREATE INDEX ix_stages_build ON stages (stage_build_id); - --- name: create-table-unfinished - -CREATE TABLE IF NOT EXISTS stages_unfinished ( -stage_id INTEGER PRIMARY KEY -); - --- name: create-trigger-stage-insert - -CREATE TRIGGER stage_insert AFTER INSERT ON stages -FOR EACH ROW -BEGIN - IF NEW.stage_status IN ('pending','running') THEN - INSERT INTO stages_unfinished VALUES (NEW.stage_id); - END IF; -END; - --- name: create-trigger-stage-update - -CREATE TRIGGER stage_update AFTER UPDATE ON stages -FOR EACH ROW -BEGIN - IF NEW.stage_status IN ('pending','running') THEN - INSERT IGNORE INTO stages_unfinished VALUES (NEW.stage_id); - ELSEIF OLD.stage_status IN ('pending','running') THEN - DELETE FROM stages_unfinished WHERE stage_id = OLD.stage_id; - END IF; -END; - --- name: alter-table-stages-add-column-limit-repos - -ALTER TABLE stages ADD COLUMN stage_limit_repo INTEGER NOT NULL DEFAULT 0; diff --git a/store/shared/migrate/mysql/files/006_create_table_steps.sql b/store/shared/migrate/mysql/files/006_create_table_steps.sql deleted file mode 100644 index 81b433f94f..0000000000 --- a/store/shared/migrate/mysql/files/006_create_table_steps.sql +++ /dev/null @@ -1,20 +0,0 @@ --- name: create-table-steps - -CREATE TABLE IF NOT EXISTS steps ( - step_id INTEGER PRIMARY KEY AUTO_INCREMENT -,step_stage_id INTEGER -,step_number INTEGER -,step_name VARCHAR(100) -,step_status VARCHAR(50) -,step_error VARCHAR(500) -,step_errignore BOOLEAN -,step_exit_code INTEGER -,step_started INTEGER -,step_stopped INTEGER -,step_version INTEGER -,UNIQUE(step_stage_id, step_number) -); - --- name: create-index-steps-stage - -CREATE INDEX ix_steps_stage ON steps (step_stage_id); diff --git a/store/shared/migrate/mysql/files/007_create_table_logs.sql b/store/shared/migrate/mysql/files/007_create_table_logs.sql deleted file mode 100644 index b1a60996bd..0000000000 --- a/store/shared/migrate/mysql/files/007_create_table_logs.sql +++ /dev/null @@ -1,6 +0,0 @@ --- name: create-table-logs - -CREATE TABLE IF NOT EXISTS logs ( - log_id INTEGER PRIMARY KEY -,log_data MEDIUMBLOB -); diff --git a/store/shared/migrate/mysql/files/008_create_table_cron.sql b/store/shared/migrate/mysql/files/008_create_table_cron.sql deleted file mode 100644 index 64b259292d..0000000000 --- a/store/shared/migrate/mysql/files/008_create_table_cron.sql +++ /dev/null @@ -1,27 +0,0 @@ --- name: create-table-cron - -CREATE TABLE IF NOT EXISTS cron ( - cron_id INTEGER PRIMARY KEY AUTO_INCREMENT -,cron_repo_id INTEGER -,cron_name VARCHAR(50) -,cron_expr VARCHAR(50) -,cron_next INTEGER -,cron_prev INTEGER -,cron_event VARCHAR(50) -,cron_branch VARCHAR(250) -,cron_target VARCHAR(250) -,cron_disabled BOOLEAN 
-,cron_created INTEGER -,cron_updated INTEGER -,cron_version INTEGER -,UNIQUE(cron_repo_id, cron_name) -,FOREIGN KEY(cron_repo_id) REFERENCES repos(repo_id) ON DELETE CASCADE -); - --- name: create-index-cron-repo - -CREATE INDEX ix_cron_repo ON cron (cron_repo_id); - --- name: create-index-cron-next - -CREATE INDEX ix_cron_next ON cron (cron_next); diff --git a/store/shared/migrate/mysql/files/009_create_table_secrets.sql b/store/shared/migrate/mysql/files/009_create_table_secrets.sql deleted file mode 100644 index 7c0876f16c..0000000000 --- a/store/shared/migrate/mysql/files/009_create_table_secrets.sql +++ /dev/null @@ -1,20 +0,0 @@ --- name: create-table-secrets - -CREATE TABLE IF NOT EXISTS secrets ( - secret_id INTEGER PRIMARY KEY AUTO_INCREMENT -,secret_repo_id INTEGER -,secret_name VARCHAR(500) -,secret_data BLOB -,secret_pull_request BOOLEAN -,secret_pull_request_push BOOLEAN -,UNIQUE(secret_repo_id, secret_name) -,FOREIGN KEY(secret_repo_id) REFERENCES repos(repo_id) ON DELETE CASCADE -); - --- name: create-index-secrets-repo - -CREATE INDEX ix_secret_repo ON secrets (secret_repo_id); - --- name: create-index-secrets-repo-name - -CREATE INDEX ix_secret_repo_name ON secrets (secret_repo_id, secret_name); diff --git a/store/shared/migrate/mysql/files/010_create_table_nodes.sql b/store/shared/migrate/mysql/files/010_create_table_nodes.sql deleted file mode 100644 index f11c962741..0000000000 --- a/store/shared/migrate/mysql/files/010_create_table_nodes.sql +++ /dev/null @@ -1,33 +0,0 @@ --- name: create-table-nodes - -CREATE TABLE IF NOT EXISTS nodes ( - node_id INTEGER PRIMARY KEY AUTO_INCREMENT -,node_uid VARCHAR(500) -,node_provider VARCHAR(50) -,node_state VARCHAR(50) -,node_name VARCHAR(50) -,node_image VARCHAR(500) -,node_region VARCHAR(100) -,node_size VARCHAR(100) -,node_os VARCHAR(50) -,node_arch VARCHAR(50) -,node_kernel VARCHAR(50) -,node_variant VARCHAR(50) -,node_address VARCHAR(500) -,node_capacity INTEGER -,node_filter VARCHAR(2000) -,node_labels VARCHAR(2000) -,node_error VARCHAR(2000) -,node_ca_key BLOB -,node_ca_cert BLOB -,node_tls_key BLOB -,node_tls_cert BLOB -,node_tls_name VARCHAR(500) -,node_paused BOOLEAN -,node_protected BOOLEAN -,node_created INTEGER -,node_updated INTEGER -,node_pulled INTEGER - -,UNIQUE(node_name) -); diff --git a/store/shared/migrate/mysql/files/011_add_column_builds_cron.sql b/store/shared/migrate/mysql/files/011_add_column_builds_cron.sql deleted file mode 100644 index 11d82ac814..0000000000 --- a/store/shared/migrate/mysql/files/011_add_column_builds_cron.sql +++ /dev/null @@ -1,3 +0,0 @@ --- name: alter-table-builds-add-column-cron - -ALTER TABLE builds ADD COLUMN build_cron VARCHAR(50) NOT NULL DEFAULT ''; diff --git a/store/shared/migrate/mysql/files/012_create_table_global_secrets.sql b/store/shared/migrate/mysql/files/012_create_table_global_secrets.sql deleted file mode 100644 index 917abad493..0000000000 --- a/store/shared/migrate/mysql/files/012_create_table_global_secrets.sql +++ /dev/null @@ -1,12 +0,0 @@ --- name: create-table-org-secrets - -CREATE TABLE IF NOT EXISTS orgsecrets ( - secret_id INTEGER PRIMARY KEY AUTO_INCREMENT -,secret_namespace VARCHAR(50) -,secret_name VARCHAR(200) -,secret_type VARCHAR(50) -,secret_data BLOB -,secret_pull_request BOOLEAN -,secret_pull_request_push BOOLEAN -,UNIQUE(secret_namespace, secret_name) -); diff --git a/store/shared/migrate/mysql/files/013_add_column_builds_deploy_id.sql b/store/shared/migrate/mysql/files/013_add_column_builds_deploy_id.sql deleted file mode 100644 index 
b09a583256..0000000000 --- a/store/shared/migrate/mysql/files/013_add_column_builds_deploy_id.sql +++ /dev/null @@ -1,3 +0,0 @@ --- name: alter-table-builds-add-column-deploy-id - -ALTER TABLE builds ADD COLUMN build_deploy_id INTEGER NOT NULL DEFAULT 0; diff --git a/store/shared/migrate/mysql/files/014_create_table_refs.sql b/store/shared/migrate/mysql/files/014_create_table_refs.sql deleted file mode 100644 index f342b88d59..0000000000 --- a/store/shared/migrate/mysql/files/014_create_table_refs.sql +++ /dev/null @@ -1,16 +0,0 @@ --- name: create-table-latest - -CREATE TABLE IF NOT EXISTS latest ( - latest_repo_id INTEGER -,latest_build_id INTEGER -,latest_type VARCHAR(50) -,latest_name VARCHAR(500) -,latest_created INTEGER -,latest_updated INTEGER -,latest_deleted INTEGER -,PRIMARY KEY(latest_repo_id, latest_type, latest_name) -); - --- name: create-index-latest-repo - -CREATE INDEX ix_latest_repo ON latest (latest_repo_id); diff --git a/store/shared/migrate/mysql/files/015_create_table_templates.sql b/store/shared/migrate/mysql/files/015_create_table_templates.sql deleted file mode 100644 index ab8ddc1eb5..0000000000 --- a/store/shared/migrate/mysql/files/015_create_table_templates.sql +++ /dev/null @@ -1,15 +0,0 @@ --- name: create-table-template - -CREATE TABLE IF NOT EXISTS templates ( - template_id INTEGER PRIMARY KEY AUTO_INCREMENT - ,template_name VARCHAR(500) - ,template_namespace VARCHAR(50) - ,template_data BLOB - ,template_created INTEGER - ,template_updated INTEGER - ,UNIQUE(template_name, template_namespace) - ); - --- name: create-index-template-namespace - -CREATE INDEX ix_template_namespace ON templates (template_namespace); \ No newline at end of file diff --git a/store/shared/migrate/mysql/files/016_add_columns_steps.sql b/store/shared/migrate/mysql/files/016_add_columns_steps.sql deleted file mode 100644 index 6df6c67c16..0000000000 --- a/store/shared/migrate/mysql/files/016_add_columns_steps.sql +++ /dev/null @@ -1,11 +0,0 @@ --- name: alter-table-steps-add-column-step-depends-on - -ALTER TABLE steps ADD COLUMN step_depends_on TEXT NULL; - --- name: alter-table-steps-add-column-step-image - -ALTER TABLE steps ADD COLUMN step_image VARCHAR(1000) NOT NULL DEFAULT ''; - --- name: alter-table-steps-add-column-step-detached - -ALTER TABLE steps ADD COLUMN step_detached BOOLEAN NOT NULL DEFAULT FALSE; diff --git a/store/shared/migrate/mysql/files/017_create_table_cards.sql b/store/shared/migrate/mysql/files/017_create_table_cards.sql deleted file mode 100644 index 6418b0619a..0000000000 --- a/store/shared/migrate/mysql/files/017_create_table_cards.sql +++ /dev/null @@ -1,16 +0,0 @@ --- name: create-table-cards - -CREATE TABLE IF NOT EXISTS cards ( - card_id INTEGER PRIMARY KEY AUTO_INCREMENT - ,card_build INTEGER - ,card_stage INTEGER - ,card_step INTEGER - ,card_schema TEXT - ,card_data TEXT -); - --- name: create-index-cards-card-build -CREATE INDEX ix_cards_build ON cards (card_build); - --- name: create-index-cards-card_step -CREATE UNIQUE INDEX ix_cards_step ON cards (card_step); diff --git a/store/shared/migrate/mysql/files/018_amend_table_cards.sql b/store/shared/migrate/mysql/files/018_amend_table_cards.sql deleted file mode 100644 index e041289aa0..0000000000 --- a/store/shared/migrate/mysql/files/018_amend_table_cards.sql +++ /dev/null @@ -1,16 +0,0 @@ --- name: drop-table-cards - -DROP TABLE IF EXISTS cards; - --- name: alter-table-steps-add-column-step_schema - -ALTER TABLE steps - ADD COLUMN step_schema VARCHAR(2000) NOT NULL DEFAULT ''; - --- name: 
create-new-table-cards -CREATE TABLE IF NOT EXISTS cards -( - card_id INTEGER PRIMARY KEY, - card_data BLOB, - FOREIGN KEY (card_id) REFERENCES steps (step_id) ON DELETE CASCADE -); \ No newline at end of file diff --git a/store/shared/migrate/postgres/ddl.go b/store/shared/migrate/postgres/ddl.go deleted file mode 100644 index 99354fc753..0000000000 --- a/store/shared/migrate/postgres/ddl.go +++ /dev/null @@ -1,9 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package postgres - -//go:generate togo ddl -package postgres -dialect postgres diff --git a/store/shared/migrate/postgres/ddl_gen.go b/store/shared/migrate/postgres/ddl_gen.go deleted file mode 100644 index d87aaf5016..0000000000 --- a/store/shared/migrate/postgres/ddl_gen.go +++ /dev/null @@ -1,771 +0,0 @@ -package postgres - -import ( - "database/sql" -) - -var migrations = []struct { - name string - stmt string -}{ - { - name: "create-table-users", - stmt: createTableUsers, - }, - { - name: "create-table-repos", - stmt: createTableRepos, - }, - { - name: "alter-table-repos-add-column-no-fork", - stmt: alterTableReposAddColumnNoFork, - }, - { - name: "alter-table-repos-add-column-no-pulls", - stmt: alterTableReposAddColumnNoPulls, - }, - { - name: "alter-table-repos-add-column-cancel-pulls", - stmt: alterTableReposAddColumnCancelPulls, - }, - { - name: "alter-table-repos-add-column-cancel-push", - stmt: alterTableReposAddColumnCancelPush, - }, - { - name: "alter-table-repos-add-column-throttle", - stmt: alterTableReposAddColumnThrottle, - }, - { - name: "alter-table-repos-add-column-cancel-running", - stmt: alterTableReposAddColumnCancelRunning, - }, - { - name: "create-table-perms", - stmt: createTablePerms, - }, - { - name: "create-index-perms-user", - stmt: createIndexPermsUser, - }, - { - name: "create-index-perms-repo", - stmt: createIndexPermsRepo, - }, - { - name: "create-table-builds", - stmt: createTableBuilds, - }, - { - name: "create-index-builds-incomplete", - stmt: createIndexBuildsIncomplete, - }, - { - name: "create-index-builds-repo", - stmt: createIndexBuildsRepo, - }, - { - name: "create-index-builds-author", - stmt: createIndexBuildsAuthor, - }, - { - name: "create-index-builds-sender", - stmt: createIndexBuildsSender, - }, - { - name: "create-index-builds-ref", - stmt: createIndexBuildsRef, - }, - { - name: "alter-table-builds-add-column-debug", - stmt: alterTableBuildsAddColumnDebug, - }, - { - name: "create-table-stages", - stmt: createTableStages, - }, - { - name: "create-index-stages-build", - stmt: createIndexStagesBuild, - }, - { - name: "create-index-stages-status", - stmt: createIndexStagesStatus, - }, - { - name: "alter-table-stages-add-column-limit-repos", - stmt: alterTableStagesAddColumnLimitRepos, - }, - { - name: "create-table-steps", - stmt: createTableSteps, - }, - { - name: "create-index-steps-stage", - stmt: createIndexStepsStage, - }, - { - name: "create-table-logs", - stmt: createTableLogs, - }, - { - name: "create-table-cron", - stmt: createTableCron, - }, - { - name: "create-index-cron-repo", - stmt: createIndexCronRepo, - }, - { - name: "create-index-cron-next", - stmt: createIndexCronNext, - }, - { - name: "create-table-secrets", - stmt: createTableSecrets, - }, - { - name: "create-index-secrets-repo", - stmt: createIndexSecretsRepo, - }, - { - name: "create-index-secrets-repo-name", - stmt: createIndexSecretsRepoName, - }, - { - name: 
"create-table-nodes", - stmt: createTableNodes, - }, - { - name: "alter-table-builds-add-column-cron", - stmt: alterTableBuildsAddColumnCron, - }, - { - name: "create-table-org-secrets", - stmt: createTableOrgSecrets, - }, - { - name: "alter-table-builds-add-column-deploy-id", - stmt: alterTableBuildsAddColumnDeployId, - }, - { - name: "create-table-latest", - stmt: createTableLatest, - }, - { - name: "create-index-latest-repo", - stmt: createIndexLatestRepo, - }, - { - name: "create-table-template", - stmt: createTableTemplate, - }, - { - name: "create-index-template-namespace", - stmt: createIndexTemplateNamespace, - }, - { - name: "alter-table-steps-add-column-step-depends-on", - stmt: alterTableStepsAddColumnStepDependsOn, - }, - { - name: "alter-table-steps-add-column-step-image", - stmt: alterTableStepsAddColumnStepImage, - }, - { - name: "alter-table-steps-add-column-step-detached", - stmt: alterTableStepsAddColumnStepDetached, - }, - { - name: "create-table-cards", - stmt: createTableCards, - }, - { - name: "create-index-cards-card_build", - stmt: createIndexCardsCardbuild, - }, - { - name: "create-index-cards-card_step", - stmt: createIndexCardsCardstep, - }, - { - name: "drop-table-cards", - stmt: dropTableCards, - }, - { - name: "alter-table-steps-add-column-step_schema", - stmt: alterTableStepsAddColumnStepschema, - }, - { - name: "create-new-table-cards", - stmt: createNewTableCards, - }, -} - -// Migrate performs the database migration. If the migration fails -// and error is returned. -func Migrate(db *sql.DB) error { - if err := createTable(db); err != nil { - return err - } - completed, err := selectCompleted(db) - if err != nil && err != sql.ErrNoRows { - return err - } - for _, migration := range migrations { - if _, ok := completed[migration.name]; ok { - - continue - } - - if _, err := db.Exec(migration.stmt); err != nil { - return err - } - if err := insertMigration(db, migration.name); err != nil { - return err - } - - } - return nil -} - -func createTable(db *sql.DB) error { - _, err := db.Exec(migrationTableCreate) - return err -} - -func insertMigration(db *sql.DB, name string) error { - _, err := db.Exec(migrationInsert, name) - return err -} - -func selectCompleted(db *sql.DB) (map[string]struct{}, error) { - migrations := map[string]struct{}{} - rows, err := db.Query(migrationSelect) - if err != nil { - return nil, err - } - defer rows.Close() - for rows.Next() { - var name string - if err := rows.Scan(&name); err != nil { - return nil, err - } - migrations[name] = struct{}{} - } - return migrations, nil -} - -// -// migration table ddl and sql -// - -var migrationTableCreate = ` -CREATE TABLE IF NOT EXISTS migrations ( - name VARCHAR(255) -,UNIQUE(name) -) -` - -var migrationInsert = ` -INSERT INTO migrations (name) VALUES ($1) -` - -var migrationSelect = ` -SELECT name FROM migrations -` - -// -// 001_create_table_user.sql -// - -var createTableUsers = ` -CREATE TABLE IF NOT EXISTS users ( - user_id SERIAL PRIMARY KEY -,user_login VARCHAR(250) -,user_email VARCHAR(500) -,user_admin BOOLEAN -,user_active BOOLEAN -,user_machine BOOLEAN -,user_avatar VARCHAR(2000) -,user_syncing BOOLEAN -,user_synced INTEGER -,user_created INTEGER -,user_updated INTEGER -,user_last_login INTEGER -,user_oauth_token BYTEA -,user_oauth_refresh BYTEA -,user_oauth_expiry INTEGER -,user_hash VARCHAR(500) -,UNIQUE(user_login) -,UNIQUE(user_hash) -); -` - -// -// 002_create_table_repos.sql -// - -var createTableRepos = ` -CREATE TABLE IF NOT EXISTS repos ( - repo_id SERIAL PRIMARY KEY 
-,repo_uid VARCHAR(250) -,repo_user_id INTEGER -,repo_namespace VARCHAR(250) -,repo_name VARCHAR(250) -,repo_slug VARCHAR(250) -,repo_scm VARCHAR(50) -,repo_clone_url VARCHAR(2000) -,repo_ssh_url VARCHAR(2000) -,repo_html_url VARCHAR(2000) -,repo_active BOOLEAN -,repo_private BOOLEAN -,repo_visibility VARCHAR(50) -,repo_branch VARCHAR(250) -,repo_counter INTEGER -,repo_config VARCHAR(500) -,repo_timeout INTEGER -,repo_trusted BOOLEAN -,repo_protected BOOLEAN -,repo_synced INTEGER -,repo_created INTEGER -,repo_updated INTEGER -,repo_version INTEGER -,repo_signer VARCHAR(50) -,repo_secret VARCHAR(50) -,UNIQUE(repo_slug) -,UNIQUE(repo_uid) -); -` - -var alterTableReposAddColumnNoFork = ` -ALTER TABLE repos ADD COLUMN repo_no_forks BOOLEAN NOT NULL DEFAULT false; -` - -var alterTableReposAddColumnNoPulls = ` -ALTER TABLE repos ADD COLUMN repo_no_pulls BOOLEAN NOT NULL DEFAULT false; -` - -var alterTableReposAddColumnCancelPulls = ` -ALTER TABLE repos ADD COLUMN repo_cancel_pulls BOOLEAN NOT NULL DEFAULT false; -` - -var alterTableReposAddColumnCancelPush = ` -ALTER TABLE repos ADD COLUMN repo_cancel_push BOOLEAN NOT NULL DEFAULT false; -` - -var alterTableReposAddColumnThrottle = ` -ALTER TABLE repos ADD COLUMN repo_throttle INTEGER NOT NULL DEFAULT 0; -` - -var alterTableReposAddColumnCancelRunning = ` -ALTER TABLE repos ADD COLUMN repo_cancel_running BOOLEAN NOT NULL DEFAULT false; -` - -// -// 003_create_table_perms.sql -// - -var createTablePerms = ` -CREATE TABLE IF NOT EXISTS perms ( - perm_user_id INTEGER -,perm_repo_uid VARCHAR(250) -,perm_read BOOLEAN -,perm_write BOOLEAN -,perm_admin BOOLEAN -,perm_synced INTEGER -,perm_created INTEGER -,perm_updated INTEGER -,PRIMARY KEY(perm_user_id, perm_repo_uid) -); -` - -var createIndexPermsUser = ` -CREATE INDEX IF NOT EXISTS ix_perms_user ON perms (perm_user_id); -` - -var createIndexPermsRepo = ` -CREATE INDEX IF NOT EXISTS ix_perms_repo ON perms (perm_repo_uid); -` - -// -// 004_create_table_builds.sql -// - -var createTableBuilds = ` -CREATE TABLE IF NOT EXISTS builds ( - build_id SERIAL PRIMARY KEY -,build_repo_id INTEGER -,build_config_id INTEGER -,build_trigger VARCHAR(250) -,build_number INTEGER -,build_parent INTEGER -,build_status VARCHAR(50) -,build_error VARCHAR(500) -,build_event VARCHAR(50) -,build_action VARCHAR(50) -,build_link VARCHAR(2000) -,build_timestamp INTEGER -,build_title VARCHAR(2000) -,build_message VARCHAR(2000) -,build_before VARCHAR(50) -,build_after VARCHAR(50) -,build_ref VARCHAR(500) -,build_source_repo VARCHAR(250) -,build_source VARCHAR(500) -,build_target VARCHAR(500) -,build_author VARCHAR(500) -,build_author_name VARCHAR(500) -,build_author_email VARCHAR(500) -,build_author_avatar VARCHAR(2000) -,build_sender VARCHAR(500) -,build_deploy VARCHAR(500) -,build_params VARCHAR(4000) -,build_started INTEGER -,build_finished INTEGER -,build_created INTEGER -,build_updated INTEGER -,build_version INTEGER -,UNIQUE(build_repo_id, build_number) -); -` - -var createIndexBuildsIncomplete = ` -CREATE INDEX IF NOT EXISTS ix_build_incomplete ON builds (build_status) -WHERE build_status IN ('pending', 'running'); -` - -var createIndexBuildsRepo = ` -CREATE INDEX IF NOT EXISTS ix_build_repo ON builds (build_repo_id); -` - -var createIndexBuildsAuthor = ` -CREATE INDEX IF NOT EXISTS ix_build_author ON builds (build_author); -` - -var createIndexBuildsSender = ` -CREATE INDEX IF NOT EXISTS ix_build_sender ON builds (build_sender); -` - -var createIndexBuildsRef = ` -CREATE INDEX IF NOT EXISTS ix_build_ref ON builds 
(build_repo_id, build_ref); -` - -var alterTableBuildsAddColumnDebug = ` -ALTER TABLE builds ADD COLUMN build_debug BOOLEAN NOT NULL DEFAULT false; -` - -// -// 005_create_table_stages.sql -// - -var createTableStages = ` -CREATE TABLE IF NOT EXISTS stages ( - stage_id SERIAL PRIMARY KEY -,stage_repo_id INTEGER -,stage_build_id INTEGER -,stage_number INTEGER -,stage_name VARCHAR(100) -,stage_kind VARCHAR(50) -,stage_type VARCHAR(50) -,stage_status VARCHAR(50) -,stage_error VARCHAR(500) -,stage_errignore BOOLEAN -,stage_exit_code INTEGER -,stage_limit INTEGER -,stage_os VARCHAR(50) -,stage_arch VARCHAR(50) -,stage_variant VARCHAR(10) -,stage_kernel VARCHAR(50) -,stage_machine VARCHAR(500) -,stage_started INTEGER -,stage_stopped INTEGER -,stage_created INTEGER -,stage_updated INTEGER -,stage_version INTEGER -,stage_on_success BOOLEAN -,stage_on_failure BOOLEAN -,stage_depends_on TEXT -,stage_labels TEXT -,UNIQUE(stage_build_id, stage_number) -); -` - -var createIndexStagesBuild = ` -CREATE INDEX IF NOT EXISTS ix_stages_build ON stages (stage_build_id); -` - -var createIndexStagesStatus = ` -CREATE INDEX IF NOT EXISTS ix_stage_in_progress ON stages (stage_status) -WHERE stage_status IN ('pending', 'running'); -` - -var alterTableStagesAddColumnLimitRepos = ` -ALTER TABLE stages ADD COLUMN stage_limit_repo INTEGER NOT NULL DEFAULT 0; -` - -// -// 006_create_table_steps.sql -// - -var createTableSteps = ` -CREATE TABLE IF NOT EXISTS steps ( - step_id SERIAL PRIMARY KEY -,step_stage_id INTEGER -,step_number INTEGER -,step_name VARCHAR(100) -,step_status VARCHAR(50) -,step_error VARCHAR(500) -,step_errignore BOOLEAN -,step_exit_code INTEGER -,step_started INTEGER -,step_stopped INTEGER -,step_version INTEGER -,UNIQUE(step_stage_id, step_number) -); -` - -var createIndexStepsStage = ` -CREATE INDEX IF NOT EXISTS ix_steps_stage ON steps (step_stage_id); -` - -// -// 007_create_table_logs.sql -// - -var createTableLogs = ` -CREATE TABLE IF NOT EXISTS logs ( - log_id SERIAL PRIMARY KEY -,log_data BYTEA -); -` - -// -// 008_create_table_cron.sql -// - -var createTableCron = ` -CREATE TABLE IF NOT EXISTS cron ( - cron_id SERIAL PRIMARY KEY -,cron_repo_id INTEGER -,cron_name VARCHAR(50) -,cron_expr VARCHAR(50) -,cron_next INTEGER -,cron_prev INTEGER -,cron_event VARCHAR(50) -,cron_branch VARCHAR(250) -,cron_target VARCHAR(250) -,cron_disabled BOOLEAN -,cron_created INTEGER -,cron_updated INTEGER -,cron_version INTEGER -,UNIQUE(cron_repo_id, cron_name) -,FOREIGN KEY(cron_repo_id) REFERENCES repos(repo_id) ON DELETE CASCADE -); -` - -var createIndexCronRepo = ` -CREATE INDEX IF NOT EXISTS ix_cron_repo ON cron (cron_repo_id); -` - -var createIndexCronNext = ` -CREATE INDEX IF NOT EXISTS ix_cron_next ON cron (cron_next); -` - -// -// 009_create_table_secrets.sql -// - -var createTableSecrets = ` -CREATE TABLE IF NOT EXISTS secrets ( - secret_id SERIAL PRIMARY KEY -,secret_repo_id INTEGER -,secret_name VARCHAR(500) -,secret_data BYTEA -,secret_pull_request BOOLEAN -,secret_pull_request_push BOOLEAN -,UNIQUE(secret_repo_id, secret_name) -,FOREIGN KEY(secret_repo_id) REFERENCES repos(repo_id) ON DELETE CASCADE -); -` - -var createIndexSecretsRepo = ` -CREATE INDEX IF NOT EXISTS ix_secret_repo ON secrets (secret_repo_id); -` - -var createIndexSecretsRepoName = ` -CREATE INDEX IF NOT EXISTS ix_secret_repo_name ON secrets (secret_repo_id, secret_name); -` - -// -// 010_create_table_nodes.sql -// - -var createTableNodes = ` -CREATE TABLE IF NOT EXISTS nodes ( - node_id SERIAL PRIMARY KEY -,node_uid VARCHAR(500) 
-,node_provider VARCHAR(50) -,node_state VARCHAR(50) -,node_name VARCHAR(50) -,node_image VARCHAR(500) -,node_region VARCHAR(100) -,node_size VARCHAR(100) -,node_os VARCHAR(50) -,node_arch VARCHAR(50) -,node_kernel VARCHAR(50) -,node_variant VARCHAR(50) -,node_address VARCHAR(500) -,node_capacity INTEGER -,node_filter VARCHAR(2000) -,node_labels VARCHAR(2000) -,node_error VARCHAR(2000) -,node_ca_key BYTEA -,node_ca_cert BYTEA -,node_tls_key BYTEA -,node_tls_cert BYTEA -,node_tls_name VARCHAR(500) -,node_paused BOOLEAN -,node_protected BOOLEAN -,node_created INTEGER -,node_updated INTEGER -,node_pulled INTEGER - -,UNIQUE(node_name) -); -` - -// -// 011_add_column_builds_cron.sql -// - -var alterTableBuildsAddColumnCron = ` -ALTER TABLE builds ADD COLUMN build_cron VARCHAR(50) NOT NULL DEFAULT ''; -` - -// -// 012_create_table_org_secrets.sql -// - -var createTableOrgSecrets = ` -CREATE TABLE IF NOT EXISTS orgsecrets ( - secret_id SERIAL PRIMARY KEY -,secret_namespace VARCHAR(50) -,secret_name VARCHAR(200) -,secret_type VARCHAR(50) -,secret_data BYTEA -,secret_pull_request BOOLEAN -,secret_pull_request_push BOOLEAN -,UNIQUE(secret_namespace, secret_name) -); -` - -// -// 013_add_column_builds_deploy_id.sql -// - -var alterTableBuildsAddColumnDeployId = ` -ALTER TABLE builds ADD COLUMN build_deploy_id INTEGER NOT NULL DEFAULT 0; -` - -// -// 015_create_table_refs.sql -// - -var createTableLatest = ` -CREATE TABLE IF NOT EXISTS latest ( - latest_repo_id INTEGER -,latest_build_id INTEGER -,latest_type VARCHAR(50) -,latest_name VARCHAR(500) -,latest_created INTEGER -,latest_updated INTEGER -,latest_deleted INTEGER -,PRIMARY KEY(latest_repo_id, latest_type, latest_name) -); -` - -var createIndexLatestRepo = ` -CREATE INDEX IF NOT EXISTS ix_latest_repo ON latest (latest_repo_id); -` - -// -// 016_create_template_tables.sql -// - -var createTableTemplate = ` -CREATE TABLE IF NOT EXISTS templates ( - template_id SERIAL PRIMARY KEY - ,template_name TEXT - ,template_namespace VARCHAR(50) - ,template_data BYTEA - ,template_created INTEGER - ,template_updated INTEGER -,UNIQUE(template_name, template_namespace) -); -` - -var createIndexTemplateNamespace = ` -CREATE INDEX IF NOT EXISTS ix_template_namespace ON templates (template_namespace); -` - -// -// 017_add_columns_steps.sql -// - -var alterTableStepsAddColumnStepDependsOn = ` -ALTER TABLE steps ADD COLUMN step_depends_on TEXT NOT NULL DEFAULT ''; -` - -var alterTableStepsAddColumnStepImage = ` -ALTER TABLE steps ADD COLUMN step_image VARCHAR(1000) NOT NULL DEFAULT ''; -` - -var alterTableStepsAddColumnStepDetached = ` -ALTER TABLE steps ADD COLUMN step_detached BOOLEAN NOT NULL DEFAULT FALSE; -` - -// -// 018_create_table_cards.sql -// - -var createTableCards = ` -CREATE TABLE IF NOT EXISTS cards ( - card_id SERIAL PRIMARY KEY - ,card_build INTEGER - ,card_stage INTEGER - ,card_step INTEGER - ,card_schema TEXT - ,card_data TEXT -); -` - -var createIndexCardsCardbuild = ` -CREATE INDEX IF NOT EXISTS ix_cards_build ON cards (card_build); -` - -var createIndexCardsCardstep = ` -CREATE UNIQUE INDEX IF NOT EXISTS ix_cards_step ON cards (card_step); -` - -// -// 019_amend_table_cards.sql -// - -var dropTableCards = ` -DROP TABLE IF EXISTS cards; -` - -var alterTableStepsAddColumnStepschema = ` -ALTER TABLE steps - ADD COLUMN step_schema VARCHAR(2000) NOT NULL DEFAULT ''; -` - -var createNewTableCards = ` -CREATE TABLE IF NOT EXISTS cards -( - card_id SERIAL PRIMARY KEY, - card_data BYTEA, - FOREIGN KEY (card_id) REFERENCES steps (step_id) ON DELETE 
CASCADE -); -` diff --git a/store/shared/migrate/postgres/files/001_create_table_user.sql b/store/shared/migrate/postgres/files/001_create_table_user.sql deleted file mode 100644 index 47432c8dc7..0000000000 --- a/store/shared/migrate/postgres/files/001_create_table_user.sql +++ /dev/null @@ -1,22 +0,0 @@ --- name: create-table-users - -CREATE TABLE IF NOT EXISTS users ( - user_id SERIAL PRIMARY KEY -,user_login VARCHAR(250) -,user_email VARCHAR(500) -,user_admin BOOLEAN -,user_active BOOLEAN -,user_machine BOOLEAN -,user_avatar VARCHAR(2000) -,user_syncing BOOLEAN -,user_synced INTEGER -,user_created INTEGER -,user_updated INTEGER -,user_last_login INTEGER -,user_oauth_token BYTEA -,user_oauth_refresh BYTEA -,user_oauth_expiry INTEGER -,user_hash VARCHAR(500) -,UNIQUE(user_login) -,UNIQUE(user_hash) -); diff --git a/store/shared/migrate/postgres/files/002_create_table_repos.sql b/store/shared/migrate/postgres/files/002_create_table_repos.sql deleted file mode 100644 index 06e0020ba8..0000000000 --- a/store/shared/migrate/postgres/files/002_create_table_repos.sql +++ /dev/null @@ -1,55 +0,0 @@ --- name: create-table-repos - -CREATE TABLE IF NOT EXISTS repos ( - repo_id SERIAL PRIMARY KEY -,repo_uid VARCHAR(250) -,repo_user_id INTEGER -,repo_namespace VARCHAR(250) -,repo_name VARCHAR(250) -,repo_slug VARCHAR(250) -,repo_scm VARCHAR(50) -,repo_clone_url VARCHAR(2000) -,repo_ssh_url VARCHAR(2000) -,repo_html_url VARCHAR(2000) -,repo_active BOOLEAN -,repo_private BOOLEAN -,repo_visibility VARCHAR(50) -,repo_branch VARCHAR(250) -,repo_counter INTEGER -,repo_config VARCHAR(500) -,repo_timeout INTEGER -,repo_trusted BOOLEAN -,repo_protected BOOLEAN -,repo_synced INTEGER -,repo_created INTEGER -,repo_updated INTEGER -,repo_version INTEGER -,repo_signer VARCHAR(50) -,repo_secret VARCHAR(50) -,UNIQUE(repo_slug) -,UNIQUE(repo_uid) -); - --- name: alter-table-repos-add-column-no-fork - -ALTER TABLE repos ADD COLUMN repo_no_forks BOOLEAN NOT NULL DEFAULT false; - --- name: alter-table-repos-add-column-no-pulls - -ALTER TABLE repos ADD COLUMN repo_no_pulls BOOLEAN NOT NULL DEFAULT false; - --- name: alter-table-repos-add-column-cancel-pulls - -ALTER TABLE repos ADD COLUMN repo_cancel_pulls BOOLEAN NOT NULL DEFAULT false; - --- name: alter-table-repos-add-column-cancel-push - -ALTER TABLE repos ADD COLUMN repo_cancel_push BOOLEAN NOT NULL DEFAULT false; - --- name: alter-table-repos-add-column-throttle - -ALTER TABLE repos ADD COLUMN repo_throttle INTEGER NOT NULL DEFAULT 0; - --- name: alter-table-repos-add-column-cancel-running - -ALTER TABLE repos ADD COLUMN repo_cancel_running BOOLEAN NOT NULL DEFAULT false; \ No newline at end of file diff --git a/store/shared/migrate/postgres/files/003_create_table_perms.sql b/store/shared/migrate/postgres/files/003_create_table_perms.sql deleted file mode 100644 index d5c61b3906..0000000000 --- a/store/shared/migrate/postgres/files/003_create_table_perms.sql +++ /dev/null @@ -1,23 +0,0 @@ --- name: create-table-perms - -CREATE TABLE IF NOT EXISTS perms ( - perm_user_id INTEGER -,perm_repo_uid VARCHAR(250) -,perm_read BOOLEAN -,perm_write BOOLEAN -,perm_admin BOOLEAN -,perm_synced INTEGER -,perm_created INTEGER -,perm_updated INTEGER -,PRIMARY KEY(perm_user_id, perm_repo_uid) ---,FOREIGN KEY(perm_user_id) REFERENCES users(user_id) ON DELETE CASCADE ---,FOREIGN KEY(perm_repo_id) REFERENCES repos(repo_id) ON DELETE CASCADE -); - --- name: create-index-perms-user - -CREATE INDEX IF NOT EXISTS ix_perms_user ON perms (perm_user_id); - --- name: create-index-perms-repo - 
-CREATE INDEX IF NOT EXISTS ix_perms_repo ON perms (perm_repo_uid); diff --git a/store/shared/migrate/postgres/files/004_create_table_builds.sql b/store/shared/migrate/postgres/files/004_create_table_builds.sql deleted file mode 100644 index 1f54881cd8..0000000000 --- a/store/shared/migrate/postgres/files/004_create_table_builds.sql +++ /dev/null @@ -1,63 +0,0 @@ --- name: create-table-builds - -CREATE TABLE IF NOT EXISTS builds ( - build_id SERIAL PRIMARY KEY -,build_repo_id INTEGER -,build_config_id INTEGER -,build_trigger VARCHAR(250) -,build_number INTEGER -,build_parent INTEGER -,build_status VARCHAR(50) -,build_error VARCHAR(500) -,build_event VARCHAR(50) -,build_action VARCHAR(50) -,build_link VARCHAR(2000) -,build_timestamp INTEGER -,build_title VARCHAR(2000) -,build_message VARCHAR(2000) -,build_before VARCHAR(50) -,build_after VARCHAR(50) -,build_ref VARCHAR(500) -,build_source_repo VARCHAR(250) -,build_source VARCHAR(500) -,build_target VARCHAR(500) -,build_author VARCHAR(500) -,build_author_name VARCHAR(500) -,build_author_email VARCHAR(500) -,build_author_avatar VARCHAR(2000) -,build_sender VARCHAR(500) -,build_deploy VARCHAR(500) -,build_params VARCHAR(4000) -,build_started INTEGER -,build_finished INTEGER -,build_created INTEGER -,build_updated INTEGER -,build_version INTEGER -,UNIQUE(build_repo_id, build_number) ---,FOREIGN KEY(build_repo_id) REFERENCES repos(repo_id) ON DELETE CASCADE -); - --- name: create-index-builds-incomplete - -CREATE INDEX IF NOT EXISTS ix_build_incomplete ON builds (build_status) -WHERE build_status IN ('pending', 'running'); - --- name: create-index-builds-repo - -CREATE INDEX IF NOT EXISTS ix_build_repo ON builds (build_repo_id); - --- name: create-index-builds-author - -CREATE INDEX IF NOT EXISTS ix_build_author ON builds (build_author); - --- name: create-index-builds-sender - -CREATE INDEX IF NOT EXISTS ix_build_sender ON builds (build_sender); - --- name: create-index-builds-ref - -CREATE INDEX IF NOT EXISTS ix_build_ref ON builds (build_repo_id, build_ref); - --- name: alter-table-builds-add-column-debug - -ALTER TABLE builds ADD COLUMN build_debug BOOLEAN NOT NULL DEFAULT false; diff --git a/store/shared/migrate/postgres/files/005_create_table_stages.sql b/store/shared/migrate/postgres/files/005_create_table_stages.sql deleted file mode 100644 index 32505d3ce2..0000000000 --- a/store/shared/migrate/postgres/files/005_create_table_stages.sql +++ /dev/null @@ -1,44 +0,0 @@ --- name: create-table-stages - -CREATE TABLE IF NOT EXISTS stages ( - stage_id SERIAL PRIMARY KEY -,stage_repo_id INTEGER -,stage_build_id INTEGER -,stage_number INTEGER -,stage_name VARCHAR(100) -,stage_kind VARCHAR(50) -,stage_type VARCHAR(50) -,stage_status VARCHAR(50) -,stage_error VARCHAR(500) -,stage_errignore BOOLEAN -,stage_exit_code INTEGER -,stage_limit INTEGER -,stage_os VARCHAR(50) -,stage_arch VARCHAR(50) -,stage_variant VARCHAR(10) -,stage_kernel VARCHAR(50) -,stage_machine VARCHAR(500) -,stage_started INTEGER -,stage_stopped INTEGER -,stage_created INTEGER -,stage_updated INTEGER -,stage_version INTEGER -,stage_on_success BOOLEAN -,stage_on_failure BOOLEAN -,stage_depends_on TEXT -,stage_labels TEXT -,UNIQUE(stage_build_id, stage_number) -); - --- name: create-index-stages-build - -CREATE INDEX IF NOT EXISTS ix_stages_build ON stages (stage_build_id); - --- name: create-index-stages-status - -CREATE INDEX IF NOT EXISTS ix_stage_in_progress ON stages (stage_status) -WHERE stage_status IN ('pending', 'running'); - --- name: 
alter-table-stages-add-column-limit-repos - -ALTER TABLE stages ADD COLUMN stage_limit_repo INTEGER NOT NULL DEFAULT 0; diff --git a/store/shared/migrate/postgres/files/006_create_table_steps.sql b/store/shared/migrate/postgres/files/006_create_table_steps.sql deleted file mode 100644 index ff9ade1da1..0000000000 --- a/store/shared/migrate/postgres/files/006_create_table_steps.sql +++ /dev/null @@ -1,20 +0,0 @@ --- name: create-table-steps - -CREATE TABLE IF NOT EXISTS steps ( - step_id SERIAL PRIMARY KEY -,step_stage_id INTEGER -,step_number INTEGER -,step_name VARCHAR(100) -,step_status VARCHAR(50) -,step_error VARCHAR(500) -,step_errignore BOOLEAN -,step_exit_code INTEGER -,step_started INTEGER -,step_stopped INTEGER -,step_version INTEGER -,UNIQUE(step_stage_id, step_number) -); - --- name: create-index-steps-stage - -CREATE INDEX IF NOT EXISTS ix_steps_stage ON steps (step_stage_id); diff --git a/store/shared/migrate/postgres/files/007_create_table_logs.sql b/store/shared/migrate/postgres/files/007_create_table_logs.sql deleted file mode 100644 index 25b00c687b..0000000000 --- a/store/shared/migrate/postgres/files/007_create_table_logs.sql +++ /dev/null @@ -1,6 +0,0 @@ --- name: create-table-logs - -CREATE TABLE IF NOT EXISTS logs ( - log_id SERIAL PRIMARY KEY -,log_data BYTEA -); diff --git a/store/shared/migrate/postgres/files/008_create_table_cron.sql b/store/shared/migrate/postgres/files/008_create_table_cron.sql deleted file mode 100644 index b890519269..0000000000 --- a/store/shared/migrate/postgres/files/008_create_table_cron.sql +++ /dev/null @@ -1,27 +0,0 @@ --- name: create-table-cron - -CREATE TABLE IF NOT EXISTS cron ( - cron_id SERIAL PRIMARY KEY -,cron_repo_id INTEGER -,cron_name VARCHAR(50) -,cron_expr VARCHAR(50) -,cron_next INTEGER -,cron_prev INTEGER -,cron_event VARCHAR(50) -,cron_branch VARCHAR(250) -,cron_target VARCHAR(250) -,cron_disabled BOOLEAN -,cron_created INTEGER -,cron_updated INTEGER -,cron_version INTEGER -,UNIQUE(cron_repo_id, cron_name) -,FOREIGN KEY(cron_repo_id) REFERENCES repos(repo_id) ON DELETE CASCADE -); - --- name: create-index-cron-repo - -CREATE INDEX IF NOT EXISTS ix_cron_repo ON cron (cron_repo_id); - --- name: create-index-cron-next - -CREATE INDEX IF NOT EXISTS ix_cron_next ON cron (cron_next); diff --git a/store/shared/migrate/postgres/files/009_create_table_secrets.sql b/store/shared/migrate/postgres/files/009_create_table_secrets.sql deleted file mode 100644 index cafb290d2e..0000000000 --- a/store/shared/migrate/postgres/files/009_create_table_secrets.sql +++ /dev/null @@ -1,20 +0,0 @@ --- name: create-table-secrets - -CREATE TABLE IF NOT EXISTS secrets ( - secret_id SERIAL PRIMARY KEY -,secret_repo_id INTEGER -,secret_name VARCHAR(500) -,secret_data BYTEA -,secret_pull_request BOOLEAN -,secret_pull_request_push BOOLEAN -,UNIQUE(secret_repo_id, secret_name) -,FOREIGN KEY(secret_repo_id) REFERENCES repos(repo_id) ON DELETE CASCADE -); - --- name: create-index-secrets-repo - -CREATE INDEX IF NOT EXISTS ix_secret_repo ON secrets (secret_repo_id); - --- name: create-index-secrets-repo-name - -CREATE INDEX IF NOT EXISTS ix_secret_repo_name ON secrets (secret_repo_id, secret_name); diff --git a/store/shared/migrate/postgres/files/010_create_table_nodes.sql b/store/shared/migrate/postgres/files/010_create_table_nodes.sql deleted file mode 100644 index 1c84c4a915..0000000000 --- a/store/shared/migrate/postgres/files/010_create_table_nodes.sql +++ /dev/null @@ -1,33 +0,0 @@ --- name: create-table-nodes - -CREATE TABLE IF NOT EXISTS nodes ( - 
node_id SERIAL PRIMARY KEY -,node_uid VARCHAR(500) -,node_provider VARCHAR(50) -,node_state VARCHAR(50) -,node_name VARCHAR(50) -,node_image VARCHAR(500) -,node_region VARCHAR(100) -,node_size VARCHAR(100) -,node_os VARCHAR(50) -,node_arch VARCHAR(50) -,node_kernel VARCHAR(50) -,node_variant VARCHAR(50) -,node_address VARCHAR(500) -,node_capacity INTEGER -,node_filter VARCHAR(2000) -,node_labels VARCHAR(2000) -,node_error VARCHAR(2000) -,node_ca_key BYTEA -,node_ca_cert BYTEA -,node_tls_key BYTEA -,node_tls_cert BYTEA -,node_tls_name VARCHAR(500) -,node_paused BOOLEAN -,node_protected BOOLEAN -,node_created INTEGER -,node_updated INTEGER -,node_pulled INTEGER - -,UNIQUE(node_name) -); diff --git a/store/shared/migrate/postgres/files/011_add_column_builds_cron.sql b/store/shared/migrate/postgres/files/011_add_column_builds_cron.sql deleted file mode 100644 index 11d82ac814..0000000000 --- a/store/shared/migrate/postgres/files/011_add_column_builds_cron.sql +++ /dev/null @@ -1,3 +0,0 @@ --- name: alter-table-builds-add-column-cron - -ALTER TABLE builds ADD COLUMN build_cron VARCHAR(50) NOT NULL DEFAULT ''; diff --git a/store/shared/migrate/postgres/files/012_create_table_org_secrets.sql b/store/shared/migrate/postgres/files/012_create_table_org_secrets.sql deleted file mode 100644 index 7541ff069e..0000000000 --- a/store/shared/migrate/postgres/files/012_create_table_org_secrets.sql +++ /dev/null @@ -1,12 +0,0 @@ --- name: create-table-org-secrets - -CREATE TABLE IF NOT EXISTS orgsecrets ( - secret_id SERIAL PRIMARY KEY -,secret_namespace VARCHAR(50) -,secret_name VARCHAR(200) -,secret_type VARCHAR(50) -,secret_data BYTEA -,secret_pull_request BOOLEAN -,secret_pull_request_push BOOLEAN -,UNIQUE(secret_namespace, secret_name) -); diff --git a/store/shared/migrate/postgres/files/013_add_column_builds_deploy_id.sql b/store/shared/migrate/postgres/files/013_add_column_builds_deploy_id.sql deleted file mode 100644 index b09a583256..0000000000 --- a/store/shared/migrate/postgres/files/013_add_column_builds_deploy_id.sql +++ /dev/null @@ -1,3 +0,0 @@ --- name: alter-table-builds-add-column-deploy-id - -ALTER TABLE builds ADD COLUMN build_deploy_id INTEGER NOT NULL DEFAULT 0; diff --git a/store/shared/migrate/postgres/files/015_create_table_refs.sql b/store/shared/migrate/postgres/files/015_create_table_refs.sql deleted file mode 100644 index dcd3ccc183..0000000000 --- a/store/shared/migrate/postgres/files/015_create_table_refs.sql +++ /dev/null @@ -1,16 +0,0 @@ --- name: create-table-latest - -CREATE TABLE IF NOT EXISTS latest ( - latest_repo_id INTEGER -,latest_build_id INTEGER -,latest_type VARCHAR(50) -,latest_name VARCHAR(500) -,latest_created INTEGER -,latest_updated INTEGER -,latest_deleted INTEGER -,PRIMARY KEY(latest_repo_id, latest_type, latest_name) -); - --- name: create-index-latest-repo - -CREATE INDEX IF NOT EXISTS ix_latest_repo ON latest (latest_repo_id); diff --git a/store/shared/migrate/postgres/files/016_create_template_tables.sql b/store/shared/migrate/postgres/files/016_create_template_tables.sql deleted file mode 100644 index c08bb5a21a..0000000000 --- a/store/shared/migrate/postgres/files/016_create_template_tables.sql +++ /dev/null @@ -1,15 +0,0 @@ --- name: create-table-template - -CREATE TABLE IF NOT EXISTS templates ( - template_id SERIAL PRIMARY KEY - ,template_name TEXT - ,template_namespace VARCHAR(50) - ,template_data BYTEA - ,template_created INTEGER - ,template_updated INTEGER -,UNIQUE(template_name, template_namespace) -); - --- name: 
create-index-template-namespace - -CREATE INDEX IF NOT EXISTS ix_template_namespace ON templates (template_namespace); diff --git a/store/shared/migrate/postgres/files/017_add_columns_steps.sql b/store/shared/migrate/postgres/files/017_add_columns_steps.sql deleted file mode 100644 index de8c06006a..0000000000 --- a/store/shared/migrate/postgres/files/017_add_columns_steps.sql +++ /dev/null @@ -1,11 +0,0 @@ --- name: alter-table-steps-add-column-step-depends-on - -ALTER TABLE steps ADD COLUMN step_depends_on TEXT NOT NULL DEFAULT ''; - --- name: alter-table-steps-add-column-step-image - -ALTER TABLE steps ADD COLUMN step_image VARCHAR(1000) NOT NULL DEFAULT ''; - --- name: alter-table-steps-add-column-step-detached - -ALTER TABLE steps ADD COLUMN step_detached BOOLEAN NOT NULL DEFAULT FALSE; diff --git a/store/shared/migrate/postgres/files/018_create_table_cards.sql b/store/shared/migrate/postgres/files/018_create_table_cards.sql deleted file mode 100644 index fd64924ae6..0000000000 --- a/store/shared/migrate/postgres/files/018_create_table_cards.sql +++ /dev/null @@ -1,16 +0,0 @@ --- name: create-table-cards - -CREATE TABLE IF NOT EXISTS cards ( - card_id SERIAL PRIMARY KEY - ,card_build INTEGER - ,card_stage INTEGER - ,card_step INTEGER - ,card_schema TEXT - ,card_data TEXT -); - --- name: create-index-cards-card_build -CREATE INDEX IF NOT EXISTS ix_cards_build ON cards (card_build); - --- name: create-index-cards-card_step -CREATE UNIQUE INDEX IF NOT EXISTS ix_cards_step ON cards (card_step); diff --git a/store/shared/migrate/postgres/files/019_amend_table_cards.sql b/store/shared/migrate/postgres/files/019_amend_table_cards.sql deleted file mode 100644 index b5391fa242..0000000000 --- a/store/shared/migrate/postgres/files/019_amend_table_cards.sql +++ /dev/null @@ -1,16 +0,0 @@ --- name: drop-table-cards - -DROP TABLE IF EXISTS cards; - --- name: alter-table-steps-add-column-step_schema - -ALTER TABLE steps - ADD COLUMN step_schema VARCHAR(2000) NOT NULL DEFAULT ''; - --- name: create-new-table-cards -CREATE TABLE IF NOT EXISTS cards -( - card_id SERIAL PRIMARY KEY, - card_data BYTEA, - FOREIGN KEY (card_id) REFERENCES steps (step_id) ON DELETE CASCADE -); \ No newline at end of file diff --git a/store/shared/migrate/sqlite/ddl.go b/store/shared/migrate/sqlite/ddl.go deleted file mode 100644 index 48b3d8b7da..0000000000 --- a/store/shared/migrate/sqlite/ddl.go +++ /dev/null @@ -1,17 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package sqlite - -//go:generate togo ddl -package sqlite -dialect sqlite3 diff --git a/store/shared/migrate/sqlite/ddl_gen.go b/store/shared/migrate/sqlite/ddl_gen.go deleted file mode 100644 index ea5db1100f..0000000000 --- a/store/shared/migrate/sqlite/ddl_gen.go +++ /dev/null @@ -1,773 +0,0 @@ -package sqlite - -import ( - "database/sql" -) - -var migrations = []struct { - name string - stmt string -}{ - { - name: "create-table-users", - stmt: createTableUsers, - }, - { - name: "create-table-repos", - stmt: createTableRepos, - }, - { - name: "alter-table-repos-add-column-no-fork", - stmt: alterTableReposAddColumnNoFork, - }, - { - name: "alter-table-repos-add-column-no-pulls", - stmt: alterTableReposAddColumnNoPulls, - }, - { - name: "alter-table-repos-add-column-cancel-pulls", - stmt: alterTableReposAddColumnCancelPulls, - }, - { - name: "alter-table-repos-add-column-cancel-push", - stmt: alterTableReposAddColumnCancelPush, - }, - { - name: "alter-table-repos-add-column-throttle", - stmt: alterTableReposAddColumnThrottle, - }, - { - name: "alter-table-repos-add-column-cancel-running", - stmt: alterTableReposAddColumnCancelRunning, - }, - { - name: "create-table-perms", - stmt: createTablePerms, - }, - { - name: "create-index-perms-user", - stmt: createIndexPermsUser, - }, - { - name: "create-index-perms-repo", - stmt: createIndexPermsRepo, - }, - { - name: "create-table-builds", - stmt: createTableBuilds, - }, - { - name: "create-index-builds-repo", - stmt: createIndexBuildsRepo, - }, - { - name: "create-index-builds-author", - stmt: createIndexBuildsAuthor, - }, - { - name: "create-index-builds-sender", - stmt: createIndexBuildsSender, - }, - { - name: "create-index-builds-ref", - stmt: createIndexBuildsRef, - }, - { - name: "create-index-build-incomplete", - stmt: createIndexBuildIncomplete, - }, - { - name: "alter-table-builds-add-column-debug", - stmt: alterTableBuildsAddColumnDebug, - }, - { - name: "create-table-stages", - stmt: createTableStages, - }, - { - name: "create-index-stages-build", - stmt: createIndexStagesBuild, - }, - { - name: "create-index-stages-status", - stmt: createIndexStagesStatus, - }, - { - name: "alter-table-stages-add-column-limit-repos", - stmt: alterTableStagesAddColumnLimitRepos, - }, - { - name: "create-table-steps", - stmt: createTableSteps, - }, - { - name: "create-index-steps-stage", - stmt: createIndexStepsStage, - }, - { - name: "create-table-logs", - stmt: createTableLogs, - }, - { - name: "create-table-cron", - stmt: createTableCron, - }, - { - name: "create-index-cron-repo", - stmt: createIndexCronRepo, - }, - { - name: "create-index-cron-next", - stmt: createIndexCronNext, - }, - { - name: "create-table-secrets", - stmt: createTableSecrets, - }, - { - name: "create-index-secrets-repo", - stmt: createIndexSecretsRepo, - }, - { - name: "create-index-secrets-repo-name", - stmt: createIndexSecretsRepoName, - }, - { - name: "create-table-nodes", - stmt: createTableNodes, - }, - { - name: "alter-table-builds-add-column-cron", - stmt: alterTableBuildsAddColumnCron, - }, - { - name: "create-table-org-secrets", - stmt: createTableOrgSecrets, - }, - { - name: "alter-table-builds-add-column-deploy-id", - stmt: alterTableBuildsAddColumnDeployId, - }, - { - name: "create-table-latest", - stmt: createTableLatest, - }, - { - name: "create-index-latest-repo", - stmt: createIndexLatestRepo, - }, - { - name: "create-table-templates", - stmt: createTableTemplates, - }, - { - name: "create-index-template-namespace", - stmt: createIndexTemplateNamespace, - }, 
- { - name: "alter-table-steps-add-column-step-depends-on", - stmt: alterTableStepsAddColumnStepDependsOn, - }, - { - name: "alter-table-steps-add-column-step-image", - stmt: alterTableStepsAddColumnStepImage, - }, - { - name: "alter-table-steps-add-column-step-detached", - stmt: alterTableStepsAddColumnStepDetached, - }, - { - name: "create-table-cards", - stmt: createTableCards, - }, - { - name: "create-index-cards-card_build", - stmt: createIndexCardsCardbuild, - }, - { - name: "create-index-cards-card_step", - stmt: createIndexCardsCardstep, - }, - { - name: "drop-table-cards", - stmt: dropTableCards, - }, - { - name: "alter-table-steps-add-column-step_schema", - stmt: alterTableStepsAddColumnStepschema, - }, - { - name: "create-new-table-cards", - stmt: createNewTableCards, - }, -} - -// Migrate performs the database migration. If the migration fails -// and error is returned. -func Migrate(db *sql.DB) error { - if err := createTable(db); err != nil { - return err - } - completed, err := selectCompleted(db) - if err != nil && err != sql.ErrNoRows { - return err - } - for _, migration := range migrations { - if _, ok := completed[migration.name]; ok { - - continue - } - - if _, err := db.Exec(migration.stmt); err != nil { - return err - } - if err := insertMigration(db, migration.name); err != nil { - return err - } - - } - return nil -} - -func createTable(db *sql.DB) error { - _, err := db.Exec(migrationTableCreate) - return err -} - -func insertMigration(db *sql.DB, name string) error { - _, err := db.Exec(migrationInsert, name) - return err -} - -func selectCompleted(db *sql.DB) (map[string]struct{}, error) { - migrations := map[string]struct{}{} - rows, err := db.Query(migrationSelect) - if err != nil { - return nil, err - } - defer rows.Close() - for rows.Next() { - var name string - if err := rows.Scan(&name); err != nil { - return nil, err - } - migrations[name] = struct{}{} - } - return migrations, nil -} - -// -// migration table ddl and sql -// - -var migrationTableCreate = ` -CREATE TABLE IF NOT EXISTS migrations ( - name VARCHAR(255) -,UNIQUE(name) -) -` - -var migrationInsert = ` -INSERT INTO migrations (name) VALUES (?) 
-` - -var migrationSelect = ` -SELECT name FROM migrations -` - -// -// 001_create_table_user.sql -// - -var createTableUsers = ` -CREATE TABLE IF NOT EXISTS users ( - user_id INTEGER PRIMARY KEY AUTOINCREMENT -,user_login TEXT COLLATE NOCASE -,user_email TEXT -,user_admin BOOLEAN -,user_machine BOOLEAN -,user_active BOOLEAN -,user_avatar TEXT -,user_syncing BOOLEAN -,user_synced INTEGER -,user_created INTEGER -,user_updated INTEGER -,user_last_login INTEGER -,user_oauth_token TEXT -,user_oauth_refresh TEXT -,user_oauth_expiry INTEGER -,user_hash TEXT -,UNIQUE(user_login COLLATE NOCASE) -,UNIQUE(user_hash) -); -` - -// -// 002_create_table_repos.sql -// - -var createTableRepos = ` -CREATE TABLE IF NOT EXISTS repos ( - repo_id INTEGER PRIMARY KEY AUTOINCREMENT -,repo_uid TEXT -,repo_user_id INTEGER -,repo_namespace TEXT -,repo_name TEXT -,repo_slug TEXT -,repo_scm TEXT -,repo_clone_url TEXT -,repo_ssh_url TEXT -,repo_html_url TEXT -,repo_active BOOLEAN -,repo_private BOOLEAN -,repo_visibility TEXT -,repo_branch TEXT -,repo_counter INTEGER -,repo_config TEXT -,repo_timeout INTEGER -,repo_trusted BOOLEAN -,repo_protected BOOLEAN -,repo_synced INTEGER -,repo_created INTEGER -,repo_updated INTEGER -,repo_version INTEGER -,repo_signer TEXT -,repo_secret TEXT -,UNIQUE(repo_slug) -,UNIQUE(repo_uid) -); -` - -var alterTableReposAddColumnNoFork = ` -ALTER TABLE repos ADD COLUMN repo_no_forks BOOLEAN NOT NULL DEFAULT 0; -` - -var alterTableReposAddColumnNoPulls = ` -ALTER TABLE repos ADD COLUMN repo_no_pulls BOOLEAN NOT NULL DEFAULT 0; -` - -var alterTableReposAddColumnCancelPulls = ` -ALTER TABLE repos ADD COLUMN repo_cancel_pulls BOOLEAN NOT NULL DEFAULT 0; -` - -var alterTableReposAddColumnCancelPush = ` -ALTER TABLE repos ADD COLUMN repo_cancel_push BOOLEAN NOT NULL DEFAULT 0; -` - -var alterTableReposAddColumnThrottle = ` -ALTER TABLE repos ADD COLUMN repo_throttle INTEGER NOT NULL DEFAULT 0; -` - -var alterTableReposAddColumnCancelRunning = ` -ALTER TABLE repos ADD COLUMN repo_cancel_running BOOLEAN NOT NULL DEFAULT 0; -` - -// -// 003_create_table_perms.sql -// - -var createTablePerms = ` -CREATE TABLE IF NOT EXISTS perms ( - perm_user_id INTEGER -,perm_repo_uid TEXT -,perm_read BOOLEAN -,perm_write BOOLEAN -,perm_admin BOOLEAN -,perm_synced INTEGER -,perm_created INTEGER -,perm_updated INTEGER -,PRIMARY KEY(perm_user_id, perm_repo_uid) -); -` - -var createIndexPermsUser = ` -CREATE INDEX IF NOT EXISTS ix_perms_user ON perms (perm_user_id); -` - -var createIndexPermsRepo = ` -CREATE INDEX IF NOT EXISTS ix_perms_repo ON perms (perm_repo_uid); -` - -// -// 004_create_table_builds.sql -// - -var createTableBuilds = ` -CREATE TABLE IF NOT EXISTS builds ( - build_id INTEGER PRIMARY KEY AUTOINCREMENT -,build_repo_id INTEGER -,build_trigger TEXT -,build_number INTEGER -,build_parent INTEGER -,build_status TEXT -,build_error TEXT -,build_event TEXT -,build_action TEXT -,build_link TEXT -,build_timestamp INTEGER -,build_title TEXT -,build_message TEXT -,build_before TEXT -,build_after TEXT -,build_ref TEXT -,build_source_repo TEXT -,build_source TEXT -,build_target TEXT -,build_author TEXT -,build_author_name TEXT -,build_author_email TEXT -,build_author_avatar TEXT -,build_sender TEXT -,build_deploy TEXT -,build_params TEXT -,build_started INTEGER -,build_finished INTEGER -,build_created INTEGER -,build_updated INTEGER -,build_version INTEGER -,UNIQUE(build_repo_id, build_number) -); -` - -var createIndexBuildsRepo = ` -CREATE INDEX IF NOT EXISTS ix_build_repo ON builds (build_repo_id); -` - -var 
createIndexBuildsAuthor = ` -CREATE INDEX IF NOT EXISTS ix_build_author ON builds (build_author); -` - -var createIndexBuildsSender = ` -CREATE INDEX IF NOT EXISTS ix_build_sender ON builds (build_sender); -` - -var createIndexBuildsRef = ` -CREATE INDEX IF NOT EXISTS ix_build_ref ON builds (build_repo_id, build_ref); -` - -var createIndexBuildIncomplete = ` -CREATE INDEX IF NOT EXISTS ix_build_incomplete ON builds (build_status) -WHERE build_status IN ('pending', 'running'); -` - -var alterTableBuildsAddColumnDebug = ` -ALTER TABLE builds ADD COLUMN build_debug BOOLEAN NOT NULL DEFAULT 0; -` - -// -// 005_create_table_stages.sql -// - -var createTableStages = ` -CREATE TABLE IF NOT EXISTS stages ( - stage_id INTEGER PRIMARY KEY AUTOINCREMENT -,stage_repo_id INTEGER -,stage_build_id INTEGER -,stage_number INTEGER -,stage_kind TEXT -,stage_type TEXT -,stage_name TEXT -,stage_status TEXT -,stage_error TEXT -,stage_errignore BOOLEAN -,stage_exit_code INTEGER -,stage_limit INTEGER -,stage_os TEXT -,stage_arch TEXT -,stage_variant TEXT -,stage_kernel TEXT -,stage_machine TEXT -,stage_started INTEGER -,stage_stopped INTEGER -,stage_created INTEGER -,stage_updated INTEGER -,stage_version INTEGER -,stage_on_success BOOLEAN -,stage_on_failure BOOLEAN -,stage_depends_on TEXT -,stage_labels TEXT -,UNIQUE(stage_build_id, stage_number) -,FOREIGN KEY(stage_build_id) REFERENCES builds(build_id) ON DELETE CASCADE -); -` - -var createIndexStagesBuild = ` -CREATE INDEX IF NOT EXISTS ix_stages_build ON stages (stage_build_id); -` - -var createIndexStagesStatus = ` -CREATE INDEX IF NOT EXISTS ix_stage_in_progress ON stages (stage_status) -WHERE stage_status IN ('pending', 'running'); -` - -var alterTableStagesAddColumnLimitRepos = ` -ALTER TABLE stages ADD COLUMN stage_limit_repo INTEGER NOT NULL DEFAULT 0; -` - -// -// 006_create_table_steps.sql -// - -var createTableSteps = ` -CREATE TABLE IF NOT EXISTS steps ( - step_id INTEGER PRIMARY KEY AUTOINCREMENT -,step_stage_id INTEGER -,step_number INTEGER -,step_name TEXT -,step_status TEXT -,step_error TEXT -,step_errignore BOOLEAN -,step_exit_code INTEGER -,step_started INTEGER -,step_stopped INTEGER -,step_version INTEGER -,UNIQUE(step_stage_id, step_number) -,FOREIGN KEY(step_stage_id) REFERENCES stages(stage_id) ON DELETE CASCADE -); -` - -var createIndexStepsStage = ` -CREATE INDEX IF NOT EXISTS ix_steps_stage ON steps (step_stage_id); -` - -// -// 007_create_table_logs.sql -// - -var createTableLogs = ` -CREATE TABLE IF NOT EXISTS logs ( - log_id INTEGER PRIMARY KEY -,log_data BLOB -,FOREIGN KEY(log_id) REFERENCES steps(step_id) ON DELETE CASCADE -); -` - -// -// 008_create_table_cron.sql -// - -var createTableCron = ` -CREATE TABLE IF NOT EXISTS cron ( - cron_id INTEGER PRIMARY KEY AUTOINCREMENT -,cron_repo_id INTEGER -,cron_name TEXT -,cron_expr TEXT -,cron_next INTEGER -,cron_prev INTEGER -,cron_event TEXT -,cron_branch TEXT -,cron_target TEXT -,cron_disabled BOOLEAN -,cron_created INTEGER -,cron_updated INTEGER -,cron_version INTEGER -,UNIQUE(cron_repo_id, cron_name) -,FOREIGN KEY(cron_repo_id) REFERENCES repos(repo_id) ON DELETE CASCADE -); -` - -var createIndexCronRepo = ` -CREATE INDEX IF NOT EXISTS ix_cron_repo ON cron (cron_repo_id); -` - -var createIndexCronNext = ` -CREATE INDEX IF NOT EXISTS ix_cron_next ON cron (cron_next); -` - -// -// 009_create_table_secrets.sql -// - -var createTableSecrets = ` -CREATE TABLE IF NOT EXISTS secrets ( - secret_id INTEGER PRIMARY KEY AUTOINCREMENT -,secret_repo_id INTEGER -,secret_name TEXT -,secret_data BLOB 
-,secret_pull_request BOOLEAN -,secret_pull_request_push BOOLEAN -,UNIQUE(secret_repo_id, secret_name) -,FOREIGN KEY(secret_repo_id) REFERENCES repos(repo_id) ON DELETE CASCADE -); -` - -var createIndexSecretsRepo = ` -CREATE INDEX IF NOT EXISTS ix_secret_repo ON secrets (secret_repo_id); -` - -var createIndexSecretsRepoName = ` -CREATE INDEX IF NOT EXISTS ix_secret_repo_name ON secrets (secret_repo_id, secret_name); -` - -// -// 010_create_table_nodes.sql -// - -var createTableNodes = ` -CREATE TABLE IF NOT EXISTS nodes ( - node_id INTEGER PRIMARY KEY AUTOINCREMENT -,node_uid TEXT -,node_provider TEXT -,node_state TEXT -,node_name TEXT -,node_image TEXT -,node_region TEXT -,node_size TEXT -,node_os TEXT -,node_arch TEXT -,node_kernel TEXT -,node_variant TEXT -,node_address TEXT -,node_capacity INTEGER -,node_filter TEXT -,node_labels TEXT -,node_error TEXT -,node_ca_key TEXT -,node_ca_cert TEXT -,node_tls_key TEXT -,node_tls_cert TEXT -,node_tls_name TEXT -,node_paused BOOLEAN -,node_protected BOOLEAN -,node_created INTEGER -,node_updated INTEGER -,node_pulled INTEGER - -,UNIQUE(node_name) -); -` - -// -// 011_add_column_builds_cron.sql -// - -var alterTableBuildsAddColumnCron = ` -ALTER TABLE builds ADD COLUMN build_cron TEXT NOT NULL DEFAULT ''; -` - -// -// 012_create_table_org_secrets.sql -// - -var createTableOrgSecrets = ` -CREATE TABLE IF NOT EXISTS orgsecrets ( - secret_id INTEGER PRIMARY KEY AUTOINCREMENT -,secret_namespace TEXT COLLATE NOCASE -,secret_name TEXT COLLATE NOCASE -,secret_type TEXT -,secret_data BLOB -,secret_pull_request BOOLEAN -,secret_pull_request_push BOOLEAN -,UNIQUE(secret_namespace, secret_name) -); -` - -// -// 013_add_column_builds_deploy_id.sql -// - -var alterTableBuildsAddColumnDeployId = ` -ALTER TABLE builds ADD COLUMN build_deploy_id NUMBER NOT NULL DEFAULT 0; -` - -// -// 014_create_table_refs.sql -// - -var createTableLatest = ` -CREATE TABLE IF NOT EXISTS latest ( - latest_repo_id INTEGER -,latest_build_id INTEGER -,latest_type TEXT -- branch | tag | pull_request | promote -,latest_name TEXT -- master | v1.0.0, | 42 | production -,latest_created INTEGER -,latest_updated INTEGER -,latest_deleted INTEGER -,PRIMARY KEY(latest_repo_id, latest_type, latest_name) -); -` - -var createIndexLatestRepo = ` -CREATE INDEX IF NOT EXISTS ix_latest_repo ON latest (latest_repo_id); -` - -// -// 015_create_template_tables.sql -// - -var createTableTemplates = ` -CREATE TABLE IF NOT EXISTS templates ( - template_id INTEGER PRIMARY KEY AUTOINCREMENT - ,template_name TEXT - ,template_namespace TEXT COLLATE NOCASE - ,template_data BLOB - ,template_created INTEGER - ,template_updated INTEGER - ,UNIQUE(template_name COLLATE NOCASE, template_namespace COLLATE NOCASE) -); -` - -var createIndexTemplateNamespace = ` -CREATE INDEX IF NOT EXISTS ix_template_namespace ON templates (template_namespace); -` - -// -// 016_add_columns_steps.sql -// - -var alterTableStepsAddColumnStepDependsOn = ` -ALTER TABLE steps ADD COLUMN step_depends_on TEXT NOT NULL DEFAULT ''; -` - -var alterTableStepsAddColumnStepImage = ` -ALTER TABLE steps ADD COLUMN step_image TEXT NOT NULL DEFAULT ''; -` - -var alterTableStepsAddColumnStepDetached = ` -ALTER TABLE steps ADD COLUMN step_detached BOOLEAN NOT NULL DEFAULT FALSE; -` - -// -// 017_create_table_cards.sql -// - -var createTableCards = ` -CREATE TABLE IF NOT EXISTS cards ( - card_id INTEGER PRIMARY KEY AUTOINCREMENT - ,card_build INTEGER - ,card_stage INTEGER - ,card_step INTEGER - ,card_schema TEXT - ,card_data TEXT -); -` - -var 
createIndexCardsCardbuild = ` -CREATE INDEX IF NOT EXISTS ix_cards_build ON cards (card_build); -` - -var createIndexCardsCardstep = ` -CREATE UNIQUE INDEX IF NOT EXISTS ix_cards_step ON cards (card_step); -` - -// -// 018_amend_table_cards.sql -// - -var dropTableCards = ` -DROP TABLE IF EXISTS cards; -` - -var alterTableStepsAddColumnStepschema = ` -ALTER TABLE steps - ADD COLUMN step_schema TEXT NOT NULL DEFAULT ''; -` - -var createNewTableCards = ` -CREATE TABLE IF NOT EXISTS cards -( - card_id INTEGER PRIMARY KEY, - card_data BLOB, - FOREIGN KEY (card_id) REFERENCES steps (step_id) ON DELETE CASCADE -); -` diff --git a/store/shared/migrate/sqlite/files/001_create_table_user.sql b/store/shared/migrate/sqlite/files/001_create_table_user.sql deleted file mode 100644 index 3e09b406fa..0000000000 --- a/store/shared/migrate/sqlite/files/001_create_table_user.sql +++ /dev/null @@ -1,22 +0,0 @@ --- name: create-table-users - -CREATE TABLE IF NOT EXISTS users ( - user_id INTEGER PRIMARY KEY AUTOINCREMENT -,user_login TEXT COLLATE NOCASE -,user_email TEXT -,user_admin BOOLEAN -,user_machine BOOLEAN -,user_active BOOLEAN -,user_avatar TEXT -,user_syncing BOOLEAN -,user_synced INTEGER -,user_created INTEGER -,user_updated INTEGER -,user_last_login INTEGER -,user_oauth_token TEXT -,user_oauth_refresh TEXT -,user_oauth_expiry INTEGER -,user_hash TEXT -,UNIQUE(user_login COLLATE NOCASE) -,UNIQUE(user_hash) -); diff --git a/store/shared/migrate/sqlite/files/002_create_table_repos.sql b/store/shared/migrate/sqlite/files/002_create_table_repos.sql deleted file mode 100644 index e17d2e400e..0000000000 --- a/store/shared/migrate/sqlite/files/002_create_table_repos.sql +++ /dev/null @@ -1,55 +0,0 @@ --- name: create-table-repos - -CREATE TABLE IF NOT EXISTS repos ( - repo_id INTEGER PRIMARY KEY AUTOINCREMENT -,repo_uid TEXT -,repo_user_id INTEGER -,repo_namespace TEXT -,repo_name TEXT -,repo_slug TEXT -,repo_scm TEXT -,repo_clone_url TEXT -,repo_ssh_url TEXT -,repo_html_url TEXT -,repo_active BOOLEAN -,repo_private BOOLEAN -,repo_visibility TEXT -,repo_branch TEXT -,repo_counter INTEGER -,repo_config TEXT -,repo_timeout INTEGER -,repo_trusted BOOLEAN -,repo_protected BOOLEAN -,repo_synced INTEGER -,repo_created INTEGER -,repo_updated INTEGER -,repo_version INTEGER -,repo_signer TEXT -,repo_secret TEXT -,UNIQUE(repo_slug) -,UNIQUE(repo_uid) -); - --- name: alter-table-repos-add-column-no-fork - -ALTER TABLE repos ADD COLUMN repo_no_forks BOOLEAN NOT NULL DEFAULT 0; - --- name: alter-table-repos-add-column-no-pulls - -ALTER TABLE repos ADD COLUMN repo_no_pulls BOOLEAN NOT NULL DEFAULT 0; - --- name: alter-table-repos-add-column-cancel-pulls - -ALTER TABLE repos ADD COLUMN repo_cancel_pulls BOOLEAN NOT NULL DEFAULT 0; - --- name: alter-table-repos-add-column-cancel-push - -ALTER TABLE repos ADD COLUMN repo_cancel_push BOOLEAN NOT NULL DEFAULT 0; - --- name: alter-table-repos-add-column-throttle - -ALTER TABLE repos ADD COLUMN repo_throttle INTEGER NOT NULL DEFAULT 0; - --- name: alter-table-repos-add-column-cancel-running - -ALTER TABLE repos ADD COLUMN repo_cancel_running BOOLEAN NOT NULL DEFAULT 0; \ No newline at end of file diff --git a/store/shared/migrate/sqlite/files/003_create_table_perms.sql b/store/shared/migrate/sqlite/files/003_create_table_perms.sql deleted file mode 100644 index f3f2557f6a..0000000000 --- a/store/shared/migrate/sqlite/files/003_create_table_perms.sql +++ /dev/null @@ -1,23 +0,0 @@ --- name: create-table-perms - -CREATE TABLE IF NOT EXISTS perms ( - perm_user_id INTEGER 
-,perm_repo_uid TEXT -,perm_read BOOLEAN -,perm_write BOOLEAN -,perm_admin BOOLEAN -,perm_synced INTEGER -,perm_created INTEGER -,perm_updated INTEGER -,PRIMARY KEY(perm_user_id, perm_repo_uid) ---,FOREIGN KEY(perm_user_id) REFERENCES users(user_id) ON DELETE CASCADE ---,FOREIGN KEY(perm_repo_id) REFERENCES repos(repo_id) ON DELETE CASCADE -); - --- name: create-index-perms-user - -CREATE INDEX IF NOT EXISTS ix_perms_user ON perms (perm_user_id); - --- name: create-index-perms-repo - -CREATE INDEX IF NOT EXISTS ix_perms_repo ON perms (perm_repo_uid); diff --git a/store/shared/migrate/sqlite/files/004_create_table_builds.sql b/store/shared/migrate/sqlite/files/004_create_table_builds.sql deleted file mode 100644 index 9577025add..0000000000 --- a/store/shared/migrate/sqlite/files/004_create_table_builds.sql +++ /dev/null @@ -1,62 +0,0 @@ --- name: create-table-builds - -CREATE TABLE IF NOT EXISTS builds ( - build_id INTEGER PRIMARY KEY AUTOINCREMENT -,build_repo_id INTEGER -,build_trigger TEXT -,build_number INTEGER -,build_parent INTEGER -,build_status TEXT -,build_error TEXT -,build_event TEXT -,build_action TEXT -,build_link TEXT -,build_timestamp INTEGER -,build_title TEXT -,build_message TEXT -,build_before TEXT -,build_after TEXT -,build_ref TEXT -,build_source_repo TEXT -,build_source TEXT -,build_target TEXT -,build_author TEXT -,build_author_name TEXT -,build_author_email TEXT -,build_author_avatar TEXT -,build_sender TEXT -,build_deploy TEXT -,build_params TEXT -,build_started INTEGER -,build_finished INTEGER -,build_created INTEGER -,build_updated INTEGER -,build_version INTEGER -,UNIQUE(build_repo_id, build_number) ---,FOREIGN KEY(build_repo_id) REFERENCES repos(repo_id) ON DELETE CASCADE -); - --- name: create-index-builds-repo - -CREATE INDEX IF NOT EXISTS ix_build_repo ON builds (build_repo_id); - --- name: create-index-builds-author - -CREATE INDEX IF NOT EXISTS ix_build_author ON builds (build_author); - --- name: create-index-builds-sender - -CREATE INDEX IF NOT EXISTS ix_build_sender ON builds (build_sender); - --- name: create-index-builds-ref - -CREATE INDEX IF NOT EXISTS ix_build_ref ON builds (build_repo_id, build_ref); - --- name: create-index-build-incomplete - -CREATE INDEX IF NOT EXISTS ix_build_incomplete ON builds (build_status) -WHERE build_status IN ('pending', 'running'); - --- name: alter-table-builds-add-column-debug - -ALTER TABLE builds ADD COLUMN build_debug BOOLEAN NOT NULL DEFAULT 0; diff --git a/store/shared/migrate/sqlite/files/005_create_table_stages.sql b/store/shared/migrate/sqlite/files/005_create_table_stages.sql deleted file mode 100644 index 2c5d1b8199..0000000000 --- a/store/shared/migrate/sqlite/files/005_create_table_stages.sql +++ /dev/null @@ -1,45 +0,0 @@ --- name: create-table-stages - -CREATE TABLE IF NOT EXISTS stages ( - stage_id INTEGER PRIMARY KEY AUTOINCREMENT -,stage_repo_id INTEGER -,stage_build_id INTEGER -,stage_number INTEGER -,stage_kind TEXT -,stage_type TEXT -,stage_name TEXT -,stage_status TEXT -,stage_error TEXT -,stage_errignore BOOLEAN -,stage_exit_code INTEGER -,stage_limit INTEGER -,stage_os TEXT -,stage_arch TEXT -,stage_variant TEXT -,stage_kernel TEXT -,stage_machine TEXT -,stage_started INTEGER -,stage_stopped INTEGER -,stage_created INTEGER -,stage_updated INTEGER -,stage_version INTEGER -,stage_on_success BOOLEAN -,stage_on_failure BOOLEAN -,stage_depends_on TEXT -,stage_labels TEXT -,UNIQUE(stage_build_id, stage_number) -,FOREIGN KEY(stage_build_id) REFERENCES builds(build_id) ON DELETE CASCADE -); - --- name: 
create-index-stages-build - -CREATE INDEX IF NOT EXISTS ix_stages_build ON stages (stage_build_id); - --- name: create-index-stages-status - -CREATE INDEX IF NOT EXISTS ix_stage_in_progress ON stages (stage_status) -WHERE stage_status IN ('pending', 'running'); - --- name: alter-table-stages-add-column-limit-repos - -ALTER TABLE stages ADD COLUMN stage_limit_repo INTEGER NOT NULL DEFAULT 0; diff --git a/store/shared/migrate/sqlite/files/006_create_table_steps.sql b/store/shared/migrate/sqlite/files/006_create_table_steps.sql deleted file mode 100644 index 3f5c757e12..0000000000 --- a/store/shared/migrate/sqlite/files/006_create_table_steps.sql +++ /dev/null @@ -1,21 +0,0 @@ --- name: create-table-steps - -CREATE TABLE IF NOT EXISTS steps ( - step_id INTEGER PRIMARY KEY AUTOINCREMENT -,step_stage_id INTEGER -,step_number INTEGER -,step_name TEXT -,step_status TEXT -,step_error TEXT -,step_errignore BOOLEAN -,step_exit_code INTEGER -,step_started INTEGER -,step_stopped INTEGER -,step_version INTEGER -,UNIQUE(step_stage_id, step_number) -,FOREIGN KEY(step_stage_id) REFERENCES stages(stage_id) ON DELETE CASCADE -); - --- name: create-index-steps-stage - -CREATE INDEX IF NOT EXISTS ix_steps_stage ON steps (step_stage_id); diff --git a/store/shared/migrate/sqlite/files/007_create_table_logs.sql b/store/shared/migrate/sqlite/files/007_create_table_logs.sql deleted file mode 100644 index 4c04d9fe68..0000000000 --- a/store/shared/migrate/sqlite/files/007_create_table_logs.sql +++ /dev/null @@ -1,7 +0,0 @@ --- name: create-table-logs - -CREATE TABLE IF NOT EXISTS logs ( - log_id INTEGER PRIMARY KEY -,log_data BLOB -,FOREIGN KEY(log_id) REFERENCES steps(step_id) ON DELETE CASCADE -); diff --git a/store/shared/migrate/sqlite/files/008_create_table_cron.sql b/store/shared/migrate/sqlite/files/008_create_table_cron.sql deleted file mode 100644 index 610a161811..0000000000 --- a/store/shared/migrate/sqlite/files/008_create_table_cron.sql +++ /dev/null @@ -1,27 +0,0 @@ --- name: create-table-cron - -CREATE TABLE IF NOT EXISTS cron ( - cron_id INTEGER PRIMARY KEY AUTOINCREMENT -,cron_repo_id INTEGER -,cron_name TEXT -,cron_expr TEXT -,cron_next INTEGER -,cron_prev INTEGER -,cron_event TEXT -,cron_branch TEXT -,cron_target TEXT -,cron_disabled BOOLEAN -,cron_created INTEGER -,cron_updated INTEGER -,cron_version INTEGER -,UNIQUE(cron_repo_id, cron_name) -,FOREIGN KEY(cron_repo_id) REFERENCES repos(repo_id) ON DELETE CASCADE -); - --- name: create-index-cron-repo - -CREATE INDEX IF NOT EXISTS ix_cron_repo ON cron (cron_repo_id); - --- name: create-index-cron-next - -CREATE INDEX IF NOT EXISTS ix_cron_next ON cron (cron_next); diff --git a/store/shared/migrate/sqlite/files/009_create_table_secrets.sql b/store/shared/migrate/sqlite/files/009_create_table_secrets.sql deleted file mode 100644 index 3b3bd547d9..0000000000 --- a/store/shared/migrate/sqlite/files/009_create_table_secrets.sql +++ /dev/null @@ -1,20 +0,0 @@ --- name: create-table-secrets - -CREATE TABLE IF NOT EXISTS secrets ( - secret_id INTEGER PRIMARY KEY AUTOINCREMENT -,secret_repo_id INTEGER -,secret_name TEXT -,secret_data BLOB -,secret_pull_request BOOLEAN -,secret_pull_request_push BOOLEAN -,UNIQUE(secret_repo_id, secret_name) -,FOREIGN KEY(secret_repo_id) REFERENCES repos(repo_id) ON DELETE CASCADE -); - --- name: create-index-secrets-repo - -CREATE INDEX IF NOT EXISTS ix_secret_repo ON secrets (secret_repo_id); - --- name: create-index-secrets-repo-name - -CREATE INDEX IF NOT EXISTS ix_secret_repo_name ON secrets (secret_repo_id, 
secret_name); diff --git a/store/shared/migrate/sqlite/files/010_create_table_nodes.sql b/store/shared/migrate/sqlite/files/010_create_table_nodes.sql deleted file mode 100644 index dcf919841e..0000000000 --- a/store/shared/migrate/sqlite/files/010_create_table_nodes.sql +++ /dev/null @@ -1,33 +0,0 @@ --- name: create-table-nodes - -CREATE TABLE IF NOT EXISTS nodes ( - node_id INTEGER PRIMARY KEY AUTOINCREMENT -,node_uid TEXT -,node_provider TEXT -,node_state TEXT -,node_name TEXT -,node_image TEXT -,node_region TEXT -,node_size TEXT -,node_os TEXT -,node_arch TEXT -,node_kernel TEXT -,node_variant TEXT -,node_address TEXT -,node_capacity INTEGER -,node_filter TEXT -,node_labels TEXT -,node_error TEXT -,node_ca_key TEXT -,node_ca_cert TEXT -,node_tls_key TEXT -,node_tls_cert TEXT -,node_tls_name TEXT -,node_paused BOOLEAN -,node_protected BOOLEAN -,node_created INTEGER -,node_updated INTEGER -,node_pulled INTEGER - -,UNIQUE(node_name) -); diff --git a/store/shared/migrate/sqlite/files/011_add_column_builds_cron.sql b/store/shared/migrate/sqlite/files/011_add_column_builds_cron.sql deleted file mode 100644 index ac9ffe6f0b..0000000000 --- a/store/shared/migrate/sqlite/files/011_add_column_builds_cron.sql +++ /dev/null @@ -1,3 +0,0 @@ --- name: alter-table-builds-add-column-cron - -ALTER TABLE builds ADD COLUMN build_cron TEXT NOT NULL DEFAULT ''; diff --git a/store/shared/migrate/sqlite/files/012_create_table_org_secrets.sql b/store/shared/migrate/sqlite/files/012_create_table_org_secrets.sql deleted file mode 100644 index 42808515cc..0000000000 --- a/store/shared/migrate/sqlite/files/012_create_table_org_secrets.sql +++ /dev/null @@ -1,12 +0,0 @@ --- name: create-table-org-secrets - -CREATE TABLE IF NOT EXISTS orgsecrets ( - secret_id INTEGER PRIMARY KEY AUTOINCREMENT -,secret_namespace TEXT COLLATE NOCASE -,secret_name TEXT COLLATE NOCASE -,secret_type TEXT -,secret_data BLOB -,secret_pull_request BOOLEAN -,secret_pull_request_push BOOLEAN -,UNIQUE(secret_namespace, secret_name) -); diff --git a/store/shared/migrate/sqlite/files/013_add_column_builds_deploy_id.sql b/store/shared/migrate/sqlite/files/013_add_column_builds_deploy_id.sql deleted file mode 100644 index 918c39b0fe..0000000000 --- a/store/shared/migrate/sqlite/files/013_add_column_builds_deploy_id.sql +++ /dev/null @@ -1,3 +0,0 @@ --- name: alter-table-builds-add-column-deploy-id - -ALTER TABLE builds ADD COLUMN build_deploy_id NUMBER NOT NULL DEFAULT 0; diff --git a/store/shared/migrate/sqlite/files/014_create_table_refs.sql b/store/shared/migrate/sqlite/files/014_create_table_refs.sql deleted file mode 100644 index 7a9f197a13..0000000000 --- a/store/shared/migrate/sqlite/files/014_create_table_refs.sql +++ /dev/null @@ -1,16 +0,0 @@ --- name: create-table-latest - -CREATE TABLE IF NOT EXISTS latest ( - latest_repo_id INTEGER -,latest_build_id INTEGER -,latest_type TEXT -- branch | tag | pull_request | promote -,latest_name TEXT -- master | v1.0.0, | 42 | production -,latest_created INTEGER -,latest_updated INTEGER -,latest_deleted INTEGER -,PRIMARY KEY(latest_repo_id, latest_type, latest_name) -); - --- name: create-index-latest-repo - -CREATE INDEX IF NOT EXISTS ix_latest_repo ON latest (latest_repo_id); diff --git a/store/shared/migrate/sqlite/files/015_create_template_tables.sql b/store/shared/migrate/sqlite/files/015_create_template_tables.sql deleted file mode 100644 index bb73f96030..0000000000 --- a/store/shared/migrate/sqlite/files/015_create_template_tables.sql +++ /dev/null @@ -1,15 +0,0 @@ --- name: 
create-table-templates - -CREATE TABLE IF NOT EXISTS templates ( - template_id INTEGER PRIMARY KEY AUTOINCREMENT - ,template_name TEXT - ,template_namespace TEXT COLLATE NOCASE - ,template_data BLOB - ,template_created INTEGER - ,template_updated INTEGER - ,UNIQUE(template_name COLLATE NOCASE, template_namespace COLLATE NOCASE) -); - --- name: create-index-template-namespace - -CREATE INDEX IF NOT EXISTS ix_template_namespace ON templates (template_namespace); diff --git a/store/shared/migrate/sqlite/files/016_add_columns_steps.sql b/store/shared/migrate/sqlite/files/016_add_columns_steps.sql deleted file mode 100644 index 7e01996d90..0000000000 --- a/store/shared/migrate/sqlite/files/016_add_columns_steps.sql +++ /dev/null @@ -1,11 +0,0 @@ --- name: alter-table-steps-add-column-step-depends-on - -ALTER TABLE steps ADD COLUMN step_depends_on TEXT NOT NULL DEFAULT ''; - --- name: alter-table-steps-add-column-step-image - -ALTER TABLE steps ADD COLUMN step_image TEXT NOT NULL DEFAULT ''; - --- name: alter-table-steps-add-column-step-detached - -ALTER TABLE steps ADD COLUMN step_detached BOOLEAN NOT NULL DEFAULT FALSE; diff --git a/store/shared/migrate/sqlite/files/017_create_table_cards.sql b/store/shared/migrate/sqlite/files/017_create_table_cards.sql deleted file mode 100644 index 2732b29b57..0000000000 --- a/store/shared/migrate/sqlite/files/017_create_table_cards.sql +++ /dev/null @@ -1,16 +0,0 @@ --- name: create-table-cards - -CREATE TABLE IF NOT EXISTS cards ( - card_id INTEGER PRIMARY KEY AUTOINCREMENT - ,card_build INTEGER - ,card_stage INTEGER - ,card_step INTEGER - ,card_schema TEXT - ,card_data TEXT -); - --- name: create-index-cards-card_build -CREATE INDEX IF NOT EXISTS ix_cards_build ON cards (card_build); - --- name: create-index-cards-card_step -CREATE UNIQUE INDEX IF NOT EXISTS ix_cards_step ON cards (card_step); diff --git a/store/shared/migrate/sqlite/files/018_amend_table_cards.sql b/store/shared/migrate/sqlite/files/018_amend_table_cards.sql deleted file mode 100644 index 34ba75237c..0000000000 --- a/store/shared/migrate/sqlite/files/018_amend_table_cards.sql +++ /dev/null @@ -1,16 +0,0 @@ --- name: drop-table-cards - -DROP TABLE IF EXISTS cards; - --- name: alter-table-steps-add-column-step_schema - -ALTER TABLE steps - ADD COLUMN step_schema TEXT NOT NULL DEFAULT ''; - --- name: create-new-table-cards -CREATE TABLE IF NOT EXISTS cards -( - card_id INTEGER PRIMARY KEY, - card_data BLOB, - FOREIGN KEY (card_id) REFERENCES steps (step_id) ON DELETE CASCADE -); \ No newline at end of file diff --git a/store/stage/scan.go b/store/stage/scan.go deleted file mode 100644 index 1d016da78d..0000000000 --- a/store/stage/scan.go +++ /dev/null @@ -1,206 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
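Every SQLite migration removed above follows the same convention: each DDL statement is preceded by a "-- name: ..." comment that labels it (create-table-builds, create-index-steps-stage, and so on). The loader that consumes these labels lives under store/shared/migrate and is not part of this hunk, so the following Go sketch is only an illustration of how such named sections could be split out of a file; parseNamedStatements and its behavior are assumptions, not the project's actual code.

package main

import (
	"fmt"
	"strings"
)

// parseNamedStatements splits migration file contents of the form
//
//	-- name: create-table-foo
//	CREATE TABLE foo (...);
//
// into a map keyed by the statement name. Hypothetical helper for
// illustration only; the real loader is in store/shared/migrate.
func parseNamedStatements(contents string) map[string]string {
	out := map[string]string{}
	var name string
	var buf strings.Builder
	for _, line := range strings.Split(contents, "\n") {
		if strings.HasPrefix(line, "-- name:") {
			if name != "" {
				out[name] = strings.TrimSpace(buf.String())
			}
			name = strings.TrimSpace(strings.TrimPrefix(line, "-- name:"))
			buf.Reset()
			continue
		}
		buf.WriteString(line)
		buf.WriteString("\n")
	}
	if name != "" {
		out[name] = strings.TrimSpace(buf.String())
	}
	return out
}

func main() {
	stmts := parseNamedStatements("-- name: create-table-logs\nCREATE TABLE logs (log_id INTEGER PRIMARY KEY);")
	fmt.Println(stmts["create-table-logs"])
}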
- -package stage - -import ( - "database/sql" - "encoding/json" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/shared/db" - - "github.com/jmoiron/sqlx/types" -) - -// helper function converts the Stage structure to a set -// of named query parameters. -func toParams(stage *core.Stage) map[string]interface{} { - return map[string]interface{}{ - "stage_id": stage.ID, - "stage_repo_id": stage.RepoID, - "stage_build_id": stage.BuildID, - "stage_number": stage.Number, - "stage_name": stage.Name, - "stage_kind": stage.Kind, - "stage_type": stage.Type, - "stage_status": stage.Status, - "stage_error": stage.Error, - "stage_errignore": stage.ErrIgnore, - "stage_exit_code": stage.ExitCode, - "stage_limit": stage.Limit, - "stage_limit_repo": stage.LimitRepo, - "stage_os": stage.OS, - "stage_arch": stage.Arch, - "stage_variant": stage.Variant, - "stage_kernel": stage.Kernel, - "stage_machine": stage.Machine, - "stage_started": stage.Started, - "stage_stopped": stage.Stopped, - "stage_created": stage.Created, - "stage_updated": stage.Updated, - "stage_version": stage.Version, - "stage_on_success": stage.OnSuccess, - "stage_on_failure": stage.OnFailure, - "stage_depends_on": encodeSlice(stage.DependsOn), - "stage_labels": encodeParams(stage.Labels), - } -} - -func encodeSlice(v []string) types.JSONText { - raw, _ := json.Marshal(v) - return types.JSONText(raw) -} - -func encodeParams(v map[string]string) types.JSONText { - raw, _ := json.Marshal(v) - return types.JSONText(raw) -} - -// helper function scans the sql.Row and copies the column -// values to the destination object. -func scanRow(scanner db.Scanner, dest *core.Stage) error { - depJSON := types.JSONText{} - labJSON := types.JSONText{} - err := scanner.Scan( - &dest.ID, - &dest.RepoID, - &dest.BuildID, - &dest.Number, - &dest.Name, - &dest.Kind, - &dest.Type, - &dest.Status, - &dest.Error, - &dest.ErrIgnore, - &dest.ExitCode, - &dest.Limit, - &dest.LimitRepo, - &dest.OS, - &dest.Arch, - &dest.Variant, - &dest.Kernel, - &dest.Machine, - &dest.Started, - &dest.Stopped, - &dest.Created, - &dest.Updated, - &dest.Version, - &dest.OnSuccess, - &dest.OnFailure, - &depJSON, - &labJSON, - ) - json.Unmarshal(depJSON, &dest.DependsOn) - json.Unmarshal(labJSON, &dest.Labels) - return err -} - -// helper function scans the sql.Row and copies the column -// values to the destination object. -func scanRowStep(scanner db.Scanner, stage *core.Stage, step *nullStep) error { - depJSON := types.JSONText{} - labJSON := types.JSONText{} - stepDepJSON := types.JSONText{} - err := scanner.Scan( - &stage.ID, - &stage.RepoID, - &stage.BuildID, - &stage.Number, - &stage.Name, - &stage.Kind, - &stage.Type, - &stage.Status, - &stage.Error, - &stage.ErrIgnore, - &stage.ExitCode, - &stage.Limit, - &stage.LimitRepo, - &stage.OS, - &stage.Arch, - &stage.Variant, - &stage.Kernel, - &stage.Machine, - &stage.Started, - &stage.Stopped, - &stage.Created, - &stage.Updated, - &stage.Version, - &stage.OnSuccess, - &stage.OnFailure, - &depJSON, - &labJSON, - &step.ID, - &step.StageID, - &step.Number, - &step.Name, - &step.Status, - &step.Error, - &step.ErrIgnore, - &step.ExitCode, - &step.Started, - &step.Stopped, - &step.Version, - &stepDepJSON, - &step.Image, - &step.Detached, - &step.Schema, - ) - json.Unmarshal(depJSON, &stage.DependsOn) - json.Unmarshal(labJSON, &stage.Labels) - json.Unmarshal(stepDepJSON, &step.DependsOn) - return err -} - -// helper function scans the sql.Row and copies the column -// values to the destination object. 
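The stage_depends_on and stage_labels columns are plain TEXT in the schema above; the store flattens []string and map[string]string values into JSON on write (encodeSlice and encodeParams) and unmarshals the raw column bytes again in scanRow and scanRowStep. A minimal, self-contained sketch of that round trip using only encoding/json is shown below; the real code additionally wraps the marshaled bytes in sqlx's types.JSONText before binding, but the idea is the same.

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Write path: collapse structured fields into JSON text columns.
	dependsOn, _ := json.Marshal([]string{"clone", "build"})
	labels, _ := json.Marshal(map[string]string{"instance": "linux-amd64"})
	fmt.Println(string(dependsOn), string(labels)) // stored as stage_depends_on / stage_labels

	// Read path: scan the raw column bytes back into Go values.
	var dep []string
	var lab map[string]string
	json.Unmarshal(dependsOn, &dep)
	json.Unmarshal(labels, &lab)
	fmt.Println(dep, lab)
}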
-func scanRows(rows *sql.Rows) ([]*core.Stage, error) { - defer rows.Close() - - stages := []*core.Stage{} - for rows.Next() { - stage := new(core.Stage) - err := scanRow(rows, stage) - if err != nil { - return nil, err - } - stages = append(stages, stage) - } - return stages, nil -} - -// helper function scans the sql.Row and copies the column -// values to the destination object. -func scanRowsWithSteps(rows *sql.Rows) ([]*core.Stage, error) { - defer rows.Close() - - stages := []*core.Stage{} - var curr *core.Stage - for rows.Next() { - stage := new(core.Stage) - step := new(nullStep) - err := scanRowStep(rows, stage, step) - if err != nil { - return nil, err - } - if curr == nil || curr.ID != stage.ID { - curr = stage - stages = append(stages, stage) - } - if step.ID.Int64 != 0 { - curr.Steps = append(curr.Steps, step.value()) - } - } - return stages, nil -} diff --git a/store/stage/stage.go b/store/stage/stage.go deleted file mode 100644 index fc69b69203..0000000000 --- a/store/stage/stage.go +++ /dev/null @@ -1,424 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package stage - -import ( - "context" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/shared/db" -) - -// New returns a new StageStore. -func New(database *db.DB) core.StageStore { - return &stageStore{database} -} - -type stageStore struct { - db *db.DB -} - -func (s *stageStore) List(ctx context.Context, id int64) ([]*core.Stage, error) { - var out []*core.Stage - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - params := map[string]interface{}{ - "stage_build_id": id, - } - stmt, args, err := binder.BindNamed(queryBuild, params) - if err != nil { - return err - } - rows, err := queryer.Query(stmt, args...) - if err != nil { - return err - } - out, err = scanRows(rows) - return err - }) - return out, err -} - -func (s *stageStore) ListState(ctx context.Context, state string) ([]*core.Stage, error) { - var out []*core.Stage - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - params := map[string]interface{}{ - "stage_status": state, - } - query := queryState - // this is a workaround because mysql does not support - // partial or filtered indexes for low-cardinality values. - // For mysql we use a separate table to track pending and - // running jobs to avoid full table scans. - if (state == "pending" || state == "running") && - s.db.Driver() == db.Mysql { - query = queryStateMysql - } - stmt, args, err := binder.BindNamed(query, params) - if err != nil { - return err - } - rows, err := queryer.Query(stmt, args...) 
- if err != nil { - return err - } - out, err = scanRows(rows) - return err - }) - return out, err -} - -func (s *stageStore) ListSteps(ctx context.Context, id int64) ([]*core.Stage, error) { - var out []*core.Stage - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - params := map[string]interface{}{ - "stage_build_id": id, - } - stmt, args, err := binder.BindNamed(queryNumberWithSteps, params) - if err != nil { - return err - } - rows, err := queryer.Query(stmt, args...) - if err != nil { - return err - } - out, err = scanRowsWithSteps(rows) - return err - }) - return out, err -} - -func (s *stageStore) ListIncomplete(ctx context.Context) ([]*core.Stage, error) { - var out []*core.Stage - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - stmt := queryUnfinished - // this is a workaround because mysql does not support - // partial or filtered indexes for low-cardinality values. - // For mysql we use a separate table to track pending and - // running jobs to avoid full table scans. - if s.db.Driver() == db.Mysql { - stmt = queryUnfinishedMysql - } - rows, err := queryer.Query(stmt) - if err != nil { - return err - } - out, err = scanRows(rows) - return err - }) - return out, err -} - -func (s *stageStore) Find(ctx context.Context, id int64) (*core.Stage, error) { - out := &core.Stage{ID: id} - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - params := toParams(out) - query, args, err := binder.BindNamed(queryKey, params) - if err != nil { - return err - } - row := queryer.QueryRow(query, args...) - return scanRow(row, out) - }) - return out, err -} - -func (s *stageStore) FindNumber(ctx context.Context, id int64, number int) (*core.Stage, error) { - out := &core.Stage{BuildID: id, Number: number} - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - params := toParams(out) - query, args, err := binder.BindNamed(queryNumber, params) - if err != nil { - return err - } - row := queryer.QueryRow(query, args...) - return scanRow(row, out) - }) - return out, err -} - -func (s *stageStore) Create(ctx context.Context, stage *core.Stage) error { - if s.db.Driver() == db.Postgres { - return s.createPostgres(ctx, stage) - } - return s.create(ctx, stage) -} - -func (s *stageStore) create(ctx context.Context, stage *core.Stage) error { - stage.Version = 1 - return s.db.Lock(func(execer db.Execer, binder db.Binder) error { - params := toParams(stage) - stmt, args, err := binder.BindNamed(stmtInsert, params) - if err != nil { - return err - } - res, err := execer.Exec(stmt, args...) - if err != nil { - return err - } - stage.ID, err = res.LastInsertId() - return err - }) -} - -func (s *stageStore) createPostgres(ctx context.Context, stage *core.Stage) error { - stage.Version = 1 - return s.db.Lock(func(execer db.Execer, binder db.Binder) error { - params := toParams(stage) - stmt, args, err := binder.BindNamed(stmtInsertPg, params) - if err != nil { - return err - } - return execer.QueryRow(stmt, args...).Scan(&stage.ID) - }) -} - -func (s *stageStore) Update(ctx context.Context, stage *core.Stage) error { - versionNew := stage.Version + 1 - versionOld := stage.Version - - err := s.db.Lock(func(execer db.Execer, binder db.Binder) error { - params := toParams(stage) - params["stage_version_old"] = versionOld - params["stage_version_new"] = versionNew - stmt, args, err := binder.BindNamed(stmtUpdate, params) - if err != nil { - return err - } - res, err := execer.Exec(stmt, args...) 
- if err != nil { - return err - } - effected, err := res.RowsAffected() - if err != nil { - return err - } - if effected == 0 { - return db.ErrOptimisticLock - } - return nil - }) - if err == nil { - stage.Version = versionNew - } - return err -} - -const queryBase = ` -SELECT - stage_id -,stage_repo_id -,stage_build_id -,stage_number -,stage_name -,stage_kind -,stage_type -,stage_status -,stage_error -,stage_errignore -,stage_exit_code -,stage_limit -,stage_limit_repo -,stage_os -,stage_arch -,stage_variant -,stage_kernel -,stage_machine -,stage_started -,stage_stopped -,stage_created -,stage_updated -,stage_version -,stage_on_success -,stage_on_failure -,stage_depends_on -,stage_labels -FROM stages -` - -const queryKey = queryBase + ` -WHERE stage_id = :stage_id -` - -const queryState = queryBase + ` -WHERE stage_status = :stage_status -ORDER BY stage_id ASC -` - -const queryStateMysql = queryBase + ` -WHERE stage_id IN (SELECT stage_id FROM stages_unfinished) - AND stage_status = :stage_status -ORDER BY stage_id ASC -` - -const queryUnfinished = queryBase + ` -WHERE stage_status IN ('pending','running') -ORDER BY stage_id ASC -` - -const queryUnfinishedMysql = queryBase + ` -WHERE stage_id IN (SELECT stage_id FROM stages_unfinished) - AND stage_status IN ('pending','running') -ORDER BY stage_id ASC -` - -const queryBuild = queryBase + ` -WHERE stage_build_id = :stage_build_id -ORDER BY stage_number ASC -` - -const queryNumber = queryBase + ` -WHERE stage_build_id = :stage_build_id - AND stage_number = :stage_number -` - -const queryNumberWithSteps = ` -SELECT - stage_id -,stage_repo_id -,stage_build_id -,stage_number -,stage_name -,stage_kind -,stage_type -,stage_status -,stage_error -,stage_errignore -,stage_exit_code -,stage_limit -,stage_limit_repo -,stage_os -,stage_arch -,stage_variant -,stage_kernel -,stage_machine -,stage_started -,stage_stopped -,stage_created -,stage_updated -,stage_version -,stage_on_success -,stage_on_failure -,stage_depends_on -,stage_labels -,step_id -,step_stage_id -,step_number -,step_name -,step_status -,step_error -,step_errignore -,step_exit_code -,step_started -,step_stopped -,step_version -,step_depends_on -,step_image -,step_detached -,step_schema -FROM stages - LEFT JOIN steps - ON stages.stage_id=steps.step_stage_id - WHERE stages.stage_build_id = :stage_build_id -ORDER BY - stage_id ASC -,step_id ASC -` - -const stmtUpdate = ` -UPDATE stages -SET - stage_status = :stage_status -,stage_error = :stage_error -,stage_errignore = :stage_errignore -,stage_exit_code = :stage_exit_code -,stage_os = :stage_os -,stage_arch = :stage_arch -,stage_variant = :stage_variant -,stage_kernel = :stage_kernel -,stage_machine = :stage_machine -,stage_started = :stage_started -,stage_stopped = :stage_stopped -,stage_created = :stage_created -,stage_updated = :stage_updated -,stage_version = :stage_version_new -,stage_on_success = :stage_on_success -,stage_on_failure = :stage_on_failure -,stage_depends_on = :stage_depends_on -,stage_labels = :stage_labels -WHERE stage_id = :stage_id - AND stage_version = :stage_version_old -` - -const stmtInsert = ` -INSERT INTO stages ( - stage_repo_id -,stage_build_id -,stage_number -,stage_name -,stage_kind -,stage_type -,stage_status -,stage_error -,stage_errignore -,stage_exit_code -,stage_limit -,stage_limit_repo -,stage_os -,stage_arch -,stage_variant -,stage_kernel -,stage_machine -,stage_started -,stage_stopped -,stage_created -,stage_updated -,stage_version -,stage_on_success -,stage_on_failure -,stage_depends_on 
-,stage_labels -) VALUES ( - :stage_repo_id -,:stage_build_id -,:stage_number -,:stage_name -,:stage_kind -,:stage_type -,:stage_status -,:stage_error -,:stage_errignore -,:stage_exit_code -,:stage_limit -,:stage_limit_repo -,:stage_os -,:stage_arch -,:stage_variant -,:stage_kernel -,:stage_machine -,:stage_started -,:stage_stopped -,:stage_created -,:stage_updated -,:stage_version -,:stage_on_success -,:stage_on_failure -,:stage_depends_on -,:stage_labels -) -` - -const stmtInsertPg = stmtInsert + ` -RETURNING stage_id -` diff --git a/store/stage/stage_test.go b/store/stage/stage_test.go deleted file mode 100644 index d1305d9677..0000000000 --- a/store/stage/stage_test.go +++ /dev/null @@ -1,252 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -//go:build !oss -// +build !oss - -package stage - -import ( - "context" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/build" - "github.com/drone/drone/store/repos" - "github.com/drone/drone/store/shared/db" - "github.com/drone/drone/store/shared/db/dbtest" -) - -var noContext = context.TODO() - -func TestStage(t *testing.T) { - conn, err := dbtest.Connect() - if err != nil { - t.Error(err) - return - } - defer func() { - dbtest.Reset(conn) - dbtest.Disconnect(conn) - }() - - // seed with a dummy repository - arepo := &core.Repository{UID: "1", Slug: "octocat/hello-world"} - repos := repos.New(conn) - repos.Create(noContext, arepo) - - // seed with a dummy build - builds := build.New(conn) - abuild := &core.Build{Number: 1, RepoID: arepo.ID} - builds.Create(noContext, abuild, nil) - - store := New(conn).(*stageStore) - t.Run("Create", testStageCreate(store, abuild)) - t.Run("ListState", testStageListStatus(store, abuild)) -} - -func testStageCreate(store *stageStore, abuild *core.Build) func(t *testing.T) { - return func(t *testing.T) { - item := &core.Stage{ - RepoID: 42, - BuildID: abuild.ID, - Number: 2, - Name: "clone", - Status: core.StatusRunning, - ExitCode: 0, - Started: 1522878684, - Stopped: 0, - } - err := store.Create(noContext, item) - if err != nil { - t.Error(err) - } - if item.ID == 0 { - t.Errorf("Want ID assigned, got %d", item.ID) - } - if item.Version == 0 { - t.Errorf("Want Version assigned, got %d", item.Version) - } - - t.Run("Find", testStageFind(store, item)) - t.Run("FindNumber", testStageFindNumber(store, item)) - t.Run("List", testStageList(store, item)) - t.Run("ListSteps", testStageListSteps(store, item)) - t.Run("Update", testStageUpdate(store, item)) - t.Run("Locking", testStageLocking(store, item)) - } -} - -func testStageFind(store *stageStore, stage *core.Stage) func(t *testing.T) { - return func(t *testing.T) { - result, err := store.Find(noContext, stage.ID) - if err != nil { - t.Error(err) - } else { - t.Run("Fields", testStage(result)) - } - } -} - -func testStageFindNumber(store *stageStore, stage *core.Stage) func(t *testing.T) { - return func(t *testing.T) { - result, err := store.FindNumber(noContext, stage.BuildID, stage.Number) - if err != nil { - t.Error(err) - } else { - t.Run("Fields", testStage(result)) - } - } -} - -func testStageList(store *stageStore, stage *core.Stage) func(t *testing.T) { - return func(t *testing.T) { - list, err := store.List(noContext, stage.BuildID) - if err != nil { - t.Error(err) - return - } - if got, want := len(list), 1; got != want { - t.Errorf("Want count %d, got %d", want, got) - } else { - 
t.Run("Fields", testStage(list[0])) - } - } -} - -func testStageListSteps(store *stageStore, stage *core.Stage) func(t *testing.T) { - return func(t *testing.T) { - list, err := store.ListSteps(noContext, stage.BuildID) - if err != nil { - t.Error(err) - return - } - if got, want := len(list), 1; got != want { - t.Errorf("Want count %d, got %d", want, got) - } else { - t.Run("Fields", testStage(list[0])) - } - } -} - -func testStageUpdate(store *stageStore, stage *core.Stage) func(t *testing.T) { - return func(t *testing.T) { - before := &core.Stage{ - ID: stage.ID, - RepoID: 42, - BuildID: stage.BuildID, - Number: stage.Number, - Name: "clone", - ExitCode: 255, - Started: 1522878684, - Stopped: 1522878690, - Status: core.StatusFailing, - Version: stage.Version, - } - err := store.Update(noContext, before) - if err != nil { - t.Error(err) - return - } - if got, want := before.Version, stage.Version+1; got != want { - t.Errorf("Want incremented version %d, got %d", want, got) - } - after, err := store.Find(noContext, before.ID) - if err != nil { - t.Error(err) - return - } - if got, want := after.Version, stage.Version+1; got != want { - t.Errorf("Want incremented version %d, got %d", want, got) - } - if got, want := after.ExitCode, before.ExitCode; got != want { - t.Errorf("Want updated ExitCode %v, got %v", want, got) - } - if got, want := after.Status, before.Status; got != want { - t.Errorf("Want updated Status %v, got %v", want, got) - } - if got, want := after.Stopped, before.Stopped; got != want { - t.Errorf("Want updated Stopped %v, got %v", want, got) - } - } -} - -func testStageLocking(store *stageStore, stage *core.Stage) func(t *testing.T) { - return func(t *testing.T) { - before := &core.Stage{ - ID: stage.ID, - RepoID: 42, - BuildID: stage.BuildID, - Number: stage.Number, - Name: "clone", - ExitCode: 255, - Started: 1522878684, - Stopped: 1522878690, - Status: core.StatusFailing, - Version: stage.Version - 1, - } - err := store.Update(noContext, before) - if err == nil { - t.Errorf("Want Optimistic Lock Error, got nil") - } else if err != db.ErrOptimisticLock { - t.Errorf("Want Optimistic Lock Error") - } - } -} - -func testStageListStatus(store *stageStore, abuild *core.Build) func(t *testing.T) { - return func(t *testing.T) { - _ = store.db.Update(func(execer db.Execer, binder db.Binder) error { - _, _ = execer.Exec("DELETE FROM stages_unfinished") - _, _ = execer.Exec("DELETE FROM stages") - return nil - }) - _ = store.Create(noContext, &core.Stage{Number: 1, BuildID: abuild.ID, Status: core.StatusPending}) - _ = store.Create(noContext, &core.Stage{Number: 2, BuildID: abuild.ID, Status: core.StatusRunning}) - _ = store.Create(noContext, &core.Stage{Number: 3, BuildID: abuild.ID, Status: core.StatusFailing}) - list, err := store.ListState(noContext, core.StatusPending) - if err != nil { - t.Error(err) - return - } - if got, want := len(list), 1; got != want { - t.Errorf("Want count %d, got %d", want, got) - } - if got, want := list[0].Status, core.StatusPending; got != want { - t.Errorf("Want status %s, got %s", want, got) - } - if store.db.Driver() == db.Mysql { - store.db.Update(func(execer db.Execer, binder db.Binder) error { - var count int - execer.QueryRow("SELECT count(*) FROM stages_unfinished").Scan(&count) - if count != 2 { - t.Errorf("Expect 2 items in stages_unfinished got %d", count) - } - execer.Exec("UPDATE stages SET stage_status ='success' WHERE stage_number=1") - execer.QueryRow("SELECT count(*) FROM stages_unfinished").Scan(&count) - if count != 1 { - 
t.Errorf("Expect 1 items in stages_unfinished got %d", count) - } - return nil - }) - } - } -} - -func testStage(item *core.Stage) func(t *testing.T) { - return func(t *testing.T) { - if got, want := item.Name, "clone"; got != want { - t.Errorf("Want Name %q, got %q", want, got) - } - if got, want := item.Status, core.StatusRunning; got != want { - t.Errorf("Want Status %q, got %q", want, got) - } - if got, want := item.Started, int64(1522878684); got != want { - t.Errorf("Want Started %d, got %d", want, got) - } - if got, want := item.RepoID, int64(42); got != want { - t.Errorf("Want RepoID %d, got %d", want, got) - } - } -} diff --git a/store/stage/type.go b/store/stage/type.go deleted file mode 100644 index e1d3d6839b..0000000000 --- a/store/stage/type.go +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package stage - -import ( - "database/sql" - "encoding/json" - - "github.com/drone/drone/core" - "github.com/jmoiron/sqlx/types" -) - -type nullStep struct { - ID sql.NullInt64 - StageID sql.NullInt64 - Number sql.NullInt64 - Name sql.NullString - Status sql.NullString - Error sql.NullString - ErrIgnore sql.NullBool - ExitCode sql.NullInt64 - Started sql.NullInt64 - Stopped sql.NullInt64 - Version sql.NullInt64 - DependsOn types.JSONText - Image sql.NullString - Detached sql.NullBool - Schema sql.NullString -} - -func (s *nullStep) value() *core.Step { - var dependsOn []string - json.Unmarshal(s.DependsOn, &dependsOn) - - step := &core.Step{ - ID: s.ID.Int64, - StageID: s.StageID.Int64, - Number: int(s.Number.Int64), - Name: s.Name.String, - Status: s.Status.String, - Error: s.Error.String, - ErrIgnore: s.ErrIgnore.Bool, - ExitCode: int(s.ExitCode.Int64), - Started: s.Started.Int64, - Stopped: s.Stopped.Int64, - Version: s.Version.Int64, - DependsOn: dependsOn, - Image: s.Image.String, - Detached: s.Detached.Bool, - Schema: s.Schema.String, - } - - return step -} diff --git a/store/step/scan.go b/store/step/scan.go deleted file mode 100644 index 385b5ffd3e..0000000000 --- a/store/step/scan.go +++ /dev/null @@ -1,94 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package step - -import ( - "database/sql" - "encoding/json" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/shared/db" - - "github.com/jmoiron/sqlx/types" -) - -// helper function converts the Step structure to a set -// of named query parameters. 
-func toParams(from *core.Step) map[string]interface{} { - return map[string]interface{}{ - "step_id": from.ID, - "step_stage_id": from.StageID, - "step_number": from.Number, - "step_name": from.Name, - "step_status": from.Status, - "step_error": from.Error, - "step_errignore": from.ErrIgnore, - "step_exit_code": from.ExitCode, - "step_started": from.Started, - "step_stopped": from.Stopped, - "step_version": from.Version, - "step_depends_on": encodeSlice(from.DependsOn), - "step_image": from.Image, - "step_detached": from.Detached, - "step_schema": from.Schema, - } -} - -func encodeSlice(v []string) types.JSONText { - raw, _ := json.Marshal(v) - return types.JSONText(raw) -} - -// helper function scans the sql.Row and copies the column -// values to the destination object. -func scanRow(scanner db.Scanner, dest *core.Step) error { - depJSON := types.JSONText{} - err := scanner.Scan( - &dest.ID, - &dest.StageID, - &dest.Number, - &dest.Name, - &dest.Status, - &dest.Error, - &dest.ErrIgnore, - &dest.ExitCode, - &dest.Started, - &dest.Stopped, - &dest.Version, - &depJSON, - &dest.Image, - &dest.Detached, - &dest.Schema, - ) - json.Unmarshal(depJSON, &dest.DependsOn) - return err -} - -// helper function scans the sql.Row and copies the column -// values to the destination object. -func scanRows(rows *sql.Rows) ([]*core.Step, error) { - defer rows.Close() - - steps := []*core.Step{} - for rows.Next() { - step := new(core.Step) - err := scanRow(rows, step) - if err != nil { - return nil, err - } - steps = append(steps, step) - } - return steps, nil -} diff --git a/store/step/step.go b/store/step/step.go deleted file mode 100644 index 8424c24664..0000000000 --- a/store/step/step.go +++ /dev/null @@ -1,236 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package step - -import ( - "context" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/shared/db" -) - -// New returns a new StepStore. -func New(db *db.DB) core.StepStore { - return &stepStore{db} -} - -type stepStore struct { - db *db.DB -} - -func (s *stepStore) List(ctx context.Context, id int64) ([]*core.Step, error) { - var out []*core.Step - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - params := map[string]interface{}{"step_stage_id": id} - stmt, args, err := binder.BindNamed(queryStage, params) - if err != nil { - return err - } - rows, err := queryer.Query(stmt, args...) - if err != nil { - return err - } - out, err = scanRows(rows) - return err - }) - return out, err -} - -func (s *stepStore) Find(ctx context.Context, id int64) (*core.Step, error) { - out := &core.Step{ID: id} - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - params := toParams(out) - query, args, err := binder.BindNamed(queryKey, params) - if err != nil { - return err - } - row := queryer.QueryRow(query, args...) 
- return scanRow(row, out) - }) - return out, err -} - -func (s *stepStore) FindNumber(ctx context.Context, id int64, number int) (*core.Step, error) { - out := &core.Step{StageID: id, Number: number} - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - params := toParams(out) - query, args, err := binder.BindNamed(queryNumber, params) - if err != nil { - return err - } - row := queryer.QueryRow(query, args...) - return scanRow(row, out) - }) - return out, err -} - -func (s *stepStore) Create(ctx context.Context, step *core.Step) error { - if s.db.Driver() == db.Postgres { - return s.createPostgres(ctx, step) - } - return s.create(ctx, step) -} - -func (s *stepStore) create(ctx context.Context, step *core.Step) error { - step.Version = 1 - return s.db.Lock(func(execer db.Execer, binder db.Binder) error { - params := toParams(step) - stmt, args, err := binder.BindNamed(stmtInsert, params) - if err != nil { - return err - } - res, err := execer.Exec(stmt, args...) - if err != nil { - return err - } - step.ID, err = res.LastInsertId() - return err - }) -} - -func (s *stepStore) createPostgres(ctx context.Context, step *core.Step) error { - step.Version = 1 - return s.db.Lock(func(execer db.Execer, binder db.Binder) error { - params := toParams(step) - stmt, args, err := binder.BindNamed(stmtInsertPg, params) - if err != nil { - return err - } - return execer.QueryRow(stmt, args...).Scan(&step.ID) - }) -} - -func (s *stepStore) Update(ctx context.Context, step *core.Step) error { - versionNew := step.Version + 1 - versionOld := step.Version - - err := s.db.Lock(func(execer db.Execer, binder db.Binder) error { - params := toParams(step) - params["step_version_old"] = versionOld - params["step_version_new"] = versionNew - stmt, args, err := binder.BindNamed(stmtUpdate, params) - if err != nil { - return err - } - res, err := execer.Exec(stmt, args...) 
- if err != nil { - return err - } - effected, err := res.RowsAffected() - if err != nil { - return err - } - if effected == 0 { - return db.ErrOptimisticLock - } - return nil - }) - if err == nil { - step.Version = versionNew - } - return err -} - -const queryBase = ` -SELECT - step_id -,step_stage_id -,step_number -,step_name -,step_status -,step_error -,step_errignore -,step_exit_code -,step_started -,step_stopped -,step_version -,step_depends_on -,step_image -,step_detached -,step_schema -` - -const queryKey = queryBase + ` -FROM steps -WHERE step_id = :step_id -` - -const queryNumber = queryBase + ` -FROM steps -WHERE step_stage_id = :step_stage_id - AND step_number = :step_number -` - -const queryStage = queryBase + ` -FROM steps -WHERE step_stage_id = :step_stage_id -` - -const stmtUpdate = ` -UPDATE steps -SET - step_name = :step_name -,step_status = :step_status -,step_error = :step_error -,step_errignore = :step_errignore -,step_exit_code = :step_exit_code -,step_started = :step_started -,step_stopped = :step_stopped -,step_version = :step_version_new -,step_depends_on = :step_depends_on -,step_image = :step_image -,step_detached = :step_detached -,step_schema = :step_schema -WHERE step_id = :step_id - AND step_version = :step_version_old -` - -const stmtInsert = ` -INSERT INTO steps ( - step_stage_id -,step_number -,step_name -,step_status -,step_error -,step_errignore -,step_exit_code -,step_started -,step_stopped -,step_version -,step_depends_on -,step_image -,step_detached -,step_schema -) VALUES ( - :step_stage_id -,:step_number -,:step_name -,:step_status -,:step_error -,:step_errignore -,:step_exit_code -,:step_started -,:step_stopped -,:step_version -,:step_depends_on -,:step_image -,:step_detached -,:step_schema -) -` - -const stmtInsertPg = stmtInsert + ` -RETURNING step_id -` diff --git a/store/step/step_test.go b/store/step/step_test.go deleted file mode 100644 index 7f2b7a7d09..0000000000 --- a/store/step/step_test.go +++ /dev/null @@ -1,199 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
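Both the stage and step stores guard writes with the same optimistic-locking pattern: stmtUpdate only matches a row whose stored version equals the version the caller read (step_version = :step_version_old), the new version is written in the same statement, and zero affected rows is surfaced as db.ErrOptimisticLock so the caller can re-read and retry. A condensed sketch of that pattern against a plain *sql.DB follows; updateStepStatus and its error value are placeholders for illustration, not the store's actual API.

package lockdemo

import (
	"database/sql"
	"errors"
)

// ErrOptimisticLock mirrors the sentinel returned by the store's db package.
var ErrOptimisticLock = errors.New("optimistic lock error")

// updateStepStatus bumps the row only if nobody else updated it first.
// Column names follow the steps schema in this diff; the helper itself
// is illustrative.
func updateStepStatus(db *sql.DB, id, version int64, status string) (int64, error) {
	res, err := db.Exec(
		`UPDATE steps
		    SET step_status = ?, step_version = ?
		  WHERE step_id = ? AND step_version = ?`,
		status, version+1, id, version,
	)
	if err != nil {
		return version, err
	}
	affected, err := res.RowsAffected()
	if err != nil {
		return version, err
	}
	if affected == 0 {
		// Another writer got in between our read and write: the caller
		// should reload the step and retry with the fresh version.
		return version, ErrOptimisticLock
	}
	return version + 1, nil
}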
- -// +build !oss - -package step - -import ( - "context" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/build" - "github.com/drone/drone/store/repos" - "github.com/drone/drone/store/shared/db" - "github.com/drone/drone/store/shared/db/dbtest" -) - -var noContext = context.TODO() - -func TestStep(t *testing.T) { - conn, err := dbtest.Connect() - if err != nil { - t.Error(err) - return - } - defer func() { - dbtest.Reset(conn) - dbtest.Disconnect(conn) - }() - - // seed with a dummy repository - arepo := &core.Repository{UID: "1", Slug: "octocat/hello-world"} - repos := repos.New(conn) - repos.Create(noContext, arepo) - - // seed with a dummy stage - stage := &core.Stage{Number: 1} - stages := []*core.Stage{stage} - - // seed with a dummy build - abuild := &core.Build{Number: 1, RepoID: arepo.ID} - builds := build.New(conn) - builds.Create(noContext, abuild, stages) - - store := New(conn).(*stepStore) - t.Run("Create", testStepCreate(store, stage)) -} - -func testStepCreate(store *stepStore, stage *core.Stage) func(t *testing.T) { - return func(t *testing.T) { - item := &core.Step{ - StageID: stage.ID, - Number: 2, - Name: "clone", - Status: core.StatusRunning, - ExitCode: 0, - Started: 1522878684, - Stopped: 0, - DependsOn: []string{"backend", "frontend"}, - Image: "ubuntu", - Detached: false, - } - err := store.Create(noContext, item) - if err != nil { - t.Error(err) - } - if item.ID == 0 { - t.Errorf("Want ID assigned, got %d", item.ID) - } - if item.Version == 0 { - t.Errorf("Want Version assigned, got %d", item.Version) - } - - t.Run("Find", testStepFind(store, item)) - t.Run("FindNumber", testStepFindNumber(store, item)) - t.Run("List", testStepList(store, stage)) - t.Run("Update", testStepUpdate(store, item)) - t.Run("Locking", testStepLocking(store, item)) - } -} - -func testStepFind(store *stepStore, step *core.Step) func(t *testing.T) { - return func(t *testing.T) { - result, err := store.Find(noContext, step.ID) - if err != nil { - t.Error(err) - } else { - t.Run("Fields", testStep(result)) - } - } -} - -func testStepFindNumber(store *stepStore, step *core.Step) func(t *testing.T) { - return func(t *testing.T) { - result, err := store.FindNumber(noContext, step.StageID, step.Number) - if err != nil { - t.Error(err) - } else { - t.Run("Fields", testStep(result)) - } - } -} - -func testStepList(store *stepStore, stage *core.Stage) func(t *testing.T) { - return func(t *testing.T) { - list, err := store.List(noContext, stage.ID) - if err != nil { - t.Error(err) - return - } - if got, want := len(list), 1; got != want { - t.Errorf("Want count %d, got %d", want, got) - } else { - t.Run("Fields", testStep(list[0])) - } - } -} - -func testStepUpdate(store *stepStore, step *core.Step) func(t *testing.T) { - return func(t *testing.T) { - before := &core.Step{ - ID: step.ID, - StageID: step.StageID, - Number: 2, - Name: "clone", - ExitCode: 255, - Started: 1522878684, - Stopped: 1522878690, - Status: core.StatusFailing, - Version: step.Version, - } - err := store.Update(noContext, before) - if err != nil { - t.Error(err) - return - } - if got, want := before.Version, step.Version+1; got != want { - t.Errorf("Want incremented version %d, got %d", want, got) - } - after, err := store.Find(noContext, before.ID) - if err != nil { - t.Error(err) - return - } - if got, want := step.Version+1, after.Version; got != want { - t.Errorf("Want version incremented on update") - } - if got, want := after.ExitCode, before.ExitCode; got != want { - t.Errorf("Want updated 
ExitCode %v, got %v", want, got) - } - if got, want := after.Status, before.Status; got != want { - t.Errorf("Want updated Status %v, got %v", want, got) - } - if got, want := after.Stopped, before.Stopped; got != want { - t.Errorf("Want updated Stopped %v, got %v", want, got) - } - } -} - -func testStepLocking(store *stepStore, step *core.Step) func(t *testing.T) { - return func(t *testing.T) { - before := &core.Step{ - ID: step.ID, - StageID: step.StageID, - Number: 2, - Name: "clone", - ExitCode: 255, - Started: 1522878684, - Stopped: 1522878690, - Status: core.StatusFailing, - Version: step.Version - 1, - } - err := store.Update(noContext, before) - if err == nil { - t.Errorf("Want Optimistic Lock Error, got nil") - } else if err != db.ErrOptimisticLock { - t.Errorf("Want Optimistic Lock Error") - } - } -} - -func testStep(item *core.Step) func(t *testing.T) { - return func(t *testing.T) { - if got, want := item.Name, "clone"; got != want { - t.Errorf("Want Name %q, got %q", want, got) - } - if got, want := item.Number, 2; got != want { - t.Errorf("Want Name %d, got %d", want, got) - } - if got, want := item.Status, core.StatusRunning; got != want { - t.Errorf("Want Status %q, got %q", want, got) - } - if got, want := item.Started, int64(1522878684); got != want { - t.Errorf("Want Started %d, got %d", want, got) - } - } -} diff --git a/store/store.go b/store/store.go deleted file mode 100644 index 46800812a9..0000000000 --- a/store/store.go +++ /dev/null @@ -1,15 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package store diff --git a/store/template/scan.go b/store/template/scan.go deleted file mode 100644 index e06ebbbad4..0000000000 --- a/store/template/scan.go +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package template - -import ( - "database/sql" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/shared/db" -) - -// helper function converts the Template structure to a set -// of named query parameters. -func toParams(template *core.Template) (map[string]interface{}, error) { - return map[string]interface{}{ - "template_id": template.Id, - "template_name": template.Name, - "template_namespace": template.Namespace, - "template_data": template.Data, - "template_created": template.Created, - "template_updated": template.Updated, - }, nil -} - -// helper function scans the sql.Row and copies the column -// values to the destination object. -func scanRow(scanner db.Scanner, dst *core.Template) error { - err := scanner.Scan( - &dst.Id, - &dst.Name, - &dst.Namespace, - &dst.Data, - &dst.Created, - &dst.Updated, - ) - if err != nil { - return err - } - return nil -} - -// helper function scans the sql.Row and copies the column -// values to the destination object. 
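The create/createPostgres split that appears in the stage and step stores, and again in the template and user stores below, exists because Postgres drivers typically do not support Result.LastInsertId; those stores therefore append RETURNING <id> to the insert and scan the generated key instead, while the SQLite and MySQL paths read it from the Exec result. The sketch below shows both paths side by side; the isPostgres flag and the reduced column set are assumptions made for brevity, not the project's real statements.

package createdemo

import "database/sql"

// insertTemplate illustrates the two id-retrieval strategies used
// throughout these stores: LastInsertId for SQLite/MySQL, RETURNING
// for Postgres.
func insertTemplate(db *sql.DB, isPostgres bool, name, namespace string) (int64, error) {
	if isPostgres {
		var id int64
		err := db.QueryRow(
			`INSERT INTO templates (template_name, template_namespace)
			 VALUES ($1, $2) RETURNING template_id`,
			name, namespace,
		).Scan(&id)
		return id, err
	}
	res, err := db.Exec(
		`INSERT INTO templates (template_name, template_namespace) VALUES (?, ?)`,
		name, namespace,
	)
	if err != nil {
		return 0, err
	}
	return res.LastInsertId()
}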
-func scanRows(rows *sql.Rows) ([]*core.Template, error) { - defer rows.Close() - - template := []*core.Template{} - for rows.Next() { - tem := new(core.Template) - err := scanRow(rows, tem) - if err != nil { - return nil, err - } - template = append(template, tem) - } - return template, nil -} diff --git a/store/template/template.go b/store/template/template.go deleted file mode 100644 index c0a330f5f3..0000000000 --- a/store/template/template.go +++ /dev/null @@ -1,234 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package template - -import ( - "context" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/shared/db" -) - -// New returns a new Template database store. -func New(db *db.DB) core.TemplateStore { - return &templateStore{ - db: db, - } -} - -type templateStore struct { - db *db.DB -} - -func (s *templateStore) List(ctx context.Context, namespace string) ([]*core.Template, error) { - var out []*core.Template - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - params := map[string]interface{}{"template_namespace": namespace} - stmt, args, err := binder.BindNamed(queryNamespace, params) - if err != nil { - return err - } - rows, err := queryer.Query(stmt, args...) - if err != nil { - return err - } - out, err = scanRows(rows) - return err - }) - return out, err -} - -func (s *templateStore) ListAll(ctx context.Context) ([]*core.Template, error) { - var out []*core.Template - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - params := map[string]interface{}{} - stmt, args, err := binder.BindNamed(queryAll, params) - if err != nil { - return err - } - rows, err := queryer.Query(stmt, args...) - if err != nil { - return err - } - out, err = scanRows(rows) - return err - }) - return out, err -} - -func (s *templateStore) Find(ctx context.Context, id int64) (*core.Template, error) { - out := &core.Template{Id: id} - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - params, err := toParams(out) - if err != nil { - return err - } - query, args, err := binder.BindNamed(queryKey, params) - if err != nil { - return err - } - row := queryer.QueryRow(query, args...) - return scanRow(row, out) - }) - return out, err -} - -func (s *templateStore) FindName(ctx context.Context, name string, namespace string) (*core.Template, error) { - out := &core.Template{Name: name, Namespace: namespace} - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - params, err := toParams(out) - if err != nil { - return err - } - - query, args, err := binder.BindNamed(queryName, params) - - if err != nil { - return err - } - row := queryer.QueryRow(query, args...) - return scanRow(row, out) - }) - return out, err -} - -func (s *templateStore) Create(ctx context.Context, template *core.Template) error { - if s.db.Driver() == db.Postgres { - return s.createPostgres(ctx, template) - } - return s.create(ctx, template) -} - -func (s *templateStore) create(ctx context.Context, template *core.Template) error { - return s.db.Lock(func(execer db.Execer, binder db.Binder) error { - params, err := toParams(template) - if err != nil { - return err - } - stmt, args, err := binder.BindNamed(stmtInsert, params) - if err != nil { - return err - } - res, err := execer.Exec(stmt, args...) 
- if err != nil { - return err - } - template.Id, err = res.LastInsertId() - return err - }) -} - -func (s *templateStore) createPostgres(ctx context.Context, template *core.Template) error { - return s.db.Lock(func(execer db.Execer, binder db.Binder) error { - params, err := toParams(template) - if err != nil { - return err - } - stmt, args, err := binder.BindNamed(stmtInsertPostgres, params) - if err != nil { - return err - } - return execer.QueryRow(stmt, args...).Scan(&template.Id) - }) -} - -func (s *templateStore) Update(ctx context.Context, template *core.Template) error { - return s.db.Lock(func(execer db.Execer, binder db.Binder) error { - params, err := toParams(template) - if err != nil { - return err - } - stmt, args, err := binder.BindNamed(stmtUpdate, params) - if err != nil { - return err - } - _, err = execer.Exec(stmt, args...) - return err - }) -} - -func (s *templateStore) Delete(ctx context.Context, template *core.Template) error { - return s.db.Lock(func(execer db.Execer, binder db.Binder) error { - params, err := toParams(template) - if err != nil { - return err - } - stmt, args, err := binder.BindNamed(stmtDelete, params) - if err != nil { - return err - } - _, err = execer.Exec(stmt, args...) - return err - }) -} - -const queryKey = queryBase + ` -FROM templates -WHERE template_id = :template_id -LIMIT 1 -` - -const queryBase = ` -SELECT - template_id -,template_name -,template_namespace -,template_data -,template_created -,template_updated -` - -const queryAll = queryBase + ` -FROM templates -ORDER BY template_name -` - -const queryNamespace = queryBase + ` -FROM templates -WHERE template_namespace = :template_namespace -ORDER BY template_name -` - -const stmtInsert = ` -INSERT INTO templates ( - template_name -,template_namespace -,template_data -,template_created -,template_updated -) VALUES ( - :template_name -,:template_namespace -,:template_data -,:template_created -,:template_updated -) -` - -const stmtUpdate = ` -UPDATE templates SET -template_name = :template_name -,template_namespace = :template_namespace -,template_data = :template_data -,template_updated = :template_updated -WHERE template_id = :template_id -` - -const stmtDelete = ` -DELETE FROM templates -WHERE template_id = :template_id -` -const queryName = queryBase + ` -FROM templates -WHERE template_name = :template_name -AND template_namespace = :template_namespace -LIMIT 1 -` - -const stmtInsertPostgres = stmtInsert + ` -RETURNING template_id -` diff --git a/store/template/template_oss.go b/store/template/template_oss.go deleted file mode 100644 index e6ea289937..0000000000 --- a/store/template/template_oss.go +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
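The template store above is compiled only for non-OSS builds (its file carries the !oss build constraint), and the file that begins below, store/template/template_oss.go, provides a no-op implementation under the oss tag so the open-source binary still satisfies core.TemplateStore without shipping the feature. A minimal sketch of that build-tag pattern is shown here with invented names; in the real project the interface lives in the core package rather than next to the constrained file.

//go:build oss
// +build oss

package feature

import "context"

// Store is the interface both build flavors must satisfy. In the real
// project this would live in a shared package (core), not in the
// build-constrained file; it is inlined here only to keep the sketch
// self-contained.
type Store interface {
	Find(ctx context.Context, id int64) (string, error)
}

// New returns a no-op Store for open-source builds; a sibling file
// guarded by `//go:build !oss` would return the full implementation.
func New() Store { return noop{} }

type noop struct{}

func (noop) Find(ctx context.Context, id int64) (string, error) { return "", nil }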
- -// +build oss - -package template - -import ( - "context" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/shared/db" -) - -func New(db *db.DB) core.TemplateStore { - return new(noop) -} - -type noop struct{} - -func (noop) List(ctx context.Context, namespace string) ([]*core.Template, error) { - return nil, nil -} - -func (noop) ListAll(ctx context.Context) ([]*core.Template, error) { - return nil, nil -} - -func (noop) Find(ctx context.Context, id int64) (*core.Template, error) { - return nil, nil -} - -func (noop) FindName(ctx context.Context, name string, namespace string) (*core.Template, error) { - return nil, nil -} - -func (noop) Create(ctx context.Context, template *core.Template) error { - return nil -} - -func (noop) Update(ctx context.Context, template *core.Template) error { - return nil -} - -func (noop) Delete(ctx context.Context, template *core.Template) error { - return nil -} diff --git a/store/template/template_test.go b/store/template/template_test.go deleted file mode 100644 index cec90191ec..0000000000 --- a/store/template/template_test.go +++ /dev/null @@ -1,224 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -//go:build !oss -// +build !oss - -package template - -import ( - "context" - "database/sql" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/shared/db/dbtest" -) - -var noContext = context.TODO() - -func TestTemplate(t *testing.T) { - conn, err := dbtest.Connect() - if err != nil { - t.Error(err) - return - } - defer func() { - dbtest.Reset(conn) - dbtest.Disconnect(conn) - }() - - store := New(conn).(*templateStore) - t.Run("TestTemplates", testTemplateCreate(store)) -} - -func testTemplateCreate(store *templateStore) func(t *testing.T) { - return func(t *testing.T) { - item := &core.Template{ - Id: 1, - Name: "my_template", - Namespace: "my_org", - Data: "some_template_data", - Created: 1, - Updated: 2, - } - err := store.Create(noContext, item) - if err != nil { - t.Error(err) - } - if item.Id == 0 { - t.Errorf("Want template Id assigned, got %d", item.Id) - } - - t.Run("CreateSameNameDiffOrg", testCreateWithSameNameDiffOrg(store)) - t.Run("CreateSameNameSameOrgShouldError", testCreateSameNameSameOrgShouldError(store)) - t.Run("Find", testTemplateFind(store, item)) - t.Run("FindName", testTemplateFindName(store)) - t.Run("ListAll", testTemplateListAll(store)) - t.Run("List", testTemplateList(store)) - t.Run("Update", testTemplateUpdate(store)) - t.Run("Delete", testTemplateDelete(store)) - } -} - -func testCreateWithSameNameDiffOrg(store *templateStore) func(t *testing.T) { - return func(t *testing.T) { - item := &core.Template{ - Id: 1, - Name: "my_template", - Namespace: "my_org2", - Data: "some_template_data", - Created: 1, - Updated: 2, - } - err := store.Create(noContext, item) - if err != nil { - t.Error(err) - } - if item.Id == 0 { - t.Errorf("Want template Id assigned, got %d", item.Id) - } - } -} - -func testCreateSameNameSameOrgShouldError(store *templateStore) func(t *testing.T) { - return func(t *testing.T) { - item := &core.Template{ - Id: 3, - Name: "my_template", - Namespace: "my_org2", - Data: "some_template_data", - Created: 1, - Updated: 2, - } - err := store.Create(noContext, item) - if err == nil { - t.Error(err) - } - } -} - -func testTemplateFind(store *templateStore, template *core.Template) func(t *testing.T) { - return func(t *testing.T) { - item, err 
:= store.Find(noContext, template.Id) - if err != nil { - t.Error(err) - } else { - t.Run("Fields", testTemplate(item)) - } - } -} - -func testTemplateFindName(store *templateStore) func(t *testing.T) { - return func(t *testing.T) { - item, err := store.FindName(noContext, "my_template", "my_org") - if err != nil { - t.Error(err) - } else { - t.Run("Fields", testTemplate(item)) - } - } -} - -func testTemplate(item *core.Template) func(t *testing.T) { - return func(t *testing.T) { - if got, want := item.Name, "my_template"; got != want { - t.Errorf("Want template name %q, got %q", want, got) - } - if got, want := item.Data, "some_template_data"; got != want { - t.Errorf("Want template data %q, got %q", want, got) - } - if got, want := item.Namespace, "my_org"; got != want { - t.Errorf("Want template org %q, got %q", want, got) - } - } -} - -func testTemplate2(item *core.Template) func(t *testing.T) { - return func(t *testing.T) { - if got, want := item.Name, "my_template"; got != want { - t.Errorf("Want template name %q, got %q", want, got) - } - if got, want := item.Data, "some_template_data"; got != want { - t.Errorf("Want template data %q, got %q", want, got) - } - if got, want := item.Namespace, "my_org2"; got != want { - t.Errorf("Want template org %q, got %q", want, got) - } - } -} - -func testTemplateListAll(store *templateStore) func(t *testing.T) { - return func(t *testing.T) { - list, err := store.ListAll(noContext) - if err != nil { - t.Error(err) - return - } - if got, want := len(list), 2; got != want { - t.Errorf("Want count %d, got %d", want, got) - } else { - t.Run("Fields", testTemplate(list[0])) - t.Run("Fields", testTemplate2(list[1])) - } - } -} - -func testTemplateList(store *templateStore) func(t *testing.T) { - return func(t *testing.T) { - list, err := store.List(noContext, "my_org") - if err != nil { - t.Error(err) - return - } - if got, want := len(list), 1; got != want { - t.Errorf("Want count %d, got %d", want, got) - } else { - t.Run("Fields", testTemplate(list[0])) - } - } -} - -func testTemplateUpdate(store *templateStore) func(t *testing.T) { - return func(t *testing.T) { - before, err := store.FindName(noContext, "my_template", "my_org") - if err != nil { - t.Error(err) - return - } - err = store.Update(noContext, before) - if err != nil { - t.Error(err) - return - } - after, err := store.Find(noContext, before.Id) - if err != nil { - t.Error(err) - return - } - if after == nil { - t.Fail() - } - } -} - -func testTemplateDelete(store *templateStore) func(t *testing.T) { - return func(t *testing.T) { - secret, err := store.FindName(noContext, "my_template", "my_org") - if err != nil { - t.Error(err) - return - } - err = store.Delete(noContext, secret) - if err != nil { - t.Error(err) - return - } - _, err = store.Find(noContext, secret.Id) - if got, want := sql.ErrNoRows, err; got != want { - t.Errorf("Want sql.ErrNoRows, got %v", got) - return - } - } -} diff --git a/store/user/scan.go b/store/user/scan.go deleted file mode 100644 index 015b1e3b68..0000000000 --- a/store/user/scan.go +++ /dev/null @@ -1,107 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package user - -import ( - "database/sql" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/shared/db" - "github.com/drone/drone/store/shared/encrypt" -) - -// helper function converts the User structure to a set -// of named query parameters. -func toParams(encrypt encrypt.Encrypter, u *core.User) (map[string]interface{}, error) { - token, err := encrypt.Encrypt(u.Token) - if err != nil { - return nil, err - } - refresh, err := encrypt.Encrypt(u.Refresh) - if err != nil { - return nil, err - } - return map[string]interface{}{ - "user_id": u.ID, - "user_login": u.Login, - "user_email": u.Email, - "user_admin": u.Admin, - "user_machine": u.Machine, - "user_active": u.Active, - "user_avatar": u.Avatar, - "user_syncing": u.Syncing, - "user_synced": u.Synced, - "user_created": u.Created, - "user_updated": u.Updated, - "user_last_login": u.LastLogin, - "user_oauth_token": token, - "user_oauth_refresh": refresh, - "user_oauth_expiry": u.Expiry, - "user_hash": u.Hash, - }, nil -} - -// helper function scans the sql.Row and copies the column -// values to the destination object. -func scanRow(encrypt encrypt.Encrypter, scanner db.Scanner, dest *core.User) error { - var token, refresh []byte - err := scanner.Scan( - &dest.ID, - &dest.Login, - &dest.Email, - &dest.Admin, - &dest.Machine, - &dest.Active, - &dest.Avatar, - &dest.Syncing, - &dest.Synced, - &dest.Created, - &dest.Updated, - &dest.LastLogin, - &token, - &refresh, - &dest.Expiry, - &dest.Hash, - ) - if err != nil { - return err - } - dest.Token, err = encrypt.Decrypt(token) - if err != nil { - return err - } - dest.Refresh, err = encrypt.Decrypt(refresh) - if err != nil { - return err - } - return nil -} - -// helper function scans the sql.Row and copies the column -// values to the destination object. -func scanRows(encrypt encrypt.Encrypter, rows *sql.Rows) ([]*core.User, error) { - defer rows.Close() - - users := []*core.User{} - for rows.Next() { - user := new(core.User) - err := scanRow(encrypt, rows, user) - if err != nil { - return nil, err - } - users = append(users, user) - } - return users, nil -} diff --git a/store/user/user.go b/store/user/user.go deleted file mode 100644 index 32c51a5f20..0000000000 --- a/store/user/user.go +++ /dev/null @@ -1,332 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package user - -import ( - "context" - "fmt" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/shared/db" - "github.com/drone/drone/store/shared/encrypt" -) - -// New returns a new UserStore. 
-func New(db *db.DB, enc encrypt.Encrypter) core.UserStore { - return &userStore{db, enc} -} - -type userStore struct { - db *db.DB - enc encrypt.Encrypter -} - -// Find returns a user from the datastore. -func (s *userStore) Find(ctx context.Context, id int64) (*core.User, error) { - out := &core.User{ID: id} - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - params := map[string]interface{}{"user_id": id} - query, args, err := binder.BindNamed(queryKey, params) - if err != nil { - return err - } - row := queryer.QueryRow(query, args...) - return scanRow(s.enc, row, out) - }) - return out, err -} - -// FindLogin returns a user from the datastore by username. -func (s *userStore) FindLogin(ctx context.Context, login string) (*core.User, error) { - out := &core.User{Login: login} - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - params := map[string]interface{}{"user_login": login} - query, args, err := binder.BindNamed(queryLogin, params) - if err != nil { - return err - } - row := queryer.QueryRow(query, args...) - return scanRow(s.enc, row, out) - }) - return out, err -} - -// FindToken returns a user from the datastore by token. -func (s *userStore) FindToken(ctx context.Context, token string) (*core.User, error) { - out := &core.User{Hash: token} - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - params := map[string]interface{}{"user_hash": token} - query, args, err := binder.BindNamed(queryToken, params) - if err != nil { - return err - } - row := queryer.QueryRow(query, args...) - return scanRow(s.enc, row, out) - }) - return out, err -} - -// List returns a list of users from the datastore. -func (s *userStore) List(ctx context.Context) ([]*core.User, error) { - var out []*core.User - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - rows, err := queryer.Query(queryAll) - if err != nil { - return err - } - out, err = scanRows(s.enc, rows) - return err - }) - return out, err -} - -// ListRange returns a list of users from the datastore. -func (s *userStore) ListRange(ctx context.Context, params core.UserParams) ([]*core.User, error) { - var out []*core.User - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - // this query breaks a rule and uses sprintf to inject parameters - // into the query. Normally this should be avoided, however, in this - // case the parameters are set by the internal system and can - // be considered safe. - query := queryRange - switch { - case params.Sort: - query = fmt.Sprintf(query, "user_login", params.Size, params.Page) - default: - query = fmt.Sprintf(query, "user_id", params.Size, params.Page) - } - rows, err := queryer.Query(query) - if err != nil { - return err - } - out, err = scanRows(s.enc, rows) - return err - }) - return out, err -} - -// Create persists a new user to the datastore. -func (s *userStore) Create(ctx context.Context, user *core.User) error { - if s.db.Driver() == db.Postgres { - return s.createPostgres(ctx, user) - } - return s.create(ctx, user) -} - -func (s *userStore) create(ctx context.Context, user *core.User) error { - return s.db.Lock(func(execer db.Execer, binder db.Binder) error { - params, err := toParams(s.enc, user) - if err != nil { - return err - } - stmt, args, err := binder.BindNamed(stmtInsert, params) - if err != nil { - return err - } - res, err := execer.Exec(stmt, args...) 
- if err != nil { - return err - } - user.ID, err = res.LastInsertId() - return err - }) -} - -func (s *userStore) createPostgres(ctx context.Context, user *core.User) error { - return s.db.Lock(func(execer db.Execer, binder db.Binder) error { - params, err := toParams(s.enc, user) - if err != nil { - return err - } - stmt, args, err := binder.BindNamed(stmtInsertPg, params) - if err != nil { - return err - } - return execer.QueryRow(stmt, args...).Scan(&user.ID) - }) -} - -// Update persists an updated user to the datastore. -func (s *userStore) Update(ctx context.Context, user *core.User) error { - return s.db.Lock(func(execer db.Execer, binder db.Binder) error { - params, err := toParams(s.enc, user) - if err != nil { - return err - } - stmt, args, err := binder.BindNamed(stmtUpdate, params) - if err != nil { - return err - } - _, err = execer.Exec(stmt, args...) - return err - }) -} - -// Delete deletes a user from the datastore. -func (s *userStore) Delete(ctx context.Context, user *core.User) error { - return s.db.Lock(func(execer db.Execer, binder db.Binder) error { - params := map[string]interface{}{"user_id": user.ID} - stmt, args, err := binder.BindNamed(stmtDelete, params) - if err != nil { - return err - } - _, err = execer.Exec(stmt, args...) - return err - }) -} - -// Count returns a count of active users. -func (s *userStore) Count(ctx context.Context) (int64, error) { - var out int64 - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - return queryer.QueryRow(queryCount).Scan(&out) - }) - return out, err -} - -// Count returns a count of active human users. -func (s *userStore) CountHuman(ctx context.Context) (int64, error) { - var out int64 - err := s.db.View(func(queryer db.Queryer, binder db.Binder) error { - params := map[string]interface{}{"user_machine": false} - stmt, args, err := binder.BindNamed(queryCountHuman, params) - if err != nil { - return err - } - return queryer.QueryRow(stmt, args...).Scan(&out) - }) - return out, err -} - -const queryCount = ` -SELECT COUNT(*) -FROM users -` - -const queryCountHuman = ` -SELECT COUNT(*) -FROM users -WHERE user_machine = :user_machine -` - -const queryBase = ` -SELECT - user_id -,user_login -,user_email -,user_admin -,user_machine -,user_active -,user_avatar -,user_syncing -,user_synced -,user_created -,user_updated -,user_last_login -,user_oauth_token -,user_oauth_refresh -,user_oauth_expiry -,user_hash -` - -const queryKey = queryBase + ` -FROM users -WHERE user_id = :user_id -` - -const queryLogin = queryBase + ` -FROM users -WHERE user_login = :user_login -` - -const queryToken = queryBase + ` -FROM users -WHERE user_hash = :user_hash -` - -const queryAll = queryBase + ` -FROM users -ORDER BY user_login -` - -const queryRange = queryBase + ` -FROM users -ORDER BY %s -LIMIT %d -OFFSET %d -` - -const stmtUpdate = ` -UPDATE users -SET - user_email = :user_email -,user_admin = :user_admin -,user_active = :user_active -,user_avatar = :user_avatar -,user_syncing = :user_syncing -,user_synced = :user_synced -,user_created = :user_created -,user_updated = :user_updated -,user_last_login = :user_last_login -,user_oauth_token = :user_oauth_token -,user_oauth_refresh = :user_oauth_refresh -,user_oauth_expiry = :user_oauth_expiry -,user_hash = :user_hash -WHERE user_id = :user_id -` - -const stmtDelete = ` -DELETE FROM users WHERE user_id = :user_id -` - -const stmtInsert = ` -INSERT INTO users ( - user_login -,user_email -,user_admin -,user_machine -,user_active -,user_avatar -,user_syncing -,user_synced 
-,user_created -,user_updated -,user_last_login -,user_oauth_token -,user_oauth_refresh -,user_oauth_expiry -,user_hash -) VALUES ( - :user_login -,:user_email -,:user_admin -,:user_machine -,:user_active -,:user_avatar -,:user_syncing -,:user_synced -,:user_created -,:user_updated -,:user_last_login -,:user_oauth_token -,:user_oauth_refresh -,:user_oauth_expiry -,:user_hash -) -` - -const stmtInsertPg = stmtInsert + ` -RETURNING user_id -` diff --git a/store/user/user_test.go b/store/user/user_test.go deleted file mode 100644 index 8c6b0c89e2..0000000000 --- a/store/user/user_test.go +++ /dev/null @@ -1,245 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package user - -import ( - "context" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/store/shared/db/dbtest" - "github.com/drone/drone/store/shared/encrypt" -) - -var noContext = context.TODO() - -func TestUser(t *testing.T) { - conn, err := dbtest.Connect() - if err != nil { - t.Error(err) - return - } - defer func() { - dbtest.Reset(conn) - dbtest.Disconnect(conn) - }() - - store := New(conn, nil).(*userStore) - store.enc, _ = encrypt.New("fb4b4d6267c8a5ce8231f8b186dbca92") - t.Run("Create", testUserCreate(store)) -} - -func testUserCreate(store *userStore) func(t *testing.T) { - return func(t *testing.T) { - user := &core.User{ - Login: "octocat", - Email: "octocat@github.com", - Avatar: "https://avatars3.githubusercontent.com/u/583231?v=4", - Hash: "MjAxOC0wOC0xMVQxNTo1ODowN1o", - Token: "9595fe015ca9b98c41ebf4e7d4e004ee", - Refresh: "268ef49df64ea8ff79ef11e995d41aed", - } - err := store.Create(noContext, user) - if err != nil { - t.Error(err) - } - if user.ID == 0 { - t.Errorf("Want user ID assigned, got %d", user.ID) - } - - t.Run("Count", testUserCount(store)) - t.Run("Find", testUserFind(store, user)) - t.Run("FindLogin", testUserFindLogin(store)) - t.Run("FindToken", testUserFindToken(store)) - t.Run("List", testUserList(store)) - t.Run("Update", testUserUpdate(store, user)) - t.Run("Delete", testUserDelete(store, user)) - } -} - -func testUserCount(users *userStore) func(t *testing.T) { - return func(t *testing.T) { - count, err := users.Count(noContext) - if err != nil { - t.Error(err) - } - if got, want := count, int64(1); got != want { - t.Errorf("Want user table count %d, got %d", want, got) - } - - count, err = users.CountHuman(noContext) - if err != nil { - t.Error(err) - } - if got, want := count, int64(1); got != want { - t.Errorf("Want user table count %d for humans, got %d", want, got) - } - } -} - -func testUserFind(users *userStore, created *core.User) func(t *testing.T) { - return func(t *testing.T) { - user, err := users.Find(noContext, created.ID) - if err != nil { - t.Error(err) - } else { - t.Run("Fields", testUser(user)) - } - } -} - -func testUserFindLogin(users *userStore) func(t *testing.T) { - return func(t *testing.T) { - user, err := users.FindLogin(noContext, "octocat") - if err != nil { - t.Error(err) - } else { - t.Run("Fields", testUser(user)) - } - } -} - -func testUserFindToken(users *userStore) func(t *testing.T) { - return func(t *testing.T) { - user, err := users.FindToken(noContext, "MjAxOC0wOC0xMVQxNTo1ODowN1o") - if err != nil { - t.Error(err) - } else { - t.Run("Fields", testUser(user)) - } - } -} - -func testUserList(users *userStore) func(t *testing.T) { - return func(t *testing.T) { - users, err := 
users.List(noContext) - if err != nil { - t.Error(err) - return - } - if got, want := len(users), 1; got != want { - t.Errorf("Want user count %d, got %d", want, got) - } else { - t.Run("Fields", testUser(users[0])) - } - } -} - -func testUserUpdate(users *userStore, created *core.User) func(t *testing.T) { - return func(t *testing.T) { - user := &core.User{ - ID: created.ID, - Login: "octocat", - Email: "noreply@github.com", - Avatar: "https://avatars3.githubusercontent.com/u/583231?v=4", - } - err := users.Update(noContext, user) - if err != nil { - t.Error(err) - return - } - updated, err := users.Find(noContext, user.ID) - if err != nil { - t.Error(err) - return - } - if got, want := updated.Email, user.Email; got != want { - t.Errorf("Want updated user Email %q, got %q", want, got) - } - } -} - -func testUserDelete(users *userStore, created *core.User) func(t *testing.T) { - return func(t *testing.T) { - count, _ := users.Count(noContext) - if got, want := count, int64(1); got != want { - t.Errorf("Want user table count %d, got %d", want, got) - return - } - - err := users.Delete(noContext, &core.User{ID: created.ID}) - if err != nil { - t.Error(err) - } - - count, _ = users.Count(noContext) - if got, want := count, int64(0); got != want { - t.Errorf("Want user table count %d, got %d", want, got) - return - } - } -} - -func testUser(user *core.User) func(t *testing.T) { - return func(t *testing.T) { - if got, want := user.Login, "octocat"; got != want { - t.Errorf("Want user Login %q, got %q", want, got) - } - if got, want := user.Email, "octocat@github.com"; got != want { - t.Errorf("Want user Email %q, got %q", want, got) - } - if got, want := user.Avatar, "https://avatars3.githubusercontent.com/u/583231?v=4"; got != want { - t.Errorf("Want user Avatar %q, got %q", want, got) - } - if got, want := user.Token, "9595fe015ca9b98c41ebf4e7d4e004ee"; got != want { - t.Errorf("Want user Access Token %q, got %q", want, got) - } - if got, want := user.Refresh, "268ef49df64ea8ff79ef11e995d41aed"; got != want { - t.Errorf("Want user Refresh Token %q, got %q", want, got) - } - } -} - -// The purpose of this unit test is to ensure that plaintext -// data can still be read from the database if encryption is -// added at a later time. 
-func TestUserCryptoCompat(t *testing.T) { - conn, err := dbtest.Connect() - if err != nil { - t.Error(err) - return - } - defer func() { - dbtest.Reset(conn) - dbtest.Disconnect(conn) - }() - - store := New(conn, nil).(*userStore) - store.enc, _ = encrypt.New("") - - item := &core.User{ - Login: "octocat", - Email: "octocat@github.com", - Avatar: "https://avatars3.githubusercontent.com/u/583231?v=4", - Hash: "MjAxOC0wOC0xMVQxNTo1ODowN1o", - Token: "9595fe015ca9b98c41ebf4e7d4e004ee", - Refresh: "268ef49df64ea8ff79ef11e995d41aed", - } - - // create the secret with the secret value stored as plaintext - err = store.Create(noContext, item) - if err != nil { - t.Error(err) - return - } - if item.ID == 0 { - t.Errorf("Want secret ID assigned, got %d", item.ID) - return - } - - // update the store to use encryption - store.enc, _ = encrypt.New("fb4b4d6267c8a5ce8231f8b186dbca92") - store.enc.(*encrypt.Aesgcm).Compat = true - - // fetch the secret from the database - got, err := store.Find(noContext, item.ID) - if err != nil { - t.Errorf("cannot retrieve user from database: %s", err) - } else { - t.Run("Fields", testUser(got)) - } -} diff --git a/trigger/change.go b/trigger/change.go deleted file mode 100644 index c477b70137..0000000000 --- a/trigger/change.go +++ /dev/null @@ -1,72 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package trigger - -// import ( -// "context" -// "regexp" -// "strconv" - -// "github.com/drone/drone/core" -// "github.com/drone/go-scm/scm" -// ) - -// func listChanges(client *scm.Client, repo *core.Repository, build *core.Build) ([]string, error) { -// switch build.Event { -// case core.EventPullRequest: -// return listChangesPullRequest(client, repo, build) -// case core.EventPush: -// return listChangesPush(client, repo, build) -// default: -// return nil, nil -// } -// } - -// func listChangesPullRequest(client *scm.Client, repo *core.Repository, build *core.Build) ([]string, error) { -// var paths []string -// pr, err := parsePullRequest(build.Ref) -// if err != nil { -// return nil, err -// } -// change, _, err := client.PullRequests.ListChanges(context.Background(), repo.Slug, pr, scm.ListOptions{}) -// if err == nil { -// for _, file := range change { -// paths = append(paths, file.Path) -// } -// } -// return paths, err -// } - -// func listChangesPush(client *scm.Client, repo *core.Repository, build *core.Build) ([]string, error) { -// var paths []string -// // TODO (bradrydzewski) some tag hooks provide the tag but do -// // not provide the sha, in which case we should use the ref -// // instead of the sha. 
-// change, _, err := client.Git.ListChanges(context.Background(), repo.Slug, build.After, scm.ListOptions{}) -// if err == nil { -// for _, file := range change { -// paths = append(paths, file.Path) -// } -// } -// return paths, err -// } - -// func parsePullRequest(ref string) (int, error) { -// return strconv.Atoi( -// pre.FindString(ref), -// ) -// } - -// var pre = regexp.MustCompile("\\d+") diff --git a/trigger/change_test.go b/trigger/change_test.go deleted file mode 100644 index 423fc23187..0000000000 --- a/trigger/change_test.go +++ /dev/null @@ -1,136 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package trigger - -// import ( -// "testing" - -// "github.com/drone/drone/core" -// "github.com/drone/drone/mock" -// "github.com/drone/go-scm/scm" - -// "github.com/golang/mock/gomock" -// "github.com/google/go-cmp/cmp" -// ) - -// func Test_listChanges_None(t *testing.T) { -// controller := gomock.NewController(t) -// defer controller.Finish() - -// mockRepo := &core.Repository{ -// Slug: "octocat/hello-world", -// } -// mockBuild := &core.Build{ -// Event: core.EventTag, -// Ref: "refs/tags/v1.0.0", -// } -// paths, err := listChanges(nil, mockRepo, mockBuild) -// if err != nil { -// t.Error(err) -// } -// if len(paths) != 0 { -// t.Errorf("Expect empty changeset for Tag events") -// } -// } - -// func Test_listChanges_Push(t *testing.T) { -// controller := gomock.NewController(t) -// defer controller.Finish() - -// mockRepo := &core.Repository{ -// Slug: "octocat/hello-world", -// } -// mockBuild := &core.Build{ -// Event: core.EventPush, -// After: "7fd1a60b01f91b314f59955a4e4d4e80d8edf11d", -// } -// mockChanges := []*scm.Change{ -// {Path: "README.md"}, -// } - -// mockGit := mock.NewMockGitService(controller) -// mockGit.EXPECT().ListChanges(gomock.Any(), mockRepo.Slug, mockBuild.After, gomock.Any()).Return(mockChanges, nil, nil) - -// mockClient := new(scm.Client) -// mockClient.Git = mockGit - -// got, err := listChanges(mockClient, mockRepo, mockBuild) -// if err != nil { -// t.Error(err) -// } -// want := []string{"README.md"} -// if diff := cmp.Diff(got, want); diff != "" { -// t.Errorf(diff) -// } -// } - -// func Test_listChanges_PullRequest(t *testing.T) { -// controller := gomock.NewController(t) -// defer controller.Finish() - -// mockRepo := &core.Repository{ -// Slug: "octocat/hello-world", -// } -// mockBuild := &core.Build{ -// Event: core.EventPullRequest, -// Ref: "refs/pulls/12/head", -// } -// mockChanges := []*scm.Change{ -// {Path: "README.md"}, -// } - -// mockPR := mock.NewMockPullRequestService(controller) -// mockPR.EXPECT().ListChanges(gomock.Any(), mockRepo.Slug, 12, gomock.Any()).Return(mockChanges, nil, nil) - -// mockClient := new(scm.Client) -// mockClient.PullRequests = mockPR - -// got, err := listChanges(mockClient, mockRepo, mockBuild) -// if err != nil { -// t.Error(err) -// } -// want := []string{"README.md"} -// if diff := cmp.Diff(got, want); diff != "" { -// t.Errorf(diff) -// } -// } - -// func Test_listChanges_PullRequest_ParseError(t *testing.T) { -// controller := gomock.NewController(t) -// defer controller.Finish() - -// mockRepo := &core.Repository{ -// Slug: "octocat/hello-world", -// } -// mockBuild := &core.Build{ -// Event: core.EventPullRequest, -// Ref: "refs/pulls/foo/head", -// } -// _, err := listChanges(nil, mockRepo, mockBuild) -// if err == nil { -// t.Errorf("Expect 
error parsing invalid pull request number") -// } -// } - -// func Test_parsePullRequest(t *testing.T) { -// var tests = []struct { -// ref string -// num int -// }{ -// {"refs/pulls/1/merge", 1}, -// {"refs/pulls/12/merge", 12}, -// } -// for _, test := range tests { -// pr, err := parsePullRequest(test.ref) -// if err != nil { -// t.Error(err) -// } -// if got, want := pr, test.num; got != want { -// t.Errorf("Want pull request number %d, got %d", want, got) -// } -// } -// } diff --git a/trigger/cron/cron.go b/trigger/cron/cron.go deleted file mode 100644 index aa6a2da7d7..0000000000 --- a/trigger/cron/cron.go +++ /dev/null @@ -1,196 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package cron - -import ( - "context" - "fmt" - "time" - - "github.com/drone/drone/core" - - "github.com/hashicorp/go-multierror" - "github.com/robfig/cron" - "github.com/sirupsen/logrus" -) - -// New returns a new Cron scheduler. -func New( - commits core.CommitService, - cron core.CronStore, - repos core.RepositoryStore, - users core.UserStore, - trigger core.Triggerer, -) *Scheduler { - return &Scheduler{ - commits: commits, - cron: cron, - repos: repos, - users: users, - trigger: trigger, - } -} - -// Scheduler defines a cron scheduler. -type Scheduler struct { - commits core.CommitService - cron core.CronStore - repos core.RepositoryStore - users core.UserStore - trigger core.Triggerer -} - -// Start starts the cron scheduler. -func (s *Scheduler) Start(ctx context.Context, dur time.Duration) error { - ticker := time.NewTicker(dur) - defer ticker.Stop() - - for { - select { - case <-ctx.Done(): - return nil - case <-ticker.C: - s.run(ctx) - } - } -} - -func (s *Scheduler) run(ctx context.Context) error { - var result error - - logrus.Debugln("cron: begin process pending jobs") - - defer func() { - if err := recover(); err != nil { - logger := logrus.WithField("error", err) - logger.Errorln("cron: unexpected panic") - } - }() - - now := time.Now() - jobs, err := s.cron.Ready(ctx, now.Unix()) - if err != nil { - logger := logrus.WithError(err) - logger.Error("cron: cannot list pending jobs") - return err - } - - logrus.Debugf("cron: found %d pending jobs", len(jobs)) - - for _, job := range jobs { - // jobs can be manually disabled in the user interface, - // and should be skipped. - if job.Disabled { - continue - } - - sched, err := cron.Parse(job.Expr) - if err != nil { - result = multierror.Append(result, err) - // this should never happen since we parse and verify - // the cron expression when the cron entry is created. - continue - } - - // calculate the next execution date. 
- job.Prev = job.Next - job.Next = sched.Next(now).Unix() - - logger := logrus.WithFields( - logrus.Fields{ - "repo": job.RepoID, - "cron": job.ID, - }, - ) - - err = s.cron.Update(ctx, job) - if err != nil { - logger := logrus.WithError(err) - logger.Warnln("cron: cannot re-schedule job") - result = multierror.Append(result, err) - continue - } - - repo, err := s.repos.Find(ctx, job.RepoID) - if err != nil { - logger := logrus.WithError(err) - logger.Warnln("cron: cannot find repository") - result = multierror.Append(result, err) - continue - } - - user, err := s.users.Find(ctx, repo.UserID) - if err != nil { - logger := logrus.WithError(err) - logger.Warnln("cron: cannot find repository owner") - result = multierror.Append(result, err) - continue - } - - if repo.Active == false { - logger.Traceln("cron: skip inactive repository") - continue - } - - // TODO(bradrydzewski) we may actually need to query the branch - // first to get the sha, and then query the commit. This works fine - // with github and gitlab, but may not work with other providers. - - commit, err := s.commits.FindRef(ctx, user, repo.Slug, job.Branch) - if err != nil { - logger.WithFields( - logrus.Fields{ - "error": err, - "repo": repo.Slug, - "branch": repo.Branch, - }).Warnln("cron: cannot find commit") - result = multierror.Append(result, err) - continue - } - - hook := &core.Hook{ - Trigger: core.TriggerCron, - Event: core.EventCron, - Link: commit.Link, - Timestamp: commit.Author.Date, - Message: commit.Message, - After: commit.Sha, - Ref: fmt.Sprintf("refs/heads/%s", job.Branch), - Target: job.Branch, - Author: commit.Author.Login, - AuthorName: commit.Author.Name, - AuthorEmail: commit.Author.Email, - AuthorAvatar: commit.Author.Avatar, - Cron: job.Name, - Sender: commit.Author.Login, - } - - logger.WithFields( - logrus.Fields{ - "cron": job.Name, - "repo": repo.Slug, - "branch": repo.Branch, - "sha": commit.Sha, - }).Warnln("cron: trigger build") - - _, err = s.trigger.Trigger(ctx, repo, hook) - if err != nil { - logger.WithFields( - logrus.Fields{ - "error": err, - "repo": repo.Slug, - "branch": repo.Branch, - "sha": commit.Sha, - }).Warnln("cron: cannot trigger build") - result = multierror.Append(result, err) - continue - } - } - - logrus.Debugf("cron: finished processing jobs") - return result -} diff --git a/trigger/cron/cron_oss.go b/trigger/cron/cron_oss.go deleted file mode 100644 index c87f9937fe..0000000000 --- a/trigger/cron/cron_oss.go +++ /dev/null @@ -1,43 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// +build oss - -package cron - -import ( - "context" - "time" - - "github.com/drone/drone/core" -) - -// New returns a noop Cron scheduler. -func New( - core.CommitService, - core.CronStore, - core.RepositoryStore, - core.UserStore, - core.Triggerer, -) *Scheduler { - return &Scheduler{} -} - -// Schedule is a no-op cron scheduler. -type Scheduler struct{} - -// Start is a no-op. 
-func (Scheduler) Start(context.Context, time.Duration) error { - return nil -} diff --git a/trigger/cron/cron_test.go b/trigger/cron/cron_test.go deleted file mode 100644 index 2a6f20954d..0000000000 --- a/trigger/cron/cron_test.go +++ /dev/null @@ -1,489 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package cron - -import ( - "context" - "database/sql" - "io/ioutil" - "testing" - "time" - - "github.com/drone/drone/core" - "github.com/drone/drone/mock" - - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" - "github.com/google/go-cmp/cmp/cmpopts" - "github.com/hashicorp/go-multierror" - "github.com/sirupsen/logrus" -) - -func init() { - logrus.SetOutput(ioutil.Discard) -} - -// TODO(bradrydzewski) test disabled cron jobs are skipped -// TODO(bradrydzewski) test to ensure panic does not exit program - -func TestCron(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - checkBuild := func(_ context.Context, _ *core.Repository, hook *core.Hook) { - ignoreHookFields := cmpopts.IgnoreFields(core.Hook{}, - "Source", "Before") - if diff := cmp.Diff(hook, dummyHook, ignoreHookFields); diff != "" { - t.Errorf(diff) - } - } - - before := time.Now().Unix() - checkCron := func(_ context.Context, cron *core.Cron) { - if got, want := cron.Prev, int64(2000000000); got != want { - t.Errorf("Expect Next copied to Prev") - } - if before > cron.Next { - t.Errorf("Expect Next is set to unix timestamp") - } - } - - mockTriggerer := mock.NewMockTriggerer(controller) - mockTriggerer.EXPECT().Trigger(gomock.Any(), dummyRepo, gomock.Any()).Do(checkBuild) - - mockRepos := mock.NewMockRepositoryStore(controller) - mockRepos.EXPECT().Find(gomock.Any(), dummyCron.RepoID).Return(dummyRepo, nil) - - mockCrons := mock.NewMockCronStore(controller) - mockCrons.EXPECT().Ready(gomock.Any(), gomock.Any()).Return(dummyCronList, nil) - mockCrons.EXPECT().Update(gomock.Any(), dummyCron).Do(checkCron) - - mockUsers := mock.NewMockUserStore(controller) - mockUsers.EXPECT().Find(gomock.Any(), dummyRepo.UserID).Return(dummyUser, nil) - - mockCommits := mock.NewMockCommitService(controller) - mockCommits.EXPECT().FindRef(gomock.Any(), dummyUser, dummyRepo.Slug, dummyRepo.Branch).Return(dummyCommit, nil) - - s := Scheduler{ - commits: mockCommits, - cron: mockCrons, - repos: mockRepos, - users: mockUsers, - trigger: mockTriggerer, - } - - err := s.run(noContext) - if err != nil { - t.Error(err) - } -} - -func TestCron_Cancel(t *testing.T) { - ctx, cancel := context.WithCancel(context.Background()) - cancel() - - s := new(Scheduler) - err := s.Start(ctx, time.Minute) - if err != nil { - t.Errorf("Expect cron scheduler exits when context is canceled") - } -} - -// This unit tests demonstrates that if an error is encountered -// when returning a list of ready cronjobs, the process exits -// immediately with an error message. 
-func TestCron_ErrorList(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockCrons := mock.NewMockCronStore(controller) - mockCrons.EXPECT().Ready(gomock.Any(), gomock.Any()).Return(dummyCronList, sql.ErrNoRows) - - s := Scheduler{ - commits: nil, - cron: mockCrons, - repos: nil, - trigger: nil, - users: nil, - } - - err := s.run(noContext) - if err == nil { - t.Errorf("Want error when the select cron query fails") - } -} - -// This unit tests demonstrates that if an error is encountered -// when parsing a cronjob, the system will continue processing -// cron jobs and return an aggregated list of errors. -func TestCron_ErrorCronParse(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockTriggerer := mock.NewMockTriggerer(controller) - mockTriggerer.EXPECT().Trigger(gomock.Any(), dummyRepo, gomock.Any()).Return(nil, nil).Times(1) - - mockRepos := mock.NewMockRepositoryStore(controller) - mockRepos.EXPECT().Find(gomock.Any(), dummyCron.RepoID).Return(dummyRepo, nil).Times(1) - - mockCrons := mock.NewMockCronStore(controller) - mockCrons.EXPECT().Ready(gomock.Any(), gomock.Any()).Return(dummyCronListInvalid, nil) - mockCrons.EXPECT().Update(gomock.Any(), dummyCron).Times(1) - - mockUsers := mock.NewMockUserStore(controller) - mockUsers.EXPECT().Find(gomock.Any(), dummyRepo.UserID).Return(dummyUser, nil).Times(1) - - mockCommits := mock.NewMockCommitService(controller) - mockCommits.EXPECT().FindRef(gomock.Any(), dummyUser, dummyRepo.Slug, dummyRepo.Branch).Return(dummyCommit, nil).Times(1) - - s := Scheduler{ - commits: mockCommits, - cron: mockCrons, - repos: mockRepos, - users: mockUsers, - trigger: mockTriggerer, - } - - err := s.run(noContext) - merr := err.(*multierror.Error) - if got, want := len(merr.Errors), 1; got != want { - t.Errorf("Want %d errors, got %d", want, got) - } -} - -// This unit tests demonstrates that if an error is encountered -// when finding the associated cron repository, the system will -// continue processing cron jobs and return an aggregated list of -// errors. 
-func TestCron_ErrorFindRepo(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockTriggerer := mock.NewMockTriggerer(controller) - mockTriggerer.EXPECT().Trigger(gomock.Any(), dummyRepo, gomock.Any()).Return(nil, nil).Times(1) - - mockRepos := mock.NewMockRepositoryStore(controller) - mockRepos.EXPECT().Find(gomock.Any(), dummyCron.RepoID).Return(dummyRepo, nil) - mockRepos.EXPECT().Find(gomock.Any(), dummyCron.RepoID).Return(nil, sql.ErrNoRows) - - mockCrons := mock.NewMockCronStore(controller) - mockCrons.EXPECT().Ready(gomock.Any(), gomock.Any()).Return(dummyCronListMultiple, nil) - mockCrons.EXPECT().Update(gomock.Any(), dummyCron).Times(2) - - mockUsers := mock.NewMockUserStore(controller) - mockUsers.EXPECT().Find(gomock.Any(), dummyRepo.UserID).Return(dummyUser, nil).Times(1) - - mockCommits := mock.NewMockCommitService(controller) - mockCommits.EXPECT().FindRef(gomock.Any(), dummyUser, dummyRepo.Slug, dummyRepo.Branch).Return(dummyCommit, nil).Times(1) - - s := Scheduler{ - commits: mockCommits, - cron: mockCrons, - repos: mockRepos, - users: mockUsers, - trigger: mockTriggerer, - } - - err := s.run(noContext) - merr := err.(*multierror.Error) - if got, want := len(merr.Errors), 1; got != want { - t.Errorf("Want %d errors, got %d", want, got) - } -} - -// This unit tests demonstrates that if an error is encountered -// when updating the next cron execution time, the system will -// continue processing cron jobs and return an aggregated list -// of errors. -func TestCron_ErrorUpdateCron(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockTriggerer := mock.NewMockTriggerer(controller) - mockTriggerer.EXPECT().Trigger(gomock.Any(), dummyRepo, gomock.Any()).Return(nil, nil).Times(1) - - mockRepos := mock.NewMockRepositoryStore(controller) - mockRepos.EXPECT().Find(gomock.Any(), dummyCron.RepoID).Return(dummyRepo, nil).Times(1) - - mockCrons := mock.NewMockCronStore(controller) - mockCrons.EXPECT().Ready(gomock.Any(), gomock.Any()).Return(dummyCronListMultiple, nil) - mockCrons.EXPECT().Update(gomock.Any(), dummyCron).Return(nil) - mockCrons.EXPECT().Update(gomock.Any(), dummyCron).Return(sql.ErrNoRows) - - mockUsers := mock.NewMockUserStore(controller) - mockUsers.EXPECT().Find(gomock.Any(), dummyRepo.UserID).Return(dummyUser, nil).Times(1) - - mockCommits := mock.NewMockCommitService(controller) - mockCommits.EXPECT().FindRef(gomock.Any(), dummyUser, dummyRepo.Slug, dummyRepo.Branch).Return(dummyCommit, nil).Times(1) - - s := Scheduler{ - commits: mockCommits, - cron: mockCrons, - repos: mockRepos, - users: mockUsers, - trigger: mockTriggerer, - } - - err := s.run(noContext) - merr := err.(*multierror.Error) - if got, want := len(merr.Errors), 1; got != want { - t.Errorf("Want %d errors, got %d", want, got) - } - if got, want := merr.Errors[0], sql.ErrNoRows; got != want { - t.Errorf("Want error %v, got %v", want, got) - } -} - -// This unit tests demonstrates that if an error is encountered -// when finding the repository owner in the database, the system -// will continue processing cron jobs and return an aggregated -// list of errors. 
-func TestCron_ErrorFindUser(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockTriggerer := mock.NewMockTriggerer(controller) - mockTriggerer.EXPECT().Trigger(gomock.Any(), dummyRepo, gomock.Any()).Return(nil, nil).Times(1) - - mockRepos := mock.NewMockRepositoryStore(controller) - mockRepos.EXPECT().Find(gomock.Any(), dummyCron.RepoID).Return(dummyRepo, nil).Times(2) - - mockCrons := mock.NewMockCronStore(controller) - mockCrons.EXPECT().Ready(gomock.Any(), gomock.Any()).Return(dummyCronListMultiple, nil) - mockCrons.EXPECT().Update(gomock.Any(), dummyCron).Times(2) - - mockUsers := mock.NewMockUserStore(controller) - mockUsers.EXPECT().Find(gomock.Any(), dummyRepo.UserID).Return(dummyUser, nil).Times(1) - mockUsers.EXPECT().Find(gomock.Any(), dummyRepo.UserID).Return(nil, sql.ErrNoRows).Times(1) - - mockCommits := mock.NewMockCommitService(controller) - mockCommits.EXPECT().FindRef(gomock.Any(), dummyUser, dummyRepo.Slug, dummyRepo.Branch).Return(dummyCommit, nil).Times(1) - - s := Scheduler{ - commits: mockCommits, - cron: mockCrons, - repos: mockRepos, - users: mockUsers, - trigger: mockTriggerer, - } - - err := s.run(noContext) - merr := err.(*multierror.Error) - if got, want := len(merr.Errors), 1; got != want { - t.Errorf("Want %d errors, got %d", want, got) - } - if got, want := merr.Errors[0], sql.ErrNoRows; got != want { - t.Errorf("Want error %v, got %v", want, got) - } -} - -// This unit tests demonstrates that if an error is encountered -// when communicating with the source code management system, the -// system will continue processing cron jobs and return an aggregated -// list of errors. -func TestCron_ErrorFindCommit(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockTriggerer := mock.NewMockTriggerer(controller) - mockTriggerer.EXPECT().Trigger(gomock.Any(), dummyRepo, gomock.Any()).Return(nil, nil).Times(1) - - mockRepos := mock.NewMockRepositoryStore(controller) - mockRepos.EXPECT().Find(gomock.Any(), dummyCron.RepoID).Return(dummyRepo, nil).Times(2) - - mockCrons := mock.NewMockCronStore(controller) - mockCrons.EXPECT().Ready(gomock.Any(), gomock.Any()).Return(dummyCronListMultiple, nil) - mockCrons.EXPECT().Update(gomock.Any(), dummyCron).Times(2) - - mockUsers := mock.NewMockUserStore(controller) - mockUsers.EXPECT().Find(gomock.Any(), dummyRepo.UserID).Return(dummyUser, nil).Times(2) - - mockCommits := mock.NewMockCommitService(controller) - mockCommits.EXPECT().FindRef(gomock.Any(), dummyUser, dummyRepo.Slug, dummyRepo.Branch).Return(dummyCommit, nil).Times(1) - mockCommits.EXPECT().FindRef(gomock.Any(), dummyUser, dummyRepo.Slug, dummyRepo.Branch).Return(nil, sql.ErrNoRows).Times(1) - - s := Scheduler{ - commits: mockCommits, - cron: mockCrons, - repos: mockRepos, - users: mockUsers, - trigger: mockTriggerer, - } - - err := s.run(noContext) - merr := err.(*multierror.Error) - if got, want := len(merr.Errors), 1; got != want { - t.Errorf("Want %d errors, got %d", want, got) - } - if got, want := merr.Errors[0], sql.ErrNoRows; got != want { - t.Errorf("Want error %v, got %v", want, got) - } -} - -// This unit tests demonstrates that if an error is encountered -// when triggering a build, the system will continue processing -// cron jobs and return an aggregated list of errors. 
-func TestCron_ErrorTrigger(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockTriggerer := mock.NewMockTriggerer(controller) - mockTriggerer.EXPECT().Trigger(gomock.Any(), dummyRepo, gomock.Any()).Return(nil, sql.ErrNoRows) - mockTriggerer.EXPECT().Trigger(gomock.Any(), dummyRepo, gomock.Any()).Return(nil, nil) - - mockRepos := mock.NewMockRepositoryStore(controller) - mockRepos.EXPECT().Find(gomock.Any(), dummyCron.RepoID).Return(dummyRepo, nil).Times(2) - - mockCrons := mock.NewMockCronStore(controller) - mockCrons.EXPECT().Ready(gomock.Any(), gomock.Any()).Return(dummyCronListMultiple, nil) - mockCrons.EXPECT().Update(gomock.Any(), dummyCron).Times(2) - - mockUsers := mock.NewMockUserStore(controller) - mockUsers.EXPECT().Find(gomock.Any(), dummyRepo.UserID).Return(dummyUser, nil).Times(2) - - mockCommits := mock.NewMockCommitService(controller) - mockCommits.EXPECT().FindRef(gomock.Any(), dummyUser, dummyRepo.Slug, dummyRepo.Branch).Return(dummyCommit, nil).Times(2) - - s := Scheduler{ - commits: mockCommits, - cron: mockCrons, - repos: mockRepos, - users: mockUsers, - trigger: mockTriggerer, - } - - err := s.run(noContext) - merr := err.(*multierror.Error) - if got, want := len(merr.Errors), 1; got != want { - t.Errorf("Want %d errors, got %d", want, got) - } - if got, want := merr.Errors[0], sql.ErrNoRows; got != want { - t.Errorf("Want error %v, got %v", want, got) - } -} - -var ( - noContext = context.Background() - - dummyUser = &core.User{ - Login: "octocat", - } - - dummyBuild = &core.Build{ - Number: dummyRepo.Counter, - RepoID: dummyRepo.ID, - Status: core.StatusPending, - Event: core.EventCron, - Link: "https://github.com/octocat/Hello-World/commit/7fd1a60b01f91b314f59955a4e4d4e80d8edf11d", - Timestamp: 1299283200, - Message: "first commit", - Before: "553c2077f0edc3d5dc5d17262f6aa498e69d6f8e", - After: "7fd1a60b01f91b314f59955a4e4d4e80d8edf11d", - Ref: "refs/heads/master", - Source: "master", - Target: "master", - Author: "octocat", - AuthorName: "The Octocat", - AuthorEmail: "octocat@hello-world.com", - AuthorAvatar: "https://avatars3.githubusercontent.com/u/583231", - Sender: "octocat", - } - - dummyRepo = &core.Repository{ - ID: 1, - UID: "1296269", - UserID: 2, - Namespace: "octocat", - Name: "Hello-World", - Slug: "octocat/Hello-World", - SCM: "git", - HTTPURL: "https://github.com/octocat/Hello-World.git", - SSHURL: "git@github.com:octocat/Hello-World.git", - Link: "https://github.com/octocat/Hello-World", - Branch: "master", - Private: false, - Visibility: core.VisibilityPublic, - Active: true, - Counter: 42, - Signer: "g9dMChy22QutQM5lrpbe0yCR3f15t1gv", - Secret: "g9dMChy22QutQM5lrpbe0yCR3f15t1gv", - } - - dummyCron = &core.Cron{ - RepoID: dummyRepo.ID, - Name: "nightly", - Expr: "0 0 * * *", - Next: 2000000000, - Prev: 1000000000, - Branch: "master", - } - - dummyCronInvalid = &core.Cron{ - RepoID: dummyRepo.ID, - Name: "nightly", - Expr: "A B C D E", - Next: 2000000000, - Prev: 1000000000, - Branch: "master", - } - - dummyCronList = []*core.Cron{ - dummyCron, - } - - dummyCronListMultiple = []*core.Cron{ - dummyCron, - dummyCron, - } - - dummyCronListInvalid = []*core.Cron{ - dummyCronInvalid, - dummyCron, - } - - dummyHook = &core.Hook{ - Event: core.EventCron, - Link: "https://github.com/octocat/Hello-World/commit/7fd1a60b01f91b314f59955a4e4d4e80d8edf11d", - Timestamp: 1299283200, - Message: "first commit", - Before: "553c2077f0edc3d5dc5d17262f6aa498e69d6f8e", - After: "7fd1a60b01f91b314f59955a4e4d4e80d8edf11d", - Ref: 
"refs/heads/master", - Source: "master", - Target: "master", - Author: "octocat", - AuthorName: "The Octocat", - AuthorEmail: "octocat@hello-world.com", - AuthorAvatar: "https://avatars3.githubusercontent.com/u/583231", - Sender: "octocat", - Cron: "nightly", - Trigger: "@cron", - } - - dummyCommit = &core.Commit{ - Sha: dummyHook.After, - Message: dummyHook.Message, - Link: dummyHook.Link, - Committer: &core.Committer{ - Name: dummyHook.AuthorName, - Email: dummyHook.AuthorEmail, - Login: dummyHook.Author, - Avatar: dummyHook.AuthorAvatar, - Date: dummyHook.Timestamp, - }, - Author: &core.Committer{ - Name: dummyHook.AuthorName, - Email: dummyHook.AuthorEmail, - Login: dummyHook.Author, - Avatar: dummyHook.AuthorAvatar, - Date: dummyHook.Timestamp, - }, - } - - ignoreBuildFields = cmpopts.IgnoreFields(core.Build{}, - "Created", "Updated") -) diff --git a/trigger/dag/dag.go b/trigger/dag/dag.go deleted file mode 100644 index 85aa9b21c8..0000000000 --- a/trigger/dag/dag.go +++ /dev/null @@ -1,148 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// Copyright 2018 natessilva -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package dag - -// Dag is a directed acyclic graph. -type Dag struct { - graph map[string]*Vertex -} - -// Vertex is a vertex in the graph. -type Vertex struct { - Name string - Skip bool - graph []string -} - -// New creates a new directed acyclic graph (dag) that can -// determinate if a stage has dependencies. -func New() *Dag { - return &Dag{ - graph: make(map[string]*Vertex), - } -} - -// Add establishes a dependency between two vertices in the graph. -func (d *Dag) Add(from string, to ...string) *Vertex { - vertex := new(Vertex) - vertex.Name = from - vertex.Skip = false - vertex.graph = to - d.graph[from] = vertex - return vertex -} - -// Get returns the vertex from the graph. -func (d *Dag) Get(name string) (*Vertex, bool) { - vertex, ok := d.graph[name] - return vertex, ok -} - -// Dependencies returns the direct dependencies accounting for -// skipped dependencies. -func (d *Dag) Dependencies(name string) []string { - vertex := d.graph[name] - return d.dependencies(vertex) -} - -// Ancestors returns the ancestors of the vertex. -func (d *Dag) Ancestors(name string) []*Vertex { - vertex := d.graph[name] - return d.ancestors(vertex) -} - -// DetectCycles returns true if cycles are detected in the graph. -func (d *Dag) DetectCycles() bool { - visited := make(map[string]bool) - recStack := make(map[string]bool) - - for vertex := range d.graph { - if !visited[vertex] { - if d.detectCycles(vertex, visited, recStack) { - return true - } - } - } - return false -} - -// helper function returns the list of ancestors for the vertex. -func (d *Dag) ancestors(parent *Vertex) []*Vertex { - if parent == nil { - return nil - } - var combined []*Vertex - for _, name := range parent.graph { - vertex, found := d.graph[name] - if !found { - continue - } - if !vertex.Skip { - combined = append(combined, vertex) - } - combined = append(combined, d.ancestors(vertex)...) 
- } - return combined -} - -// helper function returns the list of dependencies for the, -// vertex taking into account skipped dependencies. -func (d *Dag) dependencies(parent *Vertex) []string { - if parent == nil { - return nil - } - var combined []string - for _, name := range parent.graph { - vertex, found := d.graph[name] - if !found { - continue - } - if vertex.Skip { - // if the vertex is skipped we should move up the - // graph and check direct ancestors. - combined = append(combined, d.dependencies(vertex)...) - } else { - combined = append(combined, vertex.Name) - } - } - return combined -} - -// helper function returns true if the vertex is cyclical. -func (d *Dag) detectCycles(name string, visited, recStack map[string]bool) bool { - visited[name] = true - recStack[name] = true - - vertex, ok := d.graph[name] - if !ok { - return false - } - for _, v := range vertex.graph { - // only check cycles on a vertex one time - if !visited[v] { - if d.detectCycles(v, visited, recStack) { - return true - } - // if we've visited this vertex in this recursion - // stack, then we have a cycle - } else if recStack[v] { - return true - } - - } - recStack[name] = false - return false -} diff --git a/trigger/dag/dag_test.go b/trigger/dag/dag_test.go deleted file mode 100644 index c387955b9b..0000000000 --- a/trigger/dag/dag_test.go +++ /dev/null @@ -1,211 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -// +build !oss - -package dag - -import ( - "reflect" - "testing" -) - -func TestDag(t *testing.T) { - dag := New() - dag.Add("backend") - dag.Add("frontend") - dag.Add("notify", "backend", "frontend") - if dag.DetectCycles() { - t.Errorf("cycles detected") - } - - dag = New() - dag.Add("notify", "backend", "frontend") - if dag.DetectCycles() { - t.Errorf("cycles detected") - } - - dag = New() - dag.Add("backend", "frontend") - dag.Add("frontend", "backend") - dag.Add("notify", "backend", "frontend") - if dag.DetectCycles() == false { - t.Errorf("Expect cycles detected") - } - - dag = New() - dag.Add("backend", "backend") - dag.Add("frontend", "backend") - dag.Add("notify", "backend", "frontend") - if dag.DetectCycles() == false { - t.Errorf("Expect cycles detected") - } - - dag = New() - dag.Add("backend") - dag.Add("frontend") - dag.Add("notify", "backend", "frontend", "notify") - if dag.DetectCycles() == false { - t.Errorf("Expect cycles detected") - } -} - -func TestAncestors(t *testing.T) { - dag := New() - v := dag.Add("backend") - dag.Add("frontend", "backend") - dag.Add("notify", "frontend") - - ancestors := dag.Ancestors("frontend") - if got, want := len(ancestors), 1; got != want { - t.Errorf("Want %d ancestors, got %d", want, got) - } - if ancestors[0] != v { - t.Errorf("Unexpected ancestor") - } - - if v := dag.Ancestors("backend"); len(v) != 0 { - t.Errorf("Expect vertexes with no dependencies has zero ancestors") - } -} - -func TestAncestors_Skipped(t *testing.T) { - dag := New() - dag.Add("backend").Skip = true - dag.Add("frontend", "backend").Skip = true - dag.Add("notify", "frontend") - - if v := dag.Ancestors("frontend"); len(v) != 0 { - t.Errorf("Expect skipped vertexes excluded") - } - if v := dag.Ancestors("notify"); len(v) != 0 { - t.Errorf("Expect skipped vertexes excluded") - } -} - -func TestAncestors_NotFound(t *testing.T) { - dag := New() - dag.Add("backend") - dag.Add("frontend", "backend") - dag.Add("notify", "frontend") - if 
dag.DetectCycles() { - t.Errorf("cycles detected") - } - if v := dag.Ancestors("does-not-exist"); len(v) != 0 { - t.Errorf("Expect vertex not found does not panic") - } -} - -func TestAncestors_Malformed(t *testing.T) { - dag := New() - dag.Add("backend") - dag.Add("frontend", "does-not-exist") - dag.Add("notify", "frontend") - if dag.DetectCycles() { - t.Errorf("cycles detected") - } - if v := dag.Ancestors("frontend"); len(v) != 0 { - t.Errorf("Expect invalid dependency does not panic") - } -} - -func TestAncestors_Complex(t *testing.T) { - dag := New() - dag.Add("backend") - dag.Add("frontend") - dag.Add("publish", "backend", "frontend") - dag.Add("deploy", "publish") - last := dag.Add("notify", "deploy") - if dag.DetectCycles() { - t.Errorf("cycles detected") - } - - ancestors := dag.Ancestors("notify") - if got, want := len(ancestors), 4; got != want { - t.Errorf("Want %d ancestors, got %d", want, got) - return - } - for _, ancestor := range ancestors { - if ancestor == last { - t.Errorf("Unexpected ancestor") - } - } - - v, _ := dag.Get("publish") - v.Skip = true - ancestors = dag.Ancestors("notify") - if got, want := len(ancestors), 3; got != want { - t.Errorf("Want %d ancestors, got %d", want, got) - return - } -} - -func TestDependencies(t *testing.T) { - dag := New() - dag.Add("backend") - dag.Add("frontend") - dag.Add("publish", "backend", "frontend") - - if deps := dag.Dependencies("backend"); len(deps) != 0 { - t.Errorf("Expect zero dependencies") - } - if deps := dag.Dependencies("frontend"); len(deps) != 0 { - t.Errorf("Expect zero dependencies") - } - - got, want := dag.Dependencies("publish"), []string{"backend", "frontend"} - if !reflect.DeepEqual(got, want) { - t.Errorf("Unexpected dependencies, got %v", got) - } -} - -func TestDependencies_Skipped(t *testing.T) { - dag := New() - dag.Add("backend") - dag.Add("frontend").Skip = true - dag.Add("publish", "backend", "frontend") - - if deps := dag.Dependencies("backend"); len(deps) != 0 { - t.Errorf("Expect zero dependencies") - } - if deps := dag.Dependencies("frontend"); len(deps) != 0 { - t.Errorf("Expect zero dependencies") - } - - got, want := dag.Dependencies("publish"), []string{"backend"} - if !reflect.DeepEqual(got, want) { - t.Errorf("Unexpected dependencies, got %v", got) - } -} - -func TestDependencies_Complex(t *testing.T) { - dag := New() - dag.Add("clone") - dag.Add("backend") - dag.Add("frontend", "backend").Skip = true - dag.Add("publish", "frontend", "clone") - dag.Add("notify", "publish") - - if deps := dag.Dependencies("clone"); len(deps) != 0 { - t.Errorf("Expect zero dependencies for clone") - } - if deps := dag.Dependencies("backend"); len(deps) != 0 { - t.Errorf("Expect zero dependencies for backend") - } - - got, want := dag.Dependencies("frontend"), []string{"backend"} - if !reflect.DeepEqual(got, want) { - t.Errorf("Unexpected dependencies for frontend, got %v", got) - } - - got, want = dag.Dependencies("publish"), []string{"backend", "clone"} - if !reflect.DeepEqual(got, want) { - t.Errorf("Unexpected dependencies for publish, got %v", got) - } - - got, want = dag.Dependencies("notify"), []string{"publish"} - if !reflect.DeepEqual(got, want) { - t.Errorf("Unexpected dependencies for notify, got %v", got) - } -} diff --git a/trigger/skip.go b/trigger/skip.go deleted file mode 100644 index 2b62a4d1b4..0000000000 --- a/trigger/skip.go +++ /dev/null @@ -1,104 +0,0 @@ -// Copyright 2019 Drone IO, Inc. 
-// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package trigger - -import ( - "strings" - - "github.com/drone/drone-yaml/yaml" - "github.com/drone/drone/core" -) - -func skipBranch(document *yaml.Pipeline, branch string) bool { - return !document.Trigger.Branch.Match(branch) -} - -func skipRef(document *yaml.Pipeline, ref string) bool { - return !document.Trigger.Ref.Match(ref) -} - -func skipEvent(document *yaml.Pipeline, event string) bool { - return !document.Trigger.Event.Match(event) -} - -func skipAction(document *yaml.Pipeline, action string) bool { - return !document.Trigger.Action.Match(action) -} - -func skipInstance(document *yaml.Pipeline, instance string) bool { - return !document.Trigger.Instance.Match(instance) -} - -func skipTarget(document *yaml.Pipeline, env string) bool { - return !document.Trigger.Target.Match(env) -} - -func skipRepo(document *yaml.Pipeline, repo string) bool { - return !document.Trigger.Repo.Match(repo) -} - -func skipCron(document *yaml.Pipeline, cron string) bool { - return !document.Trigger.Cron.Match(cron) -} - -func skipMessage(hook *core.Hook) bool { - switch { - case hook.Event == core.EventTag: - return false - case hook.Event == core.EventCron: - return false - case hook.Event == core.EventCustom: - return false - case hook.Event == core.EventPromote: - return false - case hook.Event == core.EventRollback: - return false - case skipMessageEval(hook.Message): - return true - case skipMessageEval(hook.Title): - return true - default: - return false - } -} - -func skipMessageEval(str string) bool { - lower := strings.ToLower(str) - switch { - case strings.Contains(lower, "[ci skip]"), - strings.Contains(lower, "[skip ci]"), - strings.Contains(lower, "***no_ci***"): - return true - default: - return false - } -} - -// func skipPaths(document *config.Config, paths []string) bool { -// switch { -// // changed files are only returned for push and pull request -// // events. If the list of changed files is empty the system will -// // force-run all pipelines and pipeline steps -// case len(paths) == 0: -// return false -// // github returns a maximum of 300 changed files from the -// // api response. If there are 300+ changed files the system -// // will force-run all pipelines and pipeline steps. -// case len(paths) >= 300: -// return false -// default: -// return !document.Trigger.Paths.MatchAny(paths) -// } -// } diff --git a/trigger/skip_test.go b/trigger/skip_test.go deleted file mode 100644 index 0aa763e475..0000000000 --- a/trigger/skip_test.go +++ /dev/null @@ -1,266 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
- -// +build !oss - -package trigger - -import ( - "testing" - - "github.com/drone/drone-yaml/yaml" - "github.com/drone/drone/core" -) - -func Test_skipBranch(t *testing.T) { - tests := []struct { - config string - branch string - want bool - }{ - { - config: "kind: pipeline\ntrigger: { }", - branch: "master", - want: false, - }, - { - config: "kind: pipeline\ntrigger: { branch: [ master ] }", - branch: "master", - want: false, - }, - { - config: "kind: pipeline\ntrigger: { branch: [ master ] }", - branch: "develop", - want: true, - }, - } - for i, test := range tests { - manifest, err := yaml.ParseString(test.config) - if err != nil { - t.Error(err) - } - pipeline := manifest.Resources[0].(*yaml.Pipeline) - got, want := skipBranch(pipeline, test.branch), test.want - if got != want { - t.Errorf("Want test %d to return %v", i, want) - } - } -} - -func Test_skipEvent(t *testing.T) { - tests := []struct { - config string - event string - want bool - }{ - { - config: "kind: pipeline\ntrigger: { }", - event: "push", - want: false, - }, - { - config: "kind: pipeline\ntrigger: { event: [ push ] }", - event: "push", - want: false, - }, - { - config: "kind: pipeline\ntrigger: { event: [ push ] }", - event: "pull_request", - want: true, - }, - } - for i, test := range tests { - manifest, err := yaml.ParseString(test.config) - if err != nil { - t.Error(err) - } - pipeline := manifest.Resources[0].(*yaml.Pipeline) - got, want := skipEvent(pipeline, test.event), test.want - if got != want { - t.Errorf("Want test %d to return %v", i, want) - } - } -} - -// func Test_skipPath(t *testing.T) { -// tests := []struct { -// config string -// paths []string -// want bool -// }{ -// { -// config: "trigger: { }", -// paths: []string{}, -// want: false, -// }, -// { -// config: "trigger: { }", -// paths: []string{"README.md"}, -// want: false, -// }, -// { -// config: "trigger: { paths: foo/* }", -// paths: []string{"foo/README"}, -// want: false, -// }, -// { -// config: "trigger: { paths: foo/* }", -// paths: []string{"bar/README"}, -// want: true, -// }, -// // if empty changeset, never skip the pipeline -// { -// config: "trigger: { paths: foo/* }", -// paths: []string{}, -// want: false, -// }, -// // if max changeset, never skip the pipeline -// { -// config: "trigger: { paths: foo/* }", -// paths: make([]string, 400), -// want: false, -// }, -// } -// for i, test := range tests { -// document, err := config.ParseString(test.config) -// if err != nil { -// t.Error(err) -// } -// got, want := skipPaths(document, test.paths), test.want -// if got != want { -// t.Errorf("Want test %d to return %v", i, want) -// } -// } -// } - -func Test_skipMessage(t *testing.T) { - tests := []struct { - event string - message string - title string - want bool - }{ - { - event: "push", - message: "update readme", - want: false, - }, - // skip when message contains [CI SKIP] - { - event: "push", - message: "update readme [CI SKIP]", - want: true, - }, - { - event: "pull_request", - message: "update readme [CI SKIP]", - want: true, - }, - // skip when title contains [CI SKIP] - - { - event: "push", - title: "update readme [CI SKIP]", - want: true, - }, - { - event: "pull_request", - title: "update readme [CI SKIP]", - want: true, - }, - // ignore [CI SKIP] when event is tag - { - event: "tag", - message: "update readme [CI SKIP]", - want: false, - }, - { - event: "tag", - title: "update readme [CI SKIP]", - want: false, - }, - { - event: "cron", - title: "update readme [CI SKIP]", - want: false, - }, - { - event: "cron", - 
title: "update readme [CI SKIP]", - want: false, - }, - { - event: "custom", - title: "update readme [CI SKIP]", - want: false, - }, - { - event: "custom", - title: "update readme [CI SKIP]", - want: false, - }, - { - event: "promote", - title: "update readme [CI SKIP]", - want: false, - }, - { - event: "promote", - title: "update readme [CI SKIP]", - want: false, - }, - { - event: "rollback", - title: "update readme [CI SKIP]", - want: false, - }, - { - event: "rollback", - title: "update readme [CI SKIP]", - want: false, - }, - } - for _, test := range tests { - hook := &core.Hook{ - Message: test.message, - Title: test.title, - Event: test.event, - } - got, want := skipMessage(hook), test.want - if got != want { - t.Errorf("Want { event: %q, message: %q, title: %q } to return %v", - test.event, test.message, test.title, want) - } - } -} - -func Test_skipMessageEval(t *testing.T) { - tests := []struct { - eval string - want bool - }{ - {"update readme", false}, - // test [CI SKIP] - {"foo [ci skip] bar", true}, - {"foo [CI SKIP] bar", true}, - {"foo [CI Skip] bar", true}, - {"foo [CI SKIP]", true}, - // test [SKIP CI] - {"foo [skip ci] bar", true}, - {"foo [SKIP CI] bar", true}, - {"foo [Skip CI] bar", true}, - {"foo [SKIP CI]", true}, - // test ***NO_CI*** - {"foo ***NO_CI*** bar", true}, - {"foo ***NO_CI*** bar", true}, - {"foo ***NO_CI*** bar", true}, - {"foo ***NO_CI***", true}, - } - for _, test := range tests { - got, want := skipMessageEval(test.eval), test.want - if got != want { - t.Errorf("Want %q to return %v, got %v", test.eval, want, got) - } - } -} diff --git a/trigger/trigger.go b/trigger/trigger.go deleted file mode 100644 index 164260d59d..0000000000 --- a/trigger/trigger.go +++ /dev/null @@ -1,643 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package trigger - -import ( - "context" - "runtime/debug" - "strings" - "time" - - "github.com/drone/drone-yaml/yaml" - "github.com/drone/drone-yaml/yaml/converter" - "github.com/drone/drone-yaml/yaml/linter" - "github.com/drone/drone-yaml/yaml/signer" - - "github.com/drone/drone/core" - "github.com/drone/drone/trigger/dag" - - "github.com/sirupsen/logrus" -) - -type triggerer struct { - canceler core.Canceler - config core.ConfigService - convert core.ConvertService - commits core.CommitService - status core.StatusService - builds core.BuildStore - sched core.Scheduler - repos core.RepositoryStore - users core.UserStore - validate core.ValidateService - hooks core.WebhookSender -} - -// New returns a new build triggerer. 
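The skip tests above all follow Go's table-driven convention: each case pairs an input with the expected decision, and a failure reports the case index. A minimal, self-contained variant of that pattern, exercising a local stand-in helper rather than the deleted functions (the package and file names are hypothetical):

package trigger // hypothetical package; put this in a file such as skip_directive_test.go

import (
	"strings"
	"testing"
)

// stand-in for the deleted skipMessageEval helper (see the earlier sketch).
func hasSkipDirective(s string) bool {
	lower := strings.ToLower(s)
	return strings.Contains(lower, "[ci skip]") || strings.Contains(lower, "[skip ci]")
}

func TestHasSkipDirective(t *testing.T) {
	tests := []struct {
		message string
		want    bool
	}{
		{message: "update readme", want: false},
		{message: "update readme [CI SKIP]", want: true},
		{message: "update readme [skip ci]", want: true},
	}
	for i, test := range tests {
		if got := hasSkipDirective(test.message); got != test.want {
			t.Errorf("Want test %d to return %v, got %v", i, test.want, got)
		}
	}
}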
-func New( - canceler core.Canceler, - config core.ConfigService, - convert core.ConvertService, - commits core.CommitService, - status core.StatusService, - builds core.BuildStore, - sched core.Scheduler, - repos core.RepositoryStore, - users core.UserStore, - validate core.ValidateService, - hooks core.WebhookSender, -) core.Triggerer { - return &triggerer{ - canceler: canceler, - config: config, - convert: convert, - commits: commits, - status: status, - builds: builds, - sched: sched, - repos: repos, - users: users, - validate: validate, - hooks: hooks, - } -} - -func (t *triggerer) Trigger(ctx context.Context, repo *core.Repository, base *core.Hook) (*core.Build, error) { - logger := logrus.WithFields( - logrus.Fields{ - "repo": repo.Slug, - "ref": base.Ref, - "event": base.Event, - "commit": base.After, - }, - ) - - logger.Debugln("trigger: received") - defer func() { - // taking the paranoid approach to recover from - // a panic that should absolutely never happen. - if r := recover(); r != nil { - logger.Errorf("runner: unexpected panic: %s", r) - debug.PrintStack() - } - }() - - if skipMessage(base) { - logger.Infoln("trigger: skipping hook. found skip directive") - return nil, nil - } - if base.Event == core.EventPullRequest { - if repo.IgnorePulls { - logger.Infoln("trigger: skipping hook. project ignores pull requests") - return nil, nil - } - if repo.IgnoreForks && !strings.EqualFold(base.Fork, repo.Slug) { - logger.Infoln("trigger: skipping hook. project ignores forks") - return nil, nil - } - } - - user, err := t.users.Find(ctx, repo.UserID) - if err != nil { - logger = logger.WithError(err) - logger.Warnln("trigger: cannot find repository owner") - return nil, err - } - - if user.Active == false { - logger.Infoln("trigger: skipping hook. repository owner is inactive") - return nil, nil - } - - // if the commit message is not included we should - // make an optional API call to the version control - // system to augment the available information. - if base.Message == "" && base.After != "" { - commit, err := t.commits.Find(ctx, user, repo.Slug, base.After) - if err == nil && commit != nil { - base.Message = commit.Message - if base.AuthorEmail == "" { - base.AuthorEmail = commit.Author.Email - } - if base.AuthorName == "" { - base.AuthorName = commit.Author.Name - } - if base.AuthorAvatar == "" { - base.AuthorAvatar = commit.Author.Avatar - } - } - } - - // // some tag hooks provide the tag but do not provide the sha. - // // this may be important if we want to fetch the .drone.yml - // if base.After == "" && base.Event == core.EventTag { - // tag, _, err := t.client.Git.FindTag(ctx, repo.Slug, base.Ref) - // if err != nil { - // logger.Error().Err(err). 
- // Msg("cannot find tag") - // return nil, err - // } - // base.After = tag.Sha - // } - - // TODO: do a better job of documenting this - // obj := base.After - // if len(obj) == 0 { - // if strings.HasPrefix(base.Ref, "refs/pull/") { - // obj = base.Target - // } else { - // obj = base.Ref - // } - // } - tmpBuild := &core.Build{ - RepoID: repo.ID, - Trigger: base.Trigger, - Parent: base.Parent, - Status: core.StatusPending, - Event: base.Event, - Action: base.Action, - Link: base.Link, - // Timestamp: base.Timestamp, - Title: base.Title, - Message: base.Message, - Before: base.Before, - After: base.After, - Ref: base.Ref, - Fork: base.Fork, - Source: base.Source, - Target: base.Target, - Author: base.Author, - AuthorName: base.AuthorName, - AuthorEmail: base.AuthorEmail, - AuthorAvatar: base.AuthorAvatar, - Params: base.Params, - Cron: base.Cron, - Deploy: base.Deployment, - DeployID: base.DeploymentID, - Debug: base.Debug, - Sender: base.Sender, - Created: time.Now().Unix(), - Updated: time.Now().Unix(), - } - req := &core.ConfigArgs{ - User: user, - Repo: repo, - Build: tmpBuild, - } - raw, err := t.config.Find(ctx, req) - if err != nil { - logger = logger.WithError(err) - logger.Warnln("trigger: cannot find yaml") - return nil, err - } - - raw, err = t.convert.Convert(ctx, &core.ConvertArgs{ - User: user, - Repo: repo, - Build: tmpBuild, - Config: raw, - }) - if err != nil { - logger = logger.WithError(err) - logger.Warnln("trigger: cannot convert yaml") - return t.createBuildError(ctx, repo, base, err.Error()) - } - - // this code is temporarily in place to detect and convert - // the legacy yaml configuration file to the new format. - raw.Data, err = converter.ConvertString(raw.Data, converter.Metadata{ - Filename: repo.Config, - URL: repo.Link, - Ref: base.Ref, - }) - if err != nil { - logger = logger.WithError(err) - logger.Warnln("trigger: cannot convert yaml") - return t.createBuildError(ctx, repo, base, err.Error()) - } - - manifest, err := yaml.ParseString(raw.Data) - if err != nil { - logger = logger.WithError(err) - logger.Warnln("trigger: cannot parse yaml") - return t.createBuildError(ctx, repo, base, err.Error()) - } - - verr := t.validate.Validate(ctx, &core.ValidateArgs{ - User: user, - Repo: repo, - Build: tmpBuild, - Config: raw, - }) - switch verr { - case core.ErrValidatorBlock: - logger.Debugln("trigger: yaml validation error: block pipeline") - case core.ErrValidatorSkip: - logger.Debugln("trigger: yaml validation error: skip pipeline") - return nil, nil - default: - if verr != nil { - logger = logger.WithError(err) - logger.Warnln("trigger: yaml validation error") - return t.createBuildError(ctx, repo, base, verr.Error()) - } - } - - err = linter.Manifest(manifest, repo.Trusted) - if err != nil { - logger = logger.WithError(err) - logger.Warnln("trigger: yaml linting error") - return t.createBuildError(ctx, repo, base, err.Error()) - } - - verified := true - if repo.Protected && base.Trigger == core.TriggerHook { - key := signer.KeyString(repo.Secret) - val := []byte(raw.Data) - verified, _ = signer.Verify(val, key) - } - // if pipeline validation failed with a block error, the - // pipeline verification should be set to false, which will - // force manual review and approval. - if verr == core.ErrValidatorBlock { - verified = false - } - - // var paths []string - // paths, err := listChanges(t.client, repo, base) - // if err != nil { - // logger.Warn().Err(err). 
- // Msg("cannot fetch changeset") - // } - - var matched []*yaml.Pipeline - var dag = dag.New() - for _, document := range manifest.Resources { - pipeline, ok := document.(*yaml.Pipeline) - if !ok { - continue - } - // TODO add repo - // TODO add instance - // TODO add target - // TODO add ref - name := pipeline.Name - if name == "" { - name = "default" - } - node := dag.Add(pipeline.Name, pipeline.DependsOn...) - node.Skip = true - - if skipBranch(pipeline, base.Target) { - logger = logger.WithField("pipeline", pipeline.Name) - logger.Infoln("trigger: skipping pipeline, does not match branch") - } else if skipEvent(pipeline, base.Event) { - logger = logger.WithField("pipeline", pipeline.Name) - logger.Infoln("trigger: skipping pipeline, does not match event") - } else if skipAction(pipeline, base.Action) { - logger = logger.WithField("pipeline", pipeline.Name).WithField("action", base.Action) - logger.Infoln("trigger: skipping pipeline, does not match action") - } else if skipRef(pipeline, base.Ref) { - logger = logger.WithField("pipeline", pipeline.Name) - logger.Infoln("trigger: skipping pipeline, does not match ref") - } else if skipRepo(pipeline, repo.Slug) { - logger = logger.WithField("pipeline", pipeline.Name) - logger.Infoln("trigger: skipping pipeline, does not match repo") - } else if skipTarget(pipeline, base.Deployment) { - logger = logger.WithField("pipeline", pipeline.Name) - logger.Infoln("trigger: skipping pipeline, does not match deploy target") - } else if skipCron(pipeline, base.Cron) { - logger = logger.WithField("pipeline", pipeline.Name) - logger.Infoln("trigger: skipping pipeline, does not match cron job") - } else { - matched = append(matched, pipeline) - node.Skip = false - } - } - - if dag.DetectCycles() { - return t.createBuildError(ctx, repo, base, "Error: Dependency cycle detected in Pipeline") - } - - if len(matched) == 0 { - logger.Infoln("trigger: skipping build, no matching pipelines") - return nil, nil - } - - repo, err = t.repos.Increment(ctx, repo) - if err != nil { - logger = logger.WithError(err) - logger.Errorln("trigger: cannot increment build sequence") - return nil, err - } - - build := &core.Build{ - RepoID: repo.ID, - Trigger: base.Trigger, - Number: repo.Counter, - Parent: base.Parent, - Status: core.StatusPending, - Event: base.Event, - Action: base.Action, - Link: base.Link, - // Timestamp: base.Timestamp, - Title: trunc(base.Title, 2000), - Message: trunc(base.Message, 2000), - Before: base.Before, - After: base.After, - Ref: base.Ref, - Fork: base.Fork, - Source: base.Source, - Target: base.Target, - Author: base.Author, - AuthorName: base.AuthorName, - AuthorEmail: base.AuthorEmail, - AuthorAvatar: base.AuthorAvatar, - Params: base.Params, - Deploy: base.Deployment, - DeployID: base.DeploymentID, - Debug: base.Debug, - Sender: base.Sender, - Cron: base.Cron, - Created: time.Now().Unix(), - Updated: time.Now().Unix(), - } - - stages := make([]*core.Stage, len(matched)) - for i, match := range matched { - onSuccess := match.Trigger.Status.Match(core.StatusPassing) - onFailure := match.Trigger.Status.Match(core.StatusFailing) - if len(match.Trigger.Status.Include)+len(match.Trigger.Status.Exclude) == 0 { - onFailure = false - } - - stage := &core.Stage{ - RepoID: repo.ID, - Number: i + 1, - Name: match.Name, - Kind: match.Kind, - Type: match.Type, - OS: match.Platform.OS, - Arch: match.Platform.Arch, - Variant: match.Platform.Variant, - Kernel: match.Platform.Version, - Limit: match.Concurrency.Limit, - LimitRepo: int(repo.Throttle), - 
Status: core.StatusWaiting, - DependsOn: match.DependsOn, - OnSuccess: onSuccess, - OnFailure: onFailure, - Labels: match.Node, - Created: time.Now().Unix(), - Updated: time.Now().Unix(), - } - if stage.Kind == "pipeline" && stage.Type == "" { - stage.Type = "docker" - } - if stage.OS == "" { - stage.OS = "linux" - } - if stage.Arch == "" { - stage.Arch = "amd64" - } - - if stage.Name == "" { - stage.Name = "default" - } - if verified == false { - stage.Status = core.StatusBlocked - } else if len(stage.DependsOn) == 0 { - stage.Status = core.StatusPending - } - stages[i] = stage - } - - for _, stage := range stages { - // here we re-work the dependencies for the stage to - // account for the fact that some steps may be skipped - // and may otherwise break the dependency chain. - stage.DependsOn = dag.Dependencies(stage.Name) - - // if the stage is pending dependencies, but those - // dependencies are skipped, the stage can be executed - // immediately. - if stage.Status == core.StatusWaiting && - len(stage.DependsOn) == 0 { - stage.Status = core.StatusPending - } - } - - err = t.builds.Create(ctx, build, stages) - if err != nil { - logger = logger.WithError(err) - logger.Errorln("trigger: cannot create build") - return nil, err - } - - err = t.status.Send(ctx, user, &core.StatusInput{ - Repo: repo, - Build: build, - }) - if err != nil { - logger = logger.WithError(err) - logger.Warnln("trigger: cannot create status") - } - - for _, stage := range stages { - if stage.Status != core.StatusPending { - continue - } - err = t.sched.Schedule(ctx, stage) - if err != nil { - logger = logger.WithError(err) - logger.Errorln("trigger: cannot enqueue build") - return nil, err - } - } - - payload := &core.WebhookData{ - Event: core.WebhookEventBuild, - Action: core.WebhookActionCreated, - User: user, - Repo: repo, - Build: build, - } - err = t.hooks.Send(ctx, payload) - if err != nil { - logger = logger.WithError(err) - logger.Warnln("trigger: cannot send webhook") - } - - if repo.CancelPush && build.Event == core.EventPush || - repo.CancelPulls && build.Event == core.EventPullRequest { - go t.canceler.CancelPending(ctx, repo, build) - } - - // err = t.hooks.SendEndpoint(ctx, payload, repo.Endpoints.Webhook) - // if err != nil { - // logger.Warn().Err(err). - // Int64("build", build.Number). - // Msg("cannot send user-defined webhook") - // } - - // // we should only synchronize the cronjob list on push - // // events to the default branch. - // if build.Event == core.EventPush && - // build.Target == repo.Branch { - // err = t.cron.Sync(ctx, repo, manifest) - // if err != nil { - // logger.Warn().Err(err). 
- // Msg("cannot sync cronjobs") - // } - // } - - return build, nil -} - -func trunc(s string, i int) string { - runes := []rune(s) - if len(runes) > i { - return string(runes[:i]) - } - return s -} - -func (t *triggerer) createBuildError(ctx context.Context, repo *core.Repository, base *core.Hook, message string) (*core.Build, error) { - logger := logrus.WithFields( - logrus.Fields{ - "repo": repo.Slug, - "ref": base.Ref, - "event": base.Event, - "commit": base.After, - }, - ) - - repo, err := t.repos.Increment(ctx, repo) - if err != nil { - return nil, err - } - - build := &core.Build{ - RepoID: repo.ID, - Number: repo.Counter, - Parent: base.Parent, - Status: core.StatusError, - Error: message, - Event: base.Event, - Action: base.Action, - Link: base.Link, - // Timestamp: base.Timestamp, - Title: base.Title, - Message: base.Message, - Before: base.Before, - After: base.After, - Ref: base.Ref, - Fork: base.Fork, - Source: base.Source, - Target: base.Target, - Author: base.Author, - AuthorName: base.AuthorName, - AuthorEmail: base.AuthorEmail, - AuthorAvatar: base.AuthorAvatar, - Deploy: base.Deployment, - DeployID: base.DeploymentID, - Debug: base.Debug, - Sender: base.Sender, - Created: time.Now().Unix(), - Updated: time.Now().Unix(), - Started: time.Now().Unix(), - Finished: time.Now().Unix(), - } - - err = t.builds.Create(ctx, build, nil) - if err != nil { - logger = logger.WithError(err) - logger.Errorln("trigger: cannot create build error") - return nil, err - } - - user, err := t.users.Find(ctx, repo.UserID) - if err != nil { - logger = logger.WithError(err) - logger.Warnln("trigger: cannot find repository owner") - return nil, err - } - - err = t.status.Send(ctx, user, &core.StatusInput{ - Repo: repo, - Build: build, - }) - if err != nil { - logger = logger.WithError(err) - logger.Warnln("trigger: cannot create status") - } - - payload := &core.WebhookData{ - Event: core.WebhookEventBuild, - Action: core.WebhookActionCreated, - User: user, - Repo: repo, - Build: build, - } - err = t.hooks.Send(ctx, payload) - if err != nil { - logger = logger.WithError(err) - logger.Warnln("trigger: cannot send webhook") - } - - return build, err -} - -// func shouldBlock(repo *core.Repository, build *core.Build) bool { -// switch { -// case repo.Hooks.Promote == core.HookBlock && build.Event == core.EventPromote: -// return true -// case repo.Hooks.Rollback == core.HookBlock && build.Event == core.EventRollback: -// return true -// case repo.Hooks.Deploy == core.HookBlock && build.Event == core.EventRollback: -// return true -// case repo.Hooks.Pull == core.HookBlock && build.Event == core.EventPullRequest: -// return true -// case repo.Hooks.Push == core.HookBlock && build.Event == core.EventPush: -// return true -// case repo.Hooks.Tags == core.HookBlock && build.Event == core.EventTag: -// return true -// case repo.Hooks.Forks == core.HookBlock && build.Fork != repo.Slug: -// return true -// default: -// return false -// } -// } - -// func skipHook(repo *core.Repository, build *core.Hook) bool { -// switch { -// case repo.Hooks.Promote == core.HookDisable && build.Event == core.EventPromote: -// return true -// case repo.Hooks.Rollback == core.HookDisable && build.Event == core.EventRollback: -// return true -// case repo.Hooks.Pull == core.HookDisable && build.Event == core.EventPullRequest: -// return true -// case repo.Hooks.Push == core.HookDisable && build.Event == core.EventPush: -// return true -// case repo.Hooks.Tags == core.HookDisable && build.Event == core.EventTag: -// return 
true -// default: -// return false -// } -// } - -// func skipFork(repo *core.Repository, build *core.Hook) bool { -// return repo.Hooks.Forks == core.HookDisable && build.Fork != repo.Slug -// } diff --git a/trigger/trigger_test.go b/trigger/trigger_test.go deleted file mode 100644 index 0e5e8b19e8..0000000000 --- a/trigger/trigger_test.go +++ /dev/null @@ -1,659 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. - -//go:build !oss -// +build !oss - -package trigger - -import ( - "context" - "database/sql" - "io" - "io/ioutil" - "testing" - - "github.com/drone/drone/core" - "github.com/drone/drone/mock" - "github.com/sirupsen/logrus" - - "github.com/golang/mock/gomock" - "github.com/google/go-cmp/cmp" - "github.com/google/go-cmp/cmp/cmpopts" -) - -var noContext = context.Background() - -func init() { - logrus.SetOutput(ioutil.Discard) -} - -func TestTrigger(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - checkBuild := func(_ context.Context, build *core.Build, stages []*core.Stage) { - if diff := cmp.Diff(build, dummyBuild, ignoreBuildFields); diff != "" { - t.Errorf(diff) - } - if diff := cmp.Diff(stages, dummyStages, ignoreStageFields); diff != "" { - t.Errorf(diff) - } - } - - checkStatus := func(_ context.Context, _ *core.User, req *core.StatusInput) error { - if diff := cmp.Diff(req.Build, dummyBuild, ignoreBuildFields); diff != "" { - t.Errorf(diff) - } - if diff := cmp.Diff(req.Repo, dummyRepo, ignoreStageFields); diff != "" { - t.Errorf(diff) - } - return nil - } - - mockUsers := mock.NewMockUserStore(controller) - mockUsers.EXPECT().Find(gomock.Any(), dummyRepo.UserID).Return(dummyUser, nil) - - mockRepos := mock.NewMockRepositoryStore(controller) - mockRepos.EXPECT().Increment(gomock.Any(), dummyRepo).Return(dummyRepo, nil) - - mockConfigService := mock.NewMockConfigService(controller) - mockConfigService.EXPECT().Find(gomock.Any(), gomock.Any()).Return(dummyYaml, nil) - - mockConvertService := mock.NewMockConvertService(controller) - mockConvertService.EXPECT().Convert(gomock.Any(), gomock.Any()).Return(dummyYaml, nil) - - mockValidateService := mock.NewMockValidateService(controller) - mockValidateService.EXPECT().Validate(gomock.Any(), gomock.Any()).Return(nil) - - mockStatus := mock.NewMockStatusService(controller) - mockStatus.EXPECT().Send(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil).Do(checkStatus) - - mockQueue := mock.NewMockScheduler(controller) - mockQueue.EXPECT().Schedule(gomock.Any(), gomock.Any()).Return(nil) - - mockBuilds := mock.NewMockBuildStore(controller) - mockBuilds.EXPECT().Create(gomock.Any(), gomock.Any(), gomock.Any()).Do(checkBuild).Return(nil) - - mockWebhooks := mock.NewMockWebhookSender(controller) - mockWebhooks.EXPECT().Send(gomock.Any(), gomock.Any()).Return(nil) - - triggerer := New( - nil, - mockConfigService, - mockConvertService, - nil, - mockStatus, - mockBuilds, - mockQueue, - mockRepos, - mockUsers, - mockValidateService, - mockWebhooks, - ) - - build, err := triggerer.Trigger(noContext, dummyRepo, dummyHook) - if err != nil { - t.Error(err) - return - } - if diff := cmp.Diff(build, dummyBuild, ignoreBuildFields); diff != "" { - t.Errorf(diff) - } -} - -// this test verifies that hook is ignored if the commit -// message includes the [CI SKIP] keyword. 
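TestTrigger_SkipCI, described just above and defined below, constructs the triggerer with nil for every collaborator: the [ci skip] check runs before any store or service is touched, so the skip path can be exercised in isolation. A self-contained sketch of that shape (hypothetical interface and names, not the deleted core types):

package main

import (
	"fmt"
	"strings"
)

// userStore stands in for the stores the real triggerer depends on.
type userStore interface {
	Find(id int64) (name string, err error)
}

// trigger returns early on a skip directive, before users is ever used,
// so callers exercising only the skip path may safely pass a nil store.
func trigger(users userStore, message string) (started bool, err error) {
	lower := strings.ToLower(message)
	if strings.Contains(lower, "[ci skip]") || strings.Contains(lower, "[skip ci]") {
		return false, nil
	}
	name, err := users.Find(1) // would panic if users were nil
	if err != nil {
		return false, err
	}
	fmt.Println("triggering build for", name)
	return true, nil
}

func main() {
	started, err := trigger(nil, "foo [CI SKIP] bar")
	fmt.Println(started, err) // false <nil>
}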
-func TestTrigger_SkipCI(t *testing.T) { - triggerer := New( - nil, - nil, - nil, - nil, - nil, - nil, - nil, - nil, - nil, - nil, - nil, - ) - dummyHookSkip := *dummyHook - dummyHookSkip.Message = "foo [CI SKIP] bar" - triggerer.Trigger(noContext, dummyRepo, &dummyHookSkip) -} - -// this test verifies that if the system cannot determine -// the repository owner, the function must exit with an error. -// The owner is required because we need an oauth token -// when fetching the configuration file. -func TestTrigger_NoOwner(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUsers := mock.NewMockUserStore(controller) - mockUsers.EXPECT().Find(noContext, dummyRepo.UserID).Return(nil, sql.ErrNoRows) - - triggerer := New( - nil, - nil, - nil, - nil, - nil, - nil, - nil, - nil, - mockUsers, - nil, - nil, - ) - - _, err := triggerer.Trigger(noContext, dummyRepo, dummyHook) - if err != sql.ErrNoRows { - t.Errorf("Expect error when yaml not found") - } -} - -// this test verifies that if the system cannot fetch the yaml -// configuration file, the function must exit with an error. -func TestTrigger_MissingYaml(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUsers := mock.NewMockUserStore(controller) - mockUsers.EXPECT().Find(noContext, dummyRepo.UserID).Return(dummyUser, nil) - - mockConfigService := mock.NewMockConfigService(controller) - mockConfigService.EXPECT().Find(gomock.Any(), gomock.Any()).Return(nil, io.EOF) - - triggerer := New( - nil, - mockConfigService, - nil, - nil, - nil, - nil, - nil, - nil, - mockUsers, - nil, - nil, - ) - - _, err := triggerer.Trigger(noContext, dummyRepo, dummyHook) - if err == nil { - t.Errorf("Expect error when yaml not found") - } -} - -// this test verifies that if the system cannot parse the yaml -// configuration file, the function must exit with an error. 
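The owner and yaml error-path tests above lean on sentinel errors: the mocked store or config service returns sql.ErrNoRows or io.EOF, and the assertion is that Trigger surfaces that exact value unchanged. A minimal sketch of the same pattern, with a hand-rolled fake standing in for the generated gomock mocks:

package main

import (
	"database/sql"
	"errors"
	"fmt"
)

type userStore interface {
	Find(id int64) (string, error)
}

// fakeUserStore is a hand-written stand-in for a generated mock: it simply
// returns whatever error it was configured with.
type fakeUserStore struct{ err error }

func (f fakeUserStore) Find(int64) (string, error) { return "", f.err }

// findOwner propagates the store error unchanged, which is what lets a test
// compare the result against the sentinel directly.
func findOwner(users userStore, id int64) (string, error) {
	return users.Find(id)
}

func main() {
	_, err := findOwner(fakeUserStore{err: sql.ErrNoRows}, 1)
	fmt.Println(errors.Is(err, sql.ErrNoRows)) // true
}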
-func TestTrigger_ErrorYaml(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - checkBuild := func(_ context.Context, build *core.Build, stages []*core.Stage) { - if diff := cmp.Diff(build, dummyErrorBuild, ignoreBuildFields); diff != "" { - t.Errorf(diff) - } - if diff := cmp.Diff(stages, []*core.Stage(nil), ignoreStageFields); diff != "" { - t.Errorf(diff) - } - } - - checkStatus := func(_ context.Context, _ *core.User, req *core.StatusInput) error { - if diff := cmp.Diff(req.Build, dummyErrorBuild, ignoreBuildFields); diff != "" { - t.Errorf(diff) - } - if diff := cmp.Diff(req.Repo, dummyRepo, ignoreStageFields); diff != "" { - t.Errorf(diff) - } - return nil - } - - mockUsers := mock.NewMockUserStore(controller) - mockUsers.EXPECT().Find(noContext, dummyRepo.UserID).Return(dummyUser, nil).MaxTimes(3) - - mockRepos := mock.NewMockRepositoryStore(controller) - mockRepos.EXPECT().Increment(gomock.Any(), dummyRepo).Return(dummyRepo, nil) - - mockConfigService := mock.NewMockConfigService(controller) - mockConfigService.EXPECT().Find(gomock.Any(), gomock.Any()).Return(dummyYamlInvalid, nil) - - mockConvertService := mock.NewMockConvertService(controller) - mockConvertService.EXPECT().Convert(gomock.Any(), gomock.Any()).Return(dummyYamlInvalid, nil) - - mockStatus := mock.NewMockStatusService(controller) - mockStatus.EXPECT().Send(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil).Do(checkStatus) - - mockBuilds := mock.NewMockBuildStore(controller) - mockBuilds.EXPECT().Create(gomock.Any(), gomock.Any(), gomock.Any()).Do(checkBuild).Return(nil) - - mockWebhooks := mock.NewMockWebhookSender(controller) - mockWebhooks.EXPECT().Send(gomock.Any(), gomock.Any()).Return(nil) - - triggerer := New( - nil, - mockConfigService, - mockConvertService, - nil, - mockStatus, - mockBuilds, - nil, - mockRepos, - mockUsers, - nil, - mockWebhooks, - ) - - build, err := triggerer.Trigger(noContext, dummyRepo, dummyHook) - if err != nil { - t.Error(err) - } - - if got, want := build.Status, core.StatusError; got != want { - t.Errorf("Want status %s, got %s", want, got) - } - if got, want := build.Error, "yaml: found unknown directive name"; got != want { - t.Errorf("Want error %s, got %s", want, got) - } - if build.Finished == 0 { - t.Errorf("Want non-zero finished time") - } -} - -// this test verifies that no build should be scheduled if the -// hook branch does not match the branches defined in the yaml. 
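The skip-branch, skip-event, and skip-action tests that follow feed the triggerer a yaml whose trigger section excludes the hook's branch, event, or action. Conceptually those trigger sections are include/exclude matchers; a stand-alone sketch of that behaviour using path.Match-style globs (an illustration only, not the drone-yaml implementation):

package main

import (
	"fmt"
	"path"
)

// constraint is a minimal stand-in for the include/exclude matchers the
// deleted yaml trigger sections rely on. An empty constraint matches
// everything; exclude wins over include.
type constraint struct {
	include []string
	exclude []string
}

func (c constraint) match(v string) bool {
	for _, pattern := range c.exclude {
		if ok, _ := path.Match(pattern, v); ok {
			return false
		}
	}
	if len(c.include) == 0 {
		return true
	}
	for _, pattern := range c.include {
		if ok, _ := path.Match(pattern, v); ok {
			return true
		}
	}
	return false
}

func main() {
	branch := constraint{exclude: []string{"master"}}
	fmt.Println(branch.match("master"))  // false: the pipeline is skipped
	fmt.Println(branch.match("develop")) // true
}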
-func TestTrigger_SkipBranch(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUsers := mock.NewMockUserStore(controller) - mockUsers.EXPECT().Find(noContext, dummyRepo.UserID).Return(dummyUser, nil) - - mockConfigService := mock.NewMockConfigService(controller) - mockConfigService.EXPECT().Find(gomock.Any(), gomock.Any()).Return(dummyYamlSkipBranch, nil) - - mockConvertService := mock.NewMockConvertService(controller) - mockConvertService.EXPECT().Convert(gomock.Any(), gomock.Any()).Return(dummyYamlSkipBranch, nil) - - mockValidateService := mock.NewMockValidateService(controller) - mockValidateService.EXPECT().Validate(gomock.Any(), gomock.Any()).Return(nil) - - triggerer := New( - nil, - mockConfigService, - mockConvertService, - nil, - nil, - nil, - nil, - nil, - mockUsers, - mockValidateService, - nil, - ) - - _, err := triggerer.Trigger(noContext, dummyRepo, dummyHook) - if err != nil { - t.Errorf("Expect build silently skipped if branch does not match") - } -} - -// this test verifies that no build should be scheduled if the -// hook event does not match the events defined in the yaml. -func TestTrigger_SkipEvent(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUsers := mock.NewMockUserStore(controller) - mockUsers.EXPECT().Find(noContext, dummyRepo.UserID).Return(dummyUser, nil) - - mockConfigService := mock.NewMockConfigService(controller) - mockConfigService.EXPECT().Find(gomock.Any(), gomock.Any()).Return(dummyYamlSkipEvent, nil) - - mockConvertService := mock.NewMockConvertService(controller) - mockConvertService.EXPECT().Convert(gomock.Any(), gomock.Any()).Return(dummyYamlSkipEvent, nil) - - mockValidateService := mock.NewMockValidateService(controller) - mockValidateService.EXPECT().Validate(gomock.Any(), gomock.Any()).Return(nil) - - triggerer := New( - nil, - mockConfigService, - mockConvertService, - nil, - nil, - nil, - nil, - nil, - mockUsers, - mockValidateService, - nil, - ) - - _, err := triggerer.Trigger(noContext, dummyRepo, dummyHook) - if err != nil { - t.Errorf("Expect build silently skipped if event does not match") - } -} - -// this test verifies that no build should be scheduled if the -// hook action does not match the actions defined in the yaml. -func TestTrigger_SkipAction(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUsers := mock.NewMockUserStore(controller) - mockUsers.EXPECT().Find(noContext, dummyRepo.UserID).Return(dummyUser, nil) - - mockConfigService := mock.NewMockConfigService(controller) - mockConfigService.EXPECT().Find(gomock.Any(), gomock.Any()).Return(dummyYamlSkipAction, nil) - - mockConvertService := mock.NewMockConvertService(controller) - mockConvertService.EXPECT().Convert(gomock.Any(), gomock.Any()).Return(dummyYamlSkipAction, nil) - - mockValidateService := mock.NewMockValidateService(controller) - mockValidateService.EXPECT().Validate(gomock.Any(), gomock.Any()).Return(nil) - - triggerer := New( - nil, - mockConfigService, - mockConvertService, - nil, - nil, - nil, - nil, - nil, - mockUsers, - mockValidateService, - nil, - ) - - _, err := triggerer.Trigger(noContext, dummyRepo, dummyHook) - if err != nil { - t.Errorf("Expect build silently skipped if action does not match") - } -} - -// this test verifies that if the system cannot increment the -// build number, the function must exit with error and must not -// schedule a new build. 
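The increment test described above (and defined next) covers the failure path of RepositoryStore.Increment; the success path must hand out unique, monotonically increasing build numbers even when hooks arrive concurrently. An in-memory illustration of that contract (the deleted store presumably enforces it at the database level):

package main

import (
	"fmt"
	"sync"
)

// repoCounters hands out per-repository build numbers; the mutex guarantees
// that concurrent hooks never receive the same number.
type repoCounters struct {
	mu       sync.Mutex
	counters map[int64]int64
}

func (r *repoCounters) Increment(repoID int64) int64 {
	r.mu.Lock()
	defer r.mu.Unlock()
	r.counters[repoID]++
	return r.counters[repoID]
}

func main() {
	store := &repoCounters{counters: map[int64]int64{}}
	var wg sync.WaitGroup
	for i := 0; i < 10; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			store.Increment(1)
		}()
	}
	wg.Wait()
	fmt.Println(store.Increment(1)) // 11: every concurrent hook got a unique number
}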
-func TestTrigger_ErrorIncrement(t *testing.T) { - controller := gomock.NewController(t) - defer controller.Finish() - - mockUsers := mock.NewMockUserStore(controller) - mockUsers.EXPECT().Find(noContext, dummyRepo.UserID).Return(dummyUser, nil) - - mockRepos := mock.NewMockRepositoryStore(controller) - mockRepos.EXPECT().Increment(gomock.Any(), dummyRepo).Return(nil, sql.ErrNoRows) - - mockConfigService := mock.NewMockConfigService(controller) - mockConfigService.EXPECT().Find(gomock.Any(), gomock.Any()).Return(dummyYaml, nil) - - mockConvertService := mock.NewMockConvertService(controller) - mockConvertService.EXPECT().Convert(gomock.Any(), gomock.Any()).Return(dummyYaml, nil) - - mockValidateService := mock.NewMockValidateService(controller) - mockValidateService.EXPECT().Validate(gomock.Any(), gomock.Any()).Return(nil) - - triggerer := New( - nil, - mockConfigService, - mockConvertService, - nil, - nil, - nil, - nil, - mockRepos, - mockUsers, - mockValidateService, - nil, - ) - - _, err := triggerer.Trigger(noContext, dummyRepo, dummyHook) - if err != sql.ErrNoRows { - t.Errorf("Expect error when unable to increment build sequence") - } -} - -func TestTrigger_ErrorCreate(t *testing.T) { - t.Skip() - // controller := gomock.NewController(t) - // defer controller.Finish() - - // mockUsers := mock.NewMockUserStore(controller) - // mockUsers.EXPECT().Find(noContext, dummyRepo.UserID).Return(dummyUser, nil) - - // mockTriggers := mock.NewMockTriggerStore(controller) - // mockTriggers.EXPECT().List(noContext, dummyRepo.ID).Return([]*core.Trigger{dummyTrigger}, nil) - - // mockRepos := mock.NewMockRepositoryStore(controller) - // mockRepos.EXPECT().Increment(gomock.Any(), dummyRepo).Return(dummyRepo, nil) - - // mockContents := mock.NewMockContentService(controller) - // mockContents.EXPECT().Find(gomock.Any(), dummyRepo.Slug, dummyTrigger.Path, dummyHook.After).Return(dummyYaml, nil, nil) - // mockContents.EXPECT().Find(gomock.Any(), dummyRepo.Slug, dummySignature.Path, dummyHook.After).Return(dummySignature, nil, nil) - - // mockClient := new(scm.Client) - // mockClient.Contents = mockContents - - // mockBuilds := mock.NewMockBuildStore(controller) - // mockBuilds.EXPECT().Create(gomock.Any(), gomock.Any()).Return(sql.ErrNoRows) - - // triggerer := New( - // mockClient, - // mockBuilds, - // nil, - // mockRepos, - // mockTriggers, - // mockUsers, - // ) - - // builds, err := triggerer.Trigger(noContext, dummyRepo, dummyHook) - // if err != sql.ErrNoRows { - // t.Error("Expect error when persisting the build fails") - // } - // if got, want := len(builds), 0; got != want { - // t.Errorf("Got build count %d, want %d", got, want) - // } -} - -func TestTrigger_ErrorEnqueue(t *testing.T) { - t.Skip() - // controller := gomock.NewController(t) - // defer controller.Finish() - - // mockUsers := mock.NewMockUserStore(controller) - // mockUsers.EXPECT().Find(noContext, dummyRepo.UserID).Return(dummyUser, nil) - - // mockTriggers := mock.NewMockTriggerStore(controller) - // mockTriggers.EXPECT().List(noContext, dummyRepo.ID).Return([]*core.Trigger{dummyTrigger}, nil) - - // mockRepos := mock.NewMockRepositoryStore(controller) - // mockRepos.EXPECT().Increment(gomock.Any(), dummyRepo).Return(dummyRepo, nil) - - // mockContents := mock.NewMockContentService(controller) - // mockContents.EXPECT().Find(gomock.Any(), dummyRepo.Slug, dummyTrigger.Path, dummyHook.After).Return(dummyYaml, nil, nil) - // mockContents.EXPECT().Find(gomock.Any(), dummyRepo.Slug, dummySignature.Path, 
dummyHook.After).Return(dummySignature, nil, nil) - - // mockClient := new(scm.Client) - // mockClient.Contents = mockContents - - // mockQueue := mock.NewMockQueue(controller) - // mockQueue.EXPECT().Push(gomock.Any(), gomock.Any()).Return(sql.ErrNoRows) - - // mockBuilds := mock.NewMockBuildStore(controller) - // mockBuilds.EXPECT().Create(gomock.Any(), gomock.Any()).Return(nil) - - // triggerer := New( - // mockClient, - // mockBuilds, - // mockQueue, - // mockRepos, - // mockTriggers, - // mockUsers, - // ) - - // builds, err := triggerer.Trigger(noContext, dummyRepo, dummyHook) - // if err != sql.ErrNoRows { - // t.Error("Expect error when enqueueing the build fails") - // } - // if got, want := len(builds), 0; got != want { - // t.Errorf("Got build count %d, want %d", got, want) - // } -} - -var ( - dummyHook = &core.Hook{ - Event: core.EventPush, - Link: "https://github.com/octocat/Hello-World/commit/7fd1a60b01f91b314f59955a4e4d4e80d8edf11d", - Timestamp: 1299283200, - Message: "first commit", - Before: "553c2077f0edc3d5dc5d17262f6aa498e69d6f8e", - After: "7fd1a60b01f91b314f59955a4e4d4e80d8edf11d", - Ref: "refs/heads/master", - Source: "master", - Target: "master", - Author: "octocat", - AuthorName: "The Octocat", - AuthorEmail: "octocat@hello-world.com", - AuthorAvatar: "https://avatars3.githubusercontent.com/u/583231", - Sender: "octocat", - Action: "opened", - } - - dummyBuild = &core.Build{ - Number: dummyRepo.Counter, - RepoID: dummyRepo.ID, - Status: core.StatusPending, - Event: core.EventPush, - Link: "https://github.com/octocat/Hello-World/commit/7fd1a60b01f91b314f59955a4e4d4e80d8edf11d", - // Timestamp: 1299283200, - Message: "first commit", - Before: "553c2077f0edc3d5dc5d17262f6aa498e69d6f8e", - After: "7fd1a60b01f91b314f59955a4e4d4e80d8edf11d", - Ref: "refs/heads/master", - Source: "master", - Target: "master", - Author: "octocat", - AuthorName: "The Octocat", - AuthorEmail: "octocat@hello-world.com", - AuthorAvatar: "https://avatars3.githubusercontent.com/u/583231", - Sender: "octocat", - Action: "opened", - } - - dummyErrorBuild = &core.Build{ - Number: dummyRepo.Counter, - RepoID: dummyRepo.ID, - Status: core.StatusError, - Error: "yaml: found unknown directive name", - Event: core.EventPush, - Link: "https://github.com/octocat/Hello-World/commit/7fd1a60b01f91b314f59955a4e4d4e80d8edf11d", - // Timestamp: 1299283200, - Message: "first commit", - Before: "553c2077f0edc3d5dc5d17262f6aa498e69d6f8e", - After: "7fd1a60b01f91b314f59955a4e4d4e80d8edf11d", - Ref: "refs/heads/master", - Source: "master", - Target: "master", - Author: "octocat", - AuthorName: "The Octocat", - AuthorEmail: "octocat@hello-world.com", - AuthorAvatar: "https://avatars3.githubusercontent.com/u/583231", - Sender: "octocat", - Action: "opened", - } - - dummyRepo = &core.Repository{ - ID: 1, - UID: "1296269", - UserID: 2, - Namespace: "octocat", - Name: "Hello-World", - Slug: "octocat/Hello-World", - SCM: "git", - HTTPURL: "https://github.com/octocat/Hello-World.git", - SSHURL: "git@github.com:octocat/Hello-World.git", - Link: "https://github.com/octocat/Hello-World", - Branch: "master", - Private: false, - Visibility: core.VisibilityPublic, - Active: true, - Counter: 42, - Secret: "g9dMChy22QutQM5lrpbe0yCR3f15t1gv", - Signer: "g9dMChy22QutQM5lrpbe0yCR3f15t1gv", - Config: ".drone.yml", - } - - dummyStage = &core.Stage{ - Kind: "pipeline", - Type: "docker", - RepoID: 1, - Name: "default", - Number: 1, - OS: "linux", - Arch: "amd64", - OnSuccess: true, - OnFailure: false, - Status: core.StatusPending, - } 
- - dummyStages = []*core.Stage{ - dummyStage, - } - - dummyErrorStages = []*core.Stage{ - dummyStage, - } - - dummyUser = &core.User{ - ID: 2, - Login: "octocat", - Active: true, - } - - dummyYaml = &core.Config{ - Data: "kind: pipeline\nsteps: [ ]", - } - - dummyYamlInvalid = &core.Config{ - Data: "%ERROR", - } - - dummyYamlSkipBranch = &core.Config{ - Data: ` -kind: pipeline -trigger: - branch: - exclude: - - master`, - } - - dummyYamlSkipEvent = &core.Config{ - Data: ` -kind: pipeline -trigger: - event: - exclude: - - push`, - } - - dummyYamlSkipAction = &core.Config{ - Data: ` -kind: pipeline -trigger: - action: - exclude: - - opened`, - } - - ignoreBuildFields = cmpopts.IgnoreFields(core.Build{}, - "Created", "Updated", "Started", "Finished") - - ignoreStageFields = cmpopts.IgnoreFields(core.Stage{}, - "Created", "Updated") -) diff --git a/version/version.go b/version/version.go deleted file mode 100644 index 2e7391661e..0000000000 --- a/version/version.go +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright 2019 Drone IO, Inc. -// Copyright 2016 The Linux Foundation -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package version - -import "github.com/coreos/go-semver/semver" - -var ( - // GitRepository is the git repository that was compiled - GitRepository string - // GitCommit is the git commit that was compiled - GitCommit string - // VersionMajor is for an API incompatible changes. - VersionMajor int64 = 2 - // VersionMinor is for functionality in a backwards-compatible manner. - VersionMinor int64 = 20 - // VersionPatch is for backwards-compatible bug fixes. - VersionPatch int64 = 0 - // VersionPre indicates prerelease. - VersionPre = "" - // VersionDev indicates development branch. Releases will be empty string. - VersionDev string -) - -// Version is the specification version that the package types support. -var Version = semver.Version{ - Major: VersionMajor, - Minor: VersionMinor, - Patch: VersionPatch, - PreRelease: semver.PreRelease(VersionPre), - Metadata: VersionDev, -} diff --git a/version/version_test.go b/version/version_test.go deleted file mode 100644 index a69c29033e..0000000000 --- a/version/version_test.go +++ /dev/null @@ -1,16 +0,0 @@ -// Copyright 2019 Drone.IO Inc. All rights reserved. -// Use of this source code is governed by the Drone Non-Commercial License -// that can be found in the LICENSE file. 
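The fixtures above compare builds and stages with go-cmp while ignoring the volatile timestamp fields via cmpopts.IgnoreFields. A self-contained illustration of that comparison pattern, using a throwaway struct in place of core.Build:

package main

import (
	"fmt"
	"time"

	"github.com/google/go-cmp/cmp"
	"github.com/google/go-cmp/cmp/cmpopts"
)

// build is a throwaway stand-in with the same shape of problem: two fields
// that vary from run to run and should not affect equality.
type build struct {
	Number  int64
	Status  string
	Created int64
	Updated int64
}

func main() {
	got := build{Number: 42, Status: "pending", Created: time.Now().Unix()}
	want := build{Number: 42, Status: "pending"}

	ignoreTimes := cmpopts.IgnoreFields(build{}, "Created", "Updated")
	if diff := cmp.Diff(got, want, ignoreTimes); diff != "" {
		fmt.Println("unexpected diff:\n" + diff)
	} else {
		fmt.Println("builds match (timestamps ignored)")
	}
}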
- -//go:build !oss -// +build !oss - -package version - -import "testing" - -func TestVersion(t *testing.T) { - if got, want := Version.String(), "2.20.0"; got != want { - t.Errorf("Want version %s, got %s", want, got) - } -} diff --git a/web/README b/web/README deleted file mode 100644 index dca8bff9bd..0000000000 --- a/web/README +++ /dev/null @@ -1 +0,0 @@ -Future home of github.com/drone/drone-ui \ No newline at end of file From e0aa6cb81ae73c087771c4686adf8dd59c3dfc85 Mon Sep 17 00:00:00 2001 From: Johannes Batzill Date: Thu, 21 Sep 2023 11:33:41 -0700 Subject: [PATCH 02/14] ADD GITNESS --- .dockerignore | 12 + .gitignore | 21 + .gitleaksignore | 0 .golangci.yml | 322 + .local.env | 4 + Dockerfile | 90 + LICENSE.md | 201 + Makefile | 163 + README.md | 101 + cache/cache.go | 44 + cache/cache_test.go | 69 + cache/no_cache.go | 37 + cache/redis_cache.go | 99 + cache/ttl_cache.go | 231 + cli/cli.go | 34 + cli/operations/account/login.go | 68 + cli/operations/account/logout.go | 37 + cli/operations/account/register.go | 78 + cli/operations/hooks/hooks.go | 32 + cli/operations/migrate/current.go | 59 + cli/operations/migrate/migrate.go | 50 + cli/operations/migrate/to.go | 56 + cli/operations/user/create_pat.go | 97 + cli/operations/user/self.go | 76 + cli/operations/user/users.go | 26 + cli/operations/users/create.go | 86 + cli/operations/users/delete.go | 47 + cli/operations/users/find.go | 74 + cli/operations/users/list.go | 92 + cli/operations/users/update.go | 118 + cli/operations/users/users.go | 29 + cli/provide/provider.go | 101 + cli/server/config.go | 195 + cli/server/redis.go | 54 + cli/server/server.go | 229 + cli/server/system.go | 52 + cli/session/session.go | 95 + cli/swagger.go | 49 + cli/textui/input.go | 84 + client/client.go | 250 + client/interface.go | 63 + cmd/gitness-githook/main.go | 45 + cmd/gitness/driver_pq.go | 22 + cmd/gitness/driver_sqlite.go | 22 + cmd/gitness/main.go | 56 + cmd/gitness/wire.go | 148 + cmd/gitness/wire_gen.go | 260 + cmd/gitrpcserver/config.go | 68 + cmd/gitrpcserver/main.go | 160 + cmd/gitrpcserver/redis.go | 54 + cmd/gitrpcserver/system.go | 33 + cmd/gitrpcserver/wire.go | 24 + cmd/gitrpcserver/wire_gen.go | 37 + encrypt/aesgcm.go | 84 + encrypt/encrypt.go | 30 + encrypt/none.go | 19 + encrypt/wire.go | 33 + events/error.go | 55 + events/events.go | 70 + events/options.go | 68 + events/reader.go | 225 + events/reporter.go | 60 + events/stream.go | 38 + events/system.go | 76 + events/wire.go | 101 + githook/cli.go | 176 + githook/client.go | 148 + githook/core.go | 142 + githook/env.go | 111 + githook/types.go | 50 + gitrpc/blame.go | 134 + gitrpc/blob.go | 77 + gitrpc/branch.go | 201 + gitrpc/check/branch.go | 96 + gitrpc/check/branch_test.go | 225 + gitrpc/client.go | 92 + gitrpc/commit.go | 278 + gitrpc/common.go | 66 + gitrpc/config.go | 36 + gitrpc/diff.go | 411 + gitrpc/diff/diff.go | 506 + gitrpc/enum/entry.go | 14 + gitrpc/enum/hunk_headers.go | 30 + gitrpc/enum/merge.go | 70 + gitrpc/enum/ref.go | 85 + gitrpc/errors.go | 216 + gitrpc/hash/aggregate_xor.go | 96 + gitrpc/hash/aggregate_xor_test.go | 126 + gitrpc/hash/git.go | 25 + gitrpc/hash/hash.go | 92 + gitrpc/hash/source.go | 74 + gitrpc/hash/source_test.go | 135 + gitrpc/interface.go | 87 + gitrpc/internal/files/file.go | 34 + gitrpc/internal/gitea/blame.go | 269 + gitrpc/internal/gitea/blame_test.go | 171 + gitrpc/internal/gitea/blob.go | 103 + gitrpc/internal/gitea/branch.go | 55 + gitrpc/internal/gitea/commit.go | 445 + gitrpc/internal/gitea/config.go | 38 + 
gitrpc/internal/gitea/diff.go | 208 + gitrpc/internal/gitea/errors.go | 38 + gitrpc/internal/gitea/gitea.go | 48 + gitrpc/internal/gitea/gitea_test.go | 101 + gitrpc/internal/gitea/gogit.go | 119 + gitrpc/internal/gitea/last_commit_cache.go | 170 + gitrpc/internal/gitea/mapping.go | 185 + gitrpc/internal/gitea/match_files.go | 102 + gitrpc/internal/gitea/merge.go | 566 + gitrpc/internal/gitea/paths_details.go | 81 + gitrpc/internal/gitea/ref.go | 188 + gitrpc/internal/gitea/repo.go | 285 + gitrpc/internal/gitea/submodule.go | 52 + gitrpc/internal/gitea/tag.go | 314 + gitrpc/internal/gitea/tree.go | 152 + gitrpc/internal/gitea/vars.go | 21 + gitrpc/internal/middleware/error.go | 103 + gitrpc/internal/middleware/log.go | 162 + gitrpc/internal/parser/diff_cut.go | 287 + gitrpc/internal/parser/diff_cut_test.go | 271 + gitrpc/internal/parser/diff_headers.go | 113 + gitrpc/internal/parser/diff_headers_test.go | 109 + gitrpc/internal/parser/hunk.go | 51 + gitrpc/internal/service/blame.go | 94 + gitrpc/internal/service/blob.go | 101 + gitrpc/internal/service/branch.go | 289 + gitrpc/internal/service/commit.go | 179 + gitrpc/internal/service/diff.go | 259 + gitrpc/internal/service/env.go | 22 + gitrpc/internal/service/errors.go | 238 + gitrpc/internal/service/http.go | 182 + gitrpc/internal/service/interface.go | 113 + gitrpc/internal/service/mapping.go | 197 + gitrpc/internal/service/match_files.go | 52 + gitrpc/internal/service/merge.go | 272 + gitrpc/internal/service/operations.go | 568 + gitrpc/internal/service/operations_test.go | 118 + gitrpc/internal/service/path.go | 32 + gitrpc/internal/service/pipeline.go | 74 + gitrpc/internal/service/push.go | 65 + gitrpc/internal/service/ref.go | 308 + gitrpc/internal/service/repo.go | 467 + gitrpc/internal/service/shared_repo.go | 435 + gitrpc/internal/service/submodule.go | 44 + gitrpc/internal/service/tag.go | 363 + gitrpc/internal/service/tree.go | 149 + gitrpc/internal/service/upload.go | 189 + gitrpc/internal/slices/slice.go | 31 + gitrpc/internal/storage/local.go | 47 + gitrpc/internal/streamio/stream.go | 93 + gitrpc/internal/tempdir/file.go | 42 + gitrpc/internal/tools.go | 24 + gitrpc/internal/types/errors.go | 120 + gitrpc/internal/types/hunk.go | 72 + gitrpc/internal/types/types.go | 344 + gitrpc/kuberesolver.go | 23 + gitrpc/log_interceptor.go | 72 + gitrpc/mapping.go | 295 + gitrpc/match_files.go | 65 + gitrpc/merge.go | 120 + gitrpc/operations.go | 152 + gitrpc/params.go | 27 + gitrpc/pipeline.go | 44 + gitrpc/proto/blame.proto | 27 + gitrpc/proto/diff.proto | 138 + gitrpc/proto/http.proto | 58 + gitrpc/proto/merge.proto | 75 + gitrpc/proto/operations.proto | 73 + gitrpc/proto/push.proto | 21 + gitrpc/proto/ref.proto | 141 + gitrpc/proto/repo.proto | 262 + gitrpc/proto/shared.proto | 74 + gitrpc/push_remote.go | 53 + gitrpc/ref.go | 82 + gitrpc/repo.go | 214 + gitrpc/rpc/blame.pb.go | 330 + gitrpc/rpc/blame_grpc.pb.go | 132 + gitrpc/rpc/constants.go | 28 + gitrpc/rpc/diff.pb.go | 1439 ++ gitrpc/rpc/diff_grpc.pb.go | 367 + gitrpc/rpc/http.pb.go | 478 + gitrpc/rpc/http_grpc.pb.go | 208 + gitrpc/rpc/merge.pb.go | 541 + gitrpc/rpc/merge_grpc.pb.go | 105 + gitrpc/rpc/operations.pb.go | 688 + gitrpc/rpc/operations_grpc.pb.go | 139 + gitrpc/rpc/push.pb.go | 227 + gitrpc/rpc/push_grpc.pb.go | 105 + gitrpc/rpc/ref.pb.go | 1840 +++ gitrpc/rpc/ref_grpc.pb.go | 448 + gitrpc/rpc/repo.pb.go | 3511 +++++ gitrpc/rpc/repo_grpc.pb.go | 725 + gitrpc/rpc/shared.pb.go | 974 ++ gitrpc/server/config.go | 89 + gitrpc/server/cron/clean_slate_data.go | 58 + 
gitrpc/server/cron/clean_slate_data_test.go | 44 + gitrpc/server/cron/manager.go | 94 + gitrpc/server/cron/manager_test.go | 117 + gitrpc/server/cron/wire.go | 30 + gitrpc/server/http.go | 254 + gitrpc/server/http_log.go | 80 + gitrpc/server/server.go | 142 + gitrpc/server/wire.go | 74 + gitrpc/smarthttp.go | 180 + gitrpc/sort.go | 23 + gitrpc/stream.go | 52 + gitrpc/submodule.go | 61 + gitrpc/tag.go | 210 + gitrpc/tree.go | 202 + gitrpc/upload.go | 111 + gitrpc/validate.go | 27 + gitrpc/wire.go | 26 + go.mod | 169 + go.sum | 1033 ++ http/server.go | 166 + internal/api/api.go | 15 + internal/api/auth/auth.go | 117 + internal/api/auth/connector.go | 45 + internal/api/auth/pipeline.go | 45 + internal/api/auth/repo.go | 52 + internal/api/auth/secret.go | 39 + internal/api/auth/service.go | 40 + internal/api/auth/service_account.go | 37 + internal/api/auth/space.go | 52 + internal/api/auth/template.go | 39 + internal/api/auth/user.go | 40 + internal/api/controller/check/check_list.go | 61 + internal/api/controller/check/check_report.go | 181 + internal/api/controller/check/controller.go | 74 + internal/api/controller/check/wire.go | 45 + .../api/controller/connector/controller.go | 47 + internal/api/controller/connector/create.go | 96 + internal/api/controller/connector/delete.go | 41 + internal/api/controller/connector/find.go | 46 + internal/api/controller/connector/update.go | 90 + internal/api/controller/connector/wire.go | 38 + internal/api/controller/execution/cancel.go | 67 + .../api/controller/execution/controller.go | 64 + internal/api/controller/execution/create.go | 90 + internal/api/controller/execution/delete.go | 51 + internal/api/controller/execution/find.go | 63 + internal/api/controller/execution/list.go | 70 + internal/api/controller/execution/wire.go | 46 + internal/api/controller/githook/controller.go | 93 + .../api/controller/githook/post_receive.go | 140 + .../api/controller/githook/pre_receive.go | 69 + internal/api/controller/githook/update.go | 40 + internal/api/controller/githook/wire.go | 34 + internal/api/controller/logs/controller.go | 59 + internal/api/controller/logs/find.go | 83 + internal/api/controller/logs/tail.go | 66 + internal/api/controller/logs/wire.go | 43 + .../api/controller/pipeline/controller.go | 51 + internal/api/controller/pipeline/create.go | 134 + internal/api/controller/pipeline/delete.go | 42 + internal/api/controller/pipeline/find.go | 42 + internal/api/controller/pipeline/update.go | 96 + internal/api/controller/pipeline/wire.go | 40 + internal/api/controller/plugin/controller.go | 36 + internal/api/controller/plugin/list.go | 46 + internal/api/controller/plugin/wire.go | 33 + .../api/controller/principal/controller.go | 29 + .../api/controller/principal/interface.go | 28 + internal/api/controller/principal/search.go | 36 + internal/api/controller/principal/wire.go | 30 + .../api/controller/pullreq/activity_list.go | 67 + .../api/controller/pullreq/comment_create.go | 309 + .../api/controller/pullreq/comment_delete.go | 84 + .../api/controller/pullreq/comment_status.go | 133 + .../api/controller/pullreq/comment_update.go | 89 + internal/api/controller/pullreq/controller.go | 248 + .../api/controller/pullreq/file_view_add.go | 164 + .../controller/pullreq/file_view_delete.go | 54 + .../api/controller/pullreq/file_view_list.go | 49 + internal/api/controller/pullreq/locks.go | 29 + internal/api/controller/pullreq/mapper.go | 27 + internal/api/controller/pullreq/merge.go | 208 + internal/api/controller/pullreq/pr_commits.go | 71 + 
internal/api/controller/pullreq/pr_create.go | 167 + internal/api/controller/pullreq/pr_find.go | 65 + internal/api/controller/pullreq/pr_list.go | 78 + internal/api/controller/pullreq/pr_recheck.go | 43 + internal/api/controller/pullreq/pr_state.go | 199 + internal/api/controller/pullreq/pr_update.go | 118 + .../api/controller/pullreq/review_submit.go | 182 + .../api/controller/pullreq/reviewer_add.go | 138 + .../api/controller/pullreq/reviewer_delete.go | 43 + .../api/controller/pullreq/reviewer_list.go | 45 + internal/api/controller/pullreq/wire.go | 53 + internal/api/controller/repo/blame.go | 61 + internal/api/controller/repo/commit.go | 110 + internal/api/controller/repo/content_get.go | 296 + .../controller/repo/content_paths_details.go | 77 + internal/api/controller/repo/controller.go | 151 + internal/api/controller/repo/create.go | 227 + internal/api/controller/repo/create_branch.go | 71 + .../api/controller/repo/create_commit_tag.go | 79 + internal/api/controller/repo/delete.go | 85 + internal/api/controller/repo/delete_branch.go | 60 + internal/api/controller/repo/delete_tag.go | 50 + internal/api/controller/repo/diff.go | 160 + internal/api/controller/repo/find.go | 42 + internal/api/controller/repo/get_branch.go | 51 + internal/api/controller/repo/get_commit.go | 54 + .../controller/repo/get_commit_divergences.go | 101 + internal/api/controller/repo/import.go | 88 + internal/api/controller/repo/import_cancel.go | 51 + .../api/controller/repo/import_progress.go | 54 + internal/api/controller/repo/list_branches.go | 112 + .../api/controller/repo/list_commit_tags.go | 116 + internal/api/controller/repo/list_commits.go | 83 + .../api/controller/repo/list_pipelines.go | 68 + .../controller/repo/list_service_accounts.go | 37 + internal/api/controller/repo/merge_check.go | 71 + internal/api/controller/repo/move.go | 94 + .../api/controller/repo/pipeline_generate.go | 45 + internal/api/controller/repo/raw.go | 75 + internal/api/controller/repo/update.go | 88 + internal/api/controller/repo/wire.go | 45 + internal/api/controller/secret/controller.go | 51 + internal/api/controller/secret/create.go | 126 + internal/api/controller/secret/delete.go | 41 + internal/api/controller/secret/find.go | 50 + internal/api/controller/secret/update.go | 92 + internal/api/controller/secret/wire.go | 40 + internal/api/controller/service/controller.go | 44 + internal/api/controller/service/create.go | 98 + internal/api/controller/service/delete.go | 41 + internal/api/controller/service/find.go | 48 + internal/api/controller/service/list.go | 49 + internal/api/controller/service/update.go | 87 + .../api/controller/service/update_admin.go | 49 + internal/api/controller/service/wire.go | 33 + .../controller/serviceaccount/controller.go | 51 + .../api/controller/serviceaccount/create.go | 134 + .../controller/serviceaccount/create_token.go | 71 + .../api/controller/serviceaccount/delete.go | 48 + .../controller/serviceaccount/delete_token.go | 56 + .../api/controller/serviceaccount/find.go | 49 + .../controller/serviceaccount/list_token.go | 41 + .../api/controller/serviceaccount/wire.go | 34 + internal/api/controller/space/controller.go | 86 + internal/api/controller/space/create.go | 211 + internal/api/controller/space/delete.go | 93 + internal/api/controller/space/events.go | 112 + internal/api/controller/space/export.go | 110 + .../api/controller/space/export_progress.go | 43 + internal/api/controller/space/find.go | 40 + internal/api/controller/space/import.go | 95 + 
.../api/controller/space/list_connectors.go | 54 + .../api/controller/space/list_repositories.go | 63 + internal/api/controller/space/list_secrets.go | 64 + .../controller/space/list_service_accounts.go | 41 + internal/api/controller/space/list_spaces.go | 72 + .../api/controller/space/list_templates.go | 54 + .../api/controller/space/membership_add.go | 110 + .../api/controller/space/membership_delete.go | 56 + .../api/controller/space/membership_list.go | 69 + .../api/controller/space/membership_update.go | 95 + internal/api/controller/space/move.go | 134 + internal/api/controller/space/update.go | 87 + internal/api/controller/space/wire.go | 49 + internal/api/controller/system/controller.go | 43 + internal/api/controller/system/wire.go | 31 + .../api/controller/template/controller.go | 47 + internal/api/controller/template/create.go | 93 + internal/api/controller/template/delete.go | 41 + internal/api/controller/template/find.go | 46 + internal/api/controller/template/update.go | 90 + internal/api/controller/template/wire.go | 38 + internal/api/controller/trigger/common.go | 68 + internal/api/controller/trigger/controller.go | 50 + internal/api/controller/trigger/create.go | 103 + internal/api/controller/trigger/delete.go | 54 + internal/api/controller/trigger/find.go | 54 + internal/api/controller/trigger/list.go | 60 + internal/api/controller/trigger/update.go | 120 + internal/api/controller/trigger/wire.go | 39 + internal/api/controller/tx.go | 52 + internal/api/controller/user/controller.go | 73 + internal/api/controller/user/create.go | 125 + .../controller/user/create_access_token.go | 73 + internal/api/controller/user/delete.go | 61 + internal/api/controller/user/delete_token.go | 63 + internal/api/controller/user/find.go | 50 + internal/api/controller/user/find_email.go | 42 + internal/api/controller/user/list.go | 52 + internal/api/controller/user/list_tokens.go | 47 + internal/api/controller/user/login.go | 90 + internal/api/controller/user/logout.go | 59 + .../api/controller/user/membership_spaces.go | 70 + internal/api/controller/user/register.go | 64 + internal/api/controller/user/update.go | 102 + internal/api/controller/user/update_admin.go | 68 + internal/api/controller/user/wire.go | 46 + internal/api/controller/util.go | 107 + internal/api/controller/webhook/common.go | 113 + internal/api/controller/webhook/controller.go | 87 + internal/api/controller/webhook/create.go | 112 + internal/api/controller/webhook/delete.go | 44 + internal/api/controller/webhook/find.go | 59 + .../api/controller/webhook/find_execution.go | 72 + internal/api/controller/webhook/list.go | 49 + .../api/controller/webhook/list_executions.go | 52 + .../controller/webhook/retrigger_execution.go | 67 + internal/api/controller/webhook/update.go | 123 + internal/api/controller/webhook/wire.go | 37 + internal/api/handler/account/cookie.go | 63 + internal/api/handler/account/login.go | 52 + internal/api/handler/account/login_test.go | 33 + internal/api/handler/account/logout.go | 46 + internal/api/handler/account/register.go | 58 + internal/api/handler/account/register_test.go | 37 + internal/api/handler/check/check_list.go | 54 + internal/api/handler/check/check_report.go | 60 + internal/api/handler/connector/create.go | 47 + internal/api/handler/connector/delete.go | 49 + internal/api/handler/connector/find.go | 50 + internal/api/handler/connector/update.go | 58 + internal/api/handler/execution/cancel.go | 52 + internal/api/handler/execution/create.go | 50 + internal/api/handler/execution/delete.go | 
53 + internal/api/handler/execution/find.go | 53 + internal/api/handler/execution/list.go | 51 + internal/api/handler/githook/post_receive.go | 60 + internal/api/handler/githook/pre_receive.go | 60 + internal/api/handler/githook/update.go | 60 + internal/api/handler/logs/find.go | 64 + internal/api/handler/logs/tail.go | 136 + internal/api/handler/pipeline/create.go | 52 + internal/api/handler/pipeline/delete.go | 48 + internal/api/handler/pipeline/find.go | 48 + internal/api/handler/pipeline/update.go | 57 + internal/api/handler/plugin/list.go | 38 + internal/api/handler/principal/search.go | 38 + internal/api/handler/pullreq/activity_list.go | 58 + .../api/handler/pullreq/comment_create.go | 59 + .../api/handler/pullreq/comment_delete.go | 57 + .../api/handler/pullreq/comment_status.go | 65 + .../api/handler/pullreq/comment_update.go | 65 + internal/api/handler/pullreq/file_view_add.go | 59 + .../api/handler/pullreq/file_view_delete.go | 57 + .../api/handler/pullreq/file_view_list.go | 51 + internal/api/handler/pullreq/merge.go | 61 + internal/api/handler/pullreq/pr_commits.go | 61 + internal/api/handler/pullreq/pr_create.go | 53 + internal/api/handler/pullreq/pr_find.go | 51 + internal/api/handler/pullreq/pr_list.go | 57 + internal/api/handler/pullreq/pr_metadata.go | 51 + internal/api/handler/pullreq/pr_recheck.go | 51 + internal/api/handler/pullreq/pr_state.go | 59 + internal/api/handler/pullreq/pr_update.go | 59 + internal/api/handler/pullreq/review_submit.go | 59 + internal/api/handler/pullreq/reviewer_add.go | 59 + .../api/handler/pullreq/reviewer_delete.go | 57 + internal/api/handler/pullreq/reviewer_list.go | 51 + internal/api/handler/repo/blame.go | 64 + .../repo/calculate_commit_divergence.go | 54 + internal/api/handler/repo/commit.go | 51 + internal/api/handler/repo/content_get.go | 54 + .../api/handler/repo/content_paths_details.go | 54 + internal/api/handler/repo/create.go | 47 + internal/api/handler/repo/create_branch.go | 54 + .../api/handler/repo/create_commit_tag.go | 51 + internal/api/handler/repo/delete.go | 46 + internal/api/handler/repo/delete_branch.go | 50 + internal/api/handler/repo/delete_tag.go | 49 + internal/api/handler/repo/diff.go | 104 + internal/api/handler/repo/find.go | 44 + internal/api/handler/repo/get_branch.go | 51 + internal/api/handler/repo/get_commit.go | 51 + internal/api/handler/repo/http_git.go | 241 + internal/api/handler/repo/import.go | 46 + internal/api/handler/repo/import_cancel.go | 43 + internal/api/handler/repo/import_progress.go | 43 + internal/api/handler/repo/list_branches.go | 57 + internal/api/handler/repo/list_commit_tags.go | 57 + internal/api/handler/repo/list_commits.go | 57 + internal/api/handler/repo/list_pipelines.go | 46 + .../api/handler/repo/list_service_accounts.go | 47 + internal/api/handler/repo/merge_check.go | 46 + internal/api/handler/repo/move.go | 52 + .../api/handler/repo/pipeline_generate.go | 45 + internal/api/handler/repo/raw.go | 52 + internal/api/handler/repo/update.go | 54 + internal/api/handler/repo/upload_file.go | 15 + internal/api/handler/resource/resource.go | 46 + internal/api/handler/secret/create.go | 47 + internal/api/handler/secret/delete.go | 49 + internal/api/handler/secret/find.go | 50 + internal/api/handler/secret/update.go | 57 + internal/api/handler/serviceaccount/create.go | 49 + .../handler/serviceaccount/create_token.go | 53 + internal/api/handler/serviceaccount/delete.go | 46 + .../handler/serviceaccount/delete_token.go | 50 + internal/api/handler/serviceaccount/find.go | 45 + 
.../api/handler/serviceaccount/list_tokens.go | 44 + internal/api/handler/space/create.go | 47 + internal/api/handler/space/delete.go | 44 + internal/api/handler/space/events.go | 63 + internal/api/handler/space/export.go | 52 + internal/api/handler/space/export_progress.go | 44 + internal/api/handler/space/find.go | 46 + internal/api/handler/space/import.go | 46 + internal/api/handler/space/list.go | 51 + internal/api/handler/space/list_connectors.go | 45 + internal/api/handler/space/list_repos.go | 51 + internal/api/handler/space/list_secrets.go | 52 + .../handler/space/list_service_accounts.go | 45 + internal/api/handler/space/list_templates.go | 45 + internal/api/handler/space/membership_add.go | 53 + .../api/handler/space/membership_delete.go | 51 + internal/api/handler/space/membership_list.go | 48 + .../api/handler/space/membership_update.go | 59 + internal/api/handler/space/move.go | 52 + internal/api/handler/space/update.go | 52 + internal/api/handler/system/health.go | 23 + internal/api/handler/system/health_test.go | 21 + internal/api/handler/system/list_config.go | 43 + internal/api/handler/system/version.go | 28 + internal/api/handler/system/version_test.go | 21 + internal/api/handler/template/create.go | 47 + internal/api/handler/template/delete.go | 49 + internal/api/handler/template/find.go | 50 + internal/api/handler/template/update.go | 58 + internal/api/handler/trigger/create.go | 56 + internal/api/handler/trigger/delete.go | 53 + internal/api/handler/trigger/find.go | 53 + internal/api/handler/trigger/list.go | 51 + internal/api/handler/trigger/update.go | 62 + .../api/handler/user/create_access_token.go | 49 + internal/api/handler/user/delete_token.go | 48 + internal/api/handler/user/find.go | 41 + internal/api/handler/user/list_tokens.go | 42 + .../api/handler/user/membership_spaces.go | 42 + internal/api/handler/user/update.go | 49 + internal/api/handler/user/update_admin.go | 54 + internal/api/handler/users/create.go | 48 + internal/api/handler/users/create_test.go | 15 + internal/api/handler/users/delete.go | 45 + internal/api/handler/users/delete_test.go | 15 + internal/api/handler/users/find.go | 45 + internal/api/handler/users/find_test.go | 15 + internal/api/handler/users/list.go | 47 + internal/api/handler/users/list_test.go | 15 + internal/api/handler/users/update.go | 52 + internal/api/handler/users/update_test.go | 15 + internal/api/handler/webhook/create.go | 53 + internal/api/handler/webhook/delete.go | 51 + internal/api/handler/webhook/find.go | 51 + .../api/handler/webhook/find_execution.go | 57 + internal/api/handler/webhook/list.go | 52 + .../api/handler/webhook/list_executions.go | 56 + .../handler/webhook/retrigger_execution.go | 57 + internal/api/handler/webhook/update.go | 59 + internal/api/middleware/address/address.go | 92 + .../api/middleware/address/address_test.go | 15 + internal/api/middleware/authn/authn.go | 91 + internal/api/middleware/encode/encode.go | 114 + internal/api/middleware/logging/logging.go | 79 + .../api/middleware/principal/principal.go | 71 + internal/api/openapi/account.go | 87 + internal/api/openapi/check.go | 62 + internal/api/openapi/common.go | 98 + internal/api/openapi/connector.go | 89 + internal/api/openapi/openapi.go | 90 + internal/api/openapi/openapi_test.go | 15 + internal/api/openapi/pipeline.go | 311 + internal/api/openapi/plugin.go | 57 + internal/api/openapi/principals.go | 96 + internal/api/openapi/pullreq.go | 543 + internal/api/openapi/repo.go | 679 + internal/api/openapi/resource.go | 48 + 
internal/api/openapi/secret.go | 89 + internal/api/openapi/space.go | 375 + internal/api/openapi/system.go | 37 + internal/api/openapi/template.go | 89 + internal/api/openapi/user.go | 102 + internal/api/openapi/users.go | 120 + internal/api/openapi/webhook.go | 181 + internal/api/render/header.go | 103 + internal/api/render/header_test.go | 15 + internal/api/render/platform/render.go | 42 + internal/api/render/render.go | 178 + internal/api/render/render_test.go | 162 + internal/api/render/util.go | 37 + internal/api/render/util_test.go | 38 + internal/api/request/auth.go | 36 + internal/api/request/check.go | 29 + internal/api/request/connector.go | 20 + internal/api/request/context.go | 121 + internal/api/request/context_test.go | 21 + internal/api/request/git.go | 107 + internal/api/request/header.go | 22 + internal/api/request/membership.go | 54 + internal/api/request/pipeline.go | 74 + internal/api/request/principal.go | 98 + internal/api/request/pullreq.go | 155 + internal/api/request/repo.go | 61 + internal/api/request/secret.go | 34 + internal/api/request/space.go | 55 + internal/api/request/template.go | 20 + internal/api/request/token.go | 27 + internal/api/request/util.go | 262 + internal/api/request/util_test.go | 15 + internal/api/request/webhook.go | 61 + internal/api/usererror/translate.go | 141 + internal/api/usererror/usererror.go | 142 + internal/api/usererror/usererror_test.go | 24 + internal/auth/authn/authenticator.go | 50 + internal/auth/authn/jwt.go | 168 + internal/auth/authn/wire.go | 31 + internal/auth/authz/authz.go | 58 + internal/auth/authz/membership.go | 173 + internal/auth/authz/membership_cache.go | 104 + internal/auth/authz/unsafe.go | 62 + internal/auth/authz/wire.go | 41 + internal/auth/metadata.go | 48 + internal/auth/session.go | 28 + internal/bootstrap/bootstrap.go | 221 + internal/bootstrap/wire.go | 31 + internal/config/url.go | 5 + internal/cron/nightly.go | 49 + internal/cron/nightly_test.go | 15 + internal/cron/wire.go | 20 + internal/events/git/branch.go | 97 + internal/events/git/events.go | 20 + internal/events/git/reader.go | 40 + internal/events/git/reporter.go | 39 + internal/events/git/tag.go | 97 + internal/events/git/wire.go | 35 + internal/events/pullreq/category.go | 20 + internal/events/pullreq/events.go | 23 + internal/events/pullreq/events_branch.go | 53 + internal/events/pullreq/events_state.go | 133 + internal/events/pullreq/reader.go | 38 + internal/events/pullreq/reporter.go | 37 + internal/events/pullreq/wire.go | 35 + internal/githook/githook.go | 109 + internal/inernal_test.go | 15 + internal/internal.go | 24 + internal/jwt/jwt.go | 107 + internal/paths/paths.go | 99 + internal/pipeline/canceler/canceler.go | 143 + internal/pipeline/canceler/wire.go | 39 + internal/pipeline/checks/write.go | 57 + internal/pipeline/commit/gitness.go | 75 + internal/pipeline/commit/service.go | 34 + internal/pipeline/commit/wire.go | 32 + internal/pipeline/file/gitness.go | 74 + internal/pipeline/file/service.go | 41 + internal/pipeline/file/wire.go | 32 + internal/pipeline/manager/client.go | 178 + internal/pipeline/manager/convert.go | 261 + internal/pipeline/manager/manager.go | 485 + internal/pipeline/manager/setup.go | 136 + internal/pipeline/manager/teardown.go | 354 + internal/pipeline/manager/updater.go | 83 + internal/pipeline/manager/wire.go | 61 + internal/pipeline/plugin/manager.go | 240 + internal/pipeline/plugin/wire.go | 35 + internal/pipeline/runner/poller.go | 55 + internal/pipeline/runner/runner.go | 102 + 
internal/pipeline/runner/wire.go | 52 + internal/pipeline/scheduler/canceler.go | 90 + internal/pipeline/scheduler/queue.go | 305 + internal/pipeline/scheduler/scheduler.go | 69 + internal/pipeline/scheduler/wire.go | 35 + internal/pipeline/triggerer/dag/dag.go | 147 + internal/pipeline/triggerer/dag/dag_test.go | 219 + internal/pipeline/triggerer/skip.go | 65 + internal/pipeline/triggerer/trigger.go | 530 + internal/pipeline/triggerer/wire.go | 44 + internal/request/request.go | 73 + internal/router/api.go | 618 + internal/router/git.go | 93 + internal/router/router.go | 168 + internal/router/router_test.go | 38 + internal/router/web.go | 86 + internal/router/wire.go | 104 + internal/server/server.go | 25 + internal/server/server_test.go | 15 + internal/server/wire.go | 40 + internal/services/codecomments/migrator.go | 245 + .../services/codecomments/migrator_test.go | 173 + internal/services/codecomments/wire.go | 33 + .../services/exporter/harness_code_client.go | 228 + internal/services/exporter/repository.go | 268 + internal/services/exporter/wire.go | 41 + internal/services/importer/id.go | 34 + internal/services/importer/pipelines.go | 246 + internal/services/importer/provider.go | 246 + internal/services/importer/repository.go | 460 + internal/services/importer/wire.go | 66 + internal/services/job/definition.go | 77 + internal/services/job/executor.go | 161 + internal/services/job/job_overdue.go | 99 + internal/services/job/job_purge.go | 77 + internal/services/job/lock.go | 33 + internal/services/job/pubsub.go | 70 + internal/services/job/scheduler.go | 755 + internal/services/job/timer.go | 121 + internal/services/job/timer_test.go | 115 + internal/services/job/uid.go | 35 + internal/services/job/wire.go | 57 + internal/services/metric/metrics.go | 136 + internal/services/metric/wire.go | 41 + internal/services/pullreq/handlers_branch.go | 231 + .../pullreq/handlers_code_comments.go | 78 + .../services/pullreq/handlers_counters.go | 90 + .../services/pullreq/handlers_file_viewed.go | 96 + .../services/pullreq/handlers_head_ref.go | 119 + .../services/pullreq/handlers_mergeable.go | 279 + internal/services/pullreq/service.go | 284 + internal/services/pullreq/wire.go | 60 + internal/services/trigger/handler_branch.go | 94 + internal/services/trigger/handler_pullreq.go | 98 + internal/services/trigger/handler_tag.go | 64 + internal/services/trigger/service.go | 186 + internal/services/trigger/wire.go | 48 + internal/services/webhook/events.go | 195 + internal/services/webhook/handler_branch.go | 163 + internal/services/webhook/handler_pullreq.go | 187 + internal/services/webhook/handler_tag.go | 131 + internal/services/webhook/http_client.go | 101 + internal/services/webhook/service.go | 173 + internal/services/webhook/trigger.go | 487 + internal/services/webhook/types.go | 195 + internal/services/webhook/wire.go | 45 + internal/services/wire.go | 53 + internal/sse/sse.go | 106 + internal/sse/wire.go | 31 + internal/store/cache.go | 31 + internal/store/cache/path.go | 60 + internal/store/cache/wire.go | 57 + internal/store/database.go | 726 + internal/store/database/check.go | 300 + internal/store/database/check_req.go | 201 + internal/store/database/code_comment.go | 166 + internal/store/database/connector.go | 278 + internal/store/database/encode.go | 31 + internal/store/database/execution.go | 370 + internal/store/database/execution_map.go | 104 + internal/store/database/job.go | 486 + internal/store/database/membership.go | 483 + internal/store/database/migrate/migrate.go | 135 
+ .../0000_create_extension_btree.up.sql | 1 + .../0000_create_extension_citext.up.sql | 1 + .../0000_create_extension_trgm.up.sql | 1 + .../0001_create_table_a_principals.up.sql | 20 + .../0001_create_table_b_spaces.up.sql | 16 + .../0001_create_table_c_repositories.up.sql | 28 + .../postgres/0001_create_table_d_paths.up.sql | 26 + .../0001_create_table_e_tokens.up.sql | 16 + ...eate_index_paths_repo_id_is_primary.up.sql | 2 + ...ate_index_paths_space_id_is_primary.up.sql | 2 + ...create_index_principals_lower_email.up.sql | 2 + ...ncipals_sa_parent_id_sa_parent_type.up.sql | 2 + ...create_index_repositories_parent_id.up.sql | 2 + .../0002_create_index_spaces_parent_id.up.sql | 2 + ...02_create_index_tokens_principal_id.up.sql | 2 + .../0003_create_table_pullreqs.up.sql | 44 + ...urce_repo_branch_target_repo_branch.up.sql | 3 + ...ndex_pullreqs_target_repo_id_number.up.sql | 2 + ...005_create_table_pullreq_activities.up.sql | 42 + ...tivities_pullreq_id_order_sub_order.up.sql | 2 + .../0007_create_table_webhooks.up.sql | 29 + .../0008_create_index_webhooks_repo_id.up.sql | 2 + ...0008_create_index_webhooks_space_id.up.sql | 2 + ...009_create_table_webhook_executions.up.sql | 19 + ...index_webhook_executions_webhook_id.up.sql | 2 + .../0011_create_table_pullreq_reviews.up.sql | 17 + ...te_index_pullreq_reviews_pullreq_id.up.sql | 2 + ...13_create_table_pullreq_reviewers.down.sql | 2 + ...0013_create_table_pullreq_reviewers.up.sql | 33 + ...er_pullreq_activity_code_comments.down.sql | 9 + ...lter_pullreq_activity_code_comments.up.sql | 9 + ...r_pullreq_merge_base_not_nullable.down.sql | 4 + ...ter_pullreq_merge_base_not_nullable.up.sql | 4 + ...0016_alter_pullreq_add_unresolved.down.sql | 1 + .../0016_alter_pullreq_add_unresolved.up.sql | 18 + .../0017_create_table_checks.down.sql | 6 + .../postgres/0017_create_table_checks.up.sql | 49 + ...8_alter_check_add_payload_version.down.sql | 4 + ...018_alter_check_add_payload_version.up.sql | 4 + .../0019_create_table_memberships.down.sql | 2 + .../0019_create_table_memberships.up.sql | 21 + ...pullreq_source_repo_id_constraint.down.sql | 8 + ...r_pullreq_source_repo_id_constraint.up.sql | 8 + ..._alter_table_webhook_add_internal_down.sql | 1 + ...21_alter_table_webhook_add_internal_up.sql | 2 + .../postgres/0022_create_table_jobs.down.sql | 4 + .../postgres/0022_create_table_jobs.up.sql | 35 + .../0023_index_jobs_last_executed.down.sql | 4 + .../0023_index_jobs_last_executed.up.sql | 4 + .../0024_alter_repo_add_importing.down.sql | 4 + .../0024_alter_repo_add_importing.up.sql | 8 + ...0025_alter_table_job_add_group_id.down.sql | 1 + .../0025_alter_table_job_add_group_id.up.sql | 1 + .../0026_alter_repo_drop_job_id.up.sql | 3 + .../0026_alter_repo_drop_join_id.down.sql | 7 + .../postgres/0027_create_ci_tables.down.sql | 10 + .../postgres/0027_create_ci_tables.up.sql | 206 + .../0028_alter_token_drop_grants.down.sql | 1 + .../0028_alter_token_drop_grants.up.sql | 1 + ...029_create_index_job_job_group_id_down.sql | 1 + .../0029_create_index_job_job_group_id_up.sql | 1 + .../0030_create_table_space_paths.down.sql | 2 + .../0030_create_table_space_paths.up.sql | 61 + .../0031_alter_index_repositories.down.sql | 2 + .../0031_alter_index_repositories.up.sql | 2 + ...2_create_table_pullreq_file_views.down.sql | 1 + ...032_create_table_pullreq_file_views.up.sql | 26 + .../postgres/0033_alter_ci_tables.up.sql | 3 + .../0001_create_table_a_principals.up.sql | 20 + .../sqlite/0001_create_table_b_spaces.up.sql | 16 + 
.../0001_create_table_c_repositories.up.sql | 27 + .../sqlite/0001_create_table_d_paths.up.sql | 27 + .../sqlite/0001_create_table_e_tokens.up.sql | 16 + ...eate_index_paths_repo_id_is_primary.up.sql | 2 + ...ate_index_paths_space_id_is_primary.up.sql | 2 + ...create_index_principals_lower_email.up.sql | 2 + ...ncipals_sa_parent_id_sa_parent_type.up.sql | 2 + ...create_index_repositories_parent_id.up.sql | 2 + .../0002_create_index_spaces_parent_id.up.sql | 2 + ...02_create_index_tokens_principal_id.up.sql | 2 + .../sqlite/0003_create_table_pullreqs.up.sql | 44 + ...urce_repo_branch_target_repo_branch.up.sql | 3 + ...ndex_pullreqs_target_repo_id_number.up.sql | 2 + ...005_create_table_pullreq_activities.up.sql | 42 + ...tivities_pullreq_id_order_sub_order.up.sql | 2 + .../sqlite/0007_create_table_webhooks.up.sql | 29 + .../0008_create_index_webhooks_repo_id.up.sql | 2 + ...0008_create_index_webhooks_space_id.up.sql | 2 + ...009_create_table_webhook_executions.up.sql | 19 + ...index_webhook_executions_webhook_id.up.sql | 2 + .../0011_create_table_pullreq_reviews.up.sql | 17 + ...te_index_pullreq_reviews_pullreq_id.up.sql | 2 + ...13_create_table_pullreq_reviewers.down.sql | 2 + ...0013_create_table_pullreq_reviewers.up.sql | 33 + ...er_pullreq_activity_code_comments.down.sql | 8 + ...lter_pullreq_activity_code_comments.up.sql | 8 + ...r_pullreq_merge_base_not_nullable.down.sql | 4 + ...ter_pullreq_merge_base_not_nullable.up.sql | 4 + ...0016_alter_pullreq_add_unresolved.down.sql | 1 + .../0016_alter_pullreq_add_unresolved.up.sql | 18 + .../sqlite/0017_create_table_checks.down.sql | 6 + .../sqlite/0017_create_table_checks.up.sql | 49 + ...8_alter_check_add_payload_version.down.sql | 3 + ...018_alter_check_add_payload_version.up.sql | 3 + .../0019_create_table_memberships.down.sql | 2 + .../0019_create_table_memberships.up.sql | 21 + ...pullreq_source_repo_id_constraint.down.sql | 116 + ...r_pullreq_source_repo_id_constraint.up.sql | 116 + ..._alter_table_webhook_add_internal_down.sql | 1 + ...21_alter_table_webhook_add_internal_up.sql | 2 + .../sqlite/0022_create_table_jobs.down.sql | 4 + .../sqlite/0022_create_table_jobs.up.sql | 35 + .../0023_index_jobs_last_executed.down.sql | 4 + .../0023_index_jobs_last_executed.up.sql | 4 + .../0024_alter_repo_add_importing.down.sql | 2 + .../0024_alter_repo_add_importing.up.sql | 2 + ...0025_alter_table_job_add_group_id.down.sql | 1 + .../0025_alter_table_job_add_group_id.up.sql | 1 + .../0026_alter_repo_drop_job_id.down.sql | 1 + .../sqlite/0026_alter_repo_drop_job_id.up.sql | 1 + .../sqlite/0027_create_ci_tables.down.sql | 10 + .../sqlite/0027_create_ci_tables.up.sql | 284 + .../0028_alter_token_drop_grants.down.sql | 1 + .../0028_alter_token_drop_grants.up.sql | 1 + ...029_create_index_job_job_group_id_down.sql | 1 + .../0029_create_index_job_job_group_id_up.sql | 1 + .../0030_create_table_space_paths.down.sql | 2 + .../0030_create_table_space_paths.up.sql | 57 + .../0031_alter_index_repositories.down.sql | 2 + .../0031_alter_index_repositories.up.sql | 2 + ...2_create_table_pullreq_file_views.down.sql | 1 + ...032_create_table_pullreq_file_views.up.sql | 26 + .../sqlite/0033_alter_ci_tables.up.sql | 2 + internal/store/database/mutex/mutex.go | 32 + internal/store/database/pipeline.go | 392 + internal/store/database/pipeline_join.go | 94 + internal/store/database/plugin.go | 208 + internal/store/database/principal.go | 218 + internal/store/database/principal_info.go | 142 + internal/store/database/principal_service.go | 238 + 
.../database/principal_service_account.go | 243 + internal/store/database/principal_user.go | 297 + internal/store/database/pullreq.go | 642 + internal/store/database/pullreq_activity.go | 607 + .../store/database/pullreq_file_view_store.go | 205 + internal/store/database/pullreq_reviewers.go | 315 + internal/store/database/pullreq_reviews.go | 126 + internal/store/database/repo.go | 476 + internal/store/database/repo_git_info.go | 62 + internal/store/database/secret.go | 301 + internal/store/database/space.go | 409 + internal/store/database/space_path.go | 220 + internal/store/database/stage.go | 329 + internal/store/database/stage_map.go | 243 + internal/store/database/step.go | 200 + internal/store/database/step_map.go | 67 + internal/store/database/store_test.go | 81 + internal/store/database/template.go | 276 + internal/store/database/testdata/repos.json | 32 + internal/store/database/testdata/spaces.json | 22 + internal/store/database/testdata/users.json | 26 + internal/store/database/token.go | 190 + internal/store/database/trigger.go | 361 + internal/store/database/webhook.go | 458 + internal/store/database/webhook_execution.go | 260 + internal/store/database/wire.go | 239 + internal/store/logs.go | 35 + internal/store/logs/combine.go | 61 + internal/store/logs/db.go | 141 + internal/store/logs/s3.go | 98 + internal/store/logs/wire.go | 42 + internal/store/store_test.go | 15 + internal/store/transformation.go | 35 + internal/store/wire.go | 33 + internal/testing/integration/integration.go | 15 + internal/testing/testing.go | 15 + internal/token/token.go | 130 + internal/url/provider.go | 117 + internal/url/wire.go | 35 + internal/writer/writeflush.go | 47 + livelog/livelog.go | 52 + livelog/memory.go | 92 + livelog/stream.go | 90 + livelog/sub.go | 48 + livelog/wire.go | 31 + lock/config.go | 44 + lock/lock.go | 75 + lock/memory.go | 226 + lock/memory_test.go | 143 + lock/options.go | 98 + lock/redis.go | 115 + lock/util.go | 31 + lock/wire.go | 34 + profiler/gcpprofiler.go | 35 + profiler/noopprofiler.go | 24 + profiler/profiler.go | 48 + profiler/profiler_test.go | 52 + pubsub/config.go | 35 + pubsub/inmem.go | 230 + pubsub/options.go | 168 + pubsub/pubsub.go | 37 + pubsub/redis.go | 185 + pubsub/wire.go | 61 + resources/embed.go | 61 + resources/gitignore/AL.gitignore | 22 + resources/gitignore/Actionscript.gitignore | 18 + resources/gitignore/Ada.gitignore | 5 + resources/gitignore/Agda.gitignore | 2 + resources/gitignore/Android.gitignore | 33 + resources/gitignore/AppEngine.gitignore | 2 + .../gitignore/AppceleratorTitanium.gitignore | 3 + .../gitignore/ArchLinuxPackages.gitignore | 13 + resources/gitignore/Autotools.gitignore | 52 + resources/gitignore/C++.gitignore | 32 + resources/gitignore/C.gitignore | 52 + resources/gitignore/CFWheels.gitignore | 12 + resources/gitignore/CMake.gitignore | 11 + resources/gitignore/CONTRIBUTING.md | 39 + resources/gitignore/CUDA.gitignore | 6 + resources/gitignore/CakePHP.gitignore | 25 + resources/gitignore/ChefCookbook.gitignore | 9 + resources/gitignore/Clojure.gitignore | 14 + resources/gitignore/CodeIgniter.gitignore | 18 + resources/gitignore/CommonLisp.gitignore | 17 + resources/gitignore/Composer.gitignore | 6 + resources/gitignore/Concrete5.gitignore | 21 + resources/gitignore/Coq.gitignore | 45 + resources/gitignore/CraftCMS.gitignore | 4 + resources/gitignore/D.gitignore | 24 + resources/gitignore/DM.gitignore | 5 + resources/gitignore/Dart.gitignore | 27 + resources/gitignore/Delphi.gitignore | 69 + 
resources/gitignore/Drupal.gitignore | 62 + resources/gitignore/EPiServer.gitignore | 4 + resources/gitignore/Eagle.gitignore | 51 + resources/gitignore/Elisp.gitignore | 11 + resources/gitignore/Elixir.gitignore | 10 + resources/gitignore/Elm.gitignore | 4 + resources/gitignore/Erlang.gitignore | 17 + .../gitignore/ExpressionEngine.gitignore | 19 + resources/gitignore/ExtJs.gitignore | 14 + resources/gitignore/Fancy.gitignore | 2 + resources/gitignore/Finale.gitignore | 13 + resources/gitignore/FlaxEngine.gitignore | 45 + resources/gitignore/ForceDotCom.gitignore | 4 + resources/gitignore/Fortran.gitignore | 32 + resources/gitignore/FuelPHP.gitignore | 21 + resources/gitignore/GWT.gitignore | 25 + resources/gitignore/Gcov.gitignore | 5 + resources/gitignore/GitBook.gitignore | 16 + resources/gitignore/Go.gitignore | 21 + resources/gitignore/Godot.gitignore | 11 + resources/gitignore/Gradle.gitignore | 21 + resources/gitignore/Grails.gitignore | 33 + resources/gitignore/Haskell.gitignore | 23 + resources/gitignore/IGORPro.gitignore | 5 + resources/gitignore/Idris.gitignore | 7 + resources/gitignore/JBoss.gitignore | 19 + resources/gitignore/JENKINS_HOME.gitignore | 50 + resources/gitignore/Java.gitignore | 24 + resources/gitignore/Jekyll.gitignore | 7 + resources/gitignore/Joomla.gitignore | 705 + resources/gitignore/Julia.gitignore | 24 + resources/gitignore/KiCad.gitignore | 29 + resources/gitignore/Kohana.gitignore | 2 + resources/gitignore/Kotlin.gitignore | 24 + resources/gitignore/LICENSE | 116 + resources/gitignore/LabVIEW.gitignore | 17 + resources/gitignore/Laravel.gitignore | 23 + resources/gitignore/Leiningen.gitignore | 14 + resources/gitignore/LemonStand.gitignore | 21 + resources/gitignore/Lilypond.gitignore | 6 + resources/gitignore/Lithium.gitignore | 2 + resources/gitignore/Lua.gitignore | 41 + resources/gitignore/Magento.gitignore | 45 + resources/gitignore/Maven.gitignore | 17 + resources/gitignore/Mercury.gitignore | 13 + .../gitignore/MetaProgrammingSystem.gitignore | 16 + resources/gitignore/Nanoc.gitignore | 10 + resources/gitignore/Nim.gitignore | 3 + resources/gitignore/Node.gitignore | 130 + resources/gitignore/OCaml.gitignore | 29 + resources/gitignore/Objective-C.gitignore | 68 + resources/gitignore/Opa.gitignore | 13 + resources/gitignore/OpenCart.gitignore | 20 + resources/gitignore/OracleForms.gitignore | 8 + resources/gitignore/Packer.gitignore | 16 + resources/gitignore/Perl.gitignore | 35 + resources/gitignore/Phalcon.gitignore | 2 + resources/gitignore/PlayFramework.gitignore | 16 + resources/gitignore/Plone.gitignore | 18 + resources/gitignore/Prestashop.gitignore | 173 + resources/gitignore/Processing.gitignore | 10 + resources/gitignore/PureScript.gitignore | 9 + resources/gitignore/Python.gitignore | 160 + resources/gitignore/Qooxdoo.gitignore | 5 + resources/gitignore/Qt.gitignore | 54 + resources/gitignore/R.gitignore | 49 + resources/gitignore/README.md | 155 + resources/gitignore/ROS.gitignore | 51 + resources/gitignore/Racket.gitignore | 7 + resources/gitignore/Rails.gitignore | 69 + resources/gitignore/Raku.gitignore | 7 + resources/gitignore/RhodesRhomobile.gitignore | 9 + resources/gitignore/Ruby.gitignore | 56 + resources/gitignore/Rust.gitignore | 14 + resources/gitignore/SCons.gitignore | 6 + resources/gitignore/Sass.gitignore | 4 + resources/gitignore/Scala.gitignore | 5 + resources/gitignore/Scheme.gitignore | 7 + resources/gitignore/Scrivener.gitignore | 8 + resources/gitignore/Sdcc.gitignore | 8 + resources/gitignore/SeamGen.gitignore | 
26 + resources/gitignore/SketchUp.gitignore | 1 + resources/gitignore/Smalltalk.gitignore | 31 + resources/gitignore/Stella.gitignore | 12 + resources/gitignore/SugarCRM.gitignore | 27 + resources/gitignore/Swift.gitignore | 90 + resources/gitignore/Symfony.gitignore | 52 + resources/gitignore/SymphonyCMS.gitignore | 6 + resources/gitignore/TeX.gitignore | 301 + resources/gitignore/Terraform.gitignore | 34 + resources/gitignore/Textpattern.gitignore | 11 + resources/gitignore/TurboGears2.gitignore | 20 + resources/gitignore/TwinCAT3.gitignore | 25 + resources/gitignore/Typo3.gitignore | 23 + resources/gitignore/Unity.gitignore | 72 + resources/gitignore/UnrealEngine.gitignore | 74 + resources/gitignore/VVVV.gitignore | 6 + resources/gitignore/VisualStudio.gitignore | 398 + resources/gitignore/Waf.gitignore | 9 + resources/gitignore/WordPress.gitignore | 48 + resources/gitignore/Xojo.gitignore | 11 + resources/gitignore/Yeoman.gitignore | 6 + resources/gitignore/Yii.gitignore | 6 + resources/gitignore/ZendFramework.gitignore | 24 + resources/gitignore/Zephir.gitignore | 26 + resources/license/afl-3.0.txt | 43 + resources/license/agpl-3.0.txt | 235 + resources/license/apache-2.0.txt | 73 + resources/license/artistic-2.0.txt | 85 + resources/license/bsd-2-clause.txt | 9 + resources/license/bsd-3-clause-clear.txt | 14 + resources/license/bsd-3-clause.txt | 11 + resources/license/bsl-1.0.txt | 7 + resources/license/cc-by-4.0.txt | 156 + resources/license/cc-by-sa-4.0.txt | 170 + resources/license/cc.txt | 0 resources/license/cc0-1.0.txt | 121 + resources/license/ecl-2.0.txt | 98 + resources/license/epl-1.0.txt | 73 + resources/license/epl-2.0.txt | 80 + resources/license/eupl-1.1.txt | 157 + resources/license/gpl-2.0.txt | 117 + resources/license/gpl-3.0.txt | 232 + resources/license/gpl.txt | 100 + resources/license/index.json | 31 + resources/license/isc.txt | 8 + resources/license/lgpl-2.1.txt | 175 + resources/license/lgpl-3.0.txt | 304 + resources/license/lgpl.txt | 0 resources/license/mit.txt | 21 + resources/license/mpl-2.0.txt | 373 + resources/license/osl-3.0.txt | 47 + resources/license/unlicense.txt | 10 + resources/license/zlib.txt | 11 + scripts/wire/gitrpcserver/wire.sh | 4 + scripts/wire/server/standalone.sh | 4 + store/database/config.go | 21 + store/database/dbtx/ctx.go | 55 + store/database/dbtx/db.go | 50 + store/database/dbtx/interface.go | 54 + store/database/dbtx/locker.go | 52 + store/database/dbtx/runner.go | 186 + store/database/dbtx/runner_test.go | 345 + store/database/dbtx/tx.go | 26 + store/database/store.go | 136 + store/database/util.go | 70 + store/database/util_pq.go | 32 + store/database/util_sqlite.go | 33 + store/database/util_test.go | 88 + store/errors.go | 34 + stream/memory_broker.go | 117 + stream/memory_consumer.go | 236 + stream/memory_producer.go | 54 + stream/options.go | 100 + stream/redis_consumer.go | 579 + stream/redis_producer.go | 67 + stream/stream.go | 76 + types/authz.go | 42 + types/check.go | 73 + types/check/common.go | 158 + types/check/error.go | 62 + types/check/password.go | 44 + types/check/path.go | 88 + types/check/service_account.go | 43 + types/check/token.go | 50 + types/check/wire.go | 33 + types/code_comment.go | 34 + types/config.go | 249 + types/config_test.go | 15 + types/connector.go | 17 + types/enum/check.go | 65 + types/enum/ci_status.go | 92 + types/enum/common.go | 74 + types/enum/encoding.go | 33 + types/enum/git.go | 89 + types/enum/job.go | 56 + types/enum/membership.go | 113 + types/enum/membership_role.go | 118 + 
types/enum/order.go | 56 + types/enum/order_test.go | 44 + types/enum/permission.go | 126 + types/enum/principal.go | 37 + types/enum/pullreq.go | 225 + types/enum/repo.go | 66 + types/enum/resource.go | 34 + types/enum/scm.go | 34 + types/enum/space.go | 59 + types/enum/sse.go | 31 + types/enum/token.go | 29 + types/enum/trigger_actions.go | 60 + types/enum/trigger_events.go | 27 + types/enum/user.go | 53 + types/enum/user_test.go | 40 + types/enum/webhook.go | 148 + types/execution.go | 58 + types/git.go | 88 + types/githook.go | 51 + types/job.go | 56 + types/list_filters.go | 21 + types/membership.go | 64 + types/pagination.go | 21 + types/path.go | 39 + types/pipeline.go | 31 + types/plugin.go | 50 + types/principal.go | 73 + types/pullreq.go | 144 + types/pullreq_activity.go | 279 + types/repo.go | 70 + types/secret.go | 30 + types/service.go | 53 + types/service_account.go | 65 + types/space.go | 53 + types/stage.go | 48 + types/step.go | 50 + types/stream.go | 19 + types/template.go | 16 + types/token.go | 39 + types/trigger.go | 23 + types/types_test.go | 15 + types/user.go | 76 + types/webhook.go | 106 + version/version.go | 62 + version/version_test.go | 23 + web/.eslintignore | 6 + web/.eslintrc.yml | 126 + web/.prettierrc.yml | 10 + web/.vscode/extensions.json | 3 + web/.vscode/settings.json | 27 + web/config/moduleFederation.config.js | 49 + web/config/webpack.common.js | 240 + web/config/webpack.dev.js | 102 + web/config/webpack.prod.js | 55 + web/dist.go | 118 + web/jest.config.js | 70 + web/jest.coverage.config.js | 9 + web/package.json | 159 + web/restful-react.config.js | 32 + web/scripts/clean-css-types.js | 55 + .../eslint-rules/duplicate-data-tooltip-id.js | 43 + web/scripts/eslint-rules/jest-no-mock.js | 59 + .../eslint-rules/no-document-body-snapshot.js | 44 + web/scripts/jest/file-mock.js | 17 + web/scripts/jest/gql-loader.js | 26 + web/scripts/jest/setup-file.js | 59 + web/scripts/jest/yaml-transform.js | 25 + web/scripts/strings/generateTypes.cjs | 79 + web/scripts/strings/generateTypesCli.mjs | 21 + web/scripts/swagger-custom-generator.js | 37 + web/scripts/swagger-transform.js | 61 + web/scripts/utils/runPrettier.cjs | 33 + .../webpack/GenerateStringTypesPlugin.js | 34 + web/src/App.module.scss | 39 + web/src/App.module.scss.d.ts | 20 + web/src/App.tsx | 118 + web/src/AppContext.tsx | 83 + web/src/AppProps.ts | 71 + web/src/AppUtils.ts | 53 + web/src/RouteDefinitions.ts | 155 + web/src/RouteDestinations.tsx | 333 + web/src/bootstrap.scss | 35 + web/src/bootstrap.scss.d.ts | 19 + web/src/bootstrap.tsx | 43 + .../AuthLayout/AuthLayout.module.scss | 98 + .../AuthLayout/AuthLayout.module.scss.d.ts | 31 + web/src/components/AuthLayout/AuthLayout.tsx | 46 + .../BranchTagSelect.module.scss | 119 + .../BranchTagSelect.module.scss.d.ts | 29 + .../BranchTagSelect/BranchTagSelect.tsx | 323 + .../components/Changes/Changes.module.scss | 170 + .../Changes/Changes.module.scss.d.ts | 37 + web/src/components/Changes/Changes.tsx | 417 + .../Changes/ChangesDropdown.module.scss | 49 + .../Changes/ChangesDropdown.module.scss.d.ts | 22 + .../components/Changes/ChangesDropdown.tsx | 103 + .../CommitRangeDropdown.tsx | 168 + .../Changes/DiffViewConfiguration.tsx | 101 + .../ReviewSplitButton/ReviewSplitButton.tsx | 129 + .../CloneButtonTooltip.module.scss | 48 + .../CloneButtonTooltip.module.scss.d.ts | 23 + .../CloneButtonTooltip/CloneButtonTooltip.tsx | 64 + .../CloneCredentialDialog.module.scss | 41 + .../CloneCredentialDialog.module.scss.d.ts | 21 + .../CloneCredentialDialog.tsx 
| 123 + .../CodeCommentSecondarySaveButton.tsx | 90 + .../CodeCommentStatusButton.tsx | 89 + .../CodeCommentStatusSelect.module.scss | 31 + .../CodeCommentStatusSelect.module.scss.d.ts | 19 + .../CodeCommentStatusSelect.tsx | 106 + .../CommentBox/CommentBox.module.scss | 129 + .../CommentBox/CommentBox.module.scss.d.ts | 33 + web/src/components/CommentBox/CommentBox.tsx | 459 + .../CommitActions/CommitActions.module.scss | 45 + .../CommitActions.module.scss.d.ts | 22 + .../CommitActions/CommitActions.tsx | 70 + .../CommitDivergence.module.scss | 89 + .../CommitDivergence.module.scss.d.ts | 23 + .../CommitDivergence/CommitDivergence.tsx | 83 + .../CommitInfo/CommitInfo.module.scss | 45 + .../CommitInfo/CommitInfo.module.scss.d.ts | 24 + web/src/components/CommitInfo/CommitInfo.tsx | 105 + .../CommitModalButton.module.scss | 74 + .../CommitModalButton.module.scss.d.ts | 24 + .../CommitModalButton/CommitModalButton.tsx | 282 + .../CommitsView/CommitsView.module.scss | 104 + .../CommitsView/CommitsView.module.scss.d.ts | 29 + .../components/CommitsView/CommitsView.tsx | 267 + .../components/Console/Console.module.scss | 57 + .../Console/Console.module.scss.d.ts | 24 + web/src/components/Console/Console.tsx | 80 + .../ConsoleLogs/ConsoleLogs.module.scss | 49 + .../ConsoleLogs/ConsoleLogs.module.scss.d.ts | 23 + .../components/ConsoleLogs/ConsoleLogs.tsx | 71 + .../ConsoleStep/ConsoleStep.module.scss | 62 + .../ConsoleStep/ConsoleStep.module.scss.d.ts | 24 + .../components/ConsoleStep/ConsoleStep.tsx | 144 + web/src/components/CopyButton/CopyButton.tsx | 50 + .../CreateBranchModal.module.scss | 68 + .../CreateBranchModal.module.scss.d.ts | 26 + .../CreateBranchModal/CreateBranchModal.tsx | 237 + .../CreateTagModal/CreateTagModal.module.scss | 66 + .../CreateTagModal.module.scss.d.ts | 26 + .../CreateTagModal/CreateTagModal.tsx | 245 + .../DiffViewer/DiffViewer.module.scss | 288 + .../DiffViewer/DiffViewer.module.scss.d.ts | 30 + web/src/components/DiffViewer/DiffViewer.tsx | 720 + .../components/DiffViewer/DiffViewerUtils.tsx | 254 + web/src/components/Editor/Editor.module.scss | 46 + .../components/Editor/Editor.module.scss.d.ts | 19 + web/src/components/Editor/Editor.tsx | 158 + .../ExecutionPageHeader.module.scss | 50 + .../ExecutionPageHeader.module.scss.d.ts | 23 + .../ExecutionPageHeader.tsx | 196 + .../ExecutionStageList.module.scss | 62 + .../ExecutionStageList.module.scss.d.ts | 25 + .../ExecutionStageList/ExecutionStageList.tsx | 92 + .../ExecutionStatus.module.scss | 130 + .../ExecutionStatus.module.scss.d.ts | 29 + .../ExecutionStatus/ExecutionStatus.tsx | 105 + .../ExecutionStatusLabel.module.scss | 68 + .../ExecutionStatusLabel.module.scss.d.ts | 23 + .../ExecutionStatusLabel.tsx | 57 + .../ExecutionText/ExecutionText.module.scss | 30 + .../ExecutionText.module.scss.d.ts | 21 + .../ExecutionText/ExecutionText.tsx | 177 + .../GitRefLink/GitRefLink.module.scss | 54 + .../GitRefLink/GitRefLink.module.scss.d.ts | 21 + web/src/components/GitRefLink/GitRefLink.tsx | 51 + .../GitRefsSelect/GitRefsSelect.tsx | 49 + .../GitnessLogo/GitnessLogo.module.scss | 36 + .../GitnessLogo/GitnessLogo.module.scss.d.ts | 21 + .../components/GitnessLogo/GitnessLogo.tsx | 36 + web/src/components/GitnessLogo/gitness.svg | 1 + .../ImageCarousel/ImageCarousel.module.scss | 86 + .../ImageCarousel.module.scss.d.ts | 23 + .../ImageCarousel/ImageCarousel.tsx | 102 + .../LatestCommit/LatestCommit.module.scss | 64 + .../LatestCommit.module.scss.d.ts | 25 + .../components/LatestCommit/LatestCommit.tsx | 112 + 
.../LoadingSpinner/LoadingSpinner.module.scss | 49 + .../LoadingSpinner.module.scss.d.ts | 23 + .../LoadingSpinner/LoadingSpinner.tsx | 45 + .../LogViewer/LogViewer.module.scss | 35 + .../LogViewer/LogViewer.module.scss.d.ts | 20 + web/src/components/LogViewer/LogViewer.tsx | 46 + .../MarkdownEditorWithPreview.module.scss | 264 + ...MarkdownEditorWithPreview.module.scss.d.ts | 29 + .../MarkdownEditorWithPreview.tsx | 336 + .../MarkdownViewer/MarkdownViewer.module.scss | 61 + .../MarkdownViewer.module.scss.d.ts | 20 + .../MarkdownViewer/MarkdownViewer.tsx | 151 + .../NavigationCheck.module.scss | 23 + .../NavigationCheck.module.scss.d.ts | 19 + .../NavigationCheck/NavigationCheck.tsx | 72 + .../NewPipelineModal.module.scss | 33 + .../NewPipelineModal.module.scss.d.ts | 19 + .../NewPipelineModal/NewPipelineModal.tsx | 178 + .../ImportForm/ImportForm.tsx | 249 + .../NewRepoModalButton.module.scss | 50 + .../NewRepoModalButton.module.scss.d.ts | 23 + .../NewRepoModalButton/NewRepoModalButton.tsx | 464 + .../NewSecretModalButton.tsx | 186 + .../ImportSpaceForm/ImportSpaceForm.tsx | 368 + .../NewSpaceModalButton.module.scss | 170 + .../NewSpaceModalButton.module.scss.d.ts | 34 + .../NewSpaceModalButton.tsx | 304 + .../NewTriggerModalButton.module.scss | 29 + .../NewTriggerModalButton.module.scss.d.ts | 20 + .../NewTriggerModalButton.tsx | 186 + .../NoExecutionsCard.module.scss | 21 + .../NoExecutionsCard.module.scss.d.ts | 19 + .../NoExecutionsCard/NoExecutionsCard.tsx | 39 + .../NoResultCard/NoResultCard.module.scss | 21 + .../NoResultCard.module.scss.d.ts | 19 + .../components/NoResultCard/NoResultCard.tsx | 78 + .../OptionsMenuButton.module.scss | 44 + .../OptionsMenuButton.module.scss.d.ts | 21 + .../OptionsMenuButton/OptionsMenuButton.tsx | 94 + .../PipeSeparator/PipeSeparator.tsx | 25 + .../PipelineSettings.module.scss | 28 + .../PipelineSettings.module.scss.d.ts | 21 + .../PipelineSettings/PipelineSettings.tsx | 96 + .../PipelineSettingsPageHeader.module.scss | 47 + ...ipelineSettingsPageHeader.module.scss.d.ts | 23 + .../PipelineSettingsPageHeader.tsx | 98 + .../PipelineSettingsTab.module.scss | 38 + .../PipelineSettingsTab.module.scss.d.ts | 22 + .../PipelineSettingsTab.tsx | 196 + .../PipelineTriggersTab.module.scss | 86 + .../PipelineTriggersTab.module.scss.d.ts | 31 + .../PipelineTriggersTab.tsx | 356 + .../PlainButton/PlainButton.module.scss | 33 + .../PlainButton/PlainButton.module.scss.d.ts | 20 + .../components/PlainButton/PlainButton.tsx | 21 + .../PluginsPanel/PluginsPanel.module.scss | 91 + .../PluginsPanel.module.scss.d.ts | 31 + .../components/PluginsPanel/PluginsPanel.tsx | 601 + .../PluginsPanel/plugins/plugins.json | 6135 ++++++++ .../PullRequestStateLabel.module.scss | 74 + .../PullRequestStateLabel.module.scss.d.ts | 24 + .../PullRequestStateLabel.tsx | 67 + .../components/RepoMetadata/RepoMetadata.tsx | 35 + .../RepoPublicLabel.module.scss | 28 + .../RepoPublicLabel.module.scss.d.ts | 19 + .../RepoPublicLabel/RepoPublicLabel.tsx | 33 + .../RepositoryPageHeader.module.scss | 28 + .../RepositoryPageHeader.module.scss.d.ts | 20 + .../RepositoryPageHeader.tsx | 95 + .../ResourceListingPagination.module.scss | 58 + ...ResourceListingPagination.module.scss.d.ts | 24 + .../ResourceListingPagination.tsx | 136 + .../ReviewerSelect/ReviewerSelect.module.scss | 104 + .../ReviewerSelect.module.scss.d.ts | 33 + .../ReviewerSelect/ReviewerSelect.tsx | 190 + .../RunPipelineModal.module.scss | 33 + .../RunPipelineModal.module.scss.d.ts | 19 + .../RunPipelineModal/RunPipelineModal.tsx 
| 150 + .../SearchInputWithSpinner.module.scss | 41 + .../SearchInputWithSpinner.module.scss.d.ts | 23 + .../SearchInputWithSpinner.tsx | 93 + .../MonacoSourceCodeEditor.tsx | 171 + .../SourceCodeEditor/SourceCodeEditor.tsx | 25 + .../SourceCodeViewer.module.scss | 38 + .../SourceCodeViewer.module.scss.d.ts | 19 + .../SourceCodeViewer/SourceCodeViewer.tsx | 25 + .../SpaceSelector/SpaceSelector.module.scss | 200 + .../SpaceSelector.module.scss.d.ts | 38 + .../SpaceSelector/SpaceSelector.tsx | 264 + web/src/components/Split/Split.module.scss | 64 + .../components/Split/Split.module.scss.d.ts | 19 + web/src/components/Split/Split.tsx | 24 + .../TabContentWrapper/TabContentWrapper.tsx | 43 + .../TabTitleWithCount.module.scss | 67 + .../TabTitleWithCount.module.scss.d.ts | 21 + .../TabTitleWithCount/TabTitleWithCount.tsx | 54 + .../ThreadSection/ThreadSection.module.scss | 108 + .../ThreadSection.module.scss.d.ts | 25 + .../ThreadSection/ThreadSection.tsx | 63 + .../UpdateSecretModal/UpdateSecretModal.tsx | 179 + .../UserManagementFlows/AddUserModal.tsx | 249 + .../UserManagementFlows/ResetPassword.tsx | 121 + .../UserManagementFlows.module.scss | 65 + .../UserManagementFlows.module.scss.d.ts | 25 + web/src/favicon.svg | 3 + .../AppErrorBoundary.i18n.json | 8 + .../AppErrorBoundary/AppErrorBoundary.tsx | 75 + web/src/framework/strings/String.tsx | 79 + web/src/framework/strings/StringsContext.tsx | 35 + .../strings/StringsContextProvider.tsx | 59 + .../strings/__tests__/Strings.test.tsx | 179 + web/src/framework/strings/index.ts | 20 + web/src/framework/strings/languageLoader.ts | 33 + web/src/framework/strings/stringTypes.ts | 765 + web/src/global.d.ts | 86 + web/src/hooks/useConfirmAction.tsx | 117 + web/src/hooks/useConfirmationDialog.tsx | 93 + web/src/hooks/useDisableCodeMainLinks.ts | 37 + web/src/hooks/useDocumentTitle.tsx | 32 + web/src/hooks/useDownloadRawFile.ts | 70 + web/src/hooks/useEmitCodeCommentStatus.ts | 53 + web/src/hooks/useEventListener.ts | 31 + web/src/hooks/useGetRepositoryMetadata.ts | 70 + web/src/hooks/useGetResourceContent.ts | 49 + web/src/hooks/useGetSpaceParam.ts | 34 + web/src/hooks/useLiveTimeHook.tsx | 41 + web/src/hooks/useLocalStorage.ts | 51 + web/src/hooks/useModalHook.tsx | 148 + web/src/hooks/usePRChecksDecision.tsx | 142 + web/src/hooks/usePageIndex.ts | 21 + web/src/hooks/useQueryParams.ts | 40 + web/src/hooks/useShowRequestError.ts | 30 + web/src/hooks/useSpaceSSE.tsx | 76 + web/src/hooks/useUpdateQueryParams.ts | 45 + web/src/hooks/useUserPreference.ts | 83 + web/src/i18n/strings.en.yaml | 826 ++ web/src/i18n/strings.es.yaml | 0 web/src/icons/Branches.svg | 1 + web/src/icons/CodeFileFill.svg | 4 + web/src/icons/Harness.svg | 1 + web/src/icons/Submodules.svg | 5 + web/src/icons/Symlink.svg | 4 + web/src/icons/Upgrade.svg | 1 + web/src/icons/private.svg | 1 + web/src/images/404-error.svg | 33 + web/src/images/Subtract.png | Bin 0 -> 53100 bytes web/src/images/dark-background.png | Bin 0 -> 7940 bytes web/src/images/empty-state.svg | 1 + web/src/images/gitLogo.png | Bin 0 -> 3728 bytes web/src/images/index.ts | 47 + web/src/images/logo-dark.png | Bin 0 -> 4610 bytes web/src/images/no-space.svg | 167 + web/src/images/pull-request-closed.svg | 1 + web/src/images/pull-request-draft.svg | 1 + web/src/images/pull-request-merged.svg | 1 + web/src/images/pull-request-open.svg | 1 + web/src/images/pull-request-rejected.svg | 1 + web/src/images/pull-request-unchecked.svg | 1 + web/src/images/signup-old.png | Bin 0 -> 20408 bytes web/src/images/signup.png | Bin 0 
-> 21994 bytes web/src/index.html | 15 + web/src/index.tsx | 19 + web/src/layouts/layout.module.scss | 55 + web/src/layouts/layout.module.scss.d.ts | 24 + web/src/layouts/layout.tsx | 83 + web/src/layouts/menu/DefaultMenu.module.scss | 119 + .../layouts/menu/DefaultMenu.module.scss.d.ts | 29 + web/src/layouts/menu/DefaultMenu.tsx | 188 + web/src/layouts/menu/NavMenuItem.module.scss | 93 + .../layouts/menu/NavMenuItem.module.scss.d.ts | 24 + web/src/layouts/menu/NavMenuItem.tsx | 65 + web/src/pages/404/NotFoundPage.tsx | 27 + .../AddUpdatePipeline.module.scss | 90 + .../AddUpdatePipeline.module.scss.d.ts | 30 + .../AddUpdatePipeline/AddUpdatePipeline.tsx | 445 + web/src/pages/AddUpdatePipeline/Constants.ts | 27 + .../schema/pipeline-schema-v0.json | 72 + .../schema/pipeline-schema-v1.json | 3750 +++++ .../ChangePassword/ChangePassword.module.scss | 28 + .../ChangePassword.module.scss.d.ts | 21 + .../pages/ChangePassword/ChangePassword.tsx | 108 + web/src/pages/Compare/Compare.module.scss | 104 + .../pages/Compare/Compare.module.scss.d.ts | 26 + web/src/pages/Compare/Compare.tsx | 314 + web/src/pages/Compare/CompareCommits.tsx | 61 + .../CompareContentHeader.module.scss | 56 + .../CompareContentHeader.module.scss.d.ts | 23 + .../CompareContentHeader.tsx | 202 + web/src/pages/Execution/Execution.module.scss | 36 + .../Execution/Execution.module.scss.d.ts | 22 + web/src/pages/Execution/Execution.tsx | 163 + .../ExecutionList/ExecutionList.module.scss | 70 + .../ExecutionList.module.scss.d.ts | 26 + web/src/pages/ExecutionList/ExecutionList.tsx | 278 + web/src/pages/Home/Home.module.scss | 53 + web/src/pages/Home/Home.module.scss.d.ts | 23 + web/src/pages/Home/Home.tsx | 114 + .../PipelineList/PipelineList.module.scss | 68 + .../PipelineList.module.scss.d.ts | 29 + web/src/pages/PipelineList/PipelineList.tsx | 393 + .../Checks/CheckPipelineStages.tsx | 170 + .../PullRequest/Checks/CheckPipelineSteps.tsx | 210 + .../PullRequest/Checks/Checks.module.scss | 289 + .../Checks/Checks.module.scss.d.ts | 49 + web/src/pages/PullRequest/Checks/Checks.tsx | 156 + .../pages/PullRequest/Checks/ChecksMenu.tsx | 209 + .../Checks/ChecksOverview.module.scss | 100 + .../Checks/ChecksOverview.module.scss.d.ts | 29 + .../PullRequest/Checks/ChecksOverview.tsx | 166 + .../pages/PullRequest/Checks/ChecksUtils.ts | 37 + .../Conversation/CodeCommentHeader.tsx | 71 + .../Conversation/Conversation.module.scss | 238 + .../Conversation.module.scss.d.ts | 48 + .../PullRequest/Conversation/Conversation.tsx | 475 + .../Conversation/DescriptionBox.tsx | 122 + .../PullRequestActionsBox.module.scss | 155 + .../PullRequestActionsBox.module.scss.d.ts | 37 + .../PullRequestActionsBox.tsx | 386 + .../PullRequestSideBar.module.scss | 49 + .../PullRequestSideBar.module.scss.d.ts | 23 + .../PullRequestSideBar/PullRequestSideBar.tsx | 313 + .../Conversation/SystemComment.tsx | 308 + .../pages/PullRequest/PullRequest.module.scss | 88 + .../PullRequest/PullRequest.module.scss.d.ts | 27 + web/src/pages/PullRequest/PullRequest.tsx | 355 + .../PullRequestCommits.module.scss | 18 + .../PullRequestCommits/PullRequestCommits.tsx | 61 + .../PullRequestMetaLine.module.scss | 65 + .../PullRequestMetaLine.module.scss.d.ts | 25 + .../pages/PullRequest/PullRequestMetaLine.tsx | 77 + .../PullRequestMetadataInfo.module.scss.d.ts | 25 + .../PullRequestTabContentWrapper.tsx | 43 + .../pages/PullRequest/PullRequestTitle.tsx | 140 + .../pages/PullRequest/PullRequestUtils.tsx | 30 + .../PullRequests/PullRequests.module.scss | 50 + 
.../PullRequests.module.scss.d.ts | 26 + web/src/pages/PullRequests/PullRequests.tsx | 280 + .../PullRequestsContentHeader.module.scss | 32 + ...PullRequestsContentHeader.module.scss.d.ts | 20 + .../PullRequestsContentHeader.tsx | 115 + .../RepositoriesListing.module.scss | 137 + .../RepositoriesListing.module.scss.d.ts | 33 + .../RepositoriesListing.tsx | 243 + web/src/pages/RepositoriesListing/no-repo.svg | 1 + .../EmptyRepositoryInfo.module.scss | 61 + .../EmptyRepositoryInfo.module.scss.d.ts | 25 + .../pages/Repository/EmptyRepositoryInfo.tsx | 159 + .../pages/Repository/Repository.module.scss | 72 + .../Repository/Repository.module.scss.d.ts | 25 + web/src/pages/Repository/Repository.tsx | 107 + .../ContentHeader/ContentHeader.module.scss | 214 + .../ContentHeader.module.scss.d.ts | 32 + .../ContentHeader/ContentHeader.tsx | 308 + .../ContentHeader/search-background.svg | 7 + .../FileContent/FileContent.module.scss | 123 + .../FileContent/FileContent.module.scss.d.ts | 28 + .../FileContent/FileContent.tsx | 484 + .../FileContent/GitBlame.module.scss | 108 + .../FileContent/GitBlame.module.scss.d.ts | 26 + .../FileContent/GitBlame.tsx | 362 + .../RenameContentHistory.module.scss | 54 + .../RenameContentHistory.module.scss.d.ts | 21 + .../FileContent/RenameContentHistory.tsx | 175 + .../FileContent/lineWidget.ts | 77 + .../FolderContent/FolderContent.module.scss | 106 + .../FolderContent.module.scss.d.ts | 32 + .../FolderContent/FolderContent.tsx | 350 + .../FolderContent/Readme.module.scss | 40 + .../FolderContent/Readme.module.scss.d.ts | 22 + .../FolderContent/Readme.tsx | 138 + .../RepositoryContent.module.scss | 22 + .../RepositoryContent.module.scss.d.ts | 19 + .../RepositoryContent/RepositoryContent.tsx | 59 + .../RepositoryHeader.module.scss | 38 + .../RepositoryHeader.module.scss.d.ts | 22 + .../RepositoryHeader/RepositoryHeader.tsx | 42 + .../RepositoryTree/ResourceTree.module.scss | 51 + .../ResourceTree.module.scss.d.ts | 25 + .../RepositoryTree/ResourceTree.tsx | 74 + .../Repository/RepositoryTree/TreeExample.tsx | 58 + .../Repository/RepositoryTree/demodata.ts | 454 + .../Repository/RepositoryTree/renderers.tsx | 150 + .../RepositoryBranches.module.scss | 20 + .../RepositoryBranches.module.scss.d.ts | 19 + .../RepositoryBranches/RepositoryBranches.tsx | 45 + .../BranchesContent.module.scss | 91 + .../BranchesContent.module.scss.d.ts | 27 + .../BranchesContent/BranchesContent.tsx | 246 + .../BranchesContentHeader.module.scss | 36 + .../BranchesContentHeader.module.scss.d.ts | 20 + .../BranchesContentHeader.tsx | 87 + .../RepositoryBranchesContent.module.scss | 23 + ...RepositoryBranchesContent.module.scss.d.ts | 20 + .../RepositoryBranchesContent.tsx | 106 + .../RepositoryCommit.module.scss | 34 + .../RepositoryCommit.module.scss.d.ts | 22 + .../RepositoryCommit/RepositoryCommit.tsx | 103 + .../RepositoryCommits.module.scss | 35 + .../RepositoryCommits.module.scss.d.ts | 22 + .../RepositoryCommits/RepositoryCommits.tsx | 116 + .../FileEditor/FileEditor.module.scss | 126 + .../FileEditor/FileEditor.module.scss.d.ts | 27 + .../FileEditor/FileEditor.tsx | 298 + .../RepositoryFileEdit.module.scss | 46 + .../RepositoryFileEdit.module.scss.d.ts | 20 + .../RepositoryFileEdit/RepositoryFileEdit.tsx | 63 + .../RepositoryFileEditHeader.module.scss | 40 + .../RepositoryFileEditHeader.module.scss.d.ts | 22 + .../RepositoryFileEditHeader.tsx | 56 + .../DeleteRepoModal/DeleteRepoModal.tsx | 138 + .../GeneralSettingsContent.tsx | 189 + .../RepositorySettings.module.scss | 104 + 
.../RepositorySettings.module.scss.d.ts | 30 + .../RepositorySettings/RepositorySettings.tsx | 70 + .../RepositorySettingsContent.tsx | 25 + .../RepositorySettings/SettingsContent.tsx | 78 + .../RepositorySettings/mockWebhooks.json | 14 + .../RepositoryTags/RepositoryTags.module.scss | 20 + .../RepositoryTags.module.scss.d.ts | 19 + .../pages/RepositoryTags/RepositoryTags.tsx | 41 + .../RepositoryTagsContent.module.scss | 23 + .../RepositoryTagsContent.module.scss.d.ts | 20 + .../RepositoryTagsContent.tsx | 139 + .../RepositoryTagsContentHeader.module.scss | 36 + ...positoryTagsContentHeader.module.scss.d.ts | 20 + .../RepositoryTagsContentHeader.tsx | 66 + .../TagsContent/TagsContent.module.scss | 80 + .../TagsContent/TagsContent.module.scss.d.ts | 27 + .../TagsContent/TagsContent.tsx | 246 + web/src/pages/Search/Search.module.scss | 245 + web/src/pages/Search/Search.module.scss.d.ts | 37 + web/src/pages/Search/Search.tsx | 359 + web/src/pages/Secret/Secret.module.scss | 20 + web/src/pages/Secret/Secret.module.scss.d.ts | 19 + web/src/pages/Secret/Secret.tsx | 47 + .../pages/SecretList/SecretList.module.scss | 92 + .../SecretList/SecretList.module.scss.d.ts | 29 + web/src/pages/SecretList/SecretList.tsx | 228 + web/src/pages/Settings/Settings.tsx | 22 + web/src/pages/SignIn/SignIn.module.scss | 20 + web/src/pages/SignIn/SignIn.module.scss.d.ts | 19 + web/src/pages/SignIn/SignIn.tsx | 126 + web/src/pages/SignUp/SignUp.module.scss | 129 + web/src/pages/SignUp/SignUp.module.scss.d.ts | 25 + web/src/pages/SignUp/SignUp.tsx | 147 + .../AddNewMember/AddNewMember.tsx | 166 + .../SpaceAccessControl.module.scss | 28 + .../SpaceAccessControl.module.scss.d.ts | 20 + .../SpaceAccessControl/SpaceAccessControl.tsx | 161 + .../DeleteSpaceModal/DeleteSpaceModal.tsx | 136 + .../SpaceSettings/ExportForm/ExportForm.tsx | 348 + .../SpaceSettings/SpaceSettings.module.scss | 207 + .../SpaceSettings.module.scss.d.ts | 45 + web/src/pages/SpaceSettings/SpaceSettings.tsx | 489 + .../pages/UserProfile/EditableTextField.tsx | 78 + .../pages/UserProfile/NewToken/NewToken.tsx | 176 + .../pages/UserProfile/UserProfile.module.scss | 76 + .../UserProfile/UserProfile.module.scss.d.ts | 28 + web/src/pages/UserProfile/UserProfile.tsx | 255 + .../UsersListing/UsersListing.module.scss | 27 + .../UsersListing.module.scss.d.ts | 20 + web/src/pages/UsersListing/UsersListing.tsx | 221 + .../pages/WebhookDetails/WebhookDetails.tsx | 66 + web/src/pages/WebhookNew/WebhookNew.tsx | 52 + .../pages/WebhookNew/WehookForm.module.scss | 126 + .../WebhookNew/WehookForm.module.scss.d.ts | 36 + web/src/pages/WebhookNew/WehookForm.tsx | 345 + web/src/pages/Webhooks/Webhooks.module.scss | 61 + .../pages/Webhooks/Webhooks.module.scss.d.ts | 27 + web/src/pages/Webhooks/Webhooks.tsx | 344 + .../WebhooksHeader/WebhooksHeader.module.scss | 41 + .../WebhooksHeader.module.scss.d.ts | 21 + .../WebhooksHeader/WebhooksHeader.tsx | 59 + web/src/public-path.ts | 19 + web/src/services/code/index.tsx | 5140 +++++++ web/src/services/code/overrides.yaml | 2 + web/src/services/code/swagger.yaml | 7921 +++++++++++ web/src/services/config.ts | 25 + web/src/utils/ExecutionUtils.ts | 38 + web/src/utils/FileUtils.ts | 497 + web/src/utils/GitUtils.ts | 268 + web/src/utils/Utils.ts | 407 + .../codemirror/addClassToLinesExtension.tsx | 82 + web/src/utils/test/testUtils.module.scss | 50 + web/src/utils/test/testUtils.module.scss.d.ts | 22 + web/src/utils/test/testUtils.tsx | 180 + web/src/utils/types.ts | 27 + web/src/utils/utils.scss | 31 + web/src/utils/vars.scss | 
26 + web/tsconfig-eslint.json | 5 + web/tsconfig.json | 28 + web/typed-scss-modules.config.js | 20 + web/yarn.lock | 11569 ++++++++++++++++
 1729 files changed, 192634 insertions(+)
 create mode 100644 .dockerignore create mode 100644 .gitignore create mode 100644 .gitleaksignore create mode 100644 .golangci.yml create mode 100644 .local.env create mode 100644 Dockerfile create mode 100644 LICENSE.md create mode 100644 Makefile create mode 100644 README.md create mode 100644 cache/cache.go create mode 100644 cache/cache_test.go create mode 100644 cache/no_cache.go create mode 100644 cache/redis_cache.go create mode 100644 cache/ttl_cache.go create mode 100644 cli/cli.go create mode 100644 cli/operations/account/login.go create mode 100644 cli/operations/account/logout.go create mode 100644 cli/operations/account/register.go create mode 100644 cli/operations/hooks/hooks.go create mode 100644 cli/operations/migrate/current.go create mode 100644 cli/operations/migrate/migrate.go create mode 100644 cli/operations/migrate/to.go create mode 100644 cli/operations/user/create_pat.go create mode 100644 cli/operations/user/self.go create mode 100644 cli/operations/user/users.go create mode 100644 cli/operations/users/create.go create mode 100644 cli/operations/users/delete.go create mode 100644 cli/operations/users/find.go create mode 100644 cli/operations/users/list.go create mode 100644 cli/operations/users/update.go create mode 100644 cli/operations/users/users.go create mode 100644 cli/provide/provider.go create mode 100644 cli/server/config.go create mode 100644 cli/server/redis.go create mode 100644 cli/server/server.go create mode 100644 cli/server/system.go create mode 100644 cli/session/session.go create mode 100644 cli/swagger.go create mode 100644 cli/textui/input.go create mode 100644 client/client.go create mode 100644 client/interface.go create mode 100644 cmd/gitness-githook/main.go create mode 100644 cmd/gitness/driver_pq.go create mode 100644 cmd/gitness/driver_sqlite.go create mode 100644 cmd/gitness/main.go create mode 100644 cmd/gitness/wire.go create mode 100644 cmd/gitness/wire_gen.go create mode 100644 cmd/gitrpcserver/config.go create mode 100644 cmd/gitrpcserver/main.go create mode 100644 cmd/gitrpcserver/redis.go create mode 100644 cmd/gitrpcserver/system.go create mode 100644 cmd/gitrpcserver/wire.go create mode 100644 cmd/gitrpcserver/wire_gen.go create mode 100644 encrypt/aesgcm.go create mode 100644 encrypt/encrypt.go create mode 100644 encrypt/none.go create mode 100644 encrypt/wire.go create mode 100644 events/error.go create mode 100644 events/events.go create mode 100644 events/options.go create mode 100644 events/reader.go create mode 100644 events/reporter.go create mode 100644 events/stream.go create mode 100644 events/system.go create mode 100644 events/wire.go create mode 100644 githook/cli.go create mode 100644 githook/client.go create mode 100644 githook/core.go create mode 100644 githook/env.go create mode 100644 githook/types.go create mode 100644 gitrpc/blame.go create mode 100644 gitrpc/blob.go create mode 100644 gitrpc/branch.go create mode 100644 gitrpc/check/branch.go create mode 100644 gitrpc/check/branch_test.go create mode 100644 gitrpc/client.go create mode 100644 gitrpc/commit.go create mode 100644 gitrpc/common.go create mode 100644 gitrpc/config.go create mode 100644 gitrpc/diff.go create mode 100644 gitrpc/diff/diff.go create mode 100644 gitrpc/enum/entry.go create mode 100644 gitrpc/enum/hunk_headers.go create mode 100644 gitrpc/enum/merge.go
create mode 100644 gitrpc/enum/ref.go create mode 100644 gitrpc/errors.go create mode 100644 gitrpc/hash/aggregate_xor.go create mode 100644 gitrpc/hash/aggregate_xor_test.go create mode 100644 gitrpc/hash/git.go create mode 100644 gitrpc/hash/hash.go create mode 100644 gitrpc/hash/source.go create mode 100644 gitrpc/hash/source_test.go create mode 100644 gitrpc/interface.go create mode 100644 gitrpc/internal/files/file.go create mode 100644 gitrpc/internal/gitea/blame.go create mode 100644 gitrpc/internal/gitea/blame_test.go create mode 100644 gitrpc/internal/gitea/blob.go create mode 100644 gitrpc/internal/gitea/branch.go create mode 100644 gitrpc/internal/gitea/commit.go create mode 100644 gitrpc/internal/gitea/config.go create mode 100644 gitrpc/internal/gitea/diff.go create mode 100644 gitrpc/internal/gitea/errors.go create mode 100644 gitrpc/internal/gitea/gitea.go create mode 100644 gitrpc/internal/gitea/gitea_test.go create mode 100644 gitrpc/internal/gitea/gogit.go create mode 100644 gitrpc/internal/gitea/last_commit_cache.go create mode 100644 gitrpc/internal/gitea/mapping.go create mode 100644 gitrpc/internal/gitea/match_files.go create mode 100644 gitrpc/internal/gitea/merge.go create mode 100644 gitrpc/internal/gitea/paths_details.go create mode 100644 gitrpc/internal/gitea/ref.go create mode 100644 gitrpc/internal/gitea/repo.go create mode 100644 gitrpc/internal/gitea/submodule.go create mode 100644 gitrpc/internal/gitea/tag.go create mode 100644 gitrpc/internal/gitea/tree.go create mode 100644 gitrpc/internal/gitea/vars.go create mode 100644 gitrpc/internal/middleware/error.go create mode 100644 gitrpc/internal/middleware/log.go create mode 100644 gitrpc/internal/parser/diff_cut.go create mode 100644 gitrpc/internal/parser/diff_cut_test.go create mode 100644 gitrpc/internal/parser/diff_headers.go create mode 100644 gitrpc/internal/parser/diff_headers_test.go create mode 100644 gitrpc/internal/parser/hunk.go create mode 100644 gitrpc/internal/service/blame.go create mode 100644 gitrpc/internal/service/blob.go create mode 100644 gitrpc/internal/service/branch.go create mode 100644 gitrpc/internal/service/commit.go create mode 100644 gitrpc/internal/service/diff.go create mode 100644 gitrpc/internal/service/env.go create mode 100644 gitrpc/internal/service/errors.go create mode 100644 gitrpc/internal/service/http.go create mode 100644 gitrpc/internal/service/interface.go create mode 100644 gitrpc/internal/service/mapping.go create mode 100644 gitrpc/internal/service/match_files.go create mode 100644 gitrpc/internal/service/merge.go create mode 100644 gitrpc/internal/service/operations.go create mode 100644 gitrpc/internal/service/operations_test.go create mode 100644 gitrpc/internal/service/path.go create mode 100644 gitrpc/internal/service/pipeline.go create mode 100644 gitrpc/internal/service/push.go create mode 100644 gitrpc/internal/service/ref.go create mode 100644 gitrpc/internal/service/repo.go create mode 100644 gitrpc/internal/service/shared_repo.go create mode 100644 gitrpc/internal/service/submodule.go create mode 100644 gitrpc/internal/service/tag.go create mode 100644 gitrpc/internal/service/tree.go create mode 100644 gitrpc/internal/service/upload.go create mode 100644 gitrpc/internal/slices/slice.go create mode 100644 gitrpc/internal/storage/local.go create mode 100644 gitrpc/internal/streamio/stream.go create mode 100644 gitrpc/internal/tempdir/file.go create mode 100644 gitrpc/internal/tools.go create mode 100644 gitrpc/internal/types/errors.go create mode 
100644 gitrpc/internal/types/hunk.go create mode 100644 gitrpc/internal/types/types.go create mode 100644 gitrpc/kuberesolver.go create mode 100644 gitrpc/log_interceptor.go create mode 100644 gitrpc/mapping.go create mode 100644 gitrpc/match_files.go create mode 100644 gitrpc/merge.go create mode 100644 gitrpc/operations.go create mode 100644 gitrpc/params.go create mode 100644 gitrpc/pipeline.go create mode 100644 gitrpc/proto/blame.proto create mode 100644 gitrpc/proto/diff.proto create mode 100644 gitrpc/proto/http.proto create mode 100644 gitrpc/proto/merge.proto create mode 100644 gitrpc/proto/operations.proto create mode 100644 gitrpc/proto/push.proto create mode 100644 gitrpc/proto/ref.proto create mode 100644 gitrpc/proto/repo.proto create mode 100644 gitrpc/proto/shared.proto create mode 100644 gitrpc/push_remote.go create mode 100644 gitrpc/ref.go create mode 100644 gitrpc/repo.go create mode 100644 gitrpc/rpc/blame.pb.go create mode 100644 gitrpc/rpc/blame_grpc.pb.go create mode 100644 gitrpc/rpc/constants.go create mode 100644 gitrpc/rpc/diff.pb.go create mode 100644 gitrpc/rpc/diff_grpc.pb.go create mode 100644 gitrpc/rpc/http.pb.go create mode 100644 gitrpc/rpc/http_grpc.pb.go create mode 100644 gitrpc/rpc/merge.pb.go create mode 100644 gitrpc/rpc/merge_grpc.pb.go create mode 100644 gitrpc/rpc/operations.pb.go create mode 100644 gitrpc/rpc/operations_grpc.pb.go create mode 100644 gitrpc/rpc/push.pb.go create mode 100644 gitrpc/rpc/push_grpc.pb.go create mode 100644 gitrpc/rpc/ref.pb.go create mode 100644 gitrpc/rpc/ref_grpc.pb.go create mode 100644 gitrpc/rpc/repo.pb.go create mode 100644 gitrpc/rpc/repo_grpc.pb.go create mode 100644 gitrpc/rpc/shared.pb.go create mode 100644 gitrpc/server/config.go create mode 100644 gitrpc/server/cron/clean_slate_data.go create mode 100644 gitrpc/server/cron/clean_slate_data_test.go create mode 100644 gitrpc/server/cron/manager.go create mode 100644 gitrpc/server/cron/manager_test.go create mode 100644 gitrpc/server/cron/wire.go create mode 100644 gitrpc/server/http.go create mode 100644 gitrpc/server/http_log.go create mode 100644 gitrpc/server/server.go create mode 100644 gitrpc/server/wire.go create mode 100644 gitrpc/smarthttp.go create mode 100644 gitrpc/sort.go create mode 100644 gitrpc/stream.go create mode 100644 gitrpc/submodule.go create mode 100644 gitrpc/tag.go create mode 100644 gitrpc/tree.go create mode 100644 gitrpc/upload.go create mode 100644 gitrpc/validate.go create mode 100644 gitrpc/wire.go create mode 100644 go.mod create mode 100644 go.sum create mode 100644 http/server.go create mode 100644 internal/api/api.go create mode 100644 internal/api/auth/auth.go create mode 100644 internal/api/auth/connector.go create mode 100644 internal/api/auth/pipeline.go create mode 100644 internal/api/auth/repo.go create mode 100644 internal/api/auth/secret.go create mode 100644 internal/api/auth/service.go create mode 100644 internal/api/auth/service_account.go create mode 100644 internal/api/auth/space.go create mode 100644 internal/api/auth/template.go create mode 100644 internal/api/auth/user.go create mode 100644 internal/api/controller/check/check_list.go create mode 100644 internal/api/controller/check/check_report.go create mode 100644 internal/api/controller/check/controller.go create mode 100644 internal/api/controller/check/wire.go create mode 100644 internal/api/controller/connector/controller.go create mode 100644 internal/api/controller/connector/create.go create mode 100644 internal/api/controller/connector/delete.go 
create mode 100644 internal/api/controller/connector/find.go create mode 100644 internal/api/controller/connector/update.go create mode 100644 internal/api/controller/connector/wire.go create mode 100644 internal/api/controller/execution/cancel.go create mode 100644 internal/api/controller/execution/controller.go create mode 100644 internal/api/controller/execution/create.go create mode 100644 internal/api/controller/execution/delete.go create mode 100644 internal/api/controller/execution/find.go create mode 100644 internal/api/controller/execution/list.go create mode 100644 internal/api/controller/execution/wire.go create mode 100644 internal/api/controller/githook/controller.go create mode 100644 internal/api/controller/githook/post_receive.go create mode 100644 internal/api/controller/githook/pre_receive.go create mode 100644 internal/api/controller/githook/update.go create mode 100644 internal/api/controller/githook/wire.go create mode 100644 internal/api/controller/logs/controller.go create mode 100644 internal/api/controller/logs/find.go create mode 100644 internal/api/controller/logs/tail.go create mode 100644 internal/api/controller/logs/wire.go create mode 100644 internal/api/controller/pipeline/controller.go create mode 100644 internal/api/controller/pipeline/create.go create mode 100644 internal/api/controller/pipeline/delete.go create mode 100644 internal/api/controller/pipeline/find.go create mode 100644 internal/api/controller/pipeline/update.go create mode 100644 internal/api/controller/pipeline/wire.go create mode 100644 internal/api/controller/plugin/controller.go create mode 100644 internal/api/controller/plugin/list.go create mode 100644 internal/api/controller/plugin/wire.go create mode 100644 internal/api/controller/principal/controller.go create mode 100644 internal/api/controller/principal/interface.go create mode 100644 internal/api/controller/principal/search.go create mode 100644 internal/api/controller/principal/wire.go create mode 100644 internal/api/controller/pullreq/activity_list.go create mode 100644 internal/api/controller/pullreq/comment_create.go create mode 100644 internal/api/controller/pullreq/comment_delete.go create mode 100644 internal/api/controller/pullreq/comment_status.go create mode 100644 internal/api/controller/pullreq/comment_update.go create mode 100644 internal/api/controller/pullreq/controller.go create mode 100644 internal/api/controller/pullreq/file_view_add.go create mode 100644 internal/api/controller/pullreq/file_view_delete.go create mode 100644 internal/api/controller/pullreq/file_view_list.go create mode 100644 internal/api/controller/pullreq/locks.go create mode 100644 internal/api/controller/pullreq/mapper.go create mode 100644 internal/api/controller/pullreq/merge.go create mode 100644 internal/api/controller/pullreq/pr_commits.go create mode 100644 internal/api/controller/pullreq/pr_create.go create mode 100644 internal/api/controller/pullreq/pr_find.go create mode 100644 internal/api/controller/pullreq/pr_list.go create mode 100644 internal/api/controller/pullreq/pr_recheck.go create mode 100644 internal/api/controller/pullreq/pr_state.go create mode 100644 internal/api/controller/pullreq/pr_update.go create mode 100644 internal/api/controller/pullreq/review_submit.go create mode 100644 internal/api/controller/pullreq/reviewer_add.go create mode 100644 internal/api/controller/pullreq/reviewer_delete.go create mode 100644 internal/api/controller/pullreq/reviewer_list.go create mode 100644 
internal/api/controller/pullreq/wire.go create mode 100644 internal/api/controller/repo/blame.go create mode 100644 internal/api/controller/repo/commit.go create mode 100644 internal/api/controller/repo/content_get.go create mode 100644 internal/api/controller/repo/content_paths_details.go create mode 100644 internal/api/controller/repo/controller.go create mode 100644 internal/api/controller/repo/create.go create mode 100644 internal/api/controller/repo/create_branch.go create mode 100644 internal/api/controller/repo/create_commit_tag.go create mode 100644 internal/api/controller/repo/delete.go create mode 100644 internal/api/controller/repo/delete_branch.go create mode 100644 internal/api/controller/repo/delete_tag.go create mode 100644 internal/api/controller/repo/diff.go create mode 100644 internal/api/controller/repo/find.go create mode 100644 internal/api/controller/repo/get_branch.go create mode 100644 internal/api/controller/repo/get_commit.go create mode 100644 internal/api/controller/repo/get_commit_divergences.go create mode 100644 internal/api/controller/repo/import.go create mode 100644 internal/api/controller/repo/import_cancel.go create mode 100644 internal/api/controller/repo/import_progress.go create mode 100644 internal/api/controller/repo/list_branches.go create mode 100644 internal/api/controller/repo/list_commit_tags.go create mode 100644 internal/api/controller/repo/list_commits.go create mode 100644 internal/api/controller/repo/list_pipelines.go create mode 100644 internal/api/controller/repo/list_service_accounts.go create mode 100644 internal/api/controller/repo/merge_check.go create mode 100644 internal/api/controller/repo/move.go create mode 100644 internal/api/controller/repo/pipeline_generate.go create mode 100644 internal/api/controller/repo/raw.go create mode 100644 internal/api/controller/repo/update.go create mode 100644 internal/api/controller/repo/wire.go create mode 100644 internal/api/controller/secret/controller.go create mode 100644 internal/api/controller/secret/create.go create mode 100644 internal/api/controller/secret/delete.go create mode 100644 internal/api/controller/secret/find.go create mode 100644 internal/api/controller/secret/update.go create mode 100644 internal/api/controller/secret/wire.go create mode 100644 internal/api/controller/service/controller.go create mode 100644 internal/api/controller/service/create.go create mode 100644 internal/api/controller/service/delete.go create mode 100644 internal/api/controller/service/find.go create mode 100644 internal/api/controller/service/list.go create mode 100644 internal/api/controller/service/update.go create mode 100644 internal/api/controller/service/update_admin.go create mode 100644 internal/api/controller/service/wire.go create mode 100644 internal/api/controller/serviceaccount/controller.go create mode 100644 internal/api/controller/serviceaccount/create.go create mode 100644 internal/api/controller/serviceaccount/create_token.go create mode 100644 internal/api/controller/serviceaccount/delete.go create mode 100644 internal/api/controller/serviceaccount/delete_token.go create mode 100644 internal/api/controller/serviceaccount/find.go create mode 100644 internal/api/controller/serviceaccount/list_token.go create mode 100644 internal/api/controller/serviceaccount/wire.go create mode 100644 internal/api/controller/space/controller.go create mode 100644 internal/api/controller/space/create.go create mode 100644 internal/api/controller/space/delete.go create mode 100644 
internal/api/controller/space/events.go create mode 100644 internal/api/controller/space/export.go create mode 100644 internal/api/controller/space/export_progress.go create mode 100644 internal/api/controller/space/find.go create mode 100644 internal/api/controller/space/import.go create mode 100644 internal/api/controller/space/list_connectors.go create mode 100644 internal/api/controller/space/list_repositories.go create mode 100644 internal/api/controller/space/list_secrets.go create mode 100644 internal/api/controller/space/list_service_accounts.go create mode 100644 internal/api/controller/space/list_spaces.go create mode 100644 internal/api/controller/space/list_templates.go create mode 100644 internal/api/controller/space/membership_add.go create mode 100644 internal/api/controller/space/membership_delete.go create mode 100644 internal/api/controller/space/membership_list.go create mode 100644 internal/api/controller/space/membership_update.go create mode 100644 internal/api/controller/space/move.go create mode 100644 internal/api/controller/space/update.go create mode 100644 internal/api/controller/space/wire.go create mode 100644 internal/api/controller/system/controller.go create mode 100644 internal/api/controller/system/wire.go create mode 100644 internal/api/controller/template/controller.go create mode 100644 internal/api/controller/template/create.go create mode 100644 internal/api/controller/template/delete.go create mode 100644 internal/api/controller/template/find.go create mode 100644 internal/api/controller/template/update.go create mode 100644 internal/api/controller/template/wire.go create mode 100644 internal/api/controller/trigger/common.go create mode 100644 internal/api/controller/trigger/controller.go create mode 100644 internal/api/controller/trigger/create.go create mode 100644 internal/api/controller/trigger/delete.go create mode 100644 internal/api/controller/trigger/find.go create mode 100644 internal/api/controller/trigger/list.go create mode 100644 internal/api/controller/trigger/update.go create mode 100644 internal/api/controller/trigger/wire.go create mode 100644 internal/api/controller/tx.go create mode 100644 internal/api/controller/user/controller.go create mode 100644 internal/api/controller/user/create.go create mode 100644 internal/api/controller/user/create_access_token.go create mode 100644 internal/api/controller/user/delete.go create mode 100644 internal/api/controller/user/delete_token.go create mode 100644 internal/api/controller/user/find.go create mode 100644 internal/api/controller/user/find_email.go create mode 100644 internal/api/controller/user/list.go create mode 100644 internal/api/controller/user/list_tokens.go create mode 100644 internal/api/controller/user/login.go create mode 100644 internal/api/controller/user/logout.go create mode 100644 internal/api/controller/user/membership_spaces.go create mode 100644 internal/api/controller/user/register.go create mode 100644 internal/api/controller/user/update.go create mode 100644 internal/api/controller/user/update_admin.go create mode 100644 internal/api/controller/user/wire.go create mode 100644 internal/api/controller/util.go create mode 100644 internal/api/controller/webhook/common.go create mode 100644 internal/api/controller/webhook/controller.go create mode 100644 internal/api/controller/webhook/create.go create mode 100644 internal/api/controller/webhook/delete.go create mode 100644 internal/api/controller/webhook/find.go create mode 100644 
internal/api/controller/webhook/find_execution.go create mode 100644 internal/api/controller/webhook/list.go create mode 100644 internal/api/controller/webhook/list_executions.go create mode 100644 internal/api/controller/webhook/retrigger_execution.go create mode 100644 internal/api/controller/webhook/update.go create mode 100644 internal/api/controller/webhook/wire.go create mode 100644 internal/api/handler/account/cookie.go create mode 100644 internal/api/handler/account/login.go create mode 100644 internal/api/handler/account/login_test.go create mode 100644 internal/api/handler/account/logout.go create mode 100644 internal/api/handler/account/register.go create mode 100644 internal/api/handler/account/register_test.go create mode 100644 internal/api/handler/check/check_list.go create mode 100644 internal/api/handler/check/check_report.go create mode 100644 internal/api/handler/connector/create.go create mode 100644 internal/api/handler/connector/delete.go create mode 100644 internal/api/handler/connector/find.go create mode 100644 internal/api/handler/connector/update.go create mode 100644 internal/api/handler/execution/cancel.go create mode 100644 internal/api/handler/execution/create.go create mode 100644 internal/api/handler/execution/delete.go create mode 100644 internal/api/handler/execution/find.go create mode 100644 internal/api/handler/execution/list.go create mode 100644 internal/api/handler/githook/post_receive.go create mode 100644 internal/api/handler/githook/pre_receive.go create mode 100644 internal/api/handler/githook/update.go create mode 100644 internal/api/handler/logs/find.go create mode 100644 internal/api/handler/logs/tail.go create mode 100644 internal/api/handler/pipeline/create.go create mode 100644 internal/api/handler/pipeline/delete.go create mode 100644 internal/api/handler/pipeline/find.go create mode 100644 internal/api/handler/pipeline/update.go create mode 100644 internal/api/handler/plugin/list.go create mode 100644 internal/api/handler/principal/search.go create mode 100644 internal/api/handler/pullreq/activity_list.go create mode 100644 internal/api/handler/pullreq/comment_create.go create mode 100644 internal/api/handler/pullreq/comment_delete.go create mode 100644 internal/api/handler/pullreq/comment_status.go create mode 100644 internal/api/handler/pullreq/comment_update.go create mode 100644 internal/api/handler/pullreq/file_view_add.go create mode 100644 internal/api/handler/pullreq/file_view_delete.go create mode 100644 internal/api/handler/pullreq/file_view_list.go create mode 100644 internal/api/handler/pullreq/merge.go create mode 100644 internal/api/handler/pullreq/pr_commits.go create mode 100644 internal/api/handler/pullreq/pr_create.go create mode 100644 internal/api/handler/pullreq/pr_find.go create mode 100644 internal/api/handler/pullreq/pr_list.go create mode 100644 internal/api/handler/pullreq/pr_metadata.go create mode 100644 internal/api/handler/pullreq/pr_recheck.go create mode 100644 internal/api/handler/pullreq/pr_state.go create mode 100644 internal/api/handler/pullreq/pr_update.go create mode 100644 internal/api/handler/pullreq/review_submit.go create mode 100644 internal/api/handler/pullreq/reviewer_add.go create mode 100644 internal/api/handler/pullreq/reviewer_delete.go create mode 100644 internal/api/handler/pullreq/reviewer_list.go create mode 100644 internal/api/handler/repo/blame.go create mode 100644 internal/api/handler/repo/calculate_commit_divergence.go create mode 100644 internal/api/handler/repo/commit.go create 
mode 100644 internal/api/handler/repo/content_get.go create mode 100644 internal/api/handler/repo/content_paths_details.go create mode 100644 internal/api/handler/repo/create.go create mode 100644 internal/api/handler/repo/create_branch.go create mode 100644 internal/api/handler/repo/create_commit_tag.go create mode 100644 internal/api/handler/repo/delete.go create mode 100644 internal/api/handler/repo/delete_branch.go create mode 100644 internal/api/handler/repo/delete_tag.go create mode 100644 internal/api/handler/repo/diff.go create mode 100644 internal/api/handler/repo/find.go create mode 100644 internal/api/handler/repo/get_branch.go create mode 100644 internal/api/handler/repo/get_commit.go create mode 100644 internal/api/handler/repo/http_git.go create mode 100644 internal/api/handler/repo/import.go create mode 100644 internal/api/handler/repo/import_cancel.go create mode 100644 internal/api/handler/repo/import_progress.go create mode 100644 internal/api/handler/repo/list_branches.go create mode 100644 internal/api/handler/repo/list_commit_tags.go create mode 100644 internal/api/handler/repo/list_commits.go create mode 100644 internal/api/handler/repo/list_pipelines.go create mode 100644 internal/api/handler/repo/list_service_accounts.go create mode 100644 internal/api/handler/repo/merge_check.go create mode 100644 internal/api/handler/repo/move.go create mode 100644 internal/api/handler/repo/pipeline_generate.go create mode 100644 internal/api/handler/repo/raw.go create mode 100644 internal/api/handler/repo/update.go create mode 100644 internal/api/handler/repo/upload_file.go create mode 100644 internal/api/handler/resource/resource.go create mode 100644 internal/api/handler/secret/create.go create mode 100644 internal/api/handler/secret/delete.go create mode 100644 internal/api/handler/secret/find.go create mode 100644 internal/api/handler/secret/update.go create mode 100644 internal/api/handler/serviceaccount/create.go create mode 100644 internal/api/handler/serviceaccount/create_token.go create mode 100644 internal/api/handler/serviceaccount/delete.go create mode 100644 internal/api/handler/serviceaccount/delete_token.go create mode 100644 internal/api/handler/serviceaccount/find.go create mode 100644 internal/api/handler/serviceaccount/list_tokens.go create mode 100644 internal/api/handler/space/create.go create mode 100644 internal/api/handler/space/delete.go create mode 100644 internal/api/handler/space/events.go create mode 100644 internal/api/handler/space/export.go create mode 100644 internal/api/handler/space/export_progress.go create mode 100644 internal/api/handler/space/find.go create mode 100644 internal/api/handler/space/import.go create mode 100644 internal/api/handler/space/list.go create mode 100644 internal/api/handler/space/list_connectors.go create mode 100644 internal/api/handler/space/list_repos.go create mode 100644 internal/api/handler/space/list_secrets.go create mode 100644 internal/api/handler/space/list_service_accounts.go create mode 100644 internal/api/handler/space/list_templates.go create mode 100644 internal/api/handler/space/membership_add.go create mode 100644 internal/api/handler/space/membership_delete.go create mode 100644 internal/api/handler/space/membership_list.go create mode 100644 internal/api/handler/space/membership_update.go create mode 100644 internal/api/handler/space/move.go create mode 100644 internal/api/handler/space/update.go create mode 100644 internal/api/handler/system/health.go create mode 100644 
internal/api/handler/system/health_test.go create mode 100644 internal/api/handler/system/list_config.go create mode 100644 internal/api/handler/system/version.go create mode 100644 internal/api/handler/system/version_test.go create mode 100644 internal/api/handler/template/create.go create mode 100644 internal/api/handler/template/delete.go create mode 100644 internal/api/handler/template/find.go create mode 100644 internal/api/handler/template/update.go create mode 100644 internal/api/handler/trigger/create.go create mode 100644 internal/api/handler/trigger/delete.go create mode 100644 internal/api/handler/trigger/find.go create mode 100644 internal/api/handler/trigger/list.go create mode 100644 internal/api/handler/trigger/update.go create mode 100644 internal/api/handler/user/create_access_token.go create mode 100644 internal/api/handler/user/delete_token.go create mode 100644 internal/api/handler/user/find.go create mode 100644 internal/api/handler/user/list_tokens.go create mode 100644 internal/api/handler/user/membership_spaces.go create mode 100644 internal/api/handler/user/update.go create mode 100644 internal/api/handler/user/update_admin.go create mode 100644 internal/api/handler/users/create.go create mode 100644 internal/api/handler/users/create_test.go create mode 100644 internal/api/handler/users/delete.go create mode 100644 internal/api/handler/users/delete_test.go create mode 100644 internal/api/handler/users/find.go create mode 100644 internal/api/handler/users/find_test.go create mode 100644 internal/api/handler/users/list.go create mode 100644 internal/api/handler/users/list_test.go create mode 100644 internal/api/handler/users/update.go create mode 100644 internal/api/handler/users/update_test.go create mode 100644 internal/api/handler/webhook/create.go create mode 100644 internal/api/handler/webhook/delete.go create mode 100644 internal/api/handler/webhook/find.go create mode 100644 internal/api/handler/webhook/find_execution.go create mode 100644 internal/api/handler/webhook/list.go create mode 100644 internal/api/handler/webhook/list_executions.go create mode 100644 internal/api/handler/webhook/retrigger_execution.go create mode 100644 internal/api/handler/webhook/update.go create mode 100644 internal/api/middleware/address/address.go create mode 100644 internal/api/middleware/address/address_test.go create mode 100644 internal/api/middleware/authn/authn.go create mode 100644 internal/api/middleware/encode/encode.go create mode 100644 internal/api/middleware/logging/logging.go create mode 100644 internal/api/middleware/principal/principal.go create mode 100644 internal/api/openapi/account.go create mode 100644 internal/api/openapi/check.go create mode 100644 internal/api/openapi/common.go create mode 100644 internal/api/openapi/connector.go create mode 100644 internal/api/openapi/openapi.go create mode 100644 internal/api/openapi/openapi_test.go create mode 100644 internal/api/openapi/pipeline.go create mode 100644 internal/api/openapi/plugin.go create mode 100644 internal/api/openapi/principals.go create mode 100644 internal/api/openapi/pullreq.go create mode 100644 internal/api/openapi/repo.go create mode 100644 internal/api/openapi/resource.go create mode 100644 internal/api/openapi/secret.go create mode 100644 internal/api/openapi/space.go create mode 100644 internal/api/openapi/system.go create mode 100644 internal/api/openapi/template.go create mode 100644 internal/api/openapi/user.go create mode 100644 internal/api/openapi/users.go create mode 100644 
internal/api/openapi/webhook.go create mode 100644 internal/api/render/header.go create mode 100644 internal/api/render/header_test.go create mode 100644 internal/api/render/platform/render.go create mode 100644 internal/api/render/render.go create mode 100644 internal/api/render/render_test.go create mode 100644 internal/api/render/util.go create mode 100644 internal/api/render/util_test.go create mode 100644 internal/api/request/auth.go create mode 100644 internal/api/request/check.go create mode 100644 internal/api/request/connector.go create mode 100644 internal/api/request/context.go create mode 100644 internal/api/request/context_test.go create mode 100644 internal/api/request/git.go create mode 100644 internal/api/request/header.go create mode 100644 internal/api/request/membership.go create mode 100644 internal/api/request/pipeline.go create mode 100644 internal/api/request/principal.go create mode 100644 internal/api/request/pullreq.go create mode 100644 internal/api/request/repo.go create mode 100644 internal/api/request/secret.go create mode 100644 internal/api/request/space.go create mode 100644 internal/api/request/template.go create mode 100644 internal/api/request/token.go create mode 100644 internal/api/request/util.go create mode 100644 internal/api/request/util_test.go create mode 100644 internal/api/request/webhook.go create mode 100644 internal/api/usererror/translate.go create mode 100644 internal/api/usererror/usererror.go create mode 100644 internal/api/usererror/usererror_test.go create mode 100644 internal/auth/authn/authenticator.go create mode 100644 internal/auth/authn/jwt.go create mode 100644 internal/auth/authn/wire.go create mode 100644 internal/auth/authz/authz.go create mode 100644 internal/auth/authz/membership.go create mode 100644 internal/auth/authz/membership_cache.go create mode 100644 internal/auth/authz/unsafe.go create mode 100644 internal/auth/authz/wire.go create mode 100644 internal/auth/metadata.go create mode 100644 internal/auth/session.go create mode 100644 internal/bootstrap/bootstrap.go create mode 100644 internal/bootstrap/wire.go create mode 100644 internal/config/url.go create mode 100644 internal/cron/nightly.go create mode 100644 internal/cron/nightly_test.go create mode 100644 internal/cron/wire.go create mode 100644 internal/events/git/branch.go create mode 100644 internal/events/git/events.go create mode 100644 internal/events/git/reader.go create mode 100644 internal/events/git/reporter.go create mode 100644 internal/events/git/tag.go create mode 100644 internal/events/git/wire.go create mode 100644 internal/events/pullreq/category.go create mode 100644 internal/events/pullreq/events.go create mode 100644 internal/events/pullreq/events_branch.go create mode 100644 internal/events/pullreq/events_state.go create mode 100644 internal/events/pullreq/reader.go create mode 100644 internal/events/pullreq/reporter.go create mode 100644 internal/events/pullreq/wire.go create mode 100644 internal/githook/githook.go create mode 100644 internal/inernal_test.go create mode 100644 internal/internal.go create mode 100644 internal/jwt/jwt.go create mode 100644 internal/paths/paths.go create mode 100644 internal/pipeline/canceler/canceler.go create mode 100644 internal/pipeline/canceler/wire.go create mode 100644 internal/pipeline/checks/write.go create mode 100644 internal/pipeline/commit/gitness.go create mode 100644 internal/pipeline/commit/service.go create mode 100644 internal/pipeline/commit/wire.go create mode 100644 
internal/pipeline/file/gitness.go create mode 100644 internal/pipeline/file/service.go create mode 100644 internal/pipeline/file/wire.go create mode 100644 internal/pipeline/manager/client.go create mode 100644 internal/pipeline/manager/convert.go create mode 100644 internal/pipeline/manager/manager.go create mode 100644 internal/pipeline/manager/setup.go create mode 100644 internal/pipeline/manager/teardown.go create mode 100644 internal/pipeline/manager/updater.go create mode 100644 internal/pipeline/manager/wire.go create mode 100644 internal/pipeline/plugin/manager.go create mode 100644 internal/pipeline/plugin/wire.go create mode 100644 internal/pipeline/runner/poller.go create mode 100644 internal/pipeline/runner/runner.go create mode 100644 internal/pipeline/runner/wire.go create mode 100644 internal/pipeline/scheduler/canceler.go create mode 100644 internal/pipeline/scheduler/queue.go create mode 100644 internal/pipeline/scheduler/scheduler.go create mode 100644 internal/pipeline/scheduler/wire.go create mode 100644 internal/pipeline/triggerer/dag/dag.go create mode 100644 internal/pipeline/triggerer/dag/dag_test.go create mode 100644 internal/pipeline/triggerer/skip.go create mode 100644 internal/pipeline/triggerer/trigger.go create mode 100644 internal/pipeline/triggerer/wire.go create mode 100644 internal/request/request.go create mode 100644 internal/router/api.go create mode 100644 internal/router/git.go create mode 100644 internal/router/router.go create mode 100644 internal/router/router_test.go create mode 100644 internal/router/web.go create mode 100644 internal/router/wire.go create mode 100644 internal/server/server.go create mode 100644 internal/server/server_test.go create mode 100644 internal/server/wire.go create mode 100644 internal/services/codecomments/migrator.go create mode 100644 internal/services/codecomments/migrator_test.go create mode 100644 internal/services/codecomments/wire.go create mode 100644 internal/services/exporter/harness_code_client.go create mode 100644 internal/services/exporter/repository.go create mode 100644 internal/services/exporter/wire.go create mode 100644 internal/services/importer/id.go create mode 100644 internal/services/importer/pipelines.go create mode 100644 internal/services/importer/provider.go create mode 100644 internal/services/importer/repository.go create mode 100644 internal/services/importer/wire.go create mode 100644 internal/services/job/definition.go create mode 100644 internal/services/job/executor.go create mode 100644 internal/services/job/job_overdue.go create mode 100644 internal/services/job/job_purge.go create mode 100644 internal/services/job/lock.go create mode 100644 internal/services/job/pubsub.go create mode 100644 internal/services/job/scheduler.go create mode 100644 internal/services/job/timer.go create mode 100644 internal/services/job/timer_test.go create mode 100644 internal/services/job/uid.go create mode 100644 internal/services/job/wire.go create mode 100644 internal/services/metric/metrics.go create mode 100644 internal/services/metric/wire.go create mode 100644 internal/services/pullreq/handlers_branch.go create mode 100644 internal/services/pullreq/handlers_code_comments.go create mode 100644 internal/services/pullreq/handlers_counters.go create mode 100644 internal/services/pullreq/handlers_file_viewed.go create mode 100644 internal/services/pullreq/handlers_head_ref.go create mode 100644 internal/services/pullreq/handlers_mergeable.go create mode 100644 internal/services/pullreq/service.go 
create mode 100644 internal/services/pullreq/wire.go create mode 100644 internal/services/trigger/handler_branch.go create mode 100644 internal/services/trigger/handler_pullreq.go create mode 100644 internal/services/trigger/handler_tag.go create mode 100644 internal/services/trigger/service.go create mode 100644 internal/services/trigger/wire.go create mode 100644 internal/services/webhook/events.go create mode 100644 internal/services/webhook/handler_branch.go create mode 100644 internal/services/webhook/handler_pullreq.go create mode 100644 internal/services/webhook/handler_tag.go create mode 100644 internal/services/webhook/http_client.go create mode 100644 internal/services/webhook/service.go create mode 100644 internal/services/webhook/trigger.go create mode 100644 internal/services/webhook/types.go create mode 100644 internal/services/webhook/wire.go create mode 100644 internal/services/wire.go create mode 100644 internal/sse/sse.go create mode 100644 internal/sse/wire.go create mode 100644 internal/store/cache.go create mode 100644 internal/store/cache/path.go create mode 100644 internal/store/cache/wire.go create mode 100644 internal/store/database.go create mode 100644 internal/store/database/check.go create mode 100644 internal/store/database/check_req.go create mode 100644 internal/store/database/code_comment.go create mode 100644 internal/store/database/connector.go create mode 100644 internal/store/database/encode.go create mode 100644 internal/store/database/execution.go create mode 100644 internal/store/database/execution_map.go create mode 100644 internal/store/database/job.go create mode 100644 internal/store/database/membership.go create mode 100644 internal/store/database/migrate/migrate.go create mode 100644 internal/store/database/migrate/postgres/0000_create_extension_btree.up.sql create mode 100644 internal/store/database/migrate/postgres/0000_create_extension_citext.up.sql create mode 100644 internal/store/database/migrate/postgres/0000_create_extension_trgm.up.sql create mode 100644 internal/store/database/migrate/postgres/0001_create_table_a_principals.up.sql create mode 100644 internal/store/database/migrate/postgres/0001_create_table_b_spaces.up.sql create mode 100644 internal/store/database/migrate/postgres/0001_create_table_c_repositories.up.sql create mode 100644 internal/store/database/migrate/postgres/0001_create_table_d_paths.up.sql create mode 100644 internal/store/database/migrate/postgres/0001_create_table_e_tokens.up.sql create mode 100644 internal/store/database/migrate/postgres/0002_create_index_paths_repo_id_is_primary.up.sql create mode 100644 internal/store/database/migrate/postgres/0002_create_index_paths_space_id_is_primary.up.sql create mode 100644 internal/store/database/migrate/postgres/0002_create_index_principals_lower_email.up.sql create mode 100644 internal/store/database/migrate/postgres/0002_create_index_principals_sa_parent_id_sa_parent_type.up.sql create mode 100644 internal/store/database/migrate/postgres/0002_create_index_repositories_parent_id.up.sql create mode 100644 internal/store/database/migrate/postgres/0002_create_index_spaces_parent_id.up.sql create mode 100644 internal/store/database/migrate/postgres/0002_create_index_tokens_principal_id.up.sql create mode 100644 internal/store/database/migrate/postgres/0003_create_table_pullreqs.up.sql create mode 100644 internal/store/database/migrate/postgres/0004_create_index_pullreqs_source_repo_branch_target_repo_branch.up.sql create mode 100644 
internal/store/database/migrate/postgres/0004_create_index_pullreqs_target_repo_id_number.up.sql create mode 100644 internal/store/database/migrate/postgres/0005_create_table_pullreq_activities.up.sql create mode 100644 internal/store/database/migrate/postgres/0006_create_index_pullreq_activities_pullreq_id_order_sub_order.up.sql create mode 100644 internal/store/database/migrate/postgres/0007_create_table_webhooks.up.sql create mode 100644 internal/store/database/migrate/postgres/0008_create_index_webhooks_repo_id.up.sql create mode 100644 internal/store/database/migrate/postgres/0008_create_index_webhooks_space_id.up.sql create mode 100644 internal/store/database/migrate/postgres/0009_create_table_webhook_executions.up.sql create mode 100644 internal/store/database/migrate/postgres/0010_create_index_webhook_executions_webhook_id.up.sql create mode 100644 internal/store/database/migrate/postgres/0011_create_table_pullreq_reviews.up.sql create mode 100644 internal/store/database/migrate/postgres/0012_create_index_pullreq_reviews_pullreq_id.up.sql create mode 100644 internal/store/database/migrate/postgres/0013_create_table_pullreq_reviewers.down.sql create mode 100644 internal/store/database/migrate/postgres/0013_create_table_pullreq_reviewers.up.sql create mode 100644 internal/store/database/migrate/postgres/0014_alter_pullreq_activity_code_comments.down.sql create mode 100644 internal/store/database/migrate/postgres/0014_alter_pullreq_activity_code_comments.up.sql create mode 100644 internal/store/database/migrate/postgres/0015_alter_pullreq_merge_base_not_nullable.down.sql create mode 100644 internal/store/database/migrate/postgres/0015_alter_pullreq_merge_base_not_nullable.up.sql create mode 100644 internal/store/database/migrate/postgres/0016_alter_pullreq_add_unresolved.down.sql create mode 100644 internal/store/database/migrate/postgres/0016_alter_pullreq_add_unresolved.up.sql create mode 100644 internal/store/database/migrate/postgres/0017_create_table_checks.down.sql create mode 100644 internal/store/database/migrate/postgres/0017_create_table_checks.up.sql create mode 100644 internal/store/database/migrate/postgres/0018_alter_check_add_payload_version.down.sql create mode 100644 internal/store/database/migrate/postgres/0018_alter_check_add_payload_version.up.sql create mode 100644 internal/store/database/migrate/postgres/0019_create_table_memberships.down.sql create mode 100644 internal/store/database/migrate/postgres/0019_create_table_memberships.up.sql create mode 100644 internal/store/database/migrate/postgres/0020_alter_pullreq_source_repo_id_constraint.down.sql create mode 100644 internal/store/database/migrate/postgres/0020_alter_pullreq_source_repo_id_constraint.up.sql create mode 100644 internal/store/database/migrate/postgres/0021_alter_table_webhook_add_internal_down.sql create mode 100644 internal/store/database/migrate/postgres/0021_alter_table_webhook_add_internal_up.sql create mode 100644 internal/store/database/migrate/postgres/0022_create_table_jobs.down.sql create mode 100644 internal/store/database/migrate/postgres/0022_create_table_jobs.up.sql create mode 100644 internal/store/database/migrate/postgres/0023_index_jobs_last_executed.down.sql create mode 100644 internal/store/database/migrate/postgres/0023_index_jobs_last_executed.up.sql create mode 100644 internal/store/database/migrate/postgres/0024_alter_repo_add_importing.down.sql create mode 100644 internal/store/database/migrate/postgres/0024_alter_repo_add_importing.up.sql create mode 100644 
internal/store/database/migrate/postgres/0025_alter_table_job_add_group_id.down.sql create mode 100644 internal/store/database/migrate/postgres/0025_alter_table_job_add_group_id.up.sql create mode 100644 internal/store/database/migrate/postgres/0026_alter_repo_drop_job_id.up.sql create mode 100644 internal/store/database/migrate/postgres/0026_alter_repo_drop_join_id.down.sql create mode 100644 internal/store/database/migrate/postgres/0027_create_ci_tables.down.sql create mode 100644 internal/store/database/migrate/postgres/0027_create_ci_tables.up.sql create mode 100644 internal/store/database/migrate/postgres/0028_alter_token_drop_grants.down.sql create mode 100644 internal/store/database/migrate/postgres/0028_alter_token_drop_grants.up.sql create mode 100644 internal/store/database/migrate/postgres/0029_create_index_job_job_group_id_down.sql create mode 100644 internal/store/database/migrate/postgres/0029_create_index_job_job_group_id_up.sql create mode 100644 internal/store/database/migrate/postgres/0030_create_table_space_paths.down.sql create mode 100644 internal/store/database/migrate/postgres/0030_create_table_space_paths.up.sql create mode 100644 internal/store/database/migrate/postgres/0031_alter_index_repositories.down.sql create mode 100644 internal/store/database/migrate/postgres/0031_alter_index_repositories.up.sql create mode 100644 internal/store/database/migrate/postgres/0032_create_table_pullreq_file_views.down.sql create mode 100644 internal/store/database/migrate/postgres/0032_create_table_pullreq_file_views.up.sql create mode 100644 internal/store/database/migrate/postgres/0033_alter_ci_tables.up.sql create mode 100644 internal/store/database/migrate/sqlite/0001_create_table_a_principals.up.sql create mode 100644 internal/store/database/migrate/sqlite/0001_create_table_b_spaces.up.sql create mode 100644 internal/store/database/migrate/sqlite/0001_create_table_c_repositories.up.sql create mode 100644 internal/store/database/migrate/sqlite/0001_create_table_d_paths.up.sql create mode 100644 internal/store/database/migrate/sqlite/0001_create_table_e_tokens.up.sql create mode 100644 internal/store/database/migrate/sqlite/0002_create_index_paths_repo_id_is_primary.up.sql create mode 100644 internal/store/database/migrate/sqlite/0002_create_index_paths_space_id_is_primary.up.sql create mode 100644 internal/store/database/migrate/sqlite/0002_create_index_principals_lower_email.up.sql create mode 100644 internal/store/database/migrate/sqlite/0002_create_index_principals_sa_parent_id_sa_parent_type.up.sql create mode 100644 internal/store/database/migrate/sqlite/0002_create_index_repositories_parent_id.up.sql create mode 100644 internal/store/database/migrate/sqlite/0002_create_index_spaces_parent_id.up.sql create mode 100644 internal/store/database/migrate/sqlite/0002_create_index_tokens_principal_id.up.sql create mode 100644 internal/store/database/migrate/sqlite/0003_create_table_pullreqs.up.sql create mode 100644 internal/store/database/migrate/sqlite/0004_create_index_pullreqs_source_repo_branch_target_repo_branch.up.sql create mode 100644 internal/store/database/migrate/sqlite/0004_create_index_pullreqs_target_repo_id_number.up.sql create mode 100644 internal/store/database/migrate/sqlite/0005_create_table_pullreq_activities.up.sql create mode 100644 internal/store/database/migrate/sqlite/0006_create_index_pullreq_activities_pullreq_id_order_sub_order.up.sql create mode 100644 internal/store/database/migrate/sqlite/0007_create_table_webhooks.up.sql create mode 100644 
internal/store/database/migrate/sqlite/0008_create_index_webhooks_repo_id.up.sql create mode 100644 internal/store/database/migrate/sqlite/0008_create_index_webhooks_space_id.up.sql create mode 100644 internal/store/database/migrate/sqlite/0009_create_table_webhook_executions.up.sql create mode 100644 internal/store/database/migrate/sqlite/0010_create_index_webhook_executions_webhook_id.up.sql create mode 100644 internal/store/database/migrate/sqlite/0011_create_table_pullreq_reviews.up.sql create mode 100644 internal/store/database/migrate/sqlite/0012_create_index_pullreq_reviews_pullreq_id.up.sql create mode 100644 internal/store/database/migrate/sqlite/0013_create_table_pullreq_reviewers.down.sql create mode 100644 internal/store/database/migrate/sqlite/0013_create_table_pullreq_reviewers.up.sql create mode 100644 internal/store/database/migrate/sqlite/0014_alter_pullreq_activity_code_comments.down.sql create mode 100644 internal/store/database/migrate/sqlite/0014_alter_pullreq_activity_code_comments.up.sql create mode 100644 internal/store/database/migrate/sqlite/0015_alter_pullreq_merge_base_not_nullable.down.sql create mode 100644 internal/store/database/migrate/sqlite/0015_alter_pullreq_merge_base_not_nullable.up.sql create mode 100644 internal/store/database/migrate/sqlite/0016_alter_pullreq_add_unresolved.down.sql create mode 100644 internal/store/database/migrate/sqlite/0016_alter_pullreq_add_unresolved.up.sql create mode 100644 internal/store/database/migrate/sqlite/0017_create_table_checks.down.sql create mode 100644 internal/store/database/migrate/sqlite/0017_create_table_checks.up.sql create mode 100644 internal/store/database/migrate/sqlite/0018_alter_check_add_payload_version.down.sql create mode 100644 internal/store/database/migrate/sqlite/0018_alter_check_add_payload_version.up.sql create mode 100644 internal/store/database/migrate/sqlite/0019_create_table_memberships.down.sql create mode 100644 internal/store/database/migrate/sqlite/0019_create_table_memberships.up.sql create mode 100644 internal/store/database/migrate/sqlite/0020_alter_pullreq_source_repo_id_constraint.down.sql create mode 100644 internal/store/database/migrate/sqlite/0020_alter_pullreq_source_repo_id_constraint.up.sql create mode 100644 internal/store/database/migrate/sqlite/0021_alter_table_webhook_add_internal_down.sql create mode 100644 internal/store/database/migrate/sqlite/0021_alter_table_webhook_add_internal_up.sql create mode 100644 internal/store/database/migrate/sqlite/0022_create_table_jobs.down.sql create mode 100644 internal/store/database/migrate/sqlite/0022_create_table_jobs.up.sql create mode 100644 internal/store/database/migrate/sqlite/0023_index_jobs_last_executed.down.sql create mode 100644 internal/store/database/migrate/sqlite/0023_index_jobs_last_executed.up.sql create mode 100644 internal/store/database/migrate/sqlite/0024_alter_repo_add_importing.down.sql create mode 100644 internal/store/database/migrate/sqlite/0024_alter_repo_add_importing.up.sql create mode 100644 internal/store/database/migrate/sqlite/0025_alter_table_job_add_group_id.down.sql create mode 100644 internal/store/database/migrate/sqlite/0025_alter_table_job_add_group_id.up.sql create mode 100644 internal/store/database/migrate/sqlite/0026_alter_repo_drop_job_id.down.sql create mode 100644 internal/store/database/migrate/sqlite/0026_alter_repo_drop_job_id.up.sql create mode 100644 internal/store/database/migrate/sqlite/0027_create_ci_tables.down.sql create mode 100644 
internal/store/database/migrate/sqlite/0027_create_ci_tables.up.sql create mode 100644 internal/store/database/migrate/sqlite/0028_alter_token_drop_grants.down.sql create mode 100644 internal/store/database/migrate/sqlite/0028_alter_token_drop_grants.up.sql create mode 100644 internal/store/database/migrate/sqlite/0029_create_index_job_job_group_id_down.sql create mode 100644 internal/store/database/migrate/sqlite/0029_create_index_job_job_group_id_up.sql create mode 100644 internal/store/database/migrate/sqlite/0030_create_table_space_paths.down.sql create mode 100644 internal/store/database/migrate/sqlite/0030_create_table_space_paths.up.sql create mode 100644 internal/store/database/migrate/sqlite/0031_alter_index_repositories.down.sql create mode 100644 internal/store/database/migrate/sqlite/0031_alter_index_repositories.up.sql create mode 100644 internal/store/database/migrate/sqlite/0032_create_table_pullreq_file_views.down.sql create mode 100644 internal/store/database/migrate/sqlite/0032_create_table_pullreq_file_views.up.sql create mode 100644 internal/store/database/migrate/sqlite/0033_alter_ci_tables.up.sql create mode 100644 internal/store/database/mutex/mutex.go create mode 100644 internal/store/database/pipeline.go create mode 100644 internal/store/database/pipeline_join.go create mode 100644 internal/store/database/plugin.go create mode 100644 internal/store/database/principal.go create mode 100644 internal/store/database/principal_info.go create mode 100644 internal/store/database/principal_service.go create mode 100644 internal/store/database/principal_service_account.go create mode 100644 internal/store/database/principal_user.go create mode 100644 internal/store/database/pullreq.go create mode 100644 internal/store/database/pullreq_activity.go create mode 100644 internal/store/database/pullreq_file_view_store.go create mode 100644 internal/store/database/pullreq_reviewers.go create mode 100644 internal/store/database/pullreq_reviews.go create mode 100644 internal/store/database/repo.go create mode 100644 internal/store/database/repo_git_info.go create mode 100644 internal/store/database/secret.go create mode 100644 internal/store/database/space.go create mode 100644 internal/store/database/space_path.go create mode 100644 internal/store/database/stage.go create mode 100644 internal/store/database/stage_map.go create mode 100644 internal/store/database/step.go create mode 100644 internal/store/database/step_map.go create mode 100644 internal/store/database/store_test.go create mode 100644 internal/store/database/template.go create mode 100644 internal/store/database/testdata/repos.json create mode 100644 internal/store/database/testdata/spaces.json create mode 100644 internal/store/database/testdata/users.json create mode 100644 internal/store/database/token.go create mode 100644 internal/store/database/trigger.go create mode 100644 internal/store/database/webhook.go create mode 100644 internal/store/database/webhook_execution.go create mode 100644 internal/store/database/wire.go create mode 100644 internal/store/logs.go create mode 100644 internal/store/logs/combine.go create mode 100644 internal/store/logs/db.go create mode 100644 internal/store/logs/s3.go create mode 100644 internal/store/logs/wire.go create mode 100644 internal/store/store_test.go create mode 100644 internal/store/transformation.go create mode 100644 internal/store/wire.go create mode 100644 internal/testing/integration/integration.go create mode 100644 internal/testing/testing.go create mode 100644 
internal/token/token.go create mode 100644 internal/url/provider.go create mode 100644 internal/url/wire.go create mode 100644 internal/writer/writeflush.go create mode 100644 livelog/livelog.go create mode 100644 livelog/memory.go create mode 100644 livelog/stream.go create mode 100644 livelog/sub.go create mode 100644 livelog/wire.go create mode 100644 lock/config.go create mode 100644 lock/lock.go create mode 100644 lock/memory.go create mode 100644 lock/memory_test.go create mode 100644 lock/options.go create mode 100644 lock/redis.go create mode 100644 lock/util.go create mode 100644 lock/wire.go create mode 100644 profiler/gcpprofiler.go create mode 100644 profiler/noopprofiler.go create mode 100644 profiler/profiler.go create mode 100644 profiler/profiler_test.go create mode 100644 pubsub/config.go create mode 100644 pubsub/inmem.go create mode 100644 pubsub/options.go create mode 100644 pubsub/pubsub.go create mode 100644 pubsub/redis.go create mode 100644 pubsub/wire.go create mode 100644 resources/embed.go create mode 100644 resources/gitignore/AL.gitignore create mode 100644 resources/gitignore/Actionscript.gitignore create mode 100644 resources/gitignore/Ada.gitignore create mode 100644 resources/gitignore/Agda.gitignore create mode 100644 resources/gitignore/Android.gitignore create mode 100644 resources/gitignore/AppEngine.gitignore create mode 100644 resources/gitignore/AppceleratorTitanium.gitignore create mode 100644 resources/gitignore/ArchLinuxPackages.gitignore create mode 100644 resources/gitignore/Autotools.gitignore create mode 100644 resources/gitignore/C++.gitignore create mode 100644 resources/gitignore/C.gitignore create mode 100644 resources/gitignore/CFWheels.gitignore create mode 100644 resources/gitignore/CMake.gitignore create mode 100644 resources/gitignore/CONTRIBUTING.md create mode 100644 resources/gitignore/CUDA.gitignore create mode 100644 resources/gitignore/CakePHP.gitignore create mode 100644 resources/gitignore/ChefCookbook.gitignore create mode 100644 resources/gitignore/Clojure.gitignore create mode 100644 resources/gitignore/CodeIgniter.gitignore create mode 100644 resources/gitignore/CommonLisp.gitignore create mode 100644 resources/gitignore/Composer.gitignore create mode 100644 resources/gitignore/Concrete5.gitignore create mode 100644 resources/gitignore/Coq.gitignore create mode 100644 resources/gitignore/CraftCMS.gitignore create mode 100644 resources/gitignore/D.gitignore create mode 100644 resources/gitignore/DM.gitignore create mode 100644 resources/gitignore/Dart.gitignore create mode 100644 resources/gitignore/Delphi.gitignore create mode 100644 resources/gitignore/Drupal.gitignore create mode 100644 resources/gitignore/EPiServer.gitignore create mode 100644 resources/gitignore/Eagle.gitignore create mode 100644 resources/gitignore/Elisp.gitignore create mode 100644 resources/gitignore/Elixir.gitignore create mode 100644 resources/gitignore/Elm.gitignore create mode 100644 resources/gitignore/Erlang.gitignore create mode 100644 resources/gitignore/ExpressionEngine.gitignore create mode 100644 resources/gitignore/ExtJs.gitignore create mode 100644 resources/gitignore/Fancy.gitignore create mode 100644 resources/gitignore/Finale.gitignore create mode 100644 resources/gitignore/FlaxEngine.gitignore create mode 100644 resources/gitignore/ForceDotCom.gitignore create mode 100644 resources/gitignore/Fortran.gitignore create mode 100644 resources/gitignore/FuelPHP.gitignore create mode 100644 resources/gitignore/GWT.gitignore create mode 
100644 resources/gitignore/Gcov.gitignore create mode 100644 resources/gitignore/GitBook.gitignore create mode 100644 resources/gitignore/Go.gitignore create mode 100644 resources/gitignore/Godot.gitignore create mode 100644 resources/gitignore/Gradle.gitignore create mode 100644 resources/gitignore/Grails.gitignore create mode 100644 resources/gitignore/Haskell.gitignore create mode 100644 resources/gitignore/IGORPro.gitignore create mode 100644 resources/gitignore/Idris.gitignore create mode 100644 resources/gitignore/JBoss.gitignore create mode 100644 resources/gitignore/JENKINS_HOME.gitignore create mode 100644 resources/gitignore/Java.gitignore create mode 100644 resources/gitignore/Jekyll.gitignore create mode 100644 resources/gitignore/Joomla.gitignore create mode 100644 resources/gitignore/Julia.gitignore create mode 100644 resources/gitignore/KiCad.gitignore create mode 100644 resources/gitignore/Kohana.gitignore create mode 100644 resources/gitignore/Kotlin.gitignore create mode 100644 resources/gitignore/LICENSE create mode 100644 resources/gitignore/LabVIEW.gitignore create mode 100644 resources/gitignore/Laravel.gitignore create mode 100644 resources/gitignore/Leiningen.gitignore create mode 100644 resources/gitignore/LemonStand.gitignore create mode 100644 resources/gitignore/Lilypond.gitignore create mode 100644 resources/gitignore/Lithium.gitignore create mode 100644 resources/gitignore/Lua.gitignore create mode 100644 resources/gitignore/Magento.gitignore create mode 100644 resources/gitignore/Maven.gitignore create mode 100644 resources/gitignore/Mercury.gitignore create mode 100644 resources/gitignore/MetaProgrammingSystem.gitignore create mode 100644 resources/gitignore/Nanoc.gitignore create mode 100644 resources/gitignore/Nim.gitignore create mode 100644 resources/gitignore/Node.gitignore create mode 100644 resources/gitignore/OCaml.gitignore create mode 100644 resources/gitignore/Objective-C.gitignore create mode 100644 resources/gitignore/Opa.gitignore create mode 100644 resources/gitignore/OpenCart.gitignore create mode 100644 resources/gitignore/OracleForms.gitignore create mode 100644 resources/gitignore/Packer.gitignore create mode 100644 resources/gitignore/Perl.gitignore create mode 100644 resources/gitignore/Phalcon.gitignore create mode 100644 resources/gitignore/PlayFramework.gitignore create mode 100644 resources/gitignore/Plone.gitignore create mode 100644 resources/gitignore/Prestashop.gitignore create mode 100644 resources/gitignore/Processing.gitignore create mode 100644 resources/gitignore/PureScript.gitignore create mode 100644 resources/gitignore/Python.gitignore create mode 100644 resources/gitignore/Qooxdoo.gitignore create mode 100644 resources/gitignore/Qt.gitignore create mode 100644 resources/gitignore/R.gitignore create mode 100644 resources/gitignore/README.md create mode 100644 resources/gitignore/ROS.gitignore create mode 100644 resources/gitignore/Racket.gitignore create mode 100644 resources/gitignore/Rails.gitignore create mode 100644 resources/gitignore/Raku.gitignore create mode 100644 resources/gitignore/RhodesRhomobile.gitignore create mode 100644 resources/gitignore/Ruby.gitignore create mode 100644 resources/gitignore/Rust.gitignore create mode 100644 resources/gitignore/SCons.gitignore create mode 100644 resources/gitignore/Sass.gitignore create mode 100644 resources/gitignore/Scala.gitignore create mode 100644 resources/gitignore/Scheme.gitignore create mode 100644 resources/gitignore/Scrivener.gitignore create mode 100644 
resources/gitignore/Sdcc.gitignore create mode 100644 resources/gitignore/SeamGen.gitignore create mode 100644 resources/gitignore/SketchUp.gitignore create mode 100644 resources/gitignore/Smalltalk.gitignore create mode 100644 resources/gitignore/Stella.gitignore create mode 100644 resources/gitignore/SugarCRM.gitignore create mode 100644 resources/gitignore/Swift.gitignore create mode 100644 resources/gitignore/Symfony.gitignore create mode 100644 resources/gitignore/SymphonyCMS.gitignore create mode 100644 resources/gitignore/TeX.gitignore create mode 100644 resources/gitignore/Terraform.gitignore create mode 100644 resources/gitignore/Textpattern.gitignore create mode 100644 resources/gitignore/TurboGears2.gitignore create mode 100644 resources/gitignore/TwinCAT3.gitignore create mode 100644 resources/gitignore/Typo3.gitignore create mode 100644 resources/gitignore/Unity.gitignore create mode 100644 resources/gitignore/UnrealEngine.gitignore create mode 100644 resources/gitignore/VVVV.gitignore create mode 100644 resources/gitignore/VisualStudio.gitignore create mode 100644 resources/gitignore/Waf.gitignore create mode 100644 resources/gitignore/WordPress.gitignore create mode 100644 resources/gitignore/Xojo.gitignore create mode 100644 resources/gitignore/Yeoman.gitignore create mode 100644 resources/gitignore/Yii.gitignore create mode 100644 resources/gitignore/ZendFramework.gitignore create mode 100644 resources/gitignore/Zephir.gitignore create mode 100644 resources/license/afl-3.0.txt create mode 100644 resources/license/agpl-3.0.txt create mode 100644 resources/license/apache-2.0.txt create mode 100644 resources/license/artistic-2.0.txt create mode 100644 resources/license/bsd-2-clause.txt create mode 100644 resources/license/bsd-3-clause-clear.txt create mode 100644 resources/license/bsd-3-clause.txt create mode 100644 resources/license/bsl-1.0.txt create mode 100644 resources/license/cc-by-4.0.txt create mode 100644 resources/license/cc-by-sa-4.0.txt create mode 100644 resources/license/cc.txt create mode 100644 resources/license/cc0-1.0.txt create mode 100644 resources/license/ecl-2.0.txt create mode 100644 resources/license/epl-1.0.txt create mode 100644 resources/license/epl-2.0.txt create mode 100644 resources/license/eupl-1.1.txt create mode 100644 resources/license/gpl-2.0.txt create mode 100644 resources/license/gpl-3.0.txt create mode 100644 resources/license/gpl.txt create mode 100644 resources/license/index.json create mode 100644 resources/license/isc.txt create mode 100644 resources/license/lgpl-2.1.txt create mode 100644 resources/license/lgpl-3.0.txt create mode 100644 resources/license/lgpl.txt create mode 100644 resources/license/mit.txt create mode 100644 resources/license/mpl-2.0.txt create mode 100644 resources/license/osl-3.0.txt create mode 100644 resources/license/unlicense.txt create mode 100644 resources/license/zlib.txt create mode 100755 scripts/wire/gitrpcserver/wire.sh create mode 100755 scripts/wire/server/standalone.sh create mode 100644 store/database/config.go create mode 100644 store/database/dbtx/ctx.go create mode 100644 store/database/dbtx/db.go create mode 100644 store/database/dbtx/interface.go create mode 100644 store/database/dbtx/locker.go create mode 100644 store/database/dbtx/runner.go create mode 100644 store/database/dbtx/runner_test.go create mode 100644 store/database/dbtx/tx.go create mode 100644 store/database/store.go create mode 100644 store/database/util.go create mode 100644 store/database/util_pq.go create mode 100644 
store/database/util_sqlite.go create mode 100644 store/database/util_test.go create mode 100644 store/errors.go create mode 100644 stream/memory_broker.go create mode 100644 stream/memory_consumer.go create mode 100644 stream/memory_producer.go create mode 100644 stream/options.go create mode 100644 stream/redis_consumer.go create mode 100644 stream/redis_producer.go create mode 100644 stream/stream.go create mode 100644 types/authz.go create mode 100644 types/check.go create mode 100644 types/check/common.go create mode 100644 types/check/error.go create mode 100644 types/check/password.go create mode 100644 types/check/path.go create mode 100644 types/check/service_account.go create mode 100644 types/check/token.go create mode 100644 types/check/wire.go create mode 100644 types/code_comment.go create mode 100644 types/config.go create mode 100644 types/config_test.go create mode 100644 types/connector.go create mode 100644 types/enum/check.go create mode 100644 types/enum/ci_status.go create mode 100644 types/enum/common.go create mode 100644 types/enum/encoding.go create mode 100644 types/enum/git.go create mode 100644 types/enum/job.go create mode 100644 types/enum/membership.go create mode 100644 types/enum/membership_role.go create mode 100644 types/enum/order.go create mode 100644 types/enum/order_test.go create mode 100644 types/enum/permission.go create mode 100644 types/enum/principal.go create mode 100644 types/enum/pullreq.go create mode 100644 types/enum/repo.go create mode 100644 types/enum/resource.go create mode 100644 types/enum/scm.go create mode 100644 types/enum/space.go create mode 100644 types/enum/sse.go create mode 100644 types/enum/token.go create mode 100644 types/enum/trigger_actions.go create mode 100644 types/enum/trigger_events.go create mode 100644 types/enum/user.go create mode 100644 types/enum/user_test.go create mode 100644 types/enum/webhook.go create mode 100644 types/execution.go create mode 100644 types/git.go create mode 100644 types/githook.go create mode 100644 types/job.go create mode 100644 types/list_filters.go create mode 100644 types/membership.go create mode 100644 types/pagination.go create mode 100644 types/path.go create mode 100644 types/pipeline.go create mode 100644 types/plugin.go create mode 100644 types/principal.go create mode 100644 types/pullreq.go create mode 100644 types/pullreq_activity.go create mode 100644 types/repo.go create mode 100644 types/secret.go create mode 100644 types/service.go create mode 100644 types/service_account.go create mode 100644 types/space.go create mode 100644 types/stage.go create mode 100644 types/step.go create mode 100644 types/stream.go create mode 100644 types/template.go create mode 100644 types/token.go create mode 100644 types/trigger.go create mode 100644 types/types_test.go create mode 100644 types/user.go create mode 100644 types/webhook.go create mode 100644 version/version.go create mode 100644 version/version_test.go create mode 100644 web/.eslintignore create mode 100644 web/.eslintrc.yml create mode 100644 web/.prettierrc.yml create mode 100644 web/.vscode/extensions.json create mode 100644 web/.vscode/settings.json create mode 100644 web/config/moduleFederation.config.js create mode 100644 web/config/webpack.common.js create mode 100644 web/config/webpack.dev.js create mode 100644 web/config/webpack.prod.js create mode 100644 web/dist.go create mode 100644 web/jest.config.js create mode 100644 web/jest.coverage.config.js create mode 100644 web/package.json create mode 100644 
web/restful-react.config.js create mode 100644 web/scripts/clean-css-types.js create mode 100644 web/scripts/eslint-rules/duplicate-data-tooltip-id.js create mode 100644 web/scripts/eslint-rules/jest-no-mock.js create mode 100644 web/scripts/eslint-rules/no-document-body-snapshot.js create mode 100644 web/scripts/jest/file-mock.js create mode 100644 web/scripts/jest/gql-loader.js create mode 100644 web/scripts/jest/setup-file.js create mode 100644 web/scripts/jest/yaml-transform.js create mode 100644 web/scripts/strings/generateTypes.cjs create mode 100644 web/scripts/strings/generateTypesCli.mjs create mode 100644 web/scripts/swagger-custom-generator.js create mode 100644 web/scripts/swagger-transform.js create mode 100644 web/scripts/utils/runPrettier.cjs create mode 100644 web/scripts/webpack/GenerateStringTypesPlugin.js create mode 100644 web/src/App.module.scss create mode 100644 web/src/App.module.scss.d.ts create mode 100644 web/src/App.tsx create mode 100644 web/src/AppContext.tsx create mode 100644 web/src/AppProps.ts create mode 100644 web/src/AppUtils.ts create mode 100644 web/src/RouteDefinitions.ts create mode 100644 web/src/RouteDestinations.tsx create mode 100644 web/src/bootstrap.scss create mode 100644 web/src/bootstrap.scss.d.ts create mode 100644 web/src/bootstrap.tsx create mode 100644 web/src/components/AuthLayout/AuthLayout.module.scss create mode 100644 web/src/components/AuthLayout/AuthLayout.module.scss.d.ts create mode 100644 web/src/components/AuthLayout/AuthLayout.tsx create mode 100644 web/src/components/BranchTagSelect/BranchTagSelect.module.scss create mode 100644 web/src/components/BranchTagSelect/BranchTagSelect.module.scss.d.ts create mode 100644 web/src/components/BranchTagSelect/BranchTagSelect.tsx create mode 100644 web/src/components/Changes/Changes.module.scss create mode 100644 web/src/components/Changes/Changes.module.scss.d.ts create mode 100644 web/src/components/Changes/Changes.tsx create mode 100644 web/src/components/Changes/ChangesDropdown.module.scss create mode 100644 web/src/components/Changes/ChangesDropdown.module.scss.d.ts create mode 100644 web/src/components/Changes/ChangesDropdown.tsx create mode 100644 web/src/components/Changes/CommitRangeDropdown/CommitRangeDropdown.tsx create mode 100644 web/src/components/Changes/DiffViewConfiguration.tsx create mode 100644 web/src/components/Changes/ReviewSplitButton/ReviewSplitButton.tsx create mode 100644 web/src/components/CloneButtonTooltip/CloneButtonTooltip.module.scss create mode 100644 web/src/components/CloneButtonTooltip/CloneButtonTooltip.module.scss.d.ts create mode 100644 web/src/components/CloneButtonTooltip/CloneButtonTooltip.tsx create mode 100644 web/src/components/CloneCredentialDialog/CloneCredentialDialog.module.scss create mode 100644 web/src/components/CloneCredentialDialog/CloneCredentialDialog.module.scss.d.ts create mode 100644 web/src/components/CloneCredentialDialog/CloneCredentialDialog.tsx create mode 100644 web/src/components/CodeCommentSecondarySaveButton/CodeCommentSecondarySaveButton.tsx create mode 100644 web/src/components/CodeCommentStatusButton/CodeCommentStatusButton.tsx create mode 100644 web/src/components/CodeCommentStatusSelect/CodeCommentStatusSelect.module.scss create mode 100644 web/src/components/CodeCommentStatusSelect/CodeCommentStatusSelect.module.scss.d.ts create mode 100644 web/src/components/CodeCommentStatusSelect/CodeCommentStatusSelect.tsx create mode 100644 web/src/components/CommentBox/CommentBox.module.scss create mode 100644 
web/src/components/CommentBox/CommentBox.module.scss.d.ts create mode 100644 web/src/components/CommentBox/CommentBox.tsx create mode 100644 web/src/components/CommitActions/CommitActions.module.scss create mode 100644 web/src/components/CommitActions/CommitActions.module.scss.d.ts create mode 100644 web/src/components/CommitActions/CommitActions.tsx create mode 100644 web/src/components/CommitDivergence/CommitDivergence.module.scss create mode 100644 web/src/components/CommitDivergence/CommitDivergence.module.scss.d.ts create mode 100644 web/src/components/CommitDivergence/CommitDivergence.tsx create mode 100644 web/src/components/CommitInfo/CommitInfo.module.scss create mode 100644 web/src/components/CommitInfo/CommitInfo.module.scss.d.ts create mode 100644 web/src/components/CommitInfo/CommitInfo.tsx create mode 100644 web/src/components/CommitModalButton/CommitModalButton.module.scss create mode 100644 web/src/components/CommitModalButton/CommitModalButton.module.scss.d.ts create mode 100644 web/src/components/CommitModalButton/CommitModalButton.tsx create mode 100644 web/src/components/CommitsView/CommitsView.module.scss create mode 100644 web/src/components/CommitsView/CommitsView.module.scss.d.ts create mode 100644 web/src/components/CommitsView/CommitsView.tsx create mode 100644 web/src/components/Console/Console.module.scss create mode 100644 web/src/components/Console/Console.module.scss.d.ts create mode 100644 web/src/components/Console/Console.tsx create mode 100644 web/src/components/ConsoleLogs/ConsoleLogs.module.scss create mode 100644 web/src/components/ConsoleLogs/ConsoleLogs.module.scss.d.ts create mode 100644 web/src/components/ConsoleLogs/ConsoleLogs.tsx create mode 100644 web/src/components/ConsoleStep/ConsoleStep.module.scss create mode 100644 web/src/components/ConsoleStep/ConsoleStep.module.scss.d.ts create mode 100644 web/src/components/ConsoleStep/ConsoleStep.tsx create mode 100644 web/src/components/CopyButton/CopyButton.tsx create mode 100644 web/src/components/CreateBranchModal/CreateBranchModal.module.scss create mode 100644 web/src/components/CreateBranchModal/CreateBranchModal.module.scss.d.ts create mode 100644 web/src/components/CreateBranchModal/CreateBranchModal.tsx create mode 100644 web/src/components/CreateTagModal/CreateTagModal.module.scss create mode 100644 web/src/components/CreateTagModal/CreateTagModal.module.scss.d.ts create mode 100644 web/src/components/CreateTagModal/CreateTagModal.tsx create mode 100644 web/src/components/DiffViewer/DiffViewer.module.scss create mode 100644 web/src/components/DiffViewer/DiffViewer.module.scss.d.ts create mode 100644 web/src/components/DiffViewer/DiffViewer.tsx create mode 100644 web/src/components/DiffViewer/DiffViewerUtils.tsx create mode 100644 web/src/components/Editor/Editor.module.scss create mode 100644 web/src/components/Editor/Editor.module.scss.d.ts create mode 100644 web/src/components/Editor/Editor.tsx create mode 100644 web/src/components/ExecutionPageHeader/ExecutionPageHeader.module.scss create mode 100644 web/src/components/ExecutionPageHeader/ExecutionPageHeader.module.scss.d.ts create mode 100644 web/src/components/ExecutionPageHeader/ExecutionPageHeader.tsx create mode 100644 web/src/components/ExecutionStageList/ExecutionStageList.module.scss create mode 100644 web/src/components/ExecutionStageList/ExecutionStageList.module.scss.d.ts create mode 100644 web/src/components/ExecutionStageList/ExecutionStageList.tsx create mode 100644 
web/src/components/ExecutionStatus/ExecutionStatus.module.scss create mode 100644 web/src/components/ExecutionStatus/ExecutionStatus.module.scss.d.ts create mode 100644 web/src/components/ExecutionStatus/ExecutionStatus.tsx create mode 100644 web/src/components/ExecutionStatusLabel/ExecutionStatusLabel.module.scss create mode 100644 web/src/components/ExecutionStatusLabel/ExecutionStatusLabel.module.scss.d.ts create mode 100644 web/src/components/ExecutionStatusLabel/ExecutionStatusLabel.tsx create mode 100644 web/src/components/ExecutionText/ExecutionText.module.scss create mode 100644 web/src/components/ExecutionText/ExecutionText.module.scss.d.ts create mode 100644 web/src/components/ExecutionText/ExecutionText.tsx create mode 100644 web/src/components/GitRefLink/GitRefLink.module.scss create mode 100644 web/src/components/GitRefLink/GitRefLink.module.scss.d.ts create mode 100644 web/src/components/GitRefLink/GitRefLink.tsx create mode 100644 web/src/components/GitRefsSelect/GitRefsSelect.tsx create mode 100644 web/src/components/GitnessLogo/GitnessLogo.module.scss create mode 100644 web/src/components/GitnessLogo/GitnessLogo.module.scss.d.ts create mode 100644 web/src/components/GitnessLogo/GitnessLogo.tsx create mode 100644 web/src/components/GitnessLogo/gitness.svg create mode 100644 web/src/components/ImageCarousel/ImageCarousel.module.scss create mode 100644 web/src/components/ImageCarousel/ImageCarousel.module.scss.d.ts create mode 100644 web/src/components/ImageCarousel/ImageCarousel.tsx create mode 100644 web/src/components/LatestCommit/LatestCommit.module.scss create mode 100644 web/src/components/LatestCommit/LatestCommit.module.scss.d.ts create mode 100644 web/src/components/LatestCommit/LatestCommit.tsx create mode 100644 web/src/components/LoadingSpinner/LoadingSpinner.module.scss create mode 100644 web/src/components/LoadingSpinner/LoadingSpinner.module.scss.d.ts create mode 100644 web/src/components/LoadingSpinner/LoadingSpinner.tsx create mode 100644 web/src/components/LogViewer/LogViewer.module.scss create mode 100644 web/src/components/LogViewer/LogViewer.module.scss.d.ts create mode 100644 web/src/components/LogViewer/LogViewer.tsx create mode 100644 web/src/components/MarkdownEditorWithPreview/MarkdownEditorWithPreview.module.scss create mode 100644 web/src/components/MarkdownEditorWithPreview/MarkdownEditorWithPreview.module.scss.d.ts create mode 100644 web/src/components/MarkdownEditorWithPreview/MarkdownEditorWithPreview.tsx create mode 100644 web/src/components/MarkdownViewer/MarkdownViewer.module.scss create mode 100644 web/src/components/MarkdownViewer/MarkdownViewer.module.scss.d.ts create mode 100644 web/src/components/MarkdownViewer/MarkdownViewer.tsx create mode 100644 web/src/components/NavigationCheck/NavigationCheck.module.scss create mode 100644 web/src/components/NavigationCheck/NavigationCheck.module.scss.d.ts create mode 100644 web/src/components/NavigationCheck/NavigationCheck.tsx create mode 100644 web/src/components/NewPipelineModal/NewPipelineModal.module.scss create mode 100644 web/src/components/NewPipelineModal/NewPipelineModal.module.scss.d.ts create mode 100644 web/src/components/NewPipelineModal/NewPipelineModal.tsx create mode 100644 web/src/components/NewRepoModalButton/ImportForm/ImportForm.tsx create mode 100644 web/src/components/NewRepoModalButton/NewRepoModalButton.module.scss create mode 100644 web/src/components/NewRepoModalButton/NewRepoModalButton.module.scss.d.ts create mode 100644 
web/src/components/NewRepoModalButton/NewRepoModalButton.tsx create mode 100644 web/src/components/NewSecretModalButton/NewSecretModalButton.tsx create mode 100644 web/src/components/NewSpaceModalButton/ImportSpaceForm/ImportSpaceForm.tsx create mode 100644 web/src/components/NewSpaceModalButton/NewSpaceModalButton.module.scss create mode 100644 web/src/components/NewSpaceModalButton/NewSpaceModalButton.module.scss.d.ts create mode 100644 web/src/components/NewSpaceModalButton/NewSpaceModalButton.tsx create mode 100644 web/src/components/NewTriggerModalButton/NewTriggerModalButton.module.scss create mode 100644 web/src/components/NewTriggerModalButton/NewTriggerModalButton.module.scss.d.ts create mode 100644 web/src/components/NewTriggerModalButton/NewTriggerModalButton.tsx create mode 100644 web/src/components/NoExecutionsCard/NoExecutionsCard.module.scss create mode 100644 web/src/components/NoExecutionsCard/NoExecutionsCard.module.scss.d.ts create mode 100644 web/src/components/NoExecutionsCard/NoExecutionsCard.tsx create mode 100644 web/src/components/NoResultCard/NoResultCard.module.scss create mode 100644 web/src/components/NoResultCard/NoResultCard.module.scss.d.ts create mode 100644 web/src/components/NoResultCard/NoResultCard.tsx create mode 100644 web/src/components/OptionsMenuButton/OptionsMenuButton.module.scss create mode 100644 web/src/components/OptionsMenuButton/OptionsMenuButton.module.scss.d.ts create mode 100644 web/src/components/OptionsMenuButton/OptionsMenuButton.tsx create mode 100644 web/src/components/PipeSeparator/PipeSeparator.tsx create mode 100644 web/src/components/PipelineSettings/PipelineSettings.module.scss create mode 100644 web/src/components/PipelineSettings/PipelineSettings.module.scss.d.ts create mode 100644 web/src/components/PipelineSettings/PipelineSettings.tsx create mode 100644 web/src/components/PipelineSettingsPageHeader/PipelineSettingsPageHeader.module.scss create mode 100644 web/src/components/PipelineSettingsPageHeader/PipelineSettingsPageHeader.module.scss.d.ts create mode 100644 web/src/components/PipelineSettingsPageHeader/PipelineSettingsPageHeader.tsx create mode 100644 web/src/components/PipelineSettingsTab/PipelineSettingsTab.module.scss create mode 100644 web/src/components/PipelineSettingsTab/PipelineSettingsTab.module.scss.d.ts create mode 100644 web/src/components/PipelineSettingsTab/PipelineSettingsTab.tsx create mode 100644 web/src/components/PipelineTriggersTab/PipelineTriggersTab.module.scss create mode 100644 web/src/components/PipelineTriggersTab/PipelineTriggersTab.module.scss.d.ts create mode 100644 web/src/components/PipelineTriggersTab/PipelineTriggersTab.tsx create mode 100644 web/src/components/PlainButton/PlainButton.module.scss create mode 100644 web/src/components/PlainButton/PlainButton.module.scss.d.ts create mode 100644 web/src/components/PlainButton/PlainButton.tsx create mode 100644 web/src/components/PluginsPanel/PluginsPanel.module.scss create mode 100644 web/src/components/PluginsPanel/PluginsPanel.module.scss.d.ts create mode 100644 web/src/components/PluginsPanel/PluginsPanel.tsx create mode 100644 web/src/components/PluginsPanel/plugins/plugins.json create mode 100644 web/src/components/PullRequestStateLabel/PullRequestStateLabel.module.scss create mode 100644 web/src/components/PullRequestStateLabel/PullRequestStateLabel.module.scss.d.ts create mode 100644 web/src/components/PullRequestStateLabel/PullRequestStateLabel.tsx create mode 100644 web/src/components/RepoMetadata/RepoMetadata.tsx create mode 
100644 web/src/components/RepoPublicLabel/RepoPublicLabel.module.scss create mode 100644 web/src/components/RepoPublicLabel/RepoPublicLabel.module.scss.d.ts create mode 100644 web/src/components/RepoPublicLabel/RepoPublicLabel.tsx create mode 100644 web/src/components/RepositoryPageHeader/RepositoryPageHeader.module.scss create mode 100644 web/src/components/RepositoryPageHeader/RepositoryPageHeader.module.scss.d.ts create mode 100644 web/src/components/RepositoryPageHeader/RepositoryPageHeader.tsx create mode 100644 web/src/components/ResourceListingPagination/ResourceListingPagination.module.scss create mode 100644 web/src/components/ResourceListingPagination/ResourceListingPagination.module.scss.d.ts create mode 100644 web/src/components/ResourceListingPagination/ResourceListingPagination.tsx create mode 100644 web/src/components/ReviewerSelect/ReviewerSelect.module.scss create mode 100644 web/src/components/ReviewerSelect/ReviewerSelect.module.scss.d.ts create mode 100644 web/src/components/ReviewerSelect/ReviewerSelect.tsx create mode 100644 web/src/components/RunPipelineModal/RunPipelineModal.module.scss create mode 100644 web/src/components/RunPipelineModal/RunPipelineModal.module.scss.d.ts create mode 100644 web/src/components/RunPipelineModal/RunPipelineModal.tsx create mode 100644 web/src/components/SearchInputWithSpinner/SearchInputWithSpinner.module.scss create mode 100644 web/src/components/SearchInputWithSpinner/SearchInputWithSpinner.module.scss.d.ts create mode 100644 web/src/components/SearchInputWithSpinner/SearchInputWithSpinner.tsx create mode 100644 web/src/components/SourceCodeEditor/MonacoSourceCodeEditor.tsx create mode 100644 web/src/components/SourceCodeEditor/SourceCodeEditor.tsx create mode 100644 web/src/components/SourceCodeViewer/SourceCodeViewer.module.scss create mode 100644 web/src/components/SourceCodeViewer/SourceCodeViewer.module.scss.d.ts create mode 100644 web/src/components/SourceCodeViewer/SourceCodeViewer.tsx create mode 100644 web/src/components/SpaceSelector/SpaceSelector.module.scss create mode 100644 web/src/components/SpaceSelector/SpaceSelector.module.scss.d.ts create mode 100644 web/src/components/SpaceSelector/SpaceSelector.tsx create mode 100644 web/src/components/Split/Split.module.scss create mode 100644 web/src/components/Split/Split.module.scss.d.ts create mode 100644 web/src/components/Split/Split.tsx create mode 100644 web/src/components/TabContentWrapper/TabContentWrapper.tsx create mode 100644 web/src/components/TabTitleWithCount/TabTitleWithCount.module.scss create mode 100644 web/src/components/TabTitleWithCount/TabTitleWithCount.module.scss.d.ts create mode 100644 web/src/components/TabTitleWithCount/TabTitleWithCount.tsx create mode 100644 web/src/components/ThreadSection/ThreadSection.module.scss create mode 100644 web/src/components/ThreadSection/ThreadSection.module.scss.d.ts create mode 100644 web/src/components/ThreadSection/ThreadSection.tsx create mode 100644 web/src/components/UpdateSecretModal/UpdateSecretModal.tsx create mode 100644 web/src/components/UserManagementFlows/AddUserModal.tsx create mode 100644 web/src/components/UserManagementFlows/ResetPassword.tsx create mode 100644 web/src/components/UserManagementFlows/UserManagementFlows.module.scss create mode 100644 web/src/components/UserManagementFlows/UserManagementFlows.module.scss.d.ts create mode 100644 web/src/favicon.svg create mode 100644 web/src/framework/AppErrorBoundary/AppErrorBoundary.i18n.json create mode 100644 
web/src/framework/AppErrorBoundary/AppErrorBoundary.tsx create mode 100644 web/src/framework/strings/String.tsx create mode 100644 web/src/framework/strings/StringsContext.tsx create mode 100644 web/src/framework/strings/StringsContextProvider.tsx create mode 100644 web/src/framework/strings/__tests__/Strings.test.tsx create mode 100644 web/src/framework/strings/index.ts create mode 100644 web/src/framework/strings/languageLoader.ts create mode 100644 web/src/framework/strings/stringTypes.ts create mode 100644 web/src/global.d.ts create mode 100644 web/src/hooks/useConfirmAction.tsx create mode 100644 web/src/hooks/useConfirmationDialog.tsx create mode 100644 web/src/hooks/useDisableCodeMainLinks.ts create mode 100644 web/src/hooks/useDocumentTitle.tsx create mode 100644 web/src/hooks/useDownloadRawFile.ts create mode 100644 web/src/hooks/useEmitCodeCommentStatus.ts create mode 100644 web/src/hooks/useEventListener.ts create mode 100644 web/src/hooks/useGetRepositoryMetadata.ts create mode 100644 web/src/hooks/useGetResourceContent.ts create mode 100644 web/src/hooks/useGetSpaceParam.ts create mode 100644 web/src/hooks/useLiveTimeHook.tsx create mode 100644 web/src/hooks/useLocalStorage.ts create mode 100644 web/src/hooks/useModalHook.tsx create mode 100644 web/src/hooks/usePRChecksDecision.tsx create mode 100644 web/src/hooks/usePageIndex.ts create mode 100644 web/src/hooks/useQueryParams.ts create mode 100644 web/src/hooks/useShowRequestError.ts create mode 100644 web/src/hooks/useSpaceSSE.tsx create mode 100644 web/src/hooks/useUpdateQueryParams.ts create mode 100644 web/src/hooks/useUserPreference.ts create mode 100644 web/src/i18n/strings.en.yaml create mode 100644 web/src/i18n/strings.es.yaml create mode 100644 web/src/icons/Branches.svg create mode 100644 web/src/icons/CodeFileFill.svg create mode 100644 web/src/icons/Harness.svg create mode 100644 web/src/icons/Submodules.svg create mode 100644 web/src/icons/Symlink.svg create mode 100644 web/src/icons/Upgrade.svg create mode 100644 web/src/icons/private.svg create mode 100644 web/src/images/404-error.svg create mode 100644 web/src/images/Subtract.png create mode 100644 web/src/images/dark-background.png create mode 100644 web/src/images/empty-state.svg create mode 100644 web/src/images/gitLogo.png create mode 100644 web/src/images/index.ts create mode 100644 web/src/images/logo-dark.png create mode 100644 web/src/images/no-space.svg create mode 100644 web/src/images/pull-request-closed.svg create mode 100644 web/src/images/pull-request-draft.svg create mode 100644 web/src/images/pull-request-merged.svg create mode 100644 web/src/images/pull-request-open.svg create mode 100644 web/src/images/pull-request-rejected.svg create mode 100644 web/src/images/pull-request-unchecked.svg create mode 100644 web/src/images/signup-old.png create mode 100644 web/src/images/signup.png create mode 100644 web/src/index.html create mode 100644 web/src/index.tsx create mode 100644 web/src/layouts/layout.module.scss create mode 100644 web/src/layouts/layout.module.scss.d.ts create mode 100644 web/src/layouts/layout.tsx create mode 100644 web/src/layouts/menu/DefaultMenu.module.scss create mode 100644 web/src/layouts/menu/DefaultMenu.module.scss.d.ts create mode 100644 web/src/layouts/menu/DefaultMenu.tsx create mode 100644 web/src/layouts/menu/NavMenuItem.module.scss create mode 100644 web/src/layouts/menu/NavMenuItem.module.scss.d.ts create mode 100644 web/src/layouts/menu/NavMenuItem.tsx create mode 100644 web/src/pages/404/NotFoundPage.tsx create 
mode 100644 web/src/pages/AddUpdatePipeline/AddUpdatePipeline.module.scss create mode 100644 web/src/pages/AddUpdatePipeline/AddUpdatePipeline.module.scss.d.ts create mode 100644 web/src/pages/AddUpdatePipeline/AddUpdatePipeline.tsx create mode 100644 web/src/pages/AddUpdatePipeline/Constants.ts create mode 100644 web/src/pages/AddUpdatePipeline/schema/pipeline-schema-v0.json create mode 100644 web/src/pages/AddUpdatePipeline/schema/pipeline-schema-v1.json create mode 100644 web/src/pages/ChangePassword/ChangePassword.module.scss create mode 100644 web/src/pages/ChangePassword/ChangePassword.module.scss.d.ts create mode 100644 web/src/pages/ChangePassword/ChangePassword.tsx create mode 100644 web/src/pages/Compare/Compare.module.scss create mode 100644 web/src/pages/Compare/Compare.module.scss.d.ts create mode 100644 web/src/pages/Compare/Compare.tsx create mode 100644 web/src/pages/Compare/CompareCommits.tsx create mode 100644 web/src/pages/Compare/CompareContentHeader/CompareContentHeader.module.scss create mode 100644 web/src/pages/Compare/CompareContentHeader/CompareContentHeader.module.scss.d.ts create mode 100644 web/src/pages/Compare/CompareContentHeader/CompareContentHeader.tsx create mode 100644 web/src/pages/Execution/Execution.module.scss create mode 100644 web/src/pages/Execution/Execution.module.scss.d.ts create mode 100644 web/src/pages/Execution/Execution.tsx create mode 100644 web/src/pages/ExecutionList/ExecutionList.module.scss create mode 100644 web/src/pages/ExecutionList/ExecutionList.module.scss.d.ts create mode 100644 web/src/pages/ExecutionList/ExecutionList.tsx create mode 100644 web/src/pages/Home/Home.module.scss create mode 100644 web/src/pages/Home/Home.module.scss.d.ts create mode 100644 web/src/pages/Home/Home.tsx create mode 100644 web/src/pages/PipelineList/PipelineList.module.scss create mode 100644 web/src/pages/PipelineList/PipelineList.module.scss.d.ts create mode 100644 web/src/pages/PipelineList/PipelineList.tsx create mode 100644 web/src/pages/PullRequest/Checks/CheckPipelineStages.tsx create mode 100644 web/src/pages/PullRequest/Checks/CheckPipelineSteps.tsx create mode 100644 web/src/pages/PullRequest/Checks/Checks.module.scss create mode 100644 web/src/pages/PullRequest/Checks/Checks.module.scss.d.ts create mode 100644 web/src/pages/PullRequest/Checks/Checks.tsx create mode 100644 web/src/pages/PullRequest/Checks/ChecksMenu.tsx create mode 100644 web/src/pages/PullRequest/Checks/ChecksOverview.module.scss create mode 100644 web/src/pages/PullRequest/Checks/ChecksOverview.module.scss.d.ts create mode 100644 web/src/pages/PullRequest/Checks/ChecksOverview.tsx create mode 100644 web/src/pages/PullRequest/Checks/ChecksUtils.ts create mode 100644 web/src/pages/PullRequest/Conversation/CodeCommentHeader.tsx create mode 100644 web/src/pages/PullRequest/Conversation/Conversation.module.scss create mode 100644 web/src/pages/PullRequest/Conversation/Conversation.module.scss.d.ts create mode 100644 web/src/pages/PullRequest/Conversation/Conversation.tsx create mode 100644 web/src/pages/PullRequest/Conversation/DescriptionBox.tsx create mode 100644 web/src/pages/PullRequest/Conversation/PullRequestActionsBox/PullRequestActionsBox.module.scss create mode 100644 web/src/pages/PullRequest/Conversation/PullRequestActionsBox/PullRequestActionsBox.module.scss.d.ts create mode 100644 web/src/pages/PullRequest/Conversation/PullRequestActionsBox/PullRequestActionsBox.tsx create mode 100644 
web/src/pages/PullRequest/Conversation/PullRequestSideBar/PullRequestSideBar.module.scss create mode 100644 web/src/pages/PullRequest/Conversation/PullRequestSideBar/PullRequestSideBar.module.scss.d.ts create mode 100644 web/src/pages/PullRequest/Conversation/PullRequestSideBar/PullRequestSideBar.tsx create mode 100644 web/src/pages/PullRequest/Conversation/SystemComment.tsx create mode 100644 web/src/pages/PullRequest/PullRequest.module.scss create mode 100644 web/src/pages/PullRequest/PullRequest.module.scss.d.ts create mode 100644 web/src/pages/PullRequest/PullRequest.tsx create mode 100644 web/src/pages/PullRequest/PullRequestCommits/PullRequestCommits.module.scss create mode 100644 web/src/pages/PullRequest/PullRequestCommits/PullRequestCommits.tsx create mode 100644 web/src/pages/PullRequest/PullRequestMetaLine.module.scss create mode 100644 web/src/pages/PullRequest/PullRequestMetaLine.module.scss.d.ts create mode 100644 web/src/pages/PullRequest/PullRequestMetaLine.tsx create mode 100644 web/src/pages/PullRequest/PullRequestMetadataInfo.module.scss.d.ts create mode 100644 web/src/pages/PullRequest/PullRequestTabContentWrapper.tsx create mode 100644 web/src/pages/PullRequest/PullRequestTitle.tsx create mode 100644 web/src/pages/PullRequest/PullRequestUtils.tsx create mode 100644 web/src/pages/PullRequests/PullRequests.module.scss create mode 100644 web/src/pages/PullRequests/PullRequests.module.scss.d.ts create mode 100644 web/src/pages/PullRequests/PullRequests.tsx create mode 100644 web/src/pages/PullRequests/PullRequestsContentHeader/PullRequestsContentHeader.module.scss create mode 100644 web/src/pages/PullRequests/PullRequestsContentHeader/PullRequestsContentHeader.module.scss.d.ts create mode 100644 web/src/pages/PullRequests/PullRequestsContentHeader/PullRequestsContentHeader.tsx create mode 100644 web/src/pages/RepositoriesListing/RepositoriesListing.module.scss create mode 100644 web/src/pages/RepositoriesListing/RepositoriesListing.module.scss.d.ts create mode 100644 web/src/pages/RepositoriesListing/RepositoriesListing.tsx create mode 100644 web/src/pages/RepositoriesListing/no-repo.svg create mode 100644 web/src/pages/Repository/EmptyRepositoryInfo.module.scss create mode 100644 web/src/pages/Repository/EmptyRepositoryInfo.module.scss.d.ts create mode 100644 web/src/pages/Repository/EmptyRepositoryInfo.tsx create mode 100644 web/src/pages/Repository/Repository.module.scss create mode 100644 web/src/pages/Repository/Repository.module.scss.d.ts create mode 100644 web/src/pages/Repository/Repository.tsx create mode 100644 web/src/pages/Repository/RepositoryContent/ContentHeader/ContentHeader.module.scss create mode 100644 web/src/pages/Repository/RepositoryContent/ContentHeader/ContentHeader.module.scss.d.ts create mode 100644 web/src/pages/Repository/RepositoryContent/ContentHeader/ContentHeader.tsx create mode 100644 web/src/pages/Repository/RepositoryContent/ContentHeader/search-background.svg create mode 100644 web/src/pages/Repository/RepositoryContent/FileContent/FileContent.module.scss create mode 100644 web/src/pages/Repository/RepositoryContent/FileContent/FileContent.module.scss.d.ts create mode 100644 web/src/pages/Repository/RepositoryContent/FileContent/FileContent.tsx create mode 100644 web/src/pages/Repository/RepositoryContent/FileContent/GitBlame.module.scss create mode 100644 web/src/pages/Repository/RepositoryContent/FileContent/GitBlame.module.scss.d.ts create mode 100644 web/src/pages/Repository/RepositoryContent/FileContent/GitBlame.tsx create mode 
100644 web/src/pages/Repository/RepositoryContent/FileContent/RenameContentHistory.module.scss create mode 100644 web/src/pages/Repository/RepositoryContent/FileContent/RenameContentHistory.module.scss.d.ts create mode 100644 web/src/pages/Repository/RepositoryContent/FileContent/RenameContentHistory.tsx create mode 100644 web/src/pages/Repository/RepositoryContent/FileContent/lineWidget.ts create mode 100644 web/src/pages/Repository/RepositoryContent/FolderContent/FolderContent.module.scss create mode 100644 web/src/pages/Repository/RepositoryContent/FolderContent/FolderContent.module.scss.d.ts create mode 100644 web/src/pages/Repository/RepositoryContent/FolderContent/FolderContent.tsx create mode 100644 web/src/pages/Repository/RepositoryContent/FolderContent/Readme.module.scss create mode 100644 web/src/pages/Repository/RepositoryContent/FolderContent/Readme.module.scss.d.ts create mode 100644 web/src/pages/Repository/RepositoryContent/FolderContent/Readme.tsx create mode 100644 web/src/pages/Repository/RepositoryContent/RepositoryContent.module.scss create mode 100644 web/src/pages/Repository/RepositoryContent/RepositoryContent.module.scss.d.ts create mode 100644 web/src/pages/Repository/RepositoryContent/RepositoryContent.tsx create mode 100644 web/src/pages/Repository/RepositoryHeader/RepositoryHeader.module.scss create mode 100644 web/src/pages/Repository/RepositoryHeader/RepositoryHeader.module.scss.d.ts create mode 100644 web/src/pages/Repository/RepositoryHeader/RepositoryHeader.tsx create mode 100644 web/src/pages/Repository/RepositoryTree/ResourceTree.module.scss create mode 100644 web/src/pages/Repository/RepositoryTree/ResourceTree.module.scss.d.ts create mode 100644 web/src/pages/Repository/RepositoryTree/ResourceTree.tsx create mode 100644 web/src/pages/Repository/RepositoryTree/TreeExample.tsx create mode 100644 web/src/pages/Repository/RepositoryTree/demodata.ts create mode 100644 web/src/pages/Repository/RepositoryTree/renderers.tsx create mode 100644 web/src/pages/RepositoryBranches/RepositoryBranches.module.scss create mode 100644 web/src/pages/RepositoryBranches/RepositoryBranches.module.scss.d.ts create mode 100644 web/src/pages/RepositoryBranches/RepositoryBranches.tsx create mode 100644 web/src/pages/RepositoryBranches/RepositoryBranchesContent/BranchesContent/BranchesContent.module.scss create mode 100644 web/src/pages/RepositoryBranches/RepositoryBranchesContent/BranchesContent/BranchesContent.module.scss.d.ts create mode 100644 web/src/pages/RepositoryBranches/RepositoryBranchesContent/BranchesContent/BranchesContent.tsx create mode 100644 web/src/pages/RepositoryBranches/RepositoryBranchesContent/BranchesContentHeader/BranchesContentHeader.module.scss create mode 100644 web/src/pages/RepositoryBranches/RepositoryBranchesContent/BranchesContentHeader/BranchesContentHeader.module.scss.d.ts create mode 100644 web/src/pages/RepositoryBranches/RepositoryBranchesContent/BranchesContentHeader/BranchesContentHeader.tsx create mode 100644 web/src/pages/RepositoryBranches/RepositoryBranchesContent/RepositoryBranchesContent.module.scss create mode 100644 web/src/pages/RepositoryBranches/RepositoryBranchesContent/RepositoryBranchesContent.module.scss.d.ts create mode 100644 web/src/pages/RepositoryBranches/RepositoryBranchesContent/RepositoryBranchesContent.tsx create mode 100644 web/src/pages/RepositoryCommit/RepositoryCommit.module.scss create mode 100644 web/src/pages/RepositoryCommit/RepositoryCommit.module.scss.d.ts create mode 100644 
web/src/pages/RepositoryCommit/RepositoryCommit.tsx create mode 100644 web/src/pages/RepositoryCommits/RepositoryCommits.module.scss create mode 100644 web/src/pages/RepositoryCommits/RepositoryCommits.module.scss.d.ts create mode 100644 web/src/pages/RepositoryCommits/RepositoryCommits.tsx create mode 100644 web/src/pages/RepositoryFileEdit/FileEditor/FileEditor.module.scss create mode 100644 web/src/pages/RepositoryFileEdit/FileEditor/FileEditor.module.scss.d.ts create mode 100644 web/src/pages/RepositoryFileEdit/FileEditor/FileEditor.tsx create mode 100644 web/src/pages/RepositoryFileEdit/RepositoryFileEdit.module.scss create mode 100644 web/src/pages/RepositoryFileEdit/RepositoryFileEdit.module.scss.d.ts create mode 100644 web/src/pages/RepositoryFileEdit/RepositoryFileEdit.tsx create mode 100644 web/src/pages/RepositoryFileEdit/RepositoryFileEditHeader/RepositoryFileEditHeader.module.scss create mode 100644 web/src/pages/RepositoryFileEdit/RepositoryFileEditHeader/RepositoryFileEditHeader.module.scss.d.ts create mode 100644 web/src/pages/RepositoryFileEdit/RepositoryFileEditHeader/RepositoryFileEditHeader.tsx create mode 100644 web/src/pages/RepositorySettings/GeneralSettingsContent/DeleteRepoModal/DeleteRepoModal.tsx create mode 100644 web/src/pages/RepositorySettings/GeneralSettingsContent/GeneralSettingsContent.tsx create mode 100644 web/src/pages/RepositorySettings/RepositorySettings.module.scss create mode 100644 web/src/pages/RepositorySettings/RepositorySettings.module.scss.d.ts create mode 100644 web/src/pages/RepositorySettings/RepositorySettings.tsx create mode 100644 web/src/pages/RepositorySettings/RepossitorySettingsContent/RepositorySettingsContent.tsx create mode 100644 web/src/pages/RepositorySettings/SettingsContent.tsx create mode 100644 web/src/pages/RepositorySettings/mockWebhooks.json create mode 100644 web/src/pages/RepositoryTags/RepositoryTags.module.scss create mode 100644 web/src/pages/RepositoryTags/RepositoryTags.module.scss.d.ts create mode 100644 web/src/pages/RepositoryTags/RepositoryTags.tsx create mode 100644 web/src/pages/RepositoryTags/RepositoryTagsContent/RepositoryTagsContent.module.scss create mode 100644 web/src/pages/RepositoryTags/RepositoryTagsContent/RepositoryTagsContent.module.scss.d.ts create mode 100644 web/src/pages/RepositoryTags/RepositoryTagsContent/RepositoryTagsContent.tsx create mode 100644 web/src/pages/RepositoryTags/RepositoryTagsContentHeader/RepositoryTagsContentHeader.module.scss create mode 100644 web/src/pages/RepositoryTags/RepositoryTagsContentHeader/RepositoryTagsContentHeader.module.scss.d.ts create mode 100644 web/src/pages/RepositoryTags/RepositoryTagsContentHeader/RepositoryTagsContentHeader.tsx create mode 100644 web/src/pages/RepositoryTags/TagsContent/TagsContent.module.scss create mode 100644 web/src/pages/RepositoryTags/TagsContent/TagsContent.module.scss.d.ts create mode 100644 web/src/pages/RepositoryTags/TagsContent/TagsContent.tsx create mode 100644 web/src/pages/Search/Search.module.scss create mode 100644 web/src/pages/Search/Search.module.scss.d.ts create mode 100644 web/src/pages/Search/Search.tsx create mode 100644 web/src/pages/Secret/Secret.module.scss create mode 100644 web/src/pages/Secret/Secret.module.scss.d.ts create mode 100644 web/src/pages/Secret/Secret.tsx create mode 100644 web/src/pages/SecretList/SecretList.module.scss create mode 100644 web/src/pages/SecretList/SecretList.module.scss.d.ts create mode 100644 web/src/pages/SecretList/SecretList.tsx create mode 100644 
web/src/pages/Settings/Settings.tsx create mode 100644 web/src/pages/SignIn/SignIn.module.scss create mode 100644 web/src/pages/SignIn/SignIn.module.scss.d.ts create mode 100644 web/src/pages/SignIn/SignIn.tsx create mode 100644 web/src/pages/SignUp/SignUp.module.scss create mode 100644 web/src/pages/SignUp/SignUp.module.scss.d.ts create mode 100644 web/src/pages/SignUp/SignUp.tsx create mode 100644 web/src/pages/SpaceAccessControl/AddNewMember/AddNewMember.tsx create mode 100644 web/src/pages/SpaceAccessControl/SpaceAccessControl.module.scss create mode 100644 web/src/pages/SpaceAccessControl/SpaceAccessControl.module.scss.d.ts create mode 100644 web/src/pages/SpaceAccessControl/SpaceAccessControl.tsx create mode 100644 web/src/pages/SpaceSettings/DeleteSpaceModal/DeleteSpaceModal.tsx create mode 100644 web/src/pages/SpaceSettings/ExportForm/ExportForm.tsx create mode 100644 web/src/pages/SpaceSettings/SpaceSettings.module.scss create mode 100644 web/src/pages/SpaceSettings/SpaceSettings.module.scss.d.ts create mode 100644 web/src/pages/SpaceSettings/SpaceSettings.tsx create mode 100644 web/src/pages/UserProfile/EditableTextField.tsx create mode 100644 web/src/pages/UserProfile/NewToken/NewToken.tsx create mode 100644 web/src/pages/UserProfile/UserProfile.module.scss create mode 100644 web/src/pages/UserProfile/UserProfile.module.scss.d.ts create mode 100644 web/src/pages/UserProfile/UserProfile.tsx create mode 100644 web/src/pages/UsersListing/UsersListing.module.scss create mode 100644 web/src/pages/UsersListing/UsersListing.module.scss.d.ts create mode 100644 web/src/pages/UsersListing/UsersListing.tsx create mode 100644 web/src/pages/WebhookDetails/WebhookDetails.tsx create mode 100644 web/src/pages/WebhookNew/WebhookNew.tsx create mode 100644 web/src/pages/WebhookNew/WehookForm.module.scss create mode 100644 web/src/pages/WebhookNew/WehookForm.module.scss.d.ts create mode 100644 web/src/pages/WebhookNew/WehookForm.tsx create mode 100644 web/src/pages/Webhooks/Webhooks.module.scss create mode 100644 web/src/pages/Webhooks/Webhooks.module.scss.d.ts create mode 100644 web/src/pages/Webhooks/Webhooks.tsx create mode 100644 web/src/pages/Webhooks/WebhooksHeader/WebhooksHeader.module.scss create mode 100644 web/src/pages/Webhooks/WebhooksHeader/WebhooksHeader.module.scss.d.ts create mode 100644 web/src/pages/Webhooks/WebhooksHeader/WebhooksHeader.tsx create mode 100644 web/src/public-path.ts create mode 100644 web/src/services/code/index.tsx create mode 100644 web/src/services/code/overrides.yaml create mode 100644 web/src/services/code/swagger.yaml create mode 100644 web/src/services/config.ts create mode 100644 web/src/utils/ExecutionUtils.ts create mode 100644 web/src/utils/FileUtils.ts create mode 100644 web/src/utils/GitUtils.ts create mode 100644 web/src/utils/Utils.ts create mode 100644 web/src/utils/codemirror/addClassToLinesExtension.tsx create mode 100644 web/src/utils/test/testUtils.module.scss create mode 100644 web/src/utils/test/testUtils.module.scss.d.ts create mode 100644 web/src/utils/test/testUtils.tsx create mode 100644 web/src/utils/types.ts create mode 100644 web/src/utils/utils.scss create mode 100644 web/src/utils/vars.scss create mode 100644 web/tsconfig-eslint.json create mode 100644 web/tsconfig.json create mode 100644 web/typed-scss-modules.config.js create mode 100644 web/yarn.lock diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000000..4472e33005 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,12 @@ +*.sqlite +*.sqlite3 +web/node_modules 
+web/dist +release +.idea +coverage.out + +# ignore any executables we build +/gitness +/gitrpcserver +/gitness-githook \ No newline at end of file diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000000..85119a2a23 --- /dev/null +++ b/.gitignore @@ -0,0 +1,21 @@ +.DS_Store +NOTES* +__debug_bin +_research +.env +*.sqlite +*.sqlite3 +web/node_modules +web/dist +web/coverage +yarn-error* +release +.idea +.vscode/settings.json +coverage.out +gitness.session.sql + +# ignore any executables we build +/gitness +/gitrpcserver +/gitness-githook \ No newline at end of file diff --git a/.gitleaksignore b/.gitleaksignore new file mode 100644 index 0000000000..e69de29bb2 diff --git a/.golangci.yml b/.golangci.yml new file mode 100644 index 0000000000..cd26ed84f2 --- /dev/null +++ b/.golangci.yml @@ -0,0 +1,322 @@ +## Golden config for golangci-lint v1.49.0 + +run: + # Timeout for analysis, e.g. 30s, 5m. + # Default: 1m + timeout: 3m + + +# This file contains only configs which differ from defaults. +# All possible options can be found here https://github.com/golangci/golangci-lint/blob/master/.golangci.reference.yml +linters-settings: + cyclop: + # The maximal code complexity to report. + # Default: 10 + max-complexity: 30 + # The maximal average package complexity. + # If it's higher than 0.0 (float) the check is enabled + # Default: 0.0 + package-average: 10.0 + + errcheck: + # Report about not checking of errors in type assertions: `a := b.(MyStruct)`. + # Such cases aren't reported by default. + # Default: false + check-type-assertions: true + + funlen: + # Checks the number of lines in a function. + # If lower than 0, disable the check. + # Default: 60 + lines: 100 + # Checks the number of statements in a function. + # If lower than 0, disable the check. + # Default: 40 + statements: 50 + + gocognit: + # Minimal code complexity to report + # Default: 30 (but we recommend 10-20) + min-complexity: 20 + + gocritic: + # Settings passed to gocritic. + # The settings key is the name of a supported gocritic checker. + # The list of supported checkers can be find in https://go-critic.github.io/overview. + settings: + captLocal: + # Whether to restrict checker to params only. + # Default: true + paramsOnly: false + underef: + # Whether to skip (*x).method() calls where x is a pointer receiver. + # Default: true + skipRecvDeref: false + + gomnd: + # List of function patterns to exclude from analysis. + # Values always ignored: `time.Date` + # Default: [] + ignored-functions: + - os.Chmod + - os.Mkdir + - os.MkdirAll + - os.OpenFile + - os.WriteFile + - prometheus.ExponentialBuckets + - prometheus.ExponentialBucketsRange + - prometheus.LinearBuckets + - strconv.FormatFloat + - strconv.FormatInt + - strconv.FormatUint + - strconv.ParseFloat + - strconv.ParseInt + - strconv.ParseUint + + gomodguard: + blocked: + # List of blocked modules. + # Default: [] + modules: + - github.com/golang/protobuf: + recommendations: + - google.golang.org/protobuf + reason: "see https://developers.google.com/protocol-buffers/docs/reference/go/faq#modules" + - github.com/satori/go.uuid: + recommendations: + - github.com/google/uuid + reason: "satori's package is not maintained" + - github.com/gofrs/uuid: + recommendations: + - github.com/google/uuid + reason: "see recommendation from dev-infra team: https://confluence.gtforge.com/x/gQI6Aw" + + govet: + # Enable all analyzers. + # Default: false + enable-all: true + # Disable analyzers by name. + # Run `go tool vet help` to see all analyzers. 
+ # Default: [] + disable: + - fieldalignment # too strict + # Settings per analyzer. + settings: + shadow: + # Whether to be strict about shadowing; can be noisy. + # Default: false + strict: true + + nakedret: + # Make an issue if func has more lines of code than this setting, and it has naked returns. + # Default: 30 + max-func-lines: 30 + + rowserrcheck: + # database/sql is always checked + # Default: [] + packages: + - github.com/jmoiron/sqlx + + tenv: + # The option `all` will run against whole test files (`_test.go`) regardless of method/function signatures. + # Otherwise, only methods that take `*testing.T`, `*testing.B`, and `testing.TB` as arguments are checked. + # Default: false + all: true + + goheader: + # Supports two types: `const` and `regexp`. + # Values can be used recursively. + # Default: {} + values: + const: + # Define here const type values in format k:v. + # For example: + COMPANY: Harness Inc. + # The template used for checking. + # Default: "" + template: |- + Copyright 2023 Harness, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + # As an alternative to the 'template' directive, you may put the path to a file with the template source. + # Useful if you need to load the template from a specific file. + # Default: "" + # template-path: /path/to/my/template.tmpl + gci: + # DEPRECATED: use `sections` and `prefix(github.com/org/project)` instead. + # local-prefixes: github.com/harness/gitness + # Section configuration to compare against. + # Section names are case-insensitive and may contain parameters in (). + # The default order of sections is `standard > default > custom > blank > dot`. + # If `custom-order` is `true`, it follows the order of the `sections` option. + # Default: ["standard", "default"] + sections: + - standard # Standard section: captures all standard packages. + - prefix(github.com/harness/gitness) # Custom section: groups all imports with the specified prefix. + - default # Default section: contains all imports that could not be matched to another section type. + - blank # Blank section: contains all blank imports. This section is not present unless explicitly enabled. + - dot # Dot section: contains all dot imports. This section is not present unless explicitly enabled. + # Skip generated files. + # Default: true + skip-generated: false + # Enable custom order of sections. + # If `true`, make the section order the same as the order of `sections`. + # Default: false + custom-order: true + + tagliatelle: + # Check the struct tag name case. + case: + rules: + # Any struct tag type can be used.
+ # Support string case: `camel`, `pascal`, `kebab`, `snake`, `goCamel`, `goPascal`, `goKebab`, `goSnake`, `upper`, `lower` + json: snake + db: snake + yaml: snake + xml: snake + bson: snake + avro: snake + mapstructure: snake + + +linters: + disable-all: true + enable: + ## enabled by default + - errcheck # checking for unchecked errors, these unchecked errors can be critical bugs in some cases + - gosimple # specializes in simplifying a code + - govet # reports suspicious constructs, such as Printf calls whose arguments do not align with the format string + - ineffassign # detects when assignments to existing variables are not used + - staticcheck # is a go vet on steroids, applying a ton of static analysis checks + - typecheck # like the front-end of a Go compiler, parses and type-checks Go code + - unused # checks for unused constants, variables, functions and types + ## disabled by default + - asasalint # checks for pass []any as any in variadic func(...any) + - asciicheck # checks that your code does not contain non-ASCII identifiers + - bidichk # checks for dangerous unicode character sequences + - bodyclose # checks whether HTTP response body is closed successfully + #- contextcheck # checks the function whether use a non-inherited context # TODO: enable after golangci-lint uses https://github.com/sylvia7788/contextcheck/releases/tag/v1.0.7 + - cyclop # checks function and package cyclomatic complexity + # - dupl # tool for code clone detection + - durationcheck # checks for two durations multiplied together + - errname # checks that sentinel errors are prefixed with the Err and error types are suffixed with the Error + - errorlint # finds code that will cause problems with the error wrapping scheme introduced in Go 1.13 + - execinquery # checks query string in Query function which reads your Go src files and warning it finds + - exhaustive # checks exhaustiveness of enum switch statements + - exportloopref # checks for pointers to enclosing loop variables + - forbidigo # forbids identifiers + #- funlen # tool for detection of long functions + #- gochecknoglobals # checks that no global variables exist + #- gochecknoinits # checks that no init functions are present in Go code + - gocognit # computes and checks the cognitive complexity of functions + - goconst # finds repeated strings that could be replaced by a constant + - gocritic # provides diagnostics that check for bugs, performance and style issues + - gocyclo # computes and checks the cyclomatic complexity of functions + - godot # checks if comments end in a period + - goimports # in addition to fixing imports, goimports also formats your code in the same style as gofmt + - gomnd # detects magic numbers + - gomoddirectives # manages the use of 'replace', 'retract', and 'excludes' directives in go.mod + - gomodguard # allow and block lists linter for direct Go module dependencies. 
This is different from depguard where there are different block types for example version constraints and module recommendations + - goprintffuncname # checks that printf-like functions are named with f at the end + - gosec # inspects source code for security problems + - lll # reports long lines + - makezero # finds slice declarations with non-zero initial length + - nakedret # finds naked returns in functions greater than a specified function length + - nestif # reports deeply nested if statements + - nilerr # finds the code that returns nil even if it checks that the error is not nil + - nilnil # checks that there is no simultaneous return of nil error and an invalid value + - noctx # finds sending http request without context.Context + # - nolintlint # reports ill-formed or insufficient nolint directives + # - nonamedreturns # reports all named returns + - nosprintfhostport # checks for misuse of Sprintf to construct a host with port in a URL + - predeclared # finds code that shadows one of Go's predeclared identifiers + - promlinter # checks Prometheus metrics naming via promlint + - reassign # checks that package variables are not reassigned + - revive # fast, configurable, extensible, flexible, and beautiful linter for Go, drop-in replacement of golint + - rowserrcheck # checks whether Err of rows is checked successfully + - sqlclosecheck # checks that sql.Rows and sql.Stmt are closed + - stylecheck # is a replacement for golint + - tagliatelle # checks the struct tags + - tenv # detects using os.Setenv instead of t.Setenv since Go1.17 + #- testpackage # makes you use a separate _test package + - tparallel # detects inappropriate usage of t.Parallel() method in your Go test codes + - unconvert # removes unnecessary type conversions + - unparam # reports unused function parameters + - usestdlibvars # detects the possibility to use variables/constants from the Go standard library + - wastedassign # finds wasted assignment statements + - whitespace # detects leading and trailing whitespace + + ## you may want to enable + #- decorder # checks declaration order and count of types, constants, variables and functions + #- exhaustruct # checks if all structure fields are initialized + - gci # controls golang package import order and makes it always deterministic + #- godox # detects FIXME, TODO and other comment keywords + - goheader # checks is file header matches to pattern + #- interfacebloat # checks the number of methods inside an interface + #- ireturn # accept interfaces, return concrete types + #- prealloc # [premature optimization, but can be used in some cases] finds slice declarations that could potentially be preallocated + #- varnamelen # [great idea, but too many false positives] checks that the length of a variable's name matches its scope + #- wrapcheck # checks that errors returned from external packages are wrapped + + ## disabled + #- containedctx # detects struct contained context.Context field + #- depguard # [replaced by gomodguard] checks if package imports are in a list of acceptable packages + #- dogsled # checks assignments with too many blank identifiers (e.g. x, _, _, _, := f()) + #- errchkjson # [don't see profit + I'm against of omitting errors like in the first example https://github.com/breml/errchkjson] checks types passed to the json encoding functions. 
Reports unsupported types and optionally reports occasions where the check for the returned error can be omitted
+ #- forcetypeassert # [replaced by errcheck] finds forced type assertions
+ #- goerr113 # [too strict] checks the error handling expressions
+ #- gofmt # [replaced by goimports] checks whether code was gofmt-ed
+ #- gofumpt # [replaced by goimports, gofumports is not available yet] checks whether code was gofumpt-ed
+ #- grouper # analyzes expression groups
+ #- importas # enforces consistent import aliases
+ #- logrlint # [owner archived repository] checks logr arguments
+ #- maintidx # measures the maintainability index of each function
+ - misspell # [useless] finds commonly misspelled English words in comments
+ #- nlreturn # [too strict and mostly code is not more readable] checks for a new line before return and branch statements to increase code clarity
+ #- paralleltest # [too many false positives] detects missing usage of t.Parallel() method in your Go test
+ #- thelper # detects golang test helpers without t.Helper() call and checks the consistency of test helpers
+ #- wsl # [too strict and mostly code is not more readable] whitespace linter forces you to use empty lines
+
+
+issues:
+ # Maximum count of issues with the same text.
+ # Set to 0 to disable.
+ # Default: 3
+ max-same-issues: 50
+
+ exclude-rules:
+ - text: 'shadow: declaration of "(err|ctx)" shadows declaration at'
+ linters: [ govet ]
+ - source: "^//\\s*go:generate\\s"
+ linters: [ lll ]
+ - source: "(noinspection|TODO)"
+ linters: [ godot ]
+ - source: "//noinspection"
+ linters: [ gocritic ]
+ - source: "^\\s+if _, ok := err\\.\\([^.]+\\.InternalError\\); ok {"
+ linters: [ errorlint ]
+ - path: "^cli/"
+ linters: [forbidigo]
+ - text: "mnd: Magic number: \\d"
+ linters:
+ - gomnd
+ - path: "_test\\.go"
+ linters:
+ - bodyclose
+ - dupl
+ - funlen
+ - goconst
+ - gosec
+ - noctx
+ - wrapcheck
\ No newline at end of file
diff --git a/.local.env b/.local.env
new file mode 100644
index 0000000000..a536f05356
--- /dev/null
+++ b/.local.env
@@ -0,0 +1,4 @@
+GITNESS_TRACE=true
+GITNESS_WEBHOOK_ALLOW_LOOPBACK=true
+GITNESS_PRINCIPAL_ADMIN_PASSWORD=changeit
+GITNESS_METRIC_ENABLED=false
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000000..e4c0975369
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,90 @@
+# ---------------------------------------------------------#
+# Build web image #
+# ---------------------------------------------------------#
+FROM node:16 as web
+
+WORKDIR /usr/src/app
+
+COPY web/package.json ./
+COPY web/yarn.lock ./
+
+ARG GITHUB_ACCESS_TOKEN
+
+# If you are building your code for production
+# RUN npm ci --omit=dev
+
+COPY ./web .
+
+RUN yarn && yarn build && yarn cache clean
+
+# ---------------------------------------------------------#
+# Build gitness image #
+# ---------------------------------------------------------#
+FROM golang:1.19-alpine as builder
+
+RUN apk update \
+ && apk add --no-cache protoc build-base git
+
+# Set up working dir
+WORKDIR /app
+
+# Access to private repos
+ARG GITHUB_ACCESS_TOKEN
+RUN git config --global url."https://${GITHUB_ACCESS_TOKEN}:x-oauth-basic@github.com/harness".insteadOf "https://github.com/harness"
+RUN git config --global --add safe.directory '/app'
+RUN go env -w GOPRIVATE=github.com/harness/*
+
+# Get dependencies - will also be cached if we don't change mod/sum
+COPY go.mod .
+COPY go.sum .
+COPY Makefile .
+RUN make dep
+RUN make tools
+# COPY the source code as the last step
+COPY . .
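+
+# Illustrative only (not part of this patch's build steps): one hypothetical way to
+# invoke this multi-stage build locally, passing the build args declared in this file.
+# The token value and image tag below are placeholders.
+#   docker build \
+#     --build-arg GITHUB_ACCESS_TOKEN=<token with read access to the private repos> \
+#     --build-arg GIT_COMMIT=$(git rev-parse HEAD) \
+#     --build-arg BUILD_TAGS=sqlite \
+#     -t gitness:local .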
+ +COPY --from=web /usr/src/app/dist /app/web/dist + +# build +ARG GIT_COMMIT +ARG GITNESS_VERSION_MAJOR +ARG GITNESS_VERSION_MINOR +ARG GITNESS_VERSION_PATCH +ARG BUILD_TAGS + +# set required build flags +RUN CGO_ENABLED=1 \ + GOOS=linux \ + GOARCH=amd64 \ + BUILD_TAGS=${BUILD_TAGS} \ + make build + +### Pull CA Certs +FROM alpine:latest as cert-image + +RUN apk --update add ca-certificates + +# ---------------------------------------------------------# +# Create final image # +# ---------------------------------------------------------# +FROM alpine/git:2.36.3 as final + +# setup app dir and its content +WORKDIR /app +VOLUME /data + +ENV XDG_CACHE_HOME /data +ENV GITRPC_SERVER_GIT_ROOT /data +ENV GITNESS_DATABASE_DRIVER sqlite3 +ENV GITNESS_DATABASE_DATASOURCE /data/database.sqlite +ENV GITNESS_METRIC_ENABLED=true +ENV GITNESS_METRIC_ENDPOINT=https://stats.drone.ci/api/v1/gitness +ENV GITNESS_TOKEN_COOKIE_NAME=token + +COPY --from=builder /app/gitness /app/gitness +COPY --from=cert-image /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ca-certificates.crt + +EXPOSE 3000 +EXPOSE 3001 + +ENTRYPOINT [ "/app/gitness", "server" ] \ No newline at end of file diff --git a/LICENSE.md b/LICENSE.md new file mode 100644 index 0000000000..8dada3edaf --- /dev/null +++ b/LICENSE.md @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000000..bec9ba406e
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,163 @@
+ifndef GOPATH
+	GOPATH := $(shell go env GOPATH)
+endif
+ifndef GOBIN # derive value from gopath (default to first entry, similar to 'go get')
+	GOBIN := $(shell go env GOPATH | sed 's/:.*//')/bin
+endif
+ifndef DOCKER_BUILD_OPTS
+	DOCKER_BUILD_OPTS :=
+endif
+
+tools = $(addprefix $(GOBIN)/, golangci-lint goimports govulncheck protoc-gen-go protoc-gen-go-grpc gci)
+deps = $(addprefix $(GOBIN)/, wire dbmate)
+
+LDFLAGS = "-X github.com/harness/gitness/version.GitCommit=${GIT_COMMIT} -X github.com/harness/gitness/version.major=${GITNESS_VERSION_MAJOR} -X github.com/harness/gitness/version.minor=${GITNESS_VERSION_MINOR} -X github.com/harness/gitness/version.patch=${GITNESS_VERSION_PATCH}"
+
+ifneq (,$(wildcard ./.local.env))
+	include ./.local.env
+	export
+endif
+
+.DEFAULT_GOAL := all
+
+ifeq ($(BUILD_TAGS),)
+	BUILD_TAGS := sqlite
+endif
+
+BUILD_TAGS := $(BUILD_TAGS),gogit
+
+###############################################################################
+#
+# Initialization
+#
+###############################################################################
+
+dep: $(deps) ## Install the deps required to generate code and build gitness
+	@echo "Installing dependencies"
+	@go mod download
+
+tools: $(tools) ## Install tools required for the build
+	@echo "Installed tools"
+
+###############################################################################
+#
+# Build and testing rules
+#
+###############################################################################
+
+build: generate ## Build the all-in-one gitness binary
+	@echo "Building Gitness Server"
+	go build -tags=${BUILD_TAGS} -ldflags=${LDFLAGS} -o ./gitness ./cmd/gitness
+
+build-gitrpc: generate ## Build the gitrpc binary
+	@echo "Building GitRPC Server"
+	go build -tags=${BUILD_TAGS} -ldflags=${LDFLAGS} -o ./gitrpcserver ./cmd/gitrpcserver
+
+build-githook: generate ## Build the githook binary for gitness
+	@echo "Building gitness GitHook Binary"
+	go build -tags=${BUILD_TAGS} -ldflags=${LDFLAGS} -o ./gitness-githook ./cmd/gitness-githook
+
+test: generate ## Run the go tests
+	@echo "Running tests"
+	go test -v -coverprofile=coverage.out ./internal/...
+	go tool cover -html=coverage.out
+
+run: dep ## Run the gitness binary from source
+	@go run -race -ldflags=${LDFLAGS} ./cmd/gitness
+
+###############################################################################
+#
+# Code Formatting and linting
+#
+###############################################################################
+
+format: tools # Format go code and error if any changes are made
+	@echo "Formatting ..."
+	@goimports -w .
+	@gci write --custom-order -s standard -s "prefix(github.com/harness/gitness)" -s default -s blank -s dot .
+	@echo "Formatting complete"
+
+sec:
+	@echo "Vulnerability detection $(1)"
+	@govulncheck ./...
+
+lint: tools generate # lint the golang code
+	@echo "Linting $(1)"
+	@golangci-lint run --timeout=3m --verbose
+
+###############################################################################
+# Code Generation
+#
+# Some code generation can be slow, so we only run it if
+# the source file has changed.
+###############################################################################
+
+generate: wire proto
+	@echo "Generating Code"
+
+wire: cmd/gitness/wire_gen.go cmd/gitrpcserver/wire_gen.go
+
+force-wire: ## Force wire code generation
+	@sh ./scripts/wire/server/standalone.sh
+	@sh ./scripts/wire/gitrpcserver/wire.sh
+
+cmd/gitness/wire_gen.go: cmd/gitness/wire.go
+	@sh ./scripts/wire/server/standalone.sh
+
+cmd/gitrpcserver/wire_gen.go: cmd/gitrpcserver/wire.go
+	@sh ./scripts/wire/gitrpcserver/wire.sh
+
+proto: ## generate proto files for gitrpc integration
+	@protoc --proto_path=./gitrpc/proto \
+		--go_out=./gitrpc/rpc \
+		--go_opt=paths=source_relative \
+		--go-grpc_out=./gitrpc/rpc \
+		--go-grpc_opt=paths=source_relative \
+		./gitrpc/proto/*.proto
+
+###############################################################################
+# Install Tools and deps
+#
+# These targets specify the full path to where the tool is installed
+# If the tool already exists it won't be re-installed.
+###############################################################################
+
+update-tools: delete-tools $(tools) ## Update the tools by deleting and re-installing
+
+delete-tools: ## Delete the tools
+	@rm $(tools) || true
+
+# Install golangci-lint
+$(GOBIN)/golangci-lint:
+	@echo "🔘 Installing golangci-lint... (`date '+%H:%M:%S'`)"
+	@curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(GOBIN)
+
+# Install goimports to format code
+$(GOBIN)/goimports:
+	@echo "🔘 Installing goimports ... (`date '+%H:%M:%S'`)"
+	@go install golang.org/x/tools/cmd/goimports
+
+# Install wire to generate dependency injection
+$(GOBIN)/wire:
+	go install github.com/google/wire/cmd/wire@latest
+
+# Install dbmate to perform db migrations
+$(GOBIN)/dbmate:
+	go install github.com/amacneil/dbmate@v1.15.0
+
+$(GOBIN)/govulncheck:
+	go install golang.org/x/vuln/cmd/govulncheck@latest
+
+$(GOBIN)/protoc-gen-go:
+	go install google.golang.org/protobuf/cmd/protoc-gen-go@v1.28
+
+$(GOBIN)/protoc-gen-go-grpc:
+	go install google.golang.org/grpc/cmd/protoc-gen-go-grpc@v1.2
+
+$(GOBIN)/gci:
+	go install github.com/daixiang0/gci@latest
+
+help: ## show help message
+	@awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m\033[0m\n"} /^[$$()% 0-9a-zA-Z_-]+:.*?##/ { printf " \033[36m%-15s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST)
+
+.PHONY: delete-tools update-tools help format lint
\ No newline at end of file
diff --git a/README.md b/README.md
new file mode 100644
index 0000000000..c1b0865a2c
--- /dev/null
+++ b/README.md
@@ -0,0 +1,101 @@
+# Gitness
+Your lightweight, super fast code hosting and build service.
+
+For more information, please visit [gitness.com](https://gitness.com/)
+
+# Pre-Requisites
+
+Install the latest stable version of Node and Go version 1.19 or higher, and then install the Go programs listed below. Ensure the GOPATH [bin directory](https://go.dev/doc/gopath_code#GOPATH) is added to your PATH.
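+
+For example, a minimal way to do this for the current shell session (assuming bash or zsh; adjust for your own shell setup) is:
+
+```bash
+# Add the Go bin directory to PATH for this shell session.
+export PATH="$(go env GOPATH)/bin:$PATH"
+```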
+
+Install protobuf
+- Check if you've already installed protobuf ```protoc --version```
+- If your version is different from v3.21.11, run ```brew unlink protobuf```
+- Get v3.21.11 ```curl -s https://raw.githubusercontent.com/Homebrew/homebrew-core/9de8de7a533609ebfded833480c1f7c05a3448cb/Formula/protobuf.rb > /tmp/protobuf.rb```
+- Install it ```brew install /tmp/protobuf.rb```
+- Check your version again ```protoc --version```
+
+Install protoc-gen-go and protoc-gen-go-grpc:
+
+- Install protoc-gen-go v1.28.1 ```go install google.golang.org/protobuf/cmd/protoc-gen-go@v1.28.1```
+(Note that this will install a binary in $GOBIN, so make sure $GOBIN is in your $PATH)
+
+- Install protoc-gen-go-grpc v1.2.0 ```go install google.golang.org/grpc/cmd/protoc-gen-go-grpc@v1.2.0```
+
+```bash
+$ make dep
+$ make tools
+```
+
+# Build
+
+Build the user interface:
+
+```bash
+$ pushd web
+$ yarn install
+$ yarn build
+$ popd
+```
+
+Build the server and command line tools:
+
+```bash
+$ make build
+```
+
+# Run
+
+This project supports all operating systems and architectures supported by Go. This means you can build and run the system on your machine; Docker containers are not required for local development and testing.
+
+Start the server at `localhost:3000`:
+
+```bash
+./gitness server .local.env
+```
+
+# User Interface
+
+This project includes a full user interface for interacting with the system. When you run the application, you can access the user interface by navigating to `http://localhost:3000` in your browser.
+
+# Swagger
+
+This project includes a swagger specification. When you run the application, you can access the swagger specification by navigating to `http://localhost:3000/swagger` in your browser (for the raw YAML, see `http://localhost:3000/openapi.yaml`).
+
+
+## Auto-Generate Gitness API Client used by UI using Swagger
+Please make sure to update the autogenerated client code used by the UI when adding new REST APIs.
+
+To regenerate the code, please execute the following steps:
+- Run a local gitness instance with the latest changes
+- Get the latest OpenAPI spec from `http://localhost:3000/openapi.yaml` and store it in `web/src/services/code/swagger.yaml`
+
+The latest API changes should now be reflected in `web/src/services/code/index.tsx`
+
+# REST API
+Please refer to the swagger specification for our REST API.
+
+For testing, it's simplest to execute operations as the default user `admin` using a PAT:
+```bash
+# LOGIN (user: admin, pw: changeit)
+$ ./gitness login
+
+# GENERATE PAT (LIFETIME IN SECONDS; 2592000 = 30 DAYS)
+$ ./gitness user pat "my-pat-uid" 2592000
+```
+
+The command outputs a valid PAT that has been granted full access as the user.
+The token can then be sent as part of the `Authorization` header with Postman or curl:
+
+```bash
+$ curl http://localhost:3000/api/v1/user \
+-H "Authorization: Bearer $TOKEN"
+```
+
+
+# CLI
+This project includes VERY basic command line tools for development and running the service. Please remember that you must start the server before you can execute commands.
+
+For a full list of supported operations, please see
+```bash
+$ ./gitness --help
+```
\ No newline at end of file
diff --git a/cache/cache.go b/cache/cache.go
new file mode 100644
index 0000000000..23083df97c
--- /dev/null
+++ b/cache/cache.go
@@ -0,0 +1,44 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package cache + +import ( + "context" +) + +// Cache is an abstraction of a simple cache. +type Cache[K any, V any] interface { + Stats() (int64, int64) + Get(ctx context.Context, key K) (V, error) +} + +// ExtendedCache is an extension of the simple cache abstraction that adds mapping functionality. +type ExtendedCache[K comparable, V Identifiable[K]] interface { + Cache[K, V] + Map(ctx context.Context, keys []K) (map[K]V, error) +} + +type Identifiable[K comparable] interface { + Identifier() K +} + +type Getter[K any, V any] interface { + Find(ctx context.Context, key K) (V, error) +} + +type ExtendedGetter[K comparable, V Identifiable[K]] interface { + Getter[K, V] + FindMany(ctx context.Context, keys []K) ([]V, error) +} diff --git a/cache/cache_test.go b/cache/cache_test.go new file mode 100644 index 0000000000..8355d86210 --- /dev/null +++ b/cache/cache_test.go @@ -0,0 +1,69 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package cache + +import ( + "reflect" + "testing" +) + +func TestDeduplicate(t *testing.T) { + tests := []struct { + name string + input []int + expected []int + }{ + { + name: "empty", + input: nil, + expected: nil, + }, + { + name: "one-element", + input: []int{1}, + expected: []int{1}, + }, + { + name: "one-element-duplicated", + input: []int{1, 1}, + expected: []int{1}, + }, + { + name: "two-elements", + input: []int{2, 1}, + expected: []int{1, 2}, + }, + { + name: "three-elements", + input: []int{2, 2, 3, 3, 1, 1}, + expected: []int{1, 2, 3}, + }, + { + name: "many-elements", + input: []int{2, 5, 1, 2, 3, 3, 4, 5, 1, 1}, + expected: []int{1, 2, 3, 4, 5}, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + test.input = deduplicate(test.input) + if want, got := test.expected, test.input; !reflect.DeepEqual(want, got) { + t.Errorf("failed - want=%v, got=%v", want, got) + return + } + }) + } +} diff --git a/cache/no_cache.go b/cache/no_cache.go new file mode 100644 index 0000000000..404f85cacb --- /dev/null +++ b/cache/no_cache.go @@ -0,0 +1,37 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package cache + +import ( + "context" +) + +type NoCache[K any, V any] struct { + getter Getter[K, V] +} + +func NewNoCache[K any, V any](getter Getter[K, V]) NoCache[K, V] { + return NoCache[K, V]{ + getter: getter, + } +} + +func (c NoCache[K, V]) Stats() (int64, int64) { + return 0, 0 +} + +func (c NoCache[K, V]) Get(ctx context.Context, key K) (V, error) { + return c.getter.Find(ctx, key) +} diff --git a/cache/redis_cache.go b/cache/redis_cache.go new file mode 100644 index 0000000000..a0e31463d9 --- /dev/null +++ b/cache/redis_cache.go @@ -0,0 +1,99 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package cache + +import ( + "context" + "fmt" + "time" + + "github.com/go-redis/redis/v8" +) + +type Redis[K any, V any] struct { + client redis.UniversalClient + duration time.Duration + getter Getter[K, V] + keyEncoder func(K) string + codec Codec[V] + countHit int64 + countMiss int64 +} + +type Encoder[V any] interface { + Encode(value V) string +} + +type Decoder[V any] interface { + Decode(encoded string) (V, error) +} + +type Codec[V any] interface { + Encoder[V] + Decoder[V] +} + +func NewRedis[K any, V any]( + client redis.UniversalClient, + getter Getter[K, V], + keyEncoder func(K) string, + codec Codec[V], + duration time.Duration, +) *Redis[K, V] { + return &Redis[K, V]{ + client: client, + duration: duration, + getter: getter, + keyEncoder: keyEncoder, + codec: codec, + countHit: 0, + countMiss: 0, + } +} + +// Stats returns number of cache hits and misses and can be used to monitor the cache efficiency. +func (c *Redis[K, V]) Stats() (int64, int64) { + return c.countHit, c.countMiss +} + +// Get implements the cache.Cache interface. +func (c *Redis[K, V]) Get(ctx context.Context, key K) (V, error) { + var nothing V + + strKey := c.keyEncoder(key) + + raw, err := c.client.Get(ctx, strKey).Result() + if err == nil { + c.countHit++ + return c.codec.Decode(raw) + } + if err != redis.Nil { + return nothing, err + } + + c.countMiss++ + + item, err := c.getter.Find(ctx, key) + if err != nil { + return nothing, fmt.Errorf("cache: failed to find one: %w", err) + } + + err = c.client.Set(ctx, strKey, c.codec.Encode(item), c.duration).Err() + if err != nil { + return nothing, err + } + + return item, nil +} diff --git a/cache/ttl_cache.go b/cache/ttl_cache.go new file mode 100644 index 0000000000..71998673ca --- /dev/null +++ b/cache/ttl_cache.go @@ -0,0 +1,231 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package cache + +import ( + "context" + "fmt" + "sort" + "sync" + "time" + + "golang.org/x/exp/constraints" +) + +// TTLCache is a generic TTL based cache that stores objects for the specified period. +// The TTLCache has no maximum capacity, so the idea is to store objects for short period. +// The goal of the TTLCache is to reduce database load. +// Every instance of TTLCache has a background routine that purges stale items. +type TTLCache[K comparable, V any] struct { + mx sync.RWMutex + cache map[K]cacheEntry[V] + purgeStop chan struct{} + getter Getter[K, V] + maxAge time.Duration + countHit int64 + countMiss int64 +} + +// ExtendedTTLCache is an extended version of the TTLCache. +type ExtendedTTLCache[K constraints.Ordered, V Identifiable[K]] struct { + TTLCache[K, V] + getter ExtendedGetter[K, V] +} + +type cacheEntry[V any] struct { + added time.Time + data V +} + +// New creates a new TTLCache instance and a background routine +// that periodically purges stale items. +func New[K comparable, V any](getter Getter[K, V], maxAge time.Duration) *TTLCache[K, V] { + c := &TTLCache[K, V]{ + cache: make(map[K]cacheEntry[V]), + purgeStop: make(chan struct{}), + getter: getter, + maxAge: maxAge, + } + + go c.purger() + + return c +} + +// NewExtended creates a new TTLCacheExtended instance and a background routine +// that periodically purges stale items. +func NewExtended[K constraints.Ordered, V Identifiable[K]]( + getter ExtendedGetter[K, V], + maxAge time.Duration, +) *ExtendedTTLCache[K, V] { + c := &ExtendedTTLCache[K, V]{ + TTLCache: TTLCache[K, V]{ + cache: make(map[K]cacheEntry[V]), + purgeStop: make(chan struct{}), + getter: getter, + maxAge: maxAge, + }, + getter: getter, + } + + go c.purger() + + return c +} + +// purger periodically evicts stale items from the Cache. +func (c *TTLCache[K, V]) purger() { + purgeTick := time.NewTicker(time.Minute) + defer purgeTick.Stop() + + for { + select { + case <-c.purgeStop: + return + case now := <-purgeTick.C: + c.mx.Lock() + for id, v := range c.cache { + if now.Sub(v.added) >= c.maxAge { + delete(c.cache, id) + } + } + c.mx.Unlock() + } + } +} + +// Stop stops the internal purger of stale elements. +func (c *TTLCache[K, V]) Stop() { + close(c.purgeStop) +} + +// Stats returns number of cache hits and misses and can be used to monitor the cache efficiency. +func (c *TTLCache[K, V]) Stats() (int64, int64) { + return c.countHit, c.countMiss +} + +func (c *TTLCache[K, V]) fetch(key K, now time.Time) (V, bool) { + c.mx.RLock() + defer c.mx.RUnlock() + + item, ok := c.cache[key] + if !ok || now.Sub(item.added) > c.maxAge { + c.countMiss++ + var nothing V + return nothing, false + } + + c.countHit++ + + // we deliberately don't update the `item.added` timestamp for `now` because + // we want to cache the items only for a short period. + + return item.data, true +} + +// Map returns map with all objects requested through the slice of IDs. 
+func (c *ExtendedTTLCache[K, V]) Map(ctx context.Context, keys []K) (map[K]V, error) { + m := make(map[K]V) + now := time.Now() + + keys = deduplicate(keys) + + // Check what's already available in the cache. + + var idx int + for idx < len(keys) { + key := keys[idx] + + item, ok := c.fetch(key, now) + if !ok { + idx++ + continue + } + + // found in cache: Add to the result map and remove the ID from the list. + m[key] = item + keys[idx] = keys[len(keys)-1] + keys = keys[:len(keys)-1] + } + + if len(keys) == 0 { + return m, nil + } + + // Pull entries from the getter that are not in the cache. + + items, err := c.getter.FindMany(ctx, keys) + if err != nil { + return nil, fmt.Errorf("cache: failed to find many: %w", err) + } + + c.mx.Lock() + defer c.mx.Unlock() + + for _, item := range items { + id := item.Identifier() + m[id] = item + c.cache[id] = cacheEntry[V]{ + added: now, + data: item, + } + } + + return m, nil +} + +// Get returns one object by its ID. +func (c *TTLCache[K, V]) Get(ctx context.Context, key K) (V, error) { + now := time.Now() + var nothing V + + item, ok := c.fetch(key, now) + if ok { + return item, nil + } + + item, err := c.getter.Find(ctx, key) + if err != nil { + return nothing, fmt.Errorf("cache: failed to find one: %w", err) + } + + c.mx.Lock() + c.cache[key] = cacheEntry[V]{ + added: now, + data: item, + } + c.mx.Unlock() + + return item, nil +} + +// deduplicate is a utility function that removes duplicates from slice. +func deduplicate[V constraints.Ordered](slice []V) []V { + if len(slice) <= 1 { + return slice + } + + sort.Slice(slice, func(i, j int) bool { return slice[i] < slice[j] }) + + pointer := 0 + for i := 1; i < len(slice); i++ { + if slice[pointer] != slice[i] { + pointer++ + slice[pointer] = slice[i] + } + } + + return slice[:pointer+1] +} diff --git a/cli/cli.go b/cli/cli.go new file mode 100644 index 0000000000..bd8fdd68d8 --- /dev/null +++ b/cli/cli.go @@ -0,0 +1,34 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package cli + +import ( + "os" + + "github.com/harness/gitness/cli/operations/hooks" + "github.com/harness/gitness/githook" +) + +func GetArguments() []string { + command := os.Args[0] + args := os.Args[1:] + + // in case of githooks, translate the arguments coming from git to work with gitness. + if gitArgs, fromGit := githook.SanitizeArgsForGit(command, args); fromGit { + return append([]string{hooks.ParamHooks}, gitArgs...) + } + + return args +} diff --git a/cli/operations/account/login.go b/cli/operations/account/login.go new file mode 100644 index 0000000000..4ed56892e4 --- /dev/null +++ b/cli/operations/account/login.go @@ -0,0 +1,68 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package account + +import ( + "context" + "time" + + "github.com/harness/gitness/cli/provide" + "github.com/harness/gitness/cli/textui" + "github.com/harness/gitness/internal/api/controller/user" + + "gopkg.in/alecthomas/kingpin.v2" +) + +type loginCommand struct { + server string +} + +func (c *loginCommand) run(*kingpin.ParseContext) error { + ss := provide.NewSession() + + loginIdentifier, password := textui.Credentials() + + ctx, cancel := context.WithTimeout(context.Background(), time.Minute) + defer cancel() + + in := &user.LoginInput{ + LoginIdentifier: loginIdentifier, + Password: password, + } + + ts, err := provide.OpenClient(c.server).Login(ctx, in) + if err != nil { + return err + } + + return ss. + SetURI(c.server). + // login token always has an expiry date + SetExpiresAt(*ts.Token.ExpiresAt). + SetAccessToken(ts.AccessToken). + Store() +} + +// RegisterLogin helper function to register the logout command. +func RegisterLogin(app *kingpin.Application) { + c := &loginCommand{} + + cmd := app.Command("login", "login to the remote server"). + Action(c.run) + + cmd.Arg("server", "server address"). + Default(provide.DefaultServerURI). + StringVar(&c.server) +} diff --git a/cli/operations/account/logout.go b/cli/operations/account/logout.go new file mode 100644 index 0000000000..d687b3c80e --- /dev/null +++ b/cli/operations/account/logout.go @@ -0,0 +1,37 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package account + +import ( + "os" + + "github.com/harness/gitness/cli/provide" + + "gopkg.in/alecthomas/kingpin.v2" +) + +type logoutCommand struct{} + +func (c *logoutCommand) run(*kingpin.ParseContext) error { + return os.Remove(provide.Session().Path()) +} + +// RegisterLogout helper function to register the logout command. +func RegisterLogout(app *kingpin.Application) { + c := &logoutCommand{} + + app.Command("logout", "logout from the remote server"). + Action(c.run) +} diff --git a/cli/operations/account/register.go b/cli/operations/account/register.go new file mode 100644 index 0000000000..c6244821a5 --- /dev/null +++ b/cli/operations/account/register.go @@ -0,0 +1,78 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package account + +import ( + "context" + "time" + + "github.com/harness/gitness/cli/provide" + "github.com/harness/gitness/cli/session" + "github.com/harness/gitness/cli/textui" + "github.com/harness/gitness/internal/api/controller/user" + + "gopkg.in/alecthomas/kingpin.v2" +) + +type Session interface { + SetURI(uri string) session.Session + SetExpiresAt(expiresAt int64) session.Session + SetAccessToken(token string) session.Session + Path() string + Store() error +} + +type registerCommand struct { + server string +} + +func (c *registerCommand) run(*kingpin.ParseContext) error { + ss := provide.NewSession() + + uid, displayName, email, password := textui.Registration() + ctx, cancel := context.WithTimeout(context.Background(), time.Minute) + defer cancel() + + input := &user.RegisterInput{ + UID: uid, + Email: email, + DisplayName: displayName, + Password: password, + } + + ts, err := provide.OpenClient(c.server).Register(ctx, input) + if err != nil { + return err + } + + return ss. + SetURI(c.server). + // register token always has an expiry date + SetExpiresAt(*ts.Token.ExpiresAt). + SetAccessToken(ts.AccessToken). + Store() +} + +// RegisterRegister helper function to register the register command. +func RegisterRegister(app *kingpin.Application) { + c := ®isterCommand{} + + cmd := app.Command("register", "register a user"). + Action(c.run) + + cmd.Arg("server", "server address"). + Default(provide.DefaultServerURI). + StringVar(&c.server) +} diff --git a/cli/operations/hooks/hooks.go b/cli/operations/hooks/hooks.go new file mode 100644 index 0000000000..a288c5da67 --- /dev/null +++ b/cli/operations/hooks/hooks.go @@ -0,0 +1,32 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package hooks + +import ( + "github.com/harness/gitness/githook" + gitnessgithook "github.com/harness/gitness/internal/githook" + + "gopkg.in/alecthomas/kingpin.v2" +) + +const ( + // ParamHooks defines the parameter for the git hooks sub-commands. + ParamHooks = "hooks" +) + +func Register(app *kingpin.Application) { + subCmd := app.Command(ParamHooks, "manage git server hooks") + githook.RegisterAll(subCmd, gitnessgithook.LoadFromEnvironment) +} diff --git a/cli/operations/migrate/current.go b/cli/operations/migrate/current.go new file mode 100644 index 0000000000..9214db0005 --- /dev/null +++ b/cli/operations/migrate/current.go @@ -0,0 +1,59 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package migrate + +import ( + "context" + "fmt" + "time" + + "github.com/harness/gitness/internal/store/database/migrate" + + "gopkg.in/alecthomas/kingpin.v2" +) + +type commandCurrent struct { + envfile string +} + +func (c *commandCurrent) run(*kingpin.ParseContext) error { + ctx, cancel := context.WithTimeout(context.Background(), time.Minute) + defer cancel() + + db, err := getDB(ctx, c.envfile) + if err != nil { + return err + } + + version, err := migrate.Current(ctx, db) + if err != nil { + return err + } + + fmt.Println(version) + + return nil +} + +func registerCurrent(app *kingpin.CmdClause) { + c := &commandCurrent{} + + cmd := app.Command("current", "display the current version of the database"). + Action(c.run) + + cmd.Arg("envfile", "load the environment variable file"). + Default(""). + StringVar(&c.envfile) +} diff --git a/cli/operations/migrate/migrate.go b/cli/operations/migrate/migrate.go new file mode 100644 index 0000000000..53f14adbd6 --- /dev/null +++ b/cli/operations/migrate/migrate.go @@ -0,0 +1,50 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package migrate + +import ( + "context" + "fmt" + + "github.com/harness/gitness/cli/server" + "github.com/harness/gitness/store/database" + + "github.com/jmoiron/sqlx" + "github.com/joho/godotenv" + "gopkg.in/alecthomas/kingpin.v2" +) + +func getDB(ctx context.Context, envfile string) (*sqlx.DB, error) { + _ = godotenv.Load(envfile) + + config, err := server.LoadConfig() + if err != nil { + return nil, fmt.Errorf("failed to load configuration: %w", err) + } + + db, err := database.Connect(ctx, config.Database.Driver, config.Database.Datasource) + if err != nil { + return nil, fmt.Errorf("failed to create database handle: %w", err) + } + + return db, nil +} + +// Register the server command. +func Register(app *kingpin.Application) { + cmd := app.Command("migrate", "database migration tool") + registerCurrent(cmd) + registerTo(cmd) +} diff --git a/cli/operations/migrate/to.go b/cli/operations/migrate/to.go new file mode 100644 index 0000000000..35081e7b10 --- /dev/null +++ b/cli/operations/migrate/to.go @@ -0,0 +1,56 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package migrate + +import ( + "context" + "time" + + "github.com/harness/gitness/internal/store/database/migrate" + + "gopkg.in/alecthomas/kingpin.v2" +) + +type commandTo struct { + envfile string + version string +} + +func (c *commandTo) run(k *kingpin.ParseContext) error { + ctx, cancel := context.WithTimeout(context.Background(), time.Minute) + defer cancel() + + db, err := getDB(ctx, c.envfile) + if err != nil { + return err + } + + return migrate.To(ctx, db, c.version) +} + +func registerTo(app *kingpin.CmdClause) { + c := &commandTo{} + + cmd := app.Command("to", "migrates the database to the provided version"). + Action(c.run) + + cmd.Arg("version", "database version to migrate to"). + Required(). + StringVar(&c.version) + + cmd.Arg("envfile", "load the environment variable file"). + Default(""). + StringVar(&c.envfile) +} diff --git a/cli/operations/user/create_pat.go b/cli/operations/user/create_pat.go new file mode 100644 index 0000000000..62df54e916 --- /dev/null +++ b/cli/operations/user/create_pat.go @@ -0,0 +1,97 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package user + +import ( + "context" + "encoding/json" + "os" + "text/template" + "time" + + "github.com/harness/gitness/cli/provide" + "github.com/harness/gitness/internal/api/controller/user" + + "github.com/drone/funcmap" + "github.com/gotidy/ptr" + "gopkg.in/alecthomas/kingpin.v2" +) + +const tokenTmpl = ` +principalID: {{ .Token.PrincipalID }} +uid: {{ .Token.UID }} +expiresAt: {{ .Token.ExpiresAt }} +token: {{ .AccessToken }} +` //#nosec G101 + +type createPATCommand struct { + uid string + lifetimeInS int64 + + json bool + tmpl string +} + +func (c *createPATCommand) run(*kingpin.ParseContext) error { + ctx, cancel := context.WithTimeout(context.Background(), time.Minute) + defer cancel() + + var lifeTime *time.Duration + if c.lifetimeInS > 0 { + lifeTime = ptr.Duration(time.Duration(int64(time.Second) * c.lifetimeInS)) + } + + in := user.CreateTokenInput{ + UID: c.uid, + Lifetime: lifeTime, + } + + tokenResp, err := provide.Client().UserCreatePAT(ctx, in) + if err != nil { + return err + } + if c.json { + enc := json.NewEncoder(os.Stdout) + enc.SetIndent("", " ") + return enc.Encode(tokenResp) + } + tmpl, err := template.New("_").Funcs(funcmap.Funcs).Parse(c.tmpl) + if err != nil { + return err + } + return tmpl.Execute(os.Stdout, tokenResp) +} + +// Register the command. +func registerCreatePAT(app *kingpin.CmdClause) { + c := &createPATCommand{} + + cmd := app.Command("pat", "create personal access token"). 
+ Action(c.run) + + cmd.Arg("uid", "the uid of the token"). + Required().StringVar(&c.uid) + + cmd.Arg("lifetime", "the lifetime of the token in seconds"). + Int64Var(&c.lifetimeInS) + + cmd.Flag("json", "json encode the output"). + BoolVar(&c.json) + + cmd.Flag("format", "format the output using a Go template"). + Default(tokenTmpl). + Hidden(). + StringVar(&c.tmpl) +} diff --git a/cli/operations/user/self.go b/cli/operations/user/self.go new file mode 100644 index 0000000000..eb12043afe --- /dev/null +++ b/cli/operations/user/self.go @@ -0,0 +1,76 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package user + +import ( + "context" + "encoding/json" + "os" + "text/template" + "time" + + "github.com/harness/gitness/cli/provide" + + "github.com/drone/funcmap" + "gopkg.in/alecthomas/kingpin.v2" +) + +const userTmpl = ` +uid: {{ .UID }} +name: {{ .DisplayName }} +email: {{ .Email }} +admin: {{ .Admin }} +` + +type command struct { + tmpl string + json bool +} + +func (c *command) run(*kingpin.ParseContext) error { + ctx, cancel := context.WithTimeout(context.Background(), time.Minute) + defer cancel() + + user, err := provide.Client().Self(ctx) + if err != nil { + return err + } + if c.json { + enc := json.NewEncoder(os.Stdout) + enc.SetIndent("", " ") + return enc.Encode(user) + } + tmpl, err := template.New("_").Funcs(funcmap.Funcs).Parse(c.tmpl) + if err != nil { + return err + } + return tmpl.Execute(os.Stdout, user) +} + +// Register the command. +func registerSelf(app *kingpin.CmdClause) { + c := &command{} + + cmd := app.Command("self", "display authenticated user"). + Action(c.run) + + cmd.Flag("json", "json encode the output"). + BoolVar(&c.json) + + cmd.Flag("format", "format the output using a Go template"). + Default(userTmpl). + Hidden(). + StringVar(&c.tmpl) +} diff --git a/cli/operations/user/users.go b/cli/operations/user/users.go new file mode 100644 index 0000000000..596343448f --- /dev/null +++ b/cli/operations/user/users.go @@ -0,0 +1,26 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package user + +import ( + "gopkg.in/alecthomas/kingpin.v2" +) + +// Register the command. 
+func Register(app *kingpin.Application) { + cmd := app.Command("user", "manage currently logged-in user") + registerSelf(cmd) + registerCreatePAT(cmd) +} diff --git a/cli/operations/users/create.go b/cli/operations/users/create.go new file mode 100644 index 0000000000..52c9152e5f --- /dev/null +++ b/cli/operations/users/create.go @@ -0,0 +1,86 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package users + +import ( + "context" + "encoding/json" + "os" + "text/template" + "time" + + "github.com/harness/gitness/cli/provide" + "github.com/harness/gitness/cli/textui" + "github.com/harness/gitness/types" + + "github.com/drone/funcmap" + "gopkg.in/alecthomas/kingpin.v2" +) + +type createCommand struct { + email string + admin bool + tmpl string + json bool +} + +func (c *createCommand) run(*kingpin.ParseContext) error { + in := &types.User{ + Admin: c.admin, + Email: c.email, + Password: textui.Password(), + } + + ctx, cancel := context.WithTimeout(context.Background(), time.Minute) + defer cancel() + + user, err := provide.Client().UserCreate(ctx, in) + if err != nil { + return err + } + if c.json { + enc := json.NewEncoder(os.Stdout) + enc.SetIndent("", " ") + return enc.Encode(user) + } + tmpl, err := template.New("_").Funcs(funcmap.Funcs).Parse(c.tmpl) + if err != nil { + return err + } + return tmpl.Execute(os.Stdout, user) +} + +// helper function registers the user create command. +func registerCreate(app *kingpin.CmdClause) { + c := &createCommand{} + + cmd := app.Command("create", "create a user"). + Action(c.run) + + cmd.Arg("email", "user email"). + Required(). + StringVar(&c.email) + + cmd.Arg("admin", "user is admin"). + BoolVar(&c.admin) + + cmd.Flag("json", "json encode the output"). + BoolVar(&c.json) + + cmd.Flag("format", "format the output using a Go template"). + Default(userTmpl). + Hidden(). + StringVar(&c.tmpl) +} diff --git a/cli/operations/users/delete.go b/cli/operations/users/delete.go new file mode 100644 index 0000000000..e5c5039cb1 --- /dev/null +++ b/cli/operations/users/delete.go @@ -0,0 +1,47 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
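The `users create` command above reads the password interactively via textui.Password() and then prints the created user either as indented JSON or through a Go text/template (the real command additionally registers funcmap.Funcs on the template). Below is a minimal, self-contained sketch of that shared output pattern; the User struct and template are illustrative stand-ins, not the real types.User or userTmpl.

```go
package main

import (
	"encoding/json"
	"os"
	"text/template"
)

// User is a stand-in for types.User, limited to the fields the CLI templates print.
type User struct {
	ID    int64  `json:"id"`
	Email string `json:"email"`
	Admin bool   `json:"admin"`
}

// userTmpl mirrors the shape of the template used by the users commands.
const userTmpl = `
id: {{ .ID }}
email: {{ .Email }}
admin: {{ .Admin }}
`

// render prints u either as indented JSON or via the text/template, the same
// two output paths the create/find/ls/update commands share.
func render(u User, asJSON bool) error {
	if asJSON {
		enc := json.NewEncoder(os.Stdout)
		enc.SetIndent("", "  ")
		return enc.Encode(u)
	}
	tmpl, err := template.New("_").Parse(userTmpl)
	if err != nil {
		return err
	}
	return tmpl.Execute(os.Stdout, u)
}

func main() {
	_ = render(User{ID: 1, Email: "admin@example.com", Admin: true}, false)
}
```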
+ +package users + +import ( + "context" + "time" + + "github.com/harness/gitness/cli/provide" + + "gopkg.in/alecthomas/kingpin.v2" +) + +type deleteCommand struct { + email string +} + +func (c *deleteCommand) run(*kingpin.ParseContext) error { + ctx, cancel := context.WithTimeout(context.Background(), time.Minute) + defer cancel() + + return provide.Client().UserDelete(ctx, c.email) +} + +// helper function registers the user delete command. +func registerDelete(app *kingpin.CmdClause) { + c := &deleteCommand{} + + cmd := app.Command("delete", "delete a user"). + Action(c.run) + + cmd.Arg("id or email", "user id or email"). + Required(). + StringVar(&c.email) +} diff --git a/cli/operations/users/find.go b/cli/operations/users/find.go new file mode 100644 index 0000000000..d39109faf8 --- /dev/null +++ b/cli/operations/users/find.go @@ -0,0 +1,74 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package users + +import ( + "context" + "encoding/json" + "os" + "text/template" + "time" + + "github.com/harness/gitness/cli/provide" + + "github.com/drone/funcmap" + "gopkg.in/alecthomas/kingpin.v2" +) + +type findCommand struct { + email string + tmpl string + json bool +} + +func (c *findCommand) run(*kingpin.ParseContext) error { + ctx, cancel := context.WithTimeout(context.Background(), time.Minute) + defer cancel() + + user, err := provide.Client().User(ctx, c.email) + if err != nil { + return err + } + if c.json { + enc := json.NewEncoder(os.Stdout) + enc.SetIndent("", " ") + return enc.Encode(user) + } + tmpl, err := template.New("_").Funcs(funcmap.Funcs).Parse(c.tmpl + "\n") + if err != nil { + return err + } + return tmpl.Execute(os.Stdout, user) +} + +// helper function registers the user find command. +func registerFind(app *kingpin.CmdClause) { + c := &findCommand{} + + cmd := app.Command("find", "display user details"). + Action(c.run) + + cmd.Arg("id or email", "user id or email"). + Required(). + StringVar(&c.email) + + cmd.Flag("json", "json encode the output"). + BoolVar(&c.json) + + cmd.Flag("format", "format the output using a Go template"). + Default(userTmpl). + Hidden(). + StringVar(&c.tmpl) +} diff --git a/cli/operations/users/list.go b/cli/operations/users/list.go new file mode 100644 index 0000000000..82762e096f --- /dev/null +++ b/cli/operations/users/list.go @@ -0,0 +1,92 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
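The `users find` command above resolves an account by ID or email through provide.Client().User(...). The same lookup can be issued directly against the HTTP client this patch adds in client/client.go (NewToken and HTTPClient.User); a hedged sketch follows, where the server URL and token are placeholders rather than values from the patch.

```go
package main

import (
	"context"
	"fmt"
	"time"

	"github.com/harness/gitness/client"
)

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
	defer cancel()

	// Placeholder address and token; in the CLI these come from the stored session.
	c := client.NewToken("http://localhost:3000", "<personal-access-token>")

	u, err := c.User(ctx, "admin@example.com")
	if err != nil {
		fmt.Println("lookup failed:", err)
		return
	}
	fmt.Printf("found %s (admin: %v)\n", u.Email, u.Admin)
}
```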
+ +package users + +import ( + "context" + "encoding/json" + "os" + "text/template" + "time" + + "github.com/harness/gitness/cli/provide" + "github.com/harness/gitness/types" + + "github.com/drone/funcmap" + "gopkg.in/alecthomas/kingpin.v2" +) + +const userTmpl = ` +id: {{ .ID }} +email: {{ .Email }} +admin: {{ .Admin }} +` + +type listCommand struct { + tmpl string + page int + size int + json bool +} + +func (c *listCommand) run(*kingpin.ParseContext) error { + ctx, cancel := context.WithTimeout(context.Background(), time.Minute) + defer cancel() + + list, err := provide.Client().UserList(ctx, types.UserFilter{ + Size: c.size, + Page: c.page, + }) + if err != nil { + return err + } + tmpl, err := template.New("_").Funcs(funcmap.Funcs).Parse(c.tmpl + "\n") + if err != nil { + return err + } + if c.json { + enc := json.NewEncoder(os.Stdout) + enc.SetIndent("", " ") + return enc.Encode(list) + } + for _, item := range list { + if err = tmpl.Execute(os.Stdout, item); err != nil { + return err + } + } + return nil +} + +// helper function registers the user list command. +func registerList(app *kingpin.CmdClause) { + c := &listCommand{} + + cmd := app.Command("ls", "display a list of users"). + Action(c.run) + + cmd.Flag("page", "page number"). + IntVar(&c.page) + + cmd.Flag("per-page", "page size"). + IntVar(&c.size) + + cmd.Flag("json", "json encode the output"). + BoolVar(&c.json) + + cmd.Flag("format", "format the output using a Go template"). + Default(userTmpl). + Hidden(). + StringVar(&c.tmpl) +} diff --git a/cli/operations/users/update.go b/cli/operations/users/update.go new file mode 100644 index 0000000000..9131f38b1a --- /dev/null +++ b/cli/operations/users/update.go @@ -0,0 +1,118 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
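The `users ls` command above pages through accounts with types.UserFilter; client.UserList (added later in this patch) turns Page and Size into the ?page and &limit query parameters. Here is a sketch of a pagination loop over that same call; the URL and token are placeholders, and the stop-on-short-page heuristic is my own, not part of the API.

```go
package main

import (
	"context"
	"fmt"
	"time"

	"github.com/harness/gitness/client"
	"github.com/harness/gitness/types"
)

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
	defer cancel()

	c := client.NewToken("http://localhost:3000", "<personal-access-token>")

	const pageSize = 50
	for page := 1; ; page++ {
		users, err := c.UserList(ctx, types.UserFilter{Page: page, Size: pageSize})
		if err != nil {
			fmt.Println("list failed:", err)
			return
		}
		for _, u := range users {
			fmt.Printf("%d\t%s\tadmin=%v\n", u.ID, u.Email, u.Admin)
		}
		// A short page means there is nothing left to fetch (illustrative heuristic).
		if len(users) < pageSize {
			break
		}
	}
}
```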
+ +package users + +import ( + "context" + "encoding/json" + "fmt" + "os" + "text/template" + "time" + + "github.com/harness/gitness/cli/provide" + "github.com/harness/gitness/types" + + "github.com/dchest/uniuri" + "github.com/drone/funcmap" + "github.com/gotidy/ptr" + "gopkg.in/alecthomas/kingpin.v2" +) + +type updateCommand struct { + id string + email string + admin bool + demote bool + passgen bool + pass string + tmpl string + json bool +} + +func (c *updateCommand) run(*kingpin.ParseContext) error { + in := new(types.UserInput) + if v := c.email; v != "" { + in.Email = ptr.String(v) + } + if v := c.pass; v != "" { + in.Password = ptr.String(v) + } + if v := c.admin; v { + in.Admin = ptr.Bool(v) + } + if v := c.demote; v { + in.Admin = ptr.Bool(false) + } + if c.passgen { + const maxRandomChars = 8 + v := uniuri.NewLen(maxRandomChars) + in.Password = ptr.String(v) + fmt.Printf("generated temporary password: %s\n", v) + } + + ctx, cancel := context.WithTimeout(context.Background(), time.Minute) + defer cancel() + + user, err := provide.Client().UserUpdate(ctx, c.id, in) + if err != nil { + return err + } + if c.json { + enc := json.NewEncoder(os.Stdout) + enc.SetIndent("", " ") + return enc.Encode(user) + } + tmpl, err := template.New("_").Funcs(funcmap.Funcs).Parse(c.tmpl) + if err != nil { + return err + } + return tmpl.Execute(os.Stdout, user) +} + +// helper function registers the user update command. +func registerUpdate(app *kingpin.CmdClause) { + c := &updateCommand{} + + cmd := app.Command("update", "update a user"). + Action(c.run) + + cmd.Arg("id or email", "user id or email"). + Required(). + StringVar(&c.id) + + cmd.Flag("email", "update user email"). + StringVar(&c.email) + + cmd.Flag("password", "update user password"). + StringVar(&c.pass) + + cmd.Flag("password-gen", "generate and update user password"). + BoolVar(&c.passgen) + + cmd.Flag("promote", "promote user to admin"). + BoolVar(&c.admin) + + cmd.Flag("demote", "demote user from admin"). + BoolVar(&c.demote) + + cmd.Flag("json", "json encode the output"). + BoolVar(&c.json) + + cmd.Flag("format", "format the output using a Go template"). + Default(userTmpl). + Hidden(). + StringVar(&c.tmpl) +} diff --git a/cli/operations/users/users.go b/cli/operations/users/users.go new file mode 100644 index 0000000000..dbf4179377 --- /dev/null +++ b/cli/operations/users/users.go @@ -0,0 +1,29 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package users + +import ( + "gopkg.in/alecthomas/kingpin.v2" +) + +// Register the command. +func Register(app *kingpin.Application) { + cmd := app.Command("users", "manage users") + registerFind(cmd) + registerList(cmd) + registerCreate(cmd) + registerUpdate(cmd) + registerDelete(cmd) +} diff --git a/cli/provide/provider.go b/cli/provide/provider.go new file mode 100644 index 0000000000..38dbcef223 --- /dev/null +++ b/cli/provide/provider.go @@ -0,0 +1,101 @@ +// Copyright 2023 Harness, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package provide + +import ( + "errors" + "io/fs" + "os" + "path/filepath" + + "github.com/harness/gitness/cli/session" + "github.com/harness/gitness/client" + + "github.com/adrg/xdg" + "github.com/rs/zerolog/log" +) + +const DefaultServerURI = "http://localhost:3000" + +func NewSession() session.Session { + ss, err := newSession() + if err != nil { + log.Err(err).Msg("failed to get active session") + os.Exit(1) + } + + return ss +} + +func Session() session.Session { + ss, err := loadSession() + if err != nil { + log.Err(err).Msg("failed to get active session") + os.Exit(1) + } + + return ss +} + +func Client() client.Client { + return newClient(Session()) +} + +func OpenClient(uri string) client.Client { + return newClient(session.Session{URI: uri}) +} + +func sessionPath() (string, error) { + return xdg.ConfigFile(filepath.Join("app", "config.json")) +} + +func newSession() (session.Session, error) { + path, err := sessionPath() + if err != nil && !errors.Is(err, fs.ErrNotExist) { + return session.Session{}, err + } + + return session.New(path).SetURI(DefaultServerURI), nil +} + +func loadSession() (session.Session, error) { + path, err := sessionPath() + if err != nil { + if errors.Is(err, fs.ErrNotExist) { + return session.Session{URI: DefaultServerURI}, nil + } + return session.Session{}, err + } + + ss, err := session.LoadFromPath(path) + if err != nil { + return session.Session{}, err + } + + if ss.URI == "" { + ss = ss.SetURI(DefaultServerURI) + } + + return ss, nil +} + +func newClient(ss session.Session) client.Client { + httpClient := client.NewToken(ss.URI, ss.AccessToken) + if os.Getenv("DEBUG") == "true" { + httpClient.SetDebug(true) + } + + return httpClient +} diff --git a/cli/server/config.go b/cli/server/config.go new file mode 100644 index 0000000000..cef6819fa5 --- /dev/null +++ b/cli/server/config.go @@ -0,0 +1,195 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
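provider.go above resolves the session file via xdg.ConfigFile and falls back to DefaultServerURI when no session has been stored yet; the Session type itself is added further down in cli/session/session.go. The sketch below round-trips a session through Store and LoadFromPath; the temp-file path and token are made up, and ExpiresAt follows the Unix-seconds comparison used by LoadFromPath.

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
	"time"

	"github.com/harness/gitness/cli/session"
)

func main() {
	// Illustrative location only; the CLI derives the real path from xdg.ConfigFile.
	path := filepath.Join(os.TempDir(), "gitness-session.json")

	ss := session.New(path).
		SetURI("http://localhost:3000").
		SetAccessToken("<access-token>").
		SetExpiresAt(time.Now().Add(24 * time.Hour).Unix())

	if err := ss.Store(); err != nil {
		fmt.Println("store failed:", err)
		return
	}

	loaded, err := session.LoadFromPath(path)
	if err != nil {
		// LoadFromPath also returns ErrTokenExpired once ExpiresAt has passed.
		fmt.Println("load failed:", err)
		return
	}
	fmt.Println("loaded session for", loaded.URI)
}
```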
+ +package server + +import ( + "fmt" + "os" + "path/filepath" + "unicode" + + "github.com/harness/gitness/events" + "github.com/harness/gitness/gitrpc" + "github.com/harness/gitness/gitrpc/server" + "github.com/harness/gitness/internal/services/trigger" + "github.com/harness/gitness/internal/services/webhook" + "github.com/harness/gitness/lock" + "github.com/harness/gitness/store/database" + "github.com/harness/gitness/types" + + "github.com/kelseyhightower/envconfig" + "golang.org/x/text/runes" + "golang.org/x/text/transform" + "golang.org/x/text/unicode/norm" +) + +// LoadConfig returns the system configuration from the +// host environment. +func LoadConfig() (*types.Config, error) { + config := new(types.Config) + err := envconfig.Process("", config) + if err != nil { + return nil, err + } + + config.InstanceID, err = getSanitizedMachineName() + if err != nil { + return nil, fmt.Errorf("unable to ensure that instance ID is set in config: %w", err) + } + + return config, nil +} + +// getSanitizedMachineName gets the name of the machine and returns it in sanitized format. +func getSanitizedMachineName() (string, error) { + // use the hostname as default id of the instance + hostName, err := os.Hostname() + if err != nil { + return "", err + } + + // Always cast to lower and remove all unwanted chars + // NOTE: this could theoretically lead to overlaps, then it should be passed explicitly + // NOTE: for k8s names/ids below modifications are all noops + // https://kubernetes.io/docs/concepts/overview/working-with-objects/names/ + + // The following code will: + // * remove invalid runes + // * remove diacritical marks (ie "smörgåsbord" to "smorgasbord") + // * lowercase A-Z to a-z + // * leave only a-z, 0-9, '-', '.' and replace everything else with '_' + hostName, _, err = transform.String( + transform.Chain( + norm.NFD, + runes.ReplaceIllFormed(), + runes.Remove(runes.In(unicode.Mn)), + runes.Map(func(r rune) rune { + switch { + case 'A' <= r && r <= 'Z': + return r + 32 + case 'a' <= r && r <= 'z': + return r + case '0' <= r && r <= '9': + return r + case r == '-', r == '.': + return r + default: + return '_' + } + }), + norm.NFC), + hostName) + if err != nil { + return "", err + } + + return hostName, nil +} + +// ProvideDatabaseConfig loads the database config from the main config. +func ProvideDatabaseConfig(config *types.Config) database.Config { + return database.Config{ + Driver: config.Database.Driver, + Datasource: config.Database.Datasource, + } +} + +// ProvideGitRPCServerConfig loads the gitrpc server config from the environment. +// It backfills certain config elements to work with cmdone. +func ProvideGitRPCServerConfig() (server.Config, error) { + config := server.Config{} + err := envconfig.Process("", &config) + if err != nil { + return server.Config{}, fmt.Errorf("failed to load gitrpc server config: %w", err) + } + if config.GitHookPath == "" { + var executablePath string + executablePath, err = os.Executable() + if err != nil { + return server.Config{}, fmt.Errorf("failed to get path of current executable: %w", err) + } + + config.GitHookPath = executablePath + } + if config.GitRoot == "" { + var homedir string + homedir, err = os.UserHomeDir() + if err != nil { + return server.Config{}, err + } + + config.GitRoot = filepath.Join(homedir, ".gitrpc") + } + + return config, nil +} + +// ProvideGitRPCClientConfig loads the gitrpc client config from the environment. 
+func ProvideGitRPCClientConfig() (gitrpc.Config, error) { + config := gitrpc.Config{} + err := envconfig.Process("", &config) + if err != nil { + return gitrpc.Config{}, fmt.Errorf("failed to load gitrpc client config: %w", err) + } + + return config, nil +} + +// ProvideEventsConfig loads the events config from the environment. +func ProvideEventsConfig() (events.Config, error) { + config := events.Config{} + err := envconfig.Process("", &config) + if err != nil { + return events.Config{}, fmt.Errorf("failed to load events config: %w", err) + } + + return config, nil +} + +// ProvideWebhookConfig loads the webhook service config from the main config. +func ProvideWebhookConfig(config *types.Config) webhook.Config { + return webhook.Config{ + UserAgentIdentity: config.Webhook.UserAgentIdentity, + HeaderIdentity: config.Webhook.HeaderIdentity, + EventReaderName: config.InstanceID, + Concurrency: config.Webhook.Concurrency, + MaxRetries: config.Webhook.MaxRetries, + AllowPrivateNetwork: config.Webhook.AllowPrivateNetwork, + AllowLoopback: config.Webhook.AllowLoopback, + } +} + +// ProvideTriggerConfig loads the trigger service config from the main config. +func ProvideTriggerConfig(config *types.Config) trigger.Config { + return trigger.Config{ + EventReaderName: config.InstanceID, + Concurrency: config.Webhook.Concurrency, + MaxRetries: config.Webhook.MaxRetries, + } +} + +// ProvideLockConfig generates the `lock` package config from the gitness config. +func ProvideLockConfig(config *types.Config) lock.Config { + return lock.Config{ + App: config.Lock.AppNamespace, + Namespace: config.Lock.DefaultNamespace, + Provider: lock.Provider(config.Lock.Provider), + Expiry: config.Lock.Expiry, + Tries: config.Lock.Tries, + RetryDelay: config.Lock.RetryDelay, + DriftFactor: config.Lock.DriftFactor, + TimeoutFactor: config.Lock.TimeoutFactor, + } +} diff --git a/cli/server/redis.go b/cli/server/redis.go new file mode 100644 index 0000000000..b1a709b841 --- /dev/null +++ b/cli/server/redis.go @@ -0,0 +1,54 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package server + +import ( + "strings" + + "github.com/harness/gitness/types" + + "github.com/go-redis/redis/v8" +) + +// ProvideRedis provides a redis client based on the configuration. 
+// TODO: add support for TLS +func ProvideRedis(config *types.Config) (redis.UniversalClient, error) { + if config.Redis.SentinelMode { + addrs := strings.Split(config.Redis.SentinelEndpoint, ",") + + failoverOptions := &redis.FailoverOptions{ + MasterName: config.Redis.SentinelMaster, + SentinelAddrs: addrs, + MaxRetries: config.Redis.MaxRetries, + MinIdleConns: config.Redis.MinIdleConnections, + } + if config.Redis.Password != "" { + failoverOptions.Password = config.Redis.Password + } + return redis.NewFailoverClient(failoverOptions), nil + } + + options := &redis.Options{ + Addr: config.Redis.Endpoint, + MaxRetries: config.Redis.MaxRetries, + MinIdleConns: config.Redis.MinIdleConnections, + } + + if config.Redis.Password != "" { + options.Password = config.Redis.Password + } + + return redis.NewClient(options), nil +} diff --git a/cli/server/server.go b/cli/server/server.go new file mode 100644 index 0000000000..e7430c7262 --- /dev/null +++ b/cli/server/server.go @@ -0,0 +1,229 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package server + +import ( + "context" + "fmt" + "os" + "os/signal" + "syscall" + "time" + + "github.com/harness/gitness/profiler" + "github.com/harness/gitness/types" + "github.com/harness/gitness/version" + + "github.com/drone/runner-go/logger" + "github.com/joho/godotenv" + "github.com/mattn/go-isatty" + "github.com/rs/zerolog" + "github.com/rs/zerolog/log" + "github.com/sirupsen/logrus" + "golang.org/x/sync/errgroup" + "gopkg.in/alecthomas/kingpin.v2" +) + +type command struct { + envfile string + enableGitRPC bool + enableCI bool + initializer func(context.Context, *types.Config) (*System, error) +} + +func (c *command) run(*kingpin.ParseContext) error { + // Create context that listens for the interrupt signal from the OS. + ctx, stop := signal.NotifyContext(context.Background(), syscall.SIGINT, syscall.SIGTERM) + defer stop() + + // load environment variables from file. + // no error handling needed when file is not present + _ = godotenv.Load(c.envfile) + + // create the system configuration store by loading + // data from the environment. 
+ config, err := LoadConfig() + if err != nil { + return fmt.Errorf("encountered an error while loading configuration: %w", err) + } + + // configure the log level + SetupLogger(config) + + // configure profiler + SetupProfiler(config) + + // add logger to context + log := log.Logger.With().Logger() + ctx = log.WithContext(ctx) + + // initialize system + system, err := c.initializer(ctx, config) + if err != nil { + return fmt.Errorf("encountered an error while wiring the system: %w", err) + } + + // bootstrap the system + err = system.bootstrap(ctx) + if err != nil { + return fmt.Errorf("encountered an error while bootstrapping the system: %w", err) + } + + // gCtx is canceled if any of the following occurs: + // - any go routine launched with g encounters an error + // - ctx is canceled + g, gCtx := errgroup.WithContext(ctx) + + g.Go(func() error { + // initialize metric collector + if system.services.MetricCollector != nil { + system.services.MetricCollector.Register(gCtx) + } + + return system.services.JobScheduler.Run(gCtx) + }) + + // start server + gHTTP, shutdownHTTP := system.server.ListenAndServe() + g.Go(gHTTP.Wait) + if c.enableCI { + // start populating plugins + g.Go(func() error { + err := system.pluginManager.Populate(ctx) + if err != nil { + log.Error().Err(err).Msg("could not populate plugins") + } + return nil + }) + // start poller for CI build executions. + g.Go(func() error { + log := logrus.New() + log.Out = os.Stdout + log.Level = logrus.DebugLevel // print all debug logs in common runner code. + ctx = logger.WithContext(ctx, logger.Logrus(log.WithContext(ctx))) + system.poller.Poll(ctx, config.CI.ParallelWorkers) + return nil + }) + } + + log.Info(). + Str("port", config.Server.HTTP.Bind). + Str("revision", version.GitCommit). + Str("repository", version.GitRepository). + Stringer("version", version.Version). + Msg("server started") + + if c.enableGitRPC { + // start grpc server + g.Go(system.gitRPCServer.Start) + log.Info().Msg("gitrpc server started") + + // run the gitrpc cron jobs + g.Go(func() error { + return system.gitRPCCronMngr.Run(ctx) + }) + log.Info().Msg("gitrpc cron manager subroutine started") + } + + // wait until the error group context is done + <-gCtx.Done() + + // restore default behavior on the interrupt signal and notify user of shutdown. + stop() + log.Info().Msg("shutting down gracefully (press Ctrl+C again to force)") + + // shutdown servers gracefully + shutdownCtx, cancel := context.WithTimeout(context.Background(), config.GracefulShutdownTime) + defer cancel() + + if sErr := shutdownHTTP(shutdownCtx); sErr != nil { + log.Err(sErr).Msg("failed to shutdown http server gracefully") + } + + if c.enableGitRPC { + if rpcErr := system.gitRPCServer.Stop(); rpcErr != nil { + log.Err(rpcErr).Msg("failed to shutdown grpc server gracefully") + } + } + + system.services.JobScheduler.WaitJobsDone(shutdownCtx) + + log.Info().Msg("wait for subroutines to complete") + err = g.Wait() + + return err +} + +// SetupLogger configures the global logger from the loaded configuration. 
+func SetupLogger(config *types.Config) { + // configure the log level + switch { + case config.Trace: + zerolog.SetGlobalLevel(zerolog.TraceLevel) + case config.Debug: + zerolog.SetGlobalLevel(zerolog.DebugLevel) + default: + zerolog.SetGlobalLevel(zerolog.InfoLevel) + } + + // configure time format (ignored if running in terminal) + zerolog.TimeFieldFormat = time.RFC3339Nano + + // if the terminal is a tty we should output the + // logs in pretty format + if isatty.IsTerminal(os.Stdout.Fd()) { + log.Logger = log.Output( + zerolog.ConsoleWriter{ + Out: os.Stderr, + NoColor: false, + TimeFormat: "15:04:05.999", + }, + ) + } +} + +func SetupProfiler(config *types.Config) { + profilerType, parsed := profiler.ParseType(config.Profiler.Type) + if !parsed { + log.Info().Msgf("No valid profiler so skipping profiling ['%s']", config.Profiler.Type) + return + } + + gitnessProfiler, _ := profiler.New(profilerType) + gitnessProfiler.StartProfiling(config.Profiler.ServiceName, version.Version.String()) +} + +// Register the server command. +func Register(app *kingpin.Application, initializer func(context.Context, *types.Config) (*System, error)) { + c := new(command) + c.initializer = initializer + + cmd := app.Command("server", "starts the server"). + Action(c.run) + + cmd.Arg("envfile", "load the environment variable file"). + Default(""). + StringVar(&c.envfile) + + cmd.Flag("enable-gitrpc", "start the gitrpc server"). + Default("true"). + Envar("ENABLE_GITRPC"). + BoolVar(&c.enableGitRPC) + + cmd.Flag("enable-ci", "start ci runners for build executions"). + Default("true"). + Envar("ENABLE_CI"). + BoolVar(&c.enableCI) +} diff --git a/cli/server/system.go b/cli/server/system.go new file mode 100644 index 0000000000..7ef302a3e0 --- /dev/null +++ b/cli/server/system.go @@ -0,0 +1,52 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package server + +import ( + gitrpcserver "github.com/harness/gitness/gitrpc/server" + gitrpccron "github.com/harness/gitness/gitrpc/server/cron" + "github.com/harness/gitness/internal/bootstrap" + "github.com/harness/gitness/internal/pipeline/plugin" + "github.com/harness/gitness/internal/server" + "github.com/harness/gitness/internal/services" + + "github.com/drone/runner-go/poller" +) + +// System stores high level System sub-routines. +type System struct { + bootstrap bootstrap.Bootstrap + server *server.Server + gitRPCServer *gitrpcserver.GRPCServer + pluginManager *plugin.PluginManager + poller *poller.Poller + services services.Services + gitRPCCronMngr *gitrpccron.Manager +} + +// NewSystem returns a new system structure. 
+func NewSystem(bootstrap bootstrap.Bootstrap, server *server.Server, poller *poller.Poller, + gitRPCServer *gitrpcserver.GRPCServer, pluginManager *plugin.PluginManager, + gitrpccron *gitrpccron.Manager, services services.Services) *System { + return &System{ + bootstrap: bootstrap, + server: server, + poller: poller, + gitRPCServer: gitRPCServer, + pluginManager: pluginManager, + services: services, + gitRPCCronMngr: gitrpccron, + } +} diff --git a/cli/session/session.go b/cli/session/session.go new file mode 100644 index 0000000000..ecd8d107dc --- /dev/null +++ b/cli/session/session.go @@ -0,0 +1,95 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package session + +import ( + "encoding/json" + "errors" + "fmt" + "os" + "time" +) + +var ( + ErrTokenExpired = errors.New("token is expired, please login") +) + +type Session struct { + path string + URI string `json:"uri"` + ExpiresAt int64 `json:"expires_at"` + AccessToken string `json:"access_token"` +} + +// New creates a new session to be stores to the provided path. +func New(path string) Session { + return Session{ + path: path, + } +} + +// LoadFromPath loads an existing session from a file. +func LoadFromPath(path string) (Session, error) { + session := Session{ + path: path, + } + data, err := os.ReadFile(path) + if err != nil { + return session, fmt.Errorf("failed to read session from file: %w", err) + } + if err = json.Unmarshal(data, &session); err != nil { + return session, fmt.Errorf("failed to deserialize session: %w", err) + } + + if time.Now().Unix() > session.ExpiresAt { + return session, ErrTokenExpired + } + + return session, nil +} + +// Store stores an existing session to the default file. +func (s Session) Store() error { + data, err := json.Marshal(s) + if err != nil { + return fmt.Errorf("failed to serialize session: %w", err) + } + + err = os.WriteFile(s.path, data, 0o600) + if err != nil { + return fmt.Errorf("failed to write session to file: %w", err) + } + + return nil +} + +func (s Session) SetURI(uri string) Session { + s.URI = uri + return s +} + +func (s Session) SetExpiresAt(expiresAt int64) Session { + s.ExpiresAt = expiresAt + return s +} + +func (s Session) SetAccessToken(token string) Session { + s.AccessToken = token + return s +} + +func (s Session) Path() string { + return s.path +} diff --git a/cli/swagger.go b/cli/swagger.go new file mode 100644 index 0000000000..166e04708e --- /dev/null +++ b/cli/swagger.go @@ -0,0 +1,49 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package cli + +import ( + "os" + + "github.com/harness/gitness/internal/api/openapi" + + "gopkg.in/alecthomas/kingpin.v2" +) + +type swaggerCommand struct { + path string +} + +func (c *swaggerCommand) run(*kingpin.ParseContext) error { + spec := openapi.Generate() + data, _ := spec.MarshalYAML() + if c.path == "" { + os.Stdout.Write(data) + return nil + } + return os.WriteFile(c.path, data, 0o600) +} + +// helper function to register the swagger command. +func RegisterSwagger(app *kingpin.Application) { + c := new(swaggerCommand) + + cmd := app.Command("swagger", "generate swagger file"). + Hidden(). + Action(c.run) + + cmd.Arg("path", "path to save swagger file"). + StringVar(&c.path) +} diff --git a/cli/textui/input.go b/cli/textui/input.go new file mode 100644 index 0000000000..9b26deda39 --- /dev/null +++ b/cli/textui/input.go @@ -0,0 +1,84 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package textui + +import ( + "bufio" + "fmt" + "os" + "strings" + "syscall" + + "golang.org/x/term" +) + +// Registration returns the userID, displayName, email and password from stdin. +func Registration() (string, string, string, string) { + return UserID(), DisplayName(), Email(), Password() +} + +// Credentials returns the login identifier and password from stdin. +func Credentials() (string, string) { + return LoginIdentifier(), Password() +} + +// UserID returns the user ID from stdin. +func UserID() string { + reader := bufio.NewReader(os.Stdin) + + fmt.Print("Enter User ID: ") + uid, _ := reader.ReadString('\n') + + return strings.TrimSpace(uid) +} + +// LoginIdentifier returns the login identifier from stdin. +func LoginIdentifier() string { + reader := bufio.NewReader(os.Stdin) + + fmt.Print("Enter User ID or Email: ") + id, _ := reader.ReadString('\n') + + return strings.TrimSpace(id) +} + +// DisplayName returns the display name from stdin. +func DisplayName() string { + reader := bufio.NewReader(os.Stdin) + + fmt.Print("Enter Display Name: ") + name, _ := reader.ReadString('\n') + + return strings.TrimSpace(name) +} + +// Email returns the email from stdin. +func Email() string { + reader := bufio.NewReader(os.Stdin) + + fmt.Print("Enter Email: ") + email, _ := reader.ReadString('\n') + + return strings.TrimSpace(email) +} + +// Password returns the password from stdin. +func Password() string { + fmt.Print("Enter Password: ") + passwordb, _ := term.ReadPassword(syscall.Stdin) + password := string(passwordb) + + return strings.TrimSpace(password) +} diff --git a/client/client.go b/client/client.go new file mode 100644 index 0000000000..8540e4613a --- /dev/null +++ b/client/client.go @@ -0,0 +1,250 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package client
+
+import (
+	"bytes"
+	"context"
+	"encoding/json"
+	"fmt"
+	"io"
+	"net/http"
+	"net/http/httputil"
+	"net/url"
+
+	"github.com/harness/gitness/internal/api/controller/user"
+	"github.com/harness/gitness/types"
+	"github.com/harness/gitness/version"
+
+	"github.com/rs/zerolog/log"
+)
+
+// ensure HTTPClient implements Client interface.
+var _ Client = (*HTTPClient)(nil)
+
+// HTTPClient provides an HTTP client for interacting
+// with the remote API.
+type HTTPClient struct {
+	client *http.Client
+	base   string
+	token  string
+	debug  bool
+}
+
+// New returns a client at the specified url.
+func New(uri string) *HTTPClient {
+	return NewToken(uri, "")
+}
+
+// NewToken returns a client at the specified url that
+// authenticates all outbound requests with the given token.
+func NewToken(uri, token string) *HTTPClient {
+	return &HTTPClient{http.DefaultClient, uri, token, false}
+}
+
+// SetClient sets the default http client. This can be
+// used in conjunction with golang.org/x/oauth2 to
+// authenticate requests to the server.
+func (c *HTTPClient) SetClient(client *http.Client) {
+	c.client = client
+}
+
+// SetDebug sets the debug flag. When the debug flag is
+// true, the http.Response body is dumped to stdout, which
+// can be helpful when debugging.
+func (c *HTTPClient) SetDebug(debug bool) {
+	c.debug = debug
+}
+
+// Login authenticates the user and returns a JWT token.
+func (c *HTTPClient) Login(ctx context.Context, input *user.LoginInput) (*types.TokenResponse, error) {
+	out := new(types.TokenResponse)
+	uri := fmt.Sprintf("%s/api/v1/login", c.base)
+	err := c.post(ctx, uri, true, input, out)
+	return out, err
+}
+
+// Register registers a new user and returns a JWT token.
+func (c *HTTPClient) Register(ctx context.Context, input *user.RegisterInput) (*types.TokenResponse, error) {
+	out := new(types.TokenResponse)
+	uri := fmt.Sprintf("%s/api/v1/register", c.base)
+	err := c.post(ctx, uri, true, input, out)
+	return out, err
+}
+
+//
+// User Endpoints
+//
+
+// Self returns the currently authenticated user.
+func (c *HTTPClient) Self(ctx context.Context) (*types.User, error) {
+	out := new(types.User)
+	uri := fmt.Sprintf("%s/api/v1/user", c.base)
+	err := c.get(ctx, uri, out)
+	return out, err
+}
+
+// UserCreatePAT creates a new PAT for the user.
+func (c *HTTPClient) UserCreatePAT(ctx context.Context, in user.CreateTokenInput) (*types.TokenResponse, error) {
+	out := new(types.TokenResponse)
+	uri := fmt.Sprintf("%s/api/v1/user/tokens", c.base)
+	err := c.post(ctx, uri, false, in, out)
+	return out, err
+}
+
+// User returns a user by ID or email.
+func (c *HTTPClient) User(ctx context.Context, key string) (*types.User, error) {
+	out := new(types.User)
+	uri := fmt.Sprintf("%s/api/v1/users/%s", c.base, key)
+	err := c.get(ctx, uri, out)
+	return out, err
+}
+
+// UserList returns a list of all registered users.
+func (c *HTTPClient) UserList(ctx context.Context, params types.UserFilter) ([]types.User, error) { + out := []types.User{} + uri := fmt.Sprintf("%s/api/v1/users?page=%d&limit=%d", c.base, params.Page, params.Size) + err := c.get(ctx, uri, &out) + return out, err +} + +// UserCreate creates a new user account. +func (c *HTTPClient) UserCreate(ctx context.Context, user *types.User) (*types.User, error) { + out := new(types.User) + uri := fmt.Sprintf("%s/api/v1/users", c.base) + err := c.post(ctx, uri, false, user, out) + return out, err +} + +// UserUpdate updates a user account by ID or email. +func (c *HTTPClient) UserUpdate(ctx context.Context, key string, user *types.UserInput) (*types.User, error) { + out := new(types.User) + uri := fmt.Sprintf("%s/api/v1/users/%s", c.base, key) + err := c.patch(ctx, uri, user, out) + return out, err +} + +// UserDelete deletes a user account by ID or email. +func (c *HTTPClient) UserDelete(ctx context.Context, key string) error { + uri := fmt.Sprintf("%s/api/v1/users/%s", c.base, key) + err := c.delete(ctx, uri) + return err +} + +// +// http request helper functions +// + +// helper function for making an http GET request. +func (c *HTTPClient) get(ctx context.Context, rawurl string, out interface{}) error { + return c.do(ctx, rawurl, "GET", false, nil, out) +} + +// helper function for making an http POST request. +func (c *HTTPClient) post(ctx context.Context, rawurl string, noToken bool, in, out interface{}) error { + return c.do(ctx, rawurl, "POST", noToken, in, out) +} + +// helper function for making an http PATCH request. +func (c *HTTPClient) patch(ctx context.Context, rawurl string, in, out interface{}) error { + return c.do(ctx, rawurl, "PATCH", false, in, out) +} + +// helper function for making an http DELETE request. +func (c *HTTPClient) delete(ctx context.Context, rawurl string) error { + return c.do(ctx, rawurl, "DELETE", false, nil, nil) +} + +// helper function to make an http request. +func (c *HTTPClient) do(ctx context.Context, rawurl, method string, noToken bool, in, out interface{}) error { + // executes the http request and returns the body as + // and io.ReadCloser + body, err := c.stream(ctx, rawurl, method, noToken, in, out) + if body != nil { + defer func(body io.ReadCloser) { + _ = body.Close() + }(body) + } + if err != nil { + return err + } + + // if a json response is expected, parse and return + // the json response. + if out != nil { + return json.NewDecoder(body).Decode(out) + } + return nil +} + +// helper function to stream a http request. +func (c *HTTPClient) stream(ctx context.Context, rawurl, method string, noToken bool, + in, _ interface{}) (io.ReadCloser, error) { + uri, err := url.Parse(rawurl) + if err != nil { + return nil, err + } + + // if we are posting or putting data, we need to + // write it to the body of the request. + var buf io.ReadWriter + if in != nil { + buf = &bytes.Buffer{} + if err = json.NewEncoder(buf).Encode(in); err != nil { + return nil, err + } + } + + // creates a new http request. + req, err := http.NewRequestWithContext(ctx, method, uri.String(), buf) + if err != nil { + return nil, err + } + if in != nil { + req.Header.Set("Content-Type", "application/json") + } + if !noToken && c.token != "" { + req.Header.Set("Authorization", "Bearer "+c.token) + } + if _, ok := in.(*url.Values); ok { + req.Header.Set("Content-Type", "application/x-www-form-urlencoded") + } + + // include the client version information in the + // http accept header for debugging purposes. 
+	req.Header.Set("Accept", "application/json;version="+version.Version.String())
+
+	// send the http request.
+	resp, err := c.client.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	if c.debug {
+		dump, _ := httputil.DumpResponse(resp, true)
+		log.Debug().Msgf("method %s, url %s", method, rawurl)
+		log.Debug().Msg(string(dump))
+	}
+	if resp.StatusCode >= http.StatusMultipleChoices {
+		defer func(Body io.ReadCloser) {
+			_ = Body.Close()
+		}(resp.Body)
+		err = &remoteError{}
+		if decodeErr := json.NewDecoder(resp.Body).Decode(err); decodeErr != nil {
+			return nil, decodeErr
+		}
+		return nil, err
+	}
+	return resp.Body, nil
+}
diff --git a/client/interface.go b/client/interface.go
new file mode 100644
index 0000000000..87e627c227
--- /dev/null
+++ b/client/interface.go
@@ -0,0 +1,63 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package client
+
+import (
+	"context"
+
+	"github.com/harness/gitness/internal/api/controller/user"
+	"github.com/harness/gitness/types"
+)
+
+// Client to access the remote APIs.
+type Client interface {
+	// Login authenticates the user and returns a JWT token.
+	Login(ctx context.Context, input *user.LoginInput) (*types.TokenResponse, error)
+
+	// Register registers a new user and returns a JWT token.
+	Register(ctx context.Context, input *user.RegisterInput) (*types.TokenResponse, error)
+
+	// Self returns the currently authenticated user.
+	Self(ctx context.Context) (*types.User, error)
+
+	// User returns a user by ID or email.
+	User(ctx context.Context, key string) (*types.User, error)
+
+	// UserList returns a list of all registered users.
+	UserList(ctx context.Context, params types.UserFilter) ([]types.User, error)
+
+	// UserCreate creates a new user account.
+	UserCreate(ctx context.Context, user *types.User) (*types.User, error)
+
+	// UserUpdate updates a user account by ID or email.
+	UserUpdate(ctx context.Context, key string, input *types.UserInput) (*types.User, error)
+
+	// UserDelete deletes a user account by ID or email.
+	UserDelete(ctx context.Context, key string) error
+
+	// UserCreatePAT creates a new PAT for the user.
+	UserCreatePAT(ctx context.Context, in user.CreateTokenInput) (*types.TokenResponse, error)
+}
+
+// remoteError stores the error payload returned
+// from the remote API.
+type remoteError struct {
+	Message string `json:"message"`
+}
+
+// Error returns the error message.
+func (e *remoteError) Error() string {
+	return e.Message
+}
diff --git a/cmd/gitness-githook/main.go b/cmd/gitness-githook/main.go
new file mode 100644
index 0000000000..1286616fcb
--- /dev/null
+++ b/cmd/gitness-githook/main.go
@@ -0,0 +1,45 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package main + +import ( + "os" + + "github.com/harness/gitness/githook" + gitnessgithook "github.com/harness/gitness/internal/githook" + "github.com/harness/gitness/version" + + "gopkg.in/alecthomas/kingpin.v2" +) + +const ( + application = "gitness-githook" + description = "A lightweight executable that forwards git server hooks to the gitness API server." +) + +func main() { + // ensure args are properly sanitized if called by git + command := os.Args[0] + args := os.Args[1:] + args, _ = githook.SanitizeArgsForGit(command, args) + + // define new kingpin application and register githooks globally + app := kingpin.New(application, description) + app.Version(version.Version.String()) + githook.RegisterAll(app, gitnessgithook.LoadFromEnvironment) + + // trigger execution + kingpin.MustParse(app.Parse(args)) +} diff --git a/cmd/gitness/driver_pq.go b/cmd/gitness/driver_pq.go new file mode 100644 index 0000000000..4713aa48a2 --- /dev/null +++ b/cmd/gitness/driver_pq.go @@ -0,0 +1,22 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//go:build pq +// +build pq + +package main + +import ( + _ "github.com/lib/pq" +) diff --git a/cmd/gitness/driver_sqlite.go b/cmd/gitness/driver_sqlite.go new file mode 100644 index 0000000000..c9a0165205 --- /dev/null +++ b/cmd/gitness/driver_sqlite.go @@ -0,0 +1,22 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//go:build sqlite +// +build sqlite + +package main + +import ( + _ "github.com/mattn/go-sqlite3" +) diff --git a/cmd/gitness/main.go b/cmd/gitness/main.go new file mode 100644 index 0000000000..60d6876d4f --- /dev/null +++ b/cmd/gitness/main.go @@ -0,0 +1,56 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package main + +import ( + "github.com/harness/gitness/cli" + "github.com/harness/gitness/cli/operations/account" + "github.com/harness/gitness/cli/operations/hooks" + "github.com/harness/gitness/cli/operations/migrate" + "github.com/harness/gitness/cli/operations/user" + "github.com/harness/gitness/cli/operations/users" + "github.com/harness/gitness/cli/server" + "github.com/harness/gitness/version" + + "gopkg.in/alecthomas/kingpin.v2" +) + +const ( + application = "gitness" + description = "Gitness Open source edition" +) + +func main() { + args := cli.GetArguments() + + app := kingpin.New(application, description) + + migrate.Register(app) + server.Register(app, initSystem) + + user.Register(app) + users.Register(app) + + account.RegisterLogin(app) + account.RegisterRegister(app) + account.RegisterLogout(app) + + hooks.Register(app) + + cli.RegisterSwagger(app) + + kingpin.Version(version.Version.String()) + kingpin.MustParse(app.Parse(args)) +} diff --git a/cmd/gitness/wire.go b/cmd/gitness/wire.go new file mode 100644 index 0000000000..05ce27a522 --- /dev/null +++ b/cmd/gitness/wire.go @@ -0,0 +1,148 @@ +// Copyright 2021 Harness Inc. All rights reserved. +// Use of this source code is governed by the Polyform Free Trial License +// that can be found in the LICENSE.md file for this repository. + +//go:build wireinject +// +build wireinject + +package main + +import ( + "context" + + cliserver "github.com/harness/gitness/cli/server" + "github.com/harness/gitness/encrypt" + "github.com/harness/gitness/events" + "github.com/harness/gitness/gitrpc" + gitrpcserver "github.com/harness/gitness/gitrpc/server" + gitrpccron "github.com/harness/gitness/gitrpc/server/cron" + checkcontroller "github.com/harness/gitness/internal/api/controller/check" + "github.com/harness/gitness/internal/api/controller/connector" + "github.com/harness/gitness/internal/api/controller/execution" + "github.com/harness/gitness/internal/api/controller/githook" + controllerlogs "github.com/harness/gitness/internal/api/controller/logs" + "github.com/harness/gitness/internal/api/controller/pipeline" + "github.com/harness/gitness/internal/api/controller/plugin" + "github.com/harness/gitness/internal/api/controller/principal" + "github.com/harness/gitness/internal/api/controller/pullreq" + "github.com/harness/gitness/internal/api/controller/repo" + "github.com/harness/gitness/internal/api/controller/secret" + "github.com/harness/gitness/internal/api/controller/service" + "github.com/harness/gitness/internal/api/controller/serviceaccount" + "github.com/harness/gitness/internal/api/controller/space" + "github.com/harness/gitness/internal/api/controller/system" + "github.com/harness/gitness/internal/api/controller/template" + controllertrigger "github.com/harness/gitness/internal/api/controller/trigger" + "github.com/harness/gitness/internal/api/controller/user" + controllerwebhook "github.com/harness/gitness/internal/api/controller/webhook" + "github.com/harness/gitness/internal/auth/authn" + "github.com/harness/gitness/internal/auth/authz" + "github.com/harness/gitness/internal/bootstrap" + gitevents 
"github.com/harness/gitness/internal/events/git" + pullreqevents "github.com/harness/gitness/internal/events/pullreq" + "github.com/harness/gitness/internal/pipeline/canceler" + "github.com/harness/gitness/internal/pipeline/commit" + "github.com/harness/gitness/internal/pipeline/file" + "github.com/harness/gitness/internal/pipeline/manager" + pluginmanager "github.com/harness/gitness/internal/pipeline/plugin" + "github.com/harness/gitness/internal/pipeline/runner" + "github.com/harness/gitness/internal/pipeline/scheduler" + "github.com/harness/gitness/internal/pipeline/triggerer" + "github.com/harness/gitness/internal/router" + "github.com/harness/gitness/internal/server" + "github.com/harness/gitness/internal/services" + "github.com/harness/gitness/internal/services/codecomments" + "github.com/harness/gitness/internal/services/exporter" + "github.com/harness/gitness/internal/services/importer" + "github.com/harness/gitness/internal/services/job" + "github.com/harness/gitness/internal/services/metric" + pullreqservice "github.com/harness/gitness/internal/services/pullreq" + "github.com/harness/gitness/internal/services/trigger" + "github.com/harness/gitness/internal/services/webhook" + "github.com/harness/gitness/internal/sse" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/internal/store/cache" + "github.com/harness/gitness/internal/store/database" + "github.com/harness/gitness/internal/store/logs" + "github.com/harness/gitness/internal/url" + "github.com/harness/gitness/livelog" + "github.com/harness/gitness/lock" + "github.com/harness/gitness/pubsub" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/check" + + "github.com/google/wire" +) + +func initSystem(ctx context.Context, config *types.Config) (*cliserver.System, error) { + wire.Build( + cliserver.NewSystem, + cliserver.ProvideRedis, + bootstrap.WireSet, + cliserver.ProvideDatabaseConfig, + database.WireSet, + cache.WireSet, + router.WireSet, + pullreqservice.WireSet, + services.WireSet, + server.WireSet, + url.WireSet, + space.WireSet, + repo.WireSet, + pullreq.WireSet, + controllerwebhook.WireSet, + serviceaccount.WireSet, + user.WireSet, + service.WireSet, + principal.WireSet, + system.WireSet, + authn.WireSet, + authz.WireSet, + gitevents.WireSet, + pullreqevents.WireSet, + cliserver.ProvideGitRPCServerConfig, + gitrpcserver.WireSet, + cliserver.ProvideGitRPCClientConfig, + gitrpc.WireSet, + store.WireSet, + check.WireSet, + encrypt.WireSet, + cliserver.ProvideEventsConfig, + events.WireSet, + cliserver.ProvideWebhookConfig, + webhook.WireSet, + cliserver.ProvideTriggerConfig, + trigger.WireSet, + githook.WireSet, + cliserver.ProvideLockConfig, + lock.WireSet, + pubsub.WireSet, + codecomments.WireSet, + job.WireSet, + gitrpccron.WireSet, + checkcontroller.WireSet, + execution.WireSet, + pipeline.WireSet, + logs.WireSet, + livelog.WireSet, + controllerlogs.WireSet, + secret.WireSet, + connector.WireSet, + template.WireSet, + manager.WireSet, + triggerer.WireSet, + file.WireSet, + runner.WireSet, + sse.WireSet, + scheduler.WireSet, + commit.WireSet, + controllertrigger.WireSet, + plugin.WireSet, + pluginmanager.WireSet, + importer.WireSet, + canceler.WireSet, + exporter.WireSet, + metric.WireSet, + ) + return &cliserver.System{}, nil +} diff --git a/cmd/gitness/wire_gen.go b/cmd/gitness/wire_gen.go new file mode 100644 index 0000000000..0441b4ee80 --- /dev/null +++ b/cmd/gitness/wire_gen.go @@ -0,0 +1,260 @@ +// Code generated by Wire. DO NOT EDIT. 
+ +//go:generate go run github.com/google/wire/cmd/wire +//go:build !wireinject +// +build !wireinject + +package main + +import ( + "context" + "github.com/harness/gitness/cli/server" + "github.com/harness/gitness/encrypt" + "github.com/harness/gitness/events" + "github.com/harness/gitness/gitrpc" + server3 "github.com/harness/gitness/gitrpc/server" + "github.com/harness/gitness/gitrpc/server/cron" + check2 "github.com/harness/gitness/internal/api/controller/check" + "github.com/harness/gitness/internal/api/controller/connector" + "github.com/harness/gitness/internal/api/controller/execution" + "github.com/harness/gitness/internal/api/controller/githook" + logs2 "github.com/harness/gitness/internal/api/controller/logs" + "github.com/harness/gitness/internal/api/controller/pipeline" + "github.com/harness/gitness/internal/api/controller/plugin" + "github.com/harness/gitness/internal/api/controller/principal" + pullreq2 "github.com/harness/gitness/internal/api/controller/pullreq" + "github.com/harness/gitness/internal/api/controller/repo" + "github.com/harness/gitness/internal/api/controller/secret" + "github.com/harness/gitness/internal/api/controller/service" + "github.com/harness/gitness/internal/api/controller/serviceaccount" + "github.com/harness/gitness/internal/api/controller/space" + "github.com/harness/gitness/internal/api/controller/system" + "github.com/harness/gitness/internal/api/controller/template" + "github.com/harness/gitness/internal/api/controller/trigger" + "github.com/harness/gitness/internal/api/controller/user" + webhook2 "github.com/harness/gitness/internal/api/controller/webhook" + "github.com/harness/gitness/internal/auth/authn" + "github.com/harness/gitness/internal/auth/authz" + "github.com/harness/gitness/internal/bootstrap" + events3 "github.com/harness/gitness/internal/events/git" + events2 "github.com/harness/gitness/internal/events/pullreq" + "github.com/harness/gitness/internal/pipeline/canceler" + "github.com/harness/gitness/internal/pipeline/commit" + "github.com/harness/gitness/internal/pipeline/file" + "github.com/harness/gitness/internal/pipeline/manager" + plugin2 "github.com/harness/gitness/internal/pipeline/plugin" + "github.com/harness/gitness/internal/pipeline/runner" + "github.com/harness/gitness/internal/pipeline/scheduler" + "github.com/harness/gitness/internal/pipeline/triggerer" + "github.com/harness/gitness/internal/router" + server2 "github.com/harness/gitness/internal/server" + "github.com/harness/gitness/internal/services" + "github.com/harness/gitness/internal/services/codecomments" + "github.com/harness/gitness/internal/services/exporter" + "github.com/harness/gitness/internal/services/importer" + "github.com/harness/gitness/internal/services/job" + "github.com/harness/gitness/internal/services/metric" + "github.com/harness/gitness/internal/services/pullreq" + trigger2 "github.com/harness/gitness/internal/services/trigger" + "github.com/harness/gitness/internal/services/webhook" + "github.com/harness/gitness/internal/sse" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/internal/store/cache" + "github.com/harness/gitness/internal/store/database" + "github.com/harness/gitness/internal/store/logs" + "github.com/harness/gitness/internal/url" + "github.com/harness/gitness/livelog" + "github.com/harness/gitness/lock" + "github.com/harness/gitness/pubsub" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/check" +) + +// Injectors from wire.go: + +func initSystem(ctx context.Context, config 
*types.Config) (*server.System, error) { + databaseConfig := server.ProvideDatabaseConfig(config) + db, err := database.ProvideDatabase(ctx, databaseConfig) + if err != nil { + return nil, err + } + principalUID := check.ProvidePrincipalUIDCheck() + spacePathTransformation := store.ProvidePathTransformation() + spacePathStore := database.ProvideSpacePathStore(db, spacePathTransformation) + spacePathCache := cache.ProvidePathCache(spacePathStore, spacePathTransformation) + spaceStore := database.ProvideSpaceStore(db, spacePathCache, spacePathStore) + principalInfoView := database.ProvidePrincipalInfoView(db) + principalInfoCache := cache.ProvidePrincipalInfoCache(principalInfoView) + membershipStore := database.ProvideMembershipStore(db, principalInfoCache, spacePathStore) + permissionCache := authz.ProvidePermissionCache(spaceStore, membershipStore) + authorizer := authz.ProvideAuthorizer(permissionCache, spaceStore) + principalUIDTransformation := store.ProvidePrincipalUIDTransformation() + principalStore := database.ProvidePrincipalStore(db, principalUIDTransformation) + tokenStore := database.ProvideTokenStore(db) + controller := user.ProvideController(db, principalUID, authorizer, principalStore, tokenStore, membershipStore) + serviceController := service.NewController(principalUID, authorizer, principalStore) + bootstrapBootstrap := bootstrap.ProvideBootstrap(config, controller, serviceController) + authenticator := authn.ProvideAuthenticator(config, principalStore, tokenStore) + provider, err := url.ProvideURLProvider(config) + if err != nil { + return nil, err + } + pathUID := check.ProvidePathUIDCheck() + repoStore := database.ProvideRepoStore(db, spacePathCache, spacePathStore) + pipelineStore := database.ProvidePipelineStore(db) + gitrpcConfig, err := server.ProvideGitRPCClientConfig() + if err != nil { + return nil, err + } + gitrpcInterface, err := gitrpc.ProvideClient(gitrpcConfig) + if err != nil { + return nil, err + } + triggerStore := database.ProvideTriggerStore(db) + encrypter, err := encrypt.ProvideEncrypter(config) + if err != nil { + return nil, err + } + jobStore := database.ProvideJobStore(db) + pubsubConfig := pubsub.ProvideConfig(config) + universalClient, err := server.ProvideRedis(config) + if err != nil { + return nil, err + } + pubSub := pubsub.ProvidePubSub(pubsubConfig, universalClient) + executor := job.ProvideExecutor(jobStore, pubSub) + lockConfig := server.ProvideLockConfig(config) + mutexManager := lock.ProvideMutexManager(lockConfig, universalClient) + jobScheduler, err := job.ProvideScheduler(jobStore, executor, mutexManager, pubSub, config) + if err != nil { + return nil, err + } + streamer := sse.ProvideEventsStreaming(pubSub) + repository, err := importer.ProvideRepoImporter(config, provider, gitrpcInterface, db, repoStore, pipelineStore, triggerStore, encrypter, jobScheduler, executor, streamer) + if err != nil { + return nil, err + } + repoController := repo.ProvideController(config, db, provider, pathUID, authorizer, repoStore, spaceStore, pipelineStore, principalStore, gitrpcInterface, repository) + executionStore := database.ProvideExecutionStore(db) + checkStore := database.ProvideCheckStore(db, principalInfoCache) + stageStore := database.ProvideStageStore(db) + schedulerScheduler, err := scheduler.ProvideScheduler(stageStore, mutexManager) + if err != nil { + return nil, err + } + stepStore := database.ProvideStepStore(db) + cancelerCanceler := canceler.ProvideCanceler(executionStore, streamer, repoStore, schedulerScheduler, stageStore, 
stepStore) + commitService := commit.ProvideCommitService(gitrpcInterface) + fileService := file.ProvideFileService(gitrpcInterface) + triggererTriggerer := triggerer.ProvideTriggerer(executionStore, checkStore, stageStore, db, pipelineStore, fileService, schedulerScheduler, repoStore) + executionController := execution.ProvideController(db, authorizer, executionStore, checkStore, cancelerCanceler, commitService, triggererTriggerer, repoStore, stageStore, pipelineStore) + logStore := logs.ProvideLogStore(db, config) + logStream := livelog.ProvideLogStream(config) + logsController := logs2.ProvideController(db, authorizer, executionStore, repoStore, pipelineStore, stageStore, stepStore, logStore, logStream) + secretStore := database.ProvideSecretStore(db) + connectorStore := database.ProvideConnectorStore(db) + templateStore := database.ProvideTemplateStore(db) + exporterRepository, err := exporter.ProvideSpaceExporter(provider, gitrpcInterface, repoStore, jobScheduler, executor, encrypter, streamer) + if err != nil { + return nil, err + } + spaceController := space.ProvideController(config, db, provider, streamer, pathUID, authorizer, spacePathStore, pipelineStore, secretStore, connectorStore, templateStore, spaceStore, repoStore, principalStore, repoController, membershipStore, repository, exporterRepository) + pipelineController := pipeline.ProvideController(db, pathUID, repoStore, triggerStore, authorizer, pipelineStore) + secretController := secret.ProvideController(db, pathUID, encrypter, secretStore, authorizer, spaceStore) + triggerController := trigger.ProvideController(db, authorizer, triggerStore, pathUID, pipelineStore, repoStore) + connectorController := connector.ProvideController(db, pathUID, connectorStore, authorizer, spaceStore) + templateController := template.ProvideController(db, pathUID, templateStore, authorizer, spaceStore) + pluginStore := database.ProvidePluginStore(db) + pluginController := plugin.ProvideController(db, pluginStore) + pullReqStore := database.ProvidePullReqStore(db, principalInfoCache) + pullReqActivityStore := database.ProvidePullReqActivityStore(db, principalInfoCache) + codeCommentView := database.ProvideCodeCommentView(db) + pullReqReviewStore := database.ProvidePullReqReviewStore(db) + pullReqReviewerStore := database.ProvidePullReqReviewerStore(db, principalInfoCache) + pullReqFileViewStore := database.ProvidePullReqFileViewStore(db) + eventsConfig, err := server.ProvideEventsConfig() + if err != nil { + return nil, err + } + eventsSystem, err := events.ProvideSystem(eventsConfig, universalClient) + if err != nil { + return nil, err + } + reporter, err := events2.ProvideReporter(eventsSystem) + if err != nil { + return nil, err + } + migrator := codecomments.ProvideMigrator(gitrpcInterface) + readerFactory, err := events3.ProvideReaderFactory(eventsSystem) + if err != nil { + return nil, err + } + eventsReaderFactory, err := events2.ProvideReaderFactory(eventsSystem) + if err != nil { + return nil, err + } + repoGitInfoView := database.ProvideRepoGitInfoView(db) + repoGitInfoCache := cache.ProvideRepoGitInfoCache(repoGitInfoView) + pullreqService, err := pullreq.ProvideService(ctx, config, readerFactory, eventsReaderFactory, reporter, gitrpcInterface, db, repoGitInfoCache, repoStore, pullReqStore, pullReqActivityStore, codeCommentView, migrator, pullReqFileViewStore, pubSub, provider, streamer) + if err != nil { + return nil, err + } + pullreqController := pullreq2.ProvideController(db, provider, authorizer, pullReqStore, 
pullReqActivityStore, codeCommentView, pullReqReviewStore, pullReqReviewerStore, repoStore, principalStore, pullReqFileViewStore, gitrpcInterface, reporter, mutexManager, migrator, pullreqService, streamer) + webhookConfig := server.ProvideWebhookConfig(config) + webhookStore := database.ProvideWebhookStore(db) + webhookExecutionStore := database.ProvideWebhookExecutionStore(db) + webhookService, err := webhook.ProvideService(ctx, webhookConfig, readerFactory, eventsReaderFactory, webhookStore, webhookExecutionStore, repoStore, pullReqStore, provider, principalStore, gitrpcInterface, encrypter) + if err != nil { + return nil, err + } + webhookController := webhook2.ProvideController(webhookConfig, db, authorizer, webhookStore, webhookExecutionStore, repoStore, webhookService, encrypter) + eventsReporter, err := events3.ProvideReporter(eventsSystem) + if err != nil { + return nil, err + } + githookController := githook.ProvideController(db, authorizer, principalStore, repoStore, eventsReporter) + serviceaccountController := serviceaccount.NewController(principalUID, authorizer, principalStore, spaceStore, repoStore, tokenStore) + principalController := principal.ProvideController(principalStore) + checkController := check2.ProvideController(db, authorizer, repoStore, checkStore, gitrpcInterface) + systemController := system.NewController(principalStore, config) + apiHandler := router.ProvideAPIHandler(config, authenticator, repoController, executionController, logsController, spaceController, pipelineController, secretController, triggerController, connectorController, templateController, pluginController, pullreqController, webhookController, githookController, serviceaccountController, controller, principalController, checkController, systemController) + gitHandler := router.ProvideGitHandler(config, provider, repoStore, authenticator, authorizer, gitrpcInterface) + webHandler := router.ProvideWebHandler(config) + routerRouter := router.ProvideRouter(config, apiHandler, gitHandler, webHandler) + serverServer := server2.ProvideServer(config, routerRouter) + executionManager := manager.ProvideExecutionManager(config, executionStore, pipelineStore, provider, streamer, fileService, logStore, logStream, checkStore, repoStore, schedulerScheduler, secretStore, stageStore, stepStore, principalStore) + client := manager.ProvideExecutionClient(executionManager, config) + pluginManager := plugin2.ProvidePluginManager(config, pluginStore) + runtimeRunner, err := runner.ProvideExecutionRunner(config, client, pluginManager, executionManager) + if err != nil { + return nil, err + } + poller := runner.ProvideExecutionPoller(runtimeRunner, config, client) + serverConfig, err := server.ProvideGitRPCServerConfig() + if err != nil { + return nil, err + } + goGitRepoProvider := server3.ProvideGoGitRepoProvider() + cacheCache := server3.ProvideLastCommitCache(serverConfig, universalClient, goGitRepoProvider) + gitAdapter, err := server3.ProvideGITAdapter(goGitRepoProvider, cacheCache) + if err != nil { + return nil, err + } + grpcServer, err := server3.ProvideServer(serverConfig, gitAdapter) + if err != nil { + return nil, err + } + cronManager := cron.ProvideManager(serverConfig) + triggerConfig := server.ProvideTriggerConfig(config) + triggerService, err := trigger2.ProvideService(ctx, triggerConfig, triggerStore, commitService, pullReqStore, repoStore, pipelineStore, triggererTriggerer, readerFactory, eventsReaderFactory) + if err != nil { + return nil, err + } + collector, err := 
metric.ProvideCollector(config, principalStore, repoStore, pipelineStore, executionStore, jobScheduler, executor) + if err != nil { + return nil, err + } + servicesServices := services.ProvideServices(webhookService, pullreqService, triggerService, jobScheduler, collector) + serverSystem := server.NewSystem(bootstrapBootstrap, serverServer, poller, grpcServer, pluginManager, cronManager, servicesServices) + return serverSystem, nil +} diff --git a/cmd/gitrpcserver/config.go b/cmd/gitrpcserver/config.go new file mode 100644 index 0000000000..be90b27987 --- /dev/null +++ b/cmd/gitrpcserver/config.go @@ -0,0 +1,68 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package main + +import ( + "fmt" + "os" + "path/filepath" + "time" + + "github.com/harness/gitness/gitrpc/server" + + "github.com/kelseyhightower/envconfig" +) + +type Config struct { + Debug bool `envconfig:"GITRPC_SERVER_DEBUG"` + Trace bool `envconfig:"GITRPC_SERVER_TRACE"` + + // GracefulShutdownTime defines the max time we wait when shutting down a server. + // 5min should be enough for most git clones to complete. + GracefulShutdownTime time.Duration `envconfig:"GITRPC_SERVER_GRACEFUL_SHUTDOWN_TIME" default:"300s"` + + Profiler struct { + Type string `envconfig:"GITRPC_PROFILER_TYPE"` + ServiceName string `envconfig:"GITRPC_PROFILER_SERVICE_NAME" default:"gitrpcserver"` + } +} + +func loadConfig() (Config, error) { + config := Config{} + err := envconfig.Process("", &config) + if err != nil { + return Config{}, fmt.Errorf("processing of config failed: %w", err) + } + return config, nil +} + +func ProvideGitRPCServerConfig() (server.Config, error) { + config := server.Config{} + err := envconfig.Process("", &config) + if err != nil { + return server.Config{}, fmt.Errorf("processing of gitrpc server config failed: %w", err) + } + if config.GitRoot == "" { + var homedir string + homedir, err = os.UserHomeDir() + if err != nil { + return server.Config{}, err + } + + config.GitRoot = filepath.Join(homedir, ".gitrpc") + } + + return config, nil +} diff --git a/cmd/gitrpcserver/main.go b/cmd/gitrpcserver/main.go new file mode 100644 index 0000000000..33780a12e7 --- /dev/null +++ b/cmd/gitrpcserver/main.go @@ -0,0 +1,160 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package main + +import ( + "context" + "fmt" + "os" + "os/signal" + "syscall" + "time" + + "github.com/harness/gitness/profiler" + "github.com/harness/gitness/version" + + "github.com/joho/godotenv" + "github.com/mattn/go-isatty" + "github.com/rs/zerolog" + "github.com/rs/zerolog/log" + "golang.org/x/sync/errgroup" + "gopkg.in/alecthomas/kingpin.v2" +) + +const ( + application = "gitrpcserver" + description = "GitRPC is a GRPC server that exposes git via RPC." +) + +func main() { + // define new kingpin application with global entry point + app := kingpin.New(application, description) + app.Version(version.Version.String()) + + var envFile string + app.Action(func(*kingpin.ParseContext) error { return run(envFile) }) + app.Arg("envfile", "load the environment variable file"). + Default(""). + StringVar(&envFile) + + // trigger execution + kingpin.MustParse(app.Parse(os.Args[1:])) +} + +func run(envFile string) error { + // Create context that listens for the interrupt signal from the OS. + ctx, stop := signal.NotifyContext(context.Background(), syscall.SIGINT, syscall.SIGTERM) + defer stop() + + if envFile != "" { + if err := godotenv.Load(envFile); err != nil { + return fmt.Errorf("failed to load environment file: %w", err) + } + } + + config, err := loadConfig() + if err != nil { + return fmt.Errorf("failed to load config: %w", err) + } + + // setup logger and inject into context + setupLogger(config) + log := log.Logger.With().Logger() + ctx = log.WithContext(ctx) + + setupProfiler(config) + + system, err := initSystem() + if err != nil { + return fmt.Errorf("failed to init gitrpc server: %w", err) + } + + // gCtx is canceled if any of the following occurs: + // - any go routine launched with g encounters an error + // - ctx is canceled + g, gCtx := errgroup.WithContext(ctx) + + // start grpc server + g.Go(system.grpcServer.Start) + log.Info().Msg("grpc server started") + + gHTTP, shutdownHTTP := system.httpServer.ListenAndServe() + g.Go(gHTTP.Wait) + log.Info().Msgf("http server started") + + // wait until the error group context is done + <-gCtx.Done() + + // restore default behavior on the interrupt signal and notify user of shutdown. + stop() + log.Info().Msg("shutting down gracefully (press Ctrl+C again to force)") + + // shutdown servers gracefully + shutdownCtx, cancel := context.WithTimeout(context.Background(), config.GracefulShutdownTime) + defer cancel() + + if rpcErr := system.grpcServer.Stop(); rpcErr != nil { + log.Err(rpcErr).Msg("failed to shutdown grpc server gracefully") + } + + if sErr := shutdownHTTP(shutdownCtx); sErr != nil { + log.Err(sErr).Msg("failed to shutdown http server gracefully") + } + + log.Info().Msg("wait for subroutines to complete") + err = g.Wait() + + return err +} + +// helper function configures the global logger from +// the loaded configuration. 
+func setupLogger(config Config) { + // configure the log level + switch { + case config.Trace: + zerolog.SetGlobalLevel(zerolog.TraceLevel) + case config.Debug: + zerolog.SetGlobalLevel(zerolog.DebugLevel) + default: + zerolog.SetGlobalLevel(zerolog.InfoLevel) + } + + // configure time format (ignored if running in terminal) + zerolog.TimeFieldFormat = time.RFC3339Nano + + // if the terminal is a tty we should output the + // logs in pretty format + if isatty.IsTerminal(os.Stdout.Fd()) { + log.Logger = log.Output( + zerolog.ConsoleWriter{ + Out: os.Stderr, + NoColor: false, + TimeFormat: "15:04:05.999", + }, + ) + } +} + +func setupProfiler(config Config) { + profilerType, parsed := profiler.ParseType(config.Profiler.Type) + if !parsed { + log.Info().Msgf("No valid profiler so skipping profiling ['%s']", config.Profiler.Type) + return + } + + gitrpcProfiler, _ := profiler.New(profilerType) + gitrpcProfiler.StartProfiling(config.Profiler.ServiceName, version.Version.String()) +} diff --git a/cmd/gitrpcserver/redis.go b/cmd/gitrpcserver/redis.go new file mode 100644 index 0000000000..d7c323fa8a --- /dev/null +++ b/cmd/gitrpcserver/redis.go @@ -0,0 +1,54 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package main + +import ( + "strings" + + "github.com/harness/gitness/gitrpc/server" + + "github.com/go-redis/redis/v8" +) + +// ProvideRedis provides a redis client based on the configuration. +// TODO: add support for TLS +func ProvideRedis(config server.Config) redis.UniversalClient { + if config.Redis.SentinelMode { + addrs := strings.Split(config.Redis.SentinelEndpoint, ",") + + failoverOptions := &redis.FailoverOptions{ + MasterName: config.Redis.SentinelMaster, + SentinelAddrs: addrs, + MaxRetries: config.Redis.MaxRetries, + MinIdleConns: config.Redis.MinIdleConnections, + } + if config.Redis.Password != "" { + failoverOptions.Password = config.Redis.Password + } + return redis.NewFailoverClient(failoverOptions) + } + + options := &redis.Options{ + Addr: config.Redis.Endpoint, + MaxRetries: config.Redis.MaxRetries, + MinIdleConns: config.Redis.MinIdleConnections, + } + + if config.Redis.Password != "" { + options.Password = config.Redis.Password + } + + return redis.NewClient(options) +} diff --git a/cmd/gitrpcserver/system.go b/cmd/gitrpcserver/system.go new file mode 100644 index 0000000000..aba136aada --- /dev/null +++ b/cmd/gitrpcserver/system.go @@ -0,0 +1,33 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package main + +import ( + "github.com/harness/gitness/gitrpc/server" +) + +// system stores high level system components. +type system struct { + grpcServer *server.GRPCServer + httpServer *server.HTTPServer +} + +// newSystem returns a new system structure. +func newSystem(grpcServer *server.GRPCServer, httpServer *server.HTTPServer) *system { + return &system{ + grpcServer: grpcServer, + httpServer: httpServer, + } +} diff --git a/cmd/gitrpcserver/wire.go b/cmd/gitrpcserver/wire.go new file mode 100644 index 0000000000..2c8433be51 --- /dev/null +++ b/cmd/gitrpcserver/wire.go @@ -0,0 +1,24 @@ +// Copyright 2021 Harness Inc. All rights reserved. +// Use of this source code is governed by the Polyform Free Trial License +// that can be found in the LICENSE.md file for this repository. + +//go:build wireinject +// +build wireinject + +package main + +import ( + "github.com/harness/gitness/gitrpc/server" + + "github.com/google/wire" +) + +func initSystem() (*system, error) { + wire.Build( + newSystem, + ProvideGitRPCServerConfig, + server.WireSet, + ProvideRedis, + ) + return &system{}, nil +} diff --git a/cmd/gitrpcserver/wire_gen.go b/cmd/gitrpcserver/wire_gen.go new file mode 100644 index 0000000000..680fe9f666 --- /dev/null +++ b/cmd/gitrpcserver/wire_gen.go @@ -0,0 +1,37 @@ +// Code generated by Wire. DO NOT EDIT. + +//go:generate go run github.com/google/wire/cmd/wire +//go:build !wireinject +// +build !wireinject + +package main + +import ( + "github.com/harness/gitness/gitrpc/server" +) + +// Injectors from wire.go: + +func initSystem() (*system, error) { + config, err := ProvideGitRPCServerConfig() + if err != nil { + return nil, err + } + goGitRepoProvider := server.ProvideGoGitRepoProvider() + universalClient := ProvideRedis(config) + cache := server.ProvideLastCommitCache(config, universalClient, goGitRepoProvider) + gitAdapter, err := server.ProvideGITAdapter(goGitRepoProvider, cache) + if err != nil { + return nil, err + } + grpcServer, err := server.ProvideServer(config, gitAdapter) + if err != nil { + return nil, err + } + httpServer, err := server.ProvideHTTPServer(config) + if err != nil { + return nil, err + } + mainSystem := newSystem(grpcServer, httpServer) + return mainSystem, nil +} diff --git a/encrypt/aesgcm.go b/encrypt/aesgcm.go new file mode 100644 index 0000000000..efb0749c42 --- /dev/null +++ b/encrypt/aesgcm.go @@ -0,0 +1,84 @@ +// Copyright 2023 Harness Inc. All rights reserved. +// Use of this source code is governed by the Polyform Free Trial License +// that can be found in the LICENSE.md file for this repository. + +package encrypt + +import ( + "crypto/aes" + "crypto/cipher" + "crypto/rand" + "errors" + "io" +) + +// Aesgcm provides an encrypter that uses the aesgcm encryption +// algorithm. +type Aesgcm struct { + block cipher.Block + Compat bool +} + +// Encrypt encrypts the plaintext using aesgcm. +func (e *Aesgcm) Encrypt(plaintext string) ([]byte, error) { + gcm, err := cipher.NewGCM(e.block) + if err != nil { + return nil, err + } + + nonce := make([]byte, gcm.NonceSize()) + _, err = io.ReadFull(rand.Reader, nonce) + if err != nil { + return nil, err + } + + return gcm.Seal(nonce, nonce, []byte(plaintext), nil), nil +} + +// Decrypt decrypts the ciphertext using aesgcm. 
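+// The ciphertext is expected to be the GCM nonce followed by the sealed data,
+// matching the layout produced by Encrypt above.
+//
+// Example (illustrative sketch; the key below is a placeholder):
+//
+//	enc, _ := New("0123456789abcdef0123456789abcdef", false) // key must be exactly 32 bytes
+//	ciphertext, _ := enc.Encrypt("my secret")
+//	plaintext, _ := enc.Decrypt(ciphertext) // plaintext == "my secret"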
+func (e *Aesgcm) Decrypt(ciphertext []byte) (string, error) { + gcm, err := cipher.NewGCM(e.block) + if err != nil { + return "", err + } + + if len(ciphertext) < gcm.NonceSize() { + // if the decryption utility is running in compatibility + // mode, it will return the ciphertext as plain text if + // decryption fails. This should be used when running the + // database in mixed-mode, where there is a mix of encrypted + // and unencrypted content. + if e.Compat { + return string(ciphertext), nil + } + return "", errors.New("malformed ciphertext") + } + + plaintext, err := gcm.Open(nil, + ciphertext[:gcm.NonceSize()], + ciphertext[gcm.NonceSize():], + nil, + ) + // if the decryption utility is running in compatibility + // mode, it will return the ciphertext as plain text if + // decryption fails. This should be used when running the + // database in mixed-mode, where there is a mix of encrypted + // and unencrypted content. + if err != nil && e.Compat { + return string(ciphertext), nil + } + return string(plaintext), err +} + +// New provides a new aesgcm encrypter. +func New(key string, compat bool) (Encrypter, error) { + if len(key) != 32 { + return nil, errKeySize + } + b := []byte(key) + block, err := aes.NewCipher(b) + if err != nil { + return nil, err + } + return &Aesgcm{block: block, Compat: compat}, nil +} diff --git a/encrypt/encrypt.go b/encrypt/encrypt.go new file mode 100644 index 0000000000..a905971b35 --- /dev/null +++ b/encrypt/encrypt.go @@ -0,0 +1,30 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package encrypt + +import ( + "errors" +) + +// indicates key size is too small. +var errKeySize = errors.New("encryption key must be 32 bytes") + +// Encrypter provides field encryption and decryption. +// Encrypted values are currently limited to strings, which is +// reflected in the interface design. +type Encrypter interface { + Encrypt(plaintext string) ([]byte, error) + Decrypt(ciphertext []byte) (string, error) +} diff --git a/encrypt/none.go b/encrypt/none.go new file mode 100644 index 0000000000..91897cc09a --- /dev/null +++ b/encrypt/none.go @@ -0,0 +1,19 @@ +// Copyright 2023 Harness Inc. All rights reserved. +// Use of this source code is governed by the Polyform Free Trial License +// that can be found in the LICENSE.md file for this repository. + +package encrypt + +// none is an encryption strategy that stores secret +// values in plain text. This is the default strategy +// when no key is specified. +type none struct { +} + +func (*none) Encrypt(plaintext string) ([]byte, error) { + return []byte(plaintext), nil +} + +func (*none) Decrypt(ciphertext []byte) (string, error) { + return string(ciphertext), nil +} diff --git a/encrypt/wire.go b/encrypt/wire.go new file mode 100644 index 0000000000..fd87d77910 --- /dev/null +++ b/encrypt/wire.go @@ -0,0 +1,33 @@ +// Copyright 2023 Harness, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package encrypt + +import ( + "github.com/harness/gitness/types" + + "github.com/google/wire" +) + +// WireSet provides a wire set for this package. +var WireSet = wire.NewSet( + ProvideEncrypter, +) + +func ProvideEncrypter(config *types.Config) (Encrypter, error) { + if config.Encrypter.Secret == "" { + return &none{}, nil + } + return New(config.Encrypter.Secret, config.Encrypter.MixedContent) +} diff --git a/events/error.go b/events/error.go new file mode 100644 index 0000000000..95b2767684 --- /dev/null +++ b/events/error.go @@ -0,0 +1,55 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package events + +import ( + "errors" + "fmt" +) + +var ( + errDiscardEvent = &discardEventError{} +) + +// discardEventError is an error which, if returned by the event handler, +// causes the source event to be discarded despite any erros. +type discardEventError struct { + inner error +} + +func NewDiscardEventError(inner error) error { + return &discardEventError{ + inner: inner, + } +} + +func NewDiscardEventErrorf(format string, args ...interface{}) error { + return &discardEventError{ + inner: fmt.Errorf(format, args...), + } +} + +func (e *discardEventError) Error() string { + return fmt.Sprintf("discarding requested due to: %s", e.inner) +} + +func (e *discardEventError) Unwrap() error { + return e.inner +} + +func (e *discardEventError) Is(target error) bool { + // NOTE: it's an internal event and we only ever check with the singleton instance + return errors.Is(target, errDiscardEvent) +} diff --git a/events/events.go b/events/events.go new file mode 100644 index 0000000000..a34f276a2d --- /dev/null +++ b/events/events.go @@ -0,0 +1,70 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package events + +import ( + "errors" + "fmt" + "time" +) + +const ( + // streamPayloadKey is the key used for storing the event in a stream message. 
+ streamPayloadKey = "event" +) + +type Event[T interface{}] struct { + ID string `json:"id"` + Timestamp time.Time `json:"timestamp"` + Payload T `json:"payload"` +} + +// EventType describes the type of event. +type EventType string + +// getStreamID generates the streamID for a given category and type of event. +func getStreamID(category string, event EventType) string { + return fmt.Sprintf("events:%s:%s", category, event) +} + +// Mode defines the different modes of the event framework. +type Mode string + +const ( + ModeRedis Mode = "redis" + ModeInMemory Mode = "inmemory" +) + +// Config defines the config of the events system. +type Config struct { + Mode Mode `envconfig:"GITNESS_EVENTS_MODE" default:"inmemory"` + Namespace string `envconfig:"GITNESS_EVENTS_NAMESPACE" default:"gitness"` + MaxStreamLength int64 `envconfig:"GITNESS_EVENTS_MAX_STREAM_LENGTH" default:"10000"` + ApproxMaxStreamLength bool `envconfig:"GITNESS_EVENTS_APPROX_MAX_STREAM_LENGTH" default:"true"` +} + +func (c *Config) Validate() error { + if c == nil { + return errors.New("config is required") + } + if c.Mode != ModeRedis && c.Mode != ModeInMemory { + return fmt.Errorf("config.Mode '%s' is not supported", c.Mode) + } + if c.MaxStreamLength < 1 { + return errors.New("config.MaxStreamLength has to be a positive number") + } + + return nil +} diff --git a/events/options.go b/events/options.go new file mode 100644 index 0000000000..5bebb8faf4 --- /dev/null +++ b/events/options.go @@ -0,0 +1,68 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package events + +import ( + "time" + + "github.com/harness/gitness/stream" +) + +/* + * Expose event package options to simplify usage for consumers by hiding the stream package. + * Since we only forward the options, event options are simply aliases of stream options. + */ + +// ReaderOption can be used to configure event readers. +type ReaderOption stream.ConsumerOption + +func toStreamConsumerOptions(opts []ReaderOption) []stream.ConsumerOption { + streamOpts := make([]stream.ConsumerOption, len(opts)) + for i, opt := range opts { + streamOpts[i] = stream.ConsumerOption(opt) + } + return streamOpts +} + +// WithConcurrency sets up the concurrency of the reader. +func WithConcurrency(concurrency int) ReaderOption { + return stream.WithConcurrency(concurrency) +} + +// WithHandlerOptions sets up the default options for event handlers. +func WithHandlerOptions(opts ...HandlerOption) ReaderOption { + return stream.WithHandlerOptions(toStreamHandlerOptions(opts)...) +} + +// HandlerOption can be used to configure event handlers. +type HandlerOption stream.HandlerOption + +func toStreamHandlerOptions(opts []HandlerOption) []stream.HandlerOption { + streamOpts := make([]stream.HandlerOption, len(opts)) + for i, opt := range opts { + streamOpts[i] = stream.HandlerOption(opt) + } + return streamOpts +} + +// WithMaxRetries can be used to set the max retry count for a specific event handler. 
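+// It can be passed directly to ReaderRegisterEvent for a single handler, or applied
+// as a reader-wide default via WithHandlerOptions.
+//
+// Example (illustrative sketch):
+//
+//	r.Configure(WithConcurrency(2), WithHandlerOptions(WithMaxRetries(3), WithIdleTimeout(time.Minute)))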
+func WithMaxRetries(maxRetries int) HandlerOption { + return stream.WithMaxRetries(maxRetries) +} + +// WithIdleTimeout can be used to set the idle timeout for a specific event handler. +func WithIdleTimeout(timeout time.Duration) HandlerOption { + return stream.WithIdleTimeout(timeout) +} diff --git a/events/reader.go b/events/reader.go new file mode 100644 index 0000000000..ef5ecc3ea1 --- /dev/null +++ b/events/reader.go @@ -0,0 +1,225 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package events + +import ( + "bytes" + "context" + "encoding/gob" + "errors" + "fmt" + + "github.com/rs/zerolog/log" +) + +// ReaderFactoryFunc is an abstraction of a factory method that creates customized Reader implementations (type [R]). +// It is triggered by the ReaderFactory to create a new instance of the Reader to launch. +// The provided GenericReader object is available exclusively to the factory method (every call has a fresh instance) +// and should be used as base of any custom Reader implementation (use ReaderRegisterEvent to register custom handler). +type ReaderFactoryFunc[R Reader] func(reader *GenericReader) (R, error) + +// ReaderFactory allows to launch event readers of type [R] (can be GenericReader or customized readers). +type ReaderFactory[R Reader] struct { + category string + streamConsumerFactoryFn StreamConsumerFactoryFunc + readerFactoryFn ReaderFactoryFunc[R] +} + +// Launch launches a new reader for the provided group and client name. +// The setup method should be used to register the differents events the reader will act on. +// To stop the reader and cleanup its resources the returned ReaderCanceler can be used. +// The reader also cancels automatically when the provided context is canceled. +// NOTE: Do not setup the reader outside of the setup method! +func (f *ReaderFactory[R]) Launch(ctx context.Context, + groupName string, readerName string, setup func(R) error) (*ReaderCanceler, error) { + if groupName == "" { + return nil, errors.New("groupName can't be empty") + } + if setup == nil { + return nil, errors.New("setup function can't be nil") + } + + // setup ctx with copied logger that has extra fields set + log := log.Ctx(ctx).With(). + Str("events.category", f.category). + Str("events.group_name", groupName). + Str("events.reader_name", readerName). 
+ Logger() + + // create new stream consumer using factory method + streamConsumer, err := f.streamConsumerFactoryFn(groupName, readerName) + if err != nil { + return nil, fmt.Errorf("failed to create new stream consumer: %w", err) + } + + // create generic reader object + innerReader := &GenericReader{ + streamConsumer: streamConsumer, + category: f.category, + } + + // create new reader (could return the innerReader itself, but also allows to launch customized readers) + reader, err := f.readerFactoryFn(innerReader) + if err != nil { + //nolint:gocritic // only way to achieve this AFAIK - lint proposal is not building + return nil, fmt.Errorf("failed creation of event reader of type %T: %w", *new(R), err) + } + + // execute setup function on reader (will configure concurrency, processingTimeout, ..., and register handlers) + err = setup(reader) + if err != nil { + return nil, fmt.Errorf("failed custom setup of event reader: %w", err) + } + + // hook into all available logs + go func(errorCh <-chan error) { + for err := range errorCh { + log.Err(err).Msg("received an error from stream consumer") + } + }(streamConsumer.Errors()) + + go func(infoCh <-chan string) { + for s := range infoCh { + log.Info().Msgf("stream consumer: %s", s) + } + }(streamConsumer.Infos()) + + // prepare context (inject logger and make canceable) + ctx = log.WithContext(ctx) + ctx, cancelFn := context.WithCancel(ctx) + + // start consumer + err = innerReader.streamConsumer.Start(ctx) + if err != nil { + cancelFn() + return nil, fmt.Errorf("failed to start consumer: %w", err) + } + + return &ReaderCanceler{ + cancelFn: func() error { + cancelFn() + return nil + }, + }, nil +} + +// ReaderCanceler exposes the functionality to cancel a reader explicitly. +type ReaderCanceler struct { + canceled bool + cancelFn func() error +} + +func (d *ReaderCanceler) Cancel() error { + if d.canceled { + return errors.New("reader has already been canceled") + } + + // call cancel (might be async) + err := d.cancelFn() + if err != nil { + return fmt.Errorf("failed to cancel reader: %w", err) + } + + d.canceled = true + + return nil +} + +// Reader specifies the minimum functionality a reader should expose. +// NOTE: we don't want to enforce any event registration methods here, allowing full control for customized readers. +type Reader interface { + Configure(opts ...ReaderOption) +} + +type HandlerFunc[T interface{}] func(context.Context, *Event[T]) error + +// GenericReader represents an event reader that supports registering type safe handlers +// for an arbitrary set of custom events within a given event category using the ReaderRegisterEvent method. +// NOTE: Optimally this should be an interface with RegisterEvent[T] method, but that's currently not possible in go. +// IMPORTANT: This reader should not be instantiated from external packages. +type GenericReader struct { + streamConsumer StreamConsumer + category string +} + +// ReaderRegisterEvent registers a type safe handler function on the reader for a specific event. +// This method allows to register type safe handlers without the need of handling the raw stream payload. +// NOTE: Generic arguments are not allowed for struct methods, hence pass the reader as input parameter. +func ReaderRegisterEvent[T interface{}](reader *GenericReader, + eventType EventType, fn HandlerFunc[T], opts ...HandlerOption) error { + streamID := getStreamID(reader.category, eventType) + + // register handler for event specific stream. 
+ return reader.streamConsumer.Register(streamID, + func(ctx context.Context, messageID string, streamPayload map[string]interface{}) error { + if streamPayload == nil { + return fmt.Errorf("stream payload is nil for message '%s'", messageID) + } + + // retrieve event from stream payload + eventRaw, ok := streamPayload[streamPayloadKey] + if !ok { + return fmt.Errorf("stream payload doesn't contain event (key: '%s') for message '%s'", streamPayloadKey, messageID) + } + + // retrieve bytes from raw event + // NOTE: Redis returns []byte as string - to avoid unnecessary conversion we handle both types here. + var eventBytes []byte + switch v := eventRaw.(type) { + case string: + eventBytes = []byte(v) + case []byte: + eventBytes = v + default: + return fmt.Errorf("stream payload is not of expected type string or []byte but of type %T (message '%s')", + eventRaw, messageID) + } + + // decode event to correct type + var event Event[T] + decoder := gob.NewDecoder(bytes.NewReader(eventBytes)) + err := decoder.Decode(&event) + if err != nil { + //nolint:gocritic // only way to achieve this AFAIK - lint proposal is not building + return fmt.Errorf("stream payload can't be decoded into type %T (message '%s')", *new(T), messageID) + } + + // populate event ID using the message ID (has to be populated here, producer doesn't know the message ID yet) + event.ID = messageID + + // update ctx with event type for proper logging + log := log.Ctx(ctx).With(). + Str("events.type", string(eventType)). + Str("events.id", event.ID). + Logger() + ctx = log.WithContext(ctx) + + // call provided handler with correctly typed payload + err = fn(ctx, &event) + + // handle discardEventError + if errors.Is(err, errDiscardEvent) { + log.Warn().Err(err).Msgf("discarding event '%s'", event.ID) + return nil + } + + // any other error we return as is + return err + }, toStreamHandlerOptions(opts)...) +} + +func (r *GenericReader) Configure(opts ...ReaderOption) { + r.streamConsumer.Configure(toStreamConsumerOptions(opts)...) +} diff --git a/events/reporter.go b/events/reporter.go new file mode 100644 index 0000000000..7e820bef95 --- /dev/null +++ b/events/reporter.go @@ -0,0 +1,60 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package events + +import ( + "bytes" + "context" + "encoding/gob" + "fmt" + "time" +) + +// GenericReporter represents an event reporter that supports sending typesafe messages +// for an arbitrary set of custom events within an event category using the ReporterSendEvent method. +// NOTE: Optimally this should be an interface with SendEvent[T] method, but that's not possible in go. +type GenericReporter struct { + producer StreamProducer + category string +} + +// ReportEvent reports an event using the provided GenericReporter. +// Returns the reported event's ID in case of success. +// NOTE: This call is blocking until the event was send (not until it was processed). 
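+//
+// Example (illustrative sketch; the event type and payload struct are hypothetical):
+//
+//	type branchCreatedPayload struct {
+//		RepoID     int64
+//		BranchName string
+//	}
+//
+//	eventID, err := ReporterSendEvent(reporter, ctx, EventType("branch-created"),
+//		branchCreatedPayload{RepoID: 1, BranchName: "main"})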
+// +//nolint:revive // emphasize that this is meant to be an operation on *GenericReporter +func ReporterSendEvent[T interface{}](reporter *GenericReporter, ctx context.Context, + eventType EventType, payload T) (string, error) { + streamID := getStreamID(reporter.category, eventType) + event := Event[T]{ + ID: "", // will be set by GenericReader + Timestamp: time.Now(), + Payload: payload, + } + + buff := &bytes.Buffer{} + encoder := gob.NewEncoder(buff) + + if err := encoder.Encode(&event); err != nil { + return "", fmt.Errorf("failed to encode payload: %w", err) + } + + streamPayload := map[string]interface{}{ + streamPayloadKey: buff.Bytes(), + } + + // We are using the message ID as event ID. + return reporter.producer.Send(ctx, streamID, streamPayload) +} diff --git a/events/stream.go b/events/stream.go new file mode 100644 index 0000000000..37c973724e --- /dev/null +++ b/events/stream.go @@ -0,0 +1,38 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package events + +import ( + "context" + + "github.com/harness/gitness/stream" +) + +// StreamProducer is an abstraction of a producer from the streams package. +type StreamProducer interface { + Send(ctx context.Context, streamID string, payload map[string]interface{}) (string, error) +} + +// StreamConsumer is an abstraction of a consumer from the streams package. +type StreamConsumer interface { + Register(streamID string, handler stream.HandlerFunc, opts ...stream.HandlerOption) error + Configure(opts ...stream.ConsumerOption) + Start(ctx context.Context) error + Errors() <-chan error + Infos() <-chan string +} + +// StreamConsumerFactoryFunc is an abstraction of a factory method for stream consumers. +type StreamConsumerFactoryFunc func(groupName string, consumerName string) (StreamConsumer, error) diff --git a/events/system.go b/events/system.go new file mode 100644 index 0000000000..d25916ff5e --- /dev/null +++ b/events/system.go @@ -0,0 +1,76 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package events + +import "errors" + +// System represents a single contained event system that is used +// to setup event Reporters and ReaderFactories. 
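+//
+// Typical wiring (illustrative sketch; category, group, reader and payload names are examples only):
+//
+//	reporter, _ := NewReporter(system, "git")
+//	factory, _ := NewReaderFactory(system, "git",
+//		func(r *GenericReader) (*GenericReader, error) { return r, nil })
+//	canceler, _ := factory.Launch(ctx, "my-service", "instance-1", func(r *GenericReader) error {
+//		return ReaderRegisterEvent(r, EventType("branch-created"),
+//			func(ctx context.Context, e *Event[branchCreatedPayload]) error {
+//				// handle the typed payload here
+//				return nil
+//			})
+//	})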
+type System struct { + streamConsumerFactoryFn StreamConsumerFactoryFunc + streamProducer StreamProducer +} + +func NewSystem(streamConsumerFactoryFunc StreamConsumerFactoryFunc, streamProducer StreamProducer) (*System, error) { + if streamConsumerFactoryFunc == nil { + return nil, errors.New("streamConsumerFactoryFunc can't be empty") + } + if streamProducer == nil { + return nil, errors.New("streamProducer can't be empty") + } + + return &System{ + streamConsumerFactoryFn: streamConsumerFactoryFunc, + streamProducer: streamProducer, + }, nil +} + +func NewReaderFactory[R Reader](system *System, category string, fn ReaderFactoryFunc[R]) (*ReaderFactory[R], error) { + if system == nil { + return nil, errors.New("system can't be empty") + } + if category == "" { + return nil, errors.New("category can't be empty") + } + if fn == nil { + return nil, errors.New("fn can't be empty") + } + + return &ReaderFactory[R]{ + // values coming from system + streamConsumerFactoryFn: system.streamConsumerFactoryFn, + + // values coming from input parameters + category: category, + readerFactoryFn: fn, + }, nil +} + +func NewReporter(system *System, category string) (*GenericReporter, error) { + if system == nil { + return nil, errors.New("system can't be empty") + } + if category == "" { + return nil, errors.New("category can't be empty") + } + + return &GenericReporter{ + // values coming from system + producer: system.streamProducer, + + // values coming from input parameters + category: category, + }, nil +} diff --git a/events/wire.go b/events/wire.go new file mode 100644 index 0000000000..eddca013e0 --- /dev/null +++ b/events/wire.go @@ -0,0 +1,101 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package events + +import ( + "errors" + "fmt" + + "github.com/harness/gitness/stream" + + "github.com/go-redis/redis/v8" + "github.com/google/wire" +) + +// WireSet provides a wire set for this package. 
+var WireSet = wire.NewSet( + ProvideSystem, +) + +func ProvideSystem(config Config, redisClient redis.UniversalClient) (*System, error) { + if err := config.Validate(); err != nil { + return nil, fmt.Errorf("provided config is invalid: %w", err) + } + + var system *System + var err error + switch config.Mode { + case ModeInMemory: + system, err = provideSystemInMemory(config) + case ModeRedis: + system, err = provideSystemRedis(config, redisClient) + default: + return nil, fmt.Errorf("events system mode '%s' is not supported", config.Mode) + } + + if err != nil { + return nil, fmt.Errorf("failed to setup event system for mode '%s': %w", config.Mode, err) + } + + return system, nil +} + +func provideSystemInMemory(config Config) (*System, error) { + broker, err := stream.NewMemoryBroker(config.MaxStreamLength) + if err != nil { + return nil, err + } + + return NewSystem( + newMemoryStreamConsumerFactoryMethod(broker, config.Namespace), + newMemoryStreamProducer(broker, config.Namespace), + ) +} + +func provideSystemRedis(config Config, redisClient redis.UniversalClient) (*System, error) { + if redisClient == nil { + return nil, errors.New("redis client required") + } + + return NewSystem( + newRedisStreamConsumerFactoryMethod(redisClient, config.Namespace), + newRedisStreamProducer(redisClient, config.Namespace, + config.MaxStreamLength, config.ApproxMaxStreamLength), + ) +} + +func newMemoryStreamConsumerFactoryMethod(broker *stream.MemoryBroker, namespace string) StreamConsumerFactoryFunc { + return func(groupName string, consumerName string) (StreamConsumer, error) { + return stream.NewMemoryConsumer(broker, namespace, groupName) + } +} + +func newMemoryStreamProducer(broker *stream.MemoryBroker, namespace string) StreamProducer { + return stream.NewMemoryProducer(broker, namespace) +} + +func newRedisStreamConsumerFactoryMethod( + redisClient redis.UniversalClient, + namespace string, +) StreamConsumerFactoryFunc { + return func(groupName string, consumerName string) (StreamConsumer, error) { + return stream.NewRedisConsumer(redisClient, namespace, groupName, consumerName) + } +} + +func newRedisStreamProducer(redisClient redis.UniversalClient, namespace string, + maxStreamLength int64, approxMaxStreamLength bool) StreamProducer { + return stream.NewRedisProducer(redisClient, namespace, maxStreamLength, approxMaxStreamLength) +} diff --git a/githook/cli.go b/githook/cli.go new file mode 100644 index 0000000000..ab28e84518 --- /dev/null +++ b/githook/cli.go @@ -0,0 +1,176 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package githook + +import ( + "context" + "errors" + "os/signal" + "syscall" + + "gopkg.in/alecthomas/kingpin.v2" +) + +const ( + // ParamPreReceive is the parameter under which the pre-receive operation is registered. + ParamPreReceive = "pre-receive" + // ParamUpdate is the parameter under which the update operation is registered. 
+ ParamUpdate = "update" + // ParamPostReceive is the parameter under which the post-receive operation is registered. + ParamPostReceive = "post-receive" + + // CommandNamePreReceive is the command used by git for the pre-receive hook + // (os.args[0] == "hooks/pre-receive"). + CommandNamePreReceive = "hooks/pre-receive" + // CommandNameUpdate is the command used by git for the update hook + // (os.args[0] == "hooks/update"). + CommandNameUpdate = "hooks/update" + // CommandNamePostReceive is the command used by git for the post-receive hook + // (os.args[0] == "hooks/post-receive"). + CommandNamePostReceive = "hooks/post-receive" +) + +// SanitizeArgsForGit sanitizes the command line arguments (os.Args) if the command indicates they are coming from git. +// Returns the santized args and true if the call comes from git, otherwise the original args are returned with false. +func SanitizeArgsForGit(command string, args []string) ([]string, bool) { + switch command { + case CommandNamePreReceive: + return append([]string{ParamPreReceive}, args...), true + case CommandNameUpdate: + return append([]string{ParamUpdate}, args...), true + case CommandNamePostReceive: + return append([]string{ParamPostReceive}, args...), true + default: + return args, false + } +} + +// KingpinRegister is an abstraction of an entity that allows to register commands. +// This is required to allow registering hook commands both on application and sub command level. +type KingpinRegister interface { + Command(name, help string) *kingpin.CmdClause +} + +var ( + // ErrDisabled can be returned by the loading function to indicate the githook has been disabled. + // Returning the error will cause the githook execution to be skipped (githook is noop and returns success). + ErrDisabled = errors.New("githook disabled") +) + +// LoadCLICoreFunc is a function that creates a new CLI core that's used for githook cli execution. +// This allows users to initialize their own CLI core with custom Client and configuration. +type LoadCLICoreFunc func() (*CLICore, error) + +// RegisterAll registers all githook commands. +func RegisterAll(cmd KingpinRegister, loadCoreFn LoadCLICoreFunc) { + RegisterPreReceive(cmd, loadCoreFn) + RegisterUpdate(cmd, loadCoreFn) + RegisterPostReceive(cmd, loadCoreFn) +} + +// RegisterPreReceive registers the pre-receive githook command. +func RegisterPreReceive(cmd KingpinRegister, loadCoreFn LoadCLICoreFunc) { + c := &preReceiveCommand{ + loadCoreFn: loadCoreFn, + } + + cmd.Command(ParamPreReceive, "hook that is executed before any reference of the push is updated"). + Action(c.run) +} + +// RegisterUpdate registers the update githook command. +func RegisterUpdate(cmd KingpinRegister, loadCoreFn LoadCLICoreFunc) { + c := &updateCommand{ + loadCoreFn: loadCoreFn, + } + + subCmd := cmd.Command(ParamUpdate, "hook that is executed before the specific reference gets updated"). + Action(c.run) + + subCmd.Arg("ref", "reference for which the hook is executed"). + Required(). + StringVar(&c.ref) + + subCmd.Arg("old", "old commit sha"). + Required(). + StringVar(&c.oldSHA) + + subCmd.Arg("new", "new commit sha"). + Required(). + StringVar(&c.newSHA) +} + +// RegisterPostReceive registers the post-receive githook command. +func RegisterPostReceive(cmd KingpinRegister, loadCoreFn LoadCLICoreFunc) { + c := &postReceiveCommand{ + loadCoreFn: loadCoreFn, + } + + cmd.Command(ParamPostReceive, "hook that is executed after all references of the push got updated"). 
+ Action(c.run) +} + +type preReceiveCommand struct { + loadCoreFn LoadCLICoreFunc +} + +func (c *preReceiveCommand) run(*kingpin.ParseContext) error { + return run(c.loadCoreFn, func(ctx context.Context, core *CLICore) error { + return core.PreReceive(ctx) + }) +} + +type updateCommand struct { + loadCoreFn LoadCLICoreFunc + + ref string + oldSHA string + newSHA string +} + +func (c *updateCommand) run(*kingpin.ParseContext) error { + return run(c.loadCoreFn, func(ctx context.Context, core *CLICore) error { + return core.Update(ctx, c.ref, c.oldSHA, c.newSHA) + }) +} + +type postReceiveCommand struct { + loadCoreFn LoadCLICoreFunc +} + +func (c *postReceiveCommand) run(*kingpin.ParseContext) error { + return run(c.loadCoreFn, func(ctx context.Context, core *CLICore) error { + return core.PostReceive(ctx) + }) +} + +func run(loadCoreFn LoadCLICoreFunc, fn func(ctx context.Context, core *CLICore) error) error { + core, err := loadCoreFn() + if errors.Is(err, ErrDisabled) { + // complete operation successfully without making any calls to the server. + return nil + } + if err != nil { + return err + } + + // Create context that listens for the interrupt signal from the OS and has a timeout. + ctx, stop := signal.NotifyContext(context.Background(), syscall.SIGINT, syscall.SIGTERM) + defer stop() + ctx, cancel := context.WithTimeout(ctx, core.executionTimeout) + defer cancel() + + return fn(ctx, core) +} diff --git a/githook/client.go b/githook/client.go new file mode 100644 index 0000000000..1f899d4f2d --- /dev/null +++ b/githook/client.go @@ -0,0 +1,148 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package githook + +import ( + "bytes" + "context" + "encoding/json" + "errors" + "fmt" + "io" + "net/http" + "strings" +) + +const ( + // HTTPRequestPathPreReceive is the subpath under the provided base url the client uses to call pre-receive. + HTTPRequestPathPreReceive = "pre-receive" + + // HTTPRequestPathPostReceive is the subpath under the provided base url the client uses to call post-receive. + HTTPRequestPathPostReceive = "post-receive" + + // HTTPRequestPathUpdate is the subpath under the provided base url the client uses to call update. + HTTPRequestPathUpdate = "update" +) + +var ( + ErrNotFound = fmt.Errorf("not found") +) + +// Client is the Client used to call the githooks api of gitness api server. +type Client struct { + httpClient *http.Client + + // baseURL is the base url of the gitness api server. + baseURL string + + // requestPreparation is used to prepare the request before sending. + // This can be used to inject required headers. + requestPreparation func(*http.Request) *http.Request +} + +func NewClient(httpClient *http.Client, baseURL string, requestPreparation func(*http.Request) *http.Request) *Client { + return &Client{ + httpClient: httpClient, + baseURL: strings.TrimRight(baseURL, "/"), + requestPreparation: requestPreparation, + } +} + +// PreReceive calls the pre-receive githook api of the gitness api server. 
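+//
+// A minimal usage sketch (illustrative only; the base URL, http.Client setup, request
+// preparation, and the oldSHA/newSHA values are assumptions of this example, not fixed values):
+//
+//	client := NewClient(http.DefaultClient, "http://localhost:3000/api/internal/hooks", nil)
+//	out, err := client.PreReceive(ctx, &PreReceiveInput{
+//		RefUpdates: []ReferenceUpdate{{Ref: "refs/heads/main", Old: oldSHA, New: newSHA}},
+//	})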
+func (c *Client) PreReceive(ctx context.Context,
+	in *PreReceiveInput) (*Output, error) {
+	return c.githook(ctx, HTTPRequestPathPreReceive, in)
+}
+
+// Update calls the update githook api of the gitness api server.
+func (c *Client) Update(ctx context.Context,
+	in *UpdateInput) (*Output, error) {
+	return c.githook(ctx, HTTPRequestPathUpdate, in)
+}
+
+// PostReceive calls the post-receive githook api of the gitness api server.
+func (c *Client) PostReceive(ctx context.Context,
+	in *PostReceiveInput) (*Output, error) {
+	return c.githook(ctx, HTTPRequestPathPostReceive, in)
+}
+
+// githook executes the requested githook type using the provided input.
+func (c *Client) githook(ctx context.Context, githookType string, in interface{}) (*Output, error) {
+	uri := c.baseURL + "/" + githookType
+	bodyBytes, err := json.Marshal(in)
+	if err != nil {
+		return nil, fmt.Errorf("failed to serialize input: %w", err)
+	}
+
+	req, err := http.NewRequestWithContext(ctx, http.MethodPost, uri, bytes.NewBuffer(bodyBytes))
+	if err != nil {
+		return nil, fmt.Errorf("failed to create new http request: %w", err)
+	}
+	req.Header.Add("Content-Type", "application/json")
+
+	// prepare request if configured
+	if c.requestPreparation != nil {
+		req = c.requestPreparation(req)
+	}
+
+	// Execute the request
+	resp, err := c.httpClient.Do(req)
+
+	// ensure the body is closed after we read (independent of status code or error)
+	if resp != nil && resp.Body != nil {
+		// Use function to satisfy the linter which complains about unhandled errors otherwise
+		defer func() { _ = resp.Body.Close() }()
+	}
+
+	if err != nil {
+		return nil, fmt.Errorf("request execution failed: %w", err)
+	}
+
+	return unmarshalResponse[Output](resp)
+}
+
+// unmarshalResponse reads the response body and, if there are no errors, unmarshals it into
+// the data struct.
+func unmarshalResponse[T any](resp *http.Response) (*T, error) {
+	if resp == nil {
+		return nil, errors.New("http response is empty")
+	}
+
+	if resp.StatusCode == http.StatusNotFound {
+		return nil, ErrNotFound
+	}
+
+	if resp.StatusCode != http.StatusOK {
+		return nil, fmt.Errorf("expected response code 200 but got: %s", resp.Status)
+	}
+
+	// ensure we actually got a body returned.
+	if resp.Body == nil {
+		return nil, errors.New("http response body is empty")
+	}
+
+	rawBody, err := io.ReadAll(resp.Body)
+	if err != nil {
+		return nil, fmt.Errorf("error reading response body: %w", err)
+	}
+
+	body := new(T)
+	err = json.Unmarshal(rawBody, body)
+	if err != nil {
+		return nil, fmt.Errorf("error deserializing response body: %w", err)
+	}
+
+	return body, nil
+}
diff --git a/githook/core.go b/githook/core.go
new file mode 100644
index 0000000000..ce70e0fa25
--- /dev/null
+++ b/githook/core.go
@@ -0,0 +1,142 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package githook
+
+import (
+	"bufio"
+	"context"
+	"errors"
+	"fmt"
+	"io"
+	"os"
+	"strings"
+	"time"
+)
+
+// CLICore implements the core of a githook cli. It uses the client and execution timeout
+// to perform githook operations as part of a cli.
+type CLICore struct {
+	client           *Client
+	executionTimeout time.Duration
+}
+
+// NewCLICore returns a new CLICore using the provided client and execution timeout.
+func NewCLICore(client *Client, executionTimeout time.Duration) *CLICore {
+	return &CLICore{
+		client:           client,
+		executionTimeout: executionTimeout,
+	}
+}
+
+// PreReceive executes the pre-receive git hook.
+func (c *CLICore) PreReceive(ctx context.Context) error {
+	refUpdates, err := getUpdatedReferencesFromStdIn()
+	if err != nil {
+		return fmt.Errorf("failed to read updated references from stdin: %w", err)
+	}
+
+	in := &PreReceiveInput{
+		RefUpdates: refUpdates,
+	}
+
+	out, err := c.client.PreReceive(ctx, in)
+
+	return handleServerHookOutput(out, err)
+}
+
+// Update executes the update git hook.
+func (c *CLICore) Update(ctx context.Context, ref string, oldSHA string, newSHA string) error {
+	in := &UpdateInput{
+		RefUpdate: ReferenceUpdate{
+			Ref: ref,
+			Old: oldSHA,
+			New: newSHA,
+		},
+	}
+
+	out, err := c.client.Update(ctx, in)
+
+	return handleServerHookOutput(out, err)
+}
+
+// PostReceive executes the post-receive git hook.
+func (c *CLICore) PostReceive(ctx context.Context) error {
+	refUpdates, err := getUpdatedReferencesFromStdIn()
+	if err != nil {
+		return fmt.Errorf("failed to read updated references from stdin: %w", err)
+	}
+
+	in := &PostReceiveInput{
+		RefUpdates: refUpdates,
+	}
+
+	out, err := c.client.PostReceive(ctx, in)
+
+	return handleServerHookOutput(out, err)
+}
+
+func handleServerHookOutput(out *Output, err error) error {
+	if err != nil {
+		return fmt.Errorf("an error occurred when calling the server: %w", err)
+	}
+
+	if out == nil {
+		return errors.New("the server returned an empty output")
+	}
+
+	if out.Error != nil {
+		return errors.New(*out.Error)
+	}
+
+	return nil
+}
+
+// getUpdatedReferencesFromStdIn reads the updated references provided by git from stdin.
+// The expected format is "<old-value> SP <new-value> SP <ref-name> LF".
+// For more details see https://git-scm.com/docs/githooks#pre-receive
+func getUpdatedReferencesFromStdIn() ([]ReferenceUpdate, error) {
+	reader := bufio.NewReader(os.Stdin)
+	updatedRefs := []ReferenceUpdate{}
+	for {
+		line, err := reader.ReadString('\n')
+		// if end of file is reached, break the loop
+		if err == io.EOF {
+			break
+		}
+		if err != nil {
+			fmt.Printf("Error when reading from standard input - %s\n", err) //nolint:forbidigo // executes as cli.
+			return nil, err
+		}
+
+		if len(line) == 0 {
+			return nil, errors.New("ref data from stdin contains empty line - not expected")
+		}
+
+		// splitting line of expected form "<old-value> SP <new-value> SP <ref-name> LF"
+		splitGitHookData := strings.Split(line[:len(line)-1], " ")
+		if len(splitGitHookData) != 3 {
+			return nil, fmt.Errorf("received invalid data format or didn't receive enough parameters - %v",
+				splitGitHookData)
+		}
+
+		updatedRefs = append(updatedRefs, ReferenceUpdate{
+			Old: splitGitHookData[0],
+			New: splitGitHookData[1],
+			Ref: splitGitHookData[2],
+		})
+	}
+
+	return updatedRefs, nil
+}
diff --git a/githook/env.go b/githook/env.go
new file mode 100644
index 0000000000..ebfc96bb70
--- /dev/null
+++ b/githook/env.go
@@ -0,0 +1,111 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package githook + +import ( + "bytes" + "encoding/base64" + "encoding/gob" + "errors" + "fmt" + "os" +) + +const ( + // envNamePayload defines the environment variable name used to send the payload to githook binary. + envNamePayload = "GIT_HOOK_PAYLOAD" +) + +var ( + // ErrEnvVarNotFound is an error that is returned in case the environment variable isn't found. + ErrEnvVarNotFound = errors.New("environment variable not found") +) + +// GenerateEnvironmentVariables generates the environment variables that should be used when calling git +// to ensure the payload will be available to the githook cli. +func GenerateEnvironmentVariables(payload any) (map[string]string, error) { + // serialize the payload + payloadBuff := &bytes.Buffer{} + encoder := gob.NewEncoder(payloadBuff) + if err := encoder.Encode(payload); err != nil { + return nil, fmt.Errorf("failed to encode payload: %w", err) + } + + // send it as base64 to avoid issues with any problematic characters + // NOTE: this will blow up the payload by ~33%, though it's not expected to be too big. + // On the other hand, we save a lot of size by only needing one environment variable name. + payloadBase64 := base64.StdEncoding.EncodeToString(payloadBuff.Bytes()) + + return map[string]string{ + envNamePayload: payloadBase64, + }, nil +} + +// LoadPayloadFromMap loads the payload from a map containing environment variables in a map format. +func LoadPayloadFromMap[T any](envVars map[string]string) (T, error) { + var payload T + + // retrieve payload from environment variables + payloadBase64, ok := envVars[envNamePayload] + if !ok { + return payload, ErrEnvVarNotFound + } + + return decodePayload[T](payloadBase64) +} + +// LoadPayloadFromEnvironment loads the githook payload from the environment. +func LoadPayloadFromEnvironment[T any]() (T, error) { + var payload T + + // retrieve payload from environment variables + payloadBase64, err := getEnvironmentVariable(envNamePayload) + if err != nil { + return payload, fmt.Errorf("failed to load payload from environment variables: %w", err) + } + + return decodePayload[T](payloadBase64) +} + +func decodePayload[T any](encodedPayload string) (T, error) { + var payload T + // decode base64 + payloadBytes, err := base64.StdEncoding.DecodeString(encodedPayload) + if err != nil { + return payload, fmt.Errorf("failed to base64 decode payload: %w", err) + } + + // deserialize the payload + decoder := gob.NewDecoder(bytes.NewReader(payloadBytes)) + err = decoder.Decode(&payload) + if err != nil { + return payload, fmt.Errorf("failed to deserialize payload: %w", err) + } + + return payload, nil +} + +func getEnvironmentVariable(name string) (string, error) { + val, ok := os.LookupEnv(name) + if !ok { + return "", ErrEnvVarNotFound + } + + if val == "" { + return "", fmt.Errorf("'%s' found in env but it's empty", name) + } + + return val, nil +} diff --git a/githook/types.go b/githook/types.go new file mode 100644 index 0000000000..aba763cc74 --- /dev/null +++ b/githook/types.go @@ -0,0 +1,50 @@ +// Copyright 2023 Harness, Inc. 
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package githook
+
+// Output represents the output of server hook api calls.
+// TODO: support non-error messages (once we need it).
+type Output struct {
+	// Error contains the user facing error (like "branch is protected", ...).
+	Error *string `json:"error,omitempty"`
+}
+
+// ReferenceUpdate represents an update of a git reference.
+type ReferenceUpdate struct {
+	// Ref is the full name of the reference that got updated.
+	Ref string `json:"ref"`
+	// Old is the old commit hash (before the update).
+	Old string `json:"old"`
+	// New is the new commit hash (after the update).
+	New string `json:"new"`
+}
+
+// PostReceiveInput represents the input of the post-receive git hook.
+type PostReceiveInput struct {
+	// RefUpdates contains all references that got updated as part of the git operation.
+	RefUpdates []ReferenceUpdate `json:"ref_updates"`
+}
+
+// PreReceiveInput represents the input of the pre-receive git hook.
+type PreReceiveInput struct {
+	// RefUpdates contains all references that are being updated as part of the git operation.
+	RefUpdates []ReferenceUpdate `json:"ref_updates"`
+}
+
+// UpdateInput represents the input of the update git hook.
+type UpdateInput struct {
+	// RefUpdate contains information about the reference that is being updated.
+	RefUpdate ReferenceUpdate `json:"ref_update"`
+}
diff --git a/gitrpc/blame.go b/gitrpc/blame.go
new file mode 100644
index 0000000000..7795e984a0
--- /dev/null
+++ b/gitrpc/blame.go
@@ -0,0 +1,134 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package gitrpc
+
+import (
+	"context"
+	"errors"
+	"io"
+
+	"github.com/harness/gitness/gitrpc/rpc"
+)
+
+type BlameParams struct {
+	ReadParams
+	GitRef string
+	Path   string
+
+	// LineFrom restricts the blame output to lines starting from the provided line number (inclusive).
+	// Optional, ignored if value is 0.
+	LineFrom int
+
+	// LineTo restricts the blame output to lines up to the provided line number (inclusive).
+	// Optional, ignored if value is 0.
+	LineTo int
+}
+
+func (params *BlameParams) Validate() error {
+	if params == nil {
+		return ErrNoParamsProvided
+	}
+
+	if err := params.ReadParams.Validate(); err != nil {
+		return err
+	}
+
+	if params.GitRef == "" {
+		return Errorf(StatusInvalidArgument, "git ref needs to be provided")
+	}
+
+	if params.Path == "" {
+		return Errorf(StatusInvalidArgument, "file path needs to be provided")
+	}
+
+	if params.LineFrom < 0 || params.LineTo < 0 {
+		return Errorf(StatusInvalidArgument, "line from and line to can't be negative")
+	}
+
+	if params.LineTo > 0 && params.LineFrom > params.LineTo {
+		return Errorf(StatusInvalidArgument, "line from can't be after line to")
+	}
+
+	return nil
+}
+
+type BlamePart struct {
+	Commit *Commit  `json:"commit"`
+	Lines  []string `json:"lines"`
+}
+
+// Blame processes and streams the git blame output data.
+// The function returns two channels: the data channel and the error channel.
+// If any error happens during the operation it will be put to the error channel
+// and the streaming will stop. A maximum of one error can be put on the channel.
+func (c *Client) Blame(ctx context.Context, params *BlameParams) (<-chan *BlamePart, <-chan error) {
+	ch := make(chan *BlamePart)
+	chErr := make(chan error, 1)
+
+	go func() {
+		defer close(ch)
+		defer close(chErr)
+
+		if err := params.Validate(); err != nil {
+			chErr <- err
+			return
+		}
+
+		stream, err := c.blameService.Blame(ctx, &rpc.BlameRequest{
+			Base:   mapToRPCReadRequest(params.ReadParams),
+			GitRef: params.GitRef,
+			Path:   params.Path,
+			Range: &rpc.LineRange{
+				From: int32(params.LineFrom),
+				To:   int32(params.LineTo),
+			},
+		})
+		if err != nil {
+			chErr <- processRPCErrorf(err, "failed to get blame info from server")
+			return
+		}
+
+		for {
+			var part *rpc.BlamePart
+
+			part, err = stream.Recv()
+			if err != nil && !errors.Is(err, io.EOF) {
+				chErr <- processRPCErrorf(err, "blame failed")
+				return
+			}
+
+			if part == nil {
+				return
+			}
+
+			var commit *Commit
+
+			commit, err = mapRPCCommit(part.Commit)
+			if err != nil {
+				chErr <- processRPCErrorf(err, "failed to map rpc commit")
+				return
+			}
+
+			lines := make([]string, len(part.Lines))
+			for i, line := range part.Lines {
+				lines[i] = string(line)
+			}
+
+			ch <- &BlamePart{Commit: commit, Lines: lines}
+		}
+	}()
+
+	return ch, chErr
+}
diff --git a/gitrpc/blob.go b/gitrpc/blob.go
new file mode 100644
index 0000000000..9f75bc2c71
--- /dev/null
+++ b/gitrpc/blob.go
@@ -0,0 +1,77 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package gitrpc
+
+import (
+	"context"
+	"io"
+
+	"github.com/harness/gitness/gitrpc/internal/streamio"
+	"github.com/harness/gitness/gitrpc/rpc"
+)
+
+type GetBlobParams struct {
+	ReadParams
+	SHA       string
+	SizeLimit int64
+}
+
+type GetBlobOutput struct {
+	SHA string
+	// Size is the actual size of the blob.
+	Size int64
+	// ContentSize is the total number of bytes returned by the Content Reader.
+	ContentSize int64
+	// Content contains the (partial) content of the blob.
+ Content io.Reader +} + +func (c *Client) GetBlob(ctx context.Context, params *GetBlobParams) (*GetBlobOutput, error) { + if params == nil { + return nil, ErrNoParamsProvided + } + + stream, err := c.repoService.GetBlob(ctx, &rpc.GetBlobRequest{ + Base: mapToRPCReadRequest(params.ReadParams), + Sha: params.SHA, + SizeLimit: params.SizeLimit, + }) + if err != nil { + return nil, processRPCErrorf(err, "failed to start blob stream") + } + + msg, err := stream.Recv() + if err != nil { + return nil, processRPCErrorf(err, "failed to read blob header from stream") + } + + header := msg.GetHeader() + if header == nil { + return nil, Errorf(StatusInternal, "expected to receive header from server") + } + + // setup contentReader that reads content from grpc stream + contentReader := streamio.NewReader(func() ([]byte, error) { + resp, rErr := stream.Recv() + return resp.GetContent(), rErr + }) + + return &GetBlobOutput{ + SHA: header.GetSha(), + Size: header.GetSize(), + ContentSize: header.GetContentSize(), + Content: contentReader, + }, nil +} diff --git a/gitrpc/branch.go b/gitrpc/branch.go new file mode 100644 index 0000000000..c4ba9d8aef --- /dev/null +++ b/gitrpc/branch.go @@ -0,0 +1,201 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+
+package gitrpc
+
+import (
+	"context"
+	"errors"
+	"fmt"
+	"io"
+
+	"github.com/harness/gitness/gitrpc/check"
+	"github.com/harness/gitness/gitrpc/rpc"
+
+	"github.com/rs/zerolog/log"
+)
+
+type BranchSortOption int
+
+const (
+	BranchSortOptionDefault BranchSortOption = iota
+	BranchSortOptionName
+	BranchSortOptionDate
+)
+
+type CreateBranchParams struct {
+	WriteParams
+	// BranchName is the name of the branch
+	BranchName string
+	// Target is a git reference (branch / tag / commit SHA)
+	Target string
+}
+
+type CreateBranchOutput struct {
+	Branch Branch
+}
+
+type GetBranchParams struct {
+	ReadParams
+	// BranchName is the name of the branch
+	BranchName string
+}
+
+type GetBranchOutput struct {
+	Branch Branch
+}
+
+type DeleteBranchParams struct {
+	WriteParams
+	// BranchName is the name of the branch
+	BranchName string
+}
+
+type ListBranchesParams struct {
+	ReadParams
+	IncludeCommit bool
+	Query         string
+	Sort          BranchSortOption
+	Order         SortOrder
+	Page          int32
+	PageSize      int32
+}
+
+type ListBranchesOutput struct {
+	Branches []Branch
+}
+
+type Branch struct {
+	Name   string
+	SHA    string
+	Commit *Commit
+}
+
+func (c *Client) CreateBranch(ctx context.Context, params *CreateBranchParams) (*CreateBranchOutput, error) {
+	if params == nil {
+		return nil, ErrNoParamsProvided
+	}
+
+	if err := check.BranchName(params.BranchName); err != nil {
+		return nil, ErrInvalidArgumentf(err.Error())
+	}
+
+	resp, err := c.refService.CreateBranch(ctx, &rpc.CreateBranchRequest{
+		Base:       mapToRPCWriteRequest(params.WriteParams),
+		Target:     params.Target,
+		BranchName: params.BranchName,
+	})
+	if err != nil {
+		return nil, processRPCErrorf(err, "failed to create '%s' branch on server", params.BranchName)
+	}
+
+	var branch *Branch
+	branch, err = mapRPCBranch(resp.Branch)
+	if err != nil {
+		return nil, processRPCErrorf(err, "failed to map rpc branch %v", resp.Branch)
+	}
+
+	return &CreateBranchOutput{
+		Branch: *branch,
+	}, nil
+}
+
+func (c *Client) GetBranch(ctx context.Context, params *GetBranchParams) (*GetBranchOutput, error) {
+	if params == nil {
+		return nil, ErrNoParamsProvided
+	}
+	resp, err := c.refService.GetBranch(ctx, &rpc.GetBranchRequest{
+		Base:       mapToRPCReadRequest(params.ReadParams),
+		BranchName: params.BranchName,
+	})
+	if err != nil {
+		return nil, processRPCErrorf(err, "failed to get branch from server")
+	}
+
+	var branch *Branch
+	branch, err = mapRPCBranch(resp.GetBranch())
+	if err != nil {
+		return nil, fmt.Errorf("failed to map rpc branch: %w", err)
+	}
+
+	return &GetBranchOutput{
+		Branch: *branch,
+	}, nil
+}
+
+func (c *Client) DeleteBranch(ctx context.Context, params *DeleteBranchParams) error {
+	if params == nil {
+		return ErrNoParamsProvided
+	}
+	_, err := c.refService.DeleteBranch(ctx, &rpc.DeleteBranchRequest{
+		Base:       mapToRPCWriteRequest(params.WriteParams),
+		BranchName: params.BranchName,
+		// TODO: what are scenarios where we wouldn't want to force delete?
+		// Branch protection is a different story, and is built on top at the application layer.
+ Force: true, + }) + if err != nil { + return processRPCErrorf(err, "failed to delete branch on server") + } + + return nil +} + +func (c *Client) ListBranches(ctx context.Context, params *ListBranchesParams) (*ListBranchesOutput, error) { + if params == nil { + return nil, ErrNoParamsProvided + } + + stream, err := c.refService.ListBranches(ctx, &rpc.ListBranchesRequest{ + Base: mapToRPCReadRequest(params.ReadParams), + IncludeCommit: params.IncludeCommit, + Query: params.Query, + Sort: mapToRPCListBranchesSortOption(params.Sort), + Order: mapToRPCSortOrder(params.Order), + Page: params.Page, + PageSize: params.PageSize, + }) + if err != nil { + return nil, fmt.Errorf("failed to start stream for branches: %w", err) + } + + // NOTE: don't use PageSize as initial slice capacity - as that theoretically could be MaxInt + output := &ListBranchesOutput{ + Branches: make([]Branch, 0, 16), + } + for { + var next *rpc.ListBranchesResponse + next, err = stream.Recv() + if errors.Is(err, io.EOF) { + log.Ctx(ctx).Debug().Msg("received end of stream") + break + } + if err != nil { + return nil, processRPCErrorf(err, "received unexpected error from rpc") + } + if next.GetBranch() == nil { + return nil, fmt.Errorf("expected branch message") + } + + var branch *Branch + branch, err = mapRPCBranch(next.GetBranch()) + if err != nil { + return nil, fmt.Errorf("failed to map rpc branch: %w", err) + } + + output.Branches = append(output.Branches, *branch) + } + + return output, nil +} diff --git a/gitrpc/check/branch.go b/gitrpc/check/branch.go new file mode 100644 index 0000000000..3e81c25382 --- /dev/null +++ b/gitrpc/check/branch.go @@ -0,0 +1,96 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package check + +import ( + "errors" + "fmt" + "strings" +) + +/* https://git-scm.com/docs/git-check-ref-format + * How to handle various characters in refnames: + * 0: An acceptable character for refs + * 1: End-of-component + * 2: ., look for a preceding . to reject .. 
in refs + * 3: {, look for a preceding @ to reject @{ in refs + * 4: A bad character: ASCII control characters, and + * ":", "?", "[", "\", "^", "~", SP, or TAB + * 5: *, reject unless REFNAME_REFSPEC_PATTERN is set + */ +var refnameDisposition = [256]byte{ + 1, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 2, 1, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 4, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 0, 4, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 4, 4, +} + +func BranchName(branch string) error { + const lock = ".lock" + last := byte('\x00') + + for i := 0; i < len(branch); i++ { + ch := branch[i] & 255 + disp := refnameDisposition[ch] + + switch disp { + case 1: + if i == 0 { + goto out + } + if last == '/' { // Refname contains "//" + return fmt.Errorf("branch '%s' cannot have two consecutive slashes // ", branch) + } + case 2: + if last == '.' { // Refname contains ".." + return fmt.Errorf("branch '%s' cannot have two consecutive dots .. ", branch) + } + case 3: + if last == '@' { // Refname contains "@{". + return fmt.Errorf("branch '%s' cannot contain a sequence @{", branch) + } + case 4: + return fmt.Errorf("branch '%s' cannot have ASCII control characters "+ + "(i.e. bytes whose values are lower than \040, or \177 DEL), space, tilde ~, caret ^, or colon : anywhere", branch) + case 5: + return fmt.Errorf("branch '%s' can't be a pattern", branch) + } + last = ch + } +out: + if last == '\x00' { + return errors.New("branch name is empty") + } + if last == '.' { + return fmt.Errorf("branch '%s' cannot have . at the end", branch) + } + if last == '@' { + return fmt.Errorf("branch '%s' cannot be the single character @", branch) + } + if last == '/' { + return fmt.Errorf("branch '%s' cannot have / at the end", branch) + } + if branch[0] == '.' { + return fmt.Errorf("branch '%s' cannot start with '.'", branch) + } + if strings.HasSuffix(branch, lock) { + return fmt.Errorf("branch '%s' cannot end with '%s'", branch, lock) + } + return nil +} diff --git a/gitrpc/check/branch_test.go b/gitrpc/check/branch_test.go new file mode 100644 index 0000000000..5acf3c6697 --- /dev/null +++ b/gitrpc/check/branch_test.go @@ -0,0 +1,225 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package check + +import "testing" + +func TestBranchName(t *testing.T) { + type args struct { + branch string + } + tests := []struct { + name string + args args + wantErr bool + }{ + { + name: "happy path", + args: args{ + branch: "new-branch", + }, + wantErr: false, + }, + { + name: "happy path, include slash", + args: args{ + branch: "eb/new-branch", + }, + wantErr: false, + }, + { + name: "happy path, test utf-8 chars", + args: args{ + branch: "eb/new\u2318branch", + }, + wantErr: false, + }, + { + name: "branch name empty should return error", + args: args{ + branch: "", + }, + wantErr: true, + }, + { + name: "branch name starts with / should return error", + args: args{ + branch: "/new-branch", + }, + wantErr: true, + }, + { + name: "branch name contains // should return error", + args: args{ + branch: "eb//new-branch", + }, + wantErr: true, + }, + { + name: "branch name ends with / should return error", + args: args{ + branch: "eb/new-branch/", + }, + wantErr: true, + }, + { + name: "branch name starts with . should return error", + args: args{ + branch: ".new-branch", + }, + wantErr: true, + }, + { + name: "branch name contains .. should return error", + args: args{ + branch: "new..branch", + }, + wantErr: true, + }, + { + name: "branch name ends with . should return error", + args: args{ + branch: "new-branch.", + }, + wantErr: true, + }, + { + name: "branch name contains ~ should return error", + args: args{ + branch: "new~branch", + }, + wantErr: true, + }, + { + name: "branch name contains ^ should return error", + args: args{ + branch: "^new-branch", + }, + wantErr: true, + }, + { + name: "branch name contains : should return error", + args: args{ + branch: "new:branch", + }, + wantErr: true, + }, + { + name: "branch name contains control char should return error", + args: args{ + branch: "new\x08branch", + }, + wantErr: true, + }, + { + name: "branch name ends with .lock should return error", + args: args{ + branch: "new-branch.lock", + }, + wantErr: true, + }, + { + name: "branch name starts with ? should return error", + args: args{ + branch: "?new-branch", + }, + wantErr: true, + }, + { + name: "branch name contains ? should return error", + args: args{ + branch: "new?branch", + }, + wantErr: true, + }, + { + name: "branch name ends with ? 
should return error", + args: args{ + branch: "new-branch?", + }, + wantErr: true, + }, + { + name: "branch name starts with [ should return error", + args: args{ + branch: "[new-branch", + }, + wantErr: true, + }, + { + name: "branch name contains [ should return error", + args: args{ + branch: "new[branch", + }, + wantErr: true, + }, + { + name: "branch name ends with [ should return error", + args: args{ + branch: "new-branch[", + }, + wantErr: true, + }, + { + name: "branch name starts with * should return error", + args: args{ + branch: "*new-branch", + }, + wantErr: true, + }, + { + name: "branch name contains * should return error", + args: args{ + branch: "new*branch", + }, + wantErr: true, + }, + { + name: "branch name ends with * should return error", + args: args{ + branch: "new-branch*", + }, + wantErr: true, + }, + { + name: "branch name cannot contain a sequence @{ and should return error", + args: args{ + branch: "new-br@{anch", + }, + wantErr: true, + }, + { + name: "branch name cannot be the single character @ and should return error", + args: args{ + branch: "@", + }, + wantErr: true, + }, + { + name: "branch name cannot contain \\ and should return error", + args: args{ + branch: "new-br\\anch", + }, + wantErr: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if err := BranchName(tt.args.branch); (err != nil) != tt.wantErr { + t.Errorf("validateBranchName() error = %v, wantErr %v", err, tt.wantErr) + } + }) + } +} diff --git a/gitrpc/client.go b/gitrpc/client.go new file mode 100644 index 0000000000..b5af5253b4 --- /dev/null +++ b/gitrpc/client.go @@ -0,0 +1,92 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+
+package gitrpc
+
+import (
+	"fmt"
+	"time"
+
+	"github.com/harness/gitness/gitrpc/rpc"
+
+	"google.golang.org/grpc"
+	"google.golang.org/grpc/backoff"
+	"google.golang.org/grpc/credentials/insecure"
+)
+
+type Client struct {
+	conn               *grpc.ClientConn
+	repoService        rpc.RepositoryServiceClient
+	refService         rpc.ReferenceServiceClient
+	httpService        rpc.SmartHTTPServiceClient
+	commitFilesService rpc.CommitFilesServiceClient
+	diffService        rpc.DiffServiceClient
+	mergeService       rpc.MergeServiceClient
+	blameService       rpc.BlameServiceClient
+	pushService        rpc.PushServiceClient
+}
+
+func New(config Config) (*Client, error) {
+	if err := config.Validate(); err != nil {
+		return nil, fmt.Errorf("provided config is invalid: %w", err)
+	}
+
+	// create interceptors
+	logIntc := NewClientLogInterceptor()
+
+	// prepare all grpc options
+	grpcOpts := []grpc.DialOption{
+		grpc.WithDefaultServiceConfig(fmt.Sprintf(`{"loadBalancingPolicy":"%s"}`, config.LoadBalancingPolicy)),
+		grpc.WithTransportCredentials(insecure.NewCredentials()),
+		grpc.WithChainUnaryInterceptor(
+			logIntc.UnaryClientInterceptor(),
+		),
+		grpc.WithChainStreamInterceptor(
+			logIntc.StreamClientInterceptor(),
+		),
+		grpc.WithConnectParams(
+			grpc.ConnectParams{
+				// This config optimizes for connection recovery instead of load reduction.
+				// NOTE: we only expect a limited number of internal clients, thus a low number of connections.
+				Backoff: backoff.Config{
+					BaseDelay:  100 * time.Millisecond,
+					Multiplier: 1.6, // same as default
+					Jitter:     0.2, // same as default
+					MaxDelay:   time.Second,
+				},
+			},
+		),
+	}
+
+	conn, err := grpc.Dial(config.Addr, grpcOpts...)
+	if err != nil {
+		return nil, err
+	}
+
+	return NewWithConn(conn), nil
+}
+
+func NewWithConn(conn *grpc.ClientConn) *Client {
+	return &Client{
+		conn:               conn,
+		repoService:        rpc.NewRepositoryServiceClient(conn),
+		refService:         rpc.NewReferenceServiceClient(conn),
+		httpService:        rpc.NewSmartHTTPServiceClient(conn),
+		commitFilesService: rpc.NewCommitFilesServiceClient(conn),
+		diffService:        rpc.NewDiffServiceClient(conn),
+		mergeService:       rpc.NewMergeServiceClient(conn),
+		blameService:       rpc.NewBlameServiceClient(conn),
+		pushService:        rpc.NewPushServiceClient(conn),
+	}
+}
diff --git a/gitrpc/commit.go b/gitrpc/commit.go
new file mode 100644
index 0000000000..f0ed4115cf
--- /dev/null
+++ b/gitrpc/commit.go
@@ -0,0 +1,278 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package gitrpc
+
+import (
+	"context"
+	"errors"
+	"fmt"
+	"io"
+	"strconv"
+	"time"
+
+	"github.com/harness/gitness/gitrpc/rpc"
+
+	"github.com/rs/zerolog/log"
+)
+
+type GetCommitParams struct {
+	ReadParams
+	// SHA is the git commit sha
+	SHA string
+}
+
+type GetCommitOutput struct {
+	Commit Commit `json:"commit"`
+}
+
+type Commit struct {
+	SHA       string    `json:"sha"`
+	Title     string    `json:"title"`
+	Message   string    `json:"message,omitempty"`
+	Author    Signature `json:"author"`
+	Committer Signature `json:"committer"`
+}
+
+type Signature struct {
+	Identity Identity  `json:"identity"`
+	When     time.Time `json:"when"`
+}
+
+type Identity struct {
+	Name  string `json:"name"`
+	Email string `json:"email"`
+}
+
+func (c *Client) GetCommit(ctx context.Context, params *GetCommitParams) (*GetCommitOutput, error) {
+	if params == nil {
+		return nil, ErrNoParamsProvided
+	}
+	result, err := c.repoService.GetCommit(ctx, &rpc.GetCommitRequest{
+		Base: mapToRPCReadRequest(params.ReadParams),
+		Sha:  params.SHA,
+	})
+	if err != nil {
+		return nil, processRPCErrorf(err, "failed to get commit")
+	}
+
+	commit, err := mapRPCCommit(result.GetCommit())
+	if err != nil {
+		return nil, fmt.Errorf("failed to map rpc commit: %w", err)
+	}
+
+	return &GetCommitOutput{
+		Commit: *commit,
+	}, nil
+}
+
+type ListCommitsParams struct {
+	ReadParams
+	// GitREF is a git reference (branch / tag / commit SHA)
+	GitREF string
+	// After is a git reference (branch / tag / commit SHA)
+	// If provided, only commits up to that reference will be returned (exclusive)
+	After string
+	Page  int32
+	Limit int32
+	Path  string
+
+	// Since allows filtering for commits since the provided UNIX timestamp - Optional, ignored if value is 0.
+	Since int64
+
+	// Until allows filtering for commits until the provided UNIX timestamp - Optional, ignored if value is 0.
+	Until int64
+
+	// Committer allows filtering for commits based on the committer - Optional, ignored if string is empty.
+	Committer string
+}
+
+type RenameDetails struct {
+	OldPath         string
+	NewPath         string
+	CommitShaBefore string
+	CommitShaAfter  string
+}
+
+type ListCommitsOutput struct {
+	Commits       []Commit
+	RenameDetails []*RenameDetails
+	TotalCommits  int
+}
+
+func (c *Client) ListCommits(ctx context.Context, params *ListCommitsParams) (*ListCommitsOutput, error) {
+	if params == nil {
+		return nil, ErrNoParamsProvided
+	}
+	stream, err := c.repoService.ListCommits(ctx, &rpc.ListCommitsRequest{
+		Base:      mapToRPCReadRequest(params.ReadParams),
+		GitRef:    params.GitREF,
+		After:     params.After,
+		Page:      params.Page,
+		Limit:     params.Limit,
+		Path:      params.Path,
+		Since:     params.Since,
+		Until:     params.Until,
+		Committer: params.Committer,
+	})
+	if err != nil {
+		return nil, fmt.Errorf("failed to start stream for commits: %w", err)
+	}
+	// NOTE: don't use PageSize as initial slice capacity - as that theoretically could be MaxInt
+	output := &ListCommitsOutput{
+		Commits: make([]Commit, 0, 16),
+	}
+
+	// check for list commits header
+	header, err := stream.Header()
+	if err != nil {
+		return nil, processRPCErrorf(err, "failed to read list commits header from stream")
+	}
+
+	values := header.Get("total-commits")
+	if len(values) > 0 && values[0] != "" {
+		total, err := strconv.ParseInt(values[0], 10, 32)
+		if err != nil {
+			return nil, processRPCErrorf(err, "failed to convert header total-commits")
+		}
+		output.TotalCommits = int(total)
+	}
+
+	for {
+		var next *rpc.ListCommitsResponse
+		next, err = stream.Recv()
+		if errors.Is(err, io.EOF) {
+			log.Ctx(ctx).Debug().Msg("received end of stream")
+			break
+		}
+		if err != nil {
+			return nil, processRPCErrorf(err, "received unexpected error from server")
+		}
+		if next.GetCommit() == nil {
+			return nil, fmt.Errorf("expected commit message")
+		}
+
+		var commit *Commit
+		commit, err = mapRPCCommit(next.GetCommit())
+		if err != nil {
+			return nil, fmt.Errorf("failed to map rpc commit: %w", err)
+		}
+		output.Commits = append(output.Commits, *commit)
+
+		if next.RenameDetails != nil {
+			output.RenameDetails = mapRPCRenameDetails(next.RenameDetails)
+		}
+	}
+
+	return output, nil
+}
+
+type GetCommitDivergencesParams struct {
+	ReadParams
+	MaxCount int32
+	Requests []CommitDivergenceRequest
+}
+
+type GetCommitDivergencesOutput struct {
+	Divergences []CommitDivergence
+}
+
+// CommitDivergenceRequest contains the refs for which the diverging commits should be counted.
+type CommitDivergenceRequest struct {
+	// From is the ref from which the counting of the diverging commits starts.
+	From string
+	// To is the ref at which the counting of the diverging commits ends.
+	To string
+}
+
+// CommitDivergence contains the count of diverging commits between two refs.
+type CommitDivergence struct {
+	// Ahead is the count of commits the 'From' ref is ahead of the 'To' ref.
+	Ahead int32
+	// Behind is the count of commits the 'From' ref is behind the 'To' ref.
+ Behind int32 +} + +func (c *Client) GetCommitDivergences(ctx context.Context, + params *GetCommitDivergencesParams) (*GetCommitDivergencesOutput, error) { + if params == nil { + return nil, ErrNoParamsProvided + } + + // build rpc request + req := &rpc.GetCommitDivergencesRequest{ + Base: mapToRPCReadRequest(params.ReadParams), + MaxCount: params.MaxCount, + Requests: make([]*rpc.CommitDivergenceRequest, len(params.Requests)), + } + for i := range params.Requests { + req.Requests[i] = &rpc.CommitDivergenceRequest{ + From: params.Requests[i].From, + To: params.Requests[i].To, + } + } + resp, err := c.repoService.GetCommitDivergences(ctx, req) + if err != nil { + return nil, processRPCErrorf(err, "failed to get diverging commits from server") + } + + divergences := resp.GetDivergences() + if divergences == nil { + return nil, NewError(StatusInternal, "server response divergences were nil") + } + + // build output + output := &GetCommitDivergencesOutput{ + Divergences: make([]CommitDivergence, len(divergences)), + } + for i := range divergences { + if divergences[i] == nil { + return nil, NewError(StatusInternal, "server returned nil divergence") + } + + output.Divergences[i] = CommitDivergence{ + Ahead: divergences[i].Ahead, + Behind: divergences[i].Behind, + } + } + + return output, nil +} + +type MergeBaseParams struct { + ReadParams + Ref1 string + Ref2 string +} + +type MergeBaseOutput struct { + MergeBaseSHA string +} + +func (c *Client) MergeBase(ctx context.Context, + params MergeBaseParams, +) (MergeBaseOutput, error) { + result, err := c.repoService.MergeBase(ctx, &rpc.MergeBaseRequest{ + Base: mapToRPCReadRequest(params.ReadParams), + Ref1: params.Ref1, + Ref2: params.Ref2, + }) + if err != nil { + return MergeBaseOutput{}, fmt.Errorf("failed to get merge base commit: %w", err) + } + + return MergeBaseOutput{ + MergeBaseSHA: result.MergeBaseSha, + }, nil +} diff --git a/gitrpc/common.go b/gitrpc/common.go new file mode 100644 index 0000000000..7ca46d9600 --- /dev/null +++ b/gitrpc/common.go @@ -0,0 +1,66 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gitrpc + +import ( + "github.com/harness/gitness/gitrpc/rpc" +) + +// ReadParams contains the base parameters for read operations. +type ReadParams struct { + RepoUID string +} + +func (p ReadParams) Validate() error { + if p.RepoUID == "" { + return ErrInvalidArgumentf("repository id cannot be empty") + } + return nil +} + +// WriteParams contains the base parameters for write operations. 
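+//
+// A minimal construction sketch (all values here are illustrative assumptions; EnvVars can
+// carry extra environment variables, e.g. an encoded githook payload):
+//
+//	params := WriteParams{
+//		RepoUID: "repo-uid",
+//		Actor:   Identity{Name: "admin", Email: "admin@example.com"},
+//		EnvVars: map[string]string{"GIT_HOOK_PAYLOAD": encodedPayload},
+//	}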
+type WriteParams struct { + RepoUID string + Actor Identity + EnvVars map[string]string +} + +func mapToRPCReadRequest(p ReadParams) *rpc.ReadRequest { + return &rpc.ReadRequest{ + RepoUid: p.RepoUID, + } +} + +func mapToRPCWriteRequest(p WriteParams) *rpc.WriteRequest { + out := &rpc.WriteRequest{ + RepoUid: p.RepoUID, + Actor: &rpc.Identity{ + Name: p.Actor.Name, + Email: p.Actor.Email, + }, + EnvVars: make([]*rpc.EnvVar, len(p.EnvVars)), + } + + i := 0 + for k, v := range p.EnvVars { + out.EnvVars[i] = &rpc.EnvVar{ + Name: k, + Value: v, + } + i++ + } + + return out +} diff --git a/gitrpc/config.go b/gitrpc/config.go new file mode 100644 index 0000000000..aad9211e68 --- /dev/null +++ b/gitrpc/config.go @@ -0,0 +1,36 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gitrpc + +import ( + "errors" +) + +// Config represents the config for the gitrpc client. +type Config struct { + Addr string `envconfig:"GITRPC_CLIENT_ADDR" default:"127.0.0.1:3001"` + LoadBalancingPolicy string `envconfig:"GITRPC_CLIENT_LOAD_BALANCING_POLICY" default:"pick_first"` +} + +func (c *Config) Validate() error { + if c == nil { + return errors.New("config is required") + } + if c.Addr == "" { + return errors.New("config.Addr is required") + } + + return nil +} diff --git a/gitrpc/diff.go b/gitrpc/diff.go new file mode 100644 index 0000000000..b28022c55f --- /dev/null +++ b/gitrpc/diff.go @@ -0,0 +1,411 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package gitrpc + +import ( + "context" + "errors" + "io" + + "github.com/harness/gitness/gitrpc/internal/streamio" + "github.com/harness/gitness/gitrpc/internal/types" + "github.com/harness/gitness/gitrpc/rpc" + + "golang.org/x/sync/errgroup" +) + +type DiffParams struct { + ReadParams + BaseRef string + HeadRef string + MergeBase bool + IncludePatch bool +} + +func (p DiffParams) Validate() error { + if err := p.ReadParams.Validate(); err != nil { + return err + } + + if p.HeadRef == "" { + return ErrInvalidArgumentf("head ref cannot be empty") + } + return nil +} + +func (c *Client) RawDiff(ctx context.Context, params *DiffParams, out io.Writer) error { + if err := params.Validate(); err != nil { + return err + } + diff, err := c.diffService.RawDiff(ctx, &rpc.DiffRequest{ + Base: mapToRPCReadRequest(params.ReadParams), + BaseRef: params.BaseRef, + HeadRef: params.HeadRef, + MergeBase: params.MergeBase, + }) + if err != nil { + return processRPCErrorf(err, "failed to fetch diff between '%s' and '%s' with err: %v", + params.BaseRef, params.HeadRef, err) + } + + reader := streamio.NewReader(func() ([]byte, error) { + var resp *rpc.RawDiffResponse + resp, err = diff.Recv() + return resp.GetData(), err + }) + + if _, err = io.Copy(out, reader); err != nil { + return processRPCErrorf(err, "failed to fetch diff between '%s' and '%s' with err: %v", + params.BaseRef, params.HeadRef, err) + } + + return nil +} + +func (c *Client) CommitDiff(ctx context.Context, params *GetCommitParams, out io.Writer) error { + if err := params.Validate(); err != nil { + return err + } + diff, err := c.diffService.CommitDiff(ctx, &rpc.CommitDiffRequest{ + Base: mapToRPCReadRequest(params.ReadParams), + Sha: params.SHA, + }) + if err != nil { + return processRPCErrorf(err, "failed to fetch diff for commit '%s': %v", params.SHA, err) + } + + reader := streamio.NewReader(func() ([]byte, error) { + var resp *rpc.CommitDiffResponse + resp, err = diff.Recv() + return resp.GetData(), err + }) + + if _, err = io.Copy(out, reader); err != nil { + return err + } + + return nil +} + +type DiffShortStatOutput struct { + Files int + Additions int + Deletions int +} + +// DiffShortStat returns files changed, additions and deletions metadata. 
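+//
+// A minimal usage sketch (repo UID and ref names are illustrative):
+//
+//	stat, err := client.DiffShortStat(ctx, &DiffParams{
+//		ReadParams: ReadParams{RepoUID: "repo-uid"},
+//		BaseRef:    "refs/heads/main",
+//		HeadRef:    "refs/heads/feature",
+//	})
+//	// stat.Files, stat.Additions and stat.Deletions then hold the aggregated numbers.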
+func (c *Client) DiffShortStat(ctx context.Context, params *DiffParams) (DiffShortStatOutput, error) {
+	if err := params.Validate(); err != nil {
+		return DiffShortStatOutput{}, err
+	}
+	stat, err := c.diffService.DiffShortStat(ctx, &rpc.DiffRequest{
+		Base:      mapToRPCReadRequest(params.ReadParams),
+		BaseRef:   params.BaseRef,
+		HeadRef:   params.HeadRef,
+		MergeBase: params.MergeBase,
+	})
+	if err != nil {
+		return DiffShortStatOutput{}, processRPCErrorf(err, "failed to get diff data between '%s' and '%s'",
+			params.BaseRef, params.HeadRef)
+	}
+	return DiffShortStatOutput{
+		Files:     int(stat.GetFiles()),
+		Additions: int(stat.GetAdditions()),
+		Deletions: int(stat.GetDeletions()),
+	}, nil
+}
+
+type DiffStatsOutput struct {
+	Commits      int
+	FilesChanged int
+}
+
+func (c *Client) DiffStats(ctx context.Context, params *DiffParams) (DiffStatsOutput, error) {
+	// declare variables which will be used in goroutines,
+	// no need for atomic operations because writing and reading of the variables
+	// doesn't happen at the same time
+	var (
+		totalCommits int
+		totalFiles   int
+	)
+
+	errGroup, groupCtx := errgroup.WithContext(ctx)
+
+	errGroup.Go(func() error {
+		// read total commits
+
+		options := &GetCommitDivergencesParams{
+			ReadParams: params.ReadParams,
+			Requests: []CommitDivergenceRequest{
+				{
+					From: params.HeadRef,
+					To:   params.BaseRef,
+				},
+			},
+		}
+
+		rpcOutput, err := c.GetCommitDivergences(groupCtx, options)
+		if err != nil {
+			return processRPCErrorf(err, "failed to count pull request commits between '%s' and '%s'",
+				params.BaseRef, params.HeadRef)
+		}
+		if len(rpcOutput.Divergences) > 0 {
+			totalCommits = int(rpcOutput.Divergences[0].Ahead)
+		}
+		return nil
+	})
+
+	errGroup.Go(func() error {
+		// read short stat
+		stat, err := c.DiffShortStat(groupCtx, &DiffParams{
+			ReadParams: params.ReadParams,
+			BaseRef:    params.BaseRef,
+			HeadRef:    params.HeadRef,
+			MergeBase:  true, // must be true, because commitDivergences use triple dot notation
+		})
+		if err != nil {
+			return err
+		}
+		totalFiles = stat.Files
+		return nil
+	})
+
+	err := errGroup.Wait()
+	if err != nil {
+		return DiffStatsOutput{}, err
+	}
+
+	return DiffStatsOutput{
+		Commits:      totalCommits,
+		FilesChanged: totalFiles,
+	}, nil
+}
+
+type GetDiffHunkHeadersParams struct {
+	ReadParams
+	SourceCommitSHA string
+	TargetCommitSHA string
+}
+
+type DiffFileHeader struct {
+	OldName    string
+	NewName    string
+	Extensions map[string]string
+}
+
+type HunkHeader struct {
+	OldLine int
+	OldSpan int
+	NewLine int
+	NewSpan int
+	Text    string
+}
+
+type DiffFileHunkHeaders struct {
+	FileHeader  DiffFileHeader
+	HunkHeaders []HunkHeader
+}
+
+type GetDiffHunkHeadersOutput struct {
+	Files []DiffFileHunkHeaders
+}
+
+func (c *Client) GetDiffHunkHeaders(
+	ctx context.Context,
+	params GetDiffHunkHeadersParams,
+) (GetDiffHunkHeadersOutput, error) {
+	if params.SourceCommitSHA == params.TargetCommitSHA {
+		return GetDiffHunkHeadersOutput{}, nil
+	}
+
+	hunkHeaders, err := c.diffService.GetDiffHunkHeaders(ctx, &rpc.GetDiffHunkHeadersRequest{
+		Base:            mapToRPCReadRequest(params.ReadParams),
+		SourceCommitSha: params.SourceCommitSHA,
+		TargetCommitSha: params.TargetCommitSHA,
+	})
+	if err != nil {
+		return GetDiffHunkHeadersOutput{}, processRPCErrorf(err, "failed to get git diff hunk headers")
+	}
+
+	files := make([]DiffFileHunkHeaders, len(hunkHeaders.Files))
+	for i, file := range hunkHeaders.Files {
+		headers := make([]HunkHeader, len(file.HunkHeaders))
+		for j, header := range file.HunkHeaders {
+			headers[j] = mapHunkHeader(header)
+		}
+
files[i] = DiffFileHunkHeaders{ + FileHeader: mapDiffFileHeader(file.FileHeader), + HunkHeaders: headers, + } + } + + return GetDiffHunkHeadersOutput{ + Files: files, + }, nil +} + +type DiffCutOutput struct { + Header HunkHeader + LinesHeader string + Lines []string + MergeBaseSHA string + LatestSourceSHA string +} + +type DiffCutParams struct { + ReadParams + SourceCommitSHA string + SourceBranch string + TargetCommitSHA string + TargetBranch string + Path string + LineStart int + LineStartNew bool + LineEnd int + LineEndNew bool +} + +// DiffCut extracts diff snippet from a git diff hunk. +// The snippet is from the specific commit (specified by commit SHA), between refs +// source branch and target branch, from the specific file. +func (c *Client) DiffCut(ctx context.Context, params *DiffCutParams) (DiffCutOutput, error) { + result, err := c.diffService.DiffCut(ctx, &rpc.DiffCutRequest{ + Base: mapToRPCReadRequest(params.ReadParams), + SourceCommitSha: params.SourceCommitSHA, + SourceBranch: params.SourceBranch, + TargetCommitSha: params.TargetCommitSHA, + TargetBranch: params.TargetBranch, + Path: params.Path, + LineStart: int32(params.LineStart), + LineStartNew: params.LineStartNew, + LineEnd: int32(params.LineEnd), + LineEndNew: params.LineEndNew, + }) + if err != nil { + return DiffCutOutput{}, processRPCErrorf(err, "failed to get git diff sub hunk") + } + + hunkHeader := types.HunkHeader{ + OldLine: int(result.HunkHeader.OldLine), + OldSpan: int(result.HunkHeader.OldSpan), + NewLine: int(result.HunkHeader.NewLine), + NewSpan: int(result.HunkHeader.NewSpan), + Text: result.HunkHeader.Text, + } + + return DiffCutOutput{ + Header: HunkHeader(hunkHeader), + LinesHeader: result.LinesHeader, + Lines: result.Lines, + MergeBaseSHA: result.MergeBaseSha, + LatestSourceSHA: result.LatestSourceSha, + }, nil +} + +type FileDiff struct { + SHA string `json:"sha"` + OldSHA string `json:"old_sha,omitempty"` + Path string `json:"path"` + OldPath string `json:"old_path,omitempty"` + Status FileDiffStatus `json:"status"` + Additions int64 `json:"additions"` + Deletions int64 `json:"deletions"` + Changes int64 `json:"changes"` + Patch []byte `json:"patch,omitempty"` + IsBinary bool `json:"is_binary"` + IsSubmodule bool `json:"is_submodule"` +} + +type FileDiffStatus string + +const ( + // NOTE: keeping values upper case for now to stay consistent with current API. + // TODO: change drone/go-scm (and potentially new dependencies) to case insensitive. 
+ + FileDiffStatusUndefined FileDiffStatus = "UNDEFINED" + FileDiffStatusAdded FileDiffStatus = "ADDED" + FileDiffStatusModified FileDiffStatus = "MODIFIED" + FileDiffStatusDeleted FileDiffStatus = "DELETED" + FileDiffStatusRenamed FileDiffStatus = "RENAMED" +) + +func (c *Client) Diff(ctx context.Context, params *DiffParams) (<-chan *FileDiff, <-chan error) { + ch := make(chan *FileDiff) + // needs to be buffered so it is not blocking on receiver side when all data is sent + cherr := make(chan error, 1) + + go func() { + defer close(ch) + defer close(cherr) + + if err := params.Validate(); err != nil { + cherr <- err + return + } + + stream, err := c.diffService.Diff(ctx, &rpc.DiffRequest{ + Base: mapToRPCReadRequest(params.ReadParams), + BaseRef: params.BaseRef, + HeadRef: params.HeadRef, + MergeBase: params.MergeBase, + IncludePatch: params.IncludePatch, + }) + if err != nil { + return + } + + for { + resp, err := stream.Recv() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + cherr <- processRPCErrorf(err, "failed to get git diff file from stream") + return + } + + ch <- &FileDiff{ + SHA: resp.Sha, + OldSHA: resp.OldSha, + Path: resp.Path, + OldPath: resp.OldPath, + Status: mapRPCFileDiffStatus(resp.Status), + Additions: int64(resp.Additions), + Deletions: int64(resp.Deletions), + Changes: int64(resp.Changes), + Patch: resp.Patch, + IsBinary: resp.IsBinary, + IsSubmodule: resp.IsSubmodule, + } + } + }() + + return ch, cherr +} + +func mapRPCFileDiffStatus(status rpc.DiffResponse_FileStatus) FileDiffStatus { + switch status { + case rpc.DiffResponse_ADDED: + return FileDiffStatusAdded + case rpc.DiffResponse_DELETED: + return FileDiffStatusDeleted + case rpc.DiffResponse_MODIFIED: + return FileDiffStatusModified + case rpc.DiffResponse_RENAMED: + return FileDiffStatusRenamed + default: + return FileDiffStatusUndefined + } +} diff --git a/gitrpc/diff/diff.go b/gitrpc/diff/diff.go new file mode 100644 index 0000000000..6ccc9c992e --- /dev/null +++ b/gitrpc/diff/diff.go @@ -0,0 +1,506 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package diff + +import ( + "bufio" + "bytes" + "errors" + "fmt" + "io" + "strconv" + "strings" + + "github.com/harness/gitness/gitrpc/enum" +) + +// Predefine []byte variables to avoid runtime allocations. +var ( + escapedSlash = []byte(`\\`) + regularSlash = []byte(`\`) + escapedTab = []byte(`\t`) + regularTab = []byte("\t") +) + +// LineType is the line type in diff. +type LineType uint8 + +// A list of different line types. +const ( + DiffLinePlain LineType = iota + 1 + DiffLineAdd + DiffLineDelete + DiffLineSection +) + +// FileType is the file status in diff. +type FileType uint8 + +// A list of different file statuses. +const ( + FileAdd FileType = iota + FileChange + FileDelete + FileRename +) + +// Line represents a line in diff. 
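Client.Diff above streams results over an unbuffered file channel plus a one-slot error channel; both are closed when the sending goroutine exits. A minimal consumer sketch, assuming an already-wired gitrpc client (the helper name below is invented for illustration, while Interface, DiffParams and FileDiff come from this package):

package example // hypothetical caller package, illustration only

import (
	"context"

	"github.com/harness/gitness/gitrpc"
)

// collectDiff drains the file channel first, then reads the buffered error
// channel once the sender has closed both channels.
func collectDiff(ctx context.Context, client gitrpc.Interface, params *gitrpc.DiffParams) ([]*gitrpc.FileDiff, error) {
	ch, cherr := client.Diff(ctx, params)

	var files []*gitrpc.FileDiff
	for f := range ch {
		files = append(files, f)
	}
	if err := <-cherr; err != nil {
		return nil, err
	}
	return files, nil
}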
+type Line struct { + Type LineType // The type of the line + Content string // The content of the line + LeftLine int // The left line number + RightLine int // The right line number +} + +// Section represents a section in diff. +type Section struct { + Lines []*Line // lines in the section + + numAdditions int + numDeletions int +} + +// NumLines returns the number of lines in the section. +func (s *Section) NumLines() int { + return len(s.Lines) +} + +// Line returns a specific line by given type and line number in a section. +func (s *Section) Line(lineType LineType, line int) *Line { + var ( + difference = 0 + addCount = 0 + delCount = 0 + matchedDiffLine *Line + ) + +loop: + for _, diffLine := range s.Lines { + switch diffLine.Type { + case DiffLineAdd: + addCount++ + case DiffLineDelete: + delCount++ + default: + if matchedDiffLine != nil { + break loop + } + difference = diffLine.RightLine - diffLine.LeftLine + addCount = 0 + delCount = 0 + } + + switch lineType { + case DiffLineDelete: + if diffLine.RightLine == 0 && diffLine.LeftLine == line-difference { + matchedDiffLine = diffLine + } + case DiffLineAdd: + if diffLine.LeftLine == 0 && diffLine.RightLine == line+difference { + matchedDiffLine = diffLine + } + } + } + + if addCount == delCount { + return matchedDiffLine + } + return nil +} + +// File represents a file in diff. +type File struct { + // The name and path of the file. + Path string + // The old name and path of the file. + OldPath string + // The type of the file. + Type FileType + // The index (SHA1 hash) of the file. For a changed/new file, it is the new SHA, + // and for a deleted file it becomes "000000". + SHA string + // OldSHA is the old index (SHA1 hash) of the file. + OldSHA string + // The sections in the file. + Sections []*Section + + numAdditions int + numDeletions int + + mode enum.EntryMode + oldMode enum.EntryMode + + IsBinary bool + IsSubmodule bool +} + +func (f *File) Status() string { + switch { + case f.Type == FileAdd: + return "added" + case f.Type == FileDelete: + return "deleted" + case f.Type == FileRename: + return "renamed" + case f.Type == FileChange: + return "changed" + default: + return "unchanged" + } +} + +// NumSections returns the number of sections in the file. +func (f *File) NumSections() int { + return len(f.Sections) +} + +// NumAdditions returns the number of additions in the file. +func (f *File) NumAdditions() int { + return f.numAdditions +} + +// NumChanges returns the number of additions and deletions in the file. +func (f *File) NumChanges() int { + return f.numAdditions + f.numDeletions +} + +// NumDeletions returns the number of deletions in the file. +func (f *File) NumDeletions() int { + return f.numDeletions +} + +// Mode returns the mode of the file. +func (f *File) Mode() enum.EntryMode { + return f.mode +} + +// OldMode returns the old mode of the file if it's changed. +func (f *File) OldMode() enum.EntryMode { + return f.oldMode +} + +func (f *File) IsEmpty() bool { + return f.Path == "" && f.OldPath == "" +} + +type Parser struct { + *bufio.Reader + + // The next line that hasn't been processed. It is used to determine what kind + // of process should go in. 
+ buffer []byte + isEOF bool +} + +func (p *Parser) readLine() error { + if p.buffer != nil { + return nil + } + + var err error + p.buffer, err = p.ReadBytes('\n') + if err != nil { + if err != io.EOF { + return fmt.Errorf("read string: %v", err) + } + + p.isEOF = true + } + + // Remove line break + if len(p.buffer) > 0 && p.buffer[len(p.buffer)-1] == '\n' { + p.buffer = p.buffer[:len(p.buffer)-1] + } + return nil +} + +var diffHead = []byte("diff --git ") + +func (p *Parser) parseFileHeader() (*File, error) { + submoduleMode := " 160000" + line := string(p.buffer) + p.buffer = nil + + // NOTE: In case file name is surrounded by double quotes (it happens only in + // git-shell). e.g. diff --git "a/xxx" "b/xxx" + hasQuote := line[len(diffHead)] == '"' + middle := strings.Index(line, ` b/`) + if hasQuote { + middle = strings.Index(line, ` "b/`) + } + + beg := len(diffHead) + a := line[beg+2 : middle] + b := line[middle+3:] + if hasQuote { + a = string(UnescapeChars([]byte(a[1 : len(a)-1]))) + b = string(UnescapeChars([]byte(b[1 : len(b)-1]))) + } + + file := &File{ + Path: a, + OldPath: b, + Type: FileChange, + } + + // Check file diff type and submodule + var err error +checkType: + for !p.isEOF { + if err = p.readLine(); err != nil { + return nil, err + } + + line := string(p.buffer) + p.buffer = nil + + if len(line) == 0 { + continue + } + + switch { + case strings.HasPrefix(line, enum.DiffExtHeaderNewFileMode): + file.Type = FileAdd + file.IsSubmodule = strings.HasSuffix(line, submoduleMode) + fields := strings.Fields(line) + if len(fields) > 0 { + mode, _ := strconv.ParseUint(fields[len(fields)-1], 8, 64) + file.mode = enum.EntryMode(mode) + if file.oldMode == 0 { + file.oldMode = file.mode + } + } + case strings.HasPrefix(line, enum.DiffExtHeaderDeletedFileMode): + file.Type = FileDelete + file.IsSubmodule = strings.HasSuffix(line, submoduleMode) + fields := strings.Fields(line) + if len(fields) > 0 { + mode, _ := strconv.ParseUint(fields[len(fields)-1], 8, 64) + file.mode = enum.EntryMode(mode) + if file.oldMode == 0 { + file.oldMode = file.mode + } + } + case strings.HasPrefix(line, enum.DiffExtHeaderIndex): // e.g. index ee791be..9997571 100644 + fields := strings.Fields(line[6:]) + shas := strings.Split(fields[0], "..") + if len(shas) != 2 { + return nil, errors.New("malformed index: expect two SHAs in the form of ..") + } + + file.OldSHA = shas[0] + file.SHA = shas[1] + if len(fields) > 1 { + mode, _ := strconv.ParseUint(fields[1], 8, 64) + file.mode = enum.EntryMode(mode) + file.oldMode = enum.EntryMode(mode) + } + break checkType + case strings.HasPrefix(line, enum.DiffExtHeaderSimilarity): + file.Type = FileRename + file.OldPath = a + file.Path = b + + // No need to look for index if it's a pure rename + if strings.HasSuffix(line, "100%") { + break checkType + } + case strings.HasPrefix(line, enum.DiffExtHeaderNewMode): + fields := strings.Fields(line) + if len(fields) > 0 { + mode, _ := strconv.ParseUint(fields[len(fields)-1], 8, 64) + file.mode = enum.EntryMode(mode) + } + case strings.HasPrefix(line, enum.DiffExtHeaderOldMode): + fields := strings.Fields(line) + if len(fields) > 0 { + mode, _ := strconv.ParseUint(fields[len(fields)-1], 8, 64) + file.oldMode = enum.EntryMode(mode) + } + } + } + + return file, nil +} + +func (p *Parser) parseSection() (*Section, error) { + line := string(p.buffer) + p.buffer = nil + + section := &Section{ + Lines: []*Line{ + { + Type: DiffLineSection, + Content: line, + }, + }, + } + + // Parse line number, e.g. 
@@ -0,0 +1,3 @@ + var leftLine, rightLine int + ss := strings.Split(line, "@@") + ranges := strings.Split(ss[1][1:], " ") + leftLine, _ = strconv.Atoi(strings.Split(ranges[0], ",")[0][1:]) + if len(ranges) > 1 { + rightLine, _ = strconv.Atoi(strings.Split(ranges[1], ",")[0]) + } else { + rightLine = leftLine + } + + var err error + for !p.isEOF { + if err = p.readLine(); err != nil { + return nil, err + } + + if len(p.buffer) == 0 { + p.buffer = nil + continue + } + + // Make sure we're still in the section. If not, we're done with this section. + if p.buffer[0] != ' ' && + p.buffer[0] != '+' && + p.buffer[0] != '-' { + + // No new line indicator + if p.buffer[0] == '\\' && + bytes.HasPrefix(p.buffer, []byte(`\ No newline at end of file`)) { + p.buffer = nil + continue + } + return section, nil + } + + line := string(p.buffer) + p.buffer = nil + + switch line[0] { + case ' ': + section.Lines = append(section.Lines, &Line{ + Type: DiffLinePlain, + Content: line, + LeftLine: leftLine, + RightLine: rightLine, + }) + leftLine++ + rightLine++ + case '+': + section.Lines = append(section.Lines, &Line{ + Type: DiffLineAdd, + Content: line, + RightLine: rightLine, + }) + section.numAdditions++ + rightLine++ + case '-': + section.Lines = append(section.Lines, &Line{ + Type: DiffLineDelete, + Content: line, + LeftLine: leftLine, + }) + section.numDeletions++ + if leftLine > 0 { + leftLine++ + } + } + } + + return section, nil +} + +func (p *Parser) Parse(f func(f *File)) error { + file := new(File) + currentFileLines := 0 + additions := 0 + deletions := 0 + + var ( + err error + ) + for !p.isEOF { + if err = p.readLine(); err != nil { + return err + } + + if len(p.buffer) == 0 || + bytes.HasPrefix(p.buffer, []byte("+++ ")) || + bytes.HasPrefix(p.buffer, []byte("--- ")) { + p.buffer = nil + continue + } + + // Found new file + if bytes.HasPrefix(p.buffer, diffHead) { + // stream previous file + if !file.IsEmpty() && f != nil { + f(file) + } + file, err = p.parseFileHeader() + if err != nil { + return err + } + + currentFileLines = 0 + continue + } + + if file == nil { + p.buffer = nil + continue + } + + if bytes.HasPrefix(p.buffer, []byte("Binary")) { + p.buffer = nil + file.IsBinary = true + continue + } + + // Loop until we found section header + if p.buffer[0] != '@' { + p.buffer = nil + continue + } + + section, err := p.parseSection() + if err != nil { + return err + } + file.Sections = append(file.Sections, section) + file.numAdditions += section.numAdditions + file.numDeletions += section.numDeletions + additions += section.numAdditions + deletions += section.numDeletions + currentFileLines += section.NumLines() + } + + // stream last file + if !file.IsEmpty() && f != nil { + f(file) + } + + return nil +} + +// UnescapeChars reverses escaped characters. +func UnescapeChars(in []byte) []byte { + if bytes.ContainsAny(in, "\\\t") { + return in + } + + out := bytes.Replace(in, escapedSlash, regularSlash, -1) + out = bytes.Replace(out, escapedTab, regularTab, -1) + return out +} diff --git a/gitrpc/enum/entry.go b/gitrpc/enum/entry.go new file mode 100644 index 0000000000..ad4cbbf123 --- /dev/null +++ b/gitrpc/enum/entry.go @@ -0,0 +1,14 @@ +package enum + +// EntryMode is the unix file mode of a tree entry. +type EntryMode int + +// There are only a few file modes in Git. They look like unix file modes, but +// they can only be one of these. 
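Parser.Parse above walks raw `git diff` output and hands each completed File to the callback once the next file header (or EOF) is reached. A hedged usage sketch; the caller package and helper name are invented, while Parser, File and Parse come from the diff package introduced above:

package example // hypothetical caller package, illustration only

import (
	"bufio"
	"strings"

	"github.com/harness/gitness/gitrpc/diff"
)

// parseRawDiff feeds raw diff text to the parser and collects every parsed file.
func parseRawDiff(raw string) ([]*diff.File, error) {
	p := &diff.Parser{Reader: bufio.NewReader(strings.NewReader(raw))}

	var files []*diff.File
	err := p.Parse(func(f *diff.File) {
		files = append(files, f)
	})
	if err != nil {
		return nil, err
	}
	return files, nil
}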
+const ( + EntryTree EntryMode = 0040000 + EntryBlob EntryMode = 0100644 + EntryExec EntryMode = 0100755 + EntrySymlink EntryMode = 0120000 + EntryCommit EntryMode = 0160000 +) diff --git a/gitrpc/enum/hunk_headers.go b/gitrpc/enum/hunk_headers.go new file mode 100644 index 0000000000..f2d173cb1e --- /dev/null +++ b/gitrpc/enum/hunk_headers.go @@ -0,0 +1,30 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package enum + +// Diff file header extensions. From: https://git-scm.com/docs/git-diff#generate_patch_text_with_p +const ( + DiffExtHeaderOldMode = "old mode" // old mode + DiffExtHeaderNewMode = "new mode" // new mode + DiffExtHeaderDeletedFileMode = "deleted file mode" // deleted file mode + DiffExtHeaderNewFileMode = "new file mode" // new file mode + DiffExtHeaderCopyFrom = "copy from" // copy from + DiffExtHeaderCopyTo = "copy to" // copy to + DiffExtHeaderRenameFrom = "rename from" // rename from + DiffExtHeaderRenameTo = "rename to" // rename to + DiffExtHeaderSimilarity = "similarity index" // similarity index + DiffExtHeaderDissimilarity = "dissimilarity index" // dissimilarity index + DiffExtHeaderIndex = "index" // index .. +) diff --git a/gitrpc/enum/merge.go b/gitrpc/enum/merge.go new file mode 100644 index 0000000000..d37016a14e --- /dev/null +++ b/gitrpc/enum/merge.go @@ -0,0 +1,70 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package enum + +import "github.com/harness/gitness/gitrpc/rpc" + +// MergeMethod represents the approach to merge commits into base branch. +type MergeMethod string + +const ( + // MergeMethodMerge create merge commit. + MergeMethodMerge MergeMethod = "merge" + // MergeMethodSquash squash commits into single commit before merging. + MergeMethodSquash MergeMethod = "squash" + // MergeMethodRebase rebase before merging. 
+ MergeMethodRebase MergeMethod = "rebase" +) + +var MergeMethods = []MergeMethod{ + MergeMethodMerge, + MergeMethodSquash, + MergeMethodRebase, +} + +func MergeMethodFromRPC(t rpc.MergeRequest_MergeMethod) MergeMethod { + switch t { + case rpc.MergeRequest_merge: + return MergeMethodMerge + case rpc.MergeRequest_squash: + return MergeMethodSquash + case rpc.MergeRequest_rebase: + return MergeMethodRebase + default: + return MergeMethodMerge + } +} + +func (m MergeMethod) ToRPC() rpc.MergeRequest_MergeMethod { + switch m { + case MergeMethodMerge: + return rpc.MergeRequest_merge + case MergeMethodSquash: + return rpc.MergeRequest_squash + case MergeMethodRebase: + return rpc.MergeRequest_rebase + default: + return rpc.MergeRequest_merge + } +} + +func (m MergeMethod) Sanitize() (MergeMethod, bool) { + switch m { + case MergeMethodMerge, MergeMethodSquash, MergeMethodRebase: + return m, true + default: + return MergeMethodMerge, false + } +} diff --git a/gitrpc/enum/ref.go b/gitrpc/enum/ref.go new file mode 100644 index 0000000000..f965a2ea16 --- /dev/null +++ b/gitrpc/enum/ref.go @@ -0,0 +1,85 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package enum + +import "github.com/harness/gitness/gitrpc/rpc" + +type RefType int + +const ( + RefTypeUndefined RefType = iota + RefTypeRaw + RefTypeBranch + RefTypeTag + RefTypePullReqHead + RefTypePullReqMerge +) + +func RefFromRPC(t rpc.RefType) RefType { + switch t { + case rpc.RefType_RefRaw: + return RefTypeRaw + case rpc.RefType_RefBranch: + return RefTypeBranch + case rpc.RefType_RefTag: + return RefTypeTag + case rpc.RefType_RefPullReqHead: + return RefTypePullReqHead + case rpc.RefType_RefPullReqMerge: + return RefTypePullReqMerge + case rpc.RefType_Undefined: + return RefTypeUndefined + default: + return RefTypeUndefined + } +} + +func RefToRPC(t RefType) rpc.RefType { + switch t { + case RefTypeRaw: + return rpc.RefType_RefRaw + case RefTypeBranch: + return rpc.RefType_RefBranch + case RefTypeTag: + return rpc.RefType_RefTag + case RefTypePullReqHead: + return rpc.RefType_RefPullReqHead + case RefTypePullReqMerge: + return rpc.RefType_RefPullReqMerge + case RefTypeUndefined: + return rpc.RefType_Undefined + default: + return rpc.RefType_Undefined + } +} + +func (t RefType) String() string { + switch t { + case RefTypeRaw: + return "raw" + case RefTypeBranch: + return "branch" + case RefTypeTag: + return "tag" + case RefTypePullReqHead: + return "head" + case RefTypePullReqMerge: + return "merge" + case RefTypeUndefined: + fallthrough + default: + return "" + } +} diff --git a/gitrpc/errors.go b/gitrpc/errors.go new file mode 100644 index 0000000000..a53c4a4ae2 --- /dev/null +++ b/gitrpc/errors.go @@ -0,0 +1,216 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gitrpc + +import ( + "errors" + "fmt" + + "github.com/harness/gitness/gitrpc/rpc" + + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +var ( + ErrNoParamsProvided = ErrInvalidArgumentf("params not provided") +) + +const ( + conflictFilesKey = "conflict_files" + pathKey = "path" +) + +type Status string + +const ( + StatusConflict Status = "conflict" + StatusInternal Status = "internal" + StatusInvalidArgument Status = "invalid" + StatusNotFound Status = "not_found" + StatusPathNotFound Status = "path_not_found" + StatusNotImplemented Status = "not_implemented" + StatusUnauthorized Status = "unauthorized" + StatusFailed Status = "failed" + StatusPreconditionFailed Status = "precondition_failed" + StatusNotMergeable Status = "not_mergeable" + StatusAborted Status = "aborted" +) + +type Error struct { + // Machine-readable status code. + Status Status + + // Human-readable error message. + Message string + + // Details + Details map[string]any +} + +// Error implements the error interface. +func (e *Error) Error() string { + return e.Message +} + +// ErrorStatus unwraps an gitrpc error and returns its code. +// Non-application errors always return StatusInternal. +func ErrorStatus(err error) Status { + var ( + e *Error + ) + if err == nil { + return "" + } + if errors.As(err, &e) { + return e.Status + } + return StatusInternal +} + +// ErrorMessage unwraps an gitrpc error and returns its message. +// Non-gitrpc errors always return "Internal error". +func ErrorMessage(err error) string { + var ( + e *Error + ) + if err == nil { + return "" + } + if errors.As(err, &e) { + return e.Message + } + return "Internal error." +} + +// ErrorDetails unwraps an gitrpc error and returns its details. +// Non-gitrpc errors always return nil. +func ErrorDetails(err error) map[string]any { + var ( + e *Error + ) + if err == nil { + return nil + } + if errors.As(err, &e) { + return e.Details + } + return nil +} + +// NewError is a factory function to return an Error with a given status and message. +func NewError(code Status, message string) *Error { + return &Error{ + Status: code, + Message: message, + } +} + +// NewError is a factory function to return an Error with a given status, message and details. +func NewErrorWithDetails(code Status, message string, details map[string]any) *Error { + err := NewError(code, message) + err.Details = details + return err +} + +// Errorf is a helper function to return an Error with a given status and formatted message. +func Errorf(code Status, format string, args ...interface{}) *Error { + return &Error{ + Status: code, + Message: fmt.Sprintf(format, args...), + } +} + +// ErrInvalidArgumentf is a helper function to return an invalid argument Error. +func ErrInvalidArgumentf(format string, args ...interface{}) *Error { + return Errorf(StatusInvalidArgument, format, args...) +} + +func processRPCErrorf(err error, format string, args ...interface{}) error { + if errors.Is(err, &Error{}) { + return err + } + // create fallback error returned if we can't map it + fallbackMsg := fmt.Sprintf(format, args...) 
+ fallbackErr := NewError(StatusInternal, fallbackMsg) + + // ensure it's an rpc error + st, ok := status.FromError(err) + if !ok { + return fallbackErr + } + + msg := st.Message() + + switch { + case st.Code() == codes.AlreadyExists: + return NewError(StatusConflict, msg) + case st.Code() == codes.NotFound: + code := StatusNotFound + details := make(map[string]any) + for _, detail := range st.Details() { + switch t := detail.(type) { + case *rpc.PathNotFoundError: + code = StatusPathNotFound + details[pathKey] = t.Path + default: + } + } + if len(details) > 0 { + return NewErrorWithDetails(code, msg, details) + } + return NewError(code, msg) + case st.Code() == codes.InvalidArgument: + return NewError(StatusInvalidArgument, msg) + case st.Code() == codes.FailedPrecondition: + code := StatusPreconditionFailed + details := make(map[string]any) + for _, detail := range st.Details() { + switch t := detail.(type) { + case *rpc.MergeConflictError: + details[conflictFilesKey] = t.ConflictingFiles + code = StatusNotMergeable + default: + } + } + if len(details) > 0 { + return NewErrorWithDetails(code, msg, details) + } + return NewError(code, msg) + default: + return fallbackErr + } +} + +func AsConflictFilesError(err error) (files []string) { + details := ErrorDetails(err) + object, ok := details[conflictFilesKey] + if ok { + files, _ = object.([]string) + } + + return +} + +// AsPathNotFoundError returns the path that wasn't found in case that's the error. +func AsPathNotFoundError(err error) (path string) { + details := ErrorDetails(err) + object, ok := details[pathKey] + if ok { + path = object.(string) + } + + return +} diff --git a/gitrpc/hash/aggregate_xor.go b/gitrpc/hash/aggregate_xor.go new file mode 100644 index 0000000000..a7369b005d --- /dev/null +++ b/gitrpc/hash/aggregate_xor.go @@ -0,0 +1,96 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package hash + +import ( + "errors" + "fmt" + "io" +) + +// xorAggregator is an implementation of the Aggregator interface +// that aggregates hashes by XORing them. 
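processRPCErrorf above maps gRPC status codes (and their details) onto the package's typed Error, so callers can branch on ErrorStatus and the As* helpers instead of raw codes. A call-site sketch under that assumption; the wrapper function and messages are invented for illustration, while GetTreeNode and the error helpers are part of this package:

package example // hypothetical caller package, illustration only

import (
	"context"
	"fmt"

	"github.com/harness/gitness/gitrpc"
)

// describeTreeNodeError branches on the typed gitrpc status instead of gRPC codes.
func describeTreeNodeError(ctx context.Context, client gitrpc.Interface, params *gitrpc.GetTreeNodeParams) error {
	_, err := client.GetTreeNode(ctx, params)
	if err == nil {
		return nil
	}

	switch gitrpc.ErrorStatus(err) {
	case gitrpc.StatusPathNotFound:
		return fmt.Errorf("path %q not found", gitrpc.AsPathNotFoundError(err))
	case gitrpc.StatusNotFound:
		return fmt.Errorf("not found: %s", gitrpc.ErrorMessage(err))
	case gitrpc.StatusInvalidArgument:
		return fmt.Errorf("invalid request: %s", gitrpc.ErrorMessage(err))
	default:
		return err
	}
}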
+type xorAggregator struct { + hfm hashFactoryMethod + hashSize int +} + +func (a *xorAggregator) Empty() []byte { + return make([]byte, a.hashSize) +} + +func (a *xorAggregator) Hash(source Source) ([]byte, error) { + return a.append(a.Empty(), source) +} + +func (a *xorAggregator) Append(hash []byte, source Source) ([]byte, error) { + // copy value to ensure we don't modify the original hash array + hashCopy := make([]byte, len(hash)) + copy(hashCopy, hash) + + return a.append(hashCopy, source) +} + +func (a *xorAggregator) append(hash []byte, source Source) ([]byte, error) { + if len(hash) != a.hashSize { + return nil, fmt.Errorf( + "hash is of invalid length %d, aggregator works with hashes of length %d", + len(hash), + a.hashSize, + ) + } + // create new hasher to allow asynchronous usage + hasher := a.hfm() + + v, err := source.Next() + for err == nil { + // calculate hash of the value + hasher.Reset() + hasher.Write(v) + vHash := hasher.Sum(nil) + + // combine the hash with the current hash + hash = xorInPlace(hash, vHash) + + v, err = source.Next() + } + if !errors.Is(err, io.EOF) { + return nil, fmt.Errorf("failed getting the next element from source: %w", err) + } + + return hash, nil +} + +// xorInPlace XORs the provided byte arrays in place. +// If one slice is shorter, 0s will be used as replacement elements. +// WARNING: The method will taint the passed arrays! +func xorInPlace(a, b []byte) []byte { + // ensure len(a) >= len(b) + if len(b) > len(a) { + a, b = b, a + } + + // xor all values from a with b (or 0) + for i := 0; i < len(a); i++ { + var bi byte + if i < len(b) { + bi = b[i] + } + + a[i] ^= bi + } + + return a +} diff --git a/gitrpc/hash/aggregate_xor_test.go b/gitrpc/hash/aggregate_xor_test.go new file mode 100644 index 0000000000..0251c3de01 --- /dev/null +++ b/gitrpc/hash/aggregate_xor_test.go @@ -0,0 +1,126 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package hash + +import ( + "encoding/hex" + "testing" + + "github.com/stretchr/testify/require" +) + +const ( + value1 = "refs/heads/abcd:1234" + value2 = "refs/heads/zyxw:9876" +) + +var ( + hashValueEmpty, _ = hex.DecodeString("0000000000000000000000000000000000000000000000000000000000000000") + hashValue1, _ = hex.DecodeString("3a00e4f6f30e7eef599350b1bc19e1469bf5c6b26c3d93839d53547f0a61060d") + hashValue2, _ = hex.DecodeString("10111069c3abe9cec02f6bada1e1ab4233d04c7b1d4eb80f05ca2b851c3ba89d") + hashValue1And2, _ = hex.DecodeString("2a11f49f30a5972199bc3b1c1df84a04a8258ac971732b8c98997ffa165aae90") +) + +func TestXORAggregator_Empty(t *testing.T) { + xor, _ := New(TypeSHA256, AggregationTypeXOR) + + res, err := xor.Hash(SourceFromSlice([][]byte{})) + require.NoError(t, err, "failed to hash value1") + require.EqualValues(t, hashValueEmpty, res) +} + +func TestXORAggregator_Single(t *testing.T) { + xor, _ := New(TypeSHA256, AggregationTypeXOR) + + res, err := xor.Hash(SourceFromSlice([][]byte{[]byte(value1)})) + require.NoError(t, err, "failed to hash value1") + require.EqualValues(t, hashValue1, res) + + res, err = xor.Hash(SourceFromSlice([][]byte{[]byte(value2)})) + require.NoError(t, err, "failed to hash value2") + require.EqualValues(t, hashValue2, res) +} + +func TestXORAggregator_Multi(t *testing.T) { + xor, _ := New(TypeSHA256, AggregationTypeXOR) + + res, err := xor.Hash(SourceFromSlice([][]byte{[]byte(value1), []byte(value2)})) + require.NoError(t, err, "failed to hash value1 and value2") + require.EqualValues(t, hashValue1And2, res) +} + +func TestXORAggregator_MultiSame(t *testing.T) { + xor, _ := New(TypeSHA256, AggregationTypeXOR) + + res, err := xor.Hash(SourceFromSlice([][]byte{[]byte(value1), []byte(value1)})) + require.NoError(t, err, "failed to hash value1 and value1") + require.EqualValues(t, hashValueEmpty, res) + + res, err = xor.Hash(SourceFromSlice([][]byte{[]byte(value2), []byte(value2)})) + require.NoError(t, err, "failed to hash value2 and value2") + require.EqualValues(t, hashValueEmpty, res) + + res, err = xor.Hash(SourceFromSlice([][]byte{[]byte(value1), []byte(value2), []byte(value2)})) + require.NoError(t, err, "failed to hash value1 and value2 and value2") + require.EqualValues(t, hashValue1, res) + + res, err = xor.Hash(SourceFromSlice([][]byte{[]byte(value1), []byte(value1), []byte(value2)})) + require.NoError(t, err, "failed to hash value1 and value1 and value2") + require.EqualValues(t, hashValue2, res) + + res, err = xor.Hash(SourceFromSlice([][]byte{[]byte(value1), []byte(value2), []byte(value1)})) + require.NoError(t, err, "failed to hash value1 and value2 and value1") + require.EqualValues(t, hashValue2, res) +} + +func TestAppendMulti(t *testing.T) { + xor, _ := New(TypeSHA256, AggregationTypeXOR) + + res, err := xor.Append(hashValue1, SourceFromSlice([][]byte{[]byte(value2)})) + require.NoError(t, err, "failed to append value2") + require.EqualValues(t, hashValue1And2, res) + + res, err = xor.Append(hashValue2, SourceFromSlice([][]byte{[]byte(value1)})) + require.NoError(t, err, "failed to append value1") + require.EqualValues(t, hashValue1And2, res) + + res, err = xor.Append(hashValue2, SourceFromSlice([][]byte{[]byte(value1)})) + require.NoError(t, err, "failed to append value1") + require.EqualValues(t, hashValue1And2, res) +} + +func TestAppendSame(t *testing.T) { + xor, _ := New(TypeSHA256, AggregationTypeXOR) + + res, err := xor.Append(hashValue1, SourceFromSlice([][]byte{[]byte(value1)})) + require.NoError(t, err, "failed to append 
value1") + require.EqualValues(t, hashValueEmpty, res) + + res, err = xor.Append(hashValue2, SourceFromSlice([][]byte{[]byte(value2)})) + require.NoError(t, err, "failed to append value2") + require.EqualValues(t, hashValueEmpty, res) + + res, err = xor.Append(hashValue1, SourceFromSlice([][]byte{[]byte(value2), []byte(value2)})) + require.NoError(t, err, "failed to append value2 and value2") + require.EqualValues(t, hashValue1, res) + + res, err = xor.Append(hashValue1, SourceFromSlice([][]byte{[]byte(value1), []byte(value2)})) + require.NoError(t, err, "failed to append value1 and value2") + require.EqualValues(t, hashValue2, res) + + res, err = xor.Append(hashValue1, SourceFromSlice([][]byte{[]byte(value2), []byte(value1)})) + require.NoError(t, err, "failed to append value2 and value1") + require.EqualValues(t, hashValue2, res) +} diff --git a/gitrpc/hash/git.go b/gitrpc/hash/git.go new file mode 100644 index 0000000000..779faabcb8 --- /dev/null +++ b/gitrpc/hash/git.go @@ -0,0 +1,25 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package hash + +// SerializeReference serializes a reference to prepare it for hashing. +func SerializeReference(ref string, sha string) []byte { + return []byte(ref + ":" + sha) +} + +// SerializeHead serializes the head to prepare it for hashing. +func SerializeHead(value string) []byte { + return []byte("HEAD:" + value) +} diff --git a/gitrpc/hash/hash.go b/gitrpc/hash/hash.go new file mode 100644 index 0000000000..39dd3648de --- /dev/null +++ b/gitrpc/hash/hash.go @@ -0,0 +1,92 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package hash + +import ( + "crypto/sha256" + "fmt" + "hash" +) + +// Type defines the different types of hashing that are supported. +// NOTE: package doesn't take hash.Hash as input to allow external +// callers to both calculate the hash themselves using this package, or call gitrpc to calculate the hash, +// without the caller having to know internal details on what hash.Hash implementation is used. +type Type string + +const ( + // TypeSHA256 represents the sha256 hashing method. + TypeSHA256 Type = "sha256" +) + +// AggregationType defines the different types of hash aggregation types available. +type AggregationType string + +const ( + // AggregationTypeXOR aggregates a list of hashes using XOR. 
+ // It provides commutative, self-inverse hashing, e.g.: + // - order of elements doesn't matter + // - two equal elements having the same hash cancel each other out. + AggregationTypeXOR AggregationType = "xor" +) + +// Aggregator is an abstraction of a component that aggregates a list of values into a single hash. +type Aggregator interface { + // Empty returns the empty hash of an aggregator. It is returned when hashing an empty Source + // or hashing a Source who's hash is equal to an empty source. Furthermore, the following is always true: + // `Hash(s) == Append(Empty(), s)` FOR ALL sources s. + Empty() []byte + + // Hash returns the hash aggregated over all elements of the provided source. + Hash(source Source) ([]byte, error) + + // Append returns the hash that results when aggregating the existing hash + // with the hashes of all elements of the provided source. + // IMPORTANT: size of existing hash has to be compatible (Empty() can be used for reference). + Append(hash []byte, source Source) ([]byte, error) +} + +// New returns a new aggregator for the given aggregation and hashing type. +func New(t Type, at AggregationType) (Aggregator, error) { + // get hash factory method to ensure we fail on object creation in case of invalid Type. + hfm, hashSize, err := getHashFactoryMethod(t) + if err != nil { + return nil, err + } + + switch at { + case AggregationTypeXOR: + return &xorAggregator{ + hfm: hfm, + hashSize: hashSize, + }, nil + default: + return nil, fmt.Errorf("unknown aggregation type '%s'", at) + } +} + +// hashFactoryMethod returns a hash.Hash implementation. +type hashFactoryMethod func() hash.Hash + +// getHashFactoryMethod returns the hash factory method together with the length of its generated hashes. +// NOTE: the length is needed to ensure hashes of an empty source are similar to hashes of `a a`. +func getHashFactoryMethod(t Type) (hashFactoryMethod, int, error) { + switch t { + case TypeSHA256: + return sha256.New, sha256.Size, nil + default: + return nil, -1, fmt.Errorf("unknown hash type '%s'", t) + } +} diff --git a/gitrpc/hash/source.go b/gitrpc/hash/source.go new file mode 100644 index 0000000000..27abbd742c --- /dev/null +++ b/gitrpc/hash/source.go @@ -0,0 +1,74 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package hash + +import ( + "context" + "fmt" + "io" +) + +// Source is an abstraction of a source of values that have to be hashed. +type Source interface { + Next() ([]byte, error) +} + +// SourceFunc is an alias for a function that returns the content of a source call by call. +type SourceFunc func() ([]byte, error) + +func (f SourceFunc) Next() ([]byte, error) { + return f() +} + +// SourceFromSlice returns a source that iterates over the slice. 
+func SourceFromSlice(slice [][]byte) Source { + return SourceFunc(func() ([]byte, error) { + if len(slice) == 0 { + return nil, io.EOF + } + + // get next element and move slice forward + next := slice[0] + slice = slice[1:] + + return next, nil + }) +} + +// SourceNext encapsulates the data that is needed to serve a call to Source.Next(). +// It is being used by SourceFromChannel to expose a channel as Source. +type SourceNext struct { + Data []byte + Err error +} + +// SourceFromChannel creates a source that returns all elements read from nextChan. +// The .Data and .Err of a SourceNext object in the channel will be returned as is. +// If the channel is closed, the source indicates the end of the data. +func SourceFromChannel(ctx context.Context, nextChan <-chan SourceNext) Source { + return SourceFunc(func() ([]byte, error) { + select { + case <-ctx.Done(): + return nil, fmt.Errorf("source context failed with: %w", ctx.Err()) + case next, ok := <-nextChan: + // channel closed, end of operation + if !ok { + return nil, io.EOF + } + + return next.Data, next.Err + } + }) +} diff --git a/gitrpc/hash/source_test.go b/gitrpc/hash/source_test.go new file mode 100644 index 0000000000..11d0154dba --- /dev/null +++ b/gitrpc/hash/source_test.go @@ -0,0 +1,135 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
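Putting the hash package together: a hedged sketch that hashes two serialized references with the XOR aggregator and then folds in a third via Append (ref names and SHAs are invented; New, SourceFromSlice, SerializeReference, Hash and Append are the APIs introduced above). Because the aggregation is XOR-based, element order does not affect the result and duplicate values cancel out:

package example // hypothetical caller package, illustration only

import (
	"fmt"

	"github.com/harness/gitness/gitrpc/hash"
)

// refsHash aggregates serialized references and then appends one more
// without recomputing the previously hashed values.
func refsHash() ([]byte, error) {
	agg, err := hash.New(hash.TypeSHA256, hash.AggregationTypeXOR)
	if err != nil {
		return nil, err
	}

	sum, err := agg.Hash(hash.SourceFromSlice([][]byte{
		hash.SerializeReference("refs/heads/main", "1eaf0d1"),
		hash.SerializeReference("refs/heads/dev", "9fce21b"),
	}))
	if err != nil {
		return nil, err
	}

	sum, err = agg.Append(sum, hash.SourceFromSlice([][]byte{
		hash.SerializeReference("refs/tags/v1", "77ab103"),
	}))
	if err != nil {
		return nil, err
	}

	fmt.Printf("aggregated refs hash: %x\n", sum)
	return sum, nil
}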
+ +package hash + +import ( + "context" + "io" + "testing" + "time" + + "github.com/stretchr/testify/require" +) + +var ( + byte1 = []byte{1} + byte2 = []byte{2} +) + +func TestSourceFromChannel_blockingChannel(t *testing.T) { + nextChan := make(chan SourceNext) + + ctx, cncl := context.WithTimeout(context.Background(), 1*time.Second) + defer cncl() + + source := SourceFromChannel(ctx, nextChan) + + go func() { + defer close(nextChan) + + select { + case nextChan <- SourceNext{Data: byte1}: + case <-ctx.Done(): + require.Fail(t, "writing data to source chan timed out") + } + }() + + next, err := source.Next() + require.NoError(t, err, "no error expected on first call to next") + require.Equal(t, byte1, next) + + _, err = source.Next() + require.ErrorIs(t, err, io.EOF, "EOF expected after first element was read") +} + +func TestSourceFromChannel_contextCanceled(t *testing.T) { + nextChan := make(chan SourceNext) + + ctx, cncl := context.WithTimeout(context.Background(), 1*time.Second) + cncl() + + source := SourceFromChannel(ctx, nextChan) + _, err := source.Next() + require.ErrorIs(t, err, context.Canceled, "Canceled error expected") +} + +func TestSourceFromChannel_sourceChannelDrainedOnClosing(t *testing.T) { + nextChan := make(chan SourceNext, 1) + + ctx, cncl := context.WithTimeout(context.Background(), 1*time.Second) + defer cncl() + + source := SourceFromChannel(ctx, nextChan) + + nextChan <- SourceNext{Data: byte1} + close(nextChan) + + next, err := source.Next() + require.NoError(t, err, "no error expected on first call to next") + require.Equal(t, byte1, next) + + _, err = source.Next() + require.ErrorIs(t, err, io.EOF, "EOF expected after first element was read") +} + +func TestSourceFromChannel_errorReturnedOnError(t *testing.T) { + nextChan := make(chan SourceNext, 1) + + ctx, cncl := context.WithTimeout(context.Background(), 1*time.Second) + defer cncl() + + source := SourceFromChannel(ctx, nextChan) + + nextChan <- SourceNext{ + Data: byte1, + Err: io.ErrClosedPipe, + } + + next, err := source.Next() + require.ErrorIs(t, err, io.ErrClosedPipe, "ErrClosedPipe expected") + require.Equal(t, byte1, next) +} + +func TestSourceFromChannel_fullChannel(t *testing.T) { + nextChan := make(chan SourceNext, 1) + + ctx, cncl := context.WithTimeout(context.Background(), 1*time.Second) + defer cncl() + + source := SourceFromChannel(ctx, nextChan) + + nextChan <- SourceNext{Data: byte1} + + go func() { + defer close(nextChan) + + select { + case nextChan <- SourceNext{Data: byte2}: + case <-ctx.Done(): + require.Fail(t, "writing data to source chan timed out") + } + }() + + next, err := source.Next() + require.NoError(t, err, "no error expected on first call to next") + require.Equal(t, byte1, next) + + next, err = source.Next() + require.NoError(t, err, "no error expected on second call to next") + require.Equal(t, byte2, next) + + _, err = source.Next() + require.ErrorIs(t, err, io.EOF, "EOF expected after two elements were read") +} diff --git a/gitrpc/interface.go b/gitrpc/interface.go new file mode 100644 index 0000000000..2283579399 --- /dev/null +++ b/gitrpc/interface.go @@ -0,0 +1,87 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gitrpc + +import ( + "context" + "io" +) + +type Interface interface { + CreateRepository(ctx context.Context, params *CreateRepositoryParams) (*CreateRepositoryOutput, error) + DeleteRepository(ctx context.Context, params *DeleteRepositoryParams) error + GetTreeNode(ctx context.Context, params *GetTreeNodeParams) (*GetTreeNodeOutput, error) + ListTreeNodes(ctx context.Context, params *ListTreeNodeParams) (*ListTreeNodeOutput, error) + GetSubmodule(ctx context.Context, params *GetSubmoduleParams) (*GetSubmoduleOutput, error) + GetBlob(ctx context.Context, params *GetBlobParams) (*GetBlobOutput, error) + CreateBranch(ctx context.Context, params *CreateBranchParams) (*CreateBranchOutput, error) + CreateCommitTag(ctx context.Context, params *CreateCommitTagParams) (*CreateCommitTagOutput, error) + DeleteTag(ctx context.Context, params *DeleteTagParams) error + GetBranch(ctx context.Context, params *GetBranchParams) (*GetBranchOutput, error) + DeleteBranch(ctx context.Context, params *DeleteBranchParams) error + ListBranches(ctx context.Context, params *ListBranchesParams) (*ListBranchesOutput, error) + GetRef(ctx context.Context, params GetRefParams) (GetRefResponse, error) + PathsDetails(ctx context.Context, params PathsDetailsParams) (PathsDetailsOutput, error) + + // UpdateRef creates, updates or deletes a git ref. If the OldValue is defined it must match the reference value + // prior to the call. To remove a ref use the zero ref as the NewValue. To require the creation of a new one and + // not update of an exiting one, set the zero ref as the OldValue. 
+ UpdateRef(ctx context.Context, params UpdateRefParams) error + + SyncRepository(ctx context.Context, params *SyncRepositoryParams) (*SyncRepositoryOutput, error) + + MatchFiles(ctx context.Context, params *MatchFilesParams) (*MatchFilesOutput, error) + + /* + * Commits service + */ + GetCommit(ctx context.Context, params *GetCommitParams) (*GetCommitOutput, error) + ListCommits(ctx context.Context, params *ListCommitsParams) (*ListCommitsOutput, error) + ListCommitTags(ctx context.Context, params *ListCommitTagsParams) (*ListCommitTagsOutput, error) + GetCommitDivergences(ctx context.Context, params *GetCommitDivergencesParams) (*GetCommitDivergencesOutput, error) + CommitFiles(ctx context.Context, params *CommitFilesParams) (CommitFilesResponse, error) + MergeBase(ctx context.Context, params MergeBaseParams) (MergeBaseOutput, error) + + /* + * Git Cli Service + */ + GetInfoRefs(ctx context.Context, w io.Writer, params *InfoRefsParams) error + ServicePack(ctx context.Context, w io.Writer, params *ServicePackParams) error + + /* + * Diff services + */ + RawDiff(ctx context.Context, in *DiffParams, w io.Writer) error + Diff(ctx context.Context, in *DiffParams) (<-chan *FileDiff, <-chan error) + CommitDiff(ctx context.Context, params *GetCommitParams, w io.Writer) error + DiffShortStat(ctx context.Context, params *DiffParams) (DiffShortStatOutput, error) + DiffStats(ctx context.Context, params *DiffParams) (DiffStatsOutput, error) + + GetDiffHunkHeaders(ctx context.Context, params GetDiffHunkHeadersParams) (GetDiffHunkHeadersOutput, error) + DiffCut(ctx context.Context, params *DiffCutParams) (DiffCutOutput, error) + + /* + * Merge services + */ + Merge(ctx context.Context, in *MergeParams) (MergeOutput, error) + + /* + * Blame services + */ + Blame(ctx context.Context, params *BlameParams) (<-chan *BlamePart, <-chan error) + PushRemote(ctx context.Context, params *PushRemoteParams) error + + GeneratePipeline(ctx context.Context, params *GeneratePipelineParams) (GeneratePipelinesOutput, error) +} diff --git a/gitrpc/internal/files/file.go b/gitrpc/internal/files/file.go new file mode 100644 index 0000000000..be044d74f6 --- /dev/null +++ b/gitrpc/internal/files/file.go @@ -0,0 +1,34 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package files + +import ( + "path" + "strings" +) + +// CleanUploadFileName Trims a filename and returns empty string if it is a .git directory. +func CleanUploadFileName(name string) string { + // Rebase the filename + name = strings.Trim(name, "\n") + name = strings.Trim(path.Clean("/"+name), "/") + // Git disallows any filenames to have a .git directory in them. + for _, part := range strings.Split(name, "/") { + if strings.ToLower(part) == ".git" { + return "" + } + } + return name +} diff --git a/gitrpc/internal/gitea/blame.go b/gitrpc/internal/gitea/blame.go new file mode 100644 index 0000000000..23eb08c5d8 --- /dev/null +++ b/gitrpc/internal/gitea/blame.go @@ -0,0 +1,269 @@ +// Copyright 2023 Harness, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gitea + +import ( + "bufio" + "bytes" + "context" + "errors" + "io" + "regexp" + "strconv" + "strings" + "time" + + "github.com/harness/gitness/gitrpc/internal/types" + + gitea "code.gitea.io/gitea/modules/git" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +func (g Adapter) Blame(ctx context.Context, + repoPath, rev, file string, + lineFrom, lineTo int, +) types.BlameReader { + // prepare the git command line arguments + args := make([]string, 0, 8) + args = append(args, "blame", "--porcelain", "--encoding=UTF-8") + if lineFrom > 0 || lineTo > 0 { + var lines string + if lineFrom > 0 { + lines = strconv.Itoa(lineFrom) + } + if lineTo > 0 { + lines += "," + strconv.Itoa(lineTo) + } + + args = append(args, "-L", lines) + } + args = append(args, rev, "--", file) + + pipeRead, pipeWrite := io.Pipe() + stderr := &bytes.Buffer{} + go func() { + var err error + + defer func() { + // If running of the command below fails, make the pipe reader also fail with the same error. + _ = pipeWrite.CloseWithError(err) + }() + + cmd := gitea.NewCommand(ctx, args...) + err = cmd.Run(&gitea.RunOpts{ + Dir: repoPath, + Stdout: pipeWrite, + Stderr: stderr, // We capture stderr output in a buffer. + }) + }() + + return &BlameReader{ + scanner: bufio.NewScanner(pipeRead), + commitCache: make(map[string]*types.Commit), + errReader: stderr, // Any stderr output will cause the BlameReader to fail. + } +} + +// blamePorcelainHeadRE is used to detect line header start in git blame porcelain output. +// It is explained here: https://www.git-scm.com/docs/git-blame#_the_porcelain_format +var blamePorcelainHeadRE = regexp.MustCompile(`^([0-9a-f]{40}|[0-9a-f]{64}) (\d+) (\d+)( (\d+))?$`) + +var blamePorcelainOutOfRangeErrorRE = regexp.MustCompile(`has only \d+ lines$`) + +type BlameReader struct { + scanner *bufio.Scanner + lastLine string + commitCache map[string]*types.Commit + errReader io.Reader +} + +func (r *BlameReader) nextLine() (string, error) { + if line := r.lastLine; line != "" { + r.lastLine = "" + return line, nil + } + + for r.scanner.Scan() { + line := r.scanner.Text() + if line != "" { + return line, nil + } + } + + if err := r.scanner.Err(); err != nil { + return "", err + } + + return "", io.EOF +} + +func (r *BlameReader) unreadLine(line string) { + r.lastLine = line +} + +//nolint:complexity,gocognit,nestif // it's ok +func (r *BlameReader) NextPart() (*types.BlamePart, error) { + var commit *types.Commit + var lines []string + var err error + + for { + var line string + line, err = r.nextLine() + if err != nil { + break // This is the only place where we break the loop. Normally it will be the io.EOF. + } + + if matches := blamePorcelainHeadRE.FindStringSubmatch(line); matches != nil { + sha := matches[1] + + if commit == nil { + commit = r.commitCache[sha] + if commit == nil { + commit = &types.Commit{SHA: sha} + } + + if matches[5] != "" { + // At index 5 there's number of lines in this section. 
However, the resulting + // BlamePart might contain more than this because we join consecutive sections + // if the commit SHA is the same. + lineCount, _ := strconv.Atoi(matches[5]) + lines = make([]string, 0, lineCount) + } + + continue + } + + if sha != commit.SHA { + r.unreadLine(line) + r.commitCache[commit.SHA] = commit + + return &types.BlamePart{ + Commit: *commit, + Lines: lines, + }, nil + } + + continue + } + + if commit == nil { + // Continue reading the lines until a line header is reached. + // This should not happen. Normal output always starts with a line header (with a commit SHA). + continue + } + + if line[0] == '\t' { + // all output that contains actual file data is prefixed with tab, otherwise it's a header line + lines = append(lines, line[1:]) + continue + } + + parseBlameHeaders(line, commit) + } + + // Check if there's something in the error buffer... If yes, that's the error! + // It should contain error string from the git. + errRaw, _ := io.ReadAll(r.errReader) + if len(errRaw) > 0 { + line := string(errRaw) + + if idx := bytes.IndexByte(errRaw, '\n'); idx > 0 { + line = line[:idx] // get only the first line of the output + } + + line = strings.TrimPrefix(line, "fatal: ") // git errors start with the "fatal: " prefix + + switch { + case strings.Contains(line, "no such path"): + return nil, status.Error(codes.NotFound, line) + case strings.Contains(line, "bad revision"): + return nil, status.Error(codes.NotFound, line) + case blamePorcelainOutOfRangeErrorRE.MatchString(line): + return nil, status.Error(codes.InvalidArgument, line) + default: + return nil, status.Error(codes.Unknown, line) + } + } + + // This error can happen if the command git failed to start. Triggered by pipe writer's CloseWithError call. + if !errors.Is(err, io.EOF) { + return nil, status.Error(codes.Internal, err.Error()) + } + + var part *types.BlamePart + + if commit != nil && len(lines) > 0 { + part = &types.BlamePart{ + Commit: *commit, + Lines: lines, + } + } + + return part, err +} + +func parseBlameHeaders(line string, commit *types.Commit) { + // This is the list of git blame headers that we process. Other headers we ignore. + const ( + headerSummary = "summary " + headerAuthorName = "author " + headerAuthorMail = "author-mail " + headerAuthorTime = "author-time " + headerCommitterName = "committer " + headerCommitterMail = "committer-mail " + headerCommitterTime = "committer-time " + ) + + switch { + case strings.HasPrefix(line, headerSummary): + commit.Title = extractName(line[len(headerSummary):]) + case strings.HasPrefix(line, headerAuthorName): + commit.Author.Identity.Name = extractName(line[len(headerAuthorName):]) + case strings.HasPrefix(line, headerAuthorMail): + commit.Author.Identity.Email = extractEmail(line[len(headerAuthorMail):]) + case strings.HasPrefix(line, headerAuthorTime): + commit.Author.When = extractTime(line[len(headerAuthorTime):]) + case strings.HasPrefix(line, headerCommitterName): + commit.Committer.Identity.Name = extractName(line[len(headerCommitterName):]) + case strings.HasPrefix(line, headerCommitterMail): + commit.Committer.Identity.Email = extractEmail(line[len(headerCommitterMail):]) + case strings.HasPrefix(line, headerCommitterTime): + commit.Committer.When = extractTime(line[len(headerCommitterTime):]) + } +} + +func extractName(s string) string { + return s +} + +// extractEmail extracts email from git blame output. +// The email address is wrapped between "<" and ">" characters. +// If "<" or ">" are not in place it returns the string as it. 
+func extractEmail(s string) string { + if len(s) >= 2 && s[0] == '<' && s[len(s)-1] == '>' { + s = s[1 : len(s)-1] + } + return s +} + +// extractTime extracts timestamp from git blame output. +// The timestamp is UNIX time (in seconds). +// In case of an error it simply returns zero UNIX time. +func extractTime(s string) time.Time { + milli, _ := strconv.ParseInt(s, 10, 64) + return time.Unix(milli, 0) +} diff --git a/gitrpc/internal/gitea/blame_test.go b/gitrpc/internal/gitea/blame_test.go new file mode 100644 index 0000000000..234ba3e802 --- /dev/null +++ b/gitrpc/internal/gitea/blame_test.go @@ -0,0 +1,171 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gitea + +import ( + "bufio" + "errors" + "io" + "strings" + "testing" + "testing/iotest" + "time" + + "github.com/harness/gitness/gitrpc/internal/types" + + "github.com/google/go-cmp/cmp" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +func TestBlameReader_NextPart(t *testing.T) { + // a sample of git blame porcelain output + const blameOut = ` +16f267ad4f731af1b2e36f42e170ed8921377398 9 10 1 +author Marko +author-mail +author-time 1669812989 +author-tz +0100 +committer Committer +committer-mail +committer-time 1669812989 +committer-tz +0100 +summary Pull request 1 +filename file_name_before_rename.go + Line 10 +16f267ad4f731af1b2e36f42e170ed8921377398 12 11 1 + Line 11 +dcb4b6b63e86f06ed4e4c52fbc825545dc0b6200 12 12 1 +author Marko +author-mail +author-time 1673952128 +author-tz +0100 +committer Committer +committer-mail +committer-time 1673952128 +committer-tz +0100 +summary Pull request 2 +previous 6561a7b86e1a5e74ea0e4e73ccdfc18b486a2826 file_name.go +filename file_name.go + Line 12 +16f267ad4f731af1b2e36f42e170ed8921377398 13 13 2 + Line 13 +16f267ad4f731af1b2e36f42e170ed8921377398 14 14 + Line 14 +` + + author := types.Identity{ + Name: "Marko", + Email: "marko.gacesa@harness.io", + } + committer := types.Identity{ + Name: "Committer", + Email: "noreply@harness.io", + } + + commit1 := types.Commit{ + SHA: "16f267ad4f731af1b2e36f42e170ed8921377398", + Title: "Pull request 1", + Message: "", + Author: types.Signature{ + Identity: author, + When: time.Unix(1669812989, 0), + }, + Committer: types.Signature{ + Identity: committer, + When: time.Unix(1669812989, 0), + }, + } + + commit2 := types.Commit{ + SHA: "dcb4b6b63e86f06ed4e4c52fbc825545dc0b6200", + Title: "Pull request 2", + Message: "", + Author: types.Signature{ + Identity: author, + When: time.Unix(1673952128, 0), + }, + Committer: types.Signature{ + Identity: committer, + When: time.Unix(1673952128, 0), + }, + } + + want := []*types.BlamePart{ + { + Commit: commit1, + Lines: []string{"Line 10", "Line 11"}, + }, + { + Commit: commit2, + Lines: []string{"Line 12"}, + }, + { + Commit: commit1, + Lines: []string{"Line 13", "Line 14"}, + }, + } + + reader := BlameReader{ + scanner: bufio.NewScanner(strings.NewReader(blameOut)), + commitCache: make(map[string]*types.Commit), + errReader: 
strings.NewReader(""), + } + + var got []*types.BlamePart + + for { + part, err := reader.NextPart() + if part != nil { + got = append(got, part) + } + if err != nil { + if !errors.Is(err, io.EOF) { + t.Errorf("failed with the error: %v", err) + } + break + } + } + + if diff := cmp.Diff(got, want); diff != "" { + t.Errorf(diff) + } +} + +func TestBlameReader_NextPart_UserError(t *testing.T) { + reader := BlameReader{ + scanner: bufio.NewScanner(strings.NewReader("")), + commitCache: make(map[string]*types.Commit), + errReader: strings.NewReader("fatal: no such path\n"), + } + + _, err := reader.NextPart() + if s, ok := status.FromError(err); !ok || s.Code() != codes.NotFound { + t.Errorf("expected NotFound error but got: %v", err) + } +} + +func TestBlameReader_NextPart_CmdError(t *testing.T) { + reader := BlameReader{ + scanner: bufio.NewScanner(iotest.ErrReader(errors.New("dummy error"))), + commitCache: make(map[string]*types.Commit), + errReader: strings.NewReader(""), + } + + _, err := reader.NextPart() + if s, ok := status.FromError(err); !ok || s.Code() != codes.Internal || s.Message() != "dummy error" { + t.Errorf("expected Internal error but got: %v", err) + } +} diff --git a/gitrpc/internal/gitea/blob.go b/gitrpc/internal/gitea/blob.go new file mode 100644 index 0000000000..ddc7cbd96a --- /dev/null +++ b/gitrpc/internal/gitea/blob.go @@ -0,0 +1,103 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gitea + +import ( + "bytes" + "context" + "fmt" + "io" + + "github.com/harness/gitness/gitrpc/internal/types" + + "code.gitea.io/gitea/modules/git" +) + +// GetBlob returns the blob for the given object sha. +func (g Adapter) GetBlob(ctx context.Context, repoPath string, sha string, sizeLimit int64) (*types.BlobReader, error) { + // Note: We are avoiding gitea blob implementation, as that is tied to the lifetime of the repository object. + // Instead, we just use the gitea helper methods ourselves. 
+ stdIn, stdOut, cancel := git.CatFileBatch(ctx, repoPath) + + _, err := stdIn.Write([]byte(sha + "\n")) + if err != nil { + cancel() + return nil, fmt.Errorf("failed to write blob sha to git stdin: %w", err) + } + + objectSHA, objectType, objectSize, err := git.ReadBatchLine(stdOut) + if err != nil { + cancel() + return nil, processGiteaErrorf(err, "failed to read cat-file batch line") + } + + if string(objectSHA) != sha { + cancel() + return nil, fmt.Errorf("cat-file returned object sha '%s' but expected '%s'", objectSHA, sha) + } + if objectType != string(git.ObjectBlob) { + cancel() + return nil, fmt.Errorf("cat-file returned object type '%s' but expected '%s'", objectType, git.ObjectBlob) + } + + contentSize := objectSize + if sizeLimit > 0 && sizeLimit < contentSize { + contentSize = sizeLimit + } + + return &types.BlobReader{ + SHA: sha, + Size: objectSize, + ContentSize: contentSize, + Content: &exactLimitReader{ + reader: stdOut, + remainingBytes: contentSize, + close: func() error { + // TODO: is there a better (but short) way to clear the buffer? + // gitea is .Discard()'ing elements here until it's empty. + stdOut.Reset(bytes.NewBuffer([]byte{})) + cancel() + return nil + }, + }, + }, nil +} + +// exactLimitReader reads the content of a reader and ensures no more than the specified bytes will be requested from +// the underlaying reader. This is required for readers that don't ensure completion after reading all remaining bytes. +// io.LimitReader doesn't work as it waits for bytes that never come, io.SectionReader would requrie an io.ReaderAt. +type exactLimitReader struct { + reader io.Reader + remainingBytes int64 + close func() error +} + +func (r *exactLimitReader) Read(p []byte) (int, error) { + if r.remainingBytes <= 0 { + return 0, io.EOF + } + + if int64(len(p)) > r.remainingBytes { + p = p[0:r.remainingBytes] + } + n, err := r.reader.Read(p) + r.remainingBytes -= int64(n) + + return n, err +} + +func (r *exactLimitReader) Close() error { + return r.close() +} diff --git a/gitrpc/internal/gitea/branch.go b/gitrpc/internal/gitea/branch.go new file mode 100644 index 0000000000..0e6dcb5583 --- /dev/null +++ b/gitrpc/internal/gitea/branch.go @@ -0,0 +1,55 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gitea + +import ( + "context" + "fmt" + + "github.com/harness/gitness/gitrpc/internal/types" + + gitea "code.gitea.io/gitea/modules/git" +) + +// GetBranch gets an existing branch. 
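// A minimal illustrative sketch of exactLimitReader, defined above: it never requests more than
// its remaining byte budget from the underlying reader, which is why it is preferred over
// io.LimitReader for the shared cat-file batch stream. Values are hypothetical; assumes "io" and
// "strings" are imported.
//
//	r := &exactLimitReader{
//		reader:         strings.NewReader("hello, world"),
//		remainingBytes: 5,
//		close:          func() error { return nil },
//	}
//	buf, _ := io.ReadAll(r) // reads exactly 5 bytes, then reports io.EOF
//	// string(buf) == "hello"
//	_ = r.Close()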
+func (g Adapter) GetBranch(ctx context.Context, repoPath string, + branchName string) (*types.Branch, error) { + giteaRepo, err := gitea.OpenRepository(ctx, repoPath) + if err != nil { + return nil, processGiteaErrorf(err, "failed to open repository") + } + defer giteaRepo.Close() + + giteaBranch, err := giteaRepo.GetBranch(branchName) + if err != nil { + return nil, processGiteaErrorf(err, "failed to get branch '%s'", branchName) + } + + giteaCommit, err := giteaBranch.GetCommit() + if err != nil { + return nil, processGiteaErrorf(err, "failed to get commit '%s'", branchName) + } + + commit, err := mapGiteaCommit(giteaCommit) + if err != nil { + return nil, fmt.Errorf("failed to map gitea commit: %w", err) + } + + return &types.Branch{ + Name: giteaBranch.Name, + SHA: giteaCommit.ID.String(), + Commit: commit, + }, nil +} diff --git a/gitrpc/internal/gitea/commit.go b/gitrpc/internal/gitea/commit.go new file mode 100644 index 0000000000..f83c3f55ff --- /dev/null +++ b/gitrpc/internal/gitea/commit.go @@ -0,0 +1,445 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gitea + +import ( + "bytes" + "context" + "errors" + "fmt" + "strconv" + "strings" + + "github.com/harness/gitness/gitrpc/internal/types" + + gitea "code.gitea.io/gitea/modules/git" +) + +const ( + giteaPrettyLogFormat = `--pretty=format:%H` +) + +// GetLatestCommit gets the latest commit of a path relative from the provided reference. +// Note: ref can be Branch / Tag / CommitSHA. +func (g Adapter) GetLatestCommit(ctx context.Context, repoPath string, + ref string, treePath string) (*types.Commit, error) { + treePath = cleanTreePath(treePath) + + giteaRepo, err := gitea.OpenRepository(ctx, repoPath) + if err != nil { + return nil, processGiteaErrorf(err, "failed to open repository") + } + defer giteaRepo.Close() + + giteaCommit, err := giteaGetCommitByPath(giteaRepo, ref, treePath) + if err != nil { + return nil, processGiteaErrorf(err, "error getting latest commit for '%s'", treePath) + } + + return mapGiteaCommit(giteaCommit) +} + +// giteaGetCommitByPath returns the latest commit per specific branch. +func giteaGetCommitByPath(giteaRepo *gitea.Repository, ref string, treePath string) (*gitea.Commit, error) { + if treePath == "" { + treePath = "." + } + + // NOTE: the difference to gitea implementation is passing `ref`. + stdout, _, runErr := gitea.NewCommand(giteaRepo.Ctx, "log", ref, "-1", giteaPrettyLogFormat, "--", treePath). 
+ RunStdBytes(&gitea.RunOpts{Dir: giteaRepo.Path}) + if runErr != nil { + return nil, fmt.Errorf("failed to trigger log command: %w", runErr) + } + + lines := parseLinesToSlice(stdout) + + giteaCommits, err := getGiteaCommits(giteaRepo, lines) + if err != nil { + return nil, err + } + + return giteaCommits[0], nil +} + +func getGiteaCommits(giteaRepo *gitea.Repository, commitIDs []string) ([]*gitea.Commit, error) { + var giteaCommits []*gitea.Commit + if len(commitIDs) == 0 { + return giteaCommits, nil + } + + for _, commitID := range commitIDs { + commit, err := giteaRepo.GetCommit(commitID) + if err != nil { + return nil, fmt.Errorf("failed to get commit '%s': %w", commitID, err) + } + giteaCommits = append(giteaCommits, commit) + } + + return giteaCommits, nil +} + +func (g Adapter) listCommitSHAs( + giteaRepo *gitea.Repository, + ref string, + page int, + limit int, + filter types.CommitFilter, +) ([]string, error) { + args := make([]string, 0, 16) + args = append(args, "rev-list") + + // return commits only up to a certain reference if requested + if filter.AfterRef != "" { + // ^REF tells the rev-list command to return only commits that aren't reachable by SHA + args = append(args, fmt.Sprintf("^%s", filter.AfterRef)) + } + // add refCommitSHA as starting point + args = append(args, ref) + + if len(filter.Path) != 0 { + args = append(args, "--", filter.Path) + } + + // add pagination if requested + // TODO: we should add absolut limits to protect gitrpc (return error) + if limit > 0 { + args = append(args, "--max-count", fmt.Sprint(limit)) + + if page > 1 { + args = append(args, "--skip", fmt.Sprint((page-1)*limit)) + } + } + + if filter.Since > 0 || filter.Until > 0 { + args = append(args, "--date", "unix") + } + if filter.Since > 0 { + args = append(args, "--since", strconv.FormatInt(filter.Since, 10)) + } + if filter.Until > 0 { + args = append(args, "--until", strconv.FormatInt(filter.Until, 10)) + } + if filter.Committer != "" { + args = append(args, "--committer", filter.Committer) + } + + stdout, _, runErr := gitea.NewCommand(giteaRepo.Ctx, args...).RunStdBytes(&gitea.RunOpts{Dir: giteaRepo.Path}) + if runErr != nil { + // TODO: handle error in case they don't have a common merge base! + return nil, processGiteaErrorf(runErr, "failed to trigger rev-list command") + } + + return parseLinesToSlice(stdout), nil +} + +// ListCommitSHAs lists the commits reachable from ref. +// Note: ref & afterRef can be Branch / Tag / CommitSHA. +// Note: commits returned are [ref->...->afterRef). +func (g Adapter) ListCommitSHAs( + ctx context.Context, + repoPath string, + ref string, + page int, + limit int, + filter types.CommitFilter, +) ([]string, error) { + giteaRepo, err := gitea.OpenRepository(ctx, repoPath) + if err != nil { + return nil, processGiteaErrorf(err, "failed to open repository") + } + defer giteaRepo.Close() + + return g.listCommitSHAs(giteaRepo, ref, page, limit, filter) +} + +// ListCommits lists the commits reachable from ref. +// Note: ref & afterRef can be Branch / Tag / CommitSHA. +// Note: commits returned are [ref->...->afterRef). 
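// A minimal illustrative sketch of the rev-list invocation assembled by listCommitSHAs above for
// a hypothetical call; refs, counts and the committer are made up.
//
//	filter := types.CommitFilter{AfterRef: "v1.0.0", Committer: "jane"}
//	shas, err := adapter.ListCommitSHAs(ctx, repoPath, "main", 2 /* page */, 20 /* limit */, filter)
//
// roughly corresponds to running
//
//	git rev-list ^v1.0.0 main --max-count 20 --skip 20 --committer jane
//
// in the repository: commits reachable from main but not from v1.0.0, second page of 20 results,
// restricted to the given committer.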
+func (g Adapter) ListCommits(ctx context.Context, + repoPath string, + ref string, + page int, limit int, filter types.CommitFilter, +) ([]types.Commit, []types.PathRenameDetails, error) { + giteaRepo, err := gitea.OpenRepository(ctx, repoPath) + if err != nil { + return nil, nil, processGiteaErrorf(err, "failed to open repository") + } + defer giteaRepo.Close() + + commitSHAs, err := g.listCommitSHAs(giteaRepo, ref, page, limit, filter) + if err != nil { + return nil, nil, err + } + + giteaCommits, err := getGiteaCommits(giteaRepo, commitSHAs) + if err != nil { + return nil, nil, err + } + + commits := make([]types.Commit, len(giteaCommits)) + for i := range giteaCommits { + var commit *types.Commit + commit, err = mapGiteaCommit(giteaCommits[i]) + if err != nil { + return nil, nil, err + } + commits[i] = *commit + } + + if len(filter.Path) != 0 { + renameDetailsList, err := getRenameDetails(giteaRepo, commits, filter.Path) + if err != nil { + return nil, nil, err + } + cleanedUpCommits := cleanupCommitsForRename(commits, renameDetailsList, filter.Path) + return cleanedUpCommits, renameDetailsList, nil + } + + return commits, nil, nil +} + +// In case of rename of a file, same commit will be listed twice - Once in old file and second time in new file. +// Hence, we are making it a pattern to only list it as part of new file and not as part of old file. +func cleanupCommitsForRename( + commits []types.Commit, + renameDetails []types.PathRenameDetails, + path string, +) []types.Commit { + if len(commits) == 0 { + return commits + } + for _, renameDetail := range renameDetails { + // Since rename details is present it implies that we have commits and hence don't need null check. + if commits[0].SHA == renameDetail.CommitSHABefore && path == renameDetail.OldPath { + return commits[1:] + } + } + return commits +} + +func getRenameDetails( + giteaRepo *gitea.Repository, + commits []types.Commit, + path string) ([]types.PathRenameDetails, error) { + if len(commits) == 0 { + return []types.PathRenameDetails{}, nil + } + + renameDetailsList := make([]types.PathRenameDetails, 0, 2) + + renameDetails, err := giteaGetRenameDetails(giteaRepo, commits[0].SHA, path) + if err != nil { + return nil, err + } + if renameDetails.NewPath != "" || renameDetails.OldPath != "" { + renameDetails.CommitSHABefore = commits[0].SHA + renameDetailsList = append(renameDetailsList, *renameDetails) + } + + if len(commits) == 1 { + return renameDetailsList, nil + } + + renameDetailsLast, err := giteaGetRenameDetails(giteaRepo, commits[len(commits)-1].SHA, path) + if err != nil { + return nil, err + } + + if renameDetailsLast.NewPath != "" || renameDetailsLast.OldPath != "" { + renameDetailsLast.CommitSHAAfter = commits[len(commits)-1].SHA + renameDetailsList = append(renameDetailsList, *renameDetailsLast) + } + return renameDetailsList, nil +} + +func giteaGetRenameDetails(giteaRepo *gitea.Repository, ref string, path string) (*types.PathRenameDetails, error) { + stdout, _, runErr := gitea.NewCommand(giteaRepo.Ctx, "log", ref, "--name-status", "--pretty=format:", "-1"). 
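// A minimal illustrative sketch of how cleanupCommitsForRename above avoids listing a rename
// commit twice; SHAs and paths are hypothetical.
//
//	commits := []types.Commit{{SHA: "aaa111"}, {SHA: "bbb222"}}
//	renames := []types.PathRenameDetails{{
//		CommitSHABefore: "aaa111",
//		OldPath:         "docs/old.md",
//		NewPath:         "docs/new.md",
//	}}
//	// When listing the history of the old path, the rename commit is dropped here because it is
//	// already reported as part of the new path's history:
//	cleaned := cleanupCommitsForRename(commits, renames, "docs/old.md")
//	// len(cleaned) == 1, cleaned[0].SHA == "bbb222"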
+ RunStdBytes(&gitea.RunOpts{Dir: giteaRepo.Path}) + if runErr != nil { + return nil, fmt.Errorf("failed to trigger log command: %w", runErr) + } + + lines := parseLinesToSlice(stdout) + + changeType, oldPath, newPath, err := getFileChangeTypeFromLog(lines, path) + if err != nil { + return nil, err + } + + if strings.HasPrefix(*changeType, "R") { + return &types.PathRenameDetails{ + OldPath: *oldPath, + NewPath: *newPath, + }, nil + } + + return &types.PathRenameDetails{}, nil +} + +func getFileChangeTypeFromLog(changeStrings []string, filePath string) (*string, *string, *string, error) { + for _, changeString := range changeStrings { + if strings.Contains(changeString, filePath) { + changeInfo := strings.Split(changeString, "\t") + if len(changeInfo) != 3 { + return &changeInfo[0], nil, nil, nil + } + return &changeInfo[0], &changeInfo[1], &changeInfo[2], nil + } + } + return nil, nil, nil, fmt.Errorf("could not parse change for the file") +} + +// GetCommit returns the (latest) commit for a specific ref. +// Note: ref can be Branch / Tag / CommitSHA. +func (g Adapter) GetCommit(ctx context.Context, repoPath string, ref string) (*types.Commit, error) { + giteaRepo, err := gitea.OpenRepository(ctx, repoPath) + if err != nil { + return nil, processGiteaErrorf(err, "failed to open repository") + } + defer giteaRepo.Close() + + commit, err := giteaRepo.GetCommit(ref) + if err != nil { + return nil, processGiteaErrorf(err, "error getting commit for ref '%s'", ref) + } + + return mapGiteaCommit(commit) +} + +func (g Adapter) GetFullCommitID(ctx context.Context, repoPath, shortID string) (string, error) { + return gitea.GetFullCommitID(ctx, repoPath, shortID) +} + +// GetCommits returns the (latest) commits for a specific list of refs. +// Note: ref can be Branch / Tag / CommitSHA. +func (g Adapter) GetCommits(ctx context.Context, repoPath string, refs []string) ([]types.Commit, error) { + giteaRepo, err := gitea.OpenRepository(ctx, repoPath) + if err != nil { + return nil, processGiteaErrorf(err, "failed to open repository") + } + defer giteaRepo.Close() + + commits := make([]types.Commit, len(refs)) + for i, sha := range refs { + var giteaCommit *gitea.Commit + giteaCommit, err = giteaRepo.GetCommit(sha) + if err != nil { + return nil, processGiteaErrorf(err, "error getting commit '%s'", sha) + } + + var commit *types.Commit + commit, err = mapGiteaCommit(giteaCommit) + if err != nil { + return nil, err + } + commits[i] = *commit + } + + return commits, nil +} + +// GetCommitDivergences returns the count of the diverging commits for all branch pairs. +// IMPORTANT: If a max is provided it limits the overal count of diverging commits +// (max 10 could lead to (0, 10) while it's actually (2, 12)). +func (g Adapter) GetCommitDivergences(ctx context.Context, repoPath string, + requests []types.CommitDivergenceRequest, max int32) ([]types.CommitDivergence, error) { + var err error + res := make([]types.CommitDivergence, len(requests)) + for i, req := range requests { + res[i], err = g.getCommitDivergence(ctx, repoPath, req, max) + if errors.Is(err, types.ErrNotFound) { + res[i] = types.CommitDivergence{Ahead: -1, Behind: -1} + continue + } + if err != nil { + return nil, err + } + } + + return res, nil +} + +// getCommitDivergence returns the count of diverging commits for a pair of branches. +// IMPORTANT: If a max is provided it limits the overal count of diverging commits +// (max 10 could lead to (0, 10) while it's actually (2, 12)). 
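// A minimal illustrative sketch of the single output line produced by the rev-list call issued
// per branch pair in getCommitDivergence below; branches and counts are hypothetical.
//
//	// req := types.CommitDivergenceRequest{From: "feature", To: "main"}
//	// git rev-list --count --left-right feature...main   ->   "2\t12\n"
//	rawLeft, rawRight, _ := strings.Cut("2\t12\n", "\t")
//	// rawLeft  "2"  -> commits only reachable from feature -> Ahead: 2
//	// rawRight "12" -> commits only reachable from main    -> Behind: 12 (after trimming "\n")
//
// With --max-count both counters stop early once the combined total hits the limit, which is the
// capping behaviour described above.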
+// NOTE: Gitea implementation makes two git cli calls, but it can be done with one +// (downside is the max behavior explained above). +func (g Adapter) getCommitDivergence(ctx context.Context, repoPath string, + req types.CommitDivergenceRequest, max int32) (types.CommitDivergence, error) { + // prepare args + args := []string{ + "rev-list", + "--count", + "--left-right", + } + // limit count if requested. + if max > 0 { + args = append(args, "--max-count") + args = append(args, fmt.Sprint(max)) + } + // add query to get commits without shared base commits + args = append(args, fmt.Sprintf("%s...%s", req.From, req.To)) + + var err error + cmd := gitea.NewCommand(ctx, args...) + stdOut, stdErr, err := cmd.RunStdString(&gitea.RunOpts{Dir: repoPath}) + if err != nil { + return types.CommitDivergence{}, + processGiteaErrorf(err, "git rev-list failed for '%s...%s' (stdErr: '%s')", req.From, req.To, stdErr) + } + + // parse output, e.g.: `1 2\n` + rawLeft, rawRight, ok := strings.Cut(stdOut, "\t") + if !ok { + return types.CommitDivergence{}, fmt.Errorf("git rev-list returned unexpected output '%s'", stdOut) + } + + // trim any unnecessary characters + rawLeft = strings.TrimRight(rawLeft, " \t") + rawRight = strings.TrimRight(rawRight, " \t\n") + + // parse numbers + left, err := strconv.ParseInt(rawLeft, 10, 32) + if err != nil { + return types.CommitDivergence{}, + fmt.Errorf("failed to parse git rev-list output for ahead '%s' (full: '%s')): %w", rawLeft, stdOut, err) + } + right, err := strconv.ParseInt(rawRight, 10, 32) + if err != nil { + return types.CommitDivergence{}, + fmt.Errorf("failed to parse git rev-list output for behind '%s' (full: '%s')): %w", rawRight, stdOut, err) + } + + return types.CommitDivergence{ + Ahead: int32(left), + Behind: int32(right), + }, nil +} + +func parseLinesToSlice(output []byte) []string { + if len(output) == 0 { + return nil + } + + lines := bytes.Split(bytes.TrimSpace(output), []byte{'\n'}) + + slice := make([]string, len(lines)) + for i, line := range lines { + slice[i] = string(line) + } + + return slice +} diff --git a/gitrpc/internal/gitea/config.go b/gitrpc/internal/gitea/config.go new file mode 100644 index 0000000000..d07d91efa9 --- /dev/null +++ b/gitrpc/internal/gitea/config.go @@ -0,0 +1,38 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gitea + +import ( + "context" + "fmt" + "strings" + + "code.gitea.io/gitea/modules/git" +) + +// Config set local git key and value configuration. +func (g Adapter) Config(ctx context.Context, repoPath, key, value string) error { + var outbuf, errbuf strings.Builder + if err := git.NewCommand(ctx, "config", "--local").AddArguments(key, value). 
+ Run(&git.RunOpts{ + Dir: repoPath, + Stdout: &outbuf, + Stderr: &errbuf, + }); err != nil { + return fmt.Errorf("git config [%s -> <%s> ]: %w\n%s\n%s", + key, value, err, outbuf.String(), errbuf.String()) + } + return nil +} diff --git a/gitrpc/internal/gitea/diff.go b/gitrpc/internal/gitea/diff.go new file mode 100644 index 0000000000..ce7879974c --- /dev/null +++ b/gitrpc/internal/gitea/diff.go @@ -0,0 +1,208 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gitea + +import ( + "bytes" + "context" + "errors" + "fmt" + "io" + "strings" + + "github.com/harness/gitness/gitrpc/internal/parser" + "github.com/harness/gitness/gitrpc/internal/types" + + "code.gitea.io/gitea/modules/git" +) + +func (g Adapter) RawDiff( + ctx context.Context, + repoPath string, + baseRef string, + headRef string, + mergeBase bool, + w io.Writer, +) error { + args := make([]string, 0, 8) + args = append(args, "diff", "-M", "--full-index") + if mergeBase { + args = append(args, "--merge-base") + } + args = append(args, baseRef, headRef) + + cmd := git.NewCommand(ctx, args...) + cmd.SetDescription(fmt.Sprintf("GetDiffRange [repo_path: %s]", repoPath)) + errbuf := bytes.Buffer{} + if err := cmd.Run(&git.RunOpts{ + Dir: repoPath, + Stderr: &errbuf, + Stdout: w, + }); err != nil { + if errbuf.Len() > 0 { + err = &runStdError{err: err, stderr: errbuf.String()} + } + return processGiteaErrorf(err, "git diff failed between '%s' and '%s' with err: %v", baseRef, headRef, err) + } + return nil +} + +// CommitDiff will stream diff for provided ref +func (g Adapter) CommitDiff(ctx context.Context, repoPath, sha string, w io.Writer) error { + args := make([]string, 0, 8) + args = append(args, "show", "--full-index", "--pretty=format:%b", sha) + + stderr := new(bytes.Buffer) + cmd := git.NewCommand(ctx, args...) + if err := cmd.Run(&git.RunOpts{ + Dir: repoPath, + Stdout: w, + Stderr: stderr, + }); err != nil { + return processGiteaErrorf(err, "commit diff error: %v", stderr) + } + return nil +} + +func (g Adapter) DiffShortStat( + ctx context.Context, + repoPath string, + baseRef string, + headRef string, + useMergeBase bool, +) (types.DiffShortStat, error) { + separator := ".." + if useMergeBase { + separator = "..." + } + + shortstatArgs := []string{baseRef + separator + headRef} + if len(baseRef) == 0 || baseRef == git.EmptySHA { + shortstatArgs = []string{git.EmptyTreeSHA, headRef} + } + numFiles, totalAdditions, totalDeletions, err := git.GetDiffShortStat(ctx, repoPath, shortstatArgs...) + if err != nil { + return types.DiffShortStat{}, processGiteaErrorf(err, "failed to get diff short stat between %s and %s", + baseRef, headRef) + } + return types.DiffShortStat{ + Files: numFiles, + Additions: totalAdditions, + Deletions: totalDeletions, + }, nil +} + +// GetDiffHunkHeaders for each file in diff output returns file name (old and new to detect renames), +// and all hunk headers. The diffs are generated with unified=0 parameter to create minimum sized hunks. 
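// A minimal illustrative sketch of the two revision-range forms built by DiffShortStat above,
// with hypothetical refs.
//
//	useMergeBase == false  ->  "main..feature"   (diff the two tips directly)
//	useMergeBase == true   ->  "main...feature"  (diff feature against the merge base of the two,
//	                                              i.e. only the changes introduced on feature)
//
// An empty or all-zero baseRef is replaced by the empty tree SHA, so the stats then describe the
// full content reachable from headRef.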
+// Hunks' body is ignored. +// The purpose of this function is to get data based on which code comments could be repositioned. +func (g Adapter) GetDiffHunkHeaders( + ctx context.Context, + repoPath, targetRef, sourceRef string, +) ([]*types.DiffFileHunkHeaders, error) { + pipeRead, pipeWrite := io.Pipe() + stderr := &bytes.Buffer{} + go func() { + var err error + + defer func() { + // If running of the command below fails, make the pipe reader also fail with the same error. + _ = pipeWrite.CloseWithError(err) + }() + + cmd := git.NewCommand(ctx, + "diff", "--patch", "--no-color", "--unified=0", sourceRef, targetRef) + err = cmd.Run(&git.RunOpts{ + Dir: repoPath, + Stdout: pipeWrite, + Stderr: stderr, // We capture stderr output in a buffer. + }) + }() + + fileHunkHeaders, err := parser.GetHunkHeaders(pipeRead) + + // First check if there's something in the stderr buffer, if yes that's the error + if errStderr := parseDiffStderr(stderr); errStderr != nil { + return nil, errStderr + } + + // Next check if reading the git diff output caused an error + if err != nil { + return nil, err + } + + return fileHunkHeaders, nil +} + +// DiffCut parses full file git diff output and returns lines specified with the parameters. +// The purpose of this function is to get diff data with which code comments could be generated. +func (g Adapter) DiffCut( + ctx context.Context, + repoPath, targetRef, sourceRef, path string, + params types.DiffCutParams, +) (types.HunkHeader, types.Hunk, error) { + pipeRead, pipeWrite := io.Pipe() + stderr := &bytes.Buffer{} + go func() { + var err error + + defer func() { + // If running of the command below fails, make the pipe reader also fail with the same error. + _ = pipeWrite.CloseWithError(err) + }() + + cmd := git.NewCommand(ctx, + "diff", "--merge-base", "--patch", "--no-color", "--unified=100000000", + targetRef, sourceRef, "--", path) + err = cmd.Run(&git.RunOpts{ + Dir: repoPath, + Stdout: pipeWrite, + Stderr: stderr, // We capture stderr output in a buffer. 
+ }) + }() + + diffCutHeader, linesHunk, err := parser.DiffCut(pipeRead, params) + + // First check if there's something in the stderr buffer, if yes that's the error + if errStderr := parseDiffStderr(stderr); errStderr != nil { + return types.HunkHeader{}, types.Hunk{}, errStderr + } + + // Next check if reading the git diff output caused an error + if err != nil { + return types.HunkHeader{}, types.Hunk{}, err + } + + return diffCutHeader, linesHunk, nil +} + +func parseDiffStderr(stderr *bytes.Buffer) error { + errRaw := stderr.String() // assume there will never be a lot of output to stdout + if len(errRaw) == 0 { + return nil + } + + if idx := strings.IndexByte(errRaw, '\n'); idx > 0 { + errRaw = errRaw[:idx] // get only the first line of the output + } + + errRaw = strings.TrimPrefix(errRaw, "fatal: ") // git errors start with the "fatal: " prefix + + if strings.Contains(errRaw, "bad revision") { + return types.ErrSHADoesNotMatch + } + + return errors.New(errRaw) +} diff --git a/gitrpc/internal/gitea/errors.go b/gitrpc/internal/gitea/errors.go new file mode 100644 index 0000000000..e5097105cd --- /dev/null +++ b/gitrpc/internal/gitea/errors.go @@ -0,0 +1,38 @@ +package gitea + +import ( + "errors" + "os/exec" + + "code.gitea.io/gitea/modules/git" +) + +type runStdError struct { + err error + stderr string + errMsg string +} + +func (r *runStdError) Error() string { + // the stderr must be in the returned error text, some code only checks `strings.Contains(err.Error(), "git error")` + if r.errMsg == "" { + r.errMsg = git.ConcatenateError(r.err, r.stderr).Error() + } + return r.errMsg +} + +func (r *runStdError) Unwrap() error { + return r.err +} + +func (r *runStdError) Stderr() string { + return r.stderr +} + +func (r *runStdError) IsExitCode(code int) bool { + var exitError *exec.ExitError + if errors.As(r.err, &exitError) { + return exitError.ExitCode() == code + } + return false +} diff --git a/gitrpc/internal/gitea/gitea.go b/gitrpc/internal/gitea/gitea.go new file mode 100644 index 0000000000..b501449aa6 --- /dev/null +++ b/gitrpc/internal/gitea/gitea.go @@ -0,0 +1,48 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gitea + +import ( + "context" + + "github.com/harness/gitness/cache" + "github.com/harness/gitness/gitrpc/internal/types" + + gitea "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/setting" +) + +type Adapter struct { + repoProvider *GoGitRepoProvider + lastCommitCache cache.Cache[CommitEntryKey, *types.Commit] +} + +func New( + repoProvider *GoGitRepoProvider, + lastCommitCache cache.Cache[CommitEntryKey, *types.Commit], +) (Adapter, error) { + // TODO: should be subdir of gitRoot? What is it being used for? 
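// A minimal illustrative sketch of how parseDiffStderr above condenses git's stderr into an
// error; the messages are hypothetical but follow git's usual "fatal: " prefix.
//
//	parseDiffStderr(bytes.NewBufferString(""))                                // -> nil
//	parseDiffStderr(bytes.NewBufferString("fatal: bad revision 'feature'\n")) // -> types.ErrSHADoesNotMatch
//	parseDiffStderr(bytes.NewBufferString("fatal: ambiguous argument 'x'\nmore detail\n"))
//	// -> errors.New("ambiguous argument 'x'"): only the first line is kept, "fatal: " is stripped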
+ setting.Git.HomePath = "home" + + err := gitea.InitSimple(context.Background()) + if err != nil { + return Adapter{}, err + } + + return Adapter{ + repoProvider: repoProvider, + lastCommitCache: lastCommitCache, + }, nil +} diff --git a/gitrpc/internal/gitea/gitea_test.go b/gitrpc/internal/gitea/gitea_test.go new file mode 100644 index 0000000000..33a3296230 --- /dev/null +++ b/gitrpc/internal/gitea/gitea_test.go @@ -0,0 +1,101 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gitea + +import ( + "fmt" + "testing" + "time" + + "github.com/harness/gitness/gitrpc/internal/types" + + "github.com/stretchr/testify/require" +) + +func TestParseSignatureFromCatFileLine(t *testing.T) { + // test good cases + testParseSignatureFromCatFileLineFor(t, "Max Mustermann", "max.mm@me.io", "1666401234 -0700", 1666401234, -7*60*60) + testParseSignatureFromCatFileLineFor(t, "Max", "max@gitness.io", "1666050206 +0530", 1666050206, 5*60*60+30*60) + testParseSignatureFromCatFileLineFor(t, "Max", "max@gitness.io", "1666401234 +0000", 1666401234, 0) + testParseSignatureFromCatFileLineFor(t, "Max", "randomEmail", "1666401234 -0000", 1666401234, 0) + testParseSignatureFromCatFileLineFor(t, "Max", "max@mm.io", "Fri Sep 23 10:57:49 2022 -0700", 1663955869, -7*60*60) + + // test bad cases + _, err := parseSignatureFromCatFileLine(" 1666401234 -0700") + require.Error(t, err, "no name") + _, err = parseSignatureFromCatFileLine("name 1666401234 -0700") + require.Error(t, err, "no email") + _, err = parseSignatureFromCatFileLine("name ") + require.Error(t, err, "no time") + _, err = parseSignatureFromCatFileLine("name ") + require.Error(t, err, "no time2") + _, err = parseSignatureFromCatFileLine("name 1666050206") + require.Error(t, err, "no timezone with unix") + _, err = parseSignatureFromCatFileLine("name +0800") + require.Error(t, err, "no unix with timezone") + _, err = parseSignatureFromCatFileLine("name 1666050206 0800") + require.Error(t, err, "timezone no sign") + _, err = parseSignatureFromCatFileLine("name 1666050206 +080") + require.Error(t, err, "timezone too short") + _, err = parseSignatureFromCatFileLine("name 1666050206 +00a0") + require.Error(t, err, "timezone invald char") +} + +func testParseSignatureFromCatFileLineFor(t *testing.T, name string, email string, timeAsString string, + expectedTimeUnix int64, expectedTimeOffset int) { + line := fmt.Sprintf("%s <%s> %s", name, email, timeAsString) + s, err := parseSignatureFromCatFileLine(line) + + require.NoError(t, err, line) + require.Equal(t, name, s.Identity.Name, line) + require.Equal(t, email, s.Identity.Email, line) + + // verify time and offset + _, offset := s.When.Zone() + require.Equal(t, expectedTimeUnix, s.When.Unix(), line) + require.Equal(t, expectedTimeOffset, offset, line) +} + +func TestParseTagDataFromCatFile(t *testing.T) { + when, _ := time.Parse(defaultGitTimeLayout, "Fri Sep 23 10:57:49 2022 -0700") + testParseTagDataFromCatFileFor(t, "sha012", types.GitObjectTypeTag, "name1", + 
types.Signature{Identity: types.Identity{Name: "max", Email: "max@mail.com"}, When: when}, + "some message", "some message") + + // test with signature + testParseTagDataFromCatFileFor(t, "sha012", types.GitObjectTypeCommit, "name2", + types.Signature{Identity: types.Identity{Name: "max", Email: "max@mail.com"}, When: when}, + "gpgsig -----BEGIN PGP SIGNATURE-----\n\nw...B\n-----END PGP SIGNATURE-----\n\nsome message", + "some message") +} + +func testParseTagDataFromCatFileFor(t *testing.T, object string, typ types.GitObjectType, name string, + tagger types.Signature, remainder string, expectedMessage string) { + data := fmt.Sprintf( + "object %s\ntype %s\ntag %s\ntagger %s <%s> %s\n%s", + object, string(typ), name, + tagger.Identity.Name, tagger.Identity.Email, tagger.When.Format(defaultGitTimeLayout), + remainder) + res, err := parseTagDataFromCatFile([]byte(data)) + require.NoError(t, err) + + require.Equal(t, name, res.Name, data) + require.Equal(t, object, res.TargetSha, data) + require.Equal(t, typ, res.TargetType, data) + require.Equal(t, expectedMessage, res.Message, data) + require.Equal(t, tagger.Identity.Name, res.Tagger.Identity.Name, data) + require.Equal(t, tagger.Identity.Email, res.Tagger.Identity.Email, data) + require.Equal(t, tagger.When, res.Tagger.When, data) +} diff --git a/gitrpc/internal/gitea/gogit.go b/gitrpc/internal/gitea/gogit.go new file mode 100644 index 0000000000..3bcdfd0259 --- /dev/null +++ b/gitrpc/internal/gitea/gogit.go @@ -0,0 +1,119 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package gitea + +import ( + "context" + "errors" + "fmt" + "os" + "time" + + "github.com/harness/gitness/cache" + "github.com/harness/gitness/gitrpc/internal/types" + + gogitosfs "github.com/go-git/go-billy/v5/osfs" + gogit "github.com/go-git/go-git/v5" + gogitplumbing "github.com/go-git/go-git/v5/plumbing" + gogitcache "github.com/go-git/go-git/v5/plumbing/cache" + gogitobject "github.com/go-git/go-git/v5/plumbing/object" + gogitfilesystem "github.com/go-git/go-git/v5/storage/filesystem" +) + +type GoGitRepoProvider struct { + gitObjectCache cache.Cache[string, *gogitcache.ObjectLRU] +} + +func NewGoGitRepoProvider(objectCacheMax int, cacheDuration time.Duration) *GoGitRepoProvider { + c := cache.New[string, *gogitcache.ObjectLRU](gitObjectCacheGetter{ + maxSize: objectCacheMax, + }, cacheDuration) + return &GoGitRepoProvider{ + gitObjectCache: c, + } +} + +func (gr *GoGitRepoProvider) Get(ctx context.Context, path string) (*gogit.Repository, error) { + fs := gogitosfs.New(path) + stat, err := fs.Stat("") + if err != nil { + if os.IsNotExist(err) { + return nil, types.ErrRepositoryNotFound + } + + return nil, fmt.Errorf("failed to check repository existence: %w", err) + } + if !stat.IsDir() { + return nil, types.ErrRepositoryCorrupted + } + + gitObjectCache, err := gr.gitObjectCache.Get(ctx, path) + if err != nil { + return nil, fmt.Errorf("failed to get repository cache: %w", err) + } + + s := gogitfilesystem.NewStorage(fs, gitObjectCache) + + repo, err := gogit.Open(s, nil) + if err != nil { + return nil, err + } + + return repo, nil +} + +type gitObjectCacheGetter struct { + maxSize int +} + +func (r gitObjectCacheGetter) Find(_ context.Context, _ string) (*gogitcache.ObjectLRU, error) { + return gogitcache.NewObjectLRU(gogitcache.FileSize(r.maxSize)), nil +} + +func (g Adapter) getGoGitCommit(ctx context.Context, + repoPath string, + rev string, +) (*gogit.Repository, *gogitobject.Commit, error) { + repo, err := g.repoProvider.Get(ctx, repoPath) + if err != nil { + return nil, nil, fmt.Errorf("failed to open repository: %w", err) + } + + var refSHA *gogitplumbing.Hash + if rev == "" { + var head *gogitplumbing.Reference + head, err = repo.Head() + if err != nil { + return nil, nil, fmt.Errorf("failed to get head: %w", err) + } + + headHash := head.Hash() + refSHA = &headHash + } else { + refSHA, err = repo.ResolveRevision(gogitplumbing.Revision(rev)) + if errors.Is(err, gogitplumbing.ErrReferenceNotFound) { + return nil, nil, types.ErrNotFound + } else if err != nil { + return nil, nil, fmt.Errorf("failed to resolve revision %s: %w", rev, err) + } + } + + refCommit, err := repo.CommitObject(*refSHA) + if err != nil { + return nil, nil, fmt.Errorf("failed to load commit data: %w", err) + } + + return repo, refCommit, nil +} diff --git a/gitrpc/internal/gitea/last_commit_cache.go b/gitrpc/internal/gitea/last_commit_cache.go new file mode 100644 index 0000000000..77fbfe01b0 --- /dev/null +++ b/gitrpc/internal/gitea/last_commit_cache.go @@ -0,0 +1,170 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
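// A minimal illustrative sketch of the typical wiring of the go-git based repository provider
// defined above; the cache size, TTL and repository path are hypothetical.
//
//	provider := NewGoGitRepoProvider(1000 /* object cache size */, 15*time.Minute /* cache TTL */)
//	repo, err := provider.Get(ctx, "/var/lib/gitness/repos/space/repo.git")
//	// err is types.ErrRepositoryNotFound if the directory is missing, and
//	// types.ErrRepositoryCorrupted if the path exists but is not a directory.
//	_ = repo // *gogit.Repository backed by an object LRU cached per repository path
//
// getGoGitCommit builds on this: an empty rev resolves HEAD, anything else goes through
// repo.ResolveRevision, mapping gogitplumbing.ErrReferenceNotFound to types.ErrNotFound.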
+// See the License for the specific language governing permissions and +// limitations under the License. + +package gitea + +import ( + "context" + "crypto/sha256" + "encoding/gob" + "encoding/hex" + "fmt" + "strings" + "time" + + "github.com/harness/gitness/cache" + "github.com/harness/gitness/gitrpc/internal/types" + + gitea "code.gitea.io/gitea/modules/git" + gogitplumbing "github.com/go-git/go-git/v5/plumbing" + "github.com/go-redis/redis/v8" +) + +func NewInMemoryLastCommitCache( + cacheDuration time.Duration, + repoProvider *GoGitRepoProvider, +) cache.Cache[CommitEntryKey, *types.Commit] { + return cache.New[CommitEntryKey, *types.Commit]( + commitEntryGetter{ + repoProvider: repoProvider, + }, + cacheDuration) +} + +func NewRedisLastCommitCache( + redisClient redis.UniversalClient, + cacheDuration time.Duration, + repoProvider *GoGitRepoProvider, +) cache.Cache[CommitEntryKey, *types.Commit] { + return cache.NewRedis[CommitEntryKey, *types.Commit]( + redisClient, + commitEntryGetter{ + repoProvider: repoProvider, + }, + func(key CommitEntryKey) string { + h := sha256.New() + h.Write([]byte(key)) + return "gitrpc:last_commit:" + hex.EncodeToString(h.Sum(nil)) + }, + commitValueCodec{}, + cacheDuration) +} + +func NoLastCommitCache( + repoProvider *GoGitRepoProvider, +) cache.Cache[CommitEntryKey, *types.Commit] { + return cache.NewNoCache[CommitEntryKey, *types.Commit](commitEntryGetter{repoProvider: repoProvider}) +} + +type CommitEntryKey string + +const commitEntryKeySeparator = "\x00" + +func makeCommitEntryKey(repoPath, commitSHA, path string) CommitEntryKey { + return CommitEntryKey(repoPath + commitEntryKeySeparator + commitSHA + commitEntryKeySeparator + path) +} + +func (c CommitEntryKey) Split() (repoPath, commitSHA, path string) { + parts := strings.Split(string(c), commitEntryKeySeparator) + if len(parts) != 3 { + return + } + + repoPath = parts[0] + commitSHA = parts[1] + path = parts[2] + + return +} + +type commitValueCodec struct{} + +func (c commitValueCodec) Encode(v *types.Commit) string { + buffer := &strings.Builder{} + _ = gob.NewEncoder(buffer).Encode(v) + return buffer.String() +} + +func (c commitValueCodec) Decode(s string) (*types.Commit, error) { + commit := &types.Commit{} + if err := gob.NewDecoder(strings.NewReader(s)).Decode(commit); err != nil { + return nil, fmt.Errorf("failed to unpack commit entry value: %w", err) + } + + return commit, nil +} + +type commitEntryGetter struct { + repoProvider *GoGitRepoProvider +} + +// Find implements the cache.Getter interface. +func (c commitEntryGetter) Find(ctx context.Context, key CommitEntryKey) (*types.Commit, error) { + repoPath, rev, path := key.Split() + + if path == "" { + path = "." 
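// A minimal illustrative sketch of the cache key format used above: the repo path, commit SHA and
// file path are joined with a NUL byte, so ordinary path characters cannot collide with the
// separator. Values are hypothetical.
//
//	key := makeCommitEntryKey("/repos/space/repo.git", "16f267ad4f73", "docs/README.md")
//	repoPath, commitSHA, path := key.Split()
//	// repoPath  == "/repos/space/repo.git"
//	// commitSHA == "16f267ad4f73"
//	// path      == "docs/README.md"
//
// For the Redis-backed cache the key is additionally hashed with SHA-256 and prefixed with
// "gitrpc:last_commit:", and the cached types.Commit is serialised with encoding/gob.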
+ } + + args := []string{"log", "--max-count=1", "--format=%H", rev, "--", path} + commitSHA, _, runErr := gitea.NewCommand(ctx, args...).RunStdString(&gitea.RunOpts{Dir: repoPath}) + if runErr != nil { + return nil, fmt.Errorf("failed to run git: %w", runErr) + } + + commitSHA = strings.TrimSpace(commitSHA) + + if commitSHA == "" { + return nil, types.ErrNotFound + } + + repo, err := c.repoProvider.Get(ctx, repoPath) + if err != nil { + return nil, fmt.Errorf("failed to get repository %s from cache: %w", repoPath, err) + } + + commit, err := repo.CommitObject(gogitplumbing.NewHash(commitSHA)) + if err != nil { + return nil, fmt.Errorf("failed to load commit data: %w", err) + } + + var title string + var message string + + title = commit.Message + if idx := strings.IndexRune(commit.Message, '\n'); idx >= 0 { + title = commit.Message[:idx] + message = commit.Message[idx+1:] + } + + return &types.Commit{ + SHA: commitSHA, + Title: title, + Message: message, + Author: types.Signature{ + Identity: types.Identity{ + Name: commit.Author.Name, + Email: commit.Author.Email, + }, + When: commit.Author.When, + }, + Committer: types.Signature{ + Identity: types.Identity{ + Name: commit.Committer.Name, + Email: commit.Committer.Email, + }, + When: commit.Committer.When, + }, + }, nil +} diff --git a/gitrpc/internal/gitea/mapping.go b/gitrpc/internal/gitea/mapping.go new file mode 100644 index 0000000000..8cc336dde4 --- /dev/null +++ b/gitrpc/internal/gitea/mapping.go @@ -0,0 +1,185 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gitea + +import ( + "errors" + "fmt" + "strings" + + "github.com/harness/gitness/gitrpc/internal/types" + + gitea "code.gitea.io/gitea/modules/git" + gogitfilemode "github.com/go-git/go-git/v5/plumbing/filemode" + "github.com/rs/zerolog/log" +) + +// Logs the error and message, returns either the provided message or a git equivalent if possible. +// Always logs the full message with error as warning. +func processGiteaErrorf(err error, format string, args ...interface{}) error { + // create fallback error returned if we can't map it + fallbackErr := fmt.Errorf(format, args...) + + // always log internal error together with message. + log.Warn().Msgf("%v: [GITEA] %v", fallbackErr, err) + + // check if it's a RunStdError error (contains raw git error) + var runStdErr gitea.RunStdError + if errors.As(err, &runStdErr) { + return mapGiteaRunStdError(runStdErr, fallbackErr) + } + + switch { + // gitea is using errors.New(no such file or directory") exclusively for OpenRepository ... (at least as of now) + case err.Error() == "no such file or directory": + return fmt.Errorf("repository not found: %w", types.ErrNotFound) + case gitea.IsErrNotExist(err): + return types.ErrNotFound + case gitea.IsErrBranchNotExist(err): + return types.ErrNotFound + default: + return fallbackErr + } +} + +// TODO: Improve gitea error handling. +// Doubt this will work for all std errors, as git doesn't seem to have nice error codes. 
+func mapGiteaRunStdError(err gitea.RunStdError, fallback error) error { + switch { + // exit status 128 - fatal: A branch named 'mybranch' already exists. + // exit status 128 - fatal: cannot lock ref 'refs/heads/a': 'refs/heads/a/b' exists; cannot create 'refs/heads/a' + case err.IsExitCode(128) && strings.Contains(err.Stderr(), "exists"): + return types.ErrAlreadyExists + + // exit status 128 - fatal: 'a/bc/d/' is not a valid branch name. + case err.IsExitCode(128) && strings.Contains(err.Stderr(), "not a valid"): + return types.ErrInvalidArgument + + // exit status 1 - error: branch 'mybranch' not found. + case err.IsExitCode(1) && strings.Contains(err.Stderr(), "not found"): + return types.ErrNotFound + + // exit status 128 - fatal: ambiguous argument 'branch1...branch2': unknown revision or path not in the working tree. + case err.IsExitCode(128) && strings.Contains(err.Stderr(), "unknown revision"): + msg := "unknown revision or path not in the working tree" + // parse the error response from git output + lines := strings.Split(err.Error(), "\n") + if len(lines) > 0 { + cols := strings.Split(lines[0], ": ") + if len(cols) >= 2 { + msg = cols[1] + ", " + cols[2] + } + } + return fmt.Errorf("%v err: %w", msg, types.ErrNotFound) + + // exit status 128 - fatal: couldn't find remote ref v1. + case err.IsExitCode(128) && strings.Contains(err.Stderr(), "couldn't find"): + return types.ErrNotFound + + // exit status 128 - fatal: unable to access 'http://127.0.0.1:4101/hvfl1xj5fojwlrw77xjflw80uxjous254jrr967rvj/': + // Failed to connect to 127.0.0.1 port 4101 after 4 ms: Connection refused + case err.IsExitCode(128) && strings.Contains(err.Stderr(), "Failed to connect"): + return types.ErrFailedToConnect + + default: + return fallback + } +} + +func mapGiteaRawRef(raw map[string]string) (map[types.GitReferenceField]string, error) { + res := make(map[types.GitReferenceField]string, len(raw)) + for k, v := range raw { + gitRefField, err := types.ParseGitReferenceField(k) + if err != nil { + return nil, err + } + res[gitRefField] = v + } + + return res, nil +} + +func mapToGiteaReferenceSortingArgument(s types.GitReferenceField, o types.SortOrder) string { + sortBy := string(types.GitReferenceFieldRefName) + desc := o == types.SortOrderDesc + + if s == types.GitReferenceFieldCreatorDate { + sortBy = string(types.GitReferenceFieldCreatorDate) + if o == types.SortOrderDefault { + desc = true + } + } + + if desc { + return "-" + sortBy + } + + return sortBy +} + +func mapGiteaCommit(giteaCommit *gitea.Commit) (*types.Commit, error) { + if giteaCommit == nil { + return nil, fmt.Errorf("gitea commit is nil") + } + + author, err := mapGiteaSignature(giteaCommit.Author) + if err != nil { + return nil, fmt.Errorf("failed to map gitea author: %w", err) + } + committer, err := mapGiteaSignature(giteaCommit.Committer) + if err != nil { + return nil, fmt.Errorf("failed to map gitea commiter: %w", err) + } + return &types.Commit{ + SHA: giteaCommit.ID.String(), + Title: giteaCommit.Summary(), + // remove potential tailing newlines from message + Message: strings.TrimRight(giteaCommit.Message(), "\n"), + Author: author, + Committer: committer, + }, nil +} + +func mapGogitNodeToTreeNodeModeAndType(gogitMode gogitfilemode.FileMode) (types.TreeNodeType, types.TreeNodeMode, error) { + switch gogitMode { + case gogitfilemode.Regular, gogitfilemode.Deprecated: + return types.TreeNodeTypeBlob, types.TreeNodeModeFile, nil + case gogitfilemode.Symlink: + return types.TreeNodeTypeBlob, types.TreeNodeModeSymlink, nil + 
case gogitfilemode.Executable: + return types.TreeNodeTypeBlob, types.TreeNodeModeExec, nil + case gogitfilemode.Submodule: + return types.TreeNodeTypeCommit, types.TreeNodeModeCommit, nil + case gogitfilemode.Dir: + return types.TreeNodeTypeTree, types.TreeNodeModeTree, nil + default: + return types.TreeNodeTypeBlob, types.TreeNodeModeFile, + fmt.Errorf("received unknown tree node mode from gogit: '%s'", gogitMode.String()) + } +} + +func mapGiteaSignature(giteaSignature *gitea.Signature) (types.Signature, error) { + if giteaSignature == nil { + return types.Signature{}, fmt.Errorf("gitea signature is nil") + } + + return types.Signature{ + Identity: types.Identity{ + Name: giteaSignature.Name, + Email: giteaSignature.Email, + }, + When: giteaSignature.When, + }, nil +} diff --git a/gitrpc/internal/gitea/match_files.go b/gitrpc/internal/gitea/match_files.go new file mode 100644 index 0000000000..0dba9183b6 --- /dev/null +++ b/gitrpc/internal/gitea/match_files.go @@ -0,0 +1,102 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gitea + +import ( + "context" + "errors" + "fmt" + "io" + "path" + + "github.com/harness/gitness/gitrpc/internal/types" + + gogitobject "github.com/go-git/go-git/v5/plumbing/object" +) + +func (g Adapter) MatchFiles(ctx context.Context, + repoPath string, + ref string, + dirPath string, + pattern string, + maxSize int, +) ([]types.FileContent, error) { + _, refCommit, err := g.getGoGitCommit(ctx, repoPath, ref) + if err != nil { + return nil, err + } + + tree, err := refCommit.Tree() + if err != nil { + return nil, fmt.Errorf("failed to get tree for the commit: %w", err) + } + + if dirPath != "" { + tree, err = tree.Tree(dirPath) + if errors.Is(err, gogitobject.ErrDirectoryNotFound) { + return nil, &types.PathNotFoundError{Path: dirPath} + } + if err != nil { + return nil, fmt.Errorf("failed to navigate to %s directory: %w", dirPath, err) + } + } + + var files []types.FileContent + for _, fileEntry := range tree.Entries { + ok, err := path.Match(pattern, fileEntry.Name) + if err != nil { + return nil, err + } + if !ok { + continue + } + + name := fileEntry.Name + + f, err := tree.TreeEntryFile(&fileEntry) + if err != nil { + return nil, fmt.Errorf("failed to get tree entry file %s: %w", name, err) + } + + reader, err := f.Reader() + if err != nil { + return nil, fmt.Errorf("failed to open tree entry file %s: %w", name, err) + } + + filePath := path.Join(dirPath, name) + + content, err := func(r io.ReadCloser) ([]byte, error) { + defer func() { + _ = r.Close() + }() + return io.ReadAll(io.LimitReader(reader, int64(maxSize))) + }(reader) + if err != nil { + return nil, fmt.Errorf("failed to read file content %s: %w", name, err) + } + + if len(content) == maxSize { + // skip truncated files + continue + } + + files = append(files, types.FileContent{ + Path: filePath, + Content: content, + }) + } + + return files, nil +} diff --git a/gitrpc/internal/gitea/merge.go b/gitrpc/internal/gitea/merge.go new file mode 100644 
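// A minimal illustrative sketch of the pattern semantics used by MatchFiles above: path.Match is
// applied to the entry name only, and only direct children of the resolved tree are inspected.
// The directory, pattern and size limit are hypothetical.
//
//	files, err := adapter.MatchFiles(ctx, repoPath, "main", ".harness", "*.yaml", 1<<20)
//	// "*.yaml" matches "pipeline.yaml" and "build.yaml" directly inside .harness/, but never
//	// entries of nested directories, because only the entries of that one tree are checked.
//	// Files whose content hits the 1 MiB limit are silently skipped as truncated.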
index 0000000000..df11045c6c --- /dev/null +++ b/gitrpc/internal/gitea/merge.go @@ -0,0 +1,566 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gitea + +import ( + "bufio" + "bytes" + "context" + "errors" + "fmt" + "os" + "os/exec" + "path/filepath" + "strings" + + "github.com/harness/gitness/gitrpc/enum" + "github.com/harness/gitness/gitrpc/internal/tempdir" + "github.com/harness/gitness/gitrpc/internal/types" + + "code.gitea.io/gitea/modules/git" + "github.com/rs/zerolog/log" +) + +// CreateTemporaryRepo creates a temporary repo with "base" for pr.BaseBranch and "tracking" for pr.HeadBranch +// it also create a second base branch called "original_base". +// +//nolint:funlen,gocognit // need refactor +func (g Adapter) CreateTemporaryRepoForPR( + ctx context.Context, + reposTempPath string, + pr *types.PullRequest, + baseBranch string, + trackingBranch string, +) (types.TempRepository, error) { + if pr.BaseRepoPath == "" && pr.HeadRepoPath != "" { + pr.BaseRepoPath = pr.HeadRepoPath + } + + if pr.HeadRepoPath == "" && pr.BaseRepoPath != "" { + pr.HeadRepoPath = pr.BaseRepoPath + } + + if pr.BaseBranch == "" { + return types.TempRepository{}, errors.New("empty base branch") + } + + if pr.HeadBranch == "" { + return types.TempRepository{}, errors.New("empty head branch") + } + + baseRepoPath := pr.BaseRepoPath + headRepoPath := pr.HeadRepoPath + + // Clone base repo. + tmpBasePath, err := tempdir.CreateTemporaryPath(reposTempPath, "pull") + if err != nil { + return types.TempRepository{}, err + } + + if err = g.InitRepository(ctx, tmpBasePath, false); err != nil { + _ = tempdir.RemoveTemporaryPath(tmpBasePath) + return types.TempRepository{}, err + } + + remoteRepoName := "head_repo" + + // Add head repo remote. + addCacheRepo := func(staging, cache string) error { + var f *os.File + alternates := filepath.Join(staging, ".git", "objects", "info", "alternates") + f, err = os.OpenFile(alternates, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0o600) + if err != nil { + return fmt.Errorf("failed to open alternates file '%s': %w", alternates, err) + } + defer f.Close() + data := filepath.Join(cache, "objects") + if _, err = fmt.Fprintln(f, data); err != nil { + return fmt.Errorf("failed to write alternates file '%s': %w", alternates, err) + } + return nil + } + + if err = addCacheRepo(tmpBasePath, baseRepoPath); err != nil { + _ = tempdir.RemoveTemporaryPath(tmpBasePath) + return types.TempRepository{}, + fmt.Errorf("unable to add base repository to temporary repo [%s -> tmpBasePath]: %w", pr.BaseRepoPath, err) + } + + var outbuf, errbuf strings.Builder + if err = git.NewCommand(ctx, "remote", "add", "-t", pr.BaseBranch, "-m", pr.BaseBranch, "origin", baseRepoPath). 
+ Run(&git.RunOpts{ + Dir: tmpBasePath, + Stdout: &outbuf, + Stderr: &errbuf, + }); err != nil { + _ = tempdir.RemoveTemporaryPath(tmpBasePath) + giteaErr := &giteaRunStdError{err: err, stderr: errbuf.String()} + return types.TempRepository{}, processGiteaErrorf(giteaErr, "unable to add base repository as origin "+ + "[%s -> tmpBasePath]:\n%s\n%s", pr.BaseRepoPath, outbuf.String(), errbuf.String()) + } + outbuf.Reset() + errbuf.Reset() + + // Fetch base branch + baseCommit, err := g.GetCommit(ctx, pr.BaseRepoPath, pr.BaseBranch) + if err != nil { + return types.TempRepository{}, fmt.Errorf("failed to get commit of base branch '%s', error: %w", pr.BaseBranch, err) + } + baseID := baseCommit.SHA + if err = git.NewCommand(ctx, "fetch", "origin", "--no-tags", "--", + baseID+":"+baseBranch, baseID+":original_"+baseBranch). + Run(&git.RunOpts{ + Dir: tmpBasePath, + Stdout: &outbuf, + Stderr: &errbuf, + }); err != nil { + _ = tempdir.RemoveTemporaryPath(tmpBasePath) + giteaErr := &giteaRunStdError{err: err, stderr: errbuf.String()} + return types.TempRepository{}, processGiteaErrorf(giteaErr, "unable to fetch origin base branch "+ + "[%s:%s -> base, original_base in tmpBasePath].\n%s\n%s", + pr.BaseRepoPath, pr.BaseBranch, outbuf.String(), errbuf.String()) + } + outbuf.Reset() + errbuf.Reset() + + if err = git.NewCommand(ctx, "symbolic-ref", "HEAD", git.BranchPrefix+baseBranch). + Run(&git.RunOpts{ + Dir: tmpBasePath, + Stdout: &outbuf, + Stderr: &errbuf, + }); err != nil { + _ = tempdir.RemoveTemporaryPath(tmpBasePath) + giteaErr := &giteaRunStdError{err: err, stderr: errbuf.String()} + return types.TempRepository{}, processGiteaErrorf(giteaErr, "unable to set HEAD as base "+ + "branch [tmpBasePath]:\n%s\n%s", outbuf.String(), errbuf.String()) + } + outbuf.Reset() + errbuf.Reset() + + if err = addCacheRepo(tmpBasePath, headRepoPath); err != nil { + _ = tempdir.RemoveTemporaryPath(tmpBasePath) + giteaErr := &giteaRunStdError{err: err, stderr: errbuf.String()} + return types.TempRepository{}, processGiteaErrorf(giteaErr, "unable to head base repository "+ + "to temporary repo [%s -> tmpBasePath]", pr.HeadRepoPath) + } + + if err = git.NewCommand(ctx, "remote", "add", remoteRepoName, headRepoPath). + Run(&git.RunOpts{ + Dir: tmpBasePath, + Stdout: &outbuf, + Stderr: &errbuf, + }); err != nil { + _ = tempdir.RemoveTemporaryPath(tmpBasePath) + giteaErr := &giteaRunStdError{err: err, stderr: errbuf.String()} + return types.TempRepository{}, processGiteaErrorf(giteaErr, "unable to add head repository as head_repo "+ + "[%s -> tmpBasePath]:\n%s\n%s", pr.HeadRepoPath, outbuf.String(), errbuf.String()) + } + outbuf.Reset() + errbuf.Reset() + + headCommit, err := g.GetCommit(ctx, pr.HeadRepoPath, pr.HeadBranch) + if err != nil { + return types.TempRepository{}, fmt.Errorf("failed to get commit of head branch '%s', error: %w", pr.HeadBranch, err) + } + headID := headCommit.SHA + if err = git.NewCommand(ctx, "fetch", "--no-tags", remoteRepoName, headID+":"+trackingBranch). 
+ Run(&git.RunOpts{ + Dir: tmpBasePath, + Stdout: &outbuf, + Stderr: &errbuf, + }); err != nil { + _ = tempdir.RemoveTemporaryPath(tmpBasePath) + giteaErr := &giteaRunStdError{err: err, stderr: errbuf.String()} + return types.TempRepository{}, processGiteaErrorf(giteaErr, "unable to fetch head_repo head branch "+ + "[%s:%s -> tracking in tmpBasePath]:\n%s\n%s", + pr.HeadRepoPath, pr.HeadBranch, outbuf.String(), errbuf.String()) + } + outbuf.Reset() + errbuf.Reset() + + return types.TempRepository{ + Path: tmpBasePath, + BaseSHA: baseID, + HeadSHA: headID, + }, nil +} + +func runMergeCommand( + ctx context.Context, + pr *types.PullRequest, + mergeMethod enum.MergeMethod, + cmd *git.Command, + tmpBasePath string, + env []string, +) error { + var outbuf, errbuf strings.Builder + if err := cmd.Run(&git.RunOpts{ + Dir: tmpBasePath, + Stdout: &outbuf, + Stderr: &errbuf, + Env: env, + }); err != nil { + // Merge will leave a MERGE_HEAD file in the .git folder if there is a conflict + if _, statErr := os.Stat(filepath.Join(tmpBasePath, ".git", "MERGE_HEAD")); statErr == nil { + // We have a merge conflict error + if err = conflictFiles(ctx, pr, env, tmpBasePath, &outbuf); err != nil { + return err + } + return &types.MergeConflictsError{ + Method: mergeMethod, + StdOut: outbuf.String(), + StdErr: errbuf.String(), + Err: err, + } + } else if strings.Contains(errbuf.String(), "refusing to merge unrelated histories") { + return &types.MergeUnrelatedHistoriesError{ + Method: mergeMethod, + StdOut: outbuf.String(), + StdErr: errbuf.String(), + Err: err, + } + } + giteaErr := &giteaRunStdError{err: err, stderr: errbuf.String()} + return processGiteaErrorf(giteaErr, "git merge [%s -> %s]\n%s\n%s", + pr.HeadBranch, pr.BaseBranch, outbuf.String(), errbuf.String()) + } + + return nil +} + +func commitAndSignNoAuthor( + ctx context.Context, + pr *types.PullRequest, + message string, + signArg string, + tmpBasePath string, + env []string, +) error { + var outbuf, errbuf strings.Builder + if signArg == "" { + if err := git.NewCommand(ctx, "commit", "-m", message). + Run(&git.RunOpts{ + Env: env, + Dir: tmpBasePath, + Stdout: &outbuf, + Stderr: &errbuf, + }); err != nil { + return processGiteaErrorf(err, "git commit [%s -> %s]\n%s\n%s", + pr.HeadBranch, pr.BaseBranch, outbuf.String(), errbuf.String()) + } + } else { + if err := git.NewCommand(ctx, "commit", signArg, "-m", message). + Run(&git.RunOpts{ + Env: env, + Dir: tmpBasePath, + Stdout: &outbuf, + Stderr: &errbuf, + }); err != nil { + return processGiteaErrorf(err, "git commit [%s -> %s]\n%s\n%s", + pr.HeadBranch, pr.BaseBranch, outbuf.String(), errbuf.String()) + } + } + return nil +} + +// Merge merges changes between 2 refs (branch, commits or tags). 
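The conflict handling in runMergeCommand above hinges on git behaviour rather than exit codes: a merge that stops on content conflicts leaves .git/MERGE_HEAD behind, and the conflicted paths can then be listed with `git diff --name-only --diff-filter=U`. The following is a minimal standalone sketch of the same check using os/exec instead of the gitea command wrapper; the repository path and branch name in main are placeholders.

package main

import (
	"bytes"
	"fmt"
	"os"
	"os/exec"
	"path/filepath"
	"strings"
)

// tryMerge runs a no-commit merge of branch into the checked-out branch of
// repoPath and reports whether the failure was a content conflict.
func tryMerge(repoPath, branch string) (conflicted bool, files []string, err error) {
	var stderr bytes.Buffer
	cmd := exec.Command("git", "merge", "--no-ff", "--no-commit", branch)
	cmd.Dir = repoPath
	cmd.Stderr = &stderr

	if runErr := cmd.Run(); runErr != nil {
		// A conflicted merge leaves MERGE_HEAD in the .git directory.
		if _, statErr := os.Stat(filepath.Join(repoPath, ".git", "MERGE_HEAD")); statErr == nil {
			out, _ := exec.Command("git", "-C", repoPath,
				"diff", "--name-only", "--diff-filter=U").Output()
			for _, f := range strings.Split(strings.TrimSpace(string(out)), "\n") {
				if f != "" {
					files = append(files, f)
				}
			}
			return true, files, nil
		}
		return false, nil, fmt.Errorf("merge failed: %w, stderr: %s", runErr, stderr.String())
	}
	return false, nil, nil
}

func main() {
	conflicted, files, err := tryMerge("/tmp/repo", "feature")
	fmt.Println(conflicted, files, err)
}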
+// +//nolint:gocognit,nestif +func (g Adapter) Merge( + ctx context.Context, + pr *types.PullRequest, + mergeMethod enum.MergeMethod, + baseBranch string, + trackingBranch string, + tmpBasePath string, + mergeMsg string, + env []string, + identity *types.Identity, +) error { + var ( + outbuf, errbuf strings.Builder + ) + + if mergeMsg == "" { + mergeMsg = "Merge commit" + } + + stagingBranch := "staging" + // TODO: sign merge commit + signArg := "--no-gpg-sign" + + switch mergeMethod { + case enum.MergeMethodMerge: + cmd := git.NewCommand(ctx, "merge", "--no-ff", "--no-commit", trackingBranch) + if err := runMergeCommand(ctx, pr, mergeMethod, cmd, tmpBasePath, env); err != nil { + return fmt.Errorf("unable to merge tracking into base: %w", err) + } + + if err := commitAndSignNoAuthor(ctx, pr, mergeMsg, signArg, tmpBasePath, env); err != nil { + return fmt.Errorf("unable to make final commit: %w", err) + } + case enum.MergeMethodSquash: + // Merge with squash + cmd := git.NewCommand(ctx, "merge", "--squash", trackingBranch) + if err := runMergeCommand(ctx, pr, mergeMethod, cmd, tmpBasePath, env); err != nil { + return fmt.Errorf("unable to merge --squash tracking into base: %w", err) + } + + if signArg == "" { + if err := git.NewCommand(ctx, "commit", fmt.Sprintf("--author='%s'", identity.String()), "-m", mergeMsg). + Run(&git.RunOpts{ + Env: env, + Dir: tmpBasePath, + Stdout: &outbuf, + Stderr: &errbuf, + }); err != nil { + return processGiteaErrorf(err, "git commit [%s -> %s]\n%s\n%s", + pr.HeadBranch, pr.BaseBranch, outbuf.String(), errbuf.String()) + } + } else { + if err := git.NewCommand(ctx, "commit", signArg, fmt.Sprintf("--author='%s'", identity.String()), "-m", mergeMsg). + Run(&git.RunOpts{ + Env: env, + Dir: tmpBasePath, + Stdout: &outbuf, + Stderr: &errbuf, + }); err != nil { + return processGiteaErrorf(err, "git commit [%s -> %s]\n%s\n%s", + pr.HeadBranch, pr.BaseBranch, outbuf.String(), errbuf.String()) + } + } + case enum.MergeMethodRebase: + // Checkout head branch + if err := git.NewCommand(ctx, "checkout", "-b", stagingBranch, trackingBranch). + Run(&git.RunOpts{ + Dir: tmpBasePath, + Stdout: &outbuf, + Stderr: &errbuf, + }); err != nil { + return fmt.Errorf( + "git checkout base prior to merge post staging rebase [%s -> %s]: %w\n%s\n%s", + pr.HeadBranch, pr.BaseBranch, err, outbuf.String(), errbuf.String(), + ) + } + outbuf.Reset() + errbuf.Reset() + + // Rebase before merging + if err := git.NewCommand(ctx, "rebase", baseBranch). 
+ Run(&git.RunOpts{ + Dir: tmpBasePath, + Stdout: &outbuf, + Stderr: &errbuf, + }); err != nil { + // Rebase will leave a REBASE_HEAD file in .git if there is a conflict + if _, statErr := os.Stat(filepath.Join(tmpBasePath, ".git", "REBASE_HEAD")); statErr == nil { + var commitSha string + + // TBD git version we will support + // failingCommitPath := filepath.Join(tmpBasePath, ".git", "rebase-apply", "original-commit") // Git < 2.26 + // if _, cpErr := os.Stat(failingCommitPath); statErr != nil { + // return fmt.Errorf("git rebase staging on to base [%s -> %s]: %v\n%s\n%s", + // pr.HeadBranch, pr.BaseBranch, cpErr, outbuf.String(), errbuf.String()) + // } + + failingCommitPath := filepath.Join(tmpBasePath, ".git", "rebase-merge", "stopped-sha") // Git >= 2.26 + if _, cpErr := os.Stat(failingCommitPath); cpErr != nil { + return fmt.Errorf( + "git rebase staging on to base [%s -> %s]: %w\n%s\n%s", + pr.HeadBranch, pr.BaseBranch, cpErr, outbuf.String(), errbuf.String(), + ) + } + + commitShaBytes, readErr := os.ReadFile(failingCommitPath) + if readErr != nil { + // Abandon this attempt to handle the error + return fmt.Errorf( + "git rebase staging on to base [%s -> %s]: %w\n%s\n%s", + pr.HeadBranch, pr.BaseBranch, readErr, outbuf.String(), errbuf.String(), + ) + } + commitSha = strings.TrimSpace(string(commitShaBytes)) + + log.Debug().Msgf("RebaseConflict at %s [%s -> %s]: %v\n%s\n%s", + commitSha, pr.HeadBranch, pr.BaseBranch, err, outbuf.String(), errbuf.String(), + ) + return &types.MergeConflictsError{ + Method: mergeMethod, + CommitSHA: commitSha, + StdOut: outbuf.String(), + StdErr: errbuf.String(), + Err: err, + } + } + return fmt.Errorf( + "git rebase staging on to base [%s -> %s]: %w\n%s\n%s", + pr.HeadBranch, pr.BaseBranch, err, outbuf.String(), errbuf.String(), + ) + } + outbuf.Reset() + errbuf.Reset() + + // Checkout base branch again + if err := git.NewCommand(ctx, "checkout", baseBranch). + Run(&git.RunOpts{ + Dir: tmpBasePath, + Stdout: &outbuf, + Stderr: &errbuf, + }); err != nil { + return fmt.Errorf( + "git checkout base prior to merge post staging rebase [%s -> %s]: %w\n%s\n%s", + pr.HeadBranch, pr.BaseBranch, err, outbuf.String(), errbuf.String(), + ) + } + outbuf.Reset() + errbuf.Reset() + + cmd := git.NewCommand(ctx, "merge", "--ff-only", stagingBranch) + + // Prepare merge with commit + if err := runMergeCommand(ctx, pr, mergeMethod, cmd, tmpBasePath, env); err != nil { + return err + } + default: + return fmt.Errorf("wrong merge method provided: %s", mergeMethod) + } + + return nil +} + +func conflictFiles(ctx context.Context, + pr *types.PullRequest, + env []string, + repoPath string, + buf *strings.Builder, +) error { + stdout, stderr, cferr := git.NewCommand( + ctx, "diff", "--name-only", "--diff-filter=U", "--relative", + ).RunStdString(&git.RunOpts{ + Env: env, + Dir: repoPath, + }) + if cferr != nil { + return processGiteaErrorf(cferr, "failed to list conflict files [%s -> %s], stderr: %v, err: %v", + pr.HeadBranch, pr.BaseBranch, stderr, cferr) + } + if len(stdout) > 0 { + buf.Reset() + buf.WriteString(stdout) + } + return nil +} + +func (g Adapter) GetDiffTree(ctx context.Context, repoPath, baseBranch, headBranch string) (string, error) { + getDiffTreeFromBranch := func(repoPath, baseBranch, headBranch string) (string, error) { + var outbuf, errbuf strings.Builder + if err := git.NewCommand(ctx, "diff-tree", "--no-commit-id", + "--name-only", "-r", "-z", "--root", baseBranch, headBranch, "--"). 
+ Run(&git.RunOpts{ + Dir: repoPath, + Stdout: &outbuf, + Stderr: &errbuf, + }); err != nil { + giteaErr := &giteaRunStdError{err: err, stderr: errbuf.String()} + return "", processGiteaErrorf(giteaErr, "git diff-tree [%s base:%s head:%s]: %s", + repoPath, baseBranch, headBranch, errbuf.String()) + } + return outbuf.String(), nil + } + + scanNullTerminatedStrings := func(data []byte, atEOF bool) (advance int, token []byte, err error) { + if atEOF && len(data) == 0 { + return 0, nil, nil + } + if i := bytes.IndexByte(data, '\x00'); i >= 0 { + return i + 1, data[0:i], nil + } + if atEOF { + return len(data), data, nil + } + return 0, nil, nil + } + + list, err := getDiffTreeFromBranch(repoPath, baseBranch, headBranch) + if err != nil { + return "", err + } + + // Prefixing '/' for each entry, otherwise all files with the same name in subdirectories would be matched. + out := bytes.Buffer{} + scanner := bufio.NewScanner(strings.NewReader(list)) + scanner.Split(scanNullTerminatedStrings) + for scanner.Scan() { + filepath := scanner.Text() + // escape '*', '?', '[', spaces and '!' prefix + filepath = escapedSymbols.ReplaceAllString(filepath, `\$1`) + // no necessary to escape the first '#' symbol because the first symbol is '/' + fmt.Fprintf(&out, "/%s\n", filepath) + } + + return out.String(), nil +} + +// GetMergeBase checks and returns merge base of two branches and the reference used as base. +func (g Adapter) GetMergeBase(ctx context.Context, repoPath, remote, base, head string) (string, string, error) { + if remote == "" { + remote = "origin" + } + + if remote != "origin" { + tmpBaseName := git.RemotePrefix + remote + "/tmp_" + base + // Fetch commit into a temporary branch in order to be able to handle commits and tags + _, _, err := git.NewCommand(ctx, "fetch", "--no-tags", remote, "--", + base+":"+tmpBaseName).RunStdString(&git.RunOpts{Dir: repoPath}) + if err == nil { + base = tmpBaseName + } + } + + stdout, _, err := git.NewCommand(ctx, "merge-base", "--", base, head).RunStdString(&git.RunOpts{Dir: repoPath}) + if err != nil { + return "", "", processGiteaErrorf(err, "failed to get merge-base") + } + + return strings.TrimSpace(stdout), base, nil +} + +// giteaRunStdError is an implementation of the RunStdError interface in the gitea codebase. +// It allows us to process gitea errors even when using cmd.Run() instead of cmd.RunStdString() or run.StdBytes(). +// TODO: solve this nicer once we have proper gitrpc error handling. +type giteaRunStdError struct { + err error + stderr string +} + +func (e *giteaRunStdError) Error() string { + return fmt.Sprintf("failed with %s, error output: %s", e.err, e.stderr) +} + +func (e *giteaRunStdError) Unwrap() error { + return e.err +} + +func (e *giteaRunStdError) Stderr() string { + return e.stderr +} + +func (e *giteaRunStdError) IsExitCode(code int) bool { + var exitError *exec.ExitError + if errors.As(e.err, &exitError) { + return exitError.ExitCode() == code + } + return false +} diff --git a/gitrpc/internal/gitea/paths_details.go b/gitrpc/internal/gitea/paths_details.go new file mode 100644 index 0000000000..c15ff93aea --- /dev/null +++ b/gitrpc/internal/gitea/paths_details.go @@ -0,0 +1,81 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gitea + +import ( + "context" + "errors" + "fmt" + + "github.com/harness/gitness/gitrpc/internal/types" + + gogitplumbing "github.com/go-git/go-git/v5/plumbing" + gogitfilemode "github.com/go-git/go-git/v5/plumbing/filemode" + gogitobject "github.com/go-git/go-git/v5/plumbing/object" +) + +// PathsDetails returns additional details about provided the paths. +func (g Adapter) PathsDetails(ctx context.Context, + repoPath string, + ref string, + paths []string, +) ([]types.PathDetails, error) { + repo, refCommit, err := g.getGoGitCommit(ctx, repoPath, ref) + if err != nil { + return nil, err + } + + refSHA := refCommit.Hash.String() + + tree, err := refCommit.Tree() + if err != nil { + return nil, fmt.Errorf("failed to get tree for the commit: %w", err) + } + + results := make([]types.PathDetails, len(paths)) + + for i, path := range paths { + results[i].Path = path + + if len(path) > 0 { + entry, err := tree.FindEntry(path) + if errors.Is(err, gogitobject.ErrDirectoryNotFound) || errors.Is(err, gogitobject.ErrEntryNotFound) { + return nil, &types.PathNotFoundError{Path: path} + } + if err != nil { + return nil, fmt.Errorf("failed to find path entry %s: %w", path, err) + } + + if entry.Mode == gogitfilemode.Regular || entry.Mode == gogitfilemode.Executable { + blobObj, err := repo.Object(gogitplumbing.BlobObject, entry.Hash) + if err != nil { + return nil, fmt.Errorf("failed to get blob object size for the path %s and hash %s: %w", + path, entry.Hash.String(), err) + } + + results[i].Size = blobObj.(*gogitobject.Blob).Size + } + } + + commitEntry, err := g.lastCommitCache.Get(ctx, makeCommitEntryKey(repoPath, refSHA, path)) + if err != nil { + return nil, fmt.Errorf("failed to find last commit for path %s: %w", path, err) + } + + results[i].LastCommit = commitEntry + } + + return results, nil +} diff --git a/gitrpc/internal/gitea/ref.go b/gitrpc/internal/gitea/ref.go new file mode 100644 index 0000000000..76bb0ec2d7 --- /dev/null +++ b/gitrpc/internal/gitea/ref.go @@ -0,0 +1,188 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
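PathsDetails above resolves every requested path against the commit tree and reads the blob size only for regular and executable files; the last-commit information comes from a cache (lastCommitCache, makeCommitEntryKey) defined elsewhere in the adapter. Below is a reduced sketch of just the tree and blob lookup using go-git directly, with the repository path, ref and file path in main as placeholders.

package main

import (
	"fmt"

	gogit "github.com/go-git/go-git/v5"
	"github.com/go-git/go-git/v5/plumbing"
	"github.com/go-git/go-git/v5/plumbing/filemode"
)

// pathSize resolves ref to a commit and returns the blob size of path,
// provided the path is a regular or executable file.
func pathSize(repoPath, ref, path string) (int64, error) {
	repo, err := gogit.PlainOpen(repoPath)
	if err != nil {
		return 0, err
	}
	hash, err := repo.ResolveRevision(plumbing.Revision(ref))
	if err != nil {
		return 0, err
	}
	commit, err := repo.CommitObject(*hash)
	if err != nil {
		return 0, err
	}
	tree, err := commit.Tree()
	if err != nil {
		return 0, err
	}
	entry, err := tree.FindEntry(path)
	if err != nil {
		return 0, fmt.Errorf("failed to find path entry %s: %w", path, err)
	}
	if entry.Mode != filemode.Regular && entry.Mode != filemode.Executable {
		return 0, fmt.Errorf("%s is not a file", path)
	}
	blob, err := repo.BlobObject(entry.Hash)
	if err != nil {
		return 0, err
	}
	return blob.Size, nil
}

func main() {
	size, err := pathSize(".", "HEAD", "README.md")
	fmt.Println(size, err)
}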
+ +package gitea + +import ( + "context" + "fmt" + "io" + "math" + "strings" + + "github.com/harness/gitness/gitrpc/internal/types" + + gitea "code.gitea.io/gitea/modules/git" + gitearef "code.gitea.io/gitea/modules/git/foreachref" +) + +func DefaultInstructor(_ types.WalkReferencesEntry) (types.WalkInstruction, error) { + return types.WalkInstructionHandle, nil +} + +// WalkReferences uses the provided options to filter the available references of the repo, +// and calls the handle function for every matching node. +// The instructor & handler are called with a map that contains the matching value for every field provided in fields. +// TODO: walkGiteaReferences related code should be moved to separate file. +func (g Adapter) WalkReferences(ctx context.Context, + repoPath string, handler types.WalkReferencesHandler, opts *types.WalkReferencesOptions) error { + // backfil optional options + if opts.Instructor == nil { + opts.Instructor = DefaultInstructor + } + if len(opts.Fields) == 0 { + opts.Fields = []types.GitReferenceField{types.GitReferenceFieldRefName, types.GitReferenceFieldObjectName} + } + if opts.MaxWalkDistance <= 0 { + opts.MaxWalkDistance = math.MaxInt32 + } + if opts.Patterns == nil { + opts.Patterns = []string{} + } + if string(opts.Sort) == "" { + opts.Sort = types.GitReferenceFieldRefName + } + + // prepare for-each-ref input + sortArg := mapToGiteaReferenceSortingArgument(opts.Sort, opts.Order) + rawFields := make([]string, len(opts.Fields)) + for i := range opts.Fields { + rawFields[i] = string(opts.Fields[i]) + } + giteaFormat := gitearef.NewFormat(rawFields...) + + // initializer pipeline for output processing + pipeOut, pipeIn := io.Pipe() + defer pipeOut.Close() + defer pipeIn.Close() + stderr := strings.Builder{} + rc := &gitea.RunOpts{Dir: repoPath, Stdout: pipeIn, Stderr: &stderr} + + go func() { + // create array for args as patterns have to be passed as separate args. + args := []string{ + "for-each-ref", + "--format", + giteaFormat.Flag(), + "--sort", + sortArg, + "--count", + fmt.Sprint(opts.MaxWalkDistance), + "--ignore-case", + } + args = append(args, opts.Patterns...) + err := gitea.NewCommand(ctx, args...).Run(rc) + if err != nil { + _ = pipeIn.CloseWithError(gitea.ConcatenateError(err, stderr.String())) + } else { + _ = pipeIn.Close() + } + }() + + // TODO: return error from git command!!!! + + parser := giteaFormat.Parser(pipeOut) + return walkGiteaReferenceParser(parser, handler, opts) +} + +func walkGiteaReferenceParser(parser *gitearef.Parser, handler types.WalkReferencesHandler, + opts *types.WalkReferencesOptions) error { + for i := int32(0); i < opts.MaxWalkDistance; i++ { + // parse next line - nil if end of output reached or an error occurred. + rawRef := parser.Next() + if rawRef == nil { + break + } + + // convert to correct map. + ref, err := mapGiteaRawRef(rawRef) + if err != nil { + return err + } + + // check with the instructor on the next instruction. + instruction, err := opts.Instructor(ref) + if err != nil { + return fmt.Errorf("error getting instruction: %w", err) + } + + if instruction == types.WalkInstructionSkip { + continue + } + if instruction == types.WalkInstructionStop { + break + } + + // otherwise handle the reference. 
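WalkReferences drives `git for-each-ref` with a generated format string and streams the output through gitea's foreachref parser, as shown above. The sketch below strips that down to a blocking call with a hand-written, tab-separated format; the pattern and repository path are placeholders and no instructor/handler indirection is modelled.

package main

import (
	"bufio"
	"fmt"
	"os/exec"
	"strings"
)

// listRefs returns refname -> object name for refs matching pattern,
// using a tab-separated for-each-ref format.
func listRefs(repoPath, pattern string) (map[string]string, error) {
	cmd := exec.Command("git", "-C", repoPath, "for-each-ref",
		"--format=%(refname)%09%(objectname)",
		"--sort=refname", "--ignore-case", pattern)
	out, err := cmd.Output()
	if err != nil {
		return nil, fmt.Errorf("for-each-ref failed: %w", err)
	}

	refs := map[string]string{}
	scanner := bufio.NewScanner(strings.NewReader(string(out)))
	for scanner.Scan() {
		name, sha, ok := strings.Cut(scanner.Text(), "\t")
		if !ok {
			continue
		}
		refs[name] = sha
	}
	return refs, scanner.Err()
}

func main() {
	refs, err := listRefs(".", "refs/heads/*")
	fmt.Println(refs, err)
}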
+ err = handler(ref) + if err != nil { + return fmt.Errorf("error handling reference: %w", err) + } + } + + if err := parser.Err(); err != nil { + return processGiteaErrorf(err, "failed to parse reference walk output") + } + + return nil +} + +// GetRef get's the target of a reference +// IMPORTANT provide full reference name to limit risk of collisions across reference types +// (e.g `refs/heads/main` instead of `main`). +func (g Adapter) GetRef(ctx context.Context, repoPath, reference string) (string, error) { + cmd := gitea.NewCommand(ctx, "show-ref", "--verify", "-s", "--", reference) + stdout, _, err := cmd.RunStdString(&gitea.RunOpts{ + Dir: repoPath, + }) + if err != nil { + if err.IsExitCode(128) && strings.Contains(err.Stderr(), "not a valid ref") { + return "", types.ErrNotFound + } + return "", err + } + + return strings.TrimSpace(stdout), nil +} + +// UpdateRef allows to update / create / delete references +// IMPORTANT provide full reference name to limit risk of collisions across reference types +// (e.g `refs/heads/main` instead of `main`). +func (g Adapter) UpdateRef(ctx context.Context, + repoPath, reference, newValue, oldValue string, +) error { + args := make([]string, 0, 4) + args = append(args, "update-ref") + if newValue == "" { + // if newvalue is empty, delete ref + args = append(args, "-d", reference) + } else { + args = append(args, reference, newValue) + } + + // if an old value was provided, verify it matches. + if oldValue != "" { + args = append(args, oldValue) + } + + cmd := gitea.NewCommand(ctx, args...) + _, _, err := cmd.RunStdString(&gitea.RunOpts{ + Dir: repoPath, + }) + if err != nil { + return processGiteaErrorf(err, "update-ref failed") + } + + return nil +} diff --git a/gitrpc/internal/gitea/repo.go b/gitrpc/internal/gitea/repo.go new file mode 100644 index 0000000000..e61ee61b03 --- /dev/null +++ b/gitrpc/internal/gitea/repo.go @@ -0,0 +1,285 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gitea + +import ( + "context" + "fmt" + "regexp" + "strings" + "time" + + "github.com/harness/gitness/gitrpc/internal/types" + + gitea "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/util" +) + +var lsRemoteHeadRegexp = regexp.MustCompile(`ref: refs/heads/([^\s]+)\s+HEAD`) + +// InitRepository initializes a new Git repository. +func (g Adapter) InitRepository(ctx context.Context, repoPath string, bare bool) error { + return gitea.InitRepository(ctx, repoPath, bare) +} + +// SetDefaultBranch sets the default branch of a repo. +func (g Adapter) SetDefaultBranch(ctx context.Context, repoPath string, + defaultBranch string, allowEmpty bool) error { + giteaRepo, err := gitea.OpenRepository(ctx, repoPath) + if err != nil { + return processGiteaErrorf(err, "failed to open repository") + } + defer giteaRepo.Close() + + // if requested, error out if branch doesn't exist. Otherwise, blindly set it. 
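UpdateRef above turns an optional old value into a compare-and-swap: `git update-ref <ref> <new> <old>` only succeeds if the ref still points at <old>, and `git update-ref -d <ref> <old>` guards deletion the same way, which makes concurrent ref updates fail loudly instead of silently overwriting each other. A small standalone sketch of that guarded update follows; the ref name and SHAs in main are placeholder values.

package main

import (
	"fmt"
	"os/exec"
)

// updateRef creates, updates, or deletes a reference. An empty newValue deletes
// the ref; a non-empty oldValue makes git verify the current value first, so a
// concurrent update causes the command to fail instead of silently overwriting.
func updateRef(repoPath, ref, newValue, oldValue string) error {
	args := []string{"-C", repoPath, "update-ref"}
	if newValue == "" {
		args = append(args, "-d", ref)
	} else {
		args = append(args, ref, newValue)
	}
	if oldValue != "" {
		args = append(args, oldValue)
	}

	if out, err := exec.Command("git", args...).CombinedOutput(); err != nil {
		return fmt.Errorf("update-ref failed: %w: %s", err, out)
	}
	return nil
}

func main() {
	// Move refs/heads/main to the new SHA only if it still points at the old SHA (placeholder values).
	err := updateRef("/tmp/repo", "refs/heads/main",
		"1111111111111111111111111111111111111111",
		"2222222222222222222222222222222222222222")
	fmt.Println(err)
}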
+ if !allowEmpty && !giteaRepo.IsBranchExist(defaultBranch) { + // TODO: ensure this returns not found error to caller + return fmt.Errorf("branch '%s' does not exist", defaultBranch) + } + + // change default branch + err = giteaRepo.SetDefaultBranch(defaultBranch) + if err != nil { + return processGiteaErrorf(err, "failed to set new default branch") + } + + return nil +} + +// GetDefaultBranch gets the default branch of a repo. +func (g Adapter) GetDefaultBranch(ctx context.Context, repoPath string) (string, error) { + giteaRepo, err := gitea.OpenRepository(ctx, repoPath) + if err != nil { + return "", processGiteaErrorf(err, "failed to open gitea repo") + } + defer giteaRepo.Close() + + // get default branch + branch, err := giteaRepo.GetDefaultBranch() + if err != nil { + return "", processGiteaErrorf(err, "failed to get default branch") + } + + return branch, nil +} + +// GetRemoteDefaultBranch retrieves the default branch of a remote repository. +// If the repo doesn't have a default branch, types.ErrNoDefaultBranch is returned. +func (g Adapter) GetRemoteDefaultBranch(ctx context.Context, remoteURL string) (string, error) { + args := []string{ + "-c", "credential.helper=", + "ls-remote", + "--symref", + "-q", + remoteURL, + "HEAD", + } + + cmd := gitea.NewCommand(ctx, args...) + stdOut, _, err := cmd.RunStdString(nil) + if err != nil { + return "", processGiteaErrorf(err, "failed to ls remote repo") + } + + // git output looks as follows, and we are looking for the ref that HEAD points to + // ref: refs/heads/main HEAD + // 46963bc7f0b5e8c5f039d50ac9e6e51933c78cdf HEAD + match := lsRemoteHeadRegexp.FindStringSubmatch(stdOut) + if match == nil { + return "", types.ErrNoDefaultBranch + } + + return match[1], nil +} + +func (g Adapter) Clone(ctx context.Context, from, to string, opts types.CloneRepoOptions) error { + err := gitea.Clone(ctx, from, to, gitea.CloneRepoOptions{ + Timeout: opts.Timeout, + Mirror: opts.Mirror, + Bare: opts.Bare, + Quiet: opts.Quiet, + Branch: opts.Branch, + Shared: opts.Shared, + NoCheckout: opts.NoCheckout, + Depth: opts.Depth, + Filter: opts.Filter, + SkipTLSVerify: opts.SkipTLSVerify, + }) + if err != nil { + return processGiteaErrorf(err, "failed to clone repo") + } + + return nil +} + +// Sync synchronizes the repository to match the provided source. +// NOTE: This is a read operation and doesn't trigger any server side hooks. +func (g Adapter) Sync(ctx context.Context, repoPath string, remoteURL string) error { + args := []string{ + "-c", "advice.fetchShowForcedUpdates=false", + "-c", "credential.helper=", + "fetch", + "--quiet", + "--prune", + "--atomic", + "--force", + "--no-write-fetch-head", + "--no-show-forced-updates", + remoteURL, + "+refs/*:refs/*", + } + + cmd := gitea.NewCommand(ctx, args...) + _, _, err := cmd.RunStdString(&gitea.RunOpts{ + Dir: repoPath, + UseContextTimeout: true, + }) + if err != nil { + return processGiteaErrorf(err, "failed to sync repo") + } + + return nil +} + +func (g Adapter) AddFiles(repoPath string, all bool, files ...string) error { + err := gitea.AddChanges(repoPath, all, files...) + if err != nil { + return processGiteaErrorf(err, "failed to add changes") + } + + return nil +} + +// Commit commits the changes of the repository. +// NOTE: Modification of gitea implementation that supports commiter_date + author_date. 
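The Commit method that follows controls identities and timestamps purely through environment variables, which is what allows the committer date and author date to be set independently of each other. A minimal sketch of the same approach with os/exec; the names and email addresses are placeholder identities.

package main

import (
	"fmt"
	"os/exec"
	"time"
)

// commitWithIdentities commits staged changes in repoPath with explicit author
// and committer identities and dates, passed via git's environment variables.
func commitWithIdentities(repoPath, message string, when time.Time) error {
	cmd := exec.Command("git", "-C", repoPath, "commit", "-m", message)
	cmd.Env = append(cmd.Environ(),
		"GIT_AUTHOR_NAME=Jane Doe",
		"GIT_AUTHOR_EMAIL=jane@example.com",
		"GIT_AUTHOR_DATE="+when.Format(time.RFC3339),
		"GIT_COMMITTER_NAME=Gitness Service",
		"GIT_COMMITTER_EMAIL=service@example.com",
		"GIT_COMMITTER_DATE="+when.Format(time.RFC3339),
	)
	if out, err := cmd.CombinedOutput(); err != nil {
		return fmt.Errorf("git commit failed: %w: %s", err, out)
	}
	return nil
}

func main() {
	fmt.Println(commitWithIdentities("/tmp/repo", "update docs", time.Now()))
}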
+func (g Adapter) Commit(ctx context.Context, repoPath string, opts types.CommitChangesOptions) error { + // setup environment variables used by git-commit + // See https://git-scm.com/book/en/v2/Git-Internals-Environment-Variables + env := []string{ + "GIT_AUTHOR_NAME=" + opts.Author.Identity.Name, + "GIT_AUTHOR_EMAIL=" + opts.Author.Identity.Email, + "GIT_AUTHOR_DATE=" + opts.Author.When.Format(time.RFC3339), + "GIT_COMMITTER_NAME=" + opts.Committer.Identity.Name, + "GIT_COMMITTER_EMAIL=" + opts.Committer.Identity.Email, + "GIT_COMMITTER_DATE=" + opts.Committer.When.Format(time.RFC3339), + } + + args := []string{ + "commit", + "-m", + opts.Message, + } + + _, _, err := gitea.NewCommand(ctx, args...).RunStdString(&gitea.RunOpts{Dir: repoPath, Env: env}) + // No stderr but exit status 1 means nothing to commit (see gitea CommitChanges) + if err != nil && err.Error() != "exit status 1" { + return processGiteaErrorf(err, "failed to commit changes") + } + return nil +} + +func (g Adapter) Push(ctx context.Context, repoPath string, opts types.PushOptions) error { + err := Push(ctx, repoPath, opts) + if err != nil { + return processGiteaErrorf(err, "failed to push changes") + } + + return nil +} + +// Push pushs local commits to given remote branch. +// NOTE: Modification of gitea implementation that supports --force-with-lease. +// TODOD: return our own error types and move to above adapter.Push method +func Push(ctx context.Context, repoPath string, opts types.PushOptions) error { + cmd := gitea.NewCommand(ctx, + "-c", "credential.helper=", + "push", + ) + if opts.Force { + cmd.AddArguments("-f") + } + if opts.ForceWithLease != "" { + cmd.AddArguments(fmt.Sprintf("--force-with-lease=%s", opts.ForceWithLease)) + } + if opts.Mirror == true { + cmd.AddArguments("--mirror") + } + cmd.AddArguments("--", opts.Remote) + + if len(opts.Branch) > 0 { + cmd.AddArguments(opts.Branch) + } + + // remove credentials if there are any + logRemote := opts.Remote + if strings.Contains(logRemote, "://") && strings.Contains(logRemote, "@") { + logRemote = util.SanitizeCredentialURLs(logRemote) + } + cmd.SetDescription( + fmt.Sprintf( + "pushing %s to %s (Force: %t, ForceWithLease: %s)", + opts.Branch, + logRemote, + opts.Force, + opts.ForceWithLease, + ), + ) + var outbuf, errbuf strings.Builder + + if opts.Timeout == 0 { + opts.Timeout = -1 + } + + err := cmd.Run(&gitea.RunOpts{ + Env: opts.Env, + Timeout: opts.Timeout, + Dir: repoPath, + Stdout: &outbuf, + Stderr: &errbuf, + }) + if err != nil { + switch { + case strings.Contains(errbuf.String(), "non-fast-forward"): + return &gitea.ErrPushOutOfDate{ + StdOut: outbuf.String(), + StdErr: errbuf.String(), + Err: err, + } + case strings.Contains(errbuf.String(), "! [remote rejected]"): + err := &gitea.ErrPushRejected{ + StdOut: outbuf.String(), + StdErr: errbuf.String(), + Err: err, + } + err.GenerateMessage() + return err + case strings.Contains(errbuf.String(), "matches more than one"): + err := &gitea.ErrMoreThanOne{ + StdOut: outbuf.String(), + StdErr: errbuf.String(), + Err: err, + } + return err + default: + // fall through to normal error handling + } + } + + if errbuf.Len() > 0 && err != nil { + return fmt.Errorf("%w - %s", err, errbuf.String()) + } + + return err +} diff --git a/gitrpc/internal/gitea/submodule.go b/gitrpc/internal/gitea/submodule.go new file mode 100644 index 0000000000..7e78d670ab --- /dev/null +++ b/gitrpc/internal/gitea/submodule.go @@ -0,0 +1,52 @@ +// Copyright 2023 Harness, Inc. 
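Push above maps `--force-with-lease` and a few stderr patterns (non-fast-forward, remote rejected) onto typed errors. The lease is what makes a forced push safe against losing concurrent updates: the remote ref is only rewritten if it still points where the pusher last saw it. A condensed sketch of that flow follows; the remote, branch and lease commit in main are placeholders, and the classification here uses plain fmt errors rather than the gitea error types.

package main

import (
	"bytes"
	"fmt"
	"os/exec"
	"strings"
)

// push pushes branch to remote; if lease is non-empty, the push only succeeds
// while the remote branch still points at that commit (--force-with-lease).
func push(repoPath, remote, branch, lease string) error {
	args := []string{"-C", repoPath, "push"}
	if lease != "" {
		args = append(args, "--force-with-lease="+branch+":"+lease)
	}
	args = append(args, "--", remote, branch)

	var stderr bytes.Buffer
	cmd := exec.Command("git", args...)
	cmd.Stderr = &stderr
	if err := cmd.Run(); err != nil {
		switch {
		case strings.Contains(stderr.String(), "non-fast-forward"):
			return fmt.Errorf("push is out of date: %w", err)
		case strings.Contains(stderr.String(), "[remote rejected]"):
			return fmt.Errorf("push rejected by remote: %w", err)
		default:
			return fmt.Errorf("push failed: %w: %s", err, stderr.String())
		}
	}
	return nil
}

func main() {
	fmt.Println(push("/tmp/repo", "origin", "main", "0123456789012345678901234567890123456789"))
}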
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gitea + +import ( + "context" + + "github.com/harness/gitness/gitrpc/internal/types" + + gitea "code.gitea.io/gitea/modules/git" +) + +// GetSubmodule returns the submodule at the given path reachable from ref. +// Note: ref can be Branch / Tag / CommitSHA. +func (g Adapter) GetSubmodule(ctx context.Context, repoPath string, + ref string, treePath string) (*types.Submodule, error) { + treePath = cleanTreePath(treePath) + + giteaRepo, err := gitea.OpenRepository(ctx, repoPath) + if err != nil { + return nil, processGiteaErrorf(err, "failed to open repository") + } + defer giteaRepo.Close() + + // Get the giteaCommit object for the ref + giteaCommit, err := giteaRepo.GetCommit(ref) + if err != nil { + return nil, processGiteaErrorf(err, "error getting commit for ref '%s'", ref) + } + + giteaSubmodule, err := giteaCommit.GetSubModule(treePath) + if err != nil { + return nil, processGiteaErrorf(err, "error getting submodule '%s' from commit", treePath) + } + + return &types.Submodule{ + Name: giteaSubmodule.Name, + URL: giteaSubmodule.URL, + }, nil +} diff --git a/gitrpc/internal/gitea/tag.go b/gitrpc/internal/gitea/tag.go new file mode 100644 index 0000000000..a786f46c6e --- /dev/null +++ b/gitrpc/internal/gitea/tag.go @@ -0,0 +1,314 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gitea + +import ( + "bytes" + "context" + "errors" + "fmt" + "io" + "strconv" + "strings" + "time" + + "github.com/harness/gitness/gitrpc/internal/types" + + gitea "code.gitea.io/gitea/modules/git" +) + +const ( + pgpSignatureBeginToken = "\n-----BEGIN PGP SIGNATURE-----\n" //#nosec G101 + pgpSignatureEndToken = "\n-----END PGP SIGNATURE-----" //#nosec G101 +) + +// GetAnnotatedTag returns the tag for a specific tag sha. +func (g Adapter) GetAnnotatedTag(ctx context.Context, repoPath string, sha string) (*types.Tag, error) { + tags, err := giteaGetAnnotatedTags(ctx, repoPath, []string{sha}) + if err != nil || len(tags) == 0 { + return nil, processGiteaErrorf(err, "failed to get annotated tag with sha '%s'", sha) + } + + return &tags[0], nil +} + +// GetAnnotatedTags returns the tags for a specific list of tag sha. +func (g Adapter) GetAnnotatedTags(ctx context.Context, repoPath string, shas []string) ([]types.Tag, error) { + return giteaGetAnnotatedTags(ctx, repoPath, shas) +} + +// CreateTag creates the tag pointing at the provided SHA (could be any type, e.g. commit, tag, blob, ...) 
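CreateTag, which follows, distinguishes a lightweight from an annotated tag purely by whether a message is supplied, and sets the tagger through the GIT_COMMITTER_* environment variables because `git tag` has no dedicated tagger flag. A small sketch of the annotated case; the identity and target SHA values are placeholders.

package main

import (
	"fmt"
	"os/exec"
	"time"
)

// createAnnotatedTag tags targetSHA with an annotated tag. The tagger recorded
// in the tag object comes from the committer environment variables.
func createAnnotatedTag(repoPath, name, targetSHA, message string) error {
	cmd := exec.Command("git", "-C", repoPath, "tag", "-m", message, "--", name, targetSHA)
	cmd.Env = append(cmd.Environ(),
		"GIT_COMMITTER_NAME=Jane Doe",
		"GIT_COMMITTER_EMAIL=jane@example.com",
		"GIT_COMMITTER_DATE="+time.Now().Format(time.RFC3339),
	)
	if out, err := cmd.CombinedOutput(); err != nil {
		return fmt.Errorf("git tag failed: %w: %s", err, out)
	}
	return nil
}

func main() {
	fmt.Println(createAnnotatedTag("/tmp/repo", "v1.2.3",
		"0123456789012345678901234567890123456789", "release v1.2.3"))
}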
+func (g Adapter) CreateTag( + ctx context.Context, + repoPath string, + name string, + targetSHA string, + opts *types.CreateTagOptions, +) error { + args := []string{ + "tag", + } + env := []string{} + + if opts != nil && opts.Message != "" { + args = append(args, + "-m", + opts.Message, + ) + env = append(env, + "GIT_COMMITTER_NAME="+opts.Tagger.Identity.Name, + "GIT_COMMITTER_EMAIL="+opts.Tagger.Identity.Email, + "GIT_COMMITTER_DATE="+opts.Tagger.When.Format(time.RFC3339), + ) + } + + args = append(args, + "--", + name, + targetSHA, + ) + + cmd := gitea.NewCommand(ctx, args...) + _, _, err := cmd.RunStdString(&gitea.RunOpts{Dir: repoPath, Env: env}) + if err != nil { + return processGiteaErrorf(err, "Service failed to create a tag") + } + return nil +} + +// giteaGetAnnotatedTag is a custom implementation to retrieve an annotated tag from a sha. +// The code is following parts of the gitea implementation. +func giteaGetAnnotatedTags(ctx context.Context, repoPath string, shas []string) ([]types.Tag, error) { + // The tag is an annotated tag with a message. + writer, reader, cancel := gitea.CatFileBatch(ctx, repoPath) + defer func() { + cancel() + _ = writer.Close() + }() + + tags := make([]types.Tag, len(shas)) + + for i, sha := range shas { + if _, err := writer.Write([]byte(sha + "\n")); err != nil { + return nil, err + } + tagSha, typ, size, err := gitea.ReadBatchLine(reader) + if err != nil { + if errors.Is(err, io.EOF) || gitea.IsErrNotExist(err) { + return nil, fmt.Errorf("tag with sha %s does not exist", sha) + } + return nil, err + } + if typ != string(types.GitObjectTypeTag) { + return nil, fmt.Errorf("git object is of type '%s', expected tag", typ) + } + + // read the remaining rawData + rawData, err := io.ReadAll(io.LimitReader(reader, size)) + if err != nil { + return nil, err + } + _, err = reader.Discard(1) + if err != nil { + return nil, err + } + + tag, err := parseTagDataFromCatFile(rawData) + if err != nil { + return nil, fmt.Errorf("failed to parse tag '%s': %w", sha, err) + } + + // fill in the sha + tag.Sha = string(tagSha) + + tags[i] = tag + } + + return tags, nil +} + +// parseTagDataFromCatFile parses a tag from a cat-file output. 
+func parseTagDataFromCatFile(data []byte) (tag types.Tag, err error) { + p := 0 + + // parse object Id + tag.TargetSha, p, err = giteaParseCatFileLine(data, p, "object") + if err != nil { + return + } + + // parse object type + rawType, p, err := giteaParseCatFileLine(data, p, "type") + if err != nil { + return + } + + tag.TargetType, err = types.ParseGitObjectType(rawType) + if err != nil { + return + } + + // parse tag name + tag.Name, p, err = giteaParseCatFileLine(data, p, "tag") + if err != nil { + return + } + + // parse tagger + rawTaggerInfo, p, err := giteaParseCatFileLine(data, p, "tagger") + if err != nil { + return + } + tag.Tagger, err = parseSignatureFromCatFileLine(rawTaggerInfo) + if err != nil { + return + } + + // remainder is message and gpg (remove leading and tailing new lines) + message := string(bytes.Trim(data[p:], "\n")) + + // handle gpg signature + pgpEnd := strings.Index(message, pgpSignatureEndToken) + if pgpEnd > -1 { + messageStart := pgpEnd + len(pgpSignatureEndToken) + // for now we just remove the signature (and trim any separating new lines) + // TODO: add support for GPG signature of tags + message = strings.TrimLeft(message[messageStart:], "\n") + } + + tag.Message = message + + // get title from message + tag.Title = message + titleEnd := strings.IndexByte(message, '\n') + if titleEnd > -1 { + tag.Title = message[:titleEnd] + } + + return tag, nil +} + +func giteaParseCatFileLine(data []byte, start int, header string) (string, int, error) { + // for simplicity only look at data from start onwards + data = data[start:] + + lenHeader := len(header) + lenData := len(data) + if lenData < lenHeader { + return "", 0, fmt.Errorf("expected '%s' but line only contains '%s'", header, string(data)) + } + if string(data[:lenHeader]) != header { + return "", 0, fmt.Errorf("expected '%s' but started with '%s'", header, string(data[:lenHeader])) + } + + // get end of line and start of next line (used externally, transpose with provided start index) + lineEnd := bytes.IndexByte(data, '\n') + externalNextLine := start + lineEnd + 1 + if lineEnd == -1 { + lineEnd = lenData + externalNextLine = start + lenData + } + + // if there's no data, return an error (have to consider for ' ') + if lineEnd <= lenHeader+1 { + return "", 0, fmt.Errorf("no data for line of type '%s'", header) + } + + return string(data[lenHeader+1 : lineEnd]), externalNextLine, nil +} + +// defaultGitTimeLayout is the (default) time format printed by git. +const defaultGitTimeLayout = "Mon Jan _2 15:04:05 2006 -0700" + +// parseSignatureFromCatFileLine parses the signature from a cat-file output. +// This is used for commit / tag outputs. Input will be similar to (without 'author 'prefix): +// - author Max Mustermann 1666401234 -0700 +// - author Max Mustermann Tue Oct 18 05:13:26 2022 +0530 +// TODO: method is leaning on gitea code - requires reference? 
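parseSignatureFromCatFileLine below has to handle the two timestamp encodings git emits in object headers: the default date format and the raw `<unix seconds> <±hhmm>` form. The sketch below covers only the raw form, which is the common case for cat-file output, and is a simplified stand-in rather than the adapter's implementation.

package main

import (
	"fmt"
	"strconv"
	"strings"
	"time"
)

// parseRawSignature parses lines such as
//	"Max Mustermann <max@example.com> 1666401234 -0700"
// into name, email and a time carrying the original offset.
func parseRawSignature(line string) (name, email string, when time.Time, err error) {
	lt := strings.LastIndexByte(line, '<')
	gt := strings.LastIndexByte(line, '>')
	if lt < 1 || gt < lt {
		return "", "", time.Time{}, fmt.Errorf("malformed signature %q", line)
	}
	name = strings.TrimSpace(line[:lt])
	email = line[lt+1 : gt]

	fields := strings.Fields(line[gt+1:])
	if len(fields) != 2 || len(fields[1]) != 5 {
		return "", "", time.Time{}, fmt.Errorf("missing or malformed timestamp in %q", line)
	}
	secs, err := strconv.ParseInt(fields[0], 10, 64)
	if err != nil {
		return "", "", time.Time{}, err
	}
	// The offset looks like "-0700": hours*3600 + minutes*60, negated for '-'.
	offH, _ := strconv.Atoi(fields[1][1:3])
	offM, _ := strconv.Atoi(fields[1][3:5])
	offset := offH*3600 + offM*60
	if fields[1][0] == '-' {
		offset = -offset
	}
	when = time.Unix(secs, 0).In(time.FixedZone(fields[1], offset))
	return name, email, when, nil
}

func main() {
	fmt.Println(parseRawSignature("Max Mustermann <max@example.com> 1666401234 -0700"))
}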
+func parseSignatureFromCatFileLine(line string) (types.Signature, error) { + sig := types.Signature{} + emailStart := strings.LastIndexByte(line, '<') + emailEnd := strings.LastIndexByte(line, '>') + if emailStart == -1 || emailEnd == -1 || emailEnd < emailStart { + return types.Signature{}, fmt.Errorf("signature is missing email ('%s')", line) + } + + // name requires that there is at least one char followed by a space (so emailStart >= 2) + if emailStart < 2 { + return types.Signature{}, fmt.Errorf("signature is missing name ('%s')", line) + } + + sig.Identity.Name = line[:emailStart-1] + sig.Identity.Email = line[emailStart+1 : emailEnd] + + timeStart := emailEnd + 2 + if timeStart >= len(line) { + return types.Signature{}, fmt.Errorf("signature is missing time ('%s')", line) + } + + // Check if time format is written date time format (e.g Thu, 07 Apr 2005 22:13:13 +0200) + // we can check that by ensuring that the date time part starts with a non-digit character. + if line[timeStart] > '9' { + var err error + sig.When, err = time.Parse(defaultGitTimeLayout, line[timeStart:]) + if err != nil { + return types.Signature{}, fmt.Errorf("failed to time.parse signature time ('%s'): %w", line, err) + } + + return sig, nil + } + + // Otherwise we have to manually parse unix time and time zone + endOfUnixTime := timeStart + strings.IndexByte(line[timeStart:], ' ') + if endOfUnixTime <= timeStart { + return types.Signature{}, fmt.Errorf("signature is missing unix time ('%s')", line) + } + + unixSeconds, err := strconv.ParseInt(line[timeStart:endOfUnixTime], 10, 64) + if err != nil { + return types.Signature{}, fmt.Errorf("failed to parse unix time ('%s'): %w", line, err) + } + + // parse time zone + startOfTimeZone := endOfUnixTime + 1 // +1 for space + endOfTimeZone := startOfTimeZone + 5 // +5 for '+0700' + if startOfTimeZone >= len(line) || endOfTimeZone > len(line) { + return types.Signature{}, fmt.Errorf("signature is missing time zone ('%s')", line) + } + + // get and disect timezone, e.g. '+0700' + rawTimeZone := line[startOfTimeZone:endOfTimeZone] + rawTimeZoneH := rawTimeZone[1:3] // gets +[07]00 + rawTimeZoneMin := rawTimeZone[3:] // gets +07[00] + timeZoneH, err := strconv.ParseInt(rawTimeZoneH, 10, 64) + if err != nil { + return types.Signature{}, fmt.Errorf("failed to parse hours of time zone ('%s'): %w", line, err) + } + timeZoneMin, err := strconv.ParseInt(rawTimeZoneMin, 10, 64) + if err != nil { + return types.Signature{}, fmt.Errorf("failed to parse minutes of time zone ('%s'): %w", line, err) + } + + timeZoneOffsetInSec := int(timeZoneH*60+timeZoneMin) * 60 + if rawTimeZone[0] == '-' { + timeZoneOffsetInSec *= -1 + } + timeZone := time.FixedZone("", timeZoneOffsetInSec) + + // create final time using unix and timezone translation + sig.When = time.Unix(unixSeconds, 0).In(timeZone) + + return sig, nil +} diff --git a/gitrpc/internal/gitea/tree.go b/gitrpc/internal/gitea/tree.go new file mode 100644 index 0000000000..b819db2556 --- /dev/null +++ b/gitrpc/internal/gitea/tree.go @@ -0,0 +1,152 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package gitea + +import ( + "bytes" + "context" + "errors" + "fmt" + "io" + "path" + "path/filepath" + "strings" + + "github.com/harness/gitness/gitrpc/internal/types" + + gitea "code.gitea.io/gitea/modules/git" + gogitfilemode "github.com/go-git/go-git/v5/plumbing/filemode" + gogitobject "github.com/go-git/go-git/v5/plumbing/object" +) + +func cleanTreePath(treePath string) string { + return strings.Trim(path.Clean("/"+treePath), "/") +} + +// GetTreeNode returns the tree node at the given path as found for the provided reference. +// Note: ref can be Branch / Tag / CommitSHA. +func (g Adapter) GetTreeNode(ctx context.Context, + repoPath string, + ref string, + treePath string, +) (*types.TreeNode, error) { + treePath = cleanTreePath(treePath) + + _, refCommit, err := g.getGoGitCommit(ctx, repoPath, ref) + if err != nil { + return nil, err + } + + rootEntry := gogitobject.TreeEntry{ + Name: "", + Mode: gogitfilemode.Dir, + Hash: refCommit.TreeHash, + } + + treeEntry := &rootEntry + + if len(treePath) > 0 { + tree, err := refCommit.Tree() + if err != nil { + return nil, fmt.Errorf("failed to get tree for the commit: %w", err) + } + + treeEntry, err = tree.FindEntry(treePath) + if errors.Is(err, gogitobject.ErrDirectoryNotFound) || errors.Is(err, gogitobject.ErrEntryNotFound) { + return nil, &types.PathNotFoundError{Path: treePath} + } + if err != nil { + return nil, fmt.Errorf("failed to find path entry %s: %w", treePath, err) + } + } + + nodeType, mode, err := mapGogitNodeToTreeNodeModeAndType(treeEntry.Mode) + if err != nil { + return nil, err + } + + return &types.TreeNode{ + Mode: mode, + NodeType: nodeType, + Sha: treeEntry.Hash.String(), + Name: treeEntry.Name, + Path: treePath, + }, nil +} + +// ListTreeNodes lists the child nodes of a tree reachable from ref via the specified path +// and includes the latest commit for all nodes if requested. +// Note: ref can be Branch / Tag / CommitSHA. +// +//nolint:gocognit // refactor if needed +func (g Adapter) ListTreeNodes(ctx context.Context, + repoPath string, + ref string, + treePath string, +) ([]types.TreeNode, error) { + treePath = cleanTreePath(treePath) + + _, refCommit, err := g.getGoGitCommit(ctx, repoPath, ref) + if err != nil { + return nil, err + } + + tree, err := refCommit.Tree() + if err != nil { + return nil, fmt.Errorf("failed to get tree for the commit: %w", err) + } + + if len(treePath) > 0 { + tree, err = tree.Tree(treePath) + if errors.Is(err, gogitobject.ErrDirectoryNotFound) || errors.Is(err, gogitobject.ErrEntryNotFound) { + return nil, &types.PathNotFoundError{Path: treePath} + } + if err != nil { + return nil, fmt.Errorf("failed to find path entry %s: %w", treePath, err) + } + } + + treeNodes := make([]types.TreeNode, len(tree.Entries)) + for i, treeEntry := range tree.Entries { + nodeType, mode, err := mapGogitNodeToTreeNodeModeAndType(treeEntry.Mode) + if err != nil { + return nil, err + } + + treeNodes[i] = types.TreeNode{ + NodeType: nodeType, + Mode: mode, + Sha: treeEntry.Hash.String(), + Name: treeEntry.Name, + Path: filepath.Join(treePath, treeEntry.Name), + } + } + + return treeNodes, nil +} + +func (g Adapter) ReadTree(ctx context.Context, repoPath, ref string, w io.Writer, args ...string) error { + errbuf := bytes.Buffer{} + if err := gitea.NewCommand(ctx, append([]string{"read-tree", ref}, args...)...). 
+ Run(&gitea.RunOpts{ + Dir: repoPath, + Stdout: w, + Stderr: &errbuf, + }); err != nil { + return fmt.Errorf("unable to read %s in to the index: %w\n%s", + ref, err, errbuf.String()) + } + return nil +} diff --git a/gitrpc/internal/gitea/vars.go b/gitrpc/internal/gitea/vars.go new file mode 100644 index 0000000000..f5adc0e008 --- /dev/null +++ b/gitrpc/internal/gitea/vars.go @@ -0,0 +1,21 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gitea + +import "regexp" + +var ( + escapedSymbols = regexp.MustCompile(`([*[?! \\])`) +) diff --git a/gitrpc/internal/middleware/error.go b/gitrpc/internal/middleware/error.go new file mode 100644 index 0000000000..5e1728f3c1 --- /dev/null +++ b/gitrpc/internal/middleware/error.go @@ -0,0 +1,103 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
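The escapedSymbols expression defined above exists because GetDiffTree feeds file names back to git as pathspecs: '*', '[', '?', '!', space and backslash are pathspec metacharacters and are escaped before each path is prefixed with '/' so that it only matches from the repository root. A short illustration of what that substitution produces for a few made-up paths:

package main

import (
	"fmt"
	"regexp"
)

// escapedSymbols matches the pathspec metacharacters that GetDiffTree escapes:
// '*', '[', '?', '!', space and backslash.
var escapedSymbols = regexp.MustCompile(`([*[?! \\])`)

func main() {
	for _, p := range []string{
		"docs/readme.md",
		"notes [draft].txt",
		"glob*?.go",
		"!important file",
	} {
		escaped := escapedSymbols.ReplaceAllString(p, `\$1`)
		// Each entry is prefixed with '/' so it only matches from the repo root.
		fmt.Printf("/%s\n", escaped)
	}
}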
+ +package middleware + +import ( + "context" + "errors" + "reflect" + + "github.com/rs/zerolog/log" + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +type ErrInterceptor struct { +} + +func NewErrInterceptor() ErrInterceptor { + return ErrInterceptor{} +} + +func (i ErrInterceptor) UnaryInterceptor() grpc.UnaryServerInterceptor { + return func(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo, + handler grpc.UnaryHandler) (interface{}, error) { + value, err := handler(ctx, req) + if (value == nil || reflect.ValueOf(value).IsNil()) && err == nil { + return nil, status.Error(codes.Internal, "service returned no error and no object") + } + err = processError(ctx, err) + return value, err + } +} + +func (i ErrInterceptor) StreamInterceptor() grpc.StreamServerInterceptor { + return func(srv interface{}, stream grpc.ServerStream, info *grpc.StreamServerInfo, + handler grpc.StreamHandler) error { + err := handler(srv, stream) + err = processError(stream.Context(), err) + return err + } +} + +func processError(ctx context.Context, err error) (rerr error) { + if err == nil { + return nil + } + + defer func() { + statusErr, ok := status.FromError(rerr) + if !ok { + return + } + //nolint: exhaustive // log only server side errors, no need to log user based errors + switch statusErr.Code() { + case codes.Unknown, + codes.DeadlineExceeded, + codes.ResourceExhausted, + codes.FailedPrecondition, + codes.Aborted, + codes.OutOfRange, + codes.Unimplemented, + codes.Internal, + codes.Unavailable, + codes.DataLoss: + { + logCtx := log.Ctx(ctx) + logCtx.Error().Msg(err.Error()) + } + } + }() + + // custom errors should implement StatusError + var statusError interface { + Status() (*status.Status, error) + } + + if errors.As(err, &statusError) { + st, sterr := statusError.Status() + if sterr != nil { + return sterr + } + return st.Err() + } + + if status, ok := status.FromError(err); ok { + return status.Err() + } + + return status.Errorf(codes.Unknown, err.Error()) +} diff --git a/gitrpc/internal/middleware/log.go b/gitrpc/internal/middleware/log.go new file mode 100644 index 0000000000..1bc984aea0 --- /dev/null +++ b/gitrpc/internal/middleware/log.go @@ -0,0 +1,162 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package middleware + +import ( + "context" + "strings" + "time" + + "github.com/harness/gitness/gitrpc/rpc" + + "github.com/rs/xid" + "github.com/rs/zerolog/log" + "google.golang.org/grpc" + "google.golang.org/grpc/metadata" + "google.golang.org/grpc/peer" + "google.golang.org/grpc/status" +) + +const ( + RequestIDNone string = "gitrpc_none" +) + +// requestIDKey is context key for storing and retrieving the request ID to and from a context. +type requestIDKey struct{} + +// LogInterceptor injects a zerolog logger with common grpc related annotations and logs the completion of the call. 
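processError above lets service errors carry their own gRPC status by probing for a `Status() (*status.Status, error)` method before falling back to codes.Unknown. The sketch below shows an error type wired into that convention; the type and its field are illustrative and not part of the gitrpc codebase.

package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// notFoundError is an illustrative error that knows its own gRPC status.
type notFoundError struct {
	resource string
}

func (e *notFoundError) Error() string {
	return fmt.Sprintf("%s not found", e.resource)
}

// Status satisfies the interface that processError probes for.
func (e *notFoundError) Status() (*status.Status, error) {
	return status.New(codes.NotFound, e.Error()), nil
}

func main() {
	var err error = &notFoundError{resource: "branch refs/heads/feature"}

	// A caller (or the interceptor) can recover the status like this:
	type statusError interface {
		Status() (*status.Status, error)
	}
	if se, ok := err.(statusError); ok {
		st, _ := se.Status()
		fmt.Println(st.Code(), st.Message())
	}
}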
+// If the metadata contains a request id, the logger is annotated with the same request ID, otherwise with a new one. +type LogInterceptor struct { +} + +func NewLogInterceptor() LogInterceptor { + return LogInterceptor{} +} + +func (i LogInterceptor) UnaryInterceptor() grpc.UnaryServerInterceptor { + return func(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo, + handler grpc.UnaryHandler) (interface{}, error) { + ctx = injectLogging(ctx, info.FullMethod) + + // measure execution time + start := time.Now() + value, err := handler(ctx, req) + + logCompletion(ctx, start, err) + + return value, err + } +} + +func (i LogInterceptor) StreamInterceptor() grpc.StreamServerInterceptor { + return func(srv interface{}, stream grpc.ServerStream, info *grpc.StreamServerInfo, + handler grpc.StreamHandler) error { + ctx := injectLogging(stream.Context(), info.FullMethod) + + // wrap stream with updated context + stream = &logServerStream{ + ServerStream: stream, + ctx: ctx, + } + + // measure execution time + start := time.Now() + err := handler(srv, stream) + + logCompletion(ctx, start, err) + + return err + } +} + +// WithRequestID returns a copy of parent in which the request id value is set. +func WithRequestID(parent context.Context, v string) context.Context { + return context.WithValue(parent, requestIDKey{}, v) +} + +// RequestIDFrom retrieves the request id from the context. +// If no request id exists, RequestIDNone is returned. +func RequestIDFrom(ctx context.Context) string { + if v, ok := ctx.Value(requestIDKey{}).(string); ok { + return v + } + + return RequestIDNone +} + +func injectLogging(ctx context.Context, fullMethod string) context.Context { + // split fullMethod into service and method (expected format: "/package.service/method...") + // If it doesn't match the expected format, the full string is put into method. + service, method := "", fullMethod + if len(fullMethod) > 0 && fullMethod[0] == '/' { + if s, m, ok := strings.Cut(fullMethod[1:], "/"); ok { + service, method = s, m + } + } + + // get request id (or create a new one) and inject it for later usage (git env variables) + requestID := getOrCreateRequestID(ctx) + ctx = WithRequestID(ctx, requestID) + + // create new logCtx with injected info + logCtx := log.Logger.With(). + Str("grpc.service", service). + Str("grpc.method", method). + Str("request_id", requestID) + + // add peer information if available + if p, ok := peer.FromContext(ctx); ok && p.Addr != nil { + logCtx = logCtx.Str("grpc.peer", p.Addr.String()) + } + + // inject logger in context + logger := logCtx.Logger() + return logger.WithContext(ctx) +} + +func logCompletion(ctx context.Context, start time.Time, err error) { + logCtx := log.Ctx(ctx).Info(). + Dur("grpc.elapsed_ms", time.Since(start)) + + // try to get grpc status code + if status, ok := status.FromError(err); ok { + logCtx.Str("grpc.status_code", status.Code().String()) + } + + logCtx.Msg("grpc request completed.") +} + +func getOrCreateRequestID(ctx context.Context) string { + // check if request id was passed as part of grpc metadata + if md, ok := metadata.FromIncomingContext(ctx); ok { + if ids := md.Get(rpc.MetadataKeyRequestID); len(ids) > 0 { + return ids[0] + } + } + + // use same type of request IDs as used by zerolog + return xid.New().String() +} + +// logServerStream is used to modify the stream context. +// In order to modify the stream context we have to create a new struct and overshadow the `Context()` method. 
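injectLogging above pulls its log annotations from two places: the gRPC full method name, which it splits into service and method, and the incoming metadata, from which it reuses a caller-supplied request ID before falling back to a fresh xid. A compact sketch of those two steps; the metadata key "request-id" is a placeholder for the real rpc.MetadataKeyRequestID value.

package main

import (
	"context"
	"fmt"
	"strings"

	"github.com/rs/xid"
	"google.golang.org/grpc/metadata"
)

// splitFullMethod splits "/package.Service/Method" into service and method;
// anything that doesn't match the expected shape lands entirely in method.
func splitFullMethod(fullMethod string) (service, method string) {
	service, method = "", fullMethod
	if strings.HasPrefix(fullMethod, "/") {
		if s, m, ok := strings.Cut(fullMethod[1:], "/"); ok {
			service, method = s, m
		}
	}
	return service, method
}

// requestID reuses a request id passed in gRPC metadata, or mints a new xid.
func requestID(ctx context.Context, key string) string {
	if md, ok := metadata.FromIncomingContext(ctx); ok {
		if ids := md.Get(key); len(ids) > 0 {
			return ids[0]
		}
	}
	return xid.New().String()
}

func main() {
	ctx := metadata.NewIncomingContext(context.Background(),
		metadata.Pairs("request-id", "cksqjr3s70000abcd1234"))
	s, m := splitFullMethod("/rpc.RepositoryService/GetTreeNode")
	fmt.Println(s, m, requestID(ctx, "request-id"))
}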
+type logServerStream struct { + grpc.ServerStream + ctx context.Context +} + +func (s *logServerStream) Context() context.Context { + return s.ctx +} diff --git a/gitrpc/internal/parser/diff_cut.go b/gitrpc/internal/parser/diff_cut.go new file mode 100644 index 0000000000..03be315c54 --- /dev/null +++ b/gitrpc/internal/parser/diff_cut.go @@ -0,0 +1,287 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package parser + +import ( + "bufio" + "errors" + "io" + + "github.com/harness/gitness/gitrpc/internal/types" +) + +// DiffCut parses git diff output that should consist of a single hunk +// (usually generated with large value passed to the "--unified" parameter) +// and returns lines specified with the parameters. +// +//nolint:funlen,gocognit,nestif,gocognit,gocyclo,cyclop // it's actually very readable +func DiffCut(r io.Reader, params types.DiffCutParams) (types.HunkHeader, types.Hunk, error) { + scanner := bufio.NewScanner(r) + + var err error + var hunkHeader types.HunkHeader + + if _, err = scanFileHeader(scanner); err != nil { + return types.HunkHeader{}, types.Hunk{}, err + } + + if hunkHeader, err = scanHunkHeader(scanner); err != nil { + return types.HunkHeader{}, types.Hunk{}, err + } + + currentOldLine := hunkHeader.OldLine + currentNewLine := hunkHeader.NewLine + + var inCut bool + var diffCutHeader types.HunkHeader + var diffCut []string + + linesBeforeBuf := newStrCircBuf(params.BeforeLines) + + for { + if params.LineEndNew && currentNewLine > params.LineEnd || + !params.LineEndNew && currentOldLine > params.LineEnd { + break // exceeded the requested line range + } + + var line string + var action diffAction + + line, action, err = scanHunkLine(scanner) + if err != nil { + return types.HunkHeader{}, types.Hunk{}, err + } + + if line == "" { + err = io.EOF + break + } + + if params.LineStartNew && currentNewLine < params.LineStart || + !params.LineStartNew && currentOldLine < params.LineStart { + // not yet in the requested line range + linesBeforeBuf.push(line) + } else { + if !inCut { + diffCutHeader.NewLine = currentNewLine + diffCutHeader.OldLine = currentOldLine + } + inCut = true + + if action != actionRemoved { + diffCutHeader.NewSpan++ + } + if action != actionAdded { + diffCutHeader.OldSpan++ + } + + diffCut = append(diffCut, line) + if len(diffCut) > params.LineLimit { + break // safety break + } + } + + // increment the line numbers + if action != actionRemoved { + currentNewLine++ + } + if action != actionAdded { + currentOldLine++ + } + } + + if !inCut { + return types.HunkHeader{}, types.Hunk{}, types.ErrHunkNotFound + } + + var ( + linesBefore []string + linesAfter []string + ) + + linesBefore = linesBeforeBuf.lines() + if !errors.Is(err, io.EOF) { + for i := 0; i < params.AfterLines; i++ { + line, _, err := scanHunkLine(scanner) + if err != nil { + return types.HunkHeader{}, types.Hunk{}, err + } + if line == "" { + break + } + linesAfter = append(linesAfter, line) + } + } + + diffCutHeaderLines := diffCutHeader + + 
for _, s := range linesBefore { + action := diffAction(s[0]) + if action != actionRemoved { + diffCutHeaderLines.NewLine-- + diffCutHeaderLines.NewSpan++ + } + if action != actionAdded { + diffCutHeaderLines.OldLine-- + diffCutHeaderLines.OldSpan++ + } + } + + for _, s := range linesAfter { + action := diffAction(s[0]) + if action != actionRemoved { + diffCutHeaderLines.NewSpan++ + } + if action != actionAdded { + diffCutHeaderLines.OldSpan++ + } + } + + return diffCutHeader, types.Hunk{ + HunkHeader: diffCutHeaderLines, + Lines: concat(linesBefore, diffCut, linesAfter), + }, nil +} + +// scanFileHeader keeps reading lines until file header line is read. +func scanFileHeader(scan *bufio.Scanner) (types.DiffFileHeader, error) { + for scan.Scan() { + line := scan.Text() + if h, ok := ParseDiffFileHeader(line); ok { + return h, nil + } + } + + if err := scan.Err(); err != nil { + return types.DiffFileHeader{}, err + } + + return types.DiffFileHeader{}, types.ErrHunkNotFound +} + +// scanHunkHeader keeps reading lines until hunk header line is read. +func scanHunkHeader(scan *bufio.Scanner) (types.HunkHeader, error) { + for scan.Scan() { + line := scan.Text() + if h, ok := ParseDiffHunkHeader(line); ok { + return h, nil + } + } + + if err := scan.Err(); err != nil { + return types.HunkHeader{}, err + } + + return types.HunkHeader{}, types.ErrHunkNotFound +} + +type diffAction byte + +const ( + actionUnchanged diffAction = ' ' + actionRemoved diffAction = '-' + actionAdded diffAction = '+' +) + +func scanHunkLine(scan *bufio.Scanner) (line string, action diffAction, err error) { +again: + action = actionUnchanged + + if !scan.Scan() { + err = scan.Err() + return + } + + line = scan.Text() + if line == "" { + err = types.ErrHunkNotFound // should not happen: empty line in diff output + return + } + + action = diffAction(line[0]) + if action == '\\' { // handle the "\ No newline at end of file" line + goto again + } + + if action != actionRemoved && action != actionAdded && action != actionUnchanged { + // treat this as the end of hunk + line = "" + action = actionUnchanged + return + } + + return +} + +type strCircBuf struct { + head int + entries []string +} + +func newStrCircBuf(size int) strCircBuf { + return strCircBuf{ + head: -1, + entries: make([]string, 0, size), + } +} + +func (b *strCircBuf) push(s string) { + n := cap(b.entries) + if n == 0 { + return + } + + b.head++ + + if len(b.entries) < n { + b.entries = append(b.entries, s) + return + } + + if b.head >= n { + b.head = 0 + } + b.entries[b.head] = s +} + +func (b *strCircBuf) lines() []string { + n := cap(b.entries) + if len(b.entries) < n { + return b.entries + } + + res := make([]string, n) + for i := 0; i < n; i++ { + idx := (b.head + 1 + i) % n + res[i] = b.entries[idx] + } + return res +} + +func concat[T any](a ...[]T) []T { + var n int + for _, m := range a { + n += len(m) + } + res := make([]T, n) + + n = 0 + for _, m := range a { + copy(res[n:], m) + n += len(m) + } + + return res +} diff --git a/gitrpc/internal/parser/diff_cut_test.go b/gitrpc/internal/parser/diff_cut_test.go new file mode 100644 index 0000000000..6b69675f5a --- /dev/null +++ b/gitrpc/internal/parser/diff_cut_test.go @@ -0,0 +1,271 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package parser + +import ( + "reflect" + "strings" + "testing" + + "github.com/harness/gitness/gitrpc/internal/types" +) + +//nolint:gocognit // it's a unit test!!! +func TestDiffCut(t *testing.T) { + const input = `diff --git a/test.txt b/test.txt +--- a/test.txt ++++ b/test.txt +@@ -1,15 +1,11 @@ ++0 + 1 + 2 + 3 + 4 + 5 +-6 +-7 +-8 ++6,7,8 + 9 + 10 + 11 + 12 +-13 +-14 +-15 +` + + tests := []struct { + name string + params types.DiffCutParams + expCutHeader string + expCut []string + expError error + }{ + { + name: "at-'+6,7,8':new", + params: types.DiffCutParams{ + LineStart: 7, LineStartNew: true, + LineEnd: 7, LineEndNew: true, + BeforeLines: 0, AfterLines: 0, + LineLimit: 1000, + }, + expCutHeader: "@@ -6,3 +7 @@", + expCut: []string{"-6", "-7", "-8", "+6,7,8"}, + expError: nil, + }, + { + name: "at-'+6,7,8':new-with-lines-around", + params: types.DiffCutParams{ + LineStart: 7, LineStartNew: true, + LineEnd: 7, LineEndNew: true, + BeforeLines: 1, AfterLines: 2, + LineLimit: 1000, + }, + expCutHeader: "@@ -5,6 +6,4 @@", + expCut: []string{" 5", "-6", "-7", "-8", "+6,7,8", " 9", " 10"}, + expError: nil, + }, + { + name: "at-'+0':new-with-lines-around", + params: types.DiffCutParams{ + LineStart: 1, LineStartNew: true, + LineEnd: 1, LineEndNew: true, + BeforeLines: 3, AfterLines: 3, + LineLimit: 1000, + }, + expCutHeader: "@@ -1,3 +1,4 @@", + expCut: []string{"+0", " 1", " 2", " 3"}, + expError: nil, + }, + { + name: "at-'-13':one-with-lines-around", + params: types.DiffCutParams{ + LineStart: 13, LineStartNew: false, + LineEnd: 13, LineEndNew: false, + BeforeLines: 1, AfterLines: 1, + LineLimit: 1000, + }, + expCutHeader: "@@ -12,3 +11 @@", + expCut: []string{" 12", "-13", "-14"}, + expError: nil, + }, + { + name: "at-'-13':mixed", + params: types.DiffCutParams{ + LineStart: 7, LineStartNew: false, + LineEnd: 7, LineEndNew: true, + BeforeLines: 0, AfterLines: 0, + LineLimit: 1000, + }, + expCutHeader: "@@ -7,2 +7 @@", + expCut: []string{"-7", "-8", "+6,7,8"}, + expError: nil, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + hunkHeader, linesHunk, err := DiffCut( + strings.NewReader(input), + test.params, + ) + + //nolint:errorlint // this error will not be wrapped + if want, got := test.expError, err; want != got { + t.Errorf("error mismatch: want=%v got=%v", want, got) + return + } + + if err != nil { + return + } + + if test.params.LineStartNew && test.params.LineStart != hunkHeader.NewLine { + t.Errorf("hunk line start mismatch: want=%d got=%d", test.params.LineStart, hunkHeader.NewLine) + } + + if !test.params.LineStartNew && test.params.LineStart != hunkHeader.OldLine { + t.Errorf("hunk line start mismatch: want=%d got=%d", test.params.LineStart, hunkHeader.OldLine) + } + + if want, got := test.expCutHeader, linesHunk.String(); want != got { + t.Errorf("header mismatch: want=%s got=%s", want, got) + } + + if want, got := test.expCut, linesHunk.Lines; !reflect.DeepEqual(want, got) { + t.Errorf("lines mismatch: want=%s got=%s", want, got) + } + }) + } +} + +func TestDiffCutNoEOLInOld(t *testing.T) { + const input = `diff --git 
a/test.txt b/test.txt +index 541cb64f..047d7ee2 100644 +--- a/test.txt ++++ b/test.txt +@@ -1 +1,4 @@ +-test +\ No newline at end of file ++123 ++456 ++789 +` + + hh, h, err := DiffCut( + strings.NewReader(input), + types.DiffCutParams{ + LineStart: 3, + LineStartNew: true, + LineEnd: 3, + LineEndNew: true, + BeforeLines: 1, + AfterLines: 1, + LineLimit: 100, + }, + ) + if err != nil { + t.Errorf("got error: %v", err) + return + } + + expectedHH := types.HunkHeader{OldLine: 2, OldSpan: 0, NewLine: 3, NewSpan: 1} + if expectedHH != hh { + t.Errorf("expected hunk header: %+v, but got: %+v", expectedHH, hh) + } + + expectedHunkLines := types.Hunk{ + HunkHeader: types.HunkHeader{OldLine: 2, OldSpan: 0, NewLine: 2, NewSpan: 2}, + Lines: []string{"+456", "+789"}, + } + if !reflect.DeepEqual(expectedHunkLines, h) { + t.Errorf("expected hunk header: %+v, but got: %+v", expectedHunkLines, h) + } +} + +func TestDiffCutNoEOLInNew(t *testing.T) { + const input = `diff --git a/test.txt b/test.txt +index af7864ba..541cb64f 100644 +--- a/test.txt ++++ b/test.txt +@@ -1,3 +1 @@ +-123 +-456 +-789 ++test +\ No newline at end of file +` + hh, h, err := DiffCut( + strings.NewReader(input), + types.DiffCutParams{ + LineStart: 1, + LineStartNew: true, + LineEnd: 1, + LineEndNew: true, + BeforeLines: 0, + AfterLines: 0, + LineLimit: 100, + }, + ) + if err != nil { + t.Errorf("got error: %v", err) + return + } + + expectedHH := types.HunkHeader{OldLine: 1, OldSpan: 3, NewLine: 1, NewSpan: 1} + if expectedHH != hh { + t.Errorf("expected hunk header: %+v, but got: %+v", expectedHH, hh) + } + + expectedHunkLines := types.Hunk{ + HunkHeader: types.HunkHeader{OldLine: 1, OldSpan: 3, NewLine: 1, NewSpan: 1}, + Lines: []string{"-123", "-456", "-789", "+test"}, + } + if !reflect.DeepEqual(expectedHunkLines, h) { + t.Errorf("expected hunk header: %+v, but got: %+v", expectedHunkLines, h) + } +} + +func TestStrCircBuf(t *testing.T) { + tests := []struct { + name string + cap int + feed []string + exp []string + }{ + {name: "empty", cap: 10, feed: nil, exp: []string{}}, + {name: "zero-cap", cap: 0, feed: []string{"A", "B"}, exp: []string{}}, + {name: "one", cap: 5, feed: []string{"A"}, exp: []string{"A"}}, + {name: "two", cap: 3, feed: []string{"A", "B"}, exp: []string{"A", "B"}}, + {name: "cap", cap: 3, feed: []string{"A", "B", "C"}, exp: []string{"A", "B", "C"}}, + {name: "cap+1", cap: 3, feed: []string{"A", "B", "C", "D"}, exp: []string{"B", "C", "D"}}, + {name: "cap+2", cap: 3, feed: []string{"A", "B", "C", "D", "E"}, exp: []string{"C", "D", "E"}}, + {name: "cap*2+1", cap: 2, feed: []string{"A", "B", "C", "D", "E"}, exp: []string{"D", "E"}}, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + b := newStrCircBuf(test.cap) + for _, s := range test.feed { + b.push(s) + } + + if want, got := test.exp, b.lines(); !reflect.DeepEqual(want, got) { + t.Errorf("want=%v, got=%v", want, got) + } + }) + } +} diff --git a/gitrpc/internal/parser/diff_headers.go b/gitrpc/internal/parser/diff_headers.go new file mode 100644 index 0000000000..0ce0171ed1 --- /dev/null +++ b/gitrpc/internal/parser/diff_headers.go @@ -0,0 +1,113 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package parser + +import ( + "bufio" + "io" + "regexp" + + "github.com/harness/gitness/gitrpc/enum" + "github.com/harness/gitness/gitrpc/internal/types" +) + +var regExpDiffFileHeader = regexp.MustCompile(`^diff --git a/(.+) b/(.+)$`) + +func ParseDiffFileHeader(line string) (types.DiffFileHeader, bool) { + groups := regExpDiffFileHeader.FindStringSubmatch(line) + if groups == nil { + return types.DiffFileHeader{}, false + } + + return types.DiffFileHeader{ + OldFileName: groups[1], + NewFileName: groups[2], + Extensions: map[string]string{}, + }, true +} + +var regExpDiffExtHeader = regexp.MustCompile( + "^(" + + enum.DiffExtHeaderOldMode + "|" + + enum.DiffExtHeaderNewMode + "|" + + enum.DiffExtHeaderDeletedFileMode + "|" + + enum.DiffExtHeaderNewFileMode + "|" + + enum.DiffExtHeaderCopyFrom + "|" + + enum.DiffExtHeaderCopyTo + "|" + + enum.DiffExtHeaderRenameFrom + "|" + + enum.DiffExtHeaderRenameTo + "|" + + enum.DiffExtHeaderSimilarity + "|" + + enum.DiffExtHeaderDissimilarity + "|" + + enum.DiffExtHeaderIndex + + ") (.+)$") + +func ParseDiffFileExtendedHeader(line string) (string, string) { + groups := regExpDiffExtHeader.FindStringSubmatch(line) + if groups == nil { + return "", "" + } + + return groups[1], groups[2] +} + +// GetHunkHeaders parses git diff output and returns all diff headers for all files. +// See for documentation: https://git-scm.com/docs/git-diff#generate_patch_text_with_p +func GetHunkHeaders(r io.Reader) ([]*types.DiffFileHunkHeaders, error) { + scanner := bufio.NewScanner(r) + + var currentFile *types.DiffFileHunkHeaders + var result []*types.DiffFileHunkHeaders + + for scanner.Scan() { + line := scanner.Text() + + if h, ok := ParseDiffFileHeader(line); ok { + if currentFile != nil { + result = append(result, currentFile) + } + currentFile = &types.DiffFileHunkHeaders{ + FileHeader: h, + HunksHeaders: nil, + } + + continue + } + + if currentFile == nil { + // should not happen: we reached the hunk header without first finding the file header. + return nil, types.ErrHunkNotFound + } + + if h, ok := ParseDiffHunkHeader(line); ok { + currentFile.HunksHeaders = append(currentFile.HunksHeaders, h) + continue + } + + if headerKey, headerValue := ParseDiffFileExtendedHeader(line); headerKey != "" { + currentFile.FileHeader.Extensions[headerKey] = headerValue + continue + } + } + + if err := scanner.Err(); err != nil { + return nil, err + } + + if currentFile != nil { + result = append(result, currentFile) + } + + return result, nil +} diff --git a/gitrpc/internal/parser/diff_headers_test.go b/gitrpc/internal/parser/diff_headers_test.go new file mode 100644 index 0000000000..b8c7429c64 --- /dev/null +++ b/gitrpc/internal/parser/diff_headers_test.go @@ -0,0 +1,109 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package parser + +import ( + "strings" + "testing" + + "github.com/harness/gitness/gitrpc/enum" + "github.com/harness/gitness/gitrpc/internal/types" + + "github.com/google/go-cmp/cmp" +) + +func TestGetHunkHeaders(t *testing.T) { + input := `diff --git a/new_file.txt b/new_file.txt +new file mode 100644 +index 0000000..fb0c863 +--- /dev/null ++++ b/new_file.txt +@@ -0,0 +1,3 @@ ++This is a new file ++created for this ++unit test. +diff --git a/old_file_name.txt b/changed_file.txt +index f043b93..e9449b5 100644 +--- a/changed_file.txt ++++ b/changed_file.txt +@@ -7,3 +7,4 @@ + Unchanged line +-Removed line 1 ++Added line 1 ++Added line 2 + Unchanged line +@@ -27,2 +28,3 @@ + Unchanged line ++Added line + Unchanged line +diff --git a/deleted_file.txt b/deleted_file.txt +deleted file mode 100644 +index f043b93..0000000 +--- a/deleted_file.txt ++++ /dev/null +@@ -1,3 +0,0 @@ +-This is content of +-a deleted file +-in git diff output. +` + + got, err := GetHunkHeaders(strings.NewReader(input)) + if err != nil { + t.Errorf("got error: %v", err) + return + } + + want := []*types.DiffFileHunkHeaders{ + { + FileHeader: types.DiffFileHeader{ + OldFileName: "new_file.txt", + NewFileName: "new_file.txt", + Extensions: map[string]string{ + enum.DiffExtHeaderNewFileMode: "100644", + enum.DiffExtHeaderIndex: "0000000..fb0c863", + }, + }, + HunksHeaders: []types.HunkHeader{{OldLine: 0, OldSpan: 0, NewLine: 1, NewSpan: 3}}, + }, + { + FileHeader: types.DiffFileHeader{ + OldFileName: "old_file_name.txt", + NewFileName: "changed_file.txt", + Extensions: map[string]string{ + enum.DiffExtHeaderIndex: "f043b93..e9449b5 100644", + }, + }, + HunksHeaders: []types.HunkHeader{ + {OldLine: 7, OldSpan: 3, NewLine: 7, NewSpan: 4}, + {OldLine: 27, OldSpan: 2, NewLine: 28, NewSpan: 3}, + }, + }, + { + FileHeader: types.DiffFileHeader{ + OldFileName: "deleted_file.txt", + NewFileName: "deleted_file.txt", + Extensions: map[string]string{ + enum.DiffExtHeaderDeletedFileMode: "100644", + enum.DiffExtHeaderIndex: "f043b93..0000000", + }, + }, + HunksHeaders: []types.HunkHeader{{OldLine: 1, OldSpan: 3, NewLine: 0, NewSpan: 0}}, + }, + } + + if diff := cmp.Diff(got, want); diff != "" { + t.Errorf(diff) + } +} diff --git a/gitrpc/internal/parser/hunk.go b/gitrpc/internal/parser/hunk.go new file mode 100644 index 0000000000..6e81ec1331 --- /dev/null +++ b/gitrpc/internal/parser/hunk.go @@ -0,0 +1,51 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
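Editor's note (not part of the patch): the test above pins down the exact structures GetHunkHeaders produces. As a hedged usage illustration, the sketch below pipes output of a real `git diff` invocation straight into the parser; the repository path and revision range are placeholder values, and the snippet assumes it lives in the same parser package.

package parser

import (
	"fmt"
	"os/exec"
)

// ExampleGetHunkHeaders drives the header parser with live `git diff` output.
// Illustrative only; paths and refs are placeholders.
func ExampleGetHunkHeaders() {
	cmd := exec.Command("git", "-C", "/path/to/repo", "diff", "main...feature")
	stdout, err := cmd.StdoutPipe()
	if err != nil {
		panic(err)
	}
	if err := cmd.Start(); err != nil {
		panic(err)
	}

	// GetHunkHeaders only reads headers; the hunk bodies are scanned past.
	files, err := GetHunkHeaders(stdout)
	if err != nil {
		panic(err)
	}
	if err := cmd.Wait(); err != nil {
		panic(err)
	}

	for _, f := range files {
		fmt.Printf("%s -> %s: %d hunk(s)\n",
			f.FileHeader.OldFileName, f.FileHeader.NewFileName, len(f.HunksHeaders))
	}
}
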
+ +package parser + +import ( + "regexp" + "strconv" + + "github.com/harness/gitness/gitrpc/internal/types" +) + +var regExpHunkHeader = regexp.MustCompile(`^@@ -([0-9]+)(,([0-9]+))? \+([0-9]+)(,([0-9]+))? @@( (.+))?$`) + +func ParseDiffHunkHeader(line string) (types.HunkHeader, bool) { + groups := regExpHunkHeader.FindStringSubmatch(line) + if groups == nil { + return types.HunkHeader{}, false + } + + oldLine, _ := strconv.Atoi(groups[1]) + oldSpan := 1 + if groups[3] != "" { + oldSpan, _ = strconv.Atoi(groups[3]) + } + + newLine, _ := strconv.Atoi(groups[4]) + newSpan := 1 + if groups[6] != "" { + newSpan, _ = strconv.Atoi(groups[6]) + } + + return types.HunkHeader{ + OldLine: oldLine, + OldSpan: oldSpan, + NewLine: newLine, + NewSpan: newSpan, + Text: groups[8], + }, true +} diff --git a/gitrpc/internal/service/blame.go b/gitrpc/internal/service/blame.go new file mode 100644 index 0000000000..b410a7c9fe --- /dev/null +++ b/gitrpc/internal/service/blame.go @@ -0,0 +1,94 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package service + +import ( + "errors" + "fmt" + "io" + + "github.com/harness/gitness/gitrpc/internal/types" + "github.com/harness/gitness/gitrpc/rpc" +) + +type BlameService struct { + rpc.UnimplementedBlameServiceServer + adapter GitAdapter + reposRoot string +} + +var _ rpc.BlameServiceServer = (*BlameService)(nil) + +func NewBlameService(adapter GitAdapter, reposRoot string) *BlameService { + return &BlameService{ + adapter: adapter, + reposRoot: reposRoot, + } +} + +func (s BlameService) Blame(request *rpc.BlameRequest, stream rpc.BlameService_BlameServer) error { + ctx := stream.Context() + + repoPath := getFullPathForRepo(s.reposRoot, request.Base.GetRepoUid()) + + reader := s.adapter.Blame(ctx, + repoPath, request.GitRef, request.Path, + int(request.Range.From), int(request.Range.To)) + + for { + part, errRead := reader.NextPart() + + errStream := streamBlamePart(part, stream) + if errStream != nil { + return errStream + } + + if errRead != nil { + if errors.Is(errRead, io.EOF) { + return nil + } + return errRead + } + } +} + +func streamBlamePart( + part *types.BlamePart, stream rpc.BlameService_BlameServer, +) error { + if part == nil { + return nil + } + + commit, errMap := mapGitCommit(&part.Commit) + if errMap != nil { + return fmt.Errorf("failed to map git commit: %w", errMap) + } + + lines := make([][]byte, len(part.Lines)) + for i, line := range part.Lines { + lines[i] = []byte(line) + } + + pack := &rpc.BlamePart{ + Commit: commit, + Lines: lines, + } + + if errStream := stream.Send(pack); errStream != nil { + return errStream + } + + return nil +} diff --git a/gitrpc/internal/service/blob.go b/gitrpc/internal/service/blob.go new file mode 100644 index 0000000000..42b856b95d --- /dev/null +++ b/gitrpc/internal/service/blob.go @@ -0,0 +1,101 @@ +// Copyright 2023 Harness, Inc. 
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package service
+
+import (
+	"errors"
+	"io"
+
+	"github.com/harness/gitness/gitrpc/internal/types"
+	"github.com/harness/gitness/gitrpc/rpc"
+
+	"github.com/rs/zerolog/log"
+)
+
+func (s RepositoryService) GetBlob(request *rpc.GetBlobRequest, stream rpc.RepositoryService_GetBlobServer) error {
+	if err := validateGetBlobRequest(request); err != nil {
+		return err
+	}
+
+	ctx := stream.Context()
+	base := request.GetBase()
+	repoPath := getFullPathForRepo(s.reposRoot, base.GetRepoUid())
+
+	// TODO: do we need to validate request for nil?
+	gitBlob, err := s.adapter.GetBlob(ctx, repoPath, request.GetSha(), request.GetSizeLimit())
+	if err != nil {
+		return processGitErrorf(err, "failed to get blob")
+	}
+	defer func() {
+		dErr := gitBlob.Content.Close()
+		if dErr != nil {
+			log.Ctx(ctx).Warn().Err(dErr).Msgf("failed to close blob content reader.")
+		}
+	}()
+
+	err = stream.Send(&rpc.GetBlobResponse{
+		Data: &rpc.GetBlobResponse_Header{
+			Header: &rpc.GetBlobResponseHeader{
+				Sha:         request.GetSha(),
+				Size:        gitBlob.Size,
+				ContentSize: gitBlob.ContentSize,
+			},
+		},
+	})
+	if err != nil {
+		return ErrInternalf("failed to send header: %w", err)
+	}
+
+	const bufferSize = 16384
+	contentBuffer := make([]byte, bufferSize)
+	for {
+		if ctx.Err() != nil {
+			return ErrCanceledf("the context got canceled while streaming the blob content: %w", ctx.Err())
+		}
+
+		n, rErr := gitBlob.Content.Read(contentBuffer)
+		// according to io.Reader documentation, returned bytes should always be processed before the error
+		// This is crucial to handle cases where n > 0 bytes are returned together with io.EOF
+		if n > 0 {
+			sErr := stream.Send(&rpc.GetBlobResponse{
+				Data: &rpc.GetBlobResponse_Content{
+					Content: contentBuffer[:n],
+				},
+			})
+			if sErr != nil {
+				return ErrInternalf("failed to send content of buffer (potential read error: %s): %w", rErr, sErr)
+			}
+		}
+		if errors.Is(rErr, io.EOF) {
+			break
+		}
+		if rErr != nil {
+			return rErr
+		}
+	}
+
+	return nil
+}
+
+func validateGetBlobRequest(r *rpc.GetBlobRequest) error {
+	if r.GetBase() == nil {
+		return types.ErrBaseCannotBeEmpty
+	}
+	if r.GetSha() == "" {
+		return types.ErrEmptySHA
+	}
+
+	return nil
+}
diff --git a/gitrpc/internal/service/branch.go b/gitrpc/internal/service/branch.go
new file mode 100644
index 0000000000..8edde8cb2e
--- /dev/null
+++ b/gitrpc/internal/service/branch.go
@@ -0,0 +1,289 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
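Editor's note (not part of the patch): the GetBlob loop above relies on the io.Reader contract that data may be returned together with io.EOF, so the n bytes are always forwarded before the read error is checked. A minimal, standalone sketch of that same pattern, with a hypothetical helper name:

package main

import (
	"errors"
	"fmt"
	"io"
	"os"
	"strings"
)

// copyInChunks mirrors the GetBlob read loop: forward whatever n bytes a Read
// returns first, and only then inspect the read error, because a reader may
// hand back data together with io.EOF.
func copyInChunks(dst io.Writer, src io.Reader, bufSize int) (total int64, err error) {
	buf := make([]byte, bufSize)
	for {
		n, rErr := src.Read(buf)
		if n > 0 {
			if _, wErr := dst.Write(buf[:n]); wErr != nil {
				return total, wErr
			}
			total += int64(n)
		}
		if errors.Is(rErr, io.EOF) {
			return total, nil
		}
		if rErr != nil {
			return total, rErr
		}
	}
}

func main() {
	n, err := copyInChunks(os.Stdout, strings.NewReader("hello blob\n"), 4)
	if err != nil {
		panic(err)
	}
	fmt.Printf("copied %d bytes\n", n)
}
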
+
+package service
+
+import (
+	"context"
+	"fmt"
+	"strings"
+
+	"github.com/harness/gitness/gitrpc/check"
+	"github.com/harness/gitness/gitrpc/internal/gitea"
+	"github.com/harness/gitness/gitrpc/internal/types"
+	"github.com/harness/gitness/gitrpc/rpc"
+
+	"code.gitea.io/gitea/modules/git"
+	"github.com/rs/zerolog/log"
+	"google.golang.org/grpc/codes"
+	"google.golang.org/grpc/status"
+)
+
+var listBranchesRefFields = []types.GitReferenceField{types.GitReferenceFieldRefName, types.GitReferenceFieldObjectName}
+
+func (s ReferenceService) CreateBranch(
+	ctx context.Context,
+	request *rpc.CreateBranchRequest,
+) (*rpc.CreateBranchResponse, error) {
+	if err := check.BranchName(request.BranchName); err != nil {
+		return nil, ErrInvalidArgument(err)
+	}
+
+	base := request.GetBase()
+	if base == nil {
+		return nil, types.ErrBaseCannotBeEmpty
+	}
+
+	repoPath := getFullPathForRepo(s.reposRoot, base.GetRepoUid())
+
+	// TODO: why are we using gitea operations here?!
+	repo, err := git.OpenRepository(ctx, repoPath)
+	if err != nil {
+		return nil, processGitErrorf(err, "failed to open repo")
+	}
+
+	if ok, err := repo.IsEmpty(); ok {
+		return nil, ErrInvalidArgumentf("branch cannot be created on empty repository", err)
+	}
+
+	sharedRepo, err := NewSharedRepo(s.tmpDir, base.GetRepoUid(), repo)
+	if err != nil {
+		return nil, processGitErrorf(err, "failed to create new shared repo")
+	}
+	defer sharedRepo.Close(ctx)
+
+	// clone repo (with HEAD branch - target might be anything)
+	err = sharedRepo.Clone(ctx, "")
+	if err != nil {
+		return nil, processGitErrorf(err, "failed to clone shared repo with branch '%s'", request.GetBranchName())
+	}
+
+	_, err = sharedRepo.GetBranchCommit(request.GetBranchName())
+	// return an error if branch already exists (push doesn't fail if it's a noop or fast forward push)
+	if err == nil {
+		return nil, ErrAlreadyExistsf("branch '%s' already exists", request.GetBranchName())
+	}
+	if !git.IsErrNotExist(err) {
+		return nil, processGitErrorf(err, "branch creation of '%s' failed", request.GetBranchName())
+	}
+
+	// get target commit (as target could be branch/tag/commit, and tag can't be pushed using source:destination syntax)
+	targetCommit, err := s.adapter.GetCommit(ctx, sharedRepo.tmpPath, strings.TrimSpace(request.GetTarget()))
+	if git.IsErrNotExist(err) {
+		return nil, ErrNotFoundf("target '%s' doesn't exist", request.GetTarget())
+	}
+	if err != nil {
+		return nil, processGitErrorf(err, "failed to get commit id for target '%s'", request.GetTarget())
+	}
+
+	// push to new branch (all changes should go through push flow for hooks and other safety measures)
+	err = sharedRepo.PushCommitToBranch(ctx, base, targetCommit.SHA, request.GetBranchName())
+	if err != nil {
+		return nil, processGitErrorf(err, "failed to push new branch '%s'", request.GetBranchName())
+	}
+
+	// get branch
+	// TODO: get it from shared repo to avoid opening another gitea repo and having to strip here.
+ gitBranch, err := s.adapter.GetBranch(ctx, repoPath, + strings.TrimPrefix(request.GetBranchName(), gitReferenceNamePrefixBranch)) + if err != nil { + return nil, processGitErrorf(err, "failed to get gitea branch '%s'", request.GetBranchName()) + } + + branch, err := mapGitBranch(gitBranch) + if err != nil { + return nil, err + } + + return &rpc.CreateBranchResponse{ + Branch: branch, + }, nil +} + +func (s ReferenceService) GetBranch(ctx context.Context, + request *rpc.GetBranchRequest) (*rpc.GetBranchResponse, error) { + base := request.GetBase() + if base == nil { + return nil, types.ErrBaseCannotBeEmpty + } + + repoPath := getFullPathForRepo(s.reposRoot, base.GetRepoUid()) + + gitBranch, err := s.adapter.GetBranch(ctx, repoPath, + strings.TrimPrefix(request.GetBranchName(), gitReferenceNamePrefixBranch)) + if err != nil { + return nil, processGitErrorf(err, "failed to get gitea branch '%s'", request.GetBranchName()) + } + + branch, err := mapGitBranch(gitBranch) + if err != nil { + return nil, err + } + + return &rpc.GetBranchResponse{ + Branch: branch, + }, nil +} + +func (s ReferenceService) DeleteBranch(ctx context.Context, + request *rpc.DeleteBranchRequest) (*rpc.DeleteBranchResponse, error) { + base := request.GetBase() + if base == nil { + return nil, types.ErrBaseCannotBeEmpty + } + + repoPath := getFullPathForRepo(s.reposRoot, base.GetRepoUid()) + + // TODO: why are we using gitea operations here?! + repo, err := git.OpenRepository(ctx, repoPath) + if err != nil { + return nil, processGitErrorf(err, "failed to open repo") + } + + sharedRepo, err := NewSharedRepo(s.tmpDir, base.GetRepoUid(), repo) + if err != nil { + return nil, processGitErrorf(err, "failed to create new shared repo") + } + defer sharedRepo.Close(ctx) + + // clone repo (technically we don't care about which branch we clone) + err = sharedRepo.Clone(ctx, request.GetBranchName()) + if err != nil { + return nil, processGitErrorf(err, "failed to clone shared repo with branch '%s'", request.GetBranchName()) + } + + // get latest branch commit before we delete + gitCommit, err := sharedRepo.GetBranchCommit(request.GetBranchName()) + if err != nil { + return nil, processGitErrorf(err, "failed to get gitea commit for branch '%s'", request.GetBranchName()) + } + + // push to new branch (all changes should go through push flow for hooks and other safety meassures) + // NOTE: setting sourceRef to empty will delete the remote branch when pushing: + // https://git-scm.com/docs/git-push#Documentation/git-push.txt-ltrefspecgt82308203 + err = sharedRepo.PushDeleteBranch(ctx, base, request.GetBranchName()) + if err != nil { + return nil, processGitErrorf(err, "failed to delete branch '%s' from remote repo", request.GetBranchName()) + } + + return &rpc.DeleteBranchResponse{ + Sha: gitCommit.ID.String(), + }, nil +} + +func (s ReferenceService) ListBranches(request *rpc.ListBranchesRequest, + stream rpc.ReferenceService_ListBranchesServer) error { + base := request.GetBase() + if base == nil { + return types.ErrBaseCannotBeEmpty + } + + ctx := stream.Context() + repoPath := getFullPathForRepo(s.reposRoot, base.GetRepoUid()) + + // get all required information from git references + branches, err := s.listBranchesLoadReferenceData(ctx, repoPath, request) + if err != nil { + return err + } + + // get commits if needed (single call for perf savings: 1s-4s vs 5s-20s) + if request.GetIncludeCommit() { + commitSHAs := make([]string, len(branches)) + for i := range branches { + commitSHAs[i] = branches[i].Sha + } + + var gitCommits 
[]types.Commit + gitCommits, err = s.adapter.GetCommits(ctx, repoPath, commitSHAs) + if err != nil { + return status.Errorf(codes.Internal, "failed to get commits: %v", err) + } + + for i := range gitCommits { + branches[i].Commit, err = mapGitCommit(&gitCommits[i]) + if err != nil { + return err + } + } + } + + // send out all branches + for _, branch := range branches { + err = stream.Send(&rpc.ListBranchesResponse{ + Branch: branch, + }) + if err != nil { + return status.Errorf(codes.Internal, "failed to send branch: %v", err) + } + } + + return nil +} + +func (s ReferenceService) listBranchesLoadReferenceData(ctx context.Context, + repoPath string, request *rpc.ListBranchesRequest) ([]*rpc.Branch, error) { + // TODO: can we be smarter with slice allocation + branches := make([]*rpc.Branch, 0, 16) + handler := listBranchesWalkReferencesHandler(&branches) + instructor, endsAfter, err := wrapInstructorWithOptionalPagination( + gitea.DefaultInstructor, // branches only have one target type, default instructor is enough + request.GetPage(), + request.GetPageSize()) + if err != nil { + return nil, status.Errorf(codes.InvalidArgument, "invalid pagination details: %v", err) + } + + opts := &types.WalkReferencesOptions{ + Patterns: createReferenceWalkPatternsFromQuery(gitReferenceNamePrefixBranch, request.GetQuery()), + Sort: mapListBranchesSortOption(request.Sort), + Order: mapSortOrder(request.Order), + Fields: listBranchesRefFields, + Instructor: instructor, + // we don't do any post-filtering, restrict git to only return as many elements as pagination needs. + MaxWalkDistance: endsAfter, + } + + err = s.adapter.WalkReferences(ctx, repoPath, handler, opts) + if err != nil { + return nil, processGitErrorf(err, "failed to walk branch references") + } + + log.Ctx(ctx).Trace().Msgf("git adapter returned %d branches", len(branches)) + + return branches, nil +} + +func listBranchesWalkReferencesHandler(branches *[]*rpc.Branch) types.WalkReferencesHandler { + return func(e types.WalkReferencesEntry) error { + fullRefName, ok := e[types.GitReferenceFieldRefName] + if !ok { + return fmt.Errorf("entry missing reference name") + } + objectSHA, ok := e[types.GitReferenceFieldObjectName] + if !ok { + return fmt.Errorf("entry missing object sha") + } + + branch := &rpc.Branch{ + Name: fullRefName[len(gitReferenceNamePrefixBranch):], + Sha: objectSHA, + } + + // TODO: refactor to not use slice pointers? + *branches = append(*branches, branch) + + return nil + } +} diff --git a/gitrpc/internal/service/commit.go b/gitrpc/internal/service/commit.go new file mode 100644 index 0000000000..9a3bb46e6d --- /dev/null +++ b/gitrpc/internal/service/commit.go @@ -0,0 +1,179 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
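Editor's note (not part of the patch): listBranchesWalkReferencesHandler above shows the handler shape that WalkReferences expects. A hypothetical variation, assuming the same package and imports as branch.go, that only counts matching branch references instead of materializing rpc.Branch values:

// countBranchesHandler is an illustrative handler: it validates that each walked
// entry carries a reference name and increments a counter, nothing more.
func countBranchesHandler(count *int) types.WalkReferencesHandler {
	return func(e types.WalkReferencesEntry) error {
		if _, ok := e[types.GitReferenceFieldRefName]; !ok {
			return fmt.Errorf("entry missing reference name")
		}
		*count++
		return nil
	}
}

// Possible use (sketch): var n int; err := s.adapter.WalkReferences(ctx, repoPath, countBranchesHandler(&n), opts)
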
+ +package service + +import ( + "context" + "strconv" + + "github.com/harness/gitness/gitrpc/internal/types" + "github.com/harness/gitness/gitrpc/rpc" + + "github.com/rs/zerolog/log" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/metadata" + "google.golang.org/grpc/status" +) + +func (s RepositoryService) GetCommit(ctx context.Context, + request *rpc.GetCommitRequest) (*rpc.GetCommitResponse, error) { + base := request.GetBase() + if base == nil { + return nil, types.ErrBaseCannotBeEmpty + } + + // ensure the provided SHA is valid (and not a reference) + sha := request.GetSha() + if !isValidGitSHA(sha) { + return nil, status.Errorf(codes.InvalidArgument, "the provided commit sha '%s' is of invalid format.", sha) + } + + repoPath := getFullPathForRepo(s.reposRoot, base.GetRepoUid()) + + gitCommit, err := s.adapter.GetCommit(ctx, repoPath, sha) + if err != nil { + return nil, processGitErrorf(err, "failed to get commit") + } + + commit, err := mapGitCommit(gitCommit) + if err != nil { + return nil, status.Errorf(codes.Internal, "failed to map git commit: %v", err) + } + + return &rpc.GetCommitResponse{ + Commit: commit, + }, nil +} + +func (s RepositoryService) ListCommits(request *rpc.ListCommitsRequest, + stream rpc.RepositoryService_ListCommitsServer) error { + base := request.GetBase() + if base == nil { + return types.ErrBaseCannotBeEmpty + } + + ctx := stream.Context() + repoPath := getFullPathForRepo(s.reposRoot, base.GetRepoUid()) + + gitCommits, renameDetails, err := s.adapter.ListCommits(ctx, repoPath, request.GetGitRef(), + int(request.GetPage()), int(request.GetLimit()), types.CommitFilter{AfterRef: request.After, + Path: request.Path, + Since: request.Since, + Until: request.Until, + Committer: request.Committer}) + if err != nil { + return processGitErrorf(err, "failed to get list of commits") + } + + // try to get total commits between gitref and After refs + totalCommits := 0 + if request.Page == 1 && len(gitCommits) < int(request.Limit) { + totalCommits = len(gitCommits) + } else if request.After != "" && request.GitRef != request.After { + div, err := s.adapter.GetCommitDivergences(ctx, repoPath, []types.CommitDivergenceRequest{ + {From: request.GitRef, To: request.After}, + }, 0) + if err != nil { + return processGitErrorf(err, "failed to get total commits") + } + if len(div) > 0 { + totalCommits = int(div[0].Ahead) + } + } + + log.Ctx(ctx).Trace().Msgf("git adapter returned %d commits", len(gitCommits)) + header := metadata.New(map[string]string{"total-commits": strconv.Itoa(totalCommits)}) + if err := stream.SendHeader(header); err != nil { + return ErrInternalf("unable to send 'total-commits' header", err) + } + + for i := range gitCommits { + var commit *rpc.Commit + commit, err = mapGitCommit(&gitCommits[i]) + if err != nil { + return status.Errorf(codes.Internal, "failed to map git commit: %v", err) + } + + err = stream.Send(&rpc.ListCommitsResponse{ + Commit: commit, + RenameDetails: mapRenameDetails(renameDetails), + }) + if err != nil { + return status.Errorf(codes.Internal, "failed to send commit: %v", err) + } + } + + return nil +} + +func (s RepositoryService) GetCommitDivergences(ctx context.Context, + request *rpc.GetCommitDivergencesRequest) (*rpc.GetCommitDivergencesResponse, error) { + base := request.GetBase() + if base == nil { + return nil, types.ErrBaseCannotBeEmpty + } + + repoPath := getFullPathForRepo(s.reposRoot, base.GetRepoUid()) + + // map to gitea requests + requests := request.GetRequests() + if requests == nil { + return nil, 
status.Error(codes.InvalidArgument, "requests is nil") + } + giteaDivergenceRequests := make([]types.CommitDivergenceRequest, len(requests)) + for i := range requests { + if requests[i] == nil { + return nil, status.Errorf(codes.InvalidArgument, "requests[%d] is nil", i) + } + giteaDivergenceRequests[i].From = requests[i].From + giteaDivergenceRequests[i].To = requests[i].To + } + + // call gitea + giteaDivergenceResponses, err := s.adapter.GetCommitDivergences(ctx, repoPath, + giteaDivergenceRequests, request.GetMaxCount()) + if err != nil { + return nil, processGitErrorf(err, "failed to get diverging commits") + } + + // map to rpc response + response := &rpc.GetCommitDivergencesResponse{ + Divergences: make([]*rpc.CommitDivergence, len(giteaDivergenceResponses)), + } + for i := range giteaDivergenceResponses { + response.Divergences[i] = &rpc.CommitDivergence{ + Ahead: giteaDivergenceResponses[i].Ahead, + Behind: giteaDivergenceResponses[i].Behind, + } + } + + return response, nil +} + +func (s RepositoryService) MergeBase(ctx context.Context, + r *rpc.MergeBaseRequest, +) (*rpc.MergeBaseResponse, error) { + base := r.GetBase() + repoPath := getFullPathForRepo(s.reposRoot, base.GetRepoUid()) + + mergeBase, _, err := s.adapter.GetMergeBase(ctx, repoPath, "", r.Ref1, r.Ref2) + if err != nil { + return nil, processGitErrorf(err, "failed to find merge base") + } + + return &rpc.MergeBaseResponse{ + MergeBaseSha: mergeBase, + }, nil +} diff --git a/gitrpc/internal/service/diff.go b/gitrpc/internal/service/diff.go new file mode 100644 index 0000000000..e924a8a55c --- /dev/null +++ b/gitrpc/internal/service/diff.go @@ -0,0 +1,259 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
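Editor's note (not part of the patch): the adapter's GetCommitDivergences implementation is not shown in this file; the standalone sketch below only illustrates the ahead/behind notion the RPC exposes, using plain git. "Ahead" counts commits reachable only from 'from', "behind" counts commits reachable only from 'to'; repository path and refs are placeholders.

package main

import (
	"context"
	"fmt"
	"os/exec"
	"strconv"
	"strings"
)

// aheadBehind shells out to `git rev-list --left-right --count from...to`,
// which prints two numbers: commits only on the left (from) side and commits
// only on the right (to) side.
func aheadBehind(ctx context.Context, repoPath, from, to string) (ahead, behind int, err error) {
	out, err := exec.CommandContext(ctx, "git", "-C", repoPath,
		"rev-list", "--left-right", "--count", from+"..."+to).Output()
	if err != nil {
		return 0, 0, fmt.Errorf("git rev-list failed: %w", err)
	}

	fields := strings.Fields(string(out)) // "<left> <right>"
	if len(fields) != 2 {
		return 0, 0, fmt.Errorf("unexpected rev-list output: %q", out)
	}
	if ahead, err = strconv.Atoi(fields[0]); err != nil {
		return 0, 0, err
	}
	if behind, err = strconv.Atoi(fields[1]); err != nil {
		return 0, 0, err
	}
	return ahead, behind, nil
}

func main() {
	ahead, behind, err := aheadBehind(context.Background(), "/path/to/repo", "feature", "main")
	if err != nil {
		panic(err)
	}
	fmt.Printf("feature is %d ahead and %d behind main\n", ahead, behind)
}
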
+ +package service + +import ( + "bufio" + "bytes" + "context" + "fmt" + "io" + + "github.com/harness/gitness/gitrpc/diff" + "github.com/harness/gitness/gitrpc/internal/streamio" + "github.com/harness/gitness/gitrpc/internal/types" + "github.com/harness/gitness/gitrpc/rpc" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +type DiffService struct { + rpc.UnimplementedDiffServiceServer + adapter GitAdapter + reposRoot string + reposTempDir string +} + +func NewDiffService(adapter GitAdapter, reposRoot string, reposTempDir string) (*DiffService, error) { + return &DiffService{ + adapter: adapter, + reposRoot: reposRoot, + reposTempDir: reposTempDir, + }, nil +} + +func (s DiffService) RawDiff(request *rpc.DiffRequest, stream rpc.DiffService_RawDiffServer) error { + + sw := streamio.NewWriter(func(p []byte) error { + return stream.Send(&rpc.RawDiffResponse{Data: p}) + }) + + return s.rawDiff(stream.Context(), request, sw) +} + +func (s DiffService) rawDiff(ctx context.Context, request *rpc.DiffRequest, w io.Writer) error { + err := validateDiffRequest(request) + if err != nil { + return err + } + + base := request.GetBase() + repoPath := getFullPathForRepo(s.reposRoot, base.GetRepoUid()) + + err = s.adapter.RawDiff(ctx, repoPath, request.GetBaseRef(), request.GetHeadRef(), request.MergeBase, w) + if err != nil { + return processGitErrorf(err, "failed to fetch diff "+ + "between %s and %s", request.GetBaseRef(), request.GetHeadRef()) + } + return nil +} + +func (s DiffService) CommitDiff(request *rpc.CommitDiffRequest, stream rpc.DiffService_CommitDiffServer) error { + err := validateCommitDiffRequest(request) + if err != nil { + return err + } + + base := request.GetBase() + repoPath := getFullPathForRepo(s.reposRoot, base.GetRepoUid()) + + sw := streamio.NewWriter(func(p []byte) error { + return stream.Send(&rpc.CommitDiffResponse{Data: p}) + }) + + return s.adapter.CommitDiff(stream.Context(), repoPath, request.Sha, sw) +} + +func validateDiffRequest(in *rpc.DiffRequest) error { + if in.GetBase() == nil { + return types.ErrBaseCannotBeEmpty + } + if in.GetBaseRef() == "" { + return types.ErrEmptyBaseRef + } + if in.GetHeadRef() == "" { + return types.ErrEmptyHeadRef + } + + return nil +} + +func validateCommitDiffRequest(in *rpc.CommitDiffRequest) error { + if in.Base == nil { + return types.ErrBaseCannotBeEmpty + } + + if !isValidGitSHA(in.Sha) { + return status.Errorf(codes.InvalidArgument, "the provided commit sha '%s' is of invalid format.", in.Sha) + } + + return nil +} + +func (s DiffService) DiffShortStat(ctx context.Context, r *rpc.DiffRequest) (*rpc.DiffShortStatResponse, error) { + err := validateDiffRequest(r) + if err != nil { + return nil, fmt.Errorf("failed to validate request for short diff statistic, error: %v", err) + } + + base := r.GetBase() + repoPath := getFullPathForRepo(s.reposRoot, base.GetRepoUid()) + + stat, err := s.adapter.DiffShortStat(ctx, repoPath, r.GetBaseRef(), r.GetHeadRef(), r.GetMergeBase()) + if err != nil { + return nil, processGitErrorf(err, "failed to fetch short statistics "+ + "between %s and %s", r.GetBaseRef(), r.GetHeadRef()) + } + + return &rpc.DiffShortStatResponse{ + Files: int32(stat.Files), + Additions: int32(stat.Additions), + Deletions: int32(stat.Deletions), + }, nil +} + +func (s DiffService) GetDiffHunkHeaders( + ctx context.Context, + r *rpc.GetDiffHunkHeadersRequest, +) (*rpc.GetDiffHunkHeadersResponse, error) { + base := r.GetBase() + repoPath := getFullPathForRepo(s.reposRoot, base.GetRepoUid()) + + 
hunkHeaders, err := s.adapter.GetDiffHunkHeaders(ctx, repoPath, r.TargetCommitSha, r.SourceCommitSha) + if err != nil { + return nil, processGitErrorf(err, "failed to get diff hunk headers between two commits") + } + + return &rpc.GetDiffHunkHeadersResponse{ + Files: mapDiffFileHunkHeaders(hunkHeaders), + }, nil +} + +func (s DiffService) DiffCut( + ctx context.Context, + r *rpc.DiffCutRequest, +) (*rpc.DiffCutResponse, error) { + base := r.GetBase() + repoPath := getFullPathForRepo(s.reposRoot, base.GetRepoUid()) + + mergeBase, _, err := s.adapter.GetMergeBase(ctx, repoPath, "", r.TargetBranch, r.SourceBranch) + if err != nil { + return nil, processGitErrorf(err, "failed to find merge base") + } + + sourceCommits, err := s.adapter.ListCommitSHAs(ctx, repoPath, r.SourceBranch, 0, 1, + types.CommitFilter{AfterRef: r.TargetBranch}) + if err != nil || len(sourceCommits) == 0 { + return nil, processGitErrorf(err, "failed to get list of source branch commits") + } + + diffHunkHeader, linesHunk, err := s.adapter.DiffCut(ctx, + repoPath, + r.TargetCommitSha, r.SourceCommitSha, + r.Path, + types.DiffCutParams{ + LineStart: int(r.LineStart), + LineStartNew: r.LineStartNew, + LineEnd: int(r.LineEnd), + LineEndNew: r.LineEndNew, + BeforeLines: 2, + AfterLines: 2, + LineLimit: 40, + }) + if err != nil { + return nil, processGitErrorf(err, "failed to get diff hunk") + } + + return &rpc.DiffCutResponse{ + HunkHeader: mapHunkHeader(diffHunkHeader), + LinesHeader: linesHunk.HunkHeader.String(), + Lines: linesHunk.Lines, + MergeBaseSha: mergeBase, + LatestSourceSha: sourceCommits[0], + }, nil +} + +func (s DiffService) Diff(request *rpc.DiffRequest, stream rpc.DiffService_DiffServer) error { + done := make(chan bool) + defer close(done) + + pr, pw := io.Pipe() + defer pr.Close() + + parser := diff.Parser{ + Reader: bufio.NewReader(pr), + } + + go func() { + defer pw.Close() + err := s.rawDiff(stream.Context(), request, pw) + if err != nil { + return + } + }() + + return parser.Parse(func(f *diff.File) { + streamDiffFile(f, request.IncludePatch, stream) + }) +} + +func streamDiffFile(f *diff.File, includePatch bool, stream rpc.DiffService_DiffServer) { + var status rpc.DiffResponse_FileStatus + switch f.Type { + case diff.FileAdd: + status = rpc.DiffResponse_ADDED + case diff.FileChange: + status = rpc.DiffResponse_MODIFIED + case diff.FileDelete: + status = rpc.DiffResponse_DELETED + case diff.FileRename: + status = rpc.DiffResponse_RENAMED + default: + status = rpc.DiffResponse_UNDEFINED + } + + patch := bytes.Buffer{} + if includePatch { + for _, sec := range f.Sections { + for _, line := range sec.Lines { + if line.Type != diff.DiffLinePlain { + patch.WriteString(line.Content) + } + } + } + } + + stream.Send(&rpc.DiffResponse{ + Path: f.Path, + OldPath: f.OldPath, + Sha: f.SHA, + OldSha: f.OldSHA, + Status: status, + Additions: int32(f.NumAdditions()), + Deletions: int32(f.NumDeletions()), + Changes: int32(f.NumChanges()), + Patch: patch.Bytes(), + }) +} diff --git a/gitrpc/internal/service/env.go b/gitrpc/internal/service/env.go new file mode 100644 index 0000000000..404d66860b --- /dev/null +++ b/gitrpc/internal/service/env.go @@ -0,0 +1,22 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package service + +const ( + EnvActorName = "GITRPC_ACTOR_NAME" + EnvActorEmail = "GITRPC_ACTOR_EMAIL" + EnvRepoUID = "GITRPC_REPO_UID" + EnvRequestID = "GITRPC_REQUEST_ID" +) diff --git a/gitrpc/internal/service/errors.go b/gitrpc/internal/service/errors.go new file mode 100644 index 0000000000..9c41701e1c --- /dev/null +++ b/gitrpc/internal/service/errors.go @@ -0,0 +1,238 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package service + +import ( + "errors" + "fmt" + "strings" + + "github.com/harness/gitness/gitrpc/internal/types" + "github.com/harness/gitness/gitrpc/rpc" + + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" + "google.golang.org/protobuf/proto" + "google.golang.org/protobuf/types/known/anypb" +) + +type Error struct { + Code codes.Code + Message string + Err error + details []proto.Message +} + +func (e *Error) Error() string { + return fmt.Sprintf("%s, err: %v", e.Message, e.Err.Error()) +} + +func (e *Error) Status() (*status.Status, error) { + st := status.New(e.Code, e.Message) + if len(e.details) == 0 { + return st, nil + } + // add details + proto := st.Proto() + for _, detail := range e.details { + marshaled, err := anypb.New(detail) + if err != nil { + return nil, err + } + + proto.Details = append(proto.Details, marshaled) + } + return status.FromProto(proto), nil +} + +func (e *Error) Details() any { + return e.details +} + +func (e *Error) Unwrap() error { + return e.Err +} + +// Errorf generates new Error with status code and custom arguments. +// args can contain format args and additional arg like err which will be logged +// by middleware and details object type of map. Ordering of args element +// should first process format args and then error or detail. +func Errorf(code codes.Code, format string, args ...any) (err error) { + details := make([]proto.Message, 0, 8) + newargs := make([]any, 0, len(args)) + + for _, arg := range args { + if arg == nil { + continue + } + switch t := arg.(type) { + case error: + err = t + case proto.Message: + details = append(details, t) + default: + newargs = append(newargs, arg) + } + } + + return &Error{ + Code: code, + Message: fmt.Sprintf(format, newargs...), + Err: err, + details: details, + } +} + +func wrapError(code codes.Code, err error) error { + var e *Error + if errors.As(err, &e) { + return err + } + return &Error{ + Code: code, + Message: err.Error(), + Err: err, + } +} + +// ErrCanceled wraps err with codes.Canceled, unless err is already a Error error. 
+func ErrCanceled(err error) error { return wrapError(codes.Canceled, err) } + +// ErrDeadlineExceeded wraps err with codes.DeadlineExceeded, unless err is already a Error error. +func ErrDeadlineExceeded(err error) error { return wrapError(codes.DeadlineExceeded, err) } + +// ErrInternal wraps err with codes.Internal, unless err is already a Error error. +func ErrInternal(err error) error { return wrapError(codes.Internal, err) } + +// ErrInvalidArgument wraps err with codes.InvalidArgument, unless err is already a Error error. +func ErrInvalidArgument(err error) error { return wrapError(codes.InvalidArgument, err) } + +// ErrNotFound wraps error with codes.NotFound, unless err is already a Error error. +func ErrNotFound(err error) error { return wrapError(codes.NotFound, err) } + +// ErrFailedPrecondition wraps err with codes.FailedPrecondition, unless err is already a Error +// error. +func ErrFailedPrecondition(err error) error { return wrapError(codes.FailedPrecondition, err) } + +// ErrUnavailable wraps err with codes.Unavailable, unless err is already a gRPC error. +func ErrUnavailable(err error) error { return wrapError(codes.Unavailable, err) } + +// ErrPermissionDenied wraps err with codes.PermissionDenied, unless err is already a Error error. +func ErrPermissionDenied(err error) error { return wrapError(codes.PermissionDenied, err) } + +// ErrAlreadyExists wraps err with codes.AlreadyExists, unless err is already a Error error. +func ErrAlreadyExists(err error) error { return wrapError(codes.AlreadyExists, err) } + +// ErrAborted wraps err with codes.Aborted, unless err is already a Error type. +func ErrAborted(err error) error { return wrapError(codes.Aborted, err) } + +// ErrCanceledf wraps a formatted error with codes.Canceled, unless the formatted error is a +// wrapped Error error. +func ErrCanceledf(format string, a ...interface{}) error { + return Errorf(codes.Canceled, format, a...) +} + +// ErrDeadlineExceededf wraps a formatted error with codes.DeadlineExceeded, unless the formatted +// error is a wrapped Error error. +func ErrDeadlineExceededf(format string, a ...interface{}) error { + return Errorf(codes.DeadlineExceeded, format, a...) +} + +// ErrInternalf wraps a formatted error with codes.Internal, unless the formatted error is a +// wrapped Error error. +func ErrInternalf(format string, a ...interface{}) error { + return Errorf(codes.Internal, format, a...) +} + +// ErrInvalidArgumentf wraps a formatted error with codes.InvalidArgument, unless the formatted +// error is a wrapped Error error. +func ErrInvalidArgumentf(format string, a ...interface{}) error { + return Errorf(codes.InvalidArgument, format, a...) +} + +// ErrNotFoundf wraps a formatted error with codes.NotFound, unless the +// formatted error is a wrapped Error error. +func ErrNotFoundf(format string, a ...interface{}) error { + return Errorf(codes.NotFound, format, a...) +} + +// ErrFailedPreconditionf wraps a formatted error with codes.FailedPrecondition, unless the +// formatted error is a wrapped Error error. +func ErrFailedPreconditionf(format string, a ...interface{}) error { + return Errorf(codes.FailedPrecondition, format, a...) +} + +// ErrUnavailablef wraps a formatted error with codes.Unavailable, unless the +// formatted error is a wrapped Error error. +func ErrUnavailablef(format string, a ...interface{}) error { + return Errorf(codes.Unavailable, format, a...) 
+} + +// ErrPermissionDeniedf wraps a formatted error with codes.PermissionDenied, unless the formatted +// error is a wrapped Error error. +func ErrPermissionDeniedf(format string, a ...interface{}) error { + return Errorf(codes.PermissionDenied, format, a...) +} + +// ErrAlreadyExistsf wraps a formatted error with codes.AlreadyExists, unless the formatted error is +// a wrapped Error error. +func ErrAlreadyExistsf(format string, a ...interface{}) error { + return Errorf(codes.AlreadyExists, format, a...) +} + +// ErrAbortedf wraps a formatted error with codes.Aborted, unless the formatted error is a wrapped +// Error error. +func ErrAbortedf(format string, a ...interface{}) error { + return Errorf(codes.Aborted, format, a...) +} + +// processGitErrorf translates error. +func processGitErrorf(err error, format string, args ...interface{}) error { + var ( + cferr *types.MergeConflictsError + pferr *types.PathNotFoundError + ) + const nl = "\n" + // when we add err as argument it will be part of the new error + args = append(args, err) + switch { + case errors.Is(err, types.ErrNotFound), + errors.Is(err, types.ErrSHADoesNotMatch), + errors.Is(err, types.ErrHunkNotFound): + return ErrNotFound(err) + case errors.As(err, &pferr): + rpcErr := &rpc.PathNotFoundError{ + Path: pferr.Path, + } + return ErrNotFoundf("failed to find path", rpcErr, err) + case errors.Is(err, types.ErrAlreadyExists): + return ErrAlreadyExists(err) + case errors.Is(err, types.ErrInvalidArgument): + return ErrInvalidArgument(err) + case errors.As(err, &cferr): + stdout := strings.Trim(cferr.StdOut, nl) + conflictingFiles := strings.Split(stdout, nl) + files := &rpc.MergeConflictError{ + ConflictingFiles: conflictingFiles, + } + return ErrFailedPreconditionf("merging failed due to conflicting changes with the target branch", files, err) + case types.IsMergeUnrelatedHistoriesError(err): + return ErrFailedPrecondition(err) + case errors.Is(err, types.ErrFailedToConnect): + return ErrInvalidArgument(err) + default: + return ErrInternalf(format, args...) + } +} diff --git a/gitrpc/internal/service/http.go b/gitrpc/internal/service/http.go new file mode 100644 index 0000000000..18d3df5271 --- /dev/null +++ b/gitrpc/internal/service/http.go @@ -0,0 +1,182 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
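Editor's note (not part of the patch): the Errorf helper above sorts its variadic arguments by type rather than by position, so a caller can mix format arguments, a wrapped cause, and a proto detail in one call. A hypothetical caller, assuming the same package and the rpc.PathNotFoundError detail used by processGitErrorf:

// lookupPathOrError is illustrative only: the "%s" verb consumes the path,
// while err becomes the wrapped cause and detail is attached as a status detail.
func lookupPathOrError(path string, lookup func(string) error) error {
	if err := lookup(path); err != nil {
		detail := &rpc.PathNotFoundError{Path: path}
		return ErrNotFoundf("failed to find path '%s'", path, err, detail)
	}
	return nil
}
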
+ +package service + +import ( + "bytes" + "context" + "fmt" + "io" + "regexp" + "strconv" + "strings" + + "github.com/harness/gitness/gitrpc/internal/streamio" + "github.com/harness/gitness/gitrpc/internal/types" + "github.com/harness/gitness/gitrpc/rpc" + + "code.gitea.io/gitea/modules/git" + "github.com/rs/zerolog/log" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +var safeGitProtocolHeader = regexp.MustCompile(`^[0-9a-zA-Z]+=[0-9a-zA-Z]+(:[0-9a-zA-Z]+=[0-9a-zA-Z]+)*$`) + +type SmartHTTPService struct { + rpc.UnimplementedSmartHTTPServiceServer + adapter GitAdapter + reposRoot string +} + +func NewHTTPService(adapter GitAdapter, reposRoot string) (*SmartHTTPService, error) { + return &SmartHTTPService{ + adapter: adapter, + reposRoot: reposRoot, + }, nil +} + +func (s *SmartHTTPService) InfoRefs( + request *rpc.InfoRefsRequest, + stream rpc.SmartHTTPService_InfoRefsServer, +) error { + ctx := stream.Context() + base := request.GetBase() + if base == nil { + return types.ErrBaseCannotBeEmpty + } + + // NOTE: Don't include os.Environ() as we don't have control over it - define everything explicitly + environ := []string{} + if request.GitProtocol != "" { + environ = append(environ, "GIT_PROTOCOL="+request.GitProtocol) + } + + repoPath := getFullPathForRepo(s.reposRoot, base.GetRepoUid()) + + w := streamio.NewWriter(func(p []byte) error { + return stream.Send(&rpc.InfoRefsResponse{Data: p}) + }) + + cmd := &bytes.Buffer{} + if err := git.NewCommand(ctx, request.GetService(), "--stateless-rpc", "--advertise-refs", "."). + Run(&git.RunOpts{ + Env: environ, + Dir: repoPath, + Stdout: cmd, + }); err != nil { + return status.Errorf(codes.Internal, "InfoRefsUploadPack: cmd: %v", err) + } + if _, err := w.Write(packetWrite("# service=git-" + request.GetService() + "\n")); err != nil { + return status.Errorf(codes.Internal, "InfoRefsUploadPack: pktLine: %v", err) + } + + if _, err := w.Write([]byte("0000")); err != nil { + return status.Errorf(codes.Internal, "InfoRefsUploadPack: flush: %v", err) + } + + if _, err := io.Copy(w, cmd); err != nil { + return status.Errorf(codes.Internal, "InfoRefsUploadPack: %v", err) + } + return nil +} + +func (s *SmartHTTPService) ServicePack(stream rpc.SmartHTTPService_ServicePackServer) error { + ctx := stream.Context() + // Get basic repo data + request, err := stream.Recv() + if err != nil { + return err + } + // if client sends data as []byte raise error, needs reader + if request.GetData() != nil { + return status.Errorf(codes.InvalidArgument, "ServicePack(): non-empty Data") + } + + // ensure we have the correct base type that matches the services to be triggered + var repoUID string + switch request.GetService() { + case rpc.ServiceUploadPack: + if request.GetReadBase() == nil { + return status.Errorf(codes.InvalidArgument, "ServicePack(): read base is missing for upload-pack") + } + repoUID = request.GetReadBase().GetRepoUid() + case rpc.ServiceReceivePack: + if request.GetWriteBase() == nil { + return status.Errorf(codes.InvalidArgument, "ServicePack(): write base is missing for receive-pack") + } + repoUID = request.GetWriteBase().GetRepoUid() + default: + return status.Errorf(codes.InvalidArgument, "ServicePack(): unsupported service '%s'", request.GetService()) + } + + repoPath := getFullPathForRepo(s.reposRoot, repoUID) + + stdin := streamio.NewReader(func() ([]byte, error) { + resp, streamErr := stream.Recv() + return resp.GetData(), streamErr + }) + + stdout := streamio.NewWriter(func(p []byte) error { + return 
stream.Send(&rpc.ServicePackResponse{Data: p}) + }) + + return serviceRPC(ctx, stdin, stdout, request, repoPath) +} + +func serviceRPC(ctx context.Context, stdin io.Reader, stdout io.Writer, + request *rpc.ServicePackRequest, dir string) error { + protocol := request.GetGitProtocol() + service := request.GetService() + + // NOTE: Don't include os.Environ() as we don't have control over it - define everything explicitly + environ := []string{} + if request.GetWriteBase() != nil { + // in case of a write operation inject the provided environment variables + environ = CreateEnvironmentForPush(ctx, request.GetWriteBase()) + } + // set this for allow pre-receive and post-receive execute + environ = append(environ, "SSH_ORIGINAL_COMMAND="+service) + + if protocol != "" && safeGitProtocolHeader.MatchString(protocol) { + environ = append(environ, "GIT_PROTOCOL="+protocol) + } + + var ( + stderr bytes.Buffer + ) + cmd := git.NewCommand(ctx, service, "--stateless-rpc", dir) + cmd.SetDescription(fmt.Sprintf("%s %s %s [repo_path: %s]", git.GitExecutable, service, "--stateless-rpc", dir)) + err := cmd.Run(&git.RunOpts{ + Dir: dir, + Env: environ, + Stdout: stdout, + Stdin: stdin, + Stderr: &stderr, + UseContextTimeout: true, + }) + if err != nil && err.Error() != "signal: killed" { + log.Ctx(ctx).Err(err).Msgf("Fail to serve RPC(%s) in %s: %v - %s", service, dir, err, stderr.String()) + } + return err +} + +func packetWrite(str string) []byte { + s := strconv.FormatInt(int64(len(str)+4), 16) + if len(s)%4 != 0 { + s = strings.Repeat("0", 4-len(s)%4) + s + } + return []byte(s + str) +} diff --git a/gitrpc/internal/service/interface.go b/gitrpc/internal/service/interface.go new file mode 100644 index 0000000000..8d8d5e8e6a --- /dev/null +++ b/gitrpc/internal/service/interface.go @@ -0,0 +1,113 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package service + +import ( + "context" + "io" + + "github.com/harness/gitness/gitrpc/enum" + "github.com/harness/gitness/gitrpc/internal/types" +) + +// GitAdapter for accessing git commands from gitea. 
+type GitAdapter interface { + InitRepository(ctx context.Context, path string, bare bool) error + Config(ctx context.Context, repoPath, key, value string) error + SetDefaultBranch(ctx context.Context, repoPath string, defaultBranch string, allowEmpty bool) error + GetDefaultBranch(ctx context.Context, repoPath string) (string, error) + GetRemoteDefaultBranch(ctx context.Context, remoteURL string) (string, error) + Clone(ctx context.Context, from, to string, opts types.CloneRepoOptions) error + AddFiles(repoPath string, all bool, files ...string) error + Commit(ctx context.Context, repoPath string, opts types.CommitChangesOptions) error + Push(ctx context.Context, repoPath string, opts types.PushOptions) error + ReadTree(ctx context.Context, repoPath, ref string, w io.Writer, args ...string) error + GetTreeNode(ctx context.Context, repoPath string, ref string, treePath string) (*types.TreeNode, error) + ListTreeNodes(ctx context.Context, repoPath string, ref string, treePath string) ([]types.TreeNode, error) + PathsDetails(ctx context.Context, repoPath string, ref string, paths []string) ([]types.PathDetails, error) + GetSubmodule(ctx context.Context, repoPath string, ref string, treePath string) (*types.Submodule, error) + GetBlob(ctx context.Context, repoPath string, sha string, sizeLimit int64) (*types.BlobReader, error) + WalkReferences(ctx context.Context, repoPath string, handler types.WalkReferencesHandler, + opts *types.WalkReferencesOptions) error + GetCommit(ctx context.Context, repoPath string, ref string) (*types.Commit, error) + GetCommits(ctx context.Context, repoPath string, refs []string) ([]types.Commit, error) + ListCommits(ctx context.Context, repoPath string, + ref string, page int, limit int, filter types.CommitFilter) ([]types.Commit, []types.PathRenameDetails, error) + ListCommitSHAs(ctx context.Context, repoPath string, + ref string, page int, limit int, filter types.CommitFilter) ([]string, error) + GetLatestCommit(ctx context.Context, repoPath string, ref string, treePath string) (*types.Commit, error) + GetFullCommitID(ctx context.Context, repoPath, shortID string) (string, error) + GetAnnotatedTag(ctx context.Context, repoPath string, sha string) (*types.Tag, error) + GetAnnotatedTags(ctx context.Context, repoPath string, shas []string) ([]types.Tag, error) + CreateTag(ctx context.Context, repoPath string, name string, targetSHA string, opts *types.CreateTagOptions) error + GetBranch(ctx context.Context, repoPath string, branchName string) (*types.Branch, error) + GetCommitDivergences(ctx context.Context, repoPath string, + requests []types.CommitDivergenceRequest, max int32) ([]types.CommitDivergence, error) + GetRef(ctx context.Context, repoPath string, reference string) (string, error) + UpdateRef(ctx context.Context, repoPath, reference, newValue, oldValue string) error + CreateTemporaryRepoForPR(ctx context.Context, reposTempPath string, pr *types.PullRequest, + baseBranch, trackingBranch string) (types.TempRepository, error) + Merge(ctx context.Context, pr *types.PullRequest, mergeMethod enum.MergeMethod, baseBranch, trackingBranch string, + tmpBasePath string, mergeMsg string, env []string, identity *types.Identity) error + GetMergeBase(ctx context.Context, repoPath, remote, base, head string) (string, string, error) + Blame(ctx context.Context, repoPath, rev, file string, lineFrom, lineTo int) types.BlameReader + Sync(ctx context.Context, repoPath string, source string) error + + // + // Diff operations + // + + GetDiffTree(ctx context.Context, + 
repoPath, + baseBranch, + headBranch string) (string, error) + + RawDiff(ctx context.Context, + repoPath, + base, + head string, + mergeBase bool, + w io.Writer) error + + CommitDiff(ctx context.Context, + repoPath, + sha string, + w io.Writer) error + + DiffShortStat(ctx context.Context, + repoPath string, + baseRef string, + headRef string, + useMergeBase bool) (types.DiffShortStat, error) + + GetDiffHunkHeaders(ctx context.Context, + repoPath string, + targetRef string, + sourceRef string) ([]*types.DiffFileHunkHeaders, error) + + DiffCut(ctx context.Context, + repoPath string, + targetRef string, + sourceRef string, + path string, + params types.DiffCutParams) (types.HunkHeader, types.Hunk, error) + + MatchFiles(ctx context.Context, + repoPath string, + ref string, + dirPath string, + regExpDef string, + maxSize int) ([]types.FileContent, error) +} diff --git a/gitrpc/internal/service/mapping.go b/gitrpc/internal/service/mapping.go new file mode 100644 index 0000000000..adcac5ded3 --- /dev/null +++ b/gitrpc/internal/service/mapping.go @@ -0,0 +1,197 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package service + +import ( + "fmt" + + "github.com/harness/gitness/gitrpc/hash" + "github.com/harness/gitness/gitrpc/internal/types" + "github.com/harness/gitness/gitrpc/rpc" + + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +func mapSortOrder(s rpc.SortOrder) types.SortOrder { + switch s { + case rpc.SortOrder_Asc: + return types.SortOrderAsc + case rpc.SortOrder_Desc: + return types.SortOrderDesc + case rpc.SortOrder_Default: + return types.SortOrderDefault + default: + // no need to error out - just use default for sorting + return types.SortOrderDefault + } +} + +func mapListCommitTagsSortOption(s rpc.ListCommitTagsRequest_SortOption) types.GitReferenceField { + switch s { + case rpc.ListCommitTagsRequest_Date: + return types.GitReferenceFieldCreatorDate + case rpc.ListCommitTagsRequest_Name: + return types.GitReferenceFieldRefName + case rpc.ListCommitTagsRequest_Default: + return types.GitReferenceFieldRefName + default: + // no need to error out - just use default for sorting + return types.GitReferenceFieldRefName + } +} + +func mapListBranchesSortOption(s rpc.ListBranchesRequest_SortOption) types.GitReferenceField { + switch s { + case rpc.ListBranchesRequest_Date: + return types.GitReferenceFieldCreatorDate + case rpc.ListBranchesRequest_Name: + return types.GitReferenceFieldRefName + case rpc.ListBranchesRequest_Default: + return types.GitReferenceFieldRefName + default: + // no need to error out - just use default for sorting + return types.GitReferenceFieldRefName + } +} + +// TODO: Add UTs to ensure enum values match! +func mapGitNodeType(t types.TreeNodeType) rpc.TreeNodeType { + return rpc.TreeNodeType(t) +} + +// TODO: Add UTs to ensure enum values match! 
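The two TODOs above ask for unit tests that pin the enum parity which mapGitNodeType above and mapGitMode just below silently rely on (both are plain integer casts). A minimal sketch of such a test; the concrete constant names are assumptions and would need to be checked against the internal types package and the generated rpc package:

package service

import (
	"testing"

	"github.com/harness/gitness/gitrpc/internal/types"
	"github.com/harness/gitness/gitrpc/rpc"
)

// Sketch only: guards the integer casts in mapGitNodeType/mapGitMode.
// TreeNodeTypeBlob and TreeNodeType_TreeNodeTypeBlob are assumed constant names.
func TestTreeNodeEnumParity(t *testing.T) {
	if int32(types.TreeNodeTypeBlob) != int32(rpc.TreeNodeType_TreeNodeTypeBlob) {
		t.Fatalf("types.TreeNodeTypeBlob and rpc.TreeNodeType_TreeNodeTypeBlob diverged")
	}
}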
+func mapGitMode(m types.TreeNodeMode) rpc.TreeNodeMode { + return rpc.TreeNodeMode(m) +} + +func mapGitBranch(gitBranch *types.Branch) (*rpc.Branch, error) { + if gitBranch == nil { + return nil, status.Errorf(codes.Internal, "git branch is nil") + } + + var commit *rpc.Commit + var err error + if gitBranch.Commit != nil { + commit, err = mapGitCommit(gitBranch.Commit) + if err != nil { + return nil, err + } + } + + return &rpc.Branch{ + Name: gitBranch.Name, + Sha: gitBranch.SHA, + Commit: commit, + }, nil +} + +func mapGitCommit(gitCommit *types.Commit) (*rpc.Commit, error) { + if gitCommit == nil { + return nil, status.Errorf(codes.Internal, "git commit is nil") + } + + return &rpc.Commit{ + Sha: gitCommit.SHA, + Title: gitCommit.Title, + Message: gitCommit.Message, + Author: mapGitSignature(gitCommit.Author), + Committer: mapGitSignature(gitCommit.Committer), + }, nil +} + +func mapGitSignature(gitSignature types.Signature) *rpc.Signature { + return &rpc.Signature{ + Identity: &rpc.Identity{ + Name: gitSignature.Identity.Name, + Email: gitSignature.Identity.Email, + }, + When: gitSignature.When.Unix(), + } +} +func mapHunkHeader(hunkHeader types.HunkHeader) *rpc.HunkHeader { + return &rpc.HunkHeader{ + OldLine: int32(hunkHeader.OldLine), + OldSpan: int32(hunkHeader.OldSpan), + NewLine: int32(hunkHeader.NewLine), + NewSpan: int32(hunkHeader.NewSpan), + Text: hunkHeader.Text, + } +} + +func mapDiffFileHeader(h types.DiffFileHeader) *rpc.DiffFileHeader { + return &rpc.DiffFileHeader{ + OldFileName: h.OldFileName, + NewFileName: h.NewFileName, + Extensions: h.Extensions, + } +} + +func mapDiffFileHunkHeaders(diffHunkHeaders []*types.DiffFileHunkHeaders) []*rpc.DiffFileHunkHeaders { + res := make([]*rpc.DiffFileHunkHeaders, len(diffHunkHeaders)) + for i, diffHunkHeader := range diffHunkHeaders { + hunkHeaders := make([]*rpc.HunkHeader, len(diffHunkHeader.HunksHeaders)) + for j, hunkHeader := range diffHunkHeader.HunksHeaders { + hunkHeaders[j] = mapHunkHeader(hunkHeader) + } + res[i] = &rpc.DiffFileHunkHeaders{ + FileHeader: mapDiffFileHeader(diffHunkHeader.FileHeader), + HunkHeaders: hunkHeaders, + } + } + return res +} + +func mapRenameDetails(renameDetails []types.PathRenameDetails) []*rpc.RenameDetails { + renameDetailsList := make([]*rpc.RenameDetails, len(renameDetails)) + for i, detail := range renameDetails { + renameDetailsList[i] = &rpc.RenameDetails{ + OldPath: detail.OldPath, + NewPath: detail.NewPath, + CommitShaBefore: detail.CommitSHABefore, + CommitShaAfter: detail.CommitSHAAfter} + } + return renameDetailsList +} + +func mapAnnotatedTag(tag *types.Tag) *rpc.CommitTag { + return &rpc.CommitTag{ + Name: tag.Name, + Sha: tag.Sha, + Title: tag.Title, + Message: tag.Message, + Tagger: mapGitSignature(tag.Tagger), + IsAnnotated: true, + Commit: nil, + } +} + +func mapHashType(t rpc.HashType) (hash.Type, error) { + switch t { + case rpc.HashType_HashTypeSHA256: + return hash.TypeSHA256, nil + default: + return hash.Type(""), fmt.Errorf("unknown hash type: '%s'", t) + } +} + +func mapHashAggregationType(t rpc.HashAggregationType) (hash.AggregationType, error) { + switch t { + case rpc.HashAggregationType_HashAggregationTypeXOR: + return hash.AggregationTypeXOR, nil + default: + return hash.AggregationType(""), fmt.Errorf("unknown hash aggregation type: '%s'", t) + } +} diff --git a/gitrpc/internal/service/match_files.go b/gitrpc/internal/service/match_files.go new file mode 100644 index 0000000000..0171677f1f --- /dev/null +++ b/gitrpc/internal/service/match_files.go @@ -0,0 +1,52 
@@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package service + +import ( + "context" + + "github.com/harness/gitness/gitrpc/internal/types" + "github.com/harness/gitness/gitrpc/rpc" +) + +func (s RepositoryService) MatchFiles( + ctx context.Context, + request *rpc.MatchFilesRequest, +) (*rpc.MatchFilesResponse, error) { + base := request.GetBase() + if base == nil { + return nil, types.ErrBaseCannotBeEmpty + } + + repoPath := getFullPathForRepo(s.reposRoot, base.GetRepoUid()) + + matchedFiles, err := s.adapter.MatchFiles(ctx, repoPath, + request.Ref, request.DirPath, request.Pattern, int(request.MaxSize)) + if err != nil { + return nil, processGitErrorf(err, "failed to open repo") + } + + files := make([]*rpc.FileContent, len(matchedFiles)) + for i, matchedFile := range matchedFiles { + files[i] = &rpc.FileContent{ + Path: matchedFile.Path, + Content: matchedFile.Content, + } + } + + return &rpc.MatchFilesResponse{ + Files: files, + }, nil +} diff --git a/gitrpc/internal/service/merge.go b/gitrpc/internal/service/merge.go new file mode 100644 index 0000000000..da627f4886 --- /dev/null +++ b/gitrpc/internal/service/merge.go @@ -0,0 +1,272 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
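Before moving on to the merge service: MatchFiles above is a plain unary RPC, so a caller only needs the fields the handler reads. A hedged usage sketch (the generated client name and the base type are assumptions, imports omitted; field names are taken from the handler above):

// Sketch, not part of the patch. Assumes a connected gRPC client of the
// generated RepositoryService and a request base identifying the repo.
func findPipelineFiles(ctx context.Context, client rpc.RepositoryServiceClient, base *rpc.ReadRequest) error {
	resp, err := client.MatchFiles(ctx, &rpc.MatchFilesRequest{
		Base:    base,
		Ref:     "refs/heads/main", // any ref the adapter understands (example value)
		DirPath: ".harness",        // directory to search in (example value)
		Pattern: "*.yaml",          // pattern evaluated by the adapter
		MaxSize: 1 << 20,           // skip files larger than 1 MiB
	})
	if err != nil {
		return err
	}
	for _, f := range resp.Files {
		fmt.Printf("%s (%d bytes)\n", f.Path, len(f.Content))
	}
	return nil
}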
+ +package service + +import ( + "context" + "fmt" + "io" + "os" + "path/filepath" + "strings" + "time" + + "github.com/harness/gitness/gitrpc/enum" + "github.com/harness/gitness/gitrpc/internal/tempdir" + "github.com/harness/gitness/gitrpc/internal/types" + "github.com/harness/gitness/gitrpc/rpc" + + "github.com/rs/zerolog/log" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +type MergeService struct { + rpc.UnimplementedMergeServiceServer + adapter GitAdapter + reposRoot string + reposTempDir string +} + +var _ rpc.MergeServiceServer = (*MergeService)(nil) + +func NewMergeService(adapter GitAdapter, reposRoot, reposTempDir string) (*MergeService, error) { + return &MergeService{ + adapter: adapter, + reposRoot: reposRoot, + reposTempDir: reposTempDir, + }, nil +} + +//nolint:funlen,gocognit // maybe some refactoring when we add fast forward merging +func (s MergeService) Merge( + ctx context.Context, + request *rpc.MergeRequest, +) (*rpc.MergeResponse, error) { + if err := validateMergeRequest(request); err != nil { + return nil, err + } + + base := request.Base + repoPath := getFullPathForRepo(s.reposRoot, base.RepoUid) + + baseBranch := "base" + trackingBranch := "tracking" + + pr := &types.PullRequest{ + BaseRepoPath: repoPath, + BaseBranch: request.BaseBranch, + HeadBranch: request.HeadBranch, + } + + // Clone base repo. + tmpRepo, err := s.adapter.CreateTemporaryRepoForPR(ctx, s.reposTempDir, pr, baseBranch, trackingBranch) + if err != nil { + return nil, processGitErrorf(err, "failed to initialize temporary repo") + } + defer func() { + rmErr := tempdir.RemoveTemporaryPath(tmpRepo.Path) + if rmErr != nil { + log.Ctx(ctx).Warn().Msgf("Removing temporary location %s for merge operation was not successful", tmpRepo.Path) + } + }() + + mergeBaseCommitSHA, _, err := s.adapter.GetMergeBase(ctx, tmpRepo.Path, "origin", baseBranch, trackingBranch) + if err != nil { + return nil, fmt.Errorf("failed to get merge base: %w", err) + } + + if tmpRepo.HeadSHA == mergeBaseCommitSHA { + return nil, ErrInvalidArgumentf("no changes between head branch %s and base branch %s", + request.HeadBranch, request.BaseBranch) + } + + if request.HeadExpectedSha != "" && request.HeadExpectedSha != tmpRepo.HeadSHA { + return nil, status.Errorf( + codes.FailedPrecondition, + "head branch '%s' is on SHA '%s' which doesn't match expected SHA '%s'.", + request.HeadBranch, + tmpRepo.HeadSHA, + request.HeadExpectedSha) + } + + var outbuf, errbuf strings.Builder + // Enable sparse-checkout + sparseCheckoutList, err := s.adapter.GetDiffTree(ctx, tmpRepo.Path, baseBranch, trackingBranch) + if err != nil { + return nil, fmt.Errorf("execution of GetDiffTree failed: %w", err) + } + + infoPath := filepath.Join(tmpRepo.Path, ".git", "info") + if err = os.MkdirAll(infoPath, 0o700); err != nil { + return nil, fmt.Errorf("unable to create .git/info in tmpRepo.Path: %w", err) + } + + sparseCheckoutListPath := filepath.Join(infoPath, "sparse-checkout") + if err = os.WriteFile(sparseCheckoutListPath, []byte(sparseCheckoutList), 0o600); err != nil { + return nil, + fmt.Errorf("unable to write .git/info/sparse-checkout file in tmpRepo.Path: %w", err) + } + + // Switch off LFS process (set required, clean and smudge here also) + if err = s.adapter.Config(ctx, tmpRepo.Path, "filter.lfs.process", ""); err != nil { + return nil, err + } + + if err = s.adapter.Config(ctx, tmpRepo.Path, "filter.lfs.required", "false"); err != nil { + return nil, err + } + + if err = s.adapter.Config(ctx, tmpRepo.Path, 
"filter.lfs.clean", ""); err != nil { + return nil, err + } + + if err = s.adapter.Config(ctx, tmpRepo.Path, "filter.lfs.smudge", ""); err != nil { + return nil, err + } + + if err = s.adapter.Config(ctx, tmpRepo.Path, "core.sparseCheckout", "true"); err != nil { + return nil, err + } + + // Read base branch index + if err = s.adapter.ReadTree(ctx, tmpRepo.Path, "HEAD", io.Discard); err != nil { + return nil, fmt.Errorf("failed to read tree: %w", err) + } + outbuf.Reset() + errbuf.Reset() + + committer := base.GetActor() + if request.GetCommitter() != nil { + committer = request.GetCommitter() + } + committerDate := time.Now().UTC() + if request.GetAuthorDate() != 0 { + committerDate = time.Unix(request.GetCommitterDate(), 0) + } + + author := committer + if request.GetAuthor() != nil { + author = request.GetAuthor() + } + authorDate := committerDate + if request.GetAuthorDate() != 0 { + authorDate = time.Unix(request.GetAuthorDate(), 0) + } + + // Because this may call hooks we should pass in the environment + // TODO: merge specific envars should be set by the adapter impl. + env := append(CreateEnvironmentForPush(ctx, base), + "GIT_AUTHOR_NAME="+author.Name, + "GIT_AUTHOR_EMAIL="+author.Email, + "GIT_AUTHOR_DATE="+authorDate.Format(time.RFC3339), + "GIT_COMMITTER_NAME="+committer.Name, + "GIT_COMMITTER_EMAIL="+committer.Email, + "GIT_COMMITTER_DATE="+committerDate.Format(time.RFC3339), + ) + + mergeMsg := strings.TrimSpace(request.Title) + if len(request.Message) > 0 { + mergeMsg += "\n\n" + strings.TrimSpace(request.Message) + } + + if err = s.adapter.Merge( + ctx, + pr, + enum.MergeMethodFromRPC(request.Method), + baseBranch, + trackingBranch, + tmpRepo.Path, + mergeMsg, + env, + &types.Identity{ + Name: author.Name, + Email: author.Email, + }); err != nil { + return nil, processGitErrorf(err, "merge failed") + } + + mergeCommitSHA, err := s.adapter.GetFullCommitID(ctx, tmpRepo.Path, baseBranch) + if err != nil { + return nil, fmt.Errorf("failed to get full commit id for the new merge: %w", err) + } + + refType := enum.RefFromRPC(request.RefType) + if refType == enum.RefTypeUndefined { + return &rpc.MergeResponse{ + BaseSha: tmpRepo.BaseSHA, + HeadSha: tmpRepo.HeadSHA, + MergeBaseSha: mergeBaseCommitSHA, + MergeSha: mergeCommitSHA, + }, nil + } + + refPath, err := GetRefPath(request.RefName, refType) + if err != nil { + return nil, fmt.Errorf("failed to generate full reference for type '%s' and name '%s' for merge operation: %w", + request.RefType, request.RefName, err) + } + pushRef := baseBranch + ":" + refPath + + if err = s.adapter.Push(ctx, tmpRepo.Path, types.PushOptions{ + Remote: "origin", + Branch: pushRef, + Force: request.Force, + Env: env, + }); err != nil { + return nil, fmt.Errorf("failed to push merge commit to ref '%s': %w", refPath, err) + } + + return &rpc.MergeResponse{ + BaseSha: tmpRepo.BaseSHA, + HeadSha: tmpRepo.HeadSHA, + MergeBaseSha: mergeBaseCommitSHA, + MergeSha: mergeCommitSHA, + }, nil +} + +func validateMergeRequest(request *rpc.MergeRequest) error { + base := request.Base + if base == nil { + return types.ErrBaseCannotBeEmpty + } + + author := base.Actor + if author == nil { + return fmt.Errorf("empty actor") + } + + if len(author.Email) == 0 { + return fmt.Errorf("empty user email") + } + + if len(author.Name) == 0 { + return fmt.Errorf("empty user name") + } + + if len(request.BaseBranch) == 0 { + return fmt.Errorf("empty branch name") + } + + if len(request.HeadBranch) == 0 { + return fmt.Errorf("empty head branch name") + } + + if request.RefType != 
rpc.RefType_Undefined && len(request.RefName) == 0 { + return fmt.Errorf("ref name has to be provided if type is defined") + } + + return nil +} diff --git a/gitrpc/internal/service/operations.go b/gitrpc/internal/service/operations.go new file mode 100644 index 0000000000..06a0b0268e --- /dev/null +++ b/gitrpc/internal/service/operations.go @@ -0,0 +1,568 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package service + +import ( + "bufio" + "bytes" + "context" + "errors" + "fmt" + "io" + "path" + "strings" + "time" + + "github.com/harness/gitness/gitrpc/internal/files" + "github.com/harness/gitness/gitrpc/internal/slices" + "github.com/harness/gitness/gitrpc/internal/types" + "github.com/harness/gitness/gitrpc/rpc" + + "code.gitea.io/gitea/modules/git" +) + +const ( + filePrefix = "file://" + defaultFilePermission = "100644" // 0o644 default file permission +) + +type CommitFilesService struct { + rpc.UnimplementedCommitFilesServiceServer + adapter GitAdapter + reposRoot string + reposTempDir string +} + +type fileAction struct { + header *rpc.CommitFilesActionHeader + // content can hold file content or new path for move operation + // new path is prefixed with filePrefix constant + content []byte +} + +func NewCommitFilesService(adapter GitAdapter, reposRoot, reposTempDir string) (*CommitFilesService, error) { + return &CommitFilesService{ + adapter: adapter, + reposRoot: reposRoot, + reposTempDir: reposTempDir, + }, nil +} + +//nolint:funlen,gocognit // needs refactoring +func (s *CommitFilesService) CommitFiles(stream rpc.CommitFilesService_CommitFilesServer) error { + ctx := stream.Context() + headerRequest, err := stream.Recv() + if err != nil { + return ErrInternal(err) + } + + header := headerRequest.GetHeader() + if header == nil { + return ErrInvalidArgument(types.ErrHeaderCannotBeEmpty) + } + + base := header.GetBase() + if base == nil { + return ErrInvalidArgument(types.ErrBaseCannotBeEmpty) + } + + committer := base.GetActor() + if header.GetCommitter() != nil { + committer = header.GetCommitter() + } + committerDate := time.Now().UTC() + if header.GetCommitterDate() != 0 { + committerDate = time.Unix(header.GetCommitterDate(), 0) + } + + author := committer + if header.GetAuthor() != nil { + author = header.GetAuthor() + } + authorDate := committerDate + if header.GetAuthorDate() != 0 { + authorDate = time.Unix(header.GetAuthorDate(), 0) + } + + repoPath := getFullPathForRepo(s.reposRoot, base.GetRepoUid()) + + // TODO: why are we using the gitea operations here? + repo, err := git.OpenRepository(ctx, repoPath) + if err != nil { + return processGitErrorf(err, "failed to open repo") + } + + // check if repo is empty + // IMPORTANT: we don't use gitea's repo.IsEmpty() as that only checks whether the default branch exists (in HEAD). + // This can be an issue in case someone created a branch already in the repo (just default branch is missing).
+ // In that case the user can accidentally create separate git histories (which most likely is unintended). + // If the user wants to actually build a disconnected commit graph they can use the cli. + isEmpty, err := repoHasBranches(ctx, repo) + if err != nil { + return ErrInternalf("failed to determine if repository is empty", err) + } + + // ensure input data is valid + if err = s.validateAndPrepareHeader(repo, isEmpty, header); err != nil { + return err + } + + // collect all file actions from grpc stream + actions := make([]fileAction, 0, 16) + if err = s.collectActions(stream, &actions); err != nil { + return err + } + + // create a new shared repo + shared, err := NewSharedRepo(s.reposTempDir, base.GetRepoUid(), repo) + if err != nil { + return processGitErrorf(err, "failed to create shared repository") + } + defer shared.Close(ctx) + + // handle empty repo separately (as branch doesn't exist, no commit exists, ...) + var parentCommitSHA string + if isEmpty { + err = s.prepareTreeEmptyRepo(ctx, shared, actions) + if err != nil { + return err + } + } else { + parentCommitSHA, err = s.prepareTree(ctx, shared, header.GetBranchName(), actions) + if err != nil { + return err + } + } + + // Now write the tree + treeHash, err := shared.WriteTree(ctx) + if err != nil { + return processGitErrorf(err, "failed to write tree object") + } + + message := strings.TrimSpace(header.GetTitle()) + if len(header.GetMessage()) > 0 { + message += "\n\n" + strings.TrimSpace(header.GetMessage()) + } + // Now commit the tree + commitSHA, err := shared.CommitTreeWithDate( + ctx, + parentCommitSHA, + author, + committer, + treeHash, + message, + false, + authorDate, + committerDate, + ) + if err != nil { + return processGitErrorf(err, "failed to commit the tree") + } + + if err = shared.PushCommitToBranch(ctx, base, commitSHA, header.GetNewBranchName()); err != nil { + return processGitErrorf(err, "failed to push commits to remote repository") + } + + commit, err := shared.GetCommit(commitSHA) + if err != nil { + return processGitErrorf(err, "failed to get commit for SHA %s", commitSHA) + } + + return stream.SendAndClose(&rpc.CommitFilesResponse{ + CommitId: commit.ID.String(), + }) +} + +func (s *CommitFilesService) prepareTree(ctx context.Context, shared *SharedRepo, + branchName string, actions []fileAction) (string, error) { + // clone original branch from repo + if err := s.clone(ctx, shared, branchName); err != nil { + return "", err + } + + // Get the latest commit of the original branch + commit, err := shared.GetBranchCommit(branchName) + if err != nil { + return "", processGitErrorf(err, "failed to get latest commit of the branch %s", branchName) + } + + // execute all actions + for _, action := range actions { + action := action + if err = s.processAction(ctx, shared, &action, commit); err != nil { + return "", err + } + } + + return commit.ID.String(), nil +} + +func (s *CommitFilesService) prepareTreeEmptyRepo(ctx context.Context, shared *SharedRepo, + actions []fileAction) error { + // init a new repo (full clone would cause risk that by time of push someone wrote to the remote repo!) 
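As an aside on the streaming contract used by CommitFiles: after the initial header message, the client sends an alternating sequence of action headers and content chunks, and collectActions (defined further below) folds consecutive content chunks into the preceding action. Roughly, a stream like the following (all values are illustrative):

// header:  CommitFilesRequestHeader{BranchName: "main", Title: "Add docs"}
// action:  CommitFilesActionHeader{Action: CREATE, Path: "docs/README.md"}
// content: "# Docs\n"
// content: "More text.\n"
// action:  CommitFilesActionHeader{Action: DELETE, Path: "old.txt"}
//
// ends up as this slice once collectActions has drained the stream:
actions := []fileAction{
	{
		header:  &rpc.CommitFilesActionHeader{Action: rpc.CommitFilesActionHeader_CREATE, Path: "docs/README.md"},
		content: []byte("# Docs\nMore text.\n"),
	},
	{
		header: &rpc.CommitFilesActionHeader{Action: rpc.CommitFilesActionHeader_DELETE, Path: "old.txt"},
	},
}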
+ err := shared.Init(ctx) + if err != nil { + return processGitErrorf(err, "failed to init shared tmp repository") + } + + for _, action := range actions { + if action.header.Action != rpc.CommitFilesActionHeader_CREATE { + return ErrFailedPrecondition(types.ErrActionNotAllowedOnEmptyRepo) + } + + filePath := files.CleanUploadFileName(action.header.GetPath()) + if filePath == "" { + return ErrInvalidArgument(types.ErrInvalidPath) + } + + reader := bytes.NewReader(action.content) + if err = createFile(ctx, shared, nil, filePath, defaultFilePermission, reader); err != nil { + return ErrInternalf("failed to create file '%s'", action.header.Path, err) + } + } + + return nil +} + +func (s *CommitFilesService) validateAndPrepareHeader(repo *git.Repository, isEmpty bool, + header *rpc.CommitFilesRequestHeader) error { + if header.GetBranchName() == "" { + defaultBranchRef, err := repo.GetDefaultBranch() + if err != nil { + return processGitErrorf(err, "failed to get default branch") + } + header.BranchName = defaultBranchRef + } + + if header.GetNewBranchName() == "" { + header.NewBranchName = header.GetBranchName() + } + + // trim refs/heads/ prefixes to avoid issues when calling gitea API + header.BranchName = strings.TrimPrefix(strings.TrimSpace(header.GetBranchName()), gitReferenceNamePrefixBranch) + header.NewBranchName = strings.TrimPrefix(strings.TrimSpace(header.GetNewBranchName()), gitReferenceNamePrefixBranch) + + // if the repo is empty then we can skip branch existence checks + if isEmpty { + return nil + } + + // ensure source branch exists + if _, err := repo.GetBranch(header.GetBranchName()); err != nil { + return processGitErrorf(err, "failed to get source branch %s", header.BranchName) + } + + // ensure new branch doesn't exist yet (if new branch creation was requested) + if header.GetBranchName() != header.GetNewBranchName() { + existingBranch, err := repo.GetBranch(header.GetNewBranchName()) + if existingBranch != nil { + return ErrAlreadyExistsf("branch %s already exists", existingBranch.Name) + } + if err != nil && !git.IsErrBranchNotExist(err) { + return processGitErrorf(err, "failed to create new branch %s", header.NewBranchName) + } + } + return nil +} + +func (s *CommitFilesService) clone( + ctx context.Context, + shared *SharedRepo, + branch string, +) error { + if err := shared.Clone(ctx, branch); err != nil { + return ErrInternalf("failed to clone branch '%s'", branch, err) + } + + if err := shared.SetDefaultIndex(ctx); err != nil { + return ErrInternalf("failed to set default index", err) + } + + return nil +} + +func (s *CommitFilesService) collectActions( + stream rpc.CommitFilesService_CommitFilesServer, + ptrActions *[]fileAction, +) error { + if ptrActions == nil { + return nil + } + actions := *ptrActions + for { + req, err := stream.Recv() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + + return ErrInternalf("receive request failed", err) + } + + switch payload := req.GetAction().GetPayload().(type) { + case *rpc.CommitFilesAction_Header: + actions = append(actions, fileAction{header: payload.Header}) + case *rpc.CommitFilesAction_Content: + if len(actions) == 0 { + return ErrFailedPrecondition(types.ErrContentSentBeforeAction) + } + + // append the content to the previous fileAction + content := &actions[len(actions)-1].content + *content = append(*content, payload.Content...) 
+ default: + return ErrInternalf("unhandled fileAction payload type: %T", payload) + } + } + if len(actions) == 0 { + return ErrInvalidArgument(types.ErrActionListEmpty) + } + *ptrActions = actions + return nil +} + +func (s *CommitFilesService) processAction( + ctx context.Context, + shared *SharedRepo, + action *fileAction, + commit *git.Commit, +) (err error) { + header := action.header + if _, ok := rpc.CommitFilesActionHeader_ActionType_name[int32(header.Action)]; !ok { + return ErrInvalidArgumentf("undefined file action %s", action.header.Action, types.ErrUndefinedAction) + } + + filePath := files.CleanUploadFileName(header.GetPath()) + if filePath == "" { + return ErrInvalidArgument(types.ErrInvalidPath) + } + + reader := bytes.NewReader(action.content) + + switch header.Action { + case rpc.CommitFilesActionHeader_CREATE: + err = createFile(ctx, shared, commit, filePath, defaultFilePermission, reader) + case rpc.CommitFilesActionHeader_UPDATE: + err = updateFile(ctx, shared, commit, filePath, header.GetSha(), defaultFilePermission, reader) + case rpc.CommitFilesActionHeader_MOVE: + err = moveFile(ctx, shared, commit, filePath, defaultFilePermission, reader) + case rpc.CommitFilesActionHeader_DELETE: + err = deleteFile(ctx, shared, filePath) + } + + return err +} + +func createFile(ctx context.Context, repo *SharedRepo, commit *git.Commit, + filePath, mode string, reader io.Reader) error { + // only check path availability if a source commit is available (empty repo won't have such a commit) + if commit != nil { + if err := checkPathAvailability(commit, filePath, true); err != nil { + return err + } + } + + hash, err := repo.WriteGitObject(ctx, reader) + if err != nil { + return processGitErrorf(err, "error hashing object") + } + + // Add the object to the index + if err = repo.AddObjectToIndex(ctx, mode, hash, filePath); err != nil { + return processGitErrorf(err, "error creating object") + } + return nil +} + +func updateFile(ctx context.Context, repo *SharedRepo, commit *git.Commit, filePath, sha, + mode string, reader io.Reader) error { + // get file mode from existing file (default unless executable) + entry, err := getFileEntry(commit, sha, filePath) + if err != nil { + return err + } + if entry.IsExecutable() { + mode = "100755" + } + + hash, err := repo.WriteGitObject(ctx, reader) + if err != nil { + return processGitErrorf(err, "error hashing object") + } + + if err = repo.AddObjectToIndex(ctx, mode, hash, filePath); err != nil { + return processGitErrorf(err, "error updating object") + } + return nil +} + +func moveFile(ctx context.Context, repo *SharedRepo, commit *git.Commit, + filePath, mode string, reader io.Reader) error { + buffer := &bytes.Buffer{} + newPath, err := parsePayload(reader, buffer) + if err != nil { + return err + } + + if buffer.Len() == 0 && newPath != "" { + err = repo.ShowFile(ctx, filePath, commit.ID.String(), buffer) + if err != nil { + return processGitErrorf(err, "failed lookup for path %s", newPath) + } + } + + if err = checkPathAvailability(commit, newPath, false); err != nil { + return err + } + + filesInIndex, err := repo.LsFiles(ctx, filePath) + if err != nil { + return processGitErrorf(err, "listing files error") + } + if !slices.Contains(filesInIndex, filePath) { + return ErrNotFoundf("path %s not found", filePath) + } + + hash, err := repo.WriteGitObject(ctx, buffer) + if err != nil { + return processGitErrorf(err, "error hashing object") + } + + if err = repo.AddObjectToIndex(ctx, mode, hash, newPath); err != nil { + return 
processGitErrorf(err, "add object error") + } + + if err = repo.RemoveFilesFromIndex(ctx, filePath); err != nil { + return processGitErrorf(err, "remove object error") + } + return nil +} + +func deleteFile(ctx context.Context, repo *SharedRepo, filePath string) error { + filesInIndex, err := repo.LsFiles(ctx, filePath) + if err != nil { + return processGitErrorf(err, "listing files error") + } + if !slices.Contains(filesInIndex, filePath) { + return ErrNotFoundf("file path %s not found", filePath) + } + + if err = repo.RemoveFilesFromIndex(ctx, filePath); err != nil { + return processGitErrorf(err, "remove object error") + } + return nil +} + +func getFileEntry( + commit *git.Commit, + sha string, + path string, +) (*git.TreeEntry, error) { + entry, err := commit.GetTreeEntryByPath(path) + if git.IsErrNotExist(err) { + return nil, ErrNotFoundf("path %s not found", path) + } + if err != nil { + return nil, processGitErrorf(err, "failed to get tree for path %s", path) + } + + // If a SHA was given and the SHA given doesn't match the SHA of the fromTreePath, throw error + if sha == "" || sha != entry.ID.String() { + return nil, ErrInvalidArgumentf("sha does not match for path %s [given: %s, expected: %s]", + path, sha, entry.ID.String()) + } + + return entry, nil +} + +// checkPathAvailability ensures that the path is available for the requested operation. +// For the path where this file will be created/updated, we need to make +// sure no parts of the path are existing files or links except for the last +// item in the path which is the file name, and that shouldn't exist IF it is +// a new file OR is being moved to a new path. +func checkPathAvailability(commit *git.Commit, filePath string, isNewFile bool) error { + parts := strings.Split(filePath, "/") + subTreePath := "" + for index, part := range parts { + subTreePath = path.Join(subTreePath, part) + entry, err := commit.GetTreeEntryByPath(subTreePath) + if err != nil { + if git.IsErrNotExist(err) { + // Means there is no item with that name, so we're good + break + } + return processGitErrorf(err, "failed to get tree entry for path %s", subTreePath) + } + switch { + case index < len(parts)-1: + if !entry.IsDir() { + return ErrAlreadyExistsf("a file already exists where you're trying to create a subdirectory [path: %s]", + subTreePath) + } + case entry.IsLink(): + return fmt.Errorf("a symbolic link %w where you're trying to create a subdirectory [path: %s]", + types.ErrAlreadyExists, subTreePath) + case entry.IsDir(): + return ErrAlreadyExistsf("a directory already exists where you're trying to create a subdirectory [path: %s]", + subTreePath) + case filePath != "" || isNewFile: + return ErrAlreadyExistsf("file path %s already exists", filePath) + } + } + return nil +} + +// repoHasBranches returns true iff there's at least one branch in the repo (any branch) +// NOTE: This is different from repo.Empty(), +// as it doesn't care whether the existing branch is the default branch or not. +func repoHasBranches(ctx context.Context, repo *git.Repository) (bool, error) { + // repo has branches IFF there's at least one commit that is reachable via a branch + // (every existing branch points to a commit) + stdout, _, runErr := git.NewCommand(ctx, "rev-list", "--max-count", "1", "--branches"). 
+ RunStdBytes(&git.RunOpts{Dir: repo.Path}) + if runErr != nil { + return false, processGitErrorf(runErr, "failed to trigger rev-list command") + } + + return strings.TrimSpace(string(stdout)) == "", nil +} + +func parsePayload(payload io.Reader, content io.Writer) (string, error) { + newPath := "" + reader := bufio.NewReader(payload) + // check for filePrefix + prefixBytes := make([]byte, len(filePrefix)) + if _, err := reader.Read(prefixBytes); err != nil { + if errors.Is(err, io.EOF) { + return "", nil + } + return "", err + } + // check if payload starts with filePrefix constant + if bytes.Equal(prefixBytes, []byte(filePrefix)) { + filename, _ := reader.ReadString('\n') // no err handling because next statement will check filename + newPath = files.CleanUploadFileName(filename) + if newPath == "" { + return "", types.ErrInvalidPath + } + } else { + if _, err := content.Write(prefixBytes); err != nil { + return "", err + } + } + _, err := io.Copy(content, reader) + return newPath, err +} diff --git a/gitrpc/internal/service/operations_test.go b/gitrpc/internal/service/operations_test.go new file mode 100644 index 0000000000..462abead77 --- /dev/null +++ b/gitrpc/internal/service/operations_test.go @@ -0,0 +1,118 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
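To make the MOVE convention above concrete: parsePayload treats a payload that starts with the filePrefix constant ("file://") as carrying the new path, terminated by a newline, with any remaining bytes used as replacement content (if there are none, moveFile re-reads the old blob from the source path). A small sketch of how such a payload could be assembled on the sending side (the helper name is illustrative):

// buildMovePayload produces the content blob expected for a MOVE action:
// "file://" + new path + "\n" + optional new file content.
func buildMovePayload(newPath string, newContent []byte) []byte {
	payload := []byte(filePrefix + newPath + "\n")
	return append(payload, newContent...)
}

The table-driven tests that follow exercise exactly this format.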
+ +package service + +import ( + "bytes" + "io" + "strings" + "testing" +) + +func Test_parsePayload(t *testing.T) { + s := "this is the content of the file" + filename := "file.txt" + content := &bytes.Buffer{} + type args struct { + payload io.Reader + content io.Writer + } + tests := []struct { + name string + args args + want string + wantContent []byte + wantErr bool + }{ + { + name: "no content", + args: args{ + payload: strings.NewReader(""), + content: content, + }, + }, + { + name: "sample content", + args: args{ + payload: strings.NewReader(s), + content: content, + }, + wantContent: []byte(s), + }, + { + name: "file name test", + args: args{ + payload: strings.NewReader(filePrefix + filename), + content: content, + }, + want: filename, + wantContent: []byte{}, + }, + { + name: "file name with new line", + args: args{ + payload: strings.NewReader(filePrefix + filename + "\n"), + content: content, + }, + want: filename, + wantContent: []byte{}, + }, + { + name: "content test", + args: args{ + payload: strings.NewReader(filePrefix + filename + "\n" + s), + content: content, + }, + want: filename, + wantContent: []byte(s), + }, + { + name: "content test with empty line at the top", + args: args{ + payload: strings.NewReader(filePrefix + filename + "\n\n" + s), + content: content, + }, + want: filename, + wantContent: []byte("\n" + s), + }, + { + name: "content test with double empty line at the top", + args: args{ + payload: strings.NewReader(filePrefix + filename + "\n\n\n" + s), + content: content, + }, + want: filename, + wantContent: []byte("\n\n" + s), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := parsePayload(tt.args.payload, tt.args.content) + if (err != nil) != tt.wantErr { + t.Errorf("parsePayload() error = %v, wantErr %v", err, tt.wantErr) + return + } + if got != tt.want { + t.Errorf("parsePayload() got filename = %v, want %v", + got, tt.want) + } + if !bytes.Equal(content.Bytes(), tt.wantContent) { + t.Errorf("parsePayload() got content = %v, want %v", + content.Bytes(), tt.wantContent) + } + content.Reset() + }) + } +} diff --git a/gitrpc/internal/service/path.go b/gitrpc/internal/service/path.go new file mode 100644 index 0000000000..259cf18214 --- /dev/null +++ b/gitrpc/internal/service/path.go @@ -0,0 +1,32 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package service + +import ( + "fmt" + "path/filepath" +) + +// getFullPathForRepo returns the full path of a repo given the root dir of repos and the uid of the repo. +// NOTE: Split repos into subfolders using their prefix to distribute repos across a set of folders. +func getFullPathForRepo(reposRoot, uid string) string { + // ASSUMPTION: repoUID is of lenth at least 4 - otherwise we have trouble either way. 
+ return filepath.Join( + reposRoot, // root folder + uid[0:2], // first subfolder + uid[2:4], // second subfolder + fmt.Sprintf("%s.%s", uid[4:], gitRepoSuffix), // remainder with .git + ) +} diff --git a/gitrpc/internal/service/pipeline.go b/gitrpc/internal/service/pipeline.go new file mode 100644 index 0000000000..9e048b5eb5 --- /dev/null +++ b/gitrpc/internal/service/pipeline.go @@ -0,0 +1,74 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package service + +import ( + "context" + "fmt" + "os" + + "github.com/harness/gitness/gitrpc/internal/types" + "github.com/harness/gitness/gitrpc/rpc" + + "github.com/drone/go-generate/builder" + "github.com/drone/go-generate/chroot" + "github.com/rs/zerolog/log" +) + +func (s RepositoryService) GeneratePipeline(ctx context.Context, + request *rpc.GeneratePipelineRequest, +) (*rpc.GeneratePipelineResponse, error) { + base := request.GetBase() + if base == nil { + return nil, types.ErrBaseCannotBeEmpty + } + repoPath := getFullPathForRepo(s.reposRoot, base.GetRepoUid()) + + tempDir, err := os.MkdirTemp(s.tmpDir, "*-"+base.GetRepoUid()) + if err != nil { + return nil, fmt.Errorf("error creating temp dir for repo %s: %w", base.GetRepoUid(), err) + } + defer func(path string) { + // when repo is successfully created remove temp dir + errRm := os.RemoveAll(path) + if errRm != nil { + log.Err(errRm).Msg("failed to cleanup temporary dir.") + } + }(tempDir) + + // Clone repository to temp dir + if err = s.adapter.Clone(ctx, repoPath, tempDir, types.CloneRepoOptions{Depth: 1}); err != nil { + return nil, processGitErrorf(err, "failed to clone repo") + } + + // create a chroot virtual filesystem that we + // pass to the builder for isolation purposes. + chroot, err := chroot.New(tempDir) + if err != nil { + return nil, fmt.Errorf("failed to set the temp directory as active directory: %w", err) + } + + // builds the pipeline configuration based on + // the contents of the virtual filesystem. + builder := builder.New() + out, err := builder.Build(chroot) + if err != nil { + return nil, fmt.Errorf("failed to build pipeline: %w", err) + } + + return &rpc.GeneratePipelineResponse{ + PipelineYaml: out, + }, nil +} diff --git a/gitrpc/internal/service/push.go b/gitrpc/internal/service/push.go new file mode 100644 index 0000000000..a67da0357b --- /dev/null +++ b/gitrpc/internal/service/push.go @@ -0,0 +1,65 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
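For orientation, the sharding performed by getFullPathForRepo (in path.go above) spreads repositories across two levels of two-character directories derived from the UID. A self-contained sketch with a made-up root and UID:

package main

import (
	"fmt"
	"path/filepath"
)

// repoPath mirrors getFullPathForRepo above (gitRepoSuffix is "git").
func repoPath(reposRoot, uid string) string {
	return filepath.Join(reposRoot, uid[0:2], uid[2:4], uid[4:]+".git")
}

func main() {
	// hypothetical values
	fmt.Println(repoPath("/var/lib/gitness/repos", "f0a35c4ed2b94c8a"))
	// Output: /var/lib/gitness/repos/f0/a3/5c4ed2b94c8a.git
}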
+ +package service + +import ( + "context" + + "github.com/harness/gitness/gitrpc/internal/types" + "github.com/harness/gitness/gitrpc/rpc" + + "code.gitea.io/gitea/modules/git" +) + +type PushService struct { + rpc.UnimplementedPushServiceServer + adapter GitAdapter + reposRoot string +} + +var _ rpc.PushServiceServer = (*PushService)(nil) + +func NewPushService(adapter GitAdapter, reposRoot string) *PushService { + return &PushService{ + adapter: adapter, + reposRoot: reposRoot, + } +} + +func (s PushService) PushRemote( + ctx context.Context, + request *rpc.PushRemoteRequest, +) (*rpc.PushRemoteResponse, error) { + base := request.GetBase() + repoPath := getFullPathForRepo(s.reposRoot, base.GetRepoUid()) + repo, err := git.OpenRepository(ctx, repoPath) + if err != nil { + return nil, processGitErrorf(err, "failed to open repo") + } + if ok, err := repo.IsEmpty(); ok { + return nil, ErrInvalidArgumentf("cannot push empty repo", err) + } + + err = s.adapter.Push(ctx, repoPath, types.PushOptions{ + Remote: request.RemoteUrl, + Force: false, + Env: nil, + Mirror: true, + }) + if err != nil { + return nil, err + } + return &rpc.PushRemoteResponse{}, nil +} diff --git a/gitrpc/internal/service/ref.go b/gitrpc/internal/service/ref.go new file mode 100644 index 0000000000..d45b1e4a17 --- /dev/null +++ b/gitrpc/internal/service/ref.go @@ -0,0 +1,308 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package service + +import ( + "context" + "fmt" + "math" + "strings" + + "github.com/harness/gitness/gitrpc/enum" + "github.com/harness/gitness/gitrpc/internal/gitea" + "github.com/harness/gitness/gitrpc/internal/types" + "github.com/harness/gitness/gitrpc/rpc" + + "code.gitea.io/gitea/modules/git" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +type ReferenceService struct { + rpc.UnimplementedReferenceServiceServer + adapter GitAdapter + reposRoot string + tmpDir string +} + +func NewReferenceService(adapter GitAdapter, + reposRoot string, tmpDir string) (*ReferenceService, error) { + return &ReferenceService{ + adapter: adapter, + reposRoot: reposRoot, + tmpDir: tmpDir, + }, nil +} + +// sanitizeReferenceQuery removes characters that aren't allowed in a branch name. +// TODO: should we error out instead of ignoring bad chars? +func sanitizeReferenceQuery(query string) (string, bool, bool) { + if query == "" { + return "", false, false + } + + // get special characters before anything else + matchPrefix := query[0] == '^' // will be removed by mapping + matchSuffix := query[len(query)-1] == '$' + if matchSuffix { + // Special char $ has to be removed manually as it's a valid char + // TODO: this restricts the query language to a certain degree, can we do better? (escaping) + query = query[:len(query)-1] + } + + // strip all unwanted characters + return strings.Map(func(r rune) rune { + // See https://git-scm.com/docs/git-check-ref-format#_description for more details. + switch { + // rule 4.
+ case r < 32 || r == 127 || r == ' ' || r == '~' || r == '^' || r == ':': + return -1 + + // rule 5 + case r == '?' || r == '*' || r == '[': + return -1 + + // everything else we map as is + default: + return r + } + }, query), + matchPrefix, + matchSuffix +} + +// createReferenceWalkPatternsFromQuery returns a list of patterns that +// ensure only references matching the basePath and query are part of the walk. +func createReferenceWalkPatternsFromQuery(basePath string, query string) []string { + if basePath == "" && query == "" { + return []string{} + } + + // ensure non-empty basePath ends with "/" for proper matching and concatenation. + if basePath != "" && basePath[len(basePath)-1] != '/' { + basePath += "/" + } + + // in case query is empty, we just match the basePath. + if query == "" { + return []string{basePath} + } + + // sanitize the query and get special chars + query, matchPrefix, matchSuffix := sanitizeReferenceQuery(query) + + // In general, there are two search patterns: + // - refs/tags/**/*QUERY* - finds all refs that have QUERY in the filename. + // - refs/tags/**/*QUERY*/** - finds all refs that have a parent folder with QUERY in the name. + // + // In case the suffix has to match, they will be the same, so we return only one pattern. + if matchSuffix { + // exact match (refs/tags/QUERY) + if matchPrefix { + return []string{basePath + query} + } + + // suffix only match (refs/tags/**/*QUERY) + return []string{basePath + "**/*" + query} + } + + // prefix only match + // - refs/tags/QUERY* + // - refs/tags/QUERY*/** + if matchPrefix { + return []string{ + basePath + query + "*", // file + basePath + query + "*/**", // folder + } + } + + // arbitrary match + // - refs/tags/**/*QUERY* + // - refs/tags/**/*QUERY*/** + return []string{ + basePath + "**/*" + query + "*", // file + basePath + "**/*" + query + "*/**", // folder + } +} + +// wrapInstructorWithOptionalPagination wraps the provided walkInstructor with pagination. +// If no paging is enabled, the original instructor is returned.
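A few worked examples of the pattern construction above, using hypothetical queries against the tag namespace (the '^' and '$' markers are consumed by sanitizeReferenceQuery):

// Sketch of expected outputs (could be turned into a table-driven test):
//
//   createReferenceWalkPatternsFromQuery("refs/tags/", "v1")     => ["refs/tags/**/*v1*", "refs/tags/**/*v1*/**"]
//   createReferenceWalkPatternsFromQuery("refs/tags/", "^v1")    => ["refs/tags/v1*", "refs/tags/v1*/**"]
//   createReferenceWalkPatternsFromQuery("refs/tags/", "v1.0$")  => ["refs/tags/**/*v1.0"]
//   createReferenceWalkPatternsFromQuery("refs/tags/", "^v1.0$") => ["refs/tags/v1.0"]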
+func wrapInstructorWithOptionalPagination(inner types.WalkReferencesInstructor, + page int32, pageSize int32) (types.WalkReferencesInstructor, int32, error) { + // ensure pagination is requested + if pageSize < 1 { + return inner, 0, nil + } + + // sanitize page + if page < 1 { + page = 1 + } + + // ensure we don't overflow + if int64(page)*int64(pageSize) > int64(math.MaxInt) { + return nil, 0, fmt.Errorf("page %d with pageSize %d is out of range", page, pageSize) + } + + startAfter := (page - 1) * pageSize + endAfter := page * pageSize + + // we have to count ourselves for proper pagination + c := int32(0) + return func(e types.WalkReferencesEntry) (types.WalkInstruction, error) { + // execute inner instructor + inst, err := inner(e) + if err != nil { + return inst, err + } + + // no pagination if element is filtered out + if inst != types.WalkInstructionHandle { + return inst, nil + } + + // increase count iff element is part of filtered output + c++ + + // add pagination on filtered output + switch { + case c <= startAfter: + return types.WalkInstructionSkip, nil + case c > endAfter: + return types.WalkInstructionStop, nil + default: + return types.WalkInstructionHandle, nil + } + }, + endAfter, + nil +} + +func (s ReferenceService) GetRef(ctx context.Context, + request *rpc.GetRefRequest, +) (*rpc.GetRefResponse, error) { + if request.Base == nil { + return nil, types.ErrBaseCannotBeEmpty + } + repoPath := getFullPathForRepo(s.reposRoot, request.Base.GetRepoUid()) + + refType := enum.RefFromRPC(request.GetRefType()) + if refType == enum.RefTypeUndefined { + return nil, status.Error(codes.InvalidArgument, "invalid value of RefType argument") + } + reference, err := GetRefPath(request.GetRefName(), refType) + if err != nil { + return nil, err + } + + sha, err := s.adapter.GetRef(ctx, repoPath, reference) + if err != nil { + return nil, err + } + + return &rpc.GetRefResponse{Sha: sha}, nil +} + +func (s ReferenceService) UpdateRef(ctx context.Context, + request *rpc.UpdateRefRequest, +) (*rpc.UpdateRefResponse, error) { + base := request.GetBase() + if base == nil { + return nil, types.ErrBaseCannotBeEmpty + } + repoPath := getFullPathForRepo(s.reposRoot, base.GetRepoUid()) + + refType := enum.RefFromRPC(request.GetRefType()) + if refType == enum.RefTypeUndefined { + return nil, status.Error(codes.InvalidArgument, "invalid value of RefType argument") + } + reference, err := GetRefPath(request.GetRefName(), refType) + if err != nil { + return nil, err + } + + // TODO: why are we using gitea operations here?! 
+ repo, err := git.OpenRepository(ctx, repoPath) + if err != nil { + return nil, processGitErrorf(err, "failed to open repo") + } + + if ok, err := repo.IsEmpty(); ok { + return nil, ErrInvalidArgumentf("branch cannot be created on empty repository", err) + } + + sharedRepo, err := NewSharedRepo(s.tmpDir, base.GetRepoUid(), repo) + if err != nil { + return nil, processGitErrorf(err, "failed to create new shared repo") + } + defer sharedRepo.Close(ctx) + + // clone repo (with HEAD branch - target might be anything) + err = sharedRepo.Clone(ctx, "") + if err != nil { + return nil, processGitErrorf(err, "failed to clone shared repo") + } + + pushOpts := types.PushOptions{ + Remote: sharedRepo.remoteRepo.Path, + Env: CreateEnvironmentForPush(ctx, base), + } + + // handle deletion explicitly to avoid any unwanted side effects + if request.GetNewValue() == "" { + pushOpts.Branch = ":" + reference + } else { + pushOpts.Branch = request.GetNewValue() + ":" + reference + } + + if request.GetOldValue() == "" { + pushOpts.Force = true + } else { + pushOpts.ForceWithLease = reference + ":" + request.GetOldValue() + } + + // TODO: our shared repo has so much duplication, that should be changed IMHO. + err = gitea.Push(ctx, sharedRepo.tmpPath, pushOpts) + if err != nil { + return nil, processGitErrorf(err, "failed to push changes to original repo") + } + + return &rpc.UpdateRefResponse{}, nil +} + +func GetRefPath(refName string, refType enum.RefType) (string, error) { + const ( + refPullReqPrefix = "refs/pullreq/" + refPullReqHeadSuffix = "/head" + refPullReqMergeSuffix = "/merge" + ) + + switch refType { + case enum.RefTypeRaw: + return refName, nil + case enum.RefTypeBranch: + return git.BranchPrefix + refName, nil + case enum.RefTypeTag: + return git.TagPrefix + refName, nil + case enum.RefTypePullReqHead: + return refPullReqPrefix + refName + refPullReqHeadSuffix, nil + case enum.RefTypePullReqMerge: + return refPullReqPrefix + refName + refPullReqMergeSuffix, nil + case enum.RefTypeUndefined: + fallthrough + default: + return "", ErrInvalidArgumentf("provided reference type '%s' is invalid", refType) + } +} diff --git a/gitrpc/internal/service/repo.go b/gitrpc/internal/service/repo.go new file mode 100644 index 0000000000..5e2f87fdf1 --- /dev/null +++ b/gitrpc/internal/service/repo.go @@ -0,0 +1,467 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package service + +import ( + "context" + "errors" + "fmt" + "io" + "os" + "path" + "regexp" + "runtime/debug" + "time" + + "github.com/harness/gitness/gitrpc/hash" + "github.com/harness/gitness/gitrpc/internal/types" + "github.com/harness/gitness/gitrpc/rpc" + + "github.com/rs/zerolog/log" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +const ( + maxFileSize = 1 << 20 + gitRepoSuffix = "git" + + gitReferenceNamePrefixBranch = "refs/heads/" + gitReferenceNamePrefixTag = "refs/tags/" + + gitHooksDir = "hooks" +) + +var ( + // TODO: should be coming from caller ALWAYS. 
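For reference, GetRefPath at the end of ref.go above expands a short ref name into a full git reference; a few illustrative inputs and outputs (names hypothetical):

// GetRefPath("main", enum.RefTypeBranch)      => "refs/heads/main"
// GetRefPath("v1.2.3", enum.RefTypeTag)       => "refs/tags/v1.2.3"
// GetRefPath("42", enum.RefTypePullReqHead)   => "refs/pullreq/42/head"
// GetRefPath("42", enum.RefTypePullReqMerge)  => "refs/pullreq/42/merge"
// GetRefPath("refs/notes/x", enum.RefTypeRaw) => "refs/notes/x"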
+ SystemIdentity = &rpc.Identity{ + Name: "gitness", + Email: "system@gitness", + } + + gitServerHookNames = []string{ + "pre-receive", + // "update", // update is disabled for performance reasons (called once for every ref) + "post-receive", + } + + // gitSHARegex defines the valid SHA format accepted by GIT (full form and short forms). + // Note: as of now SHA is at most 40 characters long, but in the future it's moving to sha256 + // which is 64 chars - keep this forward-compatible. + gitSHARegex = regexp.MustCompile("^[0-9a-f]{4,64}$") +) + +type Storage interface { + Save(filePath string, data io.Reader) (string, error) +} + +type RepositoryService struct { + rpc.UnimplementedRepositoryServiceServer + adapter GitAdapter + store Storage + reposRoot string + tmpDir string + gitHookPath string + reposGraveyard string +} + +func NewRepositoryService(adapter GitAdapter, store Storage, reposRoot string, tmpDir string, + gitHookPath string, reposGraveyard string) (*RepositoryService, error) { + return &RepositoryService{ + adapter: adapter, + store: store, + reposRoot: reposRoot, + tmpDir: tmpDir, + gitHookPath: gitHookPath, + reposGraveyard: reposGraveyard, + }, nil +} + +func (s RepositoryService) CreateRepository(stream rpc.RepositoryService_CreateRepositoryServer) error { + // first get repo params from stream + request, err := stream.Recv() + if err != nil { + return status.Errorf(codes.Internal, "cannot receive create repository data") + } + + header := request.GetHeader() + if header == nil { + return status.Errorf(codes.Internal, "expected header to be first message in stream") + } + log.Info().Msgf("received a create repository request %v", header) + + base := header.GetBase() + if base == nil { + return types.ErrBaseCannotBeEmpty + } + + committer := base.GetActor() + if header.GetCommitter() != nil { + committer = header.GetCommitter() + } + committerDate := time.Now().UTC() + if header.GetCommitterDate() != 0 { + committerDate = time.Unix(header.GetCommitterDate(), 0) + } + + author := committer + if header.GetAuthor() != nil { + author = header.GetAuthor() + } + authorDate := committerDate + if header.GetAuthorDate() != 0 { + authorDate = time.Unix(header.GetAuthorDate(), 0) + } + + nextFSElement := func() (*rpc.FileUpload, error) { + m, errStream := stream.Recv() + if errStream != nil { + return nil, errStream + } + return m.GetFile(), nil + } + + err = s.createRepositoryInternal( + stream.Context(), + base, + header.GetDefaultBranch(), + nextFSElement, + committer, + committerDate, + author, + authorDate, + ) + if err != nil { + return err + } + + res := &rpc.CreateRepositoryResponse{} + err = stream.SendAndClose(res) + if err != nil { + return status.Errorf(codes.Internal, "cannot send completion response: %v", err) + } + + return nil +} + +//nolint:gocognit // refactor if needed +func (s RepositoryService) createRepositoryInternal( + ctx context.Context, + base *rpc.WriteRequest, + defaultBranch string, + nextFSElement func() (*rpc.FileUpload, error), + committer *rpc.Identity, + committerDate time.Time, + author *rpc.Identity, + authorDate time.Time, +) error { + log := log.Ctx(ctx) + repoPath := getFullPathForRepo(s.reposRoot, base.GetRepoUid()) + if _, err := os.Stat(repoPath); !os.IsNotExist(err) { + return status.Errorf(codes.AlreadyExists, "repository exists already: %v", repoPath) + } + + // create repository in repos folder + ctx, cancel := context.WithCancel(ctx) + defer cancel() + err := s.adapter.InitRepository(ctx, repoPath, true) + // delete repo dir on error + 
defer func() { + if err != nil { + cleanuperr := s.DeleteRepositoryBestEffort(ctx, base.GetRepoUid()) + if cleanuperr != nil { + log.Warn().Err(cleanuperr).Msg("failed to cleanup repo dir") + } + } + }() + + if err != nil { + return processGitErrorf(err, "failed to initialize the repository") + } + + // update default branch (currently set to non-existent branch) + err = s.adapter.SetDefaultBranch(ctx, repoPath, defaultBranch, true) + if err != nil { + return processGitErrorf(err, "error updating default branch for repo '%s'", base.GetRepoUid()) + } + + // only execute file creation logic if files are provided + //nolint: nestif + if nextFSElement != nil { + // we need temp dir for cloning + tempDir, err := os.MkdirTemp("", "*-"+base.GetRepoUid()) + if err != nil { + return fmt.Errorf("error creating temp dir for repo %s: %w", base.GetRepoUid(), err) + } + defer func(path string) { + // when repo is successfully created remove temp dir + errRm := os.RemoveAll(path) + if errRm != nil { + log.Err(errRm).Msg("failed to cleanup temporary dir.") + } + }(tempDir) + + // Clone repository to temp dir + if err = s.adapter.Clone(ctx, repoPath, tempDir, types.CloneRepoOptions{}); err != nil { + return processGitErrorf(err, "failed to clone repo") + } + + // logic for receiving files + filePaths := make([]string, 0, 16) + for { + var filePath string + filePath, err = s.handleFileUploadIfAvailable(ctx, tempDir, nextFSElement) + if errors.Is(err, io.EOF) { + log.Info().Msg("received stream EOF") + break + } + if err != nil { + return status.Errorf(codes.Internal, "failed to receive file: %v", err) + } + + filePaths = append(filePaths, filePath) + } + + if len(filePaths) > 0 { + if committer == nil { + committer = base.GetActor() + } + if author == nil { + author = committer + } + // NOTE: This creates the branch in origin repo (as it doesn't exist as of now) + // TODO: this should at least be a constant and not hardcoded? + if err = s.addFilesAndPush(ctx, tempDir, filePaths, "HEAD:"+defaultBranch, nil, author, authorDate, + committer, committerDate, "origin", "initial commit"); err != nil { + return err + } + } + } + + // setup server hook symlinks pointing to configured server hook binary + // IMPORTANT: Setup hooks after repo creation to avoid issues with externally dependent services. + for _, hook := range gitServerHookNames { + hookPath := path.Join(repoPath, gitHooksDir, hook) + err = os.Symlink(s.gitHookPath, hookPath) + if err != nil { + return status.Errorf(codes.Internal, + "failed to setup symlink for hook '%s' ('%s' -> '%s'): %s", hook, hookPath, s.gitHookPath, err) + } + } + + log.Info().Msgf("repository created. Path: %s", repoPath) + return nil +} + +// isValidGitSHA returns true iff the provided string is a valid git sha (short or long form). 
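+// For example (illustrative): isValidGitSHA("99a8") or a full 40/64-character lowercase hex string
+// matches, while isValidGitSHA("main") or mixed-case input does not.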
+func isValidGitSHA(sha string) bool {
+	return gitSHARegex.MatchString(sha)
+}
+
+func (s RepositoryService) DeleteRepository(
+	ctx context.Context,
+	request *rpc.DeleteRepositoryRequest,
+) (*rpc.DeleteRepositoryResponse, error) {
+	base := request.GetBase()
+
+	if base == nil {
+		return nil, types.ErrBaseCannotBeEmpty
+	}
+
+	repoPath := getFullPathForRepo(s.reposRoot, base.RepoUid)
+
+	if _, err := os.Stat(repoPath); err != nil && os.IsNotExist(err) {
+		return nil, ErrNotFound(err)
+	} else if err != nil {
+		return nil, fmt.Errorf("failed to check the status of the repository %v: %w", repoPath, err)
+	}
+
+	rmerr := s.DeleteRepositoryBestEffort(ctx, base.RepoUid)
+
+	return &rpc.DeleteRepositoryResponse{}, rmerr
+}
+
+func (s *RepositoryService) DeleteRepositoryBestEffort(ctx context.Context, repoUID string) error {
+	repoPath := getFullPathForRepo(s.reposRoot, repoUID)
+	tempPath := path.Join(s.reposGraveyard, repoUID)
+
+	// move current dir to a temp dir (prevent partial deletion)
+	if err := os.Rename(repoPath, tempPath); err != nil {
+		return fmt.Errorf("couldn't move dir %s to %s: %w", repoPath, tempPath, err)
+	}
+
+	if err := os.RemoveAll(tempPath); err != nil {
+		log.Ctx(ctx).Warn().Err(err).Msgf("failed to delete dir %s from graveyard", tempPath)
+	}
+	return nil
+}
+
+func (s RepositoryService) SyncRepository(
+	ctx context.Context,
+	request *rpc.SyncRepositoryRequest,
+) (*rpc.SyncRepositoryResponse, error) {
+	base := request.GetBase()
+	if base == nil {
+		return nil, types.ErrBaseCannotBeEmpty
+	}
+
+	repoPath := getFullPathForRepo(s.reposRoot, base.RepoUid)
+
+	// create repo if requested
+	_, err := os.Stat(repoPath)
+	if err != nil && !os.IsNotExist(err) {
+		return nil, ErrInternalf("failed to check the existence of the repo", err)
+	}
+
+	if os.IsNotExist(err) {
+		if !request.CreateIfNotExists {
+			return nil, ErrNotFound(err)
+		}
+
+		// the default branch doesn't matter for a sync,
+		// we create an empty repo and the head will be updated as part of the Sync.
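+		// Conceptually (illustrative): init an empty bare repo with a placeholder default branch,
+		// fetch everything from `source`, then point HEAD at the default branch the remote reports
+		// (roughly `git init --bare` + `git fetch` + `git symbolic-ref HEAD refs/heads/<default>`).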
+ const syncDefaultBranch = "main" + if err = s.createRepositoryInternal( + ctx, + base, + syncDefaultBranch, + nil, + nil, + time.Time{}, + nil, + time.Time{}, + ); err != nil { + return nil, err + } + } + + // sync repo content + err = s.adapter.Sync(ctx, repoPath, request.GetSource()) + if err != nil { + return nil, processGitErrorf(err, "failed to sync git repo") + } + + // get remote default branch + defaultBranch, err := s.adapter.GetRemoteDefaultBranch(ctx, request.GetSource()) + if errors.Is(err, types.ErrNoDefaultBranch) { + return &rpc.SyncRepositoryResponse{ + DefaultBranch: "", + }, nil + } + if err != nil { + return nil, processGitErrorf(err, "failed to get default branch from repo") + } + + // set default branch + err = s.adapter.SetDefaultBranch(ctx, repoPath, defaultBranch, true) + if err != nil { + return nil, processGitErrorf(err, "failed to set default branch of repo") + } + + return &rpc.SyncRepositoryResponse{ + DefaultBranch: defaultBranch, + }, nil +} + +func (s RepositoryService) HashRepository( + ctx context.Context, + request *rpc.HashRepositoryRequest, +) (*rpc.HashRepositoryResponse, error) { + base := request.GetBase() + if base == nil { + return nil, types.ErrBaseCannotBeEmpty + } + + repoPath := getFullPathForRepo(s.reposRoot, base.RepoUid) + + hashType, err := mapHashType(request.GetHashType()) + if err != nil { + return nil, ErrInvalidArgumentf("unknown hash type '%s'", request.GetHashType()) + } + + aggregationType, err := mapHashAggregationType(request.GetAggregationType()) + if err != nil { + return nil, ErrInvalidArgumentf("unknown aggregation type '%s'", request.GetAggregationType()) + } + + // add all references of the repo to the channel in a separate go routine, to allow streamed processing. + // Ensure we cancel the go routine in case we exit the func early. 
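+	// Illustrative shape of the data fed into the hasher (assumed from the calls below):
+	//   SerializeHead(defaultBranch), SerializeReference(ref1, sha1), SerializeReference(ref2, sha2), ...
+	// The producer goroutine emits these on hashChan, and hash.New(hashType, aggregationType)
+	// aggregates them into a single repository hash via SourceFromChannel.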
+ goCtx, cancel := context.WithCancel(ctx) + defer cancel() + + hashChan := make(chan hash.SourceNext) + + go func() { + // always close channel last before leaving go routine + defer close(hashChan) + defer func() { + if r := recover(); r != nil { + hashChan <- hash.SourceNext{ + Err: fmt.Errorf("panic received while filling data source: %s", debug.Stack()), + } + } + }() + + // add default branch to hash + defaultBranch, err := s.adapter.GetDefaultBranch(goCtx, repoPath) + if err != nil { + hashChan <- hash.SourceNext{ + Err: processGitErrorf(err, "failed to get default branch"), + } + return + } + + hashChan <- hash.SourceNext{ + Data: hash.SerializeHead(defaultBranch), + } + + err = s.adapter.WalkReferences(goCtx, repoPath, func(wre types.WalkReferencesEntry) error { + ref, ok := wre[types.GitReferenceFieldRefName] + if !ok { + return errors.New("ref entry didn't contain the ref name") + } + sha, ok := wre[types.GitReferenceFieldObjectName] + if !ok { + return errors.New("ref entry didn't contain the ref object sha") + } + + hashChan <- hash.SourceNext{ + Data: hash.SerializeReference(ref, sha), + } + + return nil + }, &types.WalkReferencesOptions{}) + if err != nil { + hashChan <- hash.SourceNext{ + Err: processGitErrorf(err, "failed to walk references"), + } + } + }() + + hasher, err := hash.New(hashType, aggregationType) + if err != nil { + return nil, ErrInternalf("failed to get new reference hasher", err) + } + source := hash.SourceFromChannel(ctx, hashChan) + + res, err := hasher.Hash(source) + if err != nil { + return nil, processGitErrorf(err, "failed to hash repository") + } + + return &rpc.HashRepositoryResponse{ + Hash: res, + }, nil +} diff --git a/gitrpc/internal/service/shared_repo.go b/gitrpc/internal/service/shared_repo.go new file mode 100644 index 0000000000..62ffca5da0 --- /dev/null +++ b/gitrpc/internal/service/shared_repo.go @@ -0,0 +1,435 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package service + +import ( + "bytes" + "context" + "errors" + "fmt" + "io" + "regexp" + "strings" + "time" + + "github.com/harness/gitness/gitrpc/internal/gitea" + "github.com/harness/gitness/gitrpc/internal/middleware" + "github.com/harness/gitness/gitrpc/internal/tempdir" + "github.com/harness/gitness/gitrpc/internal/types" + "github.com/harness/gitness/gitrpc/rpc" + + "code.gitea.io/gitea/modules/git" + "github.com/rs/zerolog/log" + "google.golang.org/grpc/metadata" +) + +// SharedRepo is a type to wrap our upload repositories as a shallow clone. +type SharedRepo struct { + repoUID string + repo *git.Repository + remoteRepo *git.Repository + tmpPath string +} + +// NewSharedRepo creates a new temporary upload repository. 
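+// Typical usage (illustrative sketch, mirroring the callers in this package):
+//
+//	repo, _ := git.OpenRepository(ctx, repoPath)
+//	sharedRepo, err := NewSharedRepo(tmpDir, repoUID, repo)
+//	if err != nil {
+//		return err
+//	}
+//	defer sharedRepo.Close(ctx)
+//	if err := sharedRepo.Clone(ctx, ""); err != nil {
+//		return err
+//	}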
+func NewSharedRepo(baseTmpDir, repoUID string, remoteRepo *git.Repository) (*SharedRepo, error) { + tmpPath, err := tempdir.CreateTemporaryPath(baseTmpDir, repoUID) + if err != nil { + return nil, err + } + t := &SharedRepo{ + repoUID: repoUID, + remoteRepo: remoteRepo, + tmpPath: tmpPath, + } + return t, nil +} + +// Close the repository cleaning up all files. +func (r *SharedRepo) Close(ctx context.Context) { + defer r.repo.Close() + if err := tempdir.RemoveTemporaryPath(r.tmpPath); err != nil { + log.Ctx(ctx).Err(err).Msgf("Failed to remove temporary path %s", r.tmpPath) + } +} + +// Clone the base repository to our path and set branch as the HEAD. +func (r *SharedRepo) Clone(ctx context.Context, branchName string) error { + args := []string{"clone", "-s", "--bare"} + if branchName != "" { + args = append(args, "-b", strings.TrimPrefix(branchName, gitReferenceNamePrefixBranch)) + } + args = append(args, r.remoteRepo.Path, r.tmpPath) + + if _, _, err := git.NewCommand(ctx, args...).RunStdString(nil); err != nil { + stderr := err.Error() + if matched, _ := regexp.MatchString(".*Remote branch .* not found in upstream origin.*", stderr); matched { + return git.ErrBranchNotExist{ + Name: branchName, + } + } else if matched, _ = regexp.MatchString(".* repository .* does not exist.*", stderr); matched { + return fmt.Errorf("%s %w", r.repoUID, types.ErrNotFound) + } else { + return fmt.Errorf("Clone: %w %s", err, stderr) + } + } + gitRepo, err := git.OpenRepository(ctx, r.tmpPath) + if err != nil { + return processGitErrorf(err, "failed to open repo") + } + r.repo = gitRepo + return nil +} + +// Init the repository. +func (r *SharedRepo) Init(ctx context.Context) error { + if err := git.InitRepository(ctx, r.tmpPath, false); err != nil { + return err + } + gitRepo, err := git.OpenRepository(ctx, r.tmpPath) + if err != nil { + return processGitErrorf(err, "failed to open repo") + } + r.repo = gitRepo + return nil +} + +// SetDefaultIndex sets the git index to our HEAD. +func (r *SharedRepo) SetDefaultIndex(ctx context.Context) error { + if _, _, err := git.NewCommand(ctx, "read-tree", "HEAD").RunStdString(&git.RunOpts{Dir: r.tmpPath}); err != nil { + return fmt.Errorf("SetDefaultIndex: %w", err) + } + return nil +} + +// LsFiles checks if the given filename arguments are in the index. +func (r *SharedRepo) LsFiles(ctx context.Context, filenames ...string) ([]string, error) { + stdOut := new(bytes.Buffer) + stdErr := new(bytes.Buffer) + + cmdArgs := []string{"ls-files", "-z", "--"} + for _, arg := range filenames { + if arg != "" { + cmdArgs = append(cmdArgs, arg) + } + } + + if err := git.NewCommand(ctx, cmdArgs...). + Run(&git.RunOpts{ + Dir: r.tmpPath, + Stdout: stdOut, + Stderr: stdErr, + }); err != nil { + return nil, fmt.Errorf("unable to run git ls-files for temporary repo of: "+ + "%s Error: %w\nstdout: %s\nstderr: %s", + r.repoUID, err, stdOut.String(), stdErr.String()) + } + + filelist := make([]string, 0) + for _, line := range bytes.Split(stdOut.Bytes(), []byte{'\000'}) { + filelist = append(filelist, string(line)) + } + + return filelist, nil +} + +// RemoveFilesFromIndex removes the given files from the index. 
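+// Each non-empty path is fed to `git update-index --remove -z --index-info` as a removal entry of the
+// form "0 0000000000000000000000000000000000000000\t<path>" terminated by a NUL byte.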
+func (r *SharedRepo) RemoveFilesFromIndex(ctx context.Context, filenames ...string) error { + stdOut := new(bytes.Buffer) + stdErr := new(bytes.Buffer) + stdIn := new(bytes.Buffer) + for _, file := range filenames { + if file != "" { + stdIn.WriteString("0 0000000000000000000000000000000000000000\t") + stdIn.WriteString(file) + stdIn.WriteByte('\000') + } + } + + if err := git.NewCommand(ctx, "update-index", "--remove", "-z", "--index-info"). + Run(&git.RunOpts{ + Dir: r.tmpPath, + Stdin: stdIn, + Stdout: stdOut, + Stderr: stdErr, + }); err != nil { + return fmt.Errorf("unable to update-index for temporary repo: %s Error: %w\nstdout: %s\nstderr: %s", + r.repoUID, err, stdOut.String(), stdErr.String()) + } + return nil +} + +// WriteGitObject writes the provided content to the object db and returns its hash. +func (r *SharedRepo) WriteGitObject(ctx context.Context, content io.Reader) (string, error) { + stdOut := new(bytes.Buffer) + stdErr := new(bytes.Buffer) + + if err := git.NewCommand(ctx, "hash-object", "-w", "--stdin"). + Run(&git.RunOpts{ + Dir: r.tmpPath, + Stdin: content, + Stdout: stdOut, + Stderr: stdErr, + }); err != nil { + return "", fmt.Errorf("unable to hash-object to temporary repo: %s Error: %w\nstdout: %s\nstderr: %s", + r.repoUID, err, stdOut.String(), stdErr.String()) + } + + return strings.TrimSpace(stdOut.String()), nil +} + +// ShowFile dumps show file and write to io.Writer. +func (r *SharedRepo) ShowFile(ctx context.Context, filePath, commitHash string, writer io.Writer) error { + stderr := new(bytes.Buffer) + file := strings.TrimSpace(commitHash) + ":" + strings.TrimSpace(filePath) + cmd := git.NewCommand(ctx, "show", file) + if err := cmd.Run(&git.RunOpts{ + Dir: r.repo.Path, + Stdout: writer, + Stderr: stderr, + }); err != nil { + return fmt.Errorf("show file: %w - %s", err, stderr) + } + return nil +} + +// AddObjectToIndex adds the provided object hash to the index with the provided mode and path. +func (r *SharedRepo) AddObjectToIndex(ctx context.Context, mode, objectHash, objectPath string) error { + if _, _, err := git.NewCommand(ctx, "update-index", "--add", "--replace", "--cacheinfo", mode, objectHash, + objectPath).RunStdString(&git.RunOpts{Dir: r.tmpPath}); err != nil { + if matched, _ := regexp.MatchString(".*Invalid path '.*", err.Error()); matched { + return types.ErrInvalidPath + } + return fmt.Errorf("unable to add object to index at %s in temporary repo %s Error: %w", + objectPath, r.repoUID, err) + } + return nil +} + +// WriteTree writes the current index as a tree to the object db and returns its hash. +func (r *SharedRepo) WriteTree(ctx context.Context) (string, error) { + stdout, _, err := git.NewCommand(ctx, "write-tree").RunStdString(&git.RunOpts{Dir: r.tmpPath}) + if err != nil { + return "", fmt.Errorf("unable to write-tree in temporary repo for: %s Error: %w", + r.repoUID, err) + } + return strings.TrimSpace(stdout), nil +} + +// GetLastCommit gets the last commit ID SHA of the repo. +func (r *SharedRepo) GetLastCommit(ctx context.Context) (string, error) { + return r.GetLastCommitByRef(ctx, "HEAD") +} + +// GetLastCommitByRef gets the last commit ID SHA of the repo by ref. 
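+// For example (illustrative): GetLastCommitByRef(ctx, "refs/heads/main") resolves the SHA by running
+// `git rev-parse refs/heads/main` in the temporary clone.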
+func (r *SharedRepo) GetLastCommitByRef(ctx context.Context, ref string) (string, error) { + if ref == "" { + ref = "HEAD" + } + stdout, _, err := git.NewCommand(ctx, "rev-parse", ref).RunStdString(&git.RunOpts{Dir: r.tmpPath}) + if err != nil { + return "", fmt.Errorf("unable to rev-parse %s in temporary repo for: %s Error: %w", + ref, r.repoUID, err) + } + return strings.TrimSpace(stdout), nil +} + +// CommitTreeWithDate creates a commit from a given tree for the user with provided message. +func (r *SharedRepo) CommitTreeWithDate( + ctx context.Context, + parent string, + author, committer *rpc.Identity, + treeHash, message string, + signoff bool, + authorDate, committerDate time.Time, +) (string, error) { + // setup environment variables used by git-commit-tree + // See https://git-scm.com/book/en/v2/Git-Internals-Environment-Variables + env := []string{ + "GIT_AUTHOR_NAME=" + author.Name, + "GIT_AUTHOR_EMAIL=" + author.Email, + "GIT_AUTHOR_DATE=" + authorDate.Format(time.RFC3339), + "GIT_COMMITTER_NAME=" + committer.Name, + "GIT_COMMITTER_EMAIL=" + committer.Email, + "GIT_COMMITTER_DATE=" + committerDate.Format(time.RFC3339), + } + messageBytes := new(bytes.Buffer) + _, _ = messageBytes.WriteString(message) + _, _ = messageBytes.WriteString("\n") + + var args []string + if parent != "" { + args = []string{"commit-tree", treeHash, "-p", parent} + } else { + args = []string{"commit-tree", treeHash} + } + + // temporary no signing + args = append(args, "--no-gpg-sign") + + if signoff { + giteaSignature := &git.Signature{ + Name: committer.Name, + Email: committer.Email, + When: committerDate, + } + // Signed-off-by + _, _ = messageBytes.WriteString("\n") + _, _ = messageBytes.WriteString("Signed-off-by: ") + _, _ = messageBytes.WriteString(giteaSignature.String()) + } + + stdout := new(bytes.Buffer) + stderr := new(bytes.Buffer) + if err := git.NewCommand(ctx, args...). + Run(&git.RunOpts{ + Env: env, + Dir: r.tmpPath, + Stdin: messageBytes, + Stdout: stdout, + Stderr: stderr, + }); err != nil { + return "", fmt.Errorf("unable to commit-tree in temporary repo: %s Error: %w\nStdout: %s\nStderr: %s", + r.repoUID, err, stdout, stderr) + } + return strings.TrimSpace(stdout.String()), nil +} + +func (r *SharedRepo) PushDeleteBranch(ctx context.Context, writeRequest *rpc.WriteRequest, + branch string) error { + return r.push(ctx, writeRequest, "", GetReferenceFromBranchName(branch)) +} + +func (r *SharedRepo) PushCommitToBranch(ctx context.Context, writeRequest *rpc.WriteRequest, + commitSHA string, branch string) error { + return r.push(ctx, writeRequest, commitSHA, GetReferenceFromBranchName(branch)) +} + +func (r *SharedRepo) PushBranch(ctx context.Context, writeRequest *rpc.WriteRequest, + sourceBranch string, branch string) error { + return r.push(ctx, writeRequest, GetReferenceFromBranchName(sourceBranch), GetReferenceFromBranchName(branch)) +} +func (r *SharedRepo) PushTag(ctx context.Context, writeRequest *rpc.WriteRequest, + tagName string) error { + refTag := GetReferenceFromTagName(tagName) + return r.push(ctx, writeRequest, refTag, refTag) +} + +func (r *SharedRepo) PushDeleteTag(ctx context.Context, writeRequest *rpc.WriteRequest, + tagName string) error { + refTag := GetReferenceFromTagName(tagName) + return r.push(ctx, writeRequest, "", refTag) +} + +// push pushes the provided references to the provided branch in the original repository. 
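+// For example (illustrative refspecs): PushBranch sends "refs/heads/<src>:refs/heads/<dst>",
+// PushCommitToBranch sends "<sha>:refs/heads/<dst>", and PushDeleteBranch/PushDeleteTag send
+// ":refs/heads/<dst>" / ":refs/tags/<tag>" (an empty source deletes the destination ref).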
+func (r *SharedRepo) push(ctx context.Context, writeRequest *rpc.WriteRequest,
+	sourceRef, destinationRef string) error {
+	// Because the push invokes the git hooks we need to pass in the environment.
+	env := CreateEnvironmentForPush(ctx, writeRequest)
+	if err := gitea.Push(ctx, r.tmpPath, types.PushOptions{
+		Remote: r.remoteRepo.Path,
+		Branch: sourceRef + ":" + destinationRef,
+		Env:    env,
+	}); err != nil {
+		if git.IsErrPushOutOfDate(err) {
+			return err
+		} else if git.IsErrPushRejected(err) {
+			rejectErr := new(git.ErrPushRejected)
+			if errors.As(err, &rejectErr) {
+				log.Ctx(ctx).Info().Msgf("Unable to push back to repo from temporary repo due to rejection:"+
+					" %s (%s)\nStdout: %s\nStderr: %s\nError: %v",
+					r.repoUID, r.tmpPath, rejectErr.StdOut, rejectErr.StdErr, rejectErr.Err)
+			}
+			return err
+		}
+		return fmt.Errorf("unable to push back to repo from temporary repo: %s (%s) Error: %w",
+			r.repoUID, r.tmpPath, err)
+	}
+	return nil
+}
+
+// GetBranchCommit gets the commit object of the given branch.
+func (r *SharedRepo) GetBranchCommit(branch string) (*git.Commit, error) {
+	if r.repo == nil {
+		return nil, fmt.Errorf("repository has not been cloned")
+	}
+
+	return r.repo.GetBranchCommit(strings.TrimPrefix(branch, gitReferenceNamePrefixBranch))
+}
+
+// GetCommit gets the commit object of the given commit ID.
+func (r *SharedRepo) GetCommit(commitID string) (*git.Commit, error) {
+	if r.repo == nil {
+		return nil, fmt.Errorf("repository has not been cloned")
+	}
+	return r.repo.GetCommit(commitID)
+}
+
+// ASSUMPTION: writeRequest and writeRequest.Actor are never nil.
+func CreateEnvironmentForPush(ctx context.Context, writeRequest *rpc.WriteRequest) []string {
+	// don't send existing environment variables (os.Environ()), only send what's explicitly necessary.
+	// Otherwise we create implicit dependencies that are easy to break.
+	environ := []string{
+		// request id to use for hooks
+		EnvRequestID + "=" + middleware.RequestIDFrom(ctx),
+		// repo related info
+		EnvRepoUID + "=" + writeRequest.RepoUid,
+		// actor related info
+		EnvActorName + "=" + writeRequest.Actor.Name,
+		EnvActorEmail + "=" + writeRequest.Actor.Email,
+	}
+
+	// add all environment variables coming from client request
+	for _, envVar := range writeRequest.EnvVars {
+		environ = append(environ, fmt.Sprintf("%s=%s", envVar.Name, envVar.Value))
+	}
+
+	// add all environment variables from the metadata
+	if metadata, mOK := metadata.FromIncomingContext(ctx); mOK {
+		if envVars, eOK := metadata[rpc.MetadataKeyEnvironmentVariables]; eOK {
+			// TODO: should we do a sanity check?
+			environ = append(environ, envVars...)
+		}
+	}
+
+	return environ
+}
+
+// GetReferenceFromBranchName assumes the provided value is the branch name (not the full ref!),
+// first sanitizes it (trims spaces and strips any 'refs/heads/' prefix),
+// and then returns the full form of the branch reference.
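+// For example (illustrative): GetReferenceFromBranchName(" main ") and
+// GetReferenceFromBranchName("refs/heads/main") both return "refs/heads/main".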
+func GetReferenceFromBranchName(branchName string) string {
+	// remove spaces
+	branchName = strings.TrimSpace(branchName)
+	// remove `refs/heads/` prefix (shouldn't be there, but if it is remove it to try to avoid complications)
+	// NOTE: This is used to reduce misconfigurations via the API
+	// TODO: block via CLI, too
+	branchName = strings.TrimPrefix(branchName, gitReferenceNamePrefixBranch)
+
+	// return reference
+	return gitReferenceNamePrefixBranch + branchName
+}
+
+func GetReferenceFromTagName(tagName string) string {
+	// remove spaces
+	tagName = strings.TrimSpace(tagName)
+	// remove `refs/tags/` prefix (shouldn't be there, but if it is remove it to try to avoid complications)
+	// NOTE: This is used to reduce misconfigurations via the API
+	// TODO: block via CLI, too
+	tagName = strings.TrimPrefix(tagName, gitReferenceNamePrefixTag)
+
+	// return reference
+	return gitReferenceNamePrefixTag + tagName
+}
diff --git a/gitrpc/internal/service/submodule.go b/gitrpc/internal/service/submodule.go
new file mode 100644
index 0000000000..f9ddfc47ad
--- /dev/null
+++ b/gitrpc/internal/service/submodule.go
@@ -0,0 +1,44 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package service
+
+import (
+	"context"
+
+	"github.com/harness/gitness/gitrpc/internal/types"
+	"github.com/harness/gitness/gitrpc/rpc"
+)
+
+func (s RepositoryService) GetSubmodule(ctx context.Context,
+	request *rpc.GetSubmoduleRequest) (*rpc.GetSubmoduleResponse, error) {
+	base := request.GetBase()
+	if base == nil {
+		return nil, types.ErrBaseCannotBeEmpty
+	}
+
+	repoPath := getFullPathForRepo(s.reposRoot, base.GetRepoUid())
+	// TODO: do we need to validate request for nil?
+	gitSubmodule, err := s.adapter.GetSubmodule(ctx, repoPath, request.GetGitRef(), request.GetPath())
+	if err != nil {
+		return nil, processGitErrorf(err, "failed to get submodule")
+	}
+
+	return &rpc.GetSubmoduleResponse{
+		Submodule: &rpc.Submodule{
+			Name: gitSubmodule.Name,
+			Url:  gitSubmodule.URL,
+		},
+	}, nil
+}
diff --git a/gitrpc/internal/service/tag.go b/gitrpc/internal/service/tag.go
new file mode 100644
index 0000000000..17b81dbeea
--- /dev/null
+++ b/gitrpc/internal/service/tag.go
@@ -0,0 +1,363 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+ +package service + +import ( + "context" + "errors" + "fmt" + "strings" + "time" + + "github.com/harness/gitness/gitrpc/internal/types" + "github.com/harness/gitness/gitrpc/rpc" + + "code.gitea.io/gitea/modules/git" + "github.com/rs/zerolog/log" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +//nolint:gocognit // need to refactor this code +func (s ReferenceService) ListCommitTags(request *rpc.ListCommitTagsRequest, + stream rpc.ReferenceService_ListCommitTagsServer) error { + ctx := stream.Context() + base := request.GetBase() + if base == nil { + return types.ErrBaseCannotBeEmpty + } + + repoPath := getFullPathForRepo(s.reposRoot, base.GetRepoUid()) + + // get all required information from git references + tags, err := s.listCommitTagsLoadReferenceData(ctx, repoPath, request) + if err != nil { + return err + } + + // get all tag and commit SHAs + annotatedTagSHAs := make([]string, 0, len(tags)) + commitSHAs := make([]string, len(tags)) + for i, tag := range tags { + // always set the commit sha (will be overwritten for annotated tags) + commitSHAs[i] = tag.Sha + + if tag.IsAnnotated { + annotatedTagSHAs = append(annotatedTagSHAs, tag.Sha) + } + } + + // populate annotation data for all annotated tags + if len(annotatedTagSHAs) > 0 { + var aTags []types.Tag + aTags, err = s.adapter.GetAnnotatedTags(ctx, repoPath, annotatedTagSHAs) + if err != nil { + return processGitErrorf(err, "failed to get annotated tag") + } + + ai := 0 // index for annotated tags + ri := 0 // read index for all tags + wi := 0 // write index for all tags (as we might remove some non-commit tags) + for ; ri < len(tags); ri++ { + // always copy the current read element to the latest write position (doesn't mean it's kept) + tags[wi] = tags[ri] + commitSHAs[wi] = commitSHAs[ri] + + // keep the tag as is if it's not annotated + if !tags[ri].IsAnnotated { + wi++ + continue + } + + // filter out annotated tags that don't point to commit objects (blobs, trees, nested tags, ...) + // we don't actually wanna write it, so keep write index + // TODO: Support proper pagination: https://harness.atlassian.net/browse/CODE-669 + if aTags[ai].TargetType != types.GitObjectTypeCommit { + ai++ + continue + } + + // correct the commitSHA for the annotated tag (currently it is the tag sha, not the commit sha) + // NOTE: This is required as otherwise gitea will wrongly set the committer to the tagger signature. 
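+			// For example (illustrative): a lightweight tag ref points directly at a commit, so its ref
+			// sha already is the commit sha; an annotated tag ref points at a tag object, and that tag
+			// object's TargetSha is the commit it annotates.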
+ commitSHAs[wi] = aTags[ai].TargetSha + + // update tag information with annotation details + // NOTE: we keep the name from the reference and ignore the annotated name (similar to github) + tags[wi].Message = aTags[ai].Message + tags[wi].Title = aTags[ai].Title + tags[wi].Tagger = mapGitSignature(aTags[ai].Tagger) + + ai++ + wi++ + } + + // truncate slices based on what was removed + tags = tags[:wi] + commitSHAs = commitSHAs[:wi] + } + + // get commits if needed (single call for perf savings: 1s-4s vs 5s-20s) + if request.GetIncludeCommit() { + var gitCommits []types.Commit + gitCommits, err = s.adapter.GetCommits(ctx, repoPath, commitSHAs) + if err != nil { + return processGitErrorf(err, "failed to get commits") + } + + for i := range gitCommits { + tags[i].Commit, err = mapGitCommit(&gitCommits[i]) + if err != nil { + return err + } + } + } + + // send out all tags + for _, tag := range tags { + err = stream.Send(&rpc.ListCommitTagsResponse{ + Tag: tag, + }) + if err != nil { + return status.Errorf(codes.Internal, "failed to send tag: %v", err) + } + } + + return nil +} + +func newInstructorWithObjectTypeFilter(filter []types.GitObjectType) types.WalkReferencesInstructor { + return func(wre types.WalkReferencesEntry) (types.WalkInstruction, error) { + v, ok := wre[types.GitReferenceFieldObjectType] + if !ok { + return types.WalkInstructionStop, fmt.Errorf("ref field for object type is missing") + } + + // only handle if any of the filters match + for _, field := range filter { + if v == string(field) { + return types.WalkInstructionHandle, nil + } + } + + // by default skip + return types.WalkInstructionSkip, nil + } +} + +var listCommitTagsRefFields = []types.GitReferenceField{types.GitReferenceFieldRefName, + types.GitReferenceFieldObjectType, types.GitReferenceFieldObjectName} +var listCommitTagsObjectTypeFilter = []types.GitObjectType{types.GitObjectTypeCommit, types.GitObjectTypeTag} + +func (s ReferenceService) listCommitTagsLoadReferenceData(ctx context.Context, + repoPath string, request *rpc.ListCommitTagsRequest) ([]*rpc.CommitTag, error) { + // TODO: can we be smarter with slice allocation + tags := make([]*rpc.CommitTag, 0, 16) + handler := listCommitTagsWalkReferencesHandler(&tags) + instructor, _, err := wrapInstructorWithOptionalPagination( + newInstructorWithObjectTypeFilter(listCommitTagsObjectTypeFilter), + request.GetPage(), + request.GetPageSize()) + if err != nil { + return nil, status.Errorf(codes.InvalidArgument, "invalid pagination details: %v", err) + } + + opts := &types.WalkReferencesOptions{ + Patterns: createReferenceWalkPatternsFromQuery(gitReferenceNamePrefixTag, request.GetQuery()), + Sort: mapListCommitTagsSortOption(request.Sort), + Order: mapSortOrder(request.Order), + Fields: listCommitTagsRefFields, + Instructor: instructor, + // we do post-filtering, so we can't restrict the git output ... 
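+		// (e.g. with page=2 and pageSize=10 the wrapped instructor skips the first 10 matching refs,
+		// handles the 11th-20th, and then stops the walk, so the walk itself isn't depth-limited here.)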
+ MaxWalkDistance: 0, + } + + err = s.adapter.WalkReferences(ctx, repoPath, handler, opts) + if err != nil { + return nil, processGitErrorf(err, "failed to walk tag references") + } + + log.Ctx(ctx).Trace().Msgf("git adapter returned %d tags", len(tags)) + + return tags, nil +} + +func listCommitTagsWalkReferencesHandler(tags *[]*rpc.CommitTag) types.WalkReferencesHandler { + return func(e types.WalkReferencesEntry) error { + fullRefName, ok := e[types.GitReferenceFieldRefName] + if !ok { + return fmt.Errorf("entry missing reference name") + } + objectSHA, ok := e[types.GitReferenceFieldObjectName] + if !ok { + return fmt.Errorf("entry missing object sha") + } + objectTypeRaw, ok := e[types.GitReferenceFieldObjectType] + if !ok { + return fmt.Errorf("entry missing object type") + } + + tag := &rpc.CommitTag{ + Name: fullRefName[len(gitReferenceNamePrefixTag):], + Sha: objectSHA, + IsAnnotated: objectTypeRaw == string(types.GitObjectTypeTag), + } + + // TODO: refactor to not use slice pointers? + *tags = append(*tags, tag) + + return nil + } +} +func (s ReferenceService) CreateCommitTag( + ctx context.Context, + request *rpc.CreateCommitTagRequest, +) (*rpc.CreateCommitTagResponse, error) { + base := request.GetBase() + if base == nil { + return nil, types.ErrBaseCannotBeEmpty + } + + repoPath := getFullPathForRepo(s.reposRoot, base.GetRepoUid()) + + repo, err := git.OpenRepository(ctx, repoPath) + if err != nil { + return nil, processGitErrorf(err, "failed to open repo") + } + + sharedRepo, err := NewSharedRepo(s.tmpDir, base.GetRepoUid(), repo) + if err != nil { + return nil, processGitErrorf(err, "failed to create new shared repo") + } + + defer sharedRepo.Close(ctx) + + // clone repo (with HEAD branch - target might be anything) + err = sharedRepo.Clone(ctx, "") + if err != nil { + return nil, processGitErrorf(err, "failed to clone shared repo") + } + + // get target commit (as target could be branch/tag/commit, and tag can't be pushed using source:destination syntax) + // NOTE: in case the target is an annotated tag, the targetCommit title and message are that of the tag, not the commit + targetCommit, err := s.adapter.GetCommit(ctx, sharedRepo.tmpPath, strings.TrimSpace(request.GetTarget())) + if git.IsErrNotExist(err) { + return nil, ErrNotFoundf("target '%s' doesn't exist", request.GetTarget()) + } + if err != nil { + return nil, fmt.Errorf("failed to get commit id for target '%s': %w", request.GetTarget(), err) + } + + tagger := base.GetActor() + if request.GetTagger() != nil { + tagger = request.GetTagger() + } + taggerDate := time.Now().UTC() + if request.GetTaggerDate() != 0 { + taggerDate = time.Unix(request.GetTaggerDate(), 0) + } + + createTagRequest := &types.CreateTagOptions{ + Message: request.GetMessage(), + Tagger: types.Signature{ + Identity: types.Identity{ + Name: tagger.Name, + Email: tagger.Email, + }, + When: taggerDate, + }, + } + err = s.adapter.CreateTag( + ctx, + sharedRepo.tmpPath, + request.GetTagName(), + targetCommit.SHA, + createTagRequest) + if errors.Is(err, types.ErrAlreadyExists) { + return nil, ErrAlreadyExistsf("tag '%s' already exists", request.GetTagName()) + } + if err != nil { + return nil, processGitErrorf(err, "Failed to create tag '%s'", request.GetTagName()) + } + + if err = sharedRepo.PushTag(ctx, base, request.GetTagName()); err != nil { + return nil, processGitErrorf(err, "Failed to push the tag to remote") + } + + var commitTag *rpc.CommitTag + if request.GetMessage() != "" { + tag, err := s.adapter.GetAnnotatedTag(ctx, repoPath, 
request.GetTagName()) + if err != nil { + return nil, fmt.Errorf("failed to read annotated tag after creation: %w", err) + } + commitTag = mapAnnotatedTag(tag) + } else { + commitTag = &rpc.CommitTag{ + Name: request.GetTagName(), + IsAnnotated: false, + Sha: targetCommit.SHA, + } + } + + // gitea overwrites some commit details in case getCommit(ref) was called with ref being a tag + // To avoid this issue, let's get the commit again using the actual id of the commit + // TODO: can we do this nicer? + rawCommit, err := s.adapter.GetCommit(ctx, repoPath, targetCommit.SHA) + if err != nil { + return nil, fmt.Errorf("failed to get the raw commit '%s' after tag creation: %w", targetCommit.SHA, err) + } + + commitTag.Commit, err = mapGitCommit(rawCommit) + if err != nil { + return nil, fmt.Errorf("failed to map target commit after tag creation: %w", err) + } + + return &rpc.CreateCommitTagResponse{Tag: commitTag}, nil +} + +func (s ReferenceService) DeleteTag( + ctx context.Context, + request *rpc.DeleteTagRequest, +) (*rpc.UpdateRefResponse, error) { + base := request.GetBase() + if base == nil { + return nil, types.ErrBaseCannotBeEmpty + } + + repoPath := getFullPathForRepo(s.reposRoot, base.GetRepoUid()) + + repo, err := git.OpenRepository(ctx, repoPath) + if err != nil { + return nil, processGitErrorf(err, "failed to open repo") + } + + sharedRepo, err := NewSharedRepo(s.tmpDir, base.GetRepoUid(), repo) + if err != nil { + return nil, processGitErrorf(err, "failed to create new shared repo") + } + + defer sharedRepo.Close(ctx) + + // clone repo (with HEAD branch - tag target might be anything) + err = sharedRepo.Clone(ctx, "") + if err != nil { + return nil, processGitErrorf(err, "failed to clone shared repo with tag '%s'", request.GetTagName()) + } + + if err = sharedRepo.PushDeleteTag(ctx, base, request.GetTagName()); err != nil { + return nil, processGitErrorf(err, "Failed to push the tag %s to remote", request.GetTagName()) + } + + return &rpc.UpdateRefResponse{}, nil +} diff --git a/gitrpc/internal/service/tree.go b/gitrpc/internal/service/tree.go new file mode 100644 index 0000000000..d19991faed --- /dev/null +++ b/gitrpc/internal/service/tree.go @@ -0,0 +1,149 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package service + +import ( + "context" + "fmt" + + "github.com/harness/gitness/gitrpc/internal/types" + "github.com/harness/gitness/gitrpc/rpc" + + "github.com/rs/zerolog/log" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +func (s RepositoryService) ListTreeNodes( + request *rpc.ListTreeNodesRequest, + stream rpc.RepositoryService_ListTreeNodesServer, +) error { + ctx := stream.Context() + base := request.GetBase() + if base == nil { + return types.ErrBaseCannotBeEmpty + } + + repoPath := getFullPathForRepo(s.reposRoot, base.GetRepoUid()) + + gitNodes, err := s.adapter.ListTreeNodes(ctx, repoPath, + request.GetGitRef(), request.GetPath()) + if err != nil { + return processGitErrorf(err, "failed to list tree nodes") + } + + log.Ctx(ctx).Trace().Msgf("git adapter returned %d nodes", len(gitNodes)) + + for _, gitNode := range gitNodes { + err = stream.Send(&rpc.ListTreeNodesResponse{ + Node: &rpc.TreeNode{ + Type: mapGitNodeType(gitNode.NodeType), + Mode: mapGitMode(gitNode.Mode), + Sha: gitNode.Sha, + Name: gitNode.Name, + Path: gitNode.Path, + }, + }) + if err != nil { + return status.Errorf(codes.Internal, "failed to send node: %v", err) + } + } + + return nil +} + +func (s RepositoryService) GetTreeNode(ctx context.Context, + request *rpc.GetTreeNodeRequest, +) (*rpc.GetTreeNodeResponse, error) { + base := request.GetBase() + if base == nil { + return nil, types.ErrBaseCannotBeEmpty + } + + repoPath := getFullPathForRepo(s.reposRoot, base.GetRepoUid()) + + gitNode, err := s.adapter.GetTreeNode(ctx, repoPath, request.GitRef, request.Path) + if err != nil { + return nil, processGitErrorf(err, "no such path '%s' in '%s'", request.Path, request.GetGitRef()) + } + + res := &rpc.GetTreeNodeResponse{ + Node: &rpc.TreeNode{ + Type: mapGitNodeType(gitNode.NodeType), + Mode: mapGitMode(gitNode.Mode), + Sha: gitNode.Sha, + Name: gitNode.Name, + Path: gitNode.Path, + }, + } + + if request.GetIncludeLatestCommit() { + pathDetails, err := s.adapter.PathsDetails(ctx, repoPath, request.GitRef, []string{request.Path}) + if err != nil { + return nil, err + } + + if len(pathDetails) != 1 { + return nil, fmt.Errorf("failed to get details for the path %s", request.Path) + } + + if pathDetails[0].LastCommit != nil { + res.Commit, err = mapGitCommit(pathDetails[0].LastCommit) + if err != nil { + return nil, err + } + } + } + + return res, nil +} + +func (s RepositoryService) PathsDetails(ctx context.Context, + request *rpc.PathsDetailsRequest, +) (*rpc.PathsDetailsResponse, error) { + base := request.GetBase() + if base == nil { + return nil, types.ErrBaseCannotBeEmpty + } + + repoPath := getFullPathForRepo(s.reposRoot, base.GetRepoUid()) + + pathsDetails, err := s.adapter.PathsDetails(ctx, repoPath, request.GetGitRef(), request.GetPaths()) + if err != nil { + return nil, processGitErrorf(err, "failed to get path details in '%s'", request.GetGitRef()) + } + + details := make([]*rpc.PathDetails, len(pathsDetails)) + for i, pathDetails := range pathsDetails { + var lastCommit *rpc.Commit + + if pathDetails.LastCommit != nil { + lastCommit, err = mapGitCommit(pathDetails.LastCommit) + if err != nil { + return nil, fmt.Errorf("failed to map commit: %w", err) + } + } + + details[i] = &rpc.PathDetails{ + Path: pathDetails.Path, + LastCommit: lastCommit, + Size: pathDetails.Size, + } + } + + return &rpc.PathsDetailsResponse{ + PathDetails: details, + }, nil +} diff --git a/gitrpc/internal/service/upload.go b/gitrpc/internal/service/upload.go new file mode 100644 index 
0000000000..1b854142ab --- /dev/null +++ b/gitrpc/internal/service/upload.go @@ -0,0 +1,189 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package service + +import ( + "bytes" + "context" + "errors" + "fmt" + "io" + "path/filepath" + "time" + + "github.com/harness/gitness/gitrpc/internal/types" + "github.com/harness/gitness/gitrpc/rpc" + + "github.com/rs/zerolog/log" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +// TODO: this should be taken as a struct input defined in proto. +func (s RepositoryService) addFilesAndPush( + ctx context.Context, + repoPath string, + filePaths []string, + branch string, + env []string, + author *rpc.Identity, + authorDate time.Time, + committer *rpc.Identity, + committerDate time.Time, + remote string, + message string, +) error { + if author == nil || committer == nil { + return status.Errorf(codes.InvalidArgument, "both author and committer have to be provided") + } + + err := s.adapter.AddFiles(repoPath, false, filePaths...) + if err != nil { + return processGitErrorf(err, "failed to add files") + } + err = s.adapter.Commit(ctx, repoPath, types.CommitChangesOptions{ + Committer: types.Signature{ + Identity: types.Identity{ + Name: committer.Name, + Email: committer.Email, + }, + When: committerDate, + }, + Author: types.Signature{ + Identity: types.Identity{ + Name: author.Name, + Email: author.Email, + }, + When: authorDate, + }, + Message: message, + }) + if err != nil { + return processGitErrorf(err, "failed to commit files") + } + + err = s.adapter.Push(ctx, repoPath, types.PushOptions{ + // TODO: Don't hard-code + Remote: remote, + Branch: branch, + Force: false, + Env: env, + Timeout: 0, + }) + if err != nil { + return processGitErrorf(err, "failed to push files") + } + + return nil +} + +func (s RepositoryService) handleFileUploadIfAvailable(ctx context.Context, basePath string, + nextFSElement func() (*rpc.FileUpload, error)) (string, error) { + log := log.Ctx(ctx) + + log.Info().Msg("waiting to receive file upload header") + header, err := getFileStreamHeader(nextFSElement) + if err != nil { + return "", err + } + + log.Info().Msgf("storing file at %s", header.Path) + // work with file content chunks + fileData := bytes.Buffer{} + fileSize := 0 + for { + log.Debug().Msg("waiting to receive data") + + var chunk *rpc.Chunk + chunk, err = getFileUploadChunk(nextFSElement) + if errors.Is(err, io.EOF) { + // only for a header we expect a stream EOF error (for chunk its a chunk.EOF). + return "", fmt.Errorf("data stream ended unexpectedly") + } + if err != nil { + return "", err + } + + size := len(chunk.Data) + + if size > 0 { + log.Debug().Msgf("received a chunk with size: %d", size) + + // TODO: file size could be checked on client side? 
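+			// Illustrative stream shape (as consumed by the helpers below): one FileUpload carrying a
+			// Header{Path}, then FileUpload messages carrying Chunk{Data}, and a final Chunk with Eof set;
+			// fileSize tracks the accumulated bytes against maxFileSize (1 MiB).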
+ fileSize += size + if fileSize > maxFileSize { + return "", status.Errorf(codes.InvalidArgument, "file is too large: %d > %d", fileSize, maxFileSize) + } + + // TODO: write in file as we go (instead of in buffer) + _, err = fileData.Write(chunk.Data) + if err != nil { + return "", status.Errorf(codes.Internal, "cannot write chunk data: %v", err) + } + } + + if chunk.Eof { + log.Info().Msg("Received file EOF") + break + } + } + fullPath := filepath.Join(basePath, header.Path) + log.Info().Msgf("saving file at path %s", fullPath) + _, err = s.store.Save(fullPath, &fileData) + if err != nil { + return "", status.Errorf(codes.Internal, "cannot save file to the store: %v", err) + } + + return fullPath, nil +} + +func getFileStreamHeader(nextFileUpload func() (*rpc.FileUpload, error)) (*rpc.FileUploadHeader, error) { + fs, err := getFileUpload(nextFileUpload) + if err != nil { + return nil, err + } + + header := fs.GetHeader() + if header == nil { + return nil, status.Errorf(codes.Internal, "file stream is in wrong order - expected header") + } + + return header, nil +} + +func getFileUploadChunk(nextFileUpload func() (*rpc.FileUpload, error)) (*rpc.Chunk, error) { + fs, err := getFileUpload(nextFileUpload) + if err != nil { + return nil, err + } + + chunk := fs.GetChunk() + if chunk == nil { + return nil, status.Errorf(codes.Internal, "file stream is in wrong order - expected chunk") + } + + return chunk, nil +} + +func getFileUpload(nextFileUpload func() (*rpc.FileUpload, error)) (*rpc.FileUpload, error) { + fs, err := nextFileUpload() + if err != nil { + return nil, err + } + if fs == nil { + return nil, status.Errorf(codes.Internal, "file stream wasn't found") + } + return fs, nil +} diff --git a/gitrpc/internal/slices/slice.go b/gitrpc/internal/slices/slice.go new file mode 100644 index 0000000000..7b4b1cd3d5 --- /dev/null +++ b/gitrpc/internal/slices/slice.go @@ -0,0 +1,31 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package slices + +// Index returns the index of the first occurrence of v in s, +// or -1 if not present. +func Index[E comparable](s []E, v E) int { + for i, vs := range s { + if v == vs { + return i + } + } + return -1 +} + +// Contains reports whether v is present in s. +func Contains[E comparable](s []E, v E) bool { + return Index(s, v) >= 0 +} diff --git a/gitrpc/internal/storage/local.go b/gitrpc/internal/storage/local.go new file mode 100644 index 0000000000..079afb56d4 --- /dev/null +++ b/gitrpc/internal/storage/local.go @@ -0,0 +1,47 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package storage + +import ( + "fmt" + "io" + "os" + "path/filepath" +) + +type LocalStore struct{} + +func NewLocalStore() *LocalStore { + return &LocalStore{} +} + +func (store *LocalStore) Save(filePath string, data io.Reader) (string, error) { + err := os.MkdirAll(filepath.Dir(filePath), 0o777) + if err != nil { + return "", err + } + file, err := os.Create(filePath) + if err != nil { + return "", fmt.Errorf("cannot create file: %w", err) + } + defer file.Close() + + _, err = io.Copy(file, data) + if err != nil { + return "", fmt.Errorf("cannot write to file: %w", err) + } + + return filePath, nil +} diff --git a/gitrpc/internal/streamio/stream.go b/gitrpc/internal/streamio/stream.go new file mode 100644 index 0000000000..09f4cabcc9 --- /dev/null +++ b/gitrpc/internal/streamio/stream.go @@ -0,0 +1,93 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package streamio + +import "io" + +const MaxBufferSize = 128 * 1024 + +type writer struct { + bufferSize int + sender func([]byte) error +} + +type Option func(w *writer) + +func WithBufferSize(size int) Option { + return func(w *writer) { + w.bufferSize = size + } +} + +func NewWriter(sender func(p []byte) error, options ...Option) io.Writer { + w := &writer{ + sender: sender, + } + + for _, option := range options { + option(w) + } + + if w.bufferSize == 0 || w.bufferSize > MaxBufferSize { + w.bufferSize = MaxBufferSize + } + + return w +} + +func (w *writer) Write(p []byte) (int, error) { + var sent int + + for len(p) > 0 { + chunkSize := len(p) + if chunkSize > w.bufferSize { + chunkSize = w.bufferSize + } + + if err := w.sender(p[:chunkSize]); err != nil { + return sent, err + } + + sent += chunkSize + p = p[chunkSize:] + } + + return sent, nil +} + +func NewReader(receiver func() ([]byte, error)) io.Reader { + return &reader{receiver: receiver} +} + +type reader struct { + receiver func() ([]byte, error) + data []byte + err error +} + +func (r *reader) Read(p []byte) (int, error) { + if len(r.data) == 0 && r.err == nil { + r.data, r.err = r.receiver() + } + + n := copy(p, r.data) + r.data = r.data[n:] + + if len(r.data) == 0 { + return n, r.err + } + + return n, nil +} diff --git a/gitrpc/internal/tempdir/file.go b/gitrpc/internal/tempdir/file.go new file mode 100644 index 0000000000..f7e33e5059 --- /dev/null +++ b/gitrpc/internal/tempdir/file.go @@ -0,0 +1,42 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package tempdir + +import ( + "fmt" + "os" +) + +// CreateTemporaryPath creates a temporary path. +func CreateTemporaryPath(reposTempPath, prefix string) (string, error) { + if reposTempPath != "" { + if err := os.MkdirAll(reposTempPath, os.ModePerm); err != nil { + return "", fmt.Errorf("failed to create directory %s: %w", reposTempPath, err) + } + } + basePath, err := os.MkdirTemp(reposTempPath, prefix+".git") + if err != nil { + return "", fmt.Errorf("failed to create dir %s-*.git: %w", prefix, err) + } + return basePath, nil +} + +// RemoveTemporaryPath removes the temporary path. +func RemoveTemporaryPath(basePath string) error { + if _, err := os.Stat(basePath); !os.IsNotExist(err) { + return os.RemoveAll(basePath) + } + return nil +} diff --git a/gitrpc/internal/tools.go b/gitrpc/internal/tools.go new file mode 100644 index 0000000000..605b9f0def --- /dev/null +++ b/gitrpc/internal/tools.go @@ -0,0 +1,24 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//go:build tools +// +build tools + +// following https://github.com/golang/go/wiki/Modules#how-can-i-track-tool-dependencies-for-a-module + +package internal + +import ( + _ "github.com/google/wire/cmd/wire" +) diff --git a/gitrpc/internal/types/errors.go b/gitrpc/internal/types/errors.go new file mode 100644 index 0000000000..e4d5cd2116 --- /dev/null +++ b/gitrpc/internal/types/errors.go @@ -0,0 +1,120 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package types + +import ( + "errors" + "fmt" + + "github.com/harness/gitness/gitrpc/enum" +) + +var ( + ErrAlreadyExists = errors.New("already exists") + ErrInvalidArgument = errors.New("invalid argument") + ErrRepositoryNotFound = errors.New("repository not found") + ErrRepositoryCorrupted = errors.New("repository corrupted") + ErrNotFound = errors.New("not found") + ErrInvalidPath = errors.New("path is invalid") + ErrUndefinedAction = errors.New("undefined action") + ErrActionNotAllowedOnEmptyRepo = errors.New("action not allowed on empty repository") + ErrContentSentBeforeAction = errors.New("content sent before action") + ErrActionListEmpty = errors.New("no commit actions to perform on repository") + ErrHeaderCannotBeEmpty = errors.New("header field cannot be empty") + ErrBaseCannotBeEmpty = errors.New("base field cannot be empty") + ErrSHADoesNotMatch = errors.New("sha does not match") + ErrEmptyBaseRef = errors.New("empty base reference") + ErrEmptyHeadRef = errors.New("empty head reference") + ErrNoDefaultBranch = errors.New("no default branch") + ErrFailedToConnect = errors.New("failed to connect") + ErrHunkNotFound = errors.New("hunk not found") + ErrEmptySHA = errors.New("empty SHA") +) + +// MergeConflictsError represents an error if merging fails with a conflict. +type MergeConflictsError struct { + Method enum.MergeMethod + CommitSHA string + StdOut string + StdErr string + Err error +} + +func IsMergeConflictsError(err error) bool { + return errors.Is(err, &MergeConflictsError{}) +} + +func (e *MergeConflictsError) Error() string { + return fmt.Sprintf("Merge Conflict Error: %v: %s\n%s", e.Err, e.StdErr, e.StdOut) +} + +func (e *MergeConflictsError) Unwrap() error { + return e.Err +} + +//nolint:errorlint // the purpose of this method is to check whether the target itself if of this type. +func (e *MergeConflictsError) Is(target error) bool { + _, ok := target.(*MergeConflictsError) + return ok +} + +// MergeUnrelatedHistoriesError represents an error if merging fails due to unrelated histories. +type MergeUnrelatedHistoriesError struct { + Method enum.MergeMethod + StdOut string + StdErr string + Err error +} + +func IsMergeUnrelatedHistoriesError(err error) bool { + return errors.Is(err, &MergeUnrelatedHistoriesError{}) +} + +func (e *MergeUnrelatedHistoriesError) Error() string { + return fmt.Sprintf("Merge UnrelatedHistories Error: %v: %s\n%s", e.Err, e.StdErr, e.StdOut) +} + +func (e *MergeUnrelatedHistoriesError) Unwrap() error { + return e.Err +} + +//nolint:errorlint // the purpose of this method is to check whether the target itself if of this type. +func (e *MergeUnrelatedHistoriesError) Is(target error) bool { + _, ok := target.(*MergeUnrelatedHistoriesError) + return ok +} + +// PathNotFoundError represents an error if a path in a repo can't be found. +type PathNotFoundError struct { + Path string +} + +func IsPathNotFoundError(err error) bool { + return errors.Is(err, &PathNotFoundError{}) +} + +func (e *PathNotFoundError) Error() string { + return fmt.Sprintf("path '%s' wasn't found in the repo", e.Path) +} + +func (e *PathNotFoundError) Unwrap() error { + return nil +} + +//nolint:errorlint // the purpose of this method is to check whether the target itself if of this type. 
+func (e *PathNotFoundError) Is(target error) bool { + _, ok := target.(*PathNotFoundError) + return ok +} diff --git a/gitrpc/internal/types/hunk.go b/gitrpc/internal/types/hunk.go new file mode 100644 index 0000000000..3d2473bbc8 --- /dev/null +++ b/gitrpc/internal/types/hunk.go @@ -0,0 +1,72 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package types + +import ( + "strconv" + "strings" +) + +type Hunk struct { + HunkHeader + Lines []string +} + +type HunkHeader struct { + OldLine int + OldSpan int + NewLine int + NewSpan int + Text string +} + +func (h *HunkHeader) IsZero() bool { + return h.OldLine == 0 && h.OldSpan == 0 && h.NewLine == 0 && h.NewSpan == 0 +} + +func (h *HunkHeader) IsValid() bool { + oldOk := h.OldLine == 0 && h.OldSpan == 0 || h.OldLine > 0 && h.OldSpan > 0 + newOk := h.NewLine == 0 && h.NewSpan == 0 || h.NewLine > 0 && h.NewSpan > 0 + return !h.IsZero() && oldOk && newOk +} + +func (h *HunkHeader) String() string { + sb := strings.Builder{} + + sb.WriteString("@@ -") + + sb.WriteString(strconv.Itoa(h.OldLine)) + if h.OldSpan != 1 { + sb.WriteByte(',') + sb.WriteString(strconv.Itoa(h.OldSpan)) + } + + sb.WriteString(" +") + + sb.WriteString(strconv.Itoa(h.NewLine)) + if h.NewSpan != 1 { + sb.WriteByte(',') + sb.WriteString(strconv.Itoa(h.NewSpan)) + } + + sb.WriteString(" @@") + + if h.Text != "" { + sb.WriteByte(' ') + sb.WriteString(h.Text) + } + + return sb.String() +} diff --git a/gitrpc/internal/types/types.go b/gitrpc/internal/types/types.go new file mode 100644 index 0000000000..1328a850af --- /dev/null +++ b/gitrpc/internal/types/types.go @@ -0,0 +1,344 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
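+// A usage sketch for the typed errors above (illustrative; err stands for any
+// error returned by a merge or tree call in this package):
+//
+//	if IsMergeConflictsError(err) {
+//		// surface the conflict to the caller instead of treating it as fatal
+//	}
+//
+//	// errors.Is unwraps, so wrapped errors are detected too:
+//	wrapped := fmt.Errorf("action failed: %w", &PathNotFoundError{Path: "README.md"})
+//	IsPathNotFoundError(wrapped) // true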
+
+package types
+
+import (
+	"fmt"
+	"io"
+	"time"
+)
+
+type CloneRepoOptions struct {
+	Timeout       time.Duration
+	Mirror        bool
+	Bare          bool
+	Quiet         bool
+	Branch        string
+	Shared        bool
+	NoCheckout    bool
+	Depth         int
+	Filter        string
+	SkipTLSVerify bool
+}
+
+type SortOrder int
+
+const (
+	SortOrderDefault SortOrder = iota
+	SortOrderAsc
+	SortOrderDesc
+)
+
+type GitObjectType string
+
+const (
+	GitObjectTypeCommit GitObjectType = "commit"
+	GitObjectTypeTree   GitObjectType = "tree"
+	GitObjectTypeBlob   GitObjectType = "blob"
+	GitObjectTypeTag    GitObjectType = "tag"
+)
+
+func ParseGitObjectType(t string) (GitObjectType, error) {
+	switch t {
+	case string(GitObjectTypeCommit):
+		return GitObjectTypeCommit, nil
+	case string(GitObjectTypeBlob):
+		return GitObjectTypeBlob, nil
+	case string(GitObjectTypeTree):
+		return GitObjectTypeTree, nil
+	case string(GitObjectTypeTag):
+		return GitObjectTypeTag, nil
+	default:
+		return GitObjectTypeBlob, fmt.Errorf("unknown git object type '%s'", t)
+	}
+}
+
+// GitReferenceField represents the different fields available when listing references.
+// For the full list, see https://git-scm.com/docs/git-for-each-ref#_field_names
+type GitReferenceField string
+
+const (
+	GitReferenceFieldRefName     GitReferenceField = "refname"
+	GitReferenceFieldObjectType  GitReferenceField = "objecttype"
+	GitReferenceFieldObjectName  GitReferenceField = "objectname"
+	GitReferenceFieldCreatorDate GitReferenceField = "creatordate"
+)
+
+func ParseGitReferenceField(f string) (GitReferenceField, error) {
+	switch f {
+	case string(GitReferenceFieldCreatorDate):
+		return GitReferenceFieldCreatorDate, nil
+	case string(GitReferenceFieldRefName):
+		return GitReferenceFieldRefName, nil
+	case string(GitReferenceFieldObjectName):
+		return GitReferenceFieldObjectName, nil
+	case string(GitReferenceFieldObjectType):
+		return GitReferenceFieldObjectType, nil
+	default:
+		return GitReferenceFieldRefName, fmt.Errorf("unknown git reference field '%s'", f)
+	}
+}
+
+type WalkInstruction int
+
+const (
+	WalkInstructionStop WalkInstruction = iota
+	WalkInstructionHandle
+	WalkInstructionSkip
+)
+
+type WalkReferencesEntry map[GitReferenceField]string
+
+// TODO: can be generic (so other walk methods can use the same)
+type WalkReferencesInstructor func(WalkReferencesEntry) (WalkInstruction, error)
+
+// TODO: can be generic (so other walk methods can use the same)
+type WalkReferencesHandler func(WalkReferencesEntry) error
+
+type WalkReferencesOptions struct {
+	// Patterns are the patterns used to pre-filter the references of the repo.
+	// OPTIONAL. By default all references are walked.
+	Patterns []string
+
+	// Fields indicates the fields that are passed to the instructor & handler.
+	// OPTIONAL. Default fields are:
+	// - GitReferenceFieldRefName
+	// - GitReferenceFieldObjectName
+	Fields []GitReferenceField
+
+	// Instructor indicates how to handle the reference.
+	// OPTIONAL. By default all references are handled.
+	// NOTE: once WalkInstructionStop is returned, the walking stops.
+	Instructor WalkReferencesInstructor
+
+	// Sort indicates the field by which the references should be sorted.
+	// OPTIONAL. By default GitReferenceFieldRefName is used.
+	Sort GitReferenceField
+
+	// Order indicates the order (asc or desc) of the sorted output.
+	Order SortOrder
+
+	// MaxWalkDistance is the maximum number of nodes that are iterated over before the walking stops.
+	// OPTIONAL. A value of <= 0 will walk all references.
+ // WARNING: Skipped elements count towards the walking distance + MaxWalkDistance int32 +} + +type Commit struct { + SHA string + Title string + Message string + Author Signature + Committer Signature +} + +type Branch struct { + Name string + SHA string + Commit *Commit +} + +type Tag struct { + Sha string + Name string + TargetSha string + TargetType GitObjectType + Title string + Message string + Tagger Signature +} + +type CreateTagOptions struct { + // Message is the optional message the tag will be created with - if the message is empty + // the tag will be lightweight, otherwise it'll be annotated. + Message string + + // Tagger is the information used in case the tag is annotated (Message is provided). + Tagger Signature +} + +// Signature represents the Author or Committer information. +type Signature struct { + Identity Identity + // When is the timestamp of the Signature. + When time.Time +} + +type Identity struct { + Name string + Email string +} + +func (i Identity) String() string { + return fmt.Sprintf("%s <%s>", i.Name, i.Email) +} + +type CommitChangesOptions struct { + Committer Signature + Author Signature + Message string +} + +type PushOptions struct { + Remote string + Branch string + Force bool + ForceWithLease string + Env []string + Timeout time.Duration + Mirror bool +} + +type TreeNodeWithCommit struct { + TreeNode + Commit *Commit +} + +type TreeNode struct { + NodeType TreeNodeType + Mode TreeNodeMode + Sha string + Name string + Path string +} + +// TreeNodeType specifies the different types of nodes in a git tree. +// IMPORTANT: has to be consistent with rpc.TreeNodeType (proto). +type TreeNodeType int + +const ( + TreeNodeTypeTree TreeNodeType = iota + TreeNodeTypeBlob + TreeNodeTypeCommit +) + +// TreeNodeMode specifies the different modes of a node in a git tree. +// IMPORTANT: has to be consistent with rpc.TreeNodeMode (proto). +type TreeNodeMode int + +const ( + TreeNodeModeFile TreeNodeMode = iota + TreeNodeModeSymlink + TreeNodeModeExec + TreeNodeModeTree + TreeNodeModeCommit +) + +type Submodule struct { + Name string + URL string +} + +type BlobReader struct { + SHA string + // Size is the actual size of the blob. + Size int64 + // ContentSize is the total number of bytes returned by the Content Reader. + ContentSize int64 + // Content contains the (partial) content of the blob. + Content io.ReadCloser +} + +// CommitDivergenceRequest contains the refs for which the converging commits should be counted. +type CommitDivergenceRequest struct { + // From is the ref from which the counting of the diverging commits starts. + From string + // To is the ref at which the counting of the diverging commits ends. + To string +} + +// CommitDivergence contains the information of the count of converging commits between two refs. +type CommitDivergence struct { + // Ahead is the count of commits the 'From' ref is ahead of the 'To' ref. + Ahead int32 + // Behind is the count of commits the 'From' ref is behind the 'To' ref. 
+ Behind int32 +} + +type PullRequest struct { + BaseRepoPath string + HeadRepoPath string + + BaseBranch string + HeadBranch string +} + +type DiffShortStat struct { + Files int + Additions int + Deletions int +} + +type DiffFileHeader struct { + OldFileName string + NewFileName string + Extensions map[string]string +} + +type DiffFileHunkHeaders struct { + FileHeader DiffFileHeader + HunksHeaders []HunkHeader +} + +type DiffCutParams struct { + LineStart int + LineStartNew bool + LineEnd int + LineEndNew bool + BeforeLines int + AfterLines int + LineLimit int +} + +type BlameReader interface { + NextPart() (*BlamePart, error) +} + +type BlamePart struct { + Commit Commit + Lines []string +} + +type PathRenameDetails struct { + OldPath string + NewPath string + CommitSHABefore string + CommitSHAAfter string +} + +type CommitFilter struct { + Path string + AfterRef string + Since int64 + Until int64 + Committer string +} + +type TempRepository struct { + Path string + BaseSHA string + HeadSHA string +} + +type PathDetails struct { + Path string + LastCommit *Commit + Size int64 +} + +type FileContent struct { + Path string + Content []byte +} diff --git a/gitrpc/kuberesolver.go b/gitrpc/kuberesolver.go new file mode 100644 index 0000000000..c175df9d8b --- /dev/null +++ b/gitrpc/kuberesolver.go @@ -0,0 +1,23 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gitrpc + +import ( + "github.com/sercand/kuberesolver/v5" +) + +func init() { + kuberesolver.RegisterInCluster() +} diff --git a/gitrpc/log_interceptor.go b/gitrpc/log_interceptor.go new file mode 100644 index 0000000000..712b2bdfb9 --- /dev/null +++ b/gitrpc/log_interceptor.go @@ -0,0 +1,72 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gitrpc + +import ( + "context" + + "github.com/harness/gitness/gitrpc/rpc" + + "google.golang.org/grpc" + "google.golang.org/grpc/metadata" +) + +type requestIDKey struct{} + +// ClientLogInterceptor injects the zerlog request ID into the metadata. +// That allows the gitrpc server to log with the same request ID as the client. 
+type ClientLogInterceptor struct {
+}
+
+func NewClientLogInterceptor() ClientLogInterceptor {
+	return ClientLogInterceptor{}
+}
+
+func (i ClientLogInterceptor) UnaryClientInterceptor() grpc.UnaryClientInterceptor {
+	return func(ctx context.Context, method string, req, reply interface{}, cc *grpc.ClientConn,
+		invoker grpc.UnaryInvoker, opts ...grpc.CallOption) error {
+		ctx = appendLoggingRequestIDToOutgoingMetadata(ctx)
+		return invoker(ctx, method, req, reply, cc, opts...)
+	}
+}
+
+func (i ClientLogInterceptor) StreamClientInterceptor() grpc.StreamClientInterceptor {
+	return func(ctx context.Context, desc *grpc.StreamDesc, cc *grpc.ClientConn, method string,
+		streamer grpc.Streamer, opts ...grpc.CallOption) (grpc.ClientStream, error) {
+		ctx = appendLoggingRequestIDToOutgoingMetadata(ctx)
+		return streamer(ctx, desc, cc, method, opts...)
+	}
+}
+
+// WithRequestID returns a copy of parent in which the request id value is set.
+// This can be used by external entities to pass request IDs to gitrpc.
+func WithRequestID(parent context.Context, v string) context.Context {
+	return context.WithValue(parent, requestIDKey{}, v)
+}
+
+// RequestIDFrom returns the value of the request ID key on the
+// context - ok is true iff a non-empty value existed.
+func RequestIDFrom(ctx context.Context) (string, bool) {
+	v, ok := ctx.Value(requestIDKey{}).(string)
+	return v, ok && v != ""
+}
+
+// appendLoggingRequestIDToOutgoingMetadata appends the zerolog request ID to the outgoing grpc metadata, if available.
+func appendLoggingRequestIDToOutgoingMetadata(ctx context.Context) context.Context {
+	if id, ok := RequestIDFrom(ctx); ok {
+		ctx = metadata.AppendToOutgoingContext(ctx, rpc.MetadataKeyRequestID, id)
+	}
+	return ctx
+}
diff --git a/gitrpc/mapping.go b/gitrpc/mapping.go
new file mode 100644
index 0000000000..e401f0ee2e
--- /dev/null
+++ b/gitrpc/mapping.go
@@ -0,0 +1,295 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
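+// A wiring sketch for the client log interceptors above (illustrative; the
+// address, the insecure credentials, and the requestID variable are assumptions):
+//
+//	logIntc := NewClientLogInterceptor()
+//	conn, err := grpc.Dial(
+//		"127.0.0.1:3001",
+//		grpc.WithTransportCredentials(insecure.NewCredentials()),
+//		grpc.WithChainUnaryInterceptor(logIntc.UnaryClientInterceptor()),
+//		grpc.WithChainStreamInterceptor(logIntc.StreamClientInterceptor()),
+//	)
+//
+//	// callers then propagate their request ID so server-side logs correlate:
+//	ctx = WithRequestID(ctx, requestID)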
+ +package gitrpc + +import ( + "fmt" + "time" + + "github.com/harness/gitness/gitrpc/hash" + "github.com/harness/gitness/gitrpc/rpc" +) + +func mapToRPCSortOrder(o SortOrder) rpc.SortOrder { + switch o { + case SortOrderAsc: + return rpc.SortOrder_Asc + case SortOrderDesc: + return rpc.SortOrder_Desc + case SortOrderDefault: + return rpc.SortOrder_Default + default: + // no need to error out - just use default for sorting + return rpc.SortOrder_Default + } +} + +func mapToRPCListBranchesSortOption(o BranchSortOption) rpc.ListBranchesRequest_SortOption { + switch o { + case BranchSortOptionName: + return rpc.ListBranchesRequest_Name + case BranchSortOptionDate: + return rpc.ListBranchesRequest_Date + case BranchSortOptionDefault: + return rpc.ListBranchesRequest_Default + default: + // no need to error out - just use default for sorting + return rpc.ListBranchesRequest_Default + } +} + +func mapToRPCListCommitTagsSortOption(o TagSortOption) rpc.ListCommitTagsRequest_SortOption { + switch o { + case TagSortOptionName: + return rpc.ListCommitTagsRequest_Name + case TagSortOptionDate: + return rpc.ListCommitTagsRequest_Date + case TagSortOptionDefault: + return rpc.ListCommitTagsRequest_Default + default: + // no need to error out - just use default for sorting + return rpc.ListCommitTagsRequest_Default + } +} + +func mapRPCBranch(b *rpc.Branch) (*Branch, error) { + if b == nil { + return nil, fmt.Errorf("rpc branch is nil") + } + + var commit *Commit + if b.GetCommit() != nil { + var err error + commit, err = mapRPCCommit(b.GetCommit()) + if err != nil { + return nil, err + } + } + + return &Branch{ + Name: b.Name, + SHA: b.Sha, + Commit: commit, + }, nil +} + +func mapRPCCommitTag(t *rpc.CommitTag) (*CommitTag, error) { + if t == nil { + return nil, fmt.Errorf("rpc commit tag is nil") + } + + var commit *Commit + if t.GetCommit() != nil { + var err error + commit, err = mapRPCCommit(t.GetCommit()) + if err != nil { + return nil, err + } + } + + var tagger *Signature + if t.GetTagger() != nil { + var err error + tagger, err = mapRPCSignature(t.GetTagger()) + if err != nil { + return nil, err + } + } + + return &CommitTag{ + Name: t.Name, + SHA: t.Sha, + IsAnnotated: t.IsAnnotated, + Title: t.Title, + Message: t.Message, + Tagger: tagger, + Commit: commit, + }, nil +} + +func mapRPCRenameDetails(c []*rpc.RenameDetails) []*RenameDetails { + renameDetailsList := make([]*RenameDetails, len(c)) + for i, detail := range c { + renameDetailsList[i] = &RenameDetails{ + OldPath: detail.OldPath, + NewPath: detail.NewPath, + CommitShaBefore: detail.CommitShaBefore, + CommitShaAfter: detail.CommitShaAfter, + } + } + return renameDetailsList +} + +func mapRPCCommit(c *rpc.Commit) (*Commit, error) { + if c == nil { + return nil, fmt.Errorf("rpc commit is nil") + } + + author, err := mapRPCSignature(c.GetAuthor()) + if err != nil { + return nil, fmt.Errorf("failed to map rpc author: %w", err) + } + + comitter, err := mapRPCSignature(c.GetCommitter()) + if err != nil { + return nil, fmt.Errorf("failed to map rpc committer: %w", err) + } + + return &Commit{ + SHA: c.GetSha(), + Title: c.GetTitle(), + Message: c.GetMessage(), + Author: *author, + Committer: *comitter, + }, nil +} + +func mapRPCSignature(s *rpc.Signature) (*Signature, error) { + if s == nil { + return nil, fmt.Errorf("rpc signature is nil") + } + + identity, err := mapRPCIdentity(s.GetIdentity()) + if err != nil { + return nil, fmt.Errorf("failed to map rpc identity: %w", err) + } + + when := time.Unix(s.When, 0) + + return &Signature{ + 
Identity: identity, + When: when, + }, nil +} + +func mapRPCIdentity(id *rpc.Identity) (Identity, error) { + if id == nil { + return Identity{}, fmt.Errorf("rpc identity is nil") + } + + return Identity{ + Name: id.GetName(), + Email: id.GetEmail(), + }, nil +} + +func mapRPCTreeNode(n *rpc.TreeNode) (TreeNode, error) { + if n == nil { + return TreeNode{}, fmt.Errorf("rpc tree node is nil") + } + + nodeType, err := mapRPCTreeNodeType(n.GetType()) + if err != nil { + return TreeNode{}, err + } + + mode, err := mapRPCTreeNodeMode(n.GetMode()) + if err != nil { + return TreeNode{}, err + } + + return TreeNode{ + Type: nodeType, + Mode: mode, + SHA: n.GetSha(), + Name: n.GetName(), + Path: n.GetPath(), + }, nil +} + +func mapRPCTreeNodeType(t rpc.TreeNodeType) (TreeNodeType, error) { + switch t { + case rpc.TreeNodeType_TreeNodeTypeBlob: + return TreeNodeTypeBlob, nil + case rpc.TreeNodeType_TreeNodeTypeCommit: + return TreeNodeTypeCommit, nil + case rpc.TreeNodeType_TreeNodeTypeTree: + return TreeNodeTypeTree, nil + default: + return TreeNodeTypeBlob, fmt.Errorf("unknown rpc tree node type: %d", t) + } +} + +func mapRPCTreeNodeMode(m rpc.TreeNodeMode) (TreeNodeMode, error) { + switch m { + case rpc.TreeNodeMode_TreeNodeModeFile: + return TreeNodeModeFile, nil + case rpc.TreeNodeMode_TreeNodeModeExec: + return TreeNodeModeExec, nil + case rpc.TreeNodeMode_TreeNodeModeSymlink: + return TreeNodeModeSymlink, nil + case rpc.TreeNodeMode_TreeNodeModeCommit: + return TreeNodeModeCommit, nil + case rpc.TreeNodeMode_TreeNodeModeTree: + return TreeNodeModeTree, nil + default: + return TreeNodeModeFile, fmt.Errorf("unknown rpc tree node mode: %d", m) + } +} + +func mapToRPCIdentityOptional(identity *Identity) *rpc.Identity { + if identity == nil { + return nil + } + + return &rpc.Identity{ + Name: identity.Name, + Email: identity.Email, + } +} + +func mapToRPCTimeOptional(t *time.Time) int64 { + if t == nil { + return 0 + } + + return t.Unix() +} + +func mapToRPCHashType(t hash.Type) (rpc.HashType, error) { + switch t { + case hash.TypeSHA256: + return rpc.HashType_HashTypeSHA256, nil + default: + return -1, fmt.Errorf("unknown hash type '%s'", t) + } +} + +func mapToRPCHashAggregationType(t hash.AggregationType) (rpc.HashAggregationType, error) { + switch t { + case hash.AggregationTypeXOR: + return rpc.HashAggregationType_HashAggregationTypeXOR, nil + default: + return -1, fmt.Errorf("unknown hash aggregation type '%s'", t) + } +} + +func mapHunkHeader(h *rpc.HunkHeader) HunkHeader { + return HunkHeader{ + OldLine: int(h.OldLine), + OldSpan: int(h.OldSpan), + NewLine: int(h.NewLine), + NewSpan: int(h.NewSpan), + Text: h.Text, + } +} + +func mapDiffFileHeader(h *rpc.DiffFileHeader) DiffFileHeader { + return DiffFileHeader{ + OldName: h.OldFileName, + NewName: h.NewFileName, + Extensions: h.Extensions, + } +} diff --git a/gitrpc/match_files.go b/gitrpc/match_files.go new file mode 100644 index 0000000000..44a252c486 --- /dev/null +++ b/gitrpc/match_files.go @@ -0,0 +1,65 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package gitrpc + +import ( + "context" + + "github.com/harness/gitness/gitrpc/rpc" +) + +type FileContent struct { + Path string + Content []byte +} + +type MatchFilesParams struct { + ReadParams + Ref string + DirPath string + Pattern string + MaxSize int +} + +type MatchFilesOutput struct { + Files []FileContent +} + +func (c *Client) MatchFiles(ctx context.Context, + params *MatchFilesParams, +) (*MatchFilesOutput, error) { + resp, err := c.repoService.MatchFiles(ctx, &rpc.MatchFilesRequest{ + Base: mapToRPCReadRequest(params.ReadParams), + Ref: params.Ref, + DirPath: params.DirPath, + Pattern: params.Pattern, + MaxSize: int32(params.MaxSize), + }) + if err != nil { + return nil, processRPCErrorf(err, "failed to match files") + } + + files := make([]FileContent, len(resp.Files)) + for i, f := range resp.Files { + files[i] = FileContent{ + Path: f.Path, + Content: f.Content, + } + } + + return &MatchFilesOutput{ + Files: files, + }, nil +} diff --git a/gitrpc/merge.go b/gitrpc/merge.go new file mode 100644 index 0000000000..ea87cc5392 --- /dev/null +++ b/gitrpc/merge.go @@ -0,0 +1,120 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gitrpc + +import ( + "context" + "time" + + "github.com/harness/gitness/gitrpc/enum" + "github.com/harness/gitness/gitrpc/rpc" +) + +// MergeParams is input structure object for merging operation. +type MergeParams struct { + WriteParams + BaseBranch string + // HeadRepoUID specifies the UID of the repo that contains the head branch (required for forking). + // WARNING: This field is currently not supported yet! + HeadRepoUID string + HeadBranch string + Title string + Message string + + // Committer overwrites the git committer used for committing the files + // (optional, default: actor) + Committer *Identity + // CommitterDate overwrites the git committer date used for committing the files + // (optional, default: current time on server) + CommitterDate *time.Time + // Author overwrites the git author used for committing the files + // (optional, default: committer) + Author *Identity + // AuthorDate overwrites the git author date used for committing the files + // (optional, default: committer date) + AuthorDate *time.Time + + RefType enum.RefType + RefName string + + // HeadExpectedSHA is commit sha on the head branch, if HeadExpectedSHA is older + // than the HeadBranch latest sha then merge will fail. + HeadExpectedSHA string + + Force bool + DeleteHeadBranch bool + + Method enum.MergeMethod +} + +// MergeOutput is result object from merging and returns +// base, head and commit sha. +type MergeOutput struct { + // BaseSHA is the sha of the latest commit on the base branch that was used for merging. + BaseSHA string + // HeadSHA is the sha of the latest commit on the head branch that was used for merging. 
+ HeadSHA string + // MergeBaseSHA is the sha of the merge base of the HeadSHA and BaseSHA + MergeBaseSHA string + // MergeSHA is the sha of the commit after merging HeadSHA with BaseSHA. + MergeSHA string +} + +// Merge method executes git merge operation. Refs can be sha, branch or tag. +// Based on input params.RefType merge can do checking or final merging of two refs. +// some examples: +// +// params.RefType = Undefined -> discard merge commit (only performs a merge check). +// params.RefType = Raw and params.RefName = refs/pull/1/ref will push to refs/pullreq/1/ref +// params.RefType = RefTypeBranch and params.RefName = "somebranch" -> merge and push to refs/heads/somebranch +// params.RefType = RefTypePullReqHead and params.RefName = "1" -> merge and push to refs/pullreq/1/head +// params.RefType = RefTypePullReqMerge and params.RefName = "1" -> merge and push to refs/pullreq/1/merge +// +// There are cases when you want to block merging and for that you will need to provide +// params.HeadExpectedSHA which will be compared with the latest sha from head branch +// if they are not the same error will be returned. +func (c *Client) Merge(ctx context.Context, params *MergeParams) (MergeOutput, error) { + if params == nil { + return MergeOutput{}, ErrNoParamsProvided + } + + resp, err := c.mergeService.Merge(ctx, &rpc.MergeRequest{ + Base: mapToRPCWriteRequest(params.WriteParams), + BaseBranch: params.BaseBranch, + HeadBranch: params.HeadBranch, + Title: params.Title, + Message: params.Message, + Author: mapToRPCIdentityOptional(params.Author), + AuthorDate: mapToRPCTimeOptional(params.AuthorDate), + Committer: mapToRPCIdentityOptional(params.Committer), + CommitterDate: mapToRPCTimeOptional(params.CommitterDate), + RefType: rpc.RefType(params.RefType), + RefName: params.RefName, + HeadExpectedSha: params.HeadExpectedSHA, + Force: params.Force, + DeleteHeadBranch: params.DeleteHeadBranch, + Method: params.Method.ToRPC(), + }) + if err != nil { + return MergeOutput{}, processRPCErrorf(err, "merging failed") + } + + return MergeOutput{ + BaseSHA: resp.GetBaseSha(), + HeadSHA: resp.GetHeadSha(), + MergeBaseSHA: resp.GetMergeBaseSha(), + MergeSHA: resp.GetMergeSha(), + }, nil +} diff --git a/gitrpc/operations.go b/gitrpc/operations.go new file mode 100644 index 0000000000..75d09fae9b --- /dev/null +++ b/gitrpc/operations.go @@ -0,0 +1,152 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gitrpc + +import ( + "bytes" + "context" + "errors" + "io" + "time" + + "github.com/harness/gitness/gitrpc/rpc" +) + +type FileAction string + +const ( + CreateAction FileAction = "CREATE" + UpdateAction FileAction = "UPDATE" + DeleteAction = "DELETE" + MoveAction = "MOVE" +) + +func (FileAction) Enum() []interface{} { + return []interface{}{CreateAction, UpdateAction, DeleteAction, MoveAction} +} + +// CommitFileAction holds file operation data. 
+type CommitFileAction struct { + Action FileAction + Path string + Payload []byte + SHA string +} + +// CommitFilesParams holds the data for file operations. +type CommitFilesParams struct { + WriteParams + Title string + Message string + Branch string + NewBranch string + Actions []CommitFileAction + + // Committer overwrites the git committer used for committing the files + // (optional, default: actor) + Committer *Identity + // CommitterDate overwrites the git committer date used for committing the files + // (optional, default: current time on server) + CommitterDate *time.Time + // Author overwrites the git author used for committing the files + // (optional, default: committer) + Author *Identity + // AuthorDate overwrites the git author date used for committing the files + // (optional, default: committer date) + AuthorDate *time.Time +} + +type CommitFilesResponse struct { + CommitID string +} + +func (c *Client) CommitFiles(ctx context.Context, params *CommitFilesParams) (CommitFilesResponse, error) { + stream, err := c.commitFilesService.CommitFiles(ctx) + if err != nil { + return CommitFilesResponse{}, processRPCErrorf(err, "failed to open file stream") + } + + if err = stream.Send(&rpc.CommitFilesRequest{ + Payload: &rpc.CommitFilesRequest_Header{ + Header: &rpc.CommitFilesRequestHeader{ + Base: mapToRPCWriteRequest(params.WriteParams), + BranchName: params.Branch, + NewBranchName: params.NewBranch, + Title: params.Title, + Message: params.Message, + Author: mapToRPCIdentityOptional(params.Author), + AuthorDate: mapToRPCTimeOptional(params.AuthorDate), + Committer: mapToRPCIdentityOptional(params.Committer), + CommitterDate: mapToRPCTimeOptional(params.CommitterDate), + }, + }, + }); err != nil { + return CommitFilesResponse{}, processRPCErrorf(err, "failed to send file headers") + } + + for _, action := range params.Actions { + // send headers + if err = stream.Send(&rpc.CommitFilesRequest{ + Payload: &rpc.CommitFilesRequest_Action{ + Action: &rpc.CommitFilesAction{ + Payload: &rpc.CommitFilesAction_Header{ + Header: &rpc.CommitFilesActionHeader{ + Action: rpc.CommitFilesActionHeader_ActionType( + rpc.CommitFilesActionHeader_ActionType_value[string(action.Action)]), + Path: action.Path, + Sha: action.SHA, + }, + }, + }, + }, + }); err != nil { + return CommitFilesResponse{}, processRPCErrorf(err, "failed to send file action to the stream") + } + + // send file content + buffer := make([]byte, FileTransferChunkSize) + reader := bytes.NewReader(action.Payload) + for { + n, err := reader.Read(buffer) + if errors.Is(err, io.EOF) { + break + } + if err != nil { + return CommitFilesResponse{}, processRPCErrorf(err, "cannot read buffer") + } + + if err = stream.Send(&rpc.CommitFilesRequest{ + Payload: &rpc.CommitFilesRequest_Action{ + Action: &rpc.CommitFilesAction{ + Payload: &rpc.CommitFilesAction_Content{ + Content: buffer[:n], + }, + }, + }, + }); err != nil { + return CommitFilesResponse{}, processRPCErrorf(err, "failed to send file to the stream") + } + } + } + + recv, err := stream.CloseAndRecv() + if err != nil { + return CommitFilesResponse{}, processRPCErrorf(err, "failed to close the stream") + } + + return CommitFilesResponse{ + CommitID: recv.CommitId, + }, nil +} diff --git a/gitrpc/params.go b/gitrpc/params.go new file mode 100644 index 0000000000..fa32e6dcb9 --- /dev/null +++ b/gitrpc/params.go @@ -0,0 +1,27 @@ +// Copyright 2023 Harness, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gitrpc + +type Repository interface { + GetGitUID() string +} + +// CreateRPCReadParams creates base read parameters for gitrpc read operations. +// IMPORTANT: repo is assumed to be not nil! +func CreateRPCReadParams(repo Repository) ReadParams { + return ReadParams{ + RepoUID: repo.GetGitUID(), + } +} diff --git a/gitrpc/pipeline.go b/gitrpc/pipeline.go new file mode 100644 index 0000000000..b83c40a31e --- /dev/null +++ b/gitrpc/pipeline.go @@ -0,0 +1,44 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gitrpc + +import ( + "context" + + "github.com/harness/gitness/gitrpc/rpc" +) + +type GeneratePipelineParams struct { + ReadParams +} + +type GeneratePipelinesOutput struct { + PipelineYAML []byte +} + +func (c *Client) GeneratePipeline(ctx context.Context, + params *GeneratePipelineParams, +) (GeneratePipelinesOutput, error) { + response, err := c.repoService.GeneratePipeline(ctx, &rpc.GeneratePipelineRequest{ + Base: mapToRPCReadRequest(params.ReadParams), + }) + if err != nil { + return GeneratePipelinesOutput{}, processRPCErrorf(err, "failed to generate pipeline") + } + + return GeneratePipelinesOutput{ + PipelineYAML: response.PipelineYaml, + }, nil +} diff --git a/gitrpc/proto/blame.proto b/gitrpc/proto/blame.proto new file mode 100644 index 0000000000..d5e92e9aa7 --- /dev/null +++ b/gitrpc/proto/blame.proto @@ -0,0 +1,27 @@ +syntax = "proto3"; +package rpc; + +option go_package = "github.com/harness/gitness/gitrpc/rpc"; + +import "shared.proto"; + +service BlameService { + rpc Blame(BlameRequest) returns (stream BlamePart); +} + +message BlameRequest { + ReadRequest base = 1; + string git_ref = 2; + string path = 3; + LineRange range = 4; +} + +message LineRange { + int32 from = 1; + int32 to = 2; +} + +message BlamePart { + Commit commit = 1; + repeated bytes lines = 2; +} diff --git a/gitrpc/proto/diff.proto b/gitrpc/proto/diff.proto new file mode 100644 index 0000000000..2e46738b25 --- /dev/null +++ b/gitrpc/proto/diff.proto @@ -0,0 +1,138 @@ +syntax = "proto3"; +package rpc; + +option go_package = "github.com/harness/gitness/gitrpc/rpc"; + +import "shared.proto"; + +// DiffService is a service which provides RPCs to inspect differences +// introduced between a set of commits. 
+service DiffService {
+  rpc RawDiff(DiffRequest) returns (stream RawDiffResponse) {}
+  rpc Diff(DiffRequest) returns (stream DiffResponse) {}
+  rpc CommitDiff(CommitDiffRequest) returns (stream CommitDiffResponse);
+  rpc DiffShortStat(DiffRequest) returns (DiffShortStatResponse) {}
+  rpc GetDiffHunkHeaders(GetDiffHunkHeadersRequest) returns (GetDiffHunkHeadersResponse) {}
+  rpc DiffCut(DiffCutRequest) returns (DiffCutResponse) {}
+}
+
+message DiffRequest {
+  ReadRequest base = 1;
+  // base_ref is the left side of the comparison and can be a branch, commit, or tag
+  string base_ref = 2;
+  // head_ref is the right side of the comparison and can be a branch, commit, or tag
+  string head_ref = 3;
+  // merge_base is used only for branch comparison; if true, the diff is computed
+  // between the merge base (the commit where the branch was created) and head_ref
+  bool merge_base = 4;
+  // include_patch indicates whether the file patches should be included in the response
+  bool include_patch = 5;
+}
+
+message RawDiffResponse {
+  bytes data = 1;
+}
+
+message DiffShortStatResponse {
+  int32 files = 1;
+  int32 additions = 2;
+  int32 deletions = 3;
+}
+
+message HunkHeader {
+  int32 old_line = 1;
+  int32 old_span = 2;
+  int32 new_line = 3;
+  int32 new_span = 4;
+  string text = 5;
+}
+
+message DiffFileHeader {
+  string old_file_name = 1;
+  string new_file_name = 2;
+  map<string, string> extensions = 3;
+}
+
+message DiffFileHunkHeaders {
+  DiffFileHeader file_header = 1;
+  repeated HunkHeader hunk_headers = 2;
+}
+
+message GetDiffHunkHeadersRequest {
+  ReadRequest base = 1;
+  string source_commit_sha = 2;
+  string target_commit_sha = 4;
+}
+
+message GetDiffHunkHeadersResponse {
+  repeated DiffFileHunkHeaders files = 1;
+}
+
+message DiffCutRequest {
+  ReadRequest base = 1;
+  string source_commit_sha = 2;
+  string source_branch = 3;
+  string target_commit_sha = 4;
+  string target_branch = 5;
+  string path = 6;
+  int32 line_start = 7;
+  bool line_start_new = 8;
+  int32 line_end = 9;
+  bool line_end_new = 10;
+}
+
+message DiffCutResponse {
+  HunkHeader hunk_header = 1;
+  string lines_header = 2;
+  repeated string lines = 3;
+  string merge_base_sha = 4;
+  string latest_source_sha = 5;
+}
+
+message DiffResponse {
+  // A list of different file statuses
+  enum FileStatus {
+    // undefined
+    UNDEFINED = 0;
+    // file has been added
+    ADDED = 1;
+    // file has been changed
+    MODIFIED = 2;
+    // file has been deleted
+    DELETED = 3;
+    // the file has been renamed
+    RENAMED = 4;
+  }
+  // The path and name of the file
+  string path = 1;
+  // The old path and name of the file
+  string old_path = 2;
+  // sha (SHA1 hash) of the file. For a changed/new file, it is the new SHA,
+  // and for a deleted file it becomes "000000".
+  string sha = 3;
+  // old_sha is the old index (SHA1 hash) of the file.
+  string old_sha = 4;
+  // status of the file.
+ FileStatus status = 5; + // total number of additions in the file + int32 additions = 6; + // total number of deletions in the file + int32 deletions = 7; + // number of changes in the file + int32 changes = 8; + // patch from the file diff + bytes patch = 9; + // is binary file + bool is_binary = 10; + // is submodule + bool is_submodule = 11; +} + +message CommitDiffRequest { + ReadRequest base = 1; + string sha = 2; +} + +message CommitDiffResponse { + bytes data = 1; +} \ No newline at end of file diff --git a/gitrpc/proto/http.proto b/gitrpc/proto/http.proto new file mode 100644 index 0000000000..140dc8f315 --- /dev/null +++ b/gitrpc/proto/http.proto @@ -0,0 +1,58 @@ +syntax = "proto3"; +package rpc; + +option go_package = "github.com/harness/gitness/gitrpc/rpc"; + +import "shared.proto"; + +// SmartHTTPService is a service that provides RPCs required for HTTP-based Git +// clones via the smart HTTP protocol. +service SmartHTTPService { + // The response body for GET /info/refs?service=git-upload-pack + // Will be invoked when the user executes a `git fetch`, meaning the server + // will upload the packs to that user. The user doesn't upload new objects. + rpc InfoRefs(InfoRefsRequest) returns (stream InfoRefsResponse) {} + + // ServicePack is just upload-pack or receive-pack + rpc ServicePack(stream ServicePackRequest) returns (stream ServicePackResponse) {} +} + +message InfoRefsRequest { + // Base specifies the base read parameters + ReadRequest base = 1; + // Service can be: upload-pack or receive-pack + string service = 2; + // Parameters to use with git -c (key=value pairs) + repeated string git_config_options = 3; + + // Git protocol version + string git_protocol = 4; +} + +message InfoRefsResponse { + bytes data = 1; +} + +message ServicePackRequest { + // Base specifies the base parameters. + // Depending on the service the matching base type has to be passed + oneof base { + ReadRequest read_base = 1; + WriteRequest write_base = 2; + }; + + // Service can be: upload-pack or receive-pack + string service = 3; + // Raw data to be copied to stdin of 'git upload-pack' + bytes data = 4; + // Parameters to use with git -c (key=value pairs) + repeated string git_config_options = 5; + // Git protocol version + string git_protocol = 6; +} + +message ServicePackResponse { + // Raw data from stdout of 'git upload-pack' + bytes data = 1; +} + diff --git a/gitrpc/proto/merge.proto b/gitrpc/proto/merge.proto new file mode 100644 index 0000000000..4419a057d2 --- /dev/null +++ b/gitrpc/proto/merge.proto @@ -0,0 +1,75 @@ +syntax = "proto3"; +package rpc; + +option go_package = "github.com/harness/gitness/gitrpc/rpc"; + +import "shared.proto"; + +// DiffService is a service which provides RPCs to inspect differences +// introduced between a set of commits. +service MergeService { + rpc Merge(MergeRequest) returns (MergeResponse) {} +} + + +message MergeRequest { + enum MergeMethod { + merge = 0; + squash = 1; + rebase = 2; + } + WriteRequest base = 1; + // head_branch is the source branch we want to merge + string head_branch = 2; + // base_branch is the branch into which the given commit shall be merged and whose + // reference is going to be updated. + string base_branch = 3; + // title is the title to use for the merge commit. + string title = 4; + // message is the message to use for the merge commit. 
+  string message = 5;
+  // author is the person who originally wrote the code
+  Identity author = 6;
+  // authorDate is the date when the code was written
+  int64 authorDate = 7;
+  // committer is the person who last applied the patch
+  Identity committer = 8;
+  // committerDate is the date when the code was applied
+  int64 committerDate = 9;
+
+  // ref_type is an optional value and is used to generate the full
+  // reference in which the merge result is stored.
+  RefType ref_type = 10;
+  // ref_name is an optional value and is used to generate the full
+  // reference in which the merge result is stored.
+  string ref_name = 11;
+
+  // head_expected_sha is a commit sha on the head branch; if it is older
+  // than the latest sha of head_branch, the merge will fail.
+  string head_expected_sha = 12;
+
+  // force merge
+  bool force = 13;
+  // delete branch after merge
+  bool delete_head_branch = 14;
+  // merging method
+  MergeMethod method = 15;
+}
+
+message MergeResponse {
+  // base_sha is the sha of the latest commit on the base branch that was used for merging.
+  string base_sha = 1;
+  // head_sha is the sha of the latest commit on the head branch that was used for merging.
+  string head_sha = 2;
+  // merge_base_sha is the sha of the merge base of the head_sha and base_sha
+  string merge_base_sha = 3;
+  // merge_sha is the sha of the commit after merging head_sha with base_sha.
+  string merge_sha = 4;
+}
+
+// MergeConflictError is an error returned in the case when merging two commits
+// fails due to a merge conflict.
+message MergeConflictError {
+  // ConflictingFiles is the set of files which have been conflicting.
+  repeated string conflicting_files = 1;
+}
\ No newline at end of file
diff --git a/gitrpc/proto/operations.proto b/gitrpc/proto/operations.proto
new file mode 100644
index 0000000000..8e731ab4d2
--- /dev/null
+++ b/gitrpc/proto/operations.proto
@@ -0,0 +1,73 @@
+syntax = "proto3";
+package rpc;
+
+option go_package = "github.com/harness/gitness/gitrpc/rpc";
+
+import "shared.proto";
+
+// CommitFilesService is a service which provides RPCs that interact with Git
+// files commit.
+service CommitFilesService {
+  rpc CommitFiles(stream CommitFilesRequest) returns (CommitFilesResponse);
+}
+
+// CommitFilesRequestHeader is the header of the CommitFiles stream that defines the commit details,
+// parent and other information related to the call.
+message CommitFilesRequestHeader {
+  WriteRequest base = 1;
+  string branch_name = 2;
+  string new_branch_name = 3;
+  string title = 4;
+  string message = 5;
+  Identity author = 6;
+  int64 authorDate = 7;
+  Identity committer = 8;
+  int64 committerDate = 9;
+}
+
+// CommitFilesActionHeader contains the details of the action to be performed.
+message CommitFilesActionHeader {
+  enum ActionType {
+    // CREATE creates a new file.
+    CREATE = 0;
+    // UPDATE updates an existing file.
+    UPDATE = 1;
+    // DELETE deletes an existing file or dir.
+    DELETE = 2;
+    // MOVE moves existing file to another dir.
+    MOVE = 3;
+  }
+
+  // action is the type of the action taken to build a commit. Not all fields are
+  // used for all of the actions.
+  ActionType action = 1;
+  // path refers to the file or directory being modified.
+  string path = 2;
+  string sha = 3;
+}
+
+// CommitFilesAction is the request message used to stream in the actions to build a commit.
+message CommitFilesAction {
+  oneof payload {
+    // header contains the details of action being performed. Header must be sent before the
+    // file if file is used by the action.
+ CommitFilesActionHeader header = 1; + // not used for DELETE action. + bytes content = 2; + } +} + +message CommitFilesRequest { + oneof payload { + // header defines the details of where to commit, the details and which commit to use as the parent. + // header must always be sent as the first request of the stream. + CommitFilesRequestHeader header = 1; + // action contains an action to build a commit. There can be multiple actions per stream. + CommitFilesAction action = 2; + } +} + +message CommitFilesResponse { + string commit_id = 1; + string branch = 2; +} diff --git a/gitrpc/proto/push.proto b/gitrpc/proto/push.proto new file mode 100644 index 0000000000..a294d3143c --- /dev/null +++ b/gitrpc/proto/push.proto @@ -0,0 +1,21 @@ +syntax = "proto3"; +package rpc; + +option go_package = "github.com/harness/gitness/gitrpc/rpc"; + +import "shared.proto"; + +service PushService { + rpc PushRemote(PushRemoteRequest) returns (PushRemoteResponse); +} + +message PushRemoteRequest { + ReadRequest base = 1; + string remote_url = 2; + int64 timeout = 3; +} + + +message PushRemoteResponse { +} + diff --git a/gitrpc/proto/ref.proto b/gitrpc/proto/ref.proto new file mode 100644 index 0000000000..b9f1fa025a --- /dev/null +++ b/gitrpc/proto/ref.proto @@ -0,0 +1,141 @@ +syntax = "proto3"; +package rpc; + +option go_package = "github.com/harness/gitness/gitrpc/rpc"; + +import "shared.proto"; + +service ReferenceService { + rpc CreateBranch(CreateBranchRequest) returns (CreateBranchResponse); + rpc GetBranch(GetBranchRequest) returns (GetBranchResponse); + rpc DeleteBranch(DeleteBranchRequest) returns (DeleteBranchResponse); + rpc ListBranches(ListBranchesRequest) returns (stream ListBranchesResponse); + rpc ListCommitTags(ListCommitTagsRequest) returns (stream ListCommitTagsResponse); + rpc CreateCommitTag(CreateCommitTagRequest) returns (CreateCommitTagResponse); + rpc DeleteTag(DeleteTagRequest) returns (UpdateRefResponse); + rpc GetRef(GetRefRequest) returns (GetRefResponse); + rpc UpdateRef(UpdateRefRequest) returns (UpdateRefResponse); +} + +message CreateCommitTagRequest { + WriteRequest base = 1; + string tag_name = 2; + string target = 3; + string message = 4; + Identity tagger = 5; + int64 taggerDate = 6; +} + +message CreateCommitTagResponse { + CommitTag tag = 1; +} + +message DeleteTagRequest{ + WriteRequest base = 1; + string tag_name = 2; +} + +message CreateBranchRequest { + WriteRequest base = 1; + string branch_name = 2; + string target = 3; +} + +message CreateBranchResponse { + Branch branch = 1; +} + +message GetBranchRequest { + ReadRequest base = 1; + string branch_name = 2; +} + +message GetBranchResponse { + Branch branch = 1; +} + +message DeleteBranchRequest { + WriteRequest base = 1; + string branch_name = 2; + bool force = 3; +} + +message DeleteBranchResponse { + string sha = 1; +} + +message ListBranchesRequest { + enum SortOption { + Default = 0; + Name = 1; + Date = 2; + } + + ReadRequest base = 1; + bool include_commit = 2; + string query = 3; + SortOption sort = 4; + SortOrder order = 5; + int32 page = 6; + int32 pageSize = 7; +} + +message ListBranchesResponse { + Branch branch = 1; +} + +message Branch { + string name = 1; + string sha = 2; + Commit commit = 3; +} + +message ListCommitTagsRequest { + enum SortOption { + Default = 0; + Name = 1; + Date = 2; + } + + ReadRequest base = 1; + bool include_commit = 2; + string query = 3; + SortOption sort = 4; + SortOrder order = 5; + int32 page = 6; + int32 pageSize = 7; +} + +message ListCommitTagsResponse { + CommitTag 
tag = 1; +} + +message CommitTag { + string name = 1; + string sha = 2; + bool is_annotated = 3; + string title = 4; + string message = 5; + Signature tagger = 6; + Commit commit = 7; +} + +message GetRefRequest { + ReadRequest base = 1; + string ref_name = 2; + RefType ref_type = 3; +} + +message GetRefResponse { + string sha = 1; +} + +message UpdateRefRequest { + WriteRequest base = 1; + string ref_name = 2; + RefType ref_type = 3; + string new_value = 4; + string old_value = 5; +} + +message UpdateRefResponse {} diff --git a/gitrpc/proto/repo.proto b/gitrpc/proto/repo.proto new file mode 100644 index 0000000000..633be054dc --- /dev/null +++ b/gitrpc/proto/repo.proto @@ -0,0 +1,262 @@ +syntax = "proto3"; +package rpc; + +option go_package = "github.com/harness/gitness/gitrpc/rpc"; + +import "shared.proto"; + +// RepositoryService is a service providing RPCs accessing repositories as a whole. +service RepositoryService { + rpc CreateRepository(stream CreateRepositoryRequest) returns (CreateRepositoryResponse); + rpc GetTreeNode(GetTreeNodeRequest) returns (GetTreeNodeResponse); + rpc ListTreeNodes(ListTreeNodesRequest) returns (stream ListTreeNodesResponse); + rpc PathsDetails(PathsDetailsRequest) returns (PathsDetailsResponse); + rpc GetSubmodule(GetSubmoduleRequest) returns (GetSubmoduleResponse); + rpc GetBlob(GetBlobRequest) returns (stream GetBlobResponse); + rpc ListCommits(ListCommitsRequest) returns (stream ListCommitsResponse); + rpc GetCommit(GetCommitRequest) returns (GetCommitResponse); + rpc GetCommitDivergences(GetCommitDivergencesRequest) returns (GetCommitDivergencesResponse); + rpc DeleteRepository(DeleteRepositoryRequest) returns (DeleteRepositoryResponse); + rpc SyncRepository(SyncRepositoryRequest) returns (SyncRepositoryResponse) {} + rpc HashRepository(HashRepositoryRequest) returns (HashRepositoryResponse) {} + rpc MergeBase(MergeBaseRequest) returns (MergeBaseResponse); + rpc MatchFiles(MatchFilesRequest) returns (MatchFilesResponse); + rpc GeneratePipeline(GeneratePipelineRequest) returns (GeneratePipelineResponse); +} + +message CreateRepositoryRequest { + oneof data { + CreateRepositoryRequestHeader header = 1; + FileUpload file = 2; + } +} + +message CreateRepositoryRequestHeader { + WriteRequest base = 1; + string default_branch = 2; + Identity author = 3; + int64 authorDate = 4; + Identity committer = 5; + int64 committerDate = 6; +} + +message CreateRepositoryResponse { } + +message GetTreeNodeRequest { + ReadRequest base = 1; + string git_ref = 2; + string path = 3; + bool include_latest_commit = 4; +} + +message GetTreeNodeResponse { + TreeNode node = 1; + Commit commit = 2; +} + +message ListTreeNodesRequest { + ReadRequest base = 1; + string git_ref = 2; + string path = 3; +} + +message ListTreeNodesResponse { + TreeNode node = 1; +} + +message TreeNode { + TreeNodeType type = 1; + TreeNodeMode mode = 2; + string sha = 3; + string name = 4; + string path = 5; +} + +enum TreeNodeType { + TreeNodeTypeTree = 0; + TreeNodeTypeBlob = 1; + TreeNodeTypeCommit = 2; +} + +enum TreeNodeMode { + TreeNodeModeFile = 0; + TreeNodeModeSymlink = 1; + TreeNodeModeExec = 2; + TreeNodeModeTree = 3; + TreeNodeModeCommit = 4; +} + +message PathsDetailsRequest { + ReadRequest base = 1; + string git_ref = 2; + repeated string paths = 3; +} + +message PathsDetailsResponse { + repeated PathDetails path_details = 1; +} + +message PathDetails { + string path = 1; + Commit last_commit = 2; + int64 size = 3; +} + +message GetCommitRequest { + ReadRequest base = 1; + string sha = 2; 
+} + +message GetCommitResponse { + Commit commit = 1; +} + +message ListCommitsRequest { + ReadRequest base = 1; + string git_ref = 2; + string after = 3; + int32 page = 4; + int32 limit = 5; + string path = 6; + int64 since = 7; + int64 until = 8; + string committer = 9; +} + +message ListCommitsResponse { + Commit commit = 1; + repeated RenameDetails rename_details = 2; +} + + +message RenameDetails{ + string old_path = 1; + string new_path = 2; + string commit_sha_before = 3; + string commit_sha_after = 4; +} + +message GetBlobRequest { + ReadRequest base = 1; + string sha = 2; + int64 size_limit = 3; +} + +message GetBlobResponse { + oneof data { + GetBlobResponseHeader header = 1; + bytes content = 2; + } +} + +message GetBlobResponseHeader { + string sha = 1; + int64 size = 2; + int64 content_size = 3; +} + +message GetSubmoduleRequest { + ReadRequest base = 1; + string git_ref = 2; + string path = 3; +} + +message GetSubmoduleResponse { + Submodule submodule = 1; +} + +message Submodule { + string name = 1; + string url = 2; +} + +message GetCommitDivergencesRequest { + ReadRequest base = 1; + int32 max_count = 2; + repeated CommitDivergenceRequest requests = 3; +} + +message CommitDivergenceRequest { + string from = 1; + string to = 2; +} + + +message GetCommitDivergencesResponse { + repeated CommitDivergence divergences = 1; +} + +message CommitDivergence { + int32 ahead = 1; + int32 behind = 2; +} + +message DeleteRepositoryRequest { + WriteRequest base = 1; +} + +message DeleteRepositoryResponse { +} + +message SyncRepositoryRequest { + WriteRequest base = 1; + string source = 2; + bool create_if_not_exists = 3; +} + +message SyncRepositoryResponse { + string default_branch = 1; +} + +enum HashType { + HashTypeSHA256 = 0; +} + +enum HashAggregationType { + HashAggregationTypeXOR = 0; +} + +message HashRepositoryRequest { + ReadRequest base = 1; + HashType hash_type = 2; + HashAggregationType aggregation_type = 3; +} + +message HashRepositoryResponse { + bytes hash = 1; +} + +message MergeBaseRequest { + ReadRequest base = 1; + string ref1 = 2; + string ref2 = 3; +} + +message MergeBaseResponse { + string merge_base_sha = 1; +} + +message FileContent { + string path = 1; + bytes content = 2; +} + +message MatchFilesRequest { + ReadRequest base = 1; + string ref = 2; + string dir_path = 3; + string pattern = 4; + int32 max_size = 5; +} + +message MatchFilesResponse { + repeated FileContent files = 1; +} + +message GeneratePipelineRequest { + ReadRequest base = 1; +} + +message GeneratePipelineResponse { + bytes pipeline_yaml = 1; +} diff --git a/gitrpc/proto/shared.proto b/gitrpc/proto/shared.proto new file mode 100644 index 0000000000..cb06620484 --- /dev/null +++ b/gitrpc/proto/shared.proto @@ -0,0 +1,74 @@ +syntax = "proto3"; +package rpc; + +option go_package = "github.com/harness/gitness/gitrpc/rpc"; + +message ReadRequest { + string repo_uid = 1; +} + +message WriteRequest { + string repo_uid = 1; + repeated EnvVar env_vars = 2; + Identity actor = 3; +} + +message EnvVar { + string name = 1; + string value = 2; +} + +message FileUpload { + oneof data { + FileUploadHeader header = 1; + Chunk chunk = 2; + } +} + +message FileUploadHeader { + string path = 1; +} + +message Chunk { + bool eof = 1; + bytes data = 2; +} + +enum SortOrder { + Default = 0; + Asc = 1; + Desc = 2; +} + +message Commit { + string sha = 1; + string title = 2; + string message = 3; + Signature author = 4; + Signature committer = 5; +} + +message Signature { + Identity identity = 1; + int64 when = 2; +} 
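+// NOTE: `when` carries the signature timestamp as Unix seconds. The Go client maps
+// it with the standard time package, as in mapping.go above (illustrative sketch):
+//
+//	when := time.Unix(s.When, 0)  // rpc.Signature -> client-side Signature
+//	s.When = sig.When.Unix()      // client-side Signature -> rpc.Signature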
+ +message Identity { + string name = 1; + string email = 2; +} + +enum RefType { + Undefined = 0; + RefRaw = 1; + RefBranch = 2; + RefTag = 3; + RefPullReqHead = 4; + RefPullReqMerge = 5; +} + +// PathNotFoundError is an error returned in the case a provided path is not found in the repo. +message PathNotFoundError { + // path is the path that wasn't found in the repo. + string path = 1; +} \ No newline at end of file diff --git a/gitrpc/push_remote.go b/gitrpc/push_remote.go new file mode 100644 index 0000000000..0eed0bd88b --- /dev/null +++ b/gitrpc/push_remote.go @@ -0,0 +1,53 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gitrpc + +import ( + "context" + + "github.com/harness/gitness/gitrpc/rpc" +) + +type PushRemoteParams struct { + ReadParams + RemoteUrl string +} + +func (c *Client) PushRemote(ctx context.Context, params *PushRemoteParams) error { + if params == nil { + return ErrNoParamsProvided + } + + _, err := c.pushService.PushRemote(ctx, &rpc.PushRemoteRequest{ + Base: mapToRPCReadRequest(params.ReadParams), + RemoteUrl: params.RemoteUrl, + }) + if err != nil { + return processRPCErrorf(err, "failed to push to remote") + } + + return nil +} + +func (p PushRemoteParams) Validate() error { + if err := p.ReadParams.Validate(); err != nil { + return err + } + + if p.RemoteUrl == "" { + return ErrInvalidArgumentf("remote url cannot be empty") + } + return nil +} diff --git a/gitrpc/ref.go b/gitrpc/ref.go new file mode 100644 index 0000000000..3264ea0b83 --- /dev/null +++ b/gitrpc/ref.go @@ -0,0 +1,82 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+
+package gitrpc
+
+import (
+	"context"
+
+	"github.com/harness/gitness/gitrpc/enum"
+	"github.com/harness/gitness/gitrpc/rpc"
+)
+
+type GetRefParams struct {
+	ReadParams
+	Name string
+	Type enum.RefType
+}
+
+type GetRefResponse struct {
+	SHA string
+}
+
+func (c *Client) GetRef(ctx context.Context, params GetRefParams) (GetRefResponse, error) {
+	refType := enum.RefToRPC(params.Type)
+	if refType == rpc.RefType_Undefined {
+		return GetRefResponse{}, ErrInvalidArgumentf("invalid argument: '%s'", refType)
+	}
+
+	result, err := c.refService.GetRef(ctx, &rpc.GetRefRequest{
+		Base:    mapToRPCReadRequest(params.ReadParams),
+		RefName: params.Name,
+		RefType: refType,
+	})
+	if err != nil {
+		return GetRefResponse{}, processRPCErrorf(err, "failed to get %s ref '%s'", params.Type.String(), params.Name)
+	}
+
+	return GetRefResponse{SHA: result.Sha}, nil
+}
+
+type UpdateRefParams struct {
+	WriteParams
+	Type enum.RefType
+	Name string
+	// NewValue specifies the new value the reference should point to.
+	// An empty value will lead to the deletion of the reference.
+	NewValue string
+	// OldValue is an optional value that can be used to ensure that the reference
+	// is updated only if its current value matches the provided value.
+	OldValue string
+}
+
+func (c *Client) UpdateRef(ctx context.Context, params UpdateRefParams) error {
+	refType := enum.RefToRPC(params.Type)
+	if refType == rpc.RefType_Undefined {
+		return ErrInvalidArgumentf("invalid argument: '%s'", refType)
+	}
+
+	_, err := c.refService.UpdateRef(ctx, &rpc.UpdateRefRequest{
+		Base:     mapToRPCWriteRequest(params.WriteParams),
+		RefName:  params.Name,
+		RefType:  refType,
+		NewValue: params.NewValue,
+		OldValue: params.OldValue,
+	})
+	if err != nil {
+		return processRPCErrorf(err, "failed to update %s ref '%s'", params.Type.String(), params.Name)
+	}
+
+	return nil
+}
diff --git a/gitrpc/repo.go b/gitrpc/repo.go
new file mode 100644
index 0000000000..0e532714f2
--- /dev/null
+++ b/gitrpc/repo.go
@@ -0,0 +1,214 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package gitrpc
+
+import (
+	"context"
+	"fmt"
+	"time"
+
+	"github.com/harness/gitness/gitrpc/hash"
+	"github.com/harness/gitness/gitrpc/rpc"
+
+	gonanoid "github.com/matoous/go-nanoid/v2"
+	"github.com/rs/zerolog/log"
+)
+
+const (
+	// repoGitUIDLength is the length of the generated repo uid.
+	repoGitUIDLength = 42
+
+	// repoGitUIDAlphabet is the alphabet used for generating repo uids.
+	// NOTE: keep it lowercase and alphanumerical to avoid issues with case-insensitive filesystems.
+	repoGitUIDAlphabet = "abcdefghijklmnopqrstuvwxyz0123456789"
+)
+
+type CreateRepositoryParams struct {
+	// Create operation is different from all others (from the user's side), as the repository UID doesn't exist yet.
+ // Only take actor and envars as input and create WriteParams manually + Actor Identity + EnvVars map[string]string + + DefaultBranch string + Files []File + + // Committer overwrites the git committer used for committing the files + // (optional, default: actor) + Committer *Identity + // CommitterDate overwrites the git committer date used for committing the files + // (optional, default: current time on server) + CommitterDate *time.Time + // Author overwrites the git author used for committing the files + // (optional, default: committer) + Author *Identity + // AuthorDate overwrites the git author date used for committing the files + // (optional, default: committer date) + AuthorDate *time.Time +} + +type CreateRepositoryOutput struct { + UID string +} + +type DeleteRepositoryParams struct { + WriteParams +} + +type SyncRepositoryParams struct { + WriteParams + Source string + CreateIfNotExists bool +} + +type SyncRepositoryOutput struct { + DefaultBranch string +} + +type HashRepositoryParams struct { + ReadParams + HashType hash.Type + AggregationType hash.AggregationType +} + +type HashRepositoryOutput struct { + Hash []byte +} + +func (c *Client) CreateRepository(ctx context.Context, + params *CreateRepositoryParams) (*CreateRepositoryOutput, error) { + if params == nil { + return nil, ErrNoParamsProvided + } + + log := log.Ctx(ctx) + + uid, err := newRepositoryUID() + if err != nil { + return nil, fmt.Errorf("failed to create new uid: %w", err) + } + log.Info(). + Msgf("Create new git repository with uid '%s' and default branch '%s'", uid, params.DefaultBranch) + + ctx, cancel := context.WithTimeout(ctx, 60*time.Second) + defer cancel() + stream, err := c.repoService.CreateRepository(ctx) + if err != nil { + return nil, err + } + + log.Info().Msgf("Send header") + + writeParams := WriteParams{ + RepoUID: uid, + Actor: params.Actor, + EnvVars: params.EnvVars, + } + + req := &rpc.CreateRepositoryRequest{ + Data: &rpc.CreateRepositoryRequest_Header{ + Header: &rpc.CreateRepositoryRequestHeader{ + Base: mapToRPCWriteRequest(writeParams), + DefaultBranch: params.DefaultBranch, + Author: mapToRPCIdentityOptional(params.Author), + AuthorDate: mapToRPCTimeOptional(params.AuthorDate), + Committer: mapToRPCIdentityOptional(params.Committer), + CommitterDate: mapToRPCTimeOptional(params.CommitterDate), + }, + }, + } + if err = stream.Send(req); err != nil { + return nil, err + } + + for _, file := range params.Files { + log.Info().Msgf("Send file %s", file.Path) + + err = uploadFile(ctx, file, FileTransferChunkSize, func(fs *rpc.FileUpload) error { + return stream.Send(&rpc.CreateRepositoryRequest{ + Data: &rpc.CreateRepositoryRequest_File{ + File: fs, + }, + }) + }) + if err != nil { + return nil, err + } + } + + _, err = stream.CloseAndRecv() + if err != nil { + return nil, processRPCErrorf(err, "failed to create repo on server (uid: '%s')", uid) + } + + log.Info().Msgf("completed git repo setup.") + + return &CreateRepositoryOutput{UID: uid}, nil +} + +func newRepositoryUID() (string, error) { + return gonanoid.Generate(repoGitUIDAlphabet, repoGitUIDLength) +} + +func (c *Client) DeleteRepository(ctx context.Context, params *DeleteRepositoryParams) error { + if params == nil { + return ErrNoParamsProvided + } + _, err := c.repoService.DeleteRepository(ctx, &rpc.DeleteRepositoryRequest{ + Base: mapToRPCWriteRequest(params.WriteParams), + }) + if err != nil { + return processRPCErrorf(err, "failed to delete repository on server") + } + return nil +} + +func (c *Client) 
SyncRepository(ctx context.Context, params *SyncRepositoryParams) (*SyncRepositoryOutput, error) { + result, err := c.repoService.SyncRepository(ctx, &rpc.SyncRepositoryRequest{ + Base: mapToRPCWriteRequest(params.WriteParams), + Source: params.Source, + CreateIfNotExists: params.CreateIfNotExists, + }) + if err != nil { + return nil, processRPCErrorf(err, "failed to sync repository on server to match provided source") + } + + return &SyncRepositoryOutput{ + DefaultBranch: result.DefaultBranch, + }, nil +} + +func (c *Client) HashRepository(ctx context.Context, params *HashRepositoryParams) (*HashRepositoryOutput, error) { + hashType, err := mapToRPCHashType(params.HashType) + if err != nil { + return nil, fmt.Errorf("failed to map hash type: %w", err) + } + aggregationType, err := mapToRPCHashAggregationType(params.AggregationType) + if err != nil { + return nil, fmt.Errorf("failed to map aggregation type: %w", err) + } + + resp, err := c.repoService.HashRepository(ctx, &rpc.HashRepositoryRequest{ + Base: mapToRPCReadRequest(params.ReadParams), + HashType: hashType, + AggregationType: aggregationType, + }) + if err != nil { + return nil, processRPCErrorf(err, "failed to hash repository on server") + } + + return &HashRepositoryOutput{ + Hash: resp.GetHash(), + }, nil +} diff --git a/gitrpc/rpc/blame.pb.go b/gitrpc/rpc/blame.pb.go new file mode 100644 index 0000000000..783cfcf33d --- /dev/null +++ b/gitrpc/rpc/blame.pb.go @@ -0,0 +1,330 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.28.1 +// protoc v3.21.11 +// source: blame.proto + +package rpc + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type BlameRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Base *ReadRequest `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"` + GitRef string `protobuf:"bytes,2,opt,name=git_ref,json=gitRef,proto3" json:"git_ref,omitempty"` + Path string `protobuf:"bytes,3,opt,name=path,proto3" json:"path,omitempty"` + Range *LineRange `protobuf:"bytes,4,opt,name=range,proto3" json:"range,omitempty"` +} + +func (x *BlameRequest) Reset() { + *x = BlameRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_blame_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *BlameRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BlameRequest) ProtoMessage() {} + +func (x *BlameRequest) ProtoReflect() protoreflect.Message { + mi := &file_blame_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BlameRequest.ProtoReflect.Descriptor instead. 
+func (*BlameRequest) Descriptor() ([]byte, []int) { + return file_blame_proto_rawDescGZIP(), []int{0} +} + +func (x *BlameRequest) GetBase() *ReadRequest { + if x != nil { + return x.Base + } + return nil +} + +func (x *BlameRequest) GetGitRef() string { + if x != nil { + return x.GitRef + } + return "" +} + +func (x *BlameRequest) GetPath() string { + if x != nil { + return x.Path + } + return "" +} + +func (x *BlameRequest) GetRange() *LineRange { + if x != nil { + return x.Range + } + return nil +} + +type LineRange struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + From int32 `protobuf:"varint,1,opt,name=from,proto3" json:"from,omitempty"` + To int32 `protobuf:"varint,2,opt,name=to,proto3" json:"to,omitempty"` +} + +func (x *LineRange) Reset() { + *x = LineRange{} + if protoimpl.UnsafeEnabled { + mi := &file_blame_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *LineRange) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*LineRange) ProtoMessage() {} + +func (x *LineRange) ProtoReflect() protoreflect.Message { + mi := &file_blame_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use LineRange.ProtoReflect.Descriptor instead. +func (*LineRange) Descriptor() ([]byte, []int) { + return file_blame_proto_rawDescGZIP(), []int{1} +} + +func (x *LineRange) GetFrom() int32 { + if x != nil { + return x.From + } + return 0 +} + +func (x *LineRange) GetTo() int32 { + if x != nil { + return x.To + } + return 0 +} + +type BlamePart struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Commit *Commit `protobuf:"bytes,1,opt,name=commit,proto3" json:"commit,omitempty"` + Lines [][]byte `protobuf:"bytes,2,rep,name=lines,proto3" json:"lines,omitempty"` +} + +func (x *BlamePart) Reset() { + *x = BlamePart{} + if protoimpl.UnsafeEnabled { + mi := &file_blame_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *BlamePart) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BlamePart) ProtoMessage() {} + +func (x *BlamePart) ProtoReflect() protoreflect.Message { + mi := &file_blame_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BlamePart.ProtoReflect.Descriptor instead. 
+func (*BlamePart) Descriptor() ([]byte, []int) { + return file_blame_proto_rawDescGZIP(), []int{2} +} + +func (x *BlamePart) GetCommit() *Commit { + if x != nil { + return x.Commit + } + return nil +} + +func (x *BlamePart) GetLines() [][]byte { + if x != nil { + return x.Lines + } + return nil +} + +var File_blame_proto protoreflect.FileDescriptor + +var file_blame_proto_rawDesc = []byte{ + 0x0a, 0x0b, 0x62, 0x6c, 0x61, 0x6d, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x03, 0x72, + 0x70, 0x63, 0x1a, 0x0c, 0x73, 0x68, 0x61, 0x72, 0x65, 0x64, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x22, 0x87, 0x01, 0x0a, 0x0c, 0x42, 0x6c, 0x61, 0x6d, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x12, 0x24, 0x0a, 0x04, 0x62, 0x61, 0x73, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x10, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x52, 0x65, 0x61, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x52, 0x04, 0x62, 0x61, 0x73, 0x65, 0x12, 0x17, 0x0a, 0x07, 0x67, 0x69, 0x74, 0x5f, 0x72, + 0x65, 0x66, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x67, 0x69, 0x74, 0x52, 0x65, 0x66, + 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, + 0x70, 0x61, 0x74, 0x68, 0x12, 0x24, 0x0a, 0x05, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x18, 0x04, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x4c, 0x69, 0x6e, 0x65, 0x52, 0x61, + 0x6e, 0x67, 0x65, 0x52, 0x05, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x22, 0x2f, 0x0a, 0x09, 0x4c, 0x69, + 0x6e, 0x65, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x66, 0x72, 0x6f, 0x6d, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x04, 0x66, 0x72, 0x6f, 0x6d, 0x12, 0x0e, 0x0a, 0x02, 0x74, + 0x6f, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x02, 0x74, 0x6f, 0x22, 0x46, 0x0a, 0x09, 0x42, + 0x6c, 0x61, 0x6d, 0x65, 0x50, 0x61, 0x72, 0x74, 0x12, 0x23, 0x0a, 0x06, 0x63, 0x6f, 0x6d, 0x6d, + 0x69, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x43, + 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x52, 0x06, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x12, 0x14, 0x0a, + 0x05, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0c, 0x52, 0x05, 0x6c, 0x69, + 0x6e, 0x65, 0x73, 0x32, 0x3c, 0x0a, 0x0c, 0x42, 0x6c, 0x61, 0x6d, 0x65, 0x53, 0x65, 0x72, 0x76, + 0x69, 0x63, 0x65, 0x12, 0x2c, 0x0a, 0x05, 0x42, 0x6c, 0x61, 0x6d, 0x65, 0x12, 0x11, 0x2e, 0x72, + 0x70, 0x63, 0x2e, 0x42, 0x6c, 0x61, 0x6d, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, + 0x0e, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x42, 0x6c, 0x61, 0x6d, 0x65, 0x50, 0x61, 0x72, 0x74, 0x30, + 0x01, 0x42, 0x27, 0x5a, 0x25, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, + 0x68, 0x61, 0x72, 0x6e, 0x65, 0x73, 0x73, 0x2f, 0x67, 0x69, 0x74, 0x6e, 0x65, 0x73, 0x73, 0x2f, + 0x67, 0x69, 0x74, 0x72, 0x70, 0x63, 0x2f, 0x72, 0x70, 0x63, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x33, +} + +var ( + file_blame_proto_rawDescOnce sync.Once + file_blame_proto_rawDescData = file_blame_proto_rawDesc +) + +func file_blame_proto_rawDescGZIP() []byte { + file_blame_proto_rawDescOnce.Do(func() { + file_blame_proto_rawDescData = protoimpl.X.CompressGZIP(file_blame_proto_rawDescData) + }) + return file_blame_proto_rawDescData +} + +var file_blame_proto_msgTypes = make([]protoimpl.MessageInfo, 3) +var file_blame_proto_goTypes = []interface{}{ + (*BlameRequest)(nil), // 0: rpc.BlameRequest + (*LineRange)(nil), // 1: rpc.LineRange + (*BlamePart)(nil), // 2: rpc.BlamePart + (*ReadRequest)(nil), // 3: rpc.ReadRequest + (*Commit)(nil), // 4: rpc.Commit +} +var file_blame_proto_depIdxs 
= []int32{ + 3, // 0: rpc.BlameRequest.base:type_name -> rpc.ReadRequest + 1, // 1: rpc.BlameRequest.range:type_name -> rpc.LineRange + 4, // 2: rpc.BlamePart.commit:type_name -> rpc.Commit + 0, // 3: rpc.BlameService.Blame:input_type -> rpc.BlameRequest + 2, // 4: rpc.BlameService.Blame:output_type -> rpc.BlamePart + 4, // [4:5] is the sub-list for method output_type + 3, // [3:4] is the sub-list for method input_type + 3, // [3:3] is the sub-list for extension type_name + 3, // [3:3] is the sub-list for extension extendee + 0, // [0:3] is the sub-list for field type_name +} + +func init() { file_blame_proto_init() } +func file_blame_proto_init() { + if File_blame_proto != nil { + return + } + file_shared_proto_init() + if !protoimpl.UnsafeEnabled { + file_blame_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*BlameRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_blame_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*LineRange); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_blame_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*BlamePart); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_blame_proto_rawDesc, + NumEnums: 0, + NumMessages: 3, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_blame_proto_goTypes, + DependencyIndexes: file_blame_proto_depIdxs, + MessageInfos: file_blame_proto_msgTypes, + }.Build() + File_blame_proto = out.File + file_blame_proto_rawDesc = nil + file_blame_proto_goTypes = nil + file_blame_proto_depIdxs = nil +} diff --git a/gitrpc/rpc/blame_grpc.pb.go b/gitrpc/rpc/blame_grpc.pb.go new file mode 100644 index 0000000000..0f327fe1e0 --- /dev/null +++ b/gitrpc/rpc/blame_grpc.pb.go @@ -0,0 +1,132 @@ +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.2.0 +// - protoc v3.21.11 +// source: blame.proto + +package rpc + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.32.0 or later. +const _ = grpc.SupportPackageIsVersion7 + +// BlameServiceClient is the client API for BlameService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type BlameServiceClient interface { + Blame(ctx context.Context, in *BlameRequest, opts ...grpc.CallOption) (BlameService_BlameClient, error) +} + +type blameServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewBlameServiceClient(cc grpc.ClientConnInterface) BlameServiceClient { + return &blameServiceClient{cc} +} + +func (c *blameServiceClient) Blame(ctx context.Context, in *BlameRequest, opts ...grpc.CallOption) (BlameService_BlameClient, error) { + stream, err := c.cc.NewStream(ctx, &BlameService_ServiceDesc.Streams[0], "/rpc.BlameService/Blame", opts...) 
+ if err != nil { + return nil, err + } + x := &blameServiceBlameClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type BlameService_BlameClient interface { + Recv() (*BlamePart, error) + grpc.ClientStream +} + +type blameServiceBlameClient struct { + grpc.ClientStream +} + +func (x *blameServiceBlameClient) Recv() (*BlamePart, error) { + m := new(BlamePart) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +// BlameServiceServer is the server API for BlameService service. +// All implementations must embed UnimplementedBlameServiceServer +// for forward compatibility +type BlameServiceServer interface { + Blame(*BlameRequest, BlameService_BlameServer) error + mustEmbedUnimplementedBlameServiceServer() +} + +// UnimplementedBlameServiceServer must be embedded to have forward compatible implementations. +type UnimplementedBlameServiceServer struct { +} + +func (UnimplementedBlameServiceServer) Blame(*BlameRequest, BlameService_BlameServer) error { + return status.Errorf(codes.Unimplemented, "method Blame not implemented") +} +func (UnimplementedBlameServiceServer) mustEmbedUnimplementedBlameServiceServer() {} + +// UnsafeBlameServiceServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to BlameServiceServer will +// result in compilation errors. +type UnsafeBlameServiceServer interface { + mustEmbedUnimplementedBlameServiceServer() +} + +func RegisterBlameServiceServer(s grpc.ServiceRegistrar, srv BlameServiceServer) { + s.RegisterService(&BlameService_ServiceDesc, srv) +} + +func _BlameService_Blame_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(BlameRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(BlameServiceServer).Blame(m, &blameServiceBlameServer{stream}) +} + +type BlameService_BlameServer interface { + Send(*BlamePart) error + grpc.ServerStream +} + +type blameServiceBlameServer struct { + grpc.ServerStream +} + +func (x *blameServiceBlameServer) Send(m *BlamePart) error { + return x.ServerStream.SendMsg(m) +} + +// BlameService_ServiceDesc is the grpc.ServiceDesc for BlameService service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var BlameService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "rpc.BlameService", + HandlerType: (*BlameServiceServer)(nil), + Methods: []grpc.MethodDesc{}, + Streams: []grpc.StreamDesc{ + { + StreamName: "Blame", + Handler: _BlameService_Blame_Handler, + ServerStreams: true, + }, + }, + Metadata: "blame.proto", +} diff --git a/gitrpc/rpc/constants.go b/gitrpc/rpc/constants.go new file mode 100644 index 0000000000..a87845efa3 --- /dev/null +++ b/gitrpc/rpc/constants.go @@ -0,0 +1,28 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package rpc + +const ( + // MetadataKeyRequestID is the key used to store the request ID in the metadata. + MetadataKeyRequestID = "x-request-id" + + MetadataKeyEnvironmentVariables = "x-gitrpc-envars" + + // ServiceUploadPack is the service constant used for triggering the upload pack operation. + ServiceUploadPack = "upload-pack" + + // ServiceReceivePack is the service constant used for triggering the receive pack operation. + ServiceReceivePack = "receive-pack" +) diff --git a/gitrpc/rpc/diff.pb.go b/gitrpc/rpc/diff.pb.go new file mode 100644 index 0000000000..84f2b226f1 --- /dev/null +++ b/gitrpc/rpc/diff.pb.go @@ -0,0 +1,1439 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.28.1 +// protoc v3.21.11 +// source: diff.proto + +package rpc + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +// A list of different file statuses +type DiffResponse_FileStatus int32 + +const ( + // undefined + DiffResponse_UNDEFINED DiffResponse_FileStatus = 0 + // file has been added + DiffResponse_ADDED DiffResponse_FileStatus = 1 + // file has been changed + DiffResponse_MODIFIED DiffResponse_FileStatus = 2 + // file has been deleted + DiffResponse_DELETED DiffResponse_FileStatus = 3 + // the file has been renamed + DiffResponse_RENAMED DiffResponse_FileStatus = 4 +) + +// Enum value maps for DiffResponse_FileStatus. +var ( + DiffResponse_FileStatus_name = map[int32]string{ + 0: "UNDEFINED", + 1: "ADDED", + 2: "MODIFIED", + 3: "DELETED", + 4: "RENAMED", + } + DiffResponse_FileStatus_value = map[string]int32{ + "UNDEFINED": 0, + "ADDED": 1, + "MODIFIED": 2, + "DELETED": 3, + "RENAMED": 4, + } +) + +func (x DiffResponse_FileStatus) Enum() *DiffResponse_FileStatus { + p := new(DiffResponse_FileStatus) + *p = x + return p +} + +func (x DiffResponse_FileStatus) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (DiffResponse_FileStatus) Descriptor() protoreflect.EnumDescriptor { + return file_diff_proto_enumTypes[0].Descriptor() +} + +func (DiffResponse_FileStatus) Type() protoreflect.EnumType { + return &file_diff_proto_enumTypes[0] +} + +func (x DiffResponse_FileStatus) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use DiffResponse_FileStatus.Descriptor instead. 
+func (DiffResponse_FileStatus) EnumDescriptor() ([]byte, []int) { + return file_diff_proto_rawDescGZIP(), []int{10, 0} +} + +type DiffRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Base *ReadRequest `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"` + // base_ref is left side of compare and can be branch, commit and tag + BaseRef string `protobuf:"bytes,2,opt,name=base_ref,json=baseRef,proto3" json:"base_ref,omitempty"` + // head_ref is right side of compare and can be branch, commit and tag + HeadRef string `protobuf:"bytes,3,opt,name=head_ref,json=headRef,proto3" json:"head_ref,omitempty"` + // merge_base used only in branch comparison, if merge_base is true + // it will show diff from the commit where branch is created and head branch + MergeBase bool `protobuf:"varint,4,opt,name=merge_base,json=mergeBase,proto3" json:"merge_base,omitempty"` + // include_patch + IncludePatch bool `protobuf:"varint,5,opt,name=include_patch,json=includePatch,proto3" json:"include_patch,omitempty"` +} + +func (x *DiffRequest) Reset() { + *x = DiffRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_diff_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DiffRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DiffRequest) ProtoMessage() {} + +func (x *DiffRequest) ProtoReflect() protoreflect.Message { + mi := &file_diff_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DiffRequest.ProtoReflect.Descriptor instead. +func (*DiffRequest) Descriptor() ([]byte, []int) { + return file_diff_proto_rawDescGZIP(), []int{0} +} + +func (x *DiffRequest) GetBase() *ReadRequest { + if x != nil { + return x.Base + } + return nil +} + +func (x *DiffRequest) GetBaseRef() string { + if x != nil { + return x.BaseRef + } + return "" +} + +func (x *DiffRequest) GetHeadRef() string { + if x != nil { + return x.HeadRef + } + return "" +} + +func (x *DiffRequest) GetMergeBase() bool { + if x != nil { + return x.MergeBase + } + return false +} + +func (x *DiffRequest) GetIncludePatch() bool { + if x != nil { + return x.IncludePatch + } + return false +} + +type RawDiffResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Data []byte `protobuf:"bytes,1,opt,name=data,proto3" json:"data,omitempty"` +} + +func (x *RawDiffResponse) Reset() { + *x = RawDiffResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_diff_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *RawDiffResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RawDiffResponse) ProtoMessage() {} + +func (x *RawDiffResponse) ProtoReflect() protoreflect.Message { + mi := &file_diff_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RawDiffResponse.ProtoReflect.Descriptor instead. 
+func (*RawDiffResponse) Descriptor() ([]byte, []int) { + return file_diff_proto_rawDescGZIP(), []int{1} +} + +func (x *RawDiffResponse) GetData() []byte { + if x != nil { + return x.Data + } + return nil +} + +type DiffShortStatResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Files int32 `protobuf:"varint,1,opt,name=files,proto3" json:"files,omitempty"` + Additions int32 `protobuf:"varint,2,opt,name=additions,proto3" json:"additions,omitempty"` + Deletions int32 `protobuf:"varint,3,opt,name=deletions,proto3" json:"deletions,omitempty"` +} + +func (x *DiffShortStatResponse) Reset() { + *x = DiffShortStatResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_diff_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DiffShortStatResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DiffShortStatResponse) ProtoMessage() {} + +func (x *DiffShortStatResponse) ProtoReflect() protoreflect.Message { + mi := &file_diff_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DiffShortStatResponse.ProtoReflect.Descriptor instead. +func (*DiffShortStatResponse) Descriptor() ([]byte, []int) { + return file_diff_proto_rawDescGZIP(), []int{2} +} + +func (x *DiffShortStatResponse) GetFiles() int32 { + if x != nil { + return x.Files + } + return 0 +} + +func (x *DiffShortStatResponse) GetAdditions() int32 { + if x != nil { + return x.Additions + } + return 0 +} + +func (x *DiffShortStatResponse) GetDeletions() int32 { + if x != nil { + return x.Deletions + } + return 0 +} + +type HunkHeader struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + OldLine int32 `protobuf:"varint,1,opt,name=old_line,json=oldLine,proto3" json:"old_line,omitempty"` + OldSpan int32 `protobuf:"varint,2,opt,name=old_span,json=oldSpan,proto3" json:"old_span,omitempty"` + NewLine int32 `protobuf:"varint,3,opt,name=new_line,json=newLine,proto3" json:"new_line,omitempty"` + NewSpan int32 `protobuf:"varint,4,opt,name=new_span,json=newSpan,proto3" json:"new_span,omitempty"` + Text string `protobuf:"bytes,5,opt,name=text,proto3" json:"text,omitempty"` +} + +func (x *HunkHeader) Reset() { + *x = HunkHeader{} + if protoimpl.UnsafeEnabled { + mi := &file_diff_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *HunkHeader) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*HunkHeader) ProtoMessage() {} + +func (x *HunkHeader) ProtoReflect() protoreflect.Message { + mi := &file_diff_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use HunkHeader.ProtoReflect.Descriptor instead. 
+func (*HunkHeader) Descriptor() ([]byte, []int) { + return file_diff_proto_rawDescGZIP(), []int{3} +} + +func (x *HunkHeader) GetOldLine() int32 { + if x != nil { + return x.OldLine + } + return 0 +} + +func (x *HunkHeader) GetOldSpan() int32 { + if x != nil { + return x.OldSpan + } + return 0 +} + +func (x *HunkHeader) GetNewLine() int32 { + if x != nil { + return x.NewLine + } + return 0 +} + +func (x *HunkHeader) GetNewSpan() int32 { + if x != nil { + return x.NewSpan + } + return 0 +} + +func (x *HunkHeader) GetText() string { + if x != nil { + return x.Text + } + return "" +} + +type DiffFileHeader struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + OldFileName string `protobuf:"bytes,1,opt,name=old_file_name,json=oldFileName,proto3" json:"old_file_name,omitempty"` + NewFileName string `protobuf:"bytes,2,opt,name=new_file_name,json=newFileName,proto3" json:"new_file_name,omitempty"` + Extensions map[string]string `protobuf:"bytes,3,rep,name=extensions,proto3" json:"extensions,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` +} + +func (x *DiffFileHeader) Reset() { + *x = DiffFileHeader{} + if protoimpl.UnsafeEnabled { + mi := &file_diff_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DiffFileHeader) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DiffFileHeader) ProtoMessage() {} + +func (x *DiffFileHeader) ProtoReflect() protoreflect.Message { + mi := &file_diff_proto_msgTypes[4] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DiffFileHeader.ProtoReflect.Descriptor instead. +func (*DiffFileHeader) Descriptor() ([]byte, []int) { + return file_diff_proto_rawDescGZIP(), []int{4} +} + +func (x *DiffFileHeader) GetOldFileName() string { + if x != nil { + return x.OldFileName + } + return "" +} + +func (x *DiffFileHeader) GetNewFileName() string { + if x != nil { + return x.NewFileName + } + return "" +} + +func (x *DiffFileHeader) GetExtensions() map[string]string { + if x != nil { + return x.Extensions + } + return nil +} + +type DiffFileHunkHeaders struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + FileHeader *DiffFileHeader `protobuf:"bytes,1,opt,name=file_header,json=fileHeader,proto3" json:"file_header,omitempty"` + HunkHeaders []*HunkHeader `protobuf:"bytes,2,rep,name=hunk_headers,json=hunkHeaders,proto3" json:"hunk_headers,omitempty"` +} + +func (x *DiffFileHunkHeaders) Reset() { + *x = DiffFileHunkHeaders{} + if protoimpl.UnsafeEnabled { + mi := &file_diff_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DiffFileHunkHeaders) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DiffFileHunkHeaders) ProtoMessage() {} + +func (x *DiffFileHunkHeaders) ProtoReflect() protoreflect.Message { + mi := &file_diff_proto_msgTypes[5] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DiffFileHunkHeaders.ProtoReflect.Descriptor instead. 
+func (*DiffFileHunkHeaders) Descriptor() ([]byte, []int) { + return file_diff_proto_rawDescGZIP(), []int{5} +} + +func (x *DiffFileHunkHeaders) GetFileHeader() *DiffFileHeader { + if x != nil { + return x.FileHeader + } + return nil +} + +func (x *DiffFileHunkHeaders) GetHunkHeaders() []*HunkHeader { + if x != nil { + return x.HunkHeaders + } + return nil +} + +type GetDiffHunkHeadersRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Base *ReadRequest `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"` + SourceCommitSha string `protobuf:"bytes,2,opt,name=source_commit_sha,json=sourceCommitSha,proto3" json:"source_commit_sha,omitempty"` + TargetCommitSha string `protobuf:"bytes,4,opt,name=target_commit_sha,json=targetCommitSha,proto3" json:"target_commit_sha,omitempty"` +} + +func (x *GetDiffHunkHeadersRequest) Reset() { + *x = GetDiffHunkHeadersRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_diff_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetDiffHunkHeadersRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetDiffHunkHeadersRequest) ProtoMessage() {} + +func (x *GetDiffHunkHeadersRequest) ProtoReflect() protoreflect.Message { + mi := &file_diff_proto_msgTypes[6] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetDiffHunkHeadersRequest.ProtoReflect.Descriptor instead. +func (*GetDiffHunkHeadersRequest) Descriptor() ([]byte, []int) { + return file_diff_proto_rawDescGZIP(), []int{6} +} + +func (x *GetDiffHunkHeadersRequest) GetBase() *ReadRequest { + if x != nil { + return x.Base + } + return nil +} + +func (x *GetDiffHunkHeadersRequest) GetSourceCommitSha() string { + if x != nil { + return x.SourceCommitSha + } + return "" +} + +func (x *GetDiffHunkHeadersRequest) GetTargetCommitSha() string { + if x != nil { + return x.TargetCommitSha + } + return "" +} + +type GetDiffHunkHeadersResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Files []*DiffFileHunkHeaders `protobuf:"bytes,1,rep,name=files,proto3" json:"files,omitempty"` +} + +func (x *GetDiffHunkHeadersResponse) Reset() { + *x = GetDiffHunkHeadersResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_diff_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetDiffHunkHeadersResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetDiffHunkHeadersResponse) ProtoMessage() {} + +func (x *GetDiffHunkHeadersResponse) ProtoReflect() protoreflect.Message { + mi := &file_diff_proto_msgTypes[7] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetDiffHunkHeadersResponse.ProtoReflect.Descriptor instead. 
+func (*GetDiffHunkHeadersResponse) Descriptor() ([]byte, []int) { + return file_diff_proto_rawDescGZIP(), []int{7} +} + +func (x *GetDiffHunkHeadersResponse) GetFiles() []*DiffFileHunkHeaders { + if x != nil { + return x.Files + } + return nil +} + +type DiffCutRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Base *ReadRequest `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"` + SourceCommitSha string `protobuf:"bytes,2,opt,name=source_commit_sha,json=sourceCommitSha,proto3" json:"source_commit_sha,omitempty"` + SourceBranch string `protobuf:"bytes,3,opt,name=source_branch,json=sourceBranch,proto3" json:"source_branch,omitempty"` + TargetCommitSha string `protobuf:"bytes,4,opt,name=target_commit_sha,json=targetCommitSha,proto3" json:"target_commit_sha,omitempty"` + TargetBranch string `protobuf:"bytes,5,opt,name=target_branch,json=targetBranch,proto3" json:"target_branch,omitempty"` + Path string `protobuf:"bytes,6,opt,name=path,proto3" json:"path,omitempty"` + LineStart int32 `protobuf:"varint,7,opt,name=line_start,json=lineStart,proto3" json:"line_start,omitempty"` + LineStartNew bool `protobuf:"varint,8,opt,name=line_start_new,json=lineStartNew,proto3" json:"line_start_new,omitempty"` + LineEnd int32 `protobuf:"varint,9,opt,name=line_end,json=lineEnd,proto3" json:"line_end,omitempty"` + LineEndNew bool `protobuf:"varint,10,opt,name=line_end_new,json=lineEndNew,proto3" json:"line_end_new,omitempty"` +} + +func (x *DiffCutRequest) Reset() { + *x = DiffCutRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_diff_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DiffCutRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DiffCutRequest) ProtoMessage() {} + +func (x *DiffCutRequest) ProtoReflect() protoreflect.Message { + mi := &file_diff_proto_msgTypes[8] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DiffCutRequest.ProtoReflect.Descriptor instead. 
+func (*DiffCutRequest) Descriptor() ([]byte, []int) { + return file_diff_proto_rawDescGZIP(), []int{8} +} + +func (x *DiffCutRequest) GetBase() *ReadRequest { + if x != nil { + return x.Base + } + return nil +} + +func (x *DiffCutRequest) GetSourceCommitSha() string { + if x != nil { + return x.SourceCommitSha + } + return "" +} + +func (x *DiffCutRequest) GetSourceBranch() string { + if x != nil { + return x.SourceBranch + } + return "" +} + +func (x *DiffCutRequest) GetTargetCommitSha() string { + if x != nil { + return x.TargetCommitSha + } + return "" +} + +func (x *DiffCutRequest) GetTargetBranch() string { + if x != nil { + return x.TargetBranch + } + return "" +} + +func (x *DiffCutRequest) GetPath() string { + if x != nil { + return x.Path + } + return "" +} + +func (x *DiffCutRequest) GetLineStart() int32 { + if x != nil { + return x.LineStart + } + return 0 +} + +func (x *DiffCutRequest) GetLineStartNew() bool { + if x != nil { + return x.LineStartNew + } + return false +} + +func (x *DiffCutRequest) GetLineEnd() int32 { + if x != nil { + return x.LineEnd + } + return 0 +} + +func (x *DiffCutRequest) GetLineEndNew() bool { + if x != nil { + return x.LineEndNew + } + return false +} + +type DiffCutResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + HunkHeader *HunkHeader `protobuf:"bytes,1,opt,name=hunk_header,json=hunkHeader,proto3" json:"hunk_header,omitempty"` + LinesHeader string `protobuf:"bytes,2,opt,name=lines_header,json=linesHeader,proto3" json:"lines_header,omitempty"` + Lines []string `protobuf:"bytes,3,rep,name=lines,proto3" json:"lines,omitempty"` + MergeBaseSha string `protobuf:"bytes,4,opt,name=merge_base_sha,json=mergeBaseSha,proto3" json:"merge_base_sha,omitempty"` + LatestSourceSha string `protobuf:"bytes,5,opt,name=latest_source_sha,json=latestSourceSha,proto3" json:"latest_source_sha,omitempty"` +} + +func (x *DiffCutResponse) Reset() { + *x = DiffCutResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_diff_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DiffCutResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DiffCutResponse) ProtoMessage() {} + +func (x *DiffCutResponse) ProtoReflect() protoreflect.Message { + mi := &file_diff_proto_msgTypes[9] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DiffCutResponse.ProtoReflect.Descriptor instead. 
+func (*DiffCutResponse) Descriptor() ([]byte, []int) { + return file_diff_proto_rawDescGZIP(), []int{9} +} + +func (x *DiffCutResponse) GetHunkHeader() *HunkHeader { + if x != nil { + return x.HunkHeader + } + return nil +} + +func (x *DiffCutResponse) GetLinesHeader() string { + if x != nil { + return x.LinesHeader + } + return "" +} + +func (x *DiffCutResponse) GetLines() []string { + if x != nil { + return x.Lines + } + return nil +} + +func (x *DiffCutResponse) GetMergeBaseSha() string { + if x != nil { + return x.MergeBaseSha + } + return "" +} + +func (x *DiffCutResponse) GetLatestSourceSha() string { + if x != nil { + return x.LatestSourceSha + } + return "" +} + +type DiffResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The path and name of the file + Path string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"` + // The old path and name of the file + OldPath string `protobuf:"bytes,2,opt,name=old_path,json=oldPath,proto3" json:"old_path,omitempty"` + // sha (SHA1 hash) of the file. For a changed/new file, it is the new SHA, + // and for a deleted file it becomes "000000". + Sha string `protobuf:"bytes,3,opt,name=sha,proto3" json:"sha,omitempty"` + // old_sha is the old index (SHA1 hash) of the file. + OldSha string `protobuf:"bytes,4,opt,name=old_sha,json=oldSha,proto3" json:"old_sha,omitempty"` + // status of the file. + Status DiffResponse_FileStatus `protobuf:"varint,5,opt,name=status,proto3,enum=rpc.DiffResponse_FileStatus" json:"status,omitempty"` + // total number of additions in the file + Additions int32 `protobuf:"varint,6,opt,name=additions,proto3" json:"additions,omitempty"` + // total number of deletions in the file + Deletions int32 `protobuf:"varint,7,opt,name=deletions,proto3" json:"deletions,omitempty"` + // number of changes in the file + Changes int32 `protobuf:"varint,8,opt,name=changes,proto3" json:"changes,omitempty"` + // patch from the file diff + Patch []byte `protobuf:"bytes,9,opt,name=patch,proto3" json:"patch,omitempty"` + // is binary file + IsBinary bool `protobuf:"varint,10,opt,name=is_binary,json=isBinary,proto3" json:"is_binary,omitempty"` + // is submodule + IsSubmodule bool `protobuf:"varint,11,opt,name=is_submodule,json=isSubmodule,proto3" json:"is_submodule,omitempty"` +} + +func (x *DiffResponse) Reset() { + *x = DiffResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_diff_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DiffResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DiffResponse) ProtoMessage() {} + +func (x *DiffResponse) ProtoReflect() protoreflect.Message { + mi := &file_diff_proto_msgTypes[10] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DiffResponse.ProtoReflect.Descriptor instead. 
+func (*DiffResponse) Descriptor() ([]byte, []int) { + return file_diff_proto_rawDescGZIP(), []int{10} +} + +func (x *DiffResponse) GetPath() string { + if x != nil { + return x.Path + } + return "" +} + +func (x *DiffResponse) GetOldPath() string { + if x != nil { + return x.OldPath + } + return "" +} + +func (x *DiffResponse) GetSha() string { + if x != nil { + return x.Sha + } + return "" +} + +func (x *DiffResponse) GetOldSha() string { + if x != nil { + return x.OldSha + } + return "" +} + +func (x *DiffResponse) GetStatus() DiffResponse_FileStatus { + if x != nil { + return x.Status + } + return DiffResponse_UNDEFINED +} + +func (x *DiffResponse) GetAdditions() int32 { + if x != nil { + return x.Additions + } + return 0 +} + +func (x *DiffResponse) GetDeletions() int32 { + if x != nil { + return x.Deletions + } + return 0 +} + +func (x *DiffResponse) GetChanges() int32 { + if x != nil { + return x.Changes + } + return 0 +} + +func (x *DiffResponse) GetPatch() []byte { + if x != nil { + return x.Patch + } + return nil +} + +func (x *DiffResponse) GetIsBinary() bool { + if x != nil { + return x.IsBinary + } + return false +} + +func (x *DiffResponse) GetIsSubmodule() bool { + if x != nil { + return x.IsSubmodule + } + return false +} + +type CommitDiffRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Base *ReadRequest `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"` + Sha string `protobuf:"bytes,2,opt,name=sha,proto3" json:"sha,omitempty"` +} + +func (x *CommitDiffRequest) Reset() { + *x = CommitDiffRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_diff_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CommitDiffRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CommitDiffRequest) ProtoMessage() {} + +func (x *CommitDiffRequest) ProtoReflect() protoreflect.Message { + mi := &file_diff_proto_msgTypes[11] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CommitDiffRequest.ProtoReflect.Descriptor instead. 
+func (*CommitDiffRequest) Descriptor() ([]byte, []int) { + return file_diff_proto_rawDescGZIP(), []int{11} +} + +func (x *CommitDiffRequest) GetBase() *ReadRequest { + if x != nil { + return x.Base + } + return nil +} + +func (x *CommitDiffRequest) GetSha() string { + if x != nil { + return x.Sha + } + return "" +} + +type CommitDiffResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Data []byte `protobuf:"bytes,1,opt,name=data,proto3" json:"data,omitempty"` +} + +func (x *CommitDiffResponse) Reset() { + *x = CommitDiffResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_diff_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CommitDiffResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CommitDiffResponse) ProtoMessage() {} + +func (x *CommitDiffResponse) ProtoReflect() protoreflect.Message { + mi := &file_diff_proto_msgTypes[12] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CommitDiffResponse.ProtoReflect.Descriptor instead. +func (*CommitDiffResponse) Descriptor() ([]byte, []int) { + return file_diff_proto_rawDescGZIP(), []int{12} +} + +func (x *CommitDiffResponse) GetData() []byte { + if x != nil { + return x.Data + } + return nil +} + +var File_diff_proto protoreflect.FileDescriptor + +var file_diff_proto_rawDesc = []byte{ + 0x0a, 0x0a, 0x64, 0x69, 0x66, 0x66, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x03, 0x72, 0x70, + 0x63, 0x1a, 0x0c, 0x73, 0x68, 0x61, 0x72, 0x65, 0x64, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, + 0xad, 0x01, 0x0a, 0x0b, 0x44, 0x69, 0x66, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, + 0x24, 0x0a, 0x04, 0x62, 0x61, 0x73, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, + 0x72, 0x70, 0x63, 0x2e, 0x52, 0x65, 0x61, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, + 0x04, 0x62, 0x61, 0x73, 0x65, 0x12, 0x19, 0x0a, 0x08, 0x62, 0x61, 0x73, 0x65, 0x5f, 0x72, 0x65, + 0x66, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x62, 0x61, 0x73, 0x65, 0x52, 0x65, 0x66, + 0x12, 0x19, 0x0a, 0x08, 0x68, 0x65, 0x61, 0x64, 0x5f, 0x72, 0x65, 0x66, 0x18, 0x03, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x07, 0x68, 0x65, 0x61, 0x64, 0x52, 0x65, 0x66, 0x12, 0x1d, 0x0a, 0x0a, 0x6d, + 0x65, 0x72, 0x67, 0x65, 0x5f, 0x62, 0x61, 0x73, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, + 0x09, 0x6d, 0x65, 0x72, 0x67, 0x65, 0x42, 0x61, 0x73, 0x65, 0x12, 0x23, 0x0a, 0x0d, 0x69, 0x6e, + 0x63, 0x6c, 0x75, 0x64, 0x65, 0x5f, 0x70, 0x61, 0x74, 0x63, 0x68, 0x18, 0x05, 0x20, 0x01, 0x28, + 0x08, 0x52, 0x0c, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x64, 0x65, 0x50, 0x61, 0x74, 0x63, 0x68, 0x22, + 0x25, 0x0a, 0x0f, 0x52, 0x61, 0x77, 0x44, 0x69, 0x66, 0x66, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, + 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x22, 0x69, 0x0a, 0x15, 0x44, 0x69, 0x66, 0x66, 0x53, 0x68, + 0x6f, 0x72, 0x74, 0x53, 0x74, 0x61, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, + 0x14, 0x0a, 0x05, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x05, + 0x66, 0x69, 0x6c, 0x65, 0x73, 0x12, 0x1c, 0x0a, 0x09, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, + 0x6e, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x61, 0x64, 0x64, 
0x69, 0x74, 0x69, + 0x6f, 0x6e, 0x73, 0x12, 0x1c, 0x0a, 0x09, 0x64, 0x65, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x73, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x64, 0x65, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, + 0x73, 0x22, 0x8c, 0x01, 0x0a, 0x0a, 0x48, 0x75, 0x6e, 0x6b, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, + 0x12, 0x19, 0x0a, 0x08, 0x6f, 0x6c, 0x64, 0x5f, 0x6c, 0x69, 0x6e, 0x65, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x05, 0x52, 0x07, 0x6f, 0x6c, 0x64, 0x4c, 0x69, 0x6e, 0x65, 0x12, 0x19, 0x0a, 0x08, 0x6f, + 0x6c, 0x64, 0x5f, 0x73, 0x70, 0x61, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x07, 0x6f, + 0x6c, 0x64, 0x53, 0x70, 0x61, 0x6e, 0x12, 0x19, 0x0a, 0x08, 0x6e, 0x65, 0x77, 0x5f, 0x6c, 0x69, + 0x6e, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x07, 0x6e, 0x65, 0x77, 0x4c, 0x69, 0x6e, + 0x65, 0x12, 0x19, 0x0a, 0x08, 0x6e, 0x65, 0x77, 0x5f, 0x73, 0x70, 0x61, 0x6e, 0x18, 0x04, 0x20, + 0x01, 0x28, 0x05, 0x52, 0x07, 0x6e, 0x65, 0x77, 0x53, 0x70, 0x61, 0x6e, 0x12, 0x12, 0x0a, 0x04, + 0x74, 0x65, 0x78, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x65, 0x78, 0x74, + 0x22, 0xdc, 0x01, 0x0a, 0x0e, 0x44, 0x69, 0x66, 0x66, 0x46, 0x69, 0x6c, 0x65, 0x48, 0x65, 0x61, + 0x64, 0x65, 0x72, 0x12, 0x22, 0x0a, 0x0d, 0x6f, 0x6c, 0x64, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x5f, + 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x6f, 0x6c, 0x64, 0x46, + 0x69, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x22, 0x0a, 0x0d, 0x6e, 0x65, 0x77, 0x5f, 0x66, + 0x69, 0x6c, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, + 0x6e, 0x65, 0x77, 0x46, 0x69, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x43, 0x0a, 0x0a, 0x65, + 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, + 0x23, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x44, 0x69, 0x66, 0x66, 0x46, 0x69, 0x6c, 0x65, 0x48, 0x65, + 0x61, 0x64, 0x65, 0x72, 0x2e, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x45, + 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0a, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x73, + 0x1a, 0x3d, 0x0a, 0x0f, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, + 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, + 0x7f, 0x0a, 0x13, 0x44, 0x69, 0x66, 0x66, 0x46, 0x69, 0x6c, 0x65, 0x48, 0x75, 0x6e, 0x6b, 0x48, + 0x65, 0x61, 0x64, 0x65, 0x72, 0x73, 0x12, 0x34, 0x0a, 0x0b, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x68, + 0x65, 0x61, 0x64, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x72, 0x70, + 0x63, 0x2e, 0x44, 0x69, 0x66, 0x66, 0x46, 0x69, 0x6c, 0x65, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, + 0x52, 0x0a, 0x66, 0x69, 0x6c, 0x65, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x12, 0x32, 0x0a, 0x0c, + 0x68, 0x75, 0x6e, 0x6b, 0x5f, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x73, 0x18, 0x02, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x48, 0x75, 0x6e, 0x6b, 0x48, 0x65, 0x61, + 0x64, 0x65, 0x72, 0x52, 0x0b, 0x68, 0x75, 0x6e, 0x6b, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x73, + 0x22, 0x99, 0x01, 0x0a, 0x19, 0x47, 0x65, 0x74, 0x44, 0x69, 0x66, 0x66, 0x48, 0x75, 0x6e, 0x6b, + 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x24, + 0x0a, 0x04, 0x62, 0x61, 0x73, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x72, + 0x70, 
0x63, 0x2e, 0x52, 0x65, 0x61, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, 0x04, + 0x62, 0x61, 0x73, 0x65, 0x12, 0x2a, 0x0a, 0x11, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x63, + 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x5f, 0x73, 0x68, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x0f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x53, 0x68, 0x61, + 0x12, 0x2a, 0x0a, 0x11, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, + 0x74, 0x5f, 0x73, 0x68, 0x61, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x74, 0x61, 0x72, + 0x67, 0x65, 0x74, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x53, 0x68, 0x61, 0x22, 0x4c, 0x0a, 0x1a, + 0x47, 0x65, 0x74, 0x44, 0x69, 0x66, 0x66, 0x48, 0x75, 0x6e, 0x6b, 0x48, 0x65, 0x61, 0x64, 0x65, + 0x72, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2e, 0x0a, 0x05, 0x66, 0x69, + 0x6c, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x72, 0x70, 0x63, 0x2e, + 0x44, 0x69, 0x66, 0x66, 0x46, 0x69, 0x6c, 0x65, 0x48, 0x75, 0x6e, 0x6b, 0x48, 0x65, 0x61, 0x64, + 0x65, 0x72, 0x73, 0x52, 0x05, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x22, 0xee, 0x02, 0x0a, 0x0e, 0x44, + 0x69, 0x66, 0x66, 0x43, 0x75, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x24, 0x0a, + 0x04, 0x62, 0x61, 0x73, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x72, 0x70, + 0x63, 0x2e, 0x52, 0x65, 0x61, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, 0x04, 0x62, + 0x61, 0x73, 0x65, 0x12, 0x2a, 0x0a, 0x11, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x63, 0x6f, + 0x6d, 0x6d, 0x69, 0x74, 0x5f, 0x73, 0x68, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, + 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x53, 0x68, 0x61, 0x12, + 0x23, 0x0a, 0x0d, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x62, 0x72, 0x61, 0x6e, 0x63, 0x68, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x42, 0x72, + 0x61, 0x6e, 0x63, 0x68, 0x12, 0x2a, 0x0a, 0x11, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x5f, 0x63, + 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x5f, 0x73, 0x68, 0x61, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x0f, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x53, 0x68, 0x61, + 0x12, 0x23, 0x0a, 0x0d, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x5f, 0x62, 0x72, 0x61, 0x6e, 0x63, + 0x68, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x42, + 0x72, 0x61, 0x6e, 0x63, 0x68, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x06, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x1d, 0x0a, 0x0a, 0x6c, 0x69, 0x6e, + 0x65, 0x5f, 0x73, 0x74, 0x61, 0x72, 0x74, 0x18, 0x07, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x6c, + 0x69, 0x6e, 0x65, 0x53, 0x74, 0x61, 0x72, 0x74, 0x12, 0x24, 0x0a, 0x0e, 0x6c, 0x69, 0x6e, 0x65, + 0x5f, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x6e, 0x65, 0x77, 0x18, 0x08, 0x20, 0x01, 0x28, 0x08, + 0x52, 0x0c, 0x6c, 0x69, 0x6e, 0x65, 0x53, 0x74, 0x61, 0x72, 0x74, 0x4e, 0x65, 0x77, 0x12, 0x19, + 0x0a, 0x08, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x65, 0x6e, 0x64, 0x18, 0x09, 0x20, 0x01, 0x28, 0x05, + 0x52, 0x07, 0x6c, 0x69, 0x6e, 0x65, 0x45, 0x6e, 0x64, 0x12, 0x20, 0x0a, 0x0c, 0x6c, 0x69, 0x6e, + 0x65, 0x5f, 0x65, 0x6e, 0x64, 0x5f, 0x6e, 0x65, 0x77, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x08, 0x52, + 0x0a, 0x6c, 0x69, 0x6e, 0x65, 0x45, 0x6e, 0x64, 0x4e, 0x65, 0x77, 0x22, 0xce, 0x01, 0x0a, 0x0f, + 0x44, 0x69, 0x66, 0x66, 0x43, 0x75, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, + 0x30, 0x0a, 0x0b, 0x68, 0x75, 
0x6e, 0x6b, 0x5f, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x48, 0x75, 0x6e, 0x6b, 0x48, + 0x65, 0x61, 0x64, 0x65, 0x72, 0x52, 0x0a, 0x68, 0x75, 0x6e, 0x6b, 0x48, 0x65, 0x61, 0x64, 0x65, + 0x72, 0x12, 0x21, 0x0a, 0x0c, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x5f, 0x68, 0x65, 0x61, 0x64, 0x65, + 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x48, 0x65, + 0x61, 0x64, 0x65, 0x72, 0x12, 0x14, 0x0a, 0x05, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x18, 0x03, 0x20, + 0x03, 0x28, 0x09, 0x52, 0x05, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x12, 0x24, 0x0a, 0x0e, 0x6d, 0x65, + 0x72, 0x67, 0x65, 0x5f, 0x62, 0x61, 0x73, 0x65, 0x5f, 0x73, 0x68, 0x61, 0x18, 0x04, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x0c, 0x6d, 0x65, 0x72, 0x67, 0x65, 0x42, 0x61, 0x73, 0x65, 0x53, 0x68, 0x61, + 0x12, 0x2a, 0x0a, 0x11, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, + 0x65, 0x5f, 0x73, 0x68, 0x61, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x6c, 0x61, 0x74, + 0x65, 0x73, 0x74, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x68, 0x61, 0x22, 0x9a, 0x03, 0x0a, + 0x0c, 0x44, 0x69, 0x66, 0x66, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x12, 0x0a, + 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x70, 0x61, 0x74, + 0x68, 0x12, 0x19, 0x0a, 0x08, 0x6f, 0x6c, 0x64, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x07, 0x6f, 0x6c, 0x64, 0x50, 0x61, 0x74, 0x68, 0x12, 0x10, 0x0a, 0x03, + 0x73, 0x68, 0x61, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x73, 0x68, 0x61, 0x12, 0x17, + 0x0a, 0x07, 0x6f, 0x6c, 0x64, 0x5f, 0x73, 0x68, 0x61, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x06, 0x6f, 0x6c, 0x64, 0x53, 0x68, 0x61, 0x12, 0x34, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, + 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1c, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x44, 0x69, + 0x66, 0x66, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x46, 0x69, 0x6c, 0x65, 0x53, + 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x1c, 0x0a, + 0x09, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x06, 0x20, 0x01, 0x28, 0x05, + 0x52, 0x09, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x1c, 0x0a, 0x09, 0x64, + 0x65, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x07, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, + 0x64, 0x65, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x68, 0x61, + 0x6e, 0x67, 0x65, 0x73, 0x18, 0x08, 0x20, 0x01, 0x28, 0x05, 0x52, 0x07, 0x63, 0x68, 0x61, 0x6e, + 0x67, 0x65, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x70, 0x61, 0x74, 0x63, 0x68, 0x18, 0x09, 0x20, 0x01, + 0x28, 0x0c, 0x52, 0x05, 0x70, 0x61, 0x74, 0x63, 0x68, 0x12, 0x1b, 0x0a, 0x09, 0x69, 0x73, 0x5f, + 0x62, 0x69, 0x6e, 0x61, 0x72, 0x79, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x69, 0x73, + 0x42, 0x69, 0x6e, 0x61, 0x72, 0x79, 0x12, 0x21, 0x0a, 0x0c, 0x69, 0x73, 0x5f, 0x73, 0x75, 0x62, + 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0b, 0x69, 0x73, + 0x53, 0x75, 0x62, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x22, 0x4e, 0x0a, 0x0a, 0x46, 0x69, 0x6c, + 0x65, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x0d, 0x0a, 0x09, 0x55, 0x4e, 0x44, 0x45, 0x46, + 0x49, 0x4e, 0x45, 0x44, 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x41, 0x44, 0x44, 0x45, 0x44, 0x10, + 0x01, 0x12, 0x0c, 0x0a, 0x08, 0x4d, 0x4f, 0x44, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x02, 0x12, + 0x0b, 0x0a, 0x07, 0x44, 0x45, 0x4c, 0x45, 0x54, 0x45, 
0x44, 0x10, 0x03, 0x12, 0x0b, 0x0a, 0x07, + 0x52, 0x45, 0x4e, 0x41, 0x4d, 0x45, 0x44, 0x10, 0x04, 0x22, 0x4b, 0x0a, 0x11, 0x43, 0x6f, 0x6d, + 0x6d, 0x69, 0x74, 0x44, 0x69, 0x66, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x24, + 0x0a, 0x04, 0x62, 0x61, 0x73, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x72, + 0x70, 0x63, 0x2e, 0x52, 0x65, 0x61, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, 0x04, + 0x62, 0x61, 0x73, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x73, 0x68, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x03, 0x73, 0x68, 0x61, 0x22, 0x28, 0x0a, 0x12, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, + 0x44, 0x69, 0x66, 0x66, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x12, 0x0a, 0x04, + 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, + 0x32, 0x88, 0x03, 0x0a, 0x0b, 0x44, 0x69, 0x66, 0x66, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, + 0x12, 0x35, 0x0a, 0x07, 0x52, 0x61, 0x77, 0x44, 0x69, 0x66, 0x66, 0x12, 0x10, 0x2e, 0x72, 0x70, + 0x63, 0x2e, 0x44, 0x69, 0x66, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x14, 0x2e, + 0x72, 0x70, 0x63, 0x2e, 0x52, 0x61, 0x77, 0x44, 0x69, 0x66, 0x66, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x22, 0x00, 0x30, 0x01, 0x12, 0x2f, 0x0a, 0x04, 0x44, 0x69, 0x66, 0x66, 0x12, + 0x10, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x44, 0x69, 0x66, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x1a, 0x11, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x44, 0x69, 0x66, 0x66, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x30, 0x01, 0x12, 0x3f, 0x0a, 0x0a, 0x43, 0x6f, 0x6d, 0x6d, + 0x69, 0x74, 0x44, 0x69, 0x66, 0x66, 0x12, 0x16, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x43, 0x6f, 0x6d, + 0x6d, 0x69, 0x74, 0x44, 0x69, 0x66, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x17, + 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x44, 0x69, 0x66, 0x66, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x30, 0x01, 0x12, 0x3f, 0x0a, 0x0d, 0x44, 0x69, 0x66, + 0x66, 0x53, 0x68, 0x6f, 0x72, 0x74, 0x53, 0x74, 0x61, 0x74, 0x12, 0x10, 0x2e, 0x72, 0x70, 0x63, + 0x2e, 0x44, 0x69, 0x66, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1a, 0x2e, 0x72, + 0x70, 0x63, 0x2e, 0x44, 0x69, 0x66, 0x66, 0x53, 0x68, 0x6f, 0x72, 0x74, 0x53, 0x74, 0x61, 0x74, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x57, 0x0a, 0x12, 0x47, 0x65, + 0x74, 0x44, 0x69, 0x66, 0x66, 0x48, 0x75, 0x6e, 0x6b, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x73, + 0x12, 0x1e, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x47, 0x65, 0x74, 0x44, 0x69, 0x66, 0x66, 0x48, 0x75, + 0x6e, 0x6b, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x1a, 0x1f, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x47, 0x65, 0x74, 0x44, 0x69, 0x66, 0x66, 0x48, 0x75, + 0x6e, 0x6b, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x22, 0x00, 0x12, 0x36, 0x0a, 0x07, 0x44, 0x69, 0x66, 0x66, 0x43, 0x75, 0x74, 0x12, 0x13, + 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x44, 0x69, 0x66, 0x66, 0x43, 0x75, 0x74, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x1a, 0x14, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x44, 0x69, 0x66, 0x66, 0x43, 0x75, + 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x27, 0x5a, 0x25, 0x67, + 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x68, 0x61, 0x72, 0x6e, 0x65, 0x73, + 0x73, 0x2f, 0x67, 0x69, 0x74, 0x6e, 0x65, 0x73, 0x73, 0x2f, 0x67, 0x69, 0x74, 0x72, 0x70, 0x63, + 0x2f, 0x72, 0x70, 0x63, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + 
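The raw descriptor above registers the diff.proto messages; the matching DiffService client stubs are added below in gitrpc/rpc/diff_grpc.pb.go. As a rough, hypothetical sketch only (not part of the generated code in this patch), a caller inside the gitness module might consume the server-streaming CommitDiff RPC roughly as follows; the server address, the placeholder package name, and the contents of ReadRequest are assumptions:

package example // hypothetical package, for illustration only

import (
	"context"
	"io"
	"os"

	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"

	"github.com/harness/gitness/gitrpc/rpc" // go_package from the descriptor above
)

// streamCommitDiff asks DiffService for the diff of a single commit and copies
// the streamed chunks to stdout. Sketch only: addr and the ReadRequest fields
// are placeholders, not taken from this patch.
func streamCommitDiff(ctx context.Context, addr, commitSHA string) error {
	conn, err := grpc.Dial(addr, grpc.WithTransportCredentials(insecure.NewCredentials()))
	if err != nil {
		return err
	}
	defer conn.Close()

	client := rpc.NewDiffServiceClient(conn)
	stream, err := client.CommitDiff(ctx, &rpc.CommitDiffRequest{
		Base: &rpc.ReadRequest{ /* repository identification fields elided */ },
		Sha:  commitSHA,
	})
	if err != nil {
		return err
	}
	for {
		resp, err := stream.Recv()
		if err == io.EOF {
			return nil // server closed the stream; diff complete
		}
		if err != nil {
			return err
		}
		// Each CommitDiffResponse message carries one chunk of the raw diff.
		if _, err := os.Stdout.Write(resp.GetData()); err != nil {
			return err
		}
	}
}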
+var ( + file_diff_proto_rawDescOnce sync.Once + file_diff_proto_rawDescData = file_diff_proto_rawDesc +) + +func file_diff_proto_rawDescGZIP() []byte { + file_diff_proto_rawDescOnce.Do(func() { + file_diff_proto_rawDescData = protoimpl.X.CompressGZIP(file_diff_proto_rawDescData) + }) + return file_diff_proto_rawDescData +} + +var file_diff_proto_enumTypes = make([]protoimpl.EnumInfo, 1) +var file_diff_proto_msgTypes = make([]protoimpl.MessageInfo, 14) +var file_diff_proto_goTypes = []interface{}{ + (DiffResponse_FileStatus)(0), // 0: rpc.DiffResponse.FileStatus + (*DiffRequest)(nil), // 1: rpc.DiffRequest + (*RawDiffResponse)(nil), // 2: rpc.RawDiffResponse + (*DiffShortStatResponse)(nil), // 3: rpc.DiffShortStatResponse + (*HunkHeader)(nil), // 4: rpc.HunkHeader + (*DiffFileHeader)(nil), // 5: rpc.DiffFileHeader + (*DiffFileHunkHeaders)(nil), // 6: rpc.DiffFileHunkHeaders + (*GetDiffHunkHeadersRequest)(nil), // 7: rpc.GetDiffHunkHeadersRequest + (*GetDiffHunkHeadersResponse)(nil), // 8: rpc.GetDiffHunkHeadersResponse + (*DiffCutRequest)(nil), // 9: rpc.DiffCutRequest + (*DiffCutResponse)(nil), // 10: rpc.DiffCutResponse + (*DiffResponse)(nil), // 11: rpc.DiffResponse + (*CommitDiffRequest)(nil), // 12: rpc.CommitDiffRequest + (*CommitDiffResponse)(nil), // 13: rpc.CommitDiffResponse + nil, // 14: rpc.DiffFileHeader.ExtensionsEntry + (*ReadRequest)(nil), // 15: rpc.ReadRequest +} +var file_diff_proto_depIdxs = []int32{ + 15, // 0: rpc.DiffRequest.base:type_name -> rpc.ReadRequest + 14, // 1: rpc.DiffFileHeader.extensions:type_name -> rpc.DiffFileHeader.ExtensionsEntry + 5, // 2: rpc.DiffFileHunkHeaders.file_header:type_name -> rpc.DiffFileHeader + 4, // 3: rpc.DiffFileHunkHeaders.hunk_headers:type_name -> rpc.HunkHeader + 15, // 4: rpc.GetDiffHunkHeadersRequest.base:type_name -> rpc.ReadRequest + 6, // 5: rpc.GetDiffHunkHeadersResponse.files:type_name -> rpc.DiffFileHunkHeaders + 15, // 6: rpc.DiffCutRequest.base:type_name -> rpc.ReadRequest + 4, // 7: rpc.DiffCutResponse.hunk_header:type_name -> rpc.HunkHeader + 0, // 8: rpc.DiffResponse.status:type_name -> rpc.DiffResponse.FileStatus + 15, // 9: rpc.CommitDiffRequest.base:type_name -> rpc.ReadRequest + 1, // 10: rpc.DiffService.RawDiff:input_type -> rpc.DiffRequest + 1, // 11: rpc.DiffService.Diff:input_type -> rpc.DiffRequest + 12, // 12: rpc.DiffService.CommitDiff:input_type -> rpc.CommitDiffRequest + 1, // 13: rpc.DiffService.DiffShortStat:input_type -> rpc.DiffRequest + 7, // 14: rpc.DiffService.GetDiffHunkHeaders:input_type -> rpc.GetDiffHunkHeadersRequest + 9, // 15: rpc.DiffService.DiffCut:input_type -> rpc.DiffCutRequest + 2, // 16: rpc.DiffService.RawDiff:output_type -> rpc.RawDiffResponse + 11, // 17: rpc.DiffService.Diff:output_type -> rpc.DiffResponse + 13, // 18: rpc.DiffService.CommitDiff:output_type -> rpc.CommitDiffResponse + 3, // 19: rpc.DiffService.DiffShortStat:output_type -> rpc.DiffShortStatResponse + 8, // 20: rpc.DiffService.GetDiffHunkHeaders:output_type -> rpc.GetDiffHunkHeadersResponse + 10, // 21: rpc.DiffService.DiffCut:output_type -> rpc.DiffCutResponse + 16, // [16:22] is the sub-list for method output_type + 10, // [10:16] is the sub-list for method input_type + 10, // [10:10] is the sub-list for extension type_name + 10, // [10:10] is the sub-list for extension extendee + 0, // [0:10] is the sub-list for field type_name +} + +func init() { file_diff_proto_init() } +func file_diff_proto_init() { + if File_diff_proto != nil { + return + } + file_shared_proto_init() + if !protoimpl.UnsafeEnabled { + 
file_diff_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DiffRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_diff_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*RawDiffResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_diff_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DiffShortStatResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_diff_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*HunkHeader); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_diff_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DiffFileHeader); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_diff_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DiffFileHunkHeaders); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_diff_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GetDiffHunkHeadersRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_diff_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GetDiffHunkHeadersResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_diff_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DiffCutRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_diff_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DiffCutResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_diff_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DiffResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_diff_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CommitDiffRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_diff_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CommitDiffResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_diff_proto_rawDesc, + NumEnums: 1, + NumMessages: 14, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: 
file_diff_proto_goTypes, + DependencyIndexes: file_diff_proto_depIdxs, + EnumInfos: file_diff_proto_enumTypes, + MessageInfos: file_diff_proto_msgTypes, + }.Build() + File_diff_proto = out.File + file_diff_proto_rawDesc = nil + file_diff_proto_goTypes = nil + file_diff_proto_depIdxs = nil +} diff --git a/gitrpc/rpc/diff_grpc.pb.go b/gitrpc/rpc/diff_grpc.pb.go new file mode 100644 index 0000000000..0d3451e6db --- /dev/null +++ b/gitrpc/rpc/diff_grpc.pb.go @@ -0,0 +1,367 @@ +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.2.0 +// - protoc v3.21.11 +// source: diff.proto + +package rpc + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.32.0 or later. +const _ = grpc.SupportPackageIsVersion7 + +// DiffServiceClient is the client API for DiffService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type DiffServiceClient interface { + RawDiff(ctx context.Context, in *DiffRequest, opts ...grpc.CallOption) (DiffService_RawDiffClient, error) + Diff(ctx context.Context, in *DiffRequest, opts ...grpc.CallOption) (DiffService_DiffClient, error) + CommitDiff(ctx context.Context, in *CommitDiffRequest, opts ...grpc.CallOption) (DiffService_CommitDiffClient, error) + DiffShortStat(ctx context.Context, in *DiffRequest, opts ...grpc.CallOption) (*DiffShortStatResponse, error) + GetDiffHunkHeaders(ctx context.Context, in *GetDiffHunkHeadersRequest, opts ...grpc.CallOption) (*GetDiffHunkHeadersResponse, error) + DiffCut(ctx context.Context, in *DiffCutRequest, opts ...grpc.CallOption) (*DiffCutResponse, error) +} + +type diffServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewDiffServiceClient(cc grpc.ClientConnInterface) DiffServiceClient { + return &diffServiceClient{cc} +} + +func (c *diffServiceClient) RawDiff(ctx context.Context, in *DiffRequest, opts ...grpc.CallOption) (DiffService_RawDiffClient, error) { + stream, err := c.cc.NewStream(ctx, &DiffService_ServiceDesc.Streams[0], "/rpc.DiffService/RawDiff", opts...) + if err != nil { + return nil, err + } + x := &diffServiceRawDiffClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type DiffService_RawDiffClient interface { + Recv() (*RawDiffResponse, error) + grpc.ClientStream +} + +type diffServiceRawDiffClient struct { + grpc.ClientStream +} + +func (x *diffServiceRawDiffClient) Recv() (*RawDiffResponse, error) { + m := new(RawDiffResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *diffServiceClient) Diff(ctx context.Context, in *DiffRequest, opts ...grpc.CallOption) (DiffService_DiffClient, error) { + stream, err := c.cc.NewStream(ctx, &DiffService_ServiceDesc.Streams[1], "/rpc.DiffService/Diff", opts...) 
+ if err != nil { + return nil, err + } + x := &diffServiceDiffClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type DiffService_DiffClient interface { + Recv() (*DiffResponse, error) + grpc.ClientStream +} + +type diffServiceDiffClient struct { + grpc.ClientStream +} + +func (x *diffServiceDiffClient) Recv() (*DiffResponse, error) { + m := new(DiffResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *diffServiceClient) CommitDiff(ctx context.Context, in *CommitDiffRequest, opts ...grpc.CallOption) (DiffService_CommitDiffClient, error) { + stream, err := c.cc.NewStream(ctx, &DiffService_ServiceDesc.Streams[2], "/rpc.DiffService/CommitDiff", opts...) + if err != nil { + return nil, err + } + x := &diffServiceCommitDiffClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type DiffService_CommitDiffClient interface { + Recv() (*CommitDiffResponse, error) + grpc.ClientStream +} + +type diffServiceCommitDiffClient struct { + grpc.ClientStream +} + +func (x *diffServiceCommitDiffClient) Recv() (*CommitDiffResponse, error) { + m := new(CommitDiffResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *diffServiceClient) DiffShortStat(ctx context.Context, in *DiffRequest, opts ...grpc.CallOption) (*DiffShortStatResponse, error) { + out := new(DiffShortStatResponse) + err := c.cc.Invoke(ctx, "/rpc.DiffService/DiffShortStat", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *diffServiceClient) GetDiffHunkHeaders(ctx context.Context, in *GetDiffHunkHeadersRequest, opts ...grpc.CallOption) (*GetDiffHunkHeadersResponse, error) { + out := new(GetDiffHunkHeadersResponse) + err := c.cc.Invoke(ctx, "/rpc.DiffService/GetDiffHunkHeaders", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *diffServiceClient) DiffCut(ctx context.Context, in *DiffCutRequest, opts ...grpc.CallOption) (*DiffCutResponse, error) { + out := new(DiffCutResponse) + err := c.cc.Invoke(ctx, "/rpc.DiffService/DiffCut", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// DiffServiceServer is the server API for DiffService service. +// All implementations must embed UnimplementedDiffServiceServer +// for forward compatibility +type DiffServiceServer interface { + RawDiff(*DiffRequest, DiffService_RawDiffServer) error + Diff(*DiffRequest, DiffService_DiffServer) error + CommitDiff(*CommitDiffRequest, DiffService_CommitDiffServer) error + DiffShortStat(context.Context, *DiffRequest) (*DiffShortStatResponse, error) + GetDiffHunkHeaders(context.Context, *GetDiffHunkHeadersRequest) (*GetDiffHunkHeadersResponse, error) + DiffCut(context.Context, *DiffCutRequest) (*DiffCutResponse, error) + mustEmbedUnimplementedDiffServiceServer() +} + +// UnimplementedDiffServiceServer must be embedded to have forward compatible implementations. 
+type UnimplementedDiffServiceServer struct { +} + +func (UnimplementedDiffServiceServer) RawDiff(*DiffRequest, DiffService_RawDiffServer) error { + return status.Errorf(codes.Unimplemented, "method RawDiff not implemented") +} +func (UnimplementedDiffServiceServer) Diff(*DiffRequest, DiffService_DiffServer) error { + return status.Errorf(codes.Unimplemented, "method Diff not implemented") +} +func (UnimplementedDiffServiceServer) CommitDiff(*CommitDiffRequest, DiffService_CommitDiffServer) error { + return status.Errorf(codes.Unimplemented, "method CommitDiff not implemented") +} +func (UnimplementedDiffServiceServer) DiffShortStat(context.Context, *DiffRequest) (*DiffShortStatResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method DiffShortStat not implemented") +} +func (UnimplementedDiffServiceServer) GetDiffHunkHeaders(context.Context, *GetDiffHunkHeadersRequest) (*GetDiffHunkHeadersResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetDiffHunkHeaders not implemented") +} +func (UnimplementedDiffServiceServer) DiffCut(context.Context, *DiffCutRequest) (*DiffCutResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method DiffCut not implemented") +} +func (UnimplementedDiffServiceServer) mustEmbedUnimplementedDiffServiceServer() {} + +// UnsafeDiffServiceServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to DiffServiceServer will +// result in compilation errors. +type UnsafeDiffServiceServer interface { + mustEmbedUnimplementedDiffServiceServer() +} + +func RegisterDiffServiceServer(s grpc.ServiceRegistrar, srv DiffServiceServer) { + s.RegisterService(&DiffService_ServiceDesc, srv) +} + +func _DiffService_RawDiff_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(DiffRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(DiffServiceServer).RawDiff(m, &diffServiceRawDiffServer{stream}) +} + +type DiffService_RawDiffServer interface { + Send(*RawDiffResponse) error + grpc.ServerStream +} + +type diffServiceRawDiffServer struct { + grpc.ServerStream +} + +func (x *diffServiceRawDiffServer) Send(m *RawDiffResponse) error { + return x.ServerStream.SendMsg(m) +} + +func _DiffService_Diff_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(DiffRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(DiffServiceServer).Diff(m, &diffServiceDiffServer{stream}) +} + +type DiffService_DiffServer interface { + Send(*DiffResponse) error + grpc.ServerStream +} + +type diffServiceDiffServer struct { + grpc.ServerStream +} + +func (x *diffServiceDiffServer) Send(m *DiffResponse) error { + return x.ServerStream.SendMsg(m) +} + +func _DiffService_CommitDiff_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(CommitDiffRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(DiffServiceServer).CommitDiff(m, &diffServiceCommitDiffServer{stream}) +} + +type DiffService_CommitDiffServer interface { + Send(*CommitDiffResponse) error + grpc.ServerStream +} + +type diffServiceCommitDiffServer struct { + grpc.ServerStream +} + +func (x *diffServiceCommitDiffServer) Send(m *CommitDiffResponse) error { + return x.ServerStream.SendMsg(m) +} + +func _DiffService_DiffShortStat_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := 
new(DiffRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DiffServiceServer).DiffShortStat(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/rpc.DiffService/DiffShortStat", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DiffServiceServer).DiffShortStat(ctx, req.(*DiffRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DiffService_GetDiffHunkHeaders_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetDiffHunkHeadersRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DiffServiceServer).GetDiffHunkHeaders(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/rpc.DiffService/GetDiffHunkHeaders", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DiffServiceServer).GetDiffHunkHeaders(ctx, req.(*GetDiffHunkHeadersRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DiffService_DiffCut_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DiffCutRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DiffServiceServer).DiffCut(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/rpc.DiffService/DiffCut", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DiffServiceServer).DiffCut(ctx, req.(*DiffCutRequest)) + } + return interceptor(ctx, in, info, handler) +} + +// DiffService_ServiceDesc is the grpc.ServiceDesc for DiffService service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var DiffService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "rpc.DiffService", + HandlerType: (*DiffServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "DiffShortStat", + Handler: _DiffService_DiffShortStat_Handler, + }, + { + MethodName: "GetDiffHunkHeaders", + Handler: _DiffService_GetDiffHunkHeaders_Handler, + }, + { + MethodName: "DiffCut", + Handler: _DiffService_DiffCut_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "RawDiff", + Handler: _DiffService_RawDiff_Handler, + ServerStreams: true, + }, + { + StreamName: "Diff", + Handler: _DiffService_Diff_Handler, + ServerStreams: true, + }, + { + StreamName: "CommitDiff", + Handler: _DiffService_CommitDiff_Handler, + ServerStreams: true, + }, + }, + Metadata: "diff.proto", +} diff --git a/gitrpc/rpc/http.pb.go b/gitrpc/rpc/http.pb.go new file mode 100644 index 0000000000..9afe821bfb --- /dev/null +++ b/gitrpc/rpc/http.pb.go @@ -0,0 +1,478 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.28.1 +// protoc v3.21.11 +// source: http.proto + +package rpc + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. 
+ _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type InfoRefsRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Base specifies the base read parameters + Base *ReadRequest `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"` + // Service can be: upload-pack or receive-pack + Service string `protobuf:"bytes,2,opt,name=service,proto3" json:"service,omitempty"` + // Parameters to use with git -c (key=value pairs) + GitConfigOptions []string `protobuf:"bytes,3,rep,name=git_config_options,json=gitConfigOptions,proto3" json:"git_config_options,omitempty"` + // Git protocol version + GitProtocol string `protobuf:"bytes,4,opt,name=git_protocol,json=gitProtocol,proto3" json:"git_protocol,omitempty"` +} + +func (x *InfoRefsRequest) Reset() { + *x = InfoRefsRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_http_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *InfoRefsRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*InfoRefsRequest) ProtoMessage() {} + +func (x *InfoRefsRequest) ProtoReflect() protoreflect.Message { + mi := &file_http_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use InfoRefsRequest.ProtoReflect.Descriptor instead. +func (*InfoRefsRequest) Descriptor() ([]byte, []int) { + return file_http_proto_rawDescGZIP(), []int{0} +} + +func (x *InfoRefsRequest) GetBase() *ReadRequest { + if x != nil { + return x.Base + } + return nil +} + +func (x *InfoRefsRequest) GetService() string { + if x != nil { + return x.Service + } + return "" +} + +func (x *InfoRefsRequest) GetGitConfigOptions() []string { + if x != nil { + return x.GitConfigOptions + } + return nil +} + +func (x *InfoRefsRequest) GetGitProtocol() string { + if x != nil { + return x.GitProtocol + } + return "" +} + +type InfoRefsResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Data []byte `protobuf:"bytes,1,opt,name=data,proto3" json:"data,omitempty"` +} + +func (x *InfoRefsResponse) Reset() { + *x = InfoRefsResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_http_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *InfoRefsResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*InfoRefsResponse) ProtoMessage() {} + +func (x *InfoRefsResponse) ProtoReflect() protoreflect.Message { + mi := &file_http_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use InfoRefsResponse.ProtoReflect.Descriptor instead. +func (*InfoRefsResponse) Descriptor() ([]byte, []int) { + return file_http_proto_rawDescGZIP(), []int{1} +} + +func (x *InfoRefsResponse) GetData() []byte { + if x != nil { + return x.Data + } + return nil +} + +type ServicePackRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Base specifies the base parameters. 
+ // Depending on the service the matching base type has to be passed + // + // Types that are assignable to Base: + // *ServicePackRequest_ReadBase + // *ServicePackRequest_WriteBase + Base isServicePackRequest_Base `protobuf_oneof:"base"` + // Service can be: upload-pack or receive-pack + Service string `protobuf:"bytes,3,opt,name=service,proto3" json:"service,omitempty"` + // Raw data to be copied to stdin of 'git upload-pack' + Data []byte `protobuf:"bytes,4,opt,name=data,proto3" json:"data,omitempty"` + // Parameters to use with git -c (key=value pairs) + GitConfigOptions []string `protobuf:"bytes,5,rep,name=git_config_options,json=gitConfigOptions,proto3" json:"git_config_options,omitempty"` + // Git protocol version + GitProtocol string `protobuf:"bytes,6,opt,name=git_protocol,json=gitProtocol,proto3" json:"git_protocol,omitempty"` +} + +func (x *ServicePackRequest) Reset() { + *x = ServicePackRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_http_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ServicePackRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ServicePackRequest) ProtoMessage() {} + +func (x *ServicePackRequest) ProtoReflect() protoreflect.Message { + mi := &file_http_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ServicePackRequest.ProtoReflect.Descriptor instead. +func (*ServicePackRequest) Descriptor() ([]byte, []int) { + return file_http_proto_rawDescGZIP(), []int{2} +} + +func (m *ServicePackRequest) GetBase() isServicePackRequest_Base { + if m != nil { + return m.Base + } + return nil +} + +func (x *ServicePackRequest) GetReadBase() *ReadRequest { + if x, ok := x.GetBase().(*ServicePackRequest_ReadBase); ok { + return x.ReadBase + } + return nil +} + +func (x *ServicePackRequest) GetWriteBase() *WriteRequest { + if x, ok := x.GetBase().(*ServicePackRequest_WriteBase); ok { + return x.WriteBase + } + return nil +} + +func (x *ServicePackRequest) GetService() string { + if x != nil { + return x.Service + } + return "" +} + +func (x *ServicePackRequest) GetData() []byte { + if x != nil { + return x.Data + } + return nil +} + +func (x *ServicePackRequest) GetGitConfigOptions() []string { + if x != nil { + return x.GitConfigOptions + } + return nil +} + +func (x *ServicePackRequest) GetGitProtocol() string { + if x != nil { + return x.GitProtocol + } + return "" +} + +type isServicePackRequest_Base interface { + isServicePackRequest_Base() +} + +type ServicePackRequest_ReadBase struct { + ReadBase *ReadRequest `protobuf:"bytes,1,opt,name=read_base,json=readBase,proto3,oneof"` +} + +type ServicePackRequest_WriteBase struct { + WriteBase *WriteRequest `protobuf:"bytes,2,opt,name=write_base,json=writeBase,proto3,oneof"` +} + +func (*ServicePackRequest_ReadBase) isServicePackRequest_Base() {} + +func (*ServicePackRequest_WriteBase) isServicePackRequest_Base() {} + +type ServicePackResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Raw data from stdout of 'git upload-pack' + Data []byte `protobuf:"bytes,1,opt,name=data,proto3" json:"data,omitempty"` +} + +func (x *ServicePackResponse) Reset() { + *x = ServicePackResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_http_proto_msgTypes[3] + ms := 
protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ServicePackResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ServicePackResponse) ProtoMessage() {} + +func (x *ServicePackResponse) ProtoReflect() protoreflect.Message { + mi := &file_http_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ServicePackResponse.ProtoReflect.Descriptor instead. +func (*ServicePackResponse) Descriptor() ([]byte, []int) { + return file_http_proto_rawDescGZIP(), []int{3} +} + +func (x *ServicePackResponse) GetData() []byte { + if x != nil { + return x.Data + } + return nil +} + +var File_http_proto protoreflect.FileDescriptor + +var file_http_proto_rawDesc = []byte{ + 0x0a, 0x0a, 0x68, 0x74, 0x74, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x03, 0x72, 0x70, + 0x63, 0x1a, 0x0c, 0x73, 0x68, 0x61, 0x72, 0x65, 0x64, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, + 0xa2, 0x01, 0x0a, 0x0f, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x65, 0x66, 0x73, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x12, 0x24, 0x0a, 0x04, 0x62, 0x61, 0x73, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x10, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x52, 0x65, 0x61, 0x64, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x52, 0x04, 0x62, 0x61, 0x73, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x65, 0x72, + 0x76, 0x69, 0x63, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x73, 0x65, 0x72, 0x76, + 0x69, 0x63, 0x65, 0x12, 0x2c, 0x0a, 0x12, 0x67, 0x69, 0x74, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, + 0x67, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, + 0x10, 0x67, 0x69, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, + 0x73, 0x12, 0x21, 0x0a, 0x0c, 0x67, 0x69, 0x74, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, + 0x6c, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x67, 0x69, 0x74, 0x50, 0x72, 0x6f, 0x74, + 0x6f, 0x63, 0x6f, 0x6c, 0x22, 0x26, 0x0a, 0x10, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x65, 0x66, 0x73, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x22, 0x80, 0x02, 0x0a, + 0x12, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x50, 0x61, 0x63, 0x6b, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x12, 0x2f, 0x0a, 0x09, 0x72, 0x65, 0x61, 0x64, 0x5f, 0x62, 0x61, 0x73, 0x65, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x52, 0x65, 0x61, + 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x08, 0x72, 0x65, 0x61, 0x64, + 0x42, 0x61, 0x73, 0x65, 0x12, 0x32, 0x0a, 0x0a, 0x77, 0x72, 0x69, 0x74, 0x65, 0x5f, 0x62, 0x61, + 0x73, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x57, + 0x72, 0x69, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x09, 0x77, + 0x72, 0x69, 0x74, 0x65, 0x42, 0x61, 0x73, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x65, 0x72, 0x76, + 0x69, 0x63, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x73, 0x65, 0x72, 0x76, 0x69, + 0x63, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0c, + 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x12, 0x2c, 0x0a, 0x12, 0x67, 0x69, 0x74, 0x5f, 0x63, 0x6f, + 0x6e, 0x66, 0x69, 0x67, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 
0x05, 0x20, 0x03, + 0x28, 0x09, 0x52, 0x10, 0x67, 0x69, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x4f, 0x70, 0x74, + 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x21, 0x0a, 0x0c, 0x67, 0x69, 0x74, 0x5f, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x63, 0x6f, 0x6c, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x67, 0x69, 0x74, 0x50, + 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x42, 0x06, 0x0a, 0x04, 0x62, 0x61, 0x73, 0x65, 0x22, + 0x29, 0x0a, 0x13, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x50, 0x61, 0x63, 0x6b, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x32, 0x97, 0x01, 0x0a, 0x10, 0x53, + 0x6d, 0x61, 0x72, 0x74, 0x48, 0x54, 0x54, 0x50, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, + 0x3b, 0x0a, 0x08, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x65, 0x66, 0x73, 0x12, 0x14, 0x2e, 0x72, 0x70, + 0x63, 0x2e, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x65, 0x66, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x1a, 0x15, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x65, 0x66, 0x73, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x30, 0x01, 0x12, 0x46, 0x0a, 0x0b, + 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x50, 0x61, 0x63, 0x6b, 0x12, 0x17, 0x2e, 0x72, 0x70, + 0x63, 0x2e, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x50, 0x61, 0x63, 0x6b, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x1a, 0x18, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x65, 0x72, 0x76, 0x69, + 0x63, 0x65, 0x50, 0x61, 0x63, 0x6b, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, + 0x28, 0x01, 0x30, 0x01, 0x42, 0x27, 0x5a, 0x25, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, + 0x6f, 0x6d, 0x2f, 0x68, 0x61, 0x72, 0x6e, 0x65, 0x73, 0x73, 0x2f, 0x67, 0x69, 0x74, 0x6e, 0x65, + 0x73, 0x73, 0x2f, 0x67, 0x69, 0x74, 0x72, 0x70, 0x63, 0x2f, 0x72, 0x70, 0x63, 0x62, 0x06, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_http_proto_rawDescOnce sync.Once + file_http_proto_rawDescData = file_http_proto_rawDesc +) + +func file_http_proto_rawDescGZIP() []byte { + file_http_proto_rawDescOnce.Do(func() { + file_http_proto_rawDescData = protoimpl.X.CompressGZIP(file_http_proto_rawDescData) + }) + return file_http_proto_rawDescData +} + +var file_http_proto_msgTypes = make([]protoimpl.MessageInfo, 4) +var file_http_proto_goTypes = []interface{}{ + (*InfoRefsRequest)(nil), // 0: rpc.InfoRefsRequest + (*InfoRefsResponse)(nil), // 1: rpc.InfoRefsResponse + (*ServicePackRequest)(nil), // 2: rpc.ServicePackRequest + (*ServicePackResponse)(nil), // 3: rpc.ServicePackResponse + (*ReadRequest)(nil), // 4: rpc.ReadRequest + (*WriteRequest)(nil), // 5: rpc.WriteRequest +} +var file_http_proto_depIdxs = []int32{ + 4, // 0: rpc.InfoRefsRequest.base:type_name -> rpc.ReadRequest + 4, // 1: rpc.ServicePackRequest.read_base:type_name -> rpc.ReadRequest + 5, // 2: rpc.ServicePackRequest.write_base:type_name -> rpc.WriteRequest + 0, // 3: rpc.SmartHTTPService.InfoRefs:input_type -> rpc.InfoRefsRequest + 2, // 4: rpc.SmartHTTPService.ServicePack:input_type -> rpc.ServicePackRequest + 1, // 5: rpc.SmartHTTPService.InfoRefs:output_type -> rpc.InfoRefsResponse + 3, // 6: rpc.SmartHTTPService.ServicePack:output_type -> rpc.ServicePackResponse + 5, // [5:7] is the sub-list for method output_type + 3, // [3:5] is the sub-list for method input_type + 3, // [3:3] is the sub-list for extension type_name + 3, // [3:3] is the sub-list for extension extendee + 0, // [0:3] is the sub-list for field type_name +} + +func init() { file_http_proto_init() } 
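The http.proto messages registered above are consumed through the SmartHTTPService stubs added below in gitrpc/rpc/http_grpc.pb.go. The following is a hypothetical client-side sketch of the bidirectional ServicePack stream: it assumes (based only on the field comments above) that the base and service name are carried in the first message and the request body is streamed afterwards in data chunks; none of this protocol convention is asserted by the patch itself.

package example // hypothetical package, for illustration only

import (
	"context"
	"io"

	"github.com/harness/gitness/gitrpc/rpc"
)

// servicePack forwards a Git smart-HTTP request body to the server and copies
// the streamed response back to out. The ReadBase selection and the
// "upload-pack" service string are illustrative assumptions.
func servicePack(ctx context.Context, client rpc.SmartHTTPServiceClient, body io.Reader, out io.Writer) error {
	stream, err := client.ServicePack(ctx)
	if err != nil {
		return err
	}

	// First message: choose the read base (a fetch does not write) and the service.
	if err := stream.Send(&rpc.ServicePackRequest{
		Base:    &rpc.ServicePackRequest_ReadBase{ReadBase: &rpc.ReadRequest{ /* repo fields elided */ }},
		Service: "upload-pack",
	}); err != nil {
		return err
	}

	// Stream the request body in chunks as subsequent data-only messages.
	buf := make([]byte, 32*1024)
	for {
		n, rerr := body.Read(buf)
		if n > 0 {
			if err := stream.Send(&rpc.ServicePackRequest{Data: buf[:n]}); err != nil {
				return err
			}
		}
		if rerr == io.EOF {
			break
		}
		if rerr != nil {
			return rerr
		}
	}
	if err := stream.CloseSend(); err != nil {
		return err
	}

	// Copy the streamed response back to the caller until the server is done.
	for {
		resp, err := stream.Recv()
		if err == io.EOF {
			return nil
		}
		if err != nil {
			return err
		}
		if _, err := out.Write(resp.GetData()); err != nil {
			return err
		}
	}
}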
+func file_http_proto_init() { + if File_http_proto != nil { + return + } + file_shared_proto_init() + if !protoimpl.UnsafeEnabled { + file_http_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*InfoRefsRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_http_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*InfoRefsResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_http_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ServicePackRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_http_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ServicePackResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + file_http_proto_msgTypes[2].OneofWrappers = []interface{}{ + (*ServicePackRequest_ReadBase)(nil), + (*ServicePackRequest_WriteBase)(nil), + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_http_proto_rawDesc, + NumEnums: 0, + NumMessages: 4, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_http_proto_goTypes, + DependencyIndexes: file_http_proto_depIdxs, + MessageInfos: file_http_proto_msgTypes, + }.Build() + File_http_proto = out.File + file_http_proto_rawDesc = nil + file_http_proto_goTypes = nil + file_http_proto_depIdxs = nil +} diff --git a/gitrpc/rpc/http_grpc.pb.go b/gitrpc/rpc/http_grpc.pb.go new file mode 100644 index 0000000000..2d9ce2c5f7 --- /dev/null +++ b/gitrpc/rpc/http_grpc.pb.go @@ -0,0 +1,208 @@ +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.2.0 +// - protoc v3.21.11 +// source: http.proto + +package rpc + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.32.0 or later. +const _ = grpc.SupportPackageIsVersion7 + +// SmartHTTPServiceClient is the client API for SmartHTTPService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type SmartHTTPServiceClient interface { + // The response body for GET /info/refs?service=git-upload-pack + // Will be invoked when the user executes a `git fetch`, meaning the server + // will upload the packs to that user. The user doesn't upload new objects. 
+ InfoRefs(ctx context.Context, in *InfoRefsRequest, opts ...grpc.CallOption) (SmartHTTPService_InfoRefsClient, error) + // ServicePack is just upload-pack or receive-pack + ServicePack(ctx context.Context, opts ...grpc.CallOption) (SmartHTTPService_ServicePackClient, error) +} + +type smartHTTPServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewSmartHTTPServiceClient(cc grpc.ClientConnInterface) SmartHTTPServiceClient { + return &smartHTTPServiceClient{cc} +} + +func (c *smartHTTPServiceClient) InfoRefs(ctx context.Context, in *InfoRefsRequest, opts ...grpc.CallOption) (SmartHTTPService_InfoRefsClient, error) { + stream, err := c.cc.NewStream(ctx, &SmartHTTPService_ServiceDesc.Streams[0], "/rpc.SmartHTTPService/InfoRefs", opts...) + if err != nil { + return nil, err + } + x := &smartHTTPServiceInfoRefsClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type SmartHTTPService_InfoRefsClient interface { + Recv() (*InfoRefsResponse, error) + grpc.ClientStream +} + +type smartHTTPServiceInfoRefsClient struct { + grpc.ClientStream +} + +func (x *smartHTTPServiceInfoRefsClient) Recv() (*InfoRefsResponse, error) { + m := new(InfoRefsResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *smartHTTPServiceClient) ServicePack(ctx context.Context, opts ...grpc.CallOption) (SmartHTTPService_ServicePackClient, error) { + stream, err := c.cc.NewStream(ctx, &SmartHTTPService_ServiceDesc.Streams[1], "/rpc.SmartHTTPService/ServicePack", opts...) + if err != nil { + return nil, err + } + x := &smartHTTPServiceServicePackClient{stream} + return x, nil +} + +type SmartHTTPService_ServicePackClient interface { + Send(*ServicePackRequest) error + Recv() (*ServicePackResponse, error) + grpc.ClientStream +} + +type smartHTTPServiceServicePackClient struct { + grpc.ClientStream +} + +func (x *smartHTTPServiceServicePackClient) Send(m *ServicePackRequest) error { + return x.ClientStream.SendMsg(m) +} + +func (x *smartHTTPServiceServicePackClient) Recv() (*ServicePackResponse, error) { + m := new(ServicePackResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +// SmartHTTPServiceServer is the server API for SmartHTTPService service. +// All implementations must embed UnimplementedSmartHTTPServiceServer +// for forward compatibility +type SmartHTTPServiceServer interface { + // The response body for GET /info/refs?service=git-upload-pack + // Will be invoked when the user executes a `git fetch`, meaning the server + // will upload the packs to that user. The user doesn't upload new objects. + InfoRefs(*InfoRefsRequest, SmartHTTPService_InfoRefsServer) error + // ServicePack is just upload-pack or receive-pack + ServicePack(SmartHTTPService_ServicePackServer) error + mustEmbedUnimplementedSmartHTTPServiceServer() +} + +// UnimplementedSmartHTTPServiceServer must be embedded to have forward compatible implementations. 
+type UnimplementedSmartHTTPServiceServer struct { +} + +func (UnimplementedSmartHTTPServiceServer) InfoRefs(*InfoRefsRequest, SmartHTTPService_InfoRefsServer) error { + return status.Errorf(codes.Unimplemented, "method InfoRefs not implemented") +} +func (UnimplementedSmartHTTPServiceServer) ServicePack(SmartHTTPService_ServicePackServer) error { + return status.Errorf(codes.Unimplemented, "method ServicePack not implemented") +} +func (UnimplementedSmartHTTPServiceServer) mustEmbedUnimplementedSmartHTTPServiceServer() {} + +// UnsafeSmartHTTPServiceServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to SmartHTTPServiceServer will +// result in compilation errors. +type UnsafeSmartHTTPServiceServer interface { + mustEmbedUnimplementedSmartHTTPServiceServer() +} + +func RegisterSmartHTTPServiceServer(s grpc.ServiceRegistrar, srv SmartHTTPServiceServer) { + s.RegisterService(&SmartHTTPService_ServiceDesc, srv) +} + +func _SmartHTTPService_InfoRefs_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(InfoRefsRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(SmartHTTPServiceServer).InfoRefs(m, &smartHTTPServiceInfoRefsServer{stream}) +} + +type SmartHTTPService_InfoRefsServer interface { + Send(*InfoRefsResponse) error + grpc.ServerStream +} + +type smartHTTPServiceInfoRefsServer struct { + grpc.ServerStream +} + +func (x *smartHTTPServiceInfoRefsServer) Send(m *InfoRefsResponse) error { + return x.ServerStream.SendMsg(m) +} + +func _SmartHTTPService_ServicePack_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(SmartHTTPServiceServer).ServicePack(&smartHTTPServiceServicePackServer{stream}) +} + +type SmartHTTPService_ServicePackServer interface { + Send(*ServicePackResponse) error + Recv() (*ServicePackRequest, error) + grpc.ServerStream +} + +type smartHTTPServiceServicePackServer struct { + grpc.ServerStream +} + +func (x *smartHTTPServiceServicePackServer) Send(m *ServicePackResponse) error { + return x.ServerStream.SendMsg(m) +} + +func (x *smartHTTPServiceServicePackServer) Recv() (*ServicePackRequest, error) { + m := new(ServicePackRequest) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +// SmartHTTPService_ServiceDesc is the grpc.ServiceDesc for SmartHTTPService service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var SmartHTTPService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "rpc.SmartHTTPService", + HandlerType: (*SmartHTTPServiceServer)(nil), + Methods: []grpc.MethodDesc{}, + Streams: []grpc.StreamDesc{ + { + StreamName: "InfoRefs", + Handler: _SmartHTTPService_InfoRefs_Handler, + ServerStreams: true, + }, + { + StreamName: "ServicePack", + Handler: _SmartHTTPService_ServicePack_Handler, + ServerStreams: true, + ClientStreams: true, + }, + }, + Metadata: "http.proto", +} diff --git a/gitrpc/rpc/merge.pb.go b/gitrpc/rpc/merge.pb.go new file mode 100644 index 0000000000..2fca8fab5a --- /dev/null +++ b/gitrpc/rpc/merge.pb.go @@ -0,0 +1,541 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// versions: +// protoc-gen-go v1.28.1 +// protoc v3.21.11 +// source: merge.proto + +package rpc + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type MergeRequest_MergeMethod int32 + +const ( + MergeRequest_merge MergeRequest_MergeMethod = 0 + MergeRequest_squash MergeRequest_MergeMethod = 1 + MergeRequest_rebase MergeRequest_MergeMethod = 2 +) + +// Enum value maps for MergeRequest_MergeMethod. +var ( + MergeRequest_MergeMethod_name = map[int32]string{ + 0: "merge", + 1: "squash", + 2: "rebase", + } + MergeRequest_MergeMethod_value = map[string]int32{ + "merge": 0, + "squash": 1, + "rebase": 2, + } +) + +func (x MergeRequest_MergeMethod) Enum() *MergeRequest_MergeMethod { + p := new(MergeRequest_MergeMethod) + *p = x + return p +} + +func (x MergeRequest_MergeMethod) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (MergeRequest_MergeMethod) Descriptor() protoreflect.EnumDescriptor { + return file_merge_proto_enumTypes[0].Descriptor() +} + +func (MergeRequest_MergeMethod) Type() protoreflect.EnumType { + return &file_merge_proto_enumTypes[0] +} + +func (x MergeRequest_MergeMethod) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use MergeRequest_MergeMethod.Descriptor instead. +func (MergeRequest_MergeMethod) EnumDescriptor() ([]byte, []int) { + return file_merge_proto_rawDescGZIP(), []int{0, 0} +} + +type MergeRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Base *WriteRequest `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"` + // head_branch is the source branch we want to merge + HeadBranch string `protobuf:"bytes,2,opt,name=head_branch,json=headBranch,proto3" json:"head_branch,omitempty"` + // base_branch is the branch into which the given commit shall be merged and whose + // reference is going to be updated. + BaseBranch string `protobuf:"bytes,3,opt,name=base_branch,json=baseBranch,proto3" json:"base_branch,omitempty"` + // title is the title to use for the merge commit. + Title string `protobuf:"bytes,4,opt,name=title,proto3" json:"title,omitempty"` + // message is the message to use for the merge commit. + Message string `protobuf:"bytes,5,opt,name=message,proto3" json:"message,omitempty"` + // author is the person who originally wrote the code + Author *Identity `protobuf:"bytes,6,opt,name=author,proto3" json:"author,omitempty"` + // authorDate is the date when the code was written + AuthorDate int64 `protobuf:"varint,7,opt,name=authorDate,proto3" json:"authorDate,omitempty"` + // committer is the person who last applied the patch + Committer *Identity `protobuf:"bytes,8,opt,name=committer,proto3" json:"committer,omitempty"` + // committerDate is the date when the code was applied + CommitterDate int64 `protobuf:"varint,9,opt,name=committerDate,proto3" json:"committerDate,omitempty"` + // ref_type is an optional value and is used to generate the full + // reference in which the merge result is stored.
+ RefType RefType `protobuf:"varint,10,opt,name=ref_type,json=refType,proto3,enum=rpc.RefType" json:"ref_type,omitempty"` + // ref_name is an optional value and is used to generate the full + // reference in which the merge result is stored. + RefName string `protobuf:"bytes,11,opt,name=ref_name,json=refName,proto3" json:"ref_name,omitempty"` + // head_expected_sha is the commit sha on the head branch; if head_expected_sha is older + // than the head_branch's latest sha then the merge will fail. + HeadExpectedSha string `protobuf:"bytes,12,opt,name=head_expected_sha,json=headExpectedSha,proto3" json:"head_expected_sha,omitempty"` + // force merge + Force bool `protobuf:"varint,13,opt,name=force,proto3" json:"force,omitempty"` + // delete branch after merge + DeleteHeadBranch bool `protobuf:"varint,14,opt,name=delete_head_branch,json=deleteHeadBranch,proto3" json:"delete_head_branch,omitempty"` + // merging method + Method MergeRequest_MergeMethod `protobuf:"varint,15,opt,name=method,proto3,enum=rpc.MergeRequest_MergeMethod" json:"method,omitempty"` +} + +func (x *MergeRequest) Reset() { + *x = MergeRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_merge_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *MergeRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*MergeRequest) ProtoMessage() {} + +func (x *MergeRequest) ProtoReflect() protoreflect.Message { + mi := &file_merge_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use MergeRequest.ProtoReflect.Descriptor instead.
+func (*MergeRequest) Descriptor() ([]byte, []int) { + return file_merge_proto_rawDescGZIP(), []int{0} +} + +func (x *MergeRequest) GetBase() *WriteRequest { + if x != nil { + return x.Base + } + return nil +} + +func (x *MergeRequest) GetHeadBranch() string { + if x != nil { + return x.HeadBranch + } + return "" +} + +func (x *MergeRequest) GetBaseBranch() string { + if x != nil { + return x.BaseBranch + } + return "" +} + +func (x *MergeRequest) GetTitle() string { + if x != nil { + return x.Title + } + return "" +} + +func (x *MergeRequest) GetMessage() string { + if x != nil { + return x.Message + } + return "" +} + +func (x *MergeRequest) GetAuthor() *Identity { + if x != nil { + return x.Author + } + return nil +} + +func (x *MergeRequest) GetAuthorDate() int64 { + if x != nil { + return x.AuthorDate + } + return 0 +} + +func (x *MergeRequest) GetCommitter() *Identity { + if x != nil { + return x.Committer + } + return nil +} + +func (x *MergeRequest) GetCommitterDate() int64 { + if x != nil { + return x.CommitterDate + } + return 0 +} + +func (x *MergeRequest) GetRefType() RefType { + if x != nil { + return x.RefType + } + return RefType_Undefined +} + +func (x *MergeRequest) GetRefName() string { + if x != nil { + return x.RefName + } + return "" +} + +func (x *MergeRequest) GetHeadExpectedSha() string { + if x != nil { + return x.HeadExpectedSha + } + return "" +} + +func (x *MergeRequest) GetForce() bool { + if x != nil { + return x.Force + } + return false +} + +func (x *MergeRequest) GetDeleteHeadBranch() bool { + if x != nil { + return x.DeleteHeadBranch + } + return false +} + +func (x *MergeRequest) GetMethod() MergeRequest_MergeMethod { + if x != nil { + return x.Method + } + return MergeRequest_merge +} + +type MergeResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // base_sha is the sha of the latest commit on the base branch that was used for merging. + BaseSha string `protobuf:"bytes,1,opt,name=base_sha,json=baseSha,proto3" json:"base_sha,omitempty"` + // head_sha is the sha of the latest commit on the head branch that was used for merging. + HeadSha string `protobuf:"bytes,2,opt,name=head_sha,json=headSha,proto3" json:"head_sha,omitempty"` + // merge_base_sha is the sha of the merge base of the head_sha and base_sha + MergeBaseSha string `protobuf:"bytes,3,opt,name=merge_base_sha,json=mergeBaseSha,proto3" json:"merge_base_sha,omitempty"` + // merge_sha is the sha of the commit after merging head_sha with base_sha. + MergeSha string `protobuf:"bytes,4,opt,name=merge_sha,json=mergeSha,proto3" json:"merge_sha,omitempty"` +} + +func (x *MergeResponse) Reset() { + *x = MergeResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_merge_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *MergeResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*MergeResponse) ProtoMessage() {} + +func (x *MergeResponse) ProtoReflect() protoreflect.Message { + mi := &file_merge_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use MergeResponse.ProtoReflect.Descriptor instead. 
+func (*MergeResponse) Descriptor() ([]byte, []int) { + return file_merge_proto_rawDescGZIP(), []int{1} +} + +func (x *MergeResponse) GetBaseSha() string { + if x != nil { + return x.BaseSha + } + return "" +} + +func (x *MergeResponse) GetHeadSha() string { + if x != nil { + return x.HeadSha + } + return "" +} + +func (x *MergeResponse) GetMergeBaseSha() string { + if x != nil { + return x.MergeBaseSha + } + return "" +} + +func (x *MergeResponse) GetMergeSha() string { + if x != nil { + return x.MergeSha + } + return "" +} + +// MergeConflictError is an error returned in the case when merging two commits +// fails due to a merge conflict. +type MergeConflictError struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // ConflictingFiles is the set of files which have been conflicting. + ConflictingFiles []string `protobuf:"bytes,1,rep,name=conflicting_files,json=conflictingFiles,proto3" json:"conflicting_files,omitempty"` +} + +func (x *MergeConflictError) Reset() { + *x = MergeConflictError{} + if protoimpl.UnsafeEnabled { + mi := &file_merge_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *MergeConflictError) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*MergeConflictError) ProtoMessage() {} + +func (x *MergeConflictError) ProtoReflect() protoreflect.Message { + mi := &file_merge_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use MergeConflictError.ProtoReflect.Descriptor instead. +func (*MergeConflictError) Descriptor() ([]byte, []int) { + return file_merge_proto_rawDescGZIP(), []int{2} +} + +func (x *MergeConflictError) GetConflictingFiles() []string { + if x != nil { + return x.ConflictingFiles + } + return nil +} + +var File_merge_proto protoreflect.FileDescriptor + +var file_merge_proto_rawDesc = []byte{ + 0x0a, 0x0b, 0x6d, 0x65, 0x72, 0x67, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x03, 0x72, + 0x70, 0x63, 0x1a, 0x0c, 0x73, 0x68, 0x61, 0x72, 0x65, 0x64, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x22, 0xde, 0x04, 0x0a, 0x0c, 0x4d, 0x65, 0x72, 0x67, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x12, 0x25, 0x0a, 0x04, 0x62, 0x61, 0x73, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x11, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x57, 0x72, 0x69, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x52, 0x04, 0x62, 0x61, 0x73, 0x65, 0x12, 0x1f, 0x0a, 0x0b, 0x68, 0x65, 0x61, 0x64, + 0x5f, 0x62, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x68, + 0x65, 0x61, 0x64, 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x12, 0x1f, 0x0a, 0x0b, 0x62, 0x61, 0x73, + 0x65, 0x5f, 0x62, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, + 0x62, 0x61, 0x73, 0x65, 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x12, 0x14, 0x0a, 0x05, 0x74, 0x69, + 0x74, 0x6c, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x74, 0x69, 0x74, 0x6c, 0x65, + 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x25, 0x0a, 0x06, 0x61, 0x75, + 0x74, 0x68, 0x6f, 0x72, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0d, 0x2e, 0x72, 0x70, 0x63, + 0x2e, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x52, 0x06, 0x61, 0x75, 
0x74, 0x68, 0x6f, + 0x72, 0x12, 0x1e, 0x0a, 0x0a, 0x61, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x44, 0x61, 0x74, 0x65, 0x18, + 0x07, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0a, 0x61, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x44, 0x61, 0x74, + 0x65, 0x12, 0x2b, 0x0a, 0x09, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x72, 0x18, 0x08, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0d, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x49, 0x64, 0x65, 0x6e, 0x74, + 0x69, 0x74, 0x79, 0x52, 0x09, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x72, 0x12, 0x24, + 0x0a, 0x0d, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x72, 0x44, 0x61, 0x74, 0x65, 0x18, + 0x09, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0d, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x72, + 0x44, 0x61, 0x74, 0x65, 0x12, 0x27, 0x0a, 0x08, 0x72, 0x65, 0x66, 0x5f, 0x74, 0x79, 0x70, 0x65, + 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x0c, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x52, 0x65, 0x66, + 0x54, 0x79, 0x70, 0x65, 0x52, 0x07, 0x72, 0x65, 0x66, 0x54, 0x79, 0x70, 0x65, 0x12, 0x19, 0x0a, + 0x08, 0x72, 0x65, 0x66, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x07, 0x72, 0x65, 0x66, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x2a, 0x0a, 0x11, 0x68, 0x65, 0x61, 0x64, + 0x5f, 0x65, 0x78, 0x70, 0x65, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x73, 0x68, 0x61, 0x18, 0x0c, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x0f, 0x68, 0x65, 0x61, 0x64, 0x45, 0x78, 0x70, 0x65, 0x63, 0x74, 0x65, + 0x64, 0x53, 0x68, 0x61, 0x12, 0x14, 0x0a, 0x05, 0x66, 0x6f, 0x72, 0x63, 0x65, 0x18, 0x0d, 0x20, + 0x01, 0x28, 0x08, 0x52, 0x05, 0x66, 0x6f, 0x72, 0x63, 0x65, 0x12, 0x2c, 0x0a, 0x12, 0x64, 0x65, + 0x6c, 0x65, 0x74, 0x65, 0x5f, 0x68, 0x65, 0x61, 0x64, 0x5f, 0x62, 0x72, 0x61, 0x6e, 0x63, 0x68, + 0x18, 0x0e, 0x20, 0x01, 0x28, 0x08, 0x52, 0x10, 0x64, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x48, 0x65, + 0x61, 0x64, 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x12, 0x35, 0x0a, 0x06, 0x6d, 0x65, 0x74, 0x68, + 0x6f, 0x64, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1d, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x4d, + 0x65, 0x72, 0x67, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x4d, 0x65, 0x72, 0x67, + 0x65, 0x4d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x52, 0x06, 0x6d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x22, + 0x30, 0x0a, 0x0b, 0x4d, 0x65, 0x72, 0x67, 0x65, 0x4d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x12, 0x09, + 0x0a, 0x05, 0x6d, 0x65, 0x72, 0x67, 0x65, 0x10, 0x00, 0x12, 0x0a, 0x0a, 0x06, 0x73, 0x71, 0x75, + 0x61, 0x73, 0x68, 0x10, 0x01, 0x12, 0x0a, 0x0a, 0x06, 0x72, 0x65, 0x62, 0x61, 0x73, 0x65, 0x10, + 0x02, 0x22, 0x88, 0x01, 0x0a, 0x0d, 0x4d, 0x65, 0x72, 0x67, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x12, 0x19, 0x0a, 0x08, 0x62, 0x61, 0x73, 0x65, 0x5f, 0x73, 0x68, 0x61, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x62, 0x61, 0x73, 0x65, 0x53, 0x68, 0x61, 0x12, 0x19, + 0x0a, 0x08, 0x68, 0x65, 0x61, 0x64, 0x5f, 0x73, 0x68, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x07, 0x68, 0x65, 0x61, 0x64, 0x53, 0x68, 0x61, 0x12, 0x24, 0x0a, 0x0e, 0x6d, 0x65, 0x72, + 0x67, 0x65, 0x5f, 0x62, 0x61, 0x73, 0x65, 0x5f, 0x73, 0x68, 0x61, 0x18, 0x03, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x0c, 0x6d, 0x65, 0x72, 0x67, 0x65, 0x42, 0x61, 0x73, 0x65, 0x53, 0x68, 0x61, 0x12, + 0x1b, 0x0a, 0x09, 0x6d, 0x65, 0x72, 0x67, 0x65, 0x5f, 0x73, 0x68, 0x61, 0x18, 0x04, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x08, 0x6d, 0x65, 0x72, 0x67, 0x65, 0x53, 0x68, 0x61, 0x22, 0x41, 0x0a, 0x12, + 0x4d, 0x65, 0x72, 0x67, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x6c, 0x69, 0x63, 0x74, 0x45, 0x72, 0x72, + 0x6f, 0x72, 0x12, 0x2b, 0x0a, 0x11, 0x63, 0x6f, 0x6e, 0x66, 0x6c, 0x69, 0x63, 0x74, 0x69, 0x6e, + 0x67, 
0x5f, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x10, 0x63, + 0x6f, 0x6e, 0x66, 0x6c, 0x69, 0x63, 0x74, 0x69, 0x6e, 0x67, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x32, + 0x40, 0x0a, 0x0c, 0x4d, 0x65, 0x72, 0x67, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, + 0x30, 0x0a, 0x05, 0x4d, 0x65, 0x72, 0x67, 0x65, 0x12, 0x11, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x4d, + 0x65, 0x72, 0x67, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x12, 0x2e, 0x72, 0x70, + 0x63, 0x2e, 0x4d, 0x65, 0x72, 0x67, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, + 0x00, 0x42, 0x27, 0x5a, 0x25, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, + 0x68, 0x61, 0x72, 0x6e, 0x65, 0x73, 0x73, 0x2f, 0x67, 0x69, 0x74, 0x6e, 0x65, 0x73, 0x73, 0x2f, + 0x67, 0x69, 0x74, 0x72, 0x70, 0x63, 0x2f, 0x72, 0x70, 0x63, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x33, +} + +var ( + file_merge_proto_rawDescOnce sync.Once + file_merge_proto_rawDescData = file_merge_proto_rawDesc +) + +func file_merge_proto_rawDescGZIP() []byte { + file_merge_proto_rawDescOnce.Do(func() { + file_merge_proto_rawDescData = protoimpl.X.CompressGZIP(file_merge_proto_rawDescData) + }) + return file_merge_proto_rawDescData +} + +var file_merge_proto_enumTypes = make([]protoimpl.EnumInfo, 1) +var file_merge_proto_msgTypes = make([]protoimpl.MessageInfo, 3) +var file_merge_proto_goTypes = []interface{}{ + (MergeRequest_MergeMethod)(0), // 0: rpc.MergeRequest.MergeMethod + (*MergeRequest)(nil), // 1: rpc.MergeRequest + (*MergeResponse)(nil), // 2: rpc.MergeResponse + (*MergeConflictError)(nil), // 3: rpc.MergeConflictError + (*WriteRequest)(nil), // 4: rpc.WriteRequest + (*Identity)(nil), // 5: rpc.Identity + (RefType)(0), // 6: rpc.RefType +} +var file_merge_proto_depIdxs = []int32{ + 4, // 0: rpc.MergeRequest.base:type_name -> rpc.WriteRequest + 5, // 1: rpc.MergeRequest.author:type_name -> rpc.Identity + 5, // 2: rpc.MergeRequest.committer:type_name -> rpc.Identity + 6, // 3: rpc.MergeRequest.ref_type:type_name -> rpc.RefType + 0, // 4: rpc.MergeRequest.method:type_name -> rpc.MergeRequest.MergeMethod + 1, // 5: rpc.MergeService.Merge:input_type -> rpc.MergeRequest + 2, // 6: rpc.MergeService.Merge:output_type -> rpc.MergeResponse + 6, // [6:7] is the sub-list for method output_type + 5, // [5:6] is the sub-list for method input_type + 5, // [5:5] is the sub-list for extension type_name + 5, // [5:5] is the sub-list for extension extendee + 0, // [0:5] is the sub-list for field type_name +} + +func init() { file_merge_proto_init() } +func file_merge_proto_init() { + if File_merge_proto != nil { + return + } + file_shared_proto_init() + if !protoimpl.UnsafeEnabled { + file_merge_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*MergeRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_merge_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*MergeResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_merge_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*MergeConflictError); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: 
reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_merge_proto_rawDesc, + NumEnums: 1, + NumMessages: 3, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_merge_proto_goTypes, + DependencyIndexes: file_merge_proto_depIdxs, + EnumInfos: file_merge_proto_enumTypes, + MessageInfos: file_merge_proto_msgTypes, + }.Build() + File_merge_proto = out.File + file_merge_proto_rawDesc = nil + file_merge_proto_goTypes = nil + file_merge_proto_depIdxs = nil +} diff --git a/gitrpc/rpc/merge_grpc.pb.go b/gitrpc/rpc/merge_grpc.pb.go new file mode 100644 index 0000000000..c6eae63758 --- /dev/null +++ b/gitrpc/rpc/merge_grpc.pb.go @@ -0,0 +1,105 @@ +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.2.0 +// - protoc v3.21.11 +// source: merge.proto + +package rpc + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.32.0 or later. +const _ = grpc.SupportPackageIsVersion7 + +// MergeServiceClient is the client API for MergeService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type MergeServiceClient interface { + Merge(ctx context.Context, in *MergeRequest, opts ...grpc.CallOption) (*MergeResponse, error) +} + +type mergeServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewMergeServiceClient(cc grpc.ClientConnInterface) MergeServiceClient { + return &mergeServiceClient{cc} +} + +func (c *mergeServiceClient) Merge(ctx context.Context, in *MergeRequest, opts ...grpc.CallOption) (*MergeResponse, error) { + out := new(MergeResponse) + err := c.cc.Invoke(ctx, "/rpc.MergeService/Merge", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// MergeServiceServer is the server API for MergeService service. +// All implementations must embed UnimplementedMergeServiceServer +// for forward compatibility +type MergeServiceServer interface { + Merge(context.Context, *MergeRequest) (*MergeResponse, error) + mustEmbedUnimplementedMergeServiceServer() +} + +// UnimplementedMergeServiceServer must be embedded to have forward compatible implementations. +type UnimplementedMergeServiceServer struct { +} + +func (UnimplementedMergeServiceServer) Merge(context.Context, *MergeRequest) (*MergeResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method Merge not implemented") +} +func (UnimplementedMergeServiceServer) mustEmbedUnimplementedMergeServiceServer() {} + +// UnsafeMergeServiceServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to MergeServiceServer will +// result in compilation errors. 
+type UnsafeMergeServiceServer interface { + mustEmbedUnimplementedMergeServiceServer() +} + +func RegisterMergeServiceServer(s grpc.ServiceRegistrar, srv MergeServiceServer) { + s.RegisterService(&MergeService_ServiceDesc, srv) +} + +func _MergeService_Merge_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MergeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MergeServiceServer).Merge(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/rpc.MergeService/Merge", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MergeServiceServer).Merge(ctx, req.(*MergeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +// MergeService_ServiceDesc is the grpc.ServiceDesc for MergeService service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var MergeService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "rpc.MergeService", + HandlerType: (*MergeServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Merge", + Handler: _MergeService_Merge_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "merge.proto", +} diff --git a/gitrpc/rpc/operations.pb.go b/gitrpc/rpc/operations.pb.go new file mode 100644 index 0000000000..b526563df1 --- /dev/null +++ b/gitrpc/rpc/operations.pb.go @@ -0,0 +1,688 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.28.1 +// protoc v3.21.11 +// source: operations.proto + +package rpc + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type CommitFilesActionHeader_ActionType int32 + +const ( + // CREATE creates a new file. + CommitFilesActionHeader_CREATE CommitFilesActionHeader_ActionType = 0 + // UPDATE updates an existing file. + CommitFilesActionHeader_UPDATE CommitFilesActionHeader_ActionType = 1 + // DELETE deletes an existing file or dir. + CommitFilesActionHeader_DELETE CommitFilesActionHeader_ActionType = 2 + // MOVE moves existing file to another dir. + CommitFilesActionHeader_MOVE CommitFilesActionHeader_ActionType = 3 +) + +// Enum value maps for CommitFilesActionHeader_ActionType. 
+var ( + CommitFilesActionHeader_ActionType_name = map[int32]string{ + 0: "CREATE", + 1: "UPDATE", + 2: "DELETE", + 3: "MOVE", + } + CommitFilesActionHeader_ActionType_value = map[string]int32{ + "CREATE": 0, + "UPDATE": 1, + "DELETE": 2, + "MOVE": 3, + } +) + +func (x CommitFilesActionHeader_ActionType) Enum() *CommitFilesActionHeader_ActionType { + p := new(CommitFilesActionHeader_ActionType) + *p = x + return p +} + +func (x CommitFilesActionHeader_ActionType) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (CommitFilesActionHeader_ActionType) Descriptor() protoreflect.EnumDescriptor { + return file_operations_proto_enumTypes[0].Descriptor() +} + +func (CommitFilesActionHeader_ActionType) Type() protoreflect.EnumType { + return &file_operations_proto_enumTypes[0] +} + +func (x CommitFilesActionHeader_ActionType) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use CommitFilesActionHeader_ActionType.Descriptor instead. +func (CommitFilesActionHeader_ActionType) EnumDescriptor() ([]byte, []int) { + return file_operations_proto_rawDescGZIP(), []int{1, 0} +} + +// CommitFilesRequestHeader is the header of the UserCommitFiles that defines the commit details, +// parent and other information related to the call. +type CommitFilesRequestHeader struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Base *WriteRequest `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"` + BranchName string `protobuf:"bytes,2,opt,name=branch_name,json=branchName,proto3" json:"branch_name,omitempty"` + NewBranchName string `protobuf:"bytes,3,opt,name=new_branch_name,json=newBranchName,proto3" json:"new_branch_name,omitempty"` + Title string `protobuf:"bytes,4,opt,name=title,proto3" json:"title,omitempty"` + Message string `protobuf:"bytes,5,opt,name=message,proto3" json:"message,omitempty"` + Author *Identity `protobuf:"bytes,6,opt,name=author,proto3" json:"author,omitempty"` + AuthorDate int64 `protobuf:"varint,7,opt,name=authorDate,proto3" json:"authorDate,omitempty"` + Committer *Identity `protobuf:"bytes,8,opt,name=committer,proto3" json:"committer,omitempty"` + CommitterDate int64 `protobuf:"varint,9,opt,name=committerDate,proto3" json:"committerDate,omitempty"` +} + +func (x *CommitFilesRequestHeader) Reset() { + *x = CommitFilesRequestHeader{} + if protoimpl.UnsafeEnabled { + mi := &file_operations_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CommitFilesRequestHeader) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CommitFilesRequestHeader) ProtoMessage() {} + +func (x *CommitFilesRequestHeader) ProtoReflect() protoreflect.Message { + mi := &file_operations_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CommitFilesRequestHeader.ProtoReflect.Descriptor instead. 
+func (*CommitFilesRequestHeader) Descriptor() ([]byte, []int) { + return file_operations_proto_rawDescGZIP(), []int{0} +} + +func (x *CommitFilesRequestHeader) GetBase() *WriteRequest { + if x != nil { + return x.Base + } + return nil +} + +func (x *CommitFilesRequestHeader) GetBranchName() string { + if x != nil { + return x.BranchName + } + return "" +} + +func (x *CommitFilesRequestHeader) GetNewBranchName() string { + if x != nil { + return x.NewBranchName + } + return "" +} + +func (x *CommitFilesRequestHeader) GetTitle() string { + if x != nil { + return x.Title + } + return "" +} + +func (x *CommitFilesRequestHeader) GetMessage() string { + if x != nil { + return x.Message + } + return "" +} + +func (x *CommitFilesRequestHeader) GetAuthor() *Identity { + if x != nil { + return x.Author + } + return nil +} + +func (x *CommitFilesRequestHeader) GetAuthorDate() int64 { + if x != nil { + return x.AuthorDate + } + return 0 +} + +func (x *CommitFilesRequestHeader) GetCommitter() *Identity { + if x != nil { + return x.Committer + } + return nil +} + +func (x *CommitFilesRequestHeader) GetCommitterDate() int64 { + if x != nil { + return x.CommitterDate + } + return 0 +} + +// CommitFilesActionHeader contains the details of the action to be performed. +type CommitFilesActionHeader struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // action is the type of the action taken to build a commit. Not all fields are + // used for all of the actions. + Action CommitFilesActionHeader_ActionType `protobuf:"varint,1,opt,name=action,proto3,enum=rpc.CommitFilesActionHeader_ActionType" json:"action,omitempty"` + // path refers to the file or directory being modified. + Path string `protobuf:"bytes,2,opt,name=path,proto3" json:"path,omitempty"` + Sha string `protobuf:"bytes,3,opt,name=sha,proto3" json:"sha,omitempty"` +} + +func (x *CommitFilesActionHeader) Reset() { + *x = CommitFilesActionHeader{} + if protoimpl.UnsafeEnabled { + mi := &file_operations_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CommitFilesActionHeader) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CommitFilesActionHeader) ProtoMessage() {} + +func (x *CommitFilesActionHeader) ProtoReflect() protoreflect.Message { + mi := &file_operations_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CommitFilesActionHeader.ProtoReflect.Descriptor instead. +func (*CommitFilesActionHeader) Descriptor() ([]byte, []int) { + return file_operations_proto_rawDescGZIP(), []int{1} +} + +func (x *CommitFilesActionHeader) GetAction() CommitFilesActionHeader_ActionType { + if x != nil { + return x.Action + } + return CommitFilesActionHeader_CREATE +} + +func (x *CommitFilesActionHeader) GetPath() string { + if x != nil { + return x.Path + } + return "" +} + +func (x *CommitFilesActionHeader) GetSha() string { + if x != nil { + return x.Sha + } + return "" +} + +// CommitFilesAction is the request message used to stream in the actions to build a commit. 
+type CommitFilesAction struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Types that are assignable to Payload: + // *CommitFilesAction_Header + // *CommitFilesAction_Content + Payload isCommitFilesAction_Payload `protobuf_oneof:"payload"` +} + +func (x *CommitFilesAction) Reset() { + *x = CommitFilesAction{} + if protoimpl.UnsafeEnabled { + mi := &file_operations_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CommitFilesAction) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CommitFilesAction) ProtoMessage() {} + +func (x *CommitFilesAction) ProtoReflect() protoreflect.Message { + mi := &file_operations_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CommitFilesAction.ProtoReflect.Descriptor instead. +func (*CommitFilesAction) Descriptor() ([]byte, []int) { + return file_operations_proto_rawDescGZIP(), []int{2} +} + +func (m *CommitFilesAction) GetPayload() isCommitFilesAction_Payload { + if m != nil { + return m.Payload + } + return nil +} + +func (x *CommitFilesAction) GetHeader() *CommitFilesActionHeader { + if x, ok := x.GetPayload().(*CommitFilesAction_Header); ok { + return x.Header + } + return nil +} + +func (x *CommitFilesAction) GetContent() []byte { + if x, ok := x.GetPayload().(*CommitFilesAction_Content); ok { + return x.Content + } + return nil +} + +type isCommitFilesAction_Payload interface { + isCommitFilesAction_Payload() +} + +type CommitFilesAction_Header struct { + // header contains the details of action being performed. Header must be sent before the + // file if file is used by the action. + Header *CommitFilesActionHeader `protobuf:"bytes,1,opt,name=header,proto3,oneof"` +} + +type CommitFilesAction_Content struct { + // not used for DELETE action. + Content []byte `protobuf:"bytes,2,opt,name=content,proto3,oneof"` +} + +func (*CommitFilesAction_Header) isCommitFilesAction_Payload() {} + +func (*CommitFilesAction_Content) isCommitFilesAction_Payload() {} + +type CommitFilesRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Types that are assignable to Payload: + // *CommitFilesRequest_Header + // *CommitFilesRequest_Action + Payload isCommitFilesRequest_Payload `protobuf_oneof:"payload"` +} + +func (x *CommitFilesRequest) Reset() { + *x = CommitFilesRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_operations_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CommitFilesRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CommitFilesRequest) ProtoMessage() {} + +func (x *CommitFilesRequest) ProtoReflect() protoreflect.Message { + mi := &file_operations_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CommitFilesRequest.ProtoReflect.Descriptor instead. 
+func (*CommitFilesRequest) Descriptor() ([]byte, []int) { + return file_operations_proto_rawDescGZIP(), []int{3} +} + +func (m *CommitFilesRequest) GetPayload() isCommitFilesRequest_Payload { + if m != nil { + return m.Payload + } + return nil +} + +func (x *CommitFilesRequest) GetHeader() *CommitFilesRequestHeader { + if x, ok := x.GetPayload().(*CommitFilesRequest_Header); ok { + return x.Header + } + return nil +} + +func (x *CommitFilesRequest) GetAction() *CommitFilesAction { + if x, ok := x.GetPayload().(*CommitFilesRequest_Action); ok { + return x.Action + } + return nil +} + +type isCommitFilesRequest_Payload interface { + isCommitFilesRequest_Payload() +} + +type CommitFilesRequest_Header struct { + // header defines the details of where to commit, the details and which commit to use as the parent. + // header must always be sent as the first request of the stream. + Header *CommitFilesRequestHeader `protobuf:"bytes,1,opt,name=header,proto3,oneof"` +} + +type CommitFilesRequest_Action struct { + // action contains an action to build a commit. There can be multiple actions per stream. + Action *CommitFilesAction `protobuf:"bytes,2,opt,name=action,proto3,oneof"` +} + +func (*CommitFilesRequest_Header) isCommitFilesRequest_Payload() {} + +func (*CommitFilesRequest_Action) isCommitFilesRequest_Payload() {} + +type CommitFilesResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + CommitId string `protobuf:"bytes,1,opt,name=commit_id,json=commitId,proto3" json:"commit_id,omitempty"` + Branch string `protobuf:"bytes,2,opt,name=branch,proto3" json:"branch,omitempty"` +} + +func (x *CommitFilesResponse) Reset() { + *x = CommitFilesResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_operations_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CommitFilesResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CommitFilesResponse) ProtoMessage() {} + +func (x *CommitFilesResponse) ProtoReflect() protoreflect.Message { + mi := &file_operations_proto_msgTypes[4] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CommitFilesResponse.ProtoReflect.Descriptor instead. 
+func (*CommitFilesResponse) Descriptor() ([]byte, []int) { + return file_operations_proto_rawDescGZIP(), []int{4} +} + +func (x *CommitFilesResponse) GetCommitId() string { + if x != nil { + return x.CommitId + } + return "" +} + +func (x *CommitFilesResponse) GetBranch() string { + if x != nil { + return x.Branch + } + return "" +} + +var File_operations_proto protoreflect.FileDescriptor + +var file_operations_proto_rawDesc = []byte{ + 0x0a, 0x10, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x12, 0x03, 0x72, 0x70, 0x63, 0x1a, 0x0c, 0x73, 0x68, 0x61, 0x72, 0x65, 0x64, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0xd4, 0x02, 0x0a, 0x18, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, + 0x46, 0x69, 0x6c, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x65, 0x61, 0x64, + 0x65, 0x72, 0x12, 0x25, 0x0a, 0x04, 0x62, 0x61, 0x73, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x11, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x57, 0x72, 0x69, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x52, 0x04, 0x62, 0x61, 0x73, 0x65, 0x12, 0x1f, 0x0a, 0x0b, 0x62, 0x72, 0x61, + 0x6e, 0x63, 0x68, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, + 0x62, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x26, 0x0a, 0x0f, 0x6e, 0x65, + 0x77, 0x5f, 0x62, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x0d, 0x6e, 0x65, 0x77, 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x4e, 0x61, + 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x74, 0x69, 0x74, 0x6c, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x05, 0x74, 0x69, 0x74, 0x6c, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, + 0x61, 0x67, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, + 0x67, 0x65, 0x12, 0x25, 0x0a, 0x06, 0x61, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x18, 0x06, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x0d, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x74, + 0x79, 0x52, 0x06, 0x61, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x12, 0x1e, 0x0a, 0x0a, 0x61, 0x75, 0x74, + 0x68, 0x6f, 0x72, 0x44, 0x61, 0x74, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0a, 0x61, + 0x75, 0x74, 0x68, 0x6f, 0x72, 0x44, 0x61, 0x74, 0x65, 0x12, 0x2b, 0x0a, 0x09, 0x63, 0x6f, 0x6d, + 0x6d, 0x69, 0x74, 0x74, 0x65, 0x72, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0d, 0x2e, 0x72, + 0x70, 0x63, 0x2e, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x52, 0x09, 0x63, 0x6f, 0x6d, + 0x6d, 0x69, 0x74, 0x74, 0x65, 0x72, 0x12, 0x24, 0x0a, 0x0d, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, + 0x74, 0x65, 0x72, 0x44, 0x61, 0x74, 0x65, 0x18, 0x09, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0d, 0x63, + 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x72, 0x44, 0x61, 0x74, 0x65, 0x22, 0xbc, 0x01, 0x0a, + 0x17, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x41, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x12, 0x3f, 0x0a, 0x06, 0x61, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x27, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x43, + 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x2e, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x79, 0x70, + 0x65, 0x52, 0x06, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74, + 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x10, 0x0a, + 0x03, 0x73, 0x68, 0x61, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 
0x73, 0x68, 0x61, 0x22, + 0x3a, 0x0a, 0x0a, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x79, 0x70, 0x65, 0x12, 0x0a, 0x0a, + 0x06, 0x43, 0x52, 0x45, 0x41, 0x54, 0x45, 0x10, 0x00, 0x12, 0x0a, 0x0a, 0x06, 0x55, 0x50, 0x44, + 0x41, 0x54, 0x45, 0x10, 0x01, 0x12, 0x0a, 0x0a, 0x06, 0x44, 0x45, 0x4c, 0x45, 0x54, 0x45, 0x10, + 0x02, 0x12, 0x08, 0x0a, 0x04, 0x4d, 0x4f, 0x56, 0x45, 0x10, 0x03, 0x22, 0x72, 0x0a, 0x11, 0x43, + 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x12, 0x36, 0x0a, 0x06, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x1c, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x46, 0x69, 0x6c, + 0x65, 0x73, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x48, 0x00, + 0x52, 0x06, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x12, 0x1a, 0x0a, 0x07, 0x63, 0x6f, 0x6e, 0x74, + 0x65, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x48, 0x00, 0x52, 0x07, 0x63, 0x6f, 0x6e, + 0x74, 0x65, 0x6e, 0x74, 0x42, 0x09, 0x0a, 0x07, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x22, + 0x8a, 0x01, 0x0a, 0x12, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x37, 0x0a, 0x06, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x43, 0x6f, 0x6d, + 0x6d, 0x69, 0x74, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, + 0x65, 0x61, 0x64, 0x65, 0x72, 0x48, 0x00, 0x52, 0x06, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x12, + 0x30, 0x0a, 0x06, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x16, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x46, 0x69, 0x6c, 0x65, + 0x73, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x06, 0x61, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x42, 0x09, 0x0a, 0x07, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x22, 0x4a, 0x0a, 0x13, + 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x5f, 0x69, 0x64, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x49, 0x64, + 0x12, 0x16, 0x0a, 0x06, 0x62, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x06, 0x62, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x32, 0x58, 0x0a, 0x12, 0x43, 0x6f, 0x6d, 0x6d, + 0x69, 0x74, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x42, + 0x0a, 0x0b, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x12, 0x17, 0x2e, + 0x72, 0x70, 0x63, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x18, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x43, 0x6f, 0x6d, + 0x6d, 0x69, 0x74, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x28, 0x01, 0x42, 0x27, 0x5a, 0x25, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, + 0x2f, 0x68, 0x61, 0x72, 0x6e, 0x65, 0x73, 0x73, 0x2f, 0x67, 0x69, 0x74, 0x6e, 0x65, 0x73, 0x73, + 0x2f, 0x67, 0x69, 0x74, 0x72, 0x70, 0x63, 0x2f, 0x72, 0x70, 0x63, 0x62, 0x06, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x33, +} + +var ( + file_operations_proto_rawDescOnce sync.Once + file_operations_proto_rawDescData = file_operations_proto_rawDesc +) + +func file_operations_proto_rawDescGZIP() []byte { + file_operations_proto_rawDescOnce.Do(func() { + file_operations_proto_rawDescData = 
protoimpl.X.CompressGZIP(file_operations_proto_rawDescData) + }) + return file_operations_proto_rawDescData +} + +var file_operations_proto_enumTypes = make([]protoimpl.EnumInfo, 1) +var file_operations_proto_msgTypes = make([]protoimpl.MessageInfo, 5) +var file_operations_proto_goTypes = []interface{}{ + (CommitFilesActionHeader_ActionType)(0), // 0: rpc.CommitFilesActionHeader.ActionType + (*CommitFilesRequestHeader)(nil), // 1: rpc.CommitFilesRequestHeader + (*CommitFilesActionHeader)(nil), // 2: rpc.CommitFilesActionHeader + (*CommitFilesAction)(nil), // 3: rpc.CommitFilesAction + (*CommitFilesRequest)(nil), // 4: rpc.CommitFilesRequest + (*CommitFilesResponse)(nil), // 5: rpc.CommitFilesResponse + (*WriteRequest)(nil), // 6: rpc.WriteRequest + (*Identity)(nil), // 7: rpc.Identity +} +var file_operations_proto_depIdxs = []int32{ + 6, // 0: rpc.CommitFilesRequestHeader.base:type_name -> rpc.WriteRequest + 7, // 1: rpc.CommitFilesRequestHeader.author:type_name -> rpc.Identity + 7, // 2: rpc.CommitFilesRequestHeader.committer:type_name -> rpc.Identity + 0, // 3: rpc.CommitFilesActionHeader.action:type_name -> rpc.CommitFilesActionHeader.ActionType + 2, // 4: rpc.CommitFilesAction.header:type_name -> rpc.CommitFilesActionHeader + 1, // 5: rpc.CommitFilesRequest.header:type_name -> rpc.CommitFilesRequestHeader + 3, // 6: rpc.CommitFilesRequest.action:type_name -> rpc.CommitFilesAction + 4, // 7: rpc.CommitFilesService.CommitFiles:input_type -> rpc.CommitFilesRequest + 5, // 8: rpc.CommitFilesService.CommitFiles:output_type -> rpc.CommitFilesResponse + 8, // [8:9] is the sub-list for method output_type + 7, // [7:8] is the sub-list for method input_type + 7, // [7:7] is the sub-list for extension type_name + 7, // [7:7] is the sub-list for extension extendee + 0, // [0:7] is the sub-list for field type_name +} + +func init() { file_operations_proto_init() } +func file_operations_proto_init() { + if File_operations_proto != nil { + return + } + file_shared_proto_init() + if !protoimpl.UnsafeEnabled { + file_operations_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CommitFilesRequestHeader); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_operations_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CommitFilesActionHeader); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_operations_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CommitFilesAction); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_operations_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CommitFilesRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_operations_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CommitFilesResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + file_operations_proto_msgTypes[2].OneofWrappers = []interface{}{ + (*CommitFilesAction_Header)(nil), + (*CommitFilesAction_Content)(nil), + } + file_operations_proto_msgTypes[3].OneofWrappers = []interface{}{ 
+ (*CommitFilesRequest_Header)(nil), + (*CommitFilesRequest_Action)(nil), + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_operations_proto_rawDesc, + NumEnums: 1, + NumMessages: 5, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_operations_proto_goTypes, + DependencyIndexes: file_operations_proto_depIdxs, + EnumInfos: file_operations_proto_enumTypes, + MessageInfos: file_operations_proto_msgTypes, + }.Build() + File_operations_proto = out.File + file_operations_proto_rawDesc = nil + file_operations_proto_goTypes = nil + file_operations_proto_depIdxs = nil +} diff --git a/gitrpc/rpc/operations_grpc.pb.go b/gitrpc/rpc/operations_grpc.pb.go new file mode 100644 index 0000000000..f46723d9c3 --- /dev/null +++ b/gitrpc/rpc/operations_grpc.pb.go @@ -0,0 +1,139 @@ +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.2.0 +// - protoc v3.21.11 +// source: operations.proto + +package rpc + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.32.0 or later. +const _ = grpc.SupportPackageIsVersion7 + +// CommitFilesServiceClient is the client API for CommitFilesService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type CommitFilesServiceClient interface { + CommitFiles(ctx context.Context, opts ...grpc.CallOption) (CommitFilesService_CommitFilesClient, error) +} + +type commitFilesServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewCommitFilesServiceClient(cc grpc.ClientConnInterface) CommitFilesServiceClient { + return &commitFilesServiceClient{cc} +} + +func (c *commitFilesServiceClient) CommitFiles(ctx context.Context, opts ...grpc.CallOption) (CommitFilesService_CommitFilesClient, error) { + stream, err := c.cc.NewStream(ctx, &CommitFilesService_ServiceDesc.Streams[0], "/rpc.CommitFilesService/CommitFiles", opts...) + if err != nil { + return nil, err + } + x := &commitFilesServiceCommitFilesClient{stream} + return x, nil +} + +type CommitFilesService_CommitFilesClient interface { + Send(*CommitFilesRequest) error + CloseAndRecv() (*CommitFilesResponse, error) + grpc.ClientStream +} + +type commitFilesServiceCommitFilesClient struct { + grpc.ClientStream +} + +func (x *commitFilesServiceCommitFilesClient) Send(m *CommitFilesRequest) error { + return x.ClientStream.SendMsg(m) +} + +func (x *commitFilesServiceCommitFilesClient) CloseAndRecv() (*CommitFilesResponse, error) { + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + m := new(CommitFilesResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +// CommitFilesServiceServer is the server API for CommitFilesService service. +// All implementations must embed UnimplementedCommitFilesServiceServer +// for forward compatibility +type CommitFilesServiceServer interface { + CommitFiles(CommitFilesService_CommitFilesServer) error + mustEmbedUnimplementedCommitFilesServiceServer() +} + +// UnimplementedCommitFilesServiceServer must be embedded to have forward compatible implementations. 
+type UnimplementedCommitFilesServiceServer struct { +} + +func (UnimplementedCommitFilesServiceServer) CommitFiles(CommitFilesService_CommitFilesServer) error { + return status.Errorf(codes.Unimplemented, "method CommitFiles not implemented") +} +func (UnimplementedCommitFilesServiceServer) mustEmbedUnimplementedCommitFilesServiceServer() {} + +// UnsafeCommitFilesServiceServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to CommitFilesServiceServer will +// result in compilation errors. +type UnsafeCommitFilesServiceServer interface { + mustEmbedUnimplementedCommitFilesServiceServer() +} + +func RegisterCommitFilesServiceServer(s grpc.ServiceRegistrar, srv CommitFilesServiceServer) { + s.RegisterService(&CommitFilesService_ServiceDesc, srv) +} + +func _CommitFilesService_CommitFiles_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(CommitFilesServiceServer).CommitFiles(&commitFilesServiceCommitFilesServer{stream}) +} + +type CommitFilesService_CommitFilesServer interface { + SendAndClose(*CommitFilesResponse) error + Recv() (*CommitFilesRequest, error) + grpc.ServerStream +} + +type commitFilesServiceCommitFilesServer struct { + grpc.ServerStream +} + +func (x *commitFilesServiceCommitFilesServer) SendAndClose(m *CommitFilesResponse) error { + return x.ServerStream.SendMsg(m) +} + +func (x *commitFilesServiceCommitFilesServer) Recv() (*CommitFilesRequest, error) { + m := new(CommitFilesRequest) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +// CommitFilesService_ServiceDesc is the grpc.ServiceDesc for CommitFilesService service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var CommitFilesService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "rpc.CommitFilesService", + HandlerType: (*CommitFilesServiceServer)(nil), + Methods: []grpc.MethodDesc{}, + Streams: []grpc.StreamDesc{ + { + StreamName: "CommitFiles", + Handler: _CommitFilesService_CommitFiles_Handler, + ClientStreams: true, + }, + }, + Metadata: "operations.proto", +} diff --git a/gitrpc/rpc/push.pb.go b/gitrpc/rpc/push.pb.go new file mode 100644 index 0000000000..44e46d8d90 --- /dev/null +++ b/gitrpc/rpc/push.pb.go @@ -0,0 +1,227 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.28.1 +// protoc v3.21.11 +// source: push.proto + +package rpc + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. 
+ _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type PushRemoteRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Base *ReadRequest `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"` + RemoteUrl string `protobuf:"bytes,2,opt,name=remote_url,json=remoteUrl,proto3" json:"remote_url,omitempty"` + Timeout int64 `protobuf:"varint,3,opt,name=timeout,proto3" json:"timeout,omitempty"` +} + +func (x *PushRemoteRequest) Reset() { + *x = PushRemoteRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_push_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *PushRemoteRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PushRemoteRequest) ProtoMessage() {} + +func (x *PushRemoteRequest) ProtoReflect() protoreflect.Message { + mi := &file_push_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PushRemoteRequest.ProtoReflect.Descriptor instead. +func (*PushRemoteRequest) Descriptor() ([]byte, []int) { + return file_push_proto_rawDescGZIP(), []int{0} +} + +func (x *PushRemoteRequest) GetBase() *ReadRequest { + if x != nil { + return x.Base + } + return nil +} + +func (x *PushRemoteRequest) GetRemoteUrl() string { + if x != nil { + return x.RemoteUrl + } + return "" +} + +func (x *PushRemoteRequest) GetTimeout() int64 { + if x != nil { + return x.Timeout + } + return 0 +} + +type PushRemoteResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *PushRemoteResponse) Reset() { + *x = PushRemoteResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_push_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *PushRemoteResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PushRemoteResponse) ProtoMessage() {} + +func (x *PushRemoteResponse) ProtoReflect() protoreflect.Message { + mi := &file_push_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PushRemoteResponse.ProtoReflect.Descriptor instead. 
+func (*PushRemoteResponse) Descriptor() ([]byte, []int) { + return file_push_proto_rawDescGZIP(), []int{1} +} + +var File_push_proto protoreflect.FileDescriptor + +var file_push_proto_rawDesc = []byte{ + 0x0a, 0x0a, 0x70, 0x75, 0x73, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x03, 0x72, 0x70, + 0x63, 0x1a, 0x0c, 0x73, 0x68, 0x61, 0x72, 0x65, 0x64, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, + 0x72, 0x0a, 0x11, 0x50, 0x75, 0x73, 0x68, 0x52, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x12, 0x24, 0x0a, 0x04, 0x62, 0x61, 0x73, 0x65, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x52, 0x65, 0x61, 0x64, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x52, 0x04, 0x62, 0x61, 0x73, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x72, 0x65, + 0x6d, 0x6f, 0x74, 0x65, 0x5f, 0x75, 0x72, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, + 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x55, 0x72, 0x6c, 0x12, 0x18, 0x0a, 0x07, 0x74, 0x69, 0x6d, + 0x65, 0x6f, 0x75, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x07, 0x74, 0x69, 0x6d, 0x65, + 0x6f, 0x75, 0x74, 0x22, 0x14, 0x0a, 0x12, 0x50, 0x75, 0x73, 0x68, 0x52, 0x65, 0x6d, 0x6f, 0x74, + 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x32, 0x4c, 0x0a, 0x0b, 0x50, 0x75, 0x73, + 0x68, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x3d, 0x0a, 0x0a, 0x50, 0x75, 0x73, 0x68, + 0x52, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x12, 0x16, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x50, 0x75, 0x73, + 0x68, 0x52, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x17, + 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x52, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, 0x27, 0x5a, 0x25, 0x67, 0x69, 0x74, 0x68, 0x75, + 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x68, 0x61, 0x72, 0x6e, 0x65, 0x73, 0x73, 0x2f, 0x67, 0x69, + 0x74, 0x6e, 0x65, 0x73, 0x73, 0x2f, 0x67, 0x69, 0x74, 0x72, 0x70, 0x63, 0x2f, 0x72, 0x70, 0x63, + 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_push_proto_rawDescOnce sync.Once + file_push_proto_rawDescData = file_push_proto_rawDesc +) + +func file_push_proto_rawDescGZIP() []byte { + file_push_proto_rawDescOnce.Do(func() { + file_push_proto_rawDescData = protoimpl.X.CompressGZIP(file_push_proto_rawDescData) + }) + return file_push_proto_rawDescData +} + +var file_push_proto_msgTypes = make([]protoimpl.MessageInfo, 2) +var file_push_proto_goTypes = []interface{}{ + (*PushRemoteRequest)(nil), // 0: rpc.PushRemoteRequest + (*PushRemoteResponse)(nil), // 1: rpc.PushRemoteResponse + (*ReadRequest)(nil), // 2: rpc.ReadRequest +} +var file_push_proto_depIdxs = []int32{ + 2, // 0: rpc.PushRemoteRequest.base:type_name -> rpc.ReadRequest + 0, // 1: rpc.PushService.PushRemote:input_type -> rpc.PushRemoteRequest + 1, // 2: rpc.PushService.PushRemote:output_type -> rpc.PushRemoteResponse + 2, // [2:3] is the sub-list for method output_type + 1, // [1:2] is the sub-list for method input_type + 1, // [1:1] is the sub-list for extension type_name + 1, // [1:1] is the sub-list for extension extendee + 0, // [0:1] is the sub-list for field type_name +} + +func init() { file_push_proto_init() } +func file_push_proto_init() { + if File_push_proto != nil { + return + } + file_shared_proto_init() + if !protoimpl.UnsafeEnabled { + file_push_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*PushRemoteRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + 
return nil + } + } + file_push_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*PushRemoteResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_push_proto_rawDesc, + NumEnums: 0, + NumMessages: 2, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_push_proto_goTypes, + DependencyIndexes: file_push_proto_depIdxs, + MessageInfos: file_push_proto_msgTypes, + }.Build() + File_push_proto = out.File + file_push_proto_rawDesc = nil + file_push_proto_goTypes = nil + file_push_proto_depIdxs = nil +} diff --git a/gitrpc/rpc/push_grpc.pb.go b/gitrpc/rpc/push_grpc.pb.go new file mode 100644 index 0000000000..f9a469a5ff --- /dev/null +++ b/gitrpc/rpc/push_grpc.pb.go @@ -0,0 +1,105 @@ +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.2.0 +// - protoc v3.21.11 +// source: push.proto + +package rpc + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.32.0 or later. +const _ = grpc.SupportPackageIsVersion7 + +// PushServiceClient is the client API for PushService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type PushServiceClient interface { + PushRemote(ctx context.Context, in *PushRemoteRequest, opts ...grpc.CallOption) (*PushRemoteResponse, error) +} + +type pushServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewPushServiceClient(cc grpc.ClientConnInterface) PushServiceClient { + return &pushServiceClient{cc} +} + +func (c *pushServiceClient) PushRemote(ctx context.Context, in *PushRemoteRequest, opts ...grpc.CallOption) (*PushRemoteResponse, error) { + out := new(PushRemoteResponse) + err := c.cc.Invoke(ctx, "/rpc.PushService/PushRemote", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// PushServiceServer is the server API for PushService service. +// All implementations must embed UnimplementedPushServiceServer +// for forward compatibility +type PushServiceServer interface { + PushRemote(context.Context, *PushRemoteRequest) (*PushRemoteResponse, error) + mustEmbedUnimplementedPushServiceServer() +} + +// UnimplementedPushServiceServer must be embedded to have forward compatible implementations. +type UnimplementedPushServiceServer struct { +} + +func (UnimplementedPushServiceServer) PushRemote(context.Context, *PushRemoteRequest) (*PushRemoteResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method PushRemote not implemented") +} +func (UnimplementedPushServiceServer) mustEmbedUnimplementedPushServiceServer() {} + +// UnsafePushServiceServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to PushServiceServer will +// result in compilation errors. 
+type UnsafePushServiceServer interface { + mustEmbedUnimplementedPushServiceServer() +} + +func RegisterPushServiceServer(s grpc.ServiceRegistrar, srv PushServiceServer) { + s.RegisterService(&PushService_ServiceDesc, srv) +} + +func _PushService_PushRemote_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PushRemoteRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PushServiceServer).PushRemote(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/rpc.PushService/PushRemote", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PushServiceServer).PushRemote(ctx, req.(*PushRemoteRequest)) + } + return interceptor(ctx, in, info, handler) +} + +// PushService_ServiceDesc is the grpc.ServiceDesc for PushService service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var PushService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "rpc.PushService", + HandlerType: (*PushServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "PushRemote", + Handler: _PushService_PushRemote_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "push.proto", +} diff --git a/gitrpc/rpc/ref.pb.go b/gitrpc/rpc/ref.pb.go new file mode 100644 index 0000000000..431785951b --- /dev/null +++ b/gitrpc/rpc/ref.pb.go @@ -0,0 +1,1840 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.28.1 +// protoc v3.21.11 +// source: ref.proto + +package rpc + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type ListBranchesRequest_SortOption int32 + +const ( + ListBranchesRequest_Default ListBranchesRequest_SortOption = 0 + ListBranchesRequest_Name ListBranchesRequest_SortOption = 1 + ListBranchesRequest_Date ListBranchesRequest_SortOption = 2 +) + +// Enum value maps for ListBranchesRequest_SortOption. +var ( + ListBranchesRequest_SortOption_name = map[int32]string{ + 0: "Default", + 1: "Name", + 2: "Date", + } + ListBranchesRequest_SortOption_value = map[string]int32{ + "Default": 0, + "Name": 1, + "Date": 2, + } +) + +func (x ListBranchesRequest_SortOption) Enum() *ListBranchesRequest_SortOption { + p := new(ListBranchesRequest_SortOption) + *p = x + return p +} + +func (x ListBranchesRequest_SortOption) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (ListBranchesRequest_SortOption) Descriptor() protoreflect.EnumDescriptor { + return file_ref_proto_enumTypes[0].Descriptor() +} + +func (ListBranchesRequest_SortOption) Type() protoreflect.EnumType { + return &file_ref_proto_enumTypes[0] +} + +func (x ListBranchesRequest_SortOption) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use ListBranchesRequest_SortOption.Descriptor instead. 
+func (ListBranchesRequest_SortOption) EnumDescriptor() ([]byte, []int) { + return file_ref_proto_rawDescGZIP(), []int{9, 0} +} + +type ListCommitTagsRequest_SortOption int32 + +const ( + ListCommitTagsRequest_Default ListCommitTagsRequest_SortOption = 0 + ListCommitTagsRequest_Name ListCommitTagsRequest_SortOption = 1 + ListCommitTagsRequest_Date ListCommitTagsRequest_SortOption = 2 +) + +// Enum value maps for ListCommitTagsRequest_SortOption. +var ( + ListCommitTagsRequest_SortOption_name = map[int32]string{ + 0: "Default", + 1: "Name", + 2: "Date", + } + ListCommitTagsRequest_SortOption_value = map[string]int32{ + "Default": 0, + "Name": 1, + "Date": 2, + } +) + +func (x ListCommitTagsRequest_SortOption) Enum() *ListCommitTagsRequest_SortOption { + p := new(ListCommitTagsRequest_SortOption) + *p = x + return p +} + +func (x ListCommitTagsRequest_SortOption) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (ListCommitTagsRequest_SortOption) Descriptor() protoreflect.EnumDescriptor { + return file_ref_proto_enumTypes[1].Descriptor() +} + +func (ListCommitTagsRequest_SortOption) Type() protoreflect.EnumType { + return &file_ref_proto_enumTypes[1] +} + +func (x ListCommitTagsRequest_SortOption) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use ListCommitTagsRequest_SortOption.Descriptor instead. +func (ListCommitTagsRequest_SortOption) EnumDescriptor() ([]byte, []int) { + return file_ref_proto_rawDescGZIP(), []int{12, 0} +} + +type CreateCommitTagRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Base *WriteRequest `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"` + TagName string `protobuf:"bytes,2,opt,name=tag_name,json=tagName,proto3" json:"tag_name,omitempty"` + Target string `protobuf:"bytes,3,opt,name=target,proto3" json:"target,omitempty"` + Message string `protobuf:"bytes,4,opt,name=message,proto3" json:"message,omitempty"` + Tagger *Identity `protobuf:"bytes,5,opt,name=tagger,proto3" json:"tagger,omitempty"` + TaggerDate int64 `protobuf:"varint,6,opt,name=taggerDate,proto3" json:"taggerDate,omitempty"` +} + +func (x *CreateCommitTagRequest) Reset() { + *x = CreateCommitTagRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_ref_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CreateCommitTagRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CreateCommitTagRequest) ProtoMessage() {} + +func (x *CreateCommitTagRequest) ProtoReflect() protoreflect.Message { + mi := &file_ref_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CreateCommitTagRequest.ProtoReflect.Descriptor instead. 
+func (*CreateCommitTagRequest) Descriptor() ([]byte, []int) { + return file_ref_proto_rawDescGZIP(), []int{0} +} + +func (x *CreateCommitTagRequest) GetBase() *WriteRequest { + if x != nil { + return x.Base + } + return nil +} + +func (x *CreateCommitTagRequest) GetTagName() string { + if x != nil { + return x.TagName + } + return "" +} + +func (x *CreateCommitTagRequest) GetTarget() string { + if x != nil { + return x.Target + } + return "" +} + +func (x *CreateCommitTagRequest) GetMessage() string { + if x != nil { + return x.Message + } + return "" +} + +func (x *CreateCommitTagRequest) GetTagger() *Identity { + if x != nil { + return x.Tagger + } + return nil +} + +func (x *CreateCommitTagRequest) GetTaggerDate() int64 { + if x != nil { + return x.TaggerDate + } + return 0 +} + +type CreateCommitTagResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Tag *CommitTag `protobuf:"bytes,1,opt,name=tag,proto3" json:"tag,omitempty"` +} + +func (x *CreateCommitTagResponse) Reset() { + *x = CreateCommitTagResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_ref_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CreateCommitTagResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CreateCommitTagResponse) ProtoMessage() {} + +func (x *CreateCommitTagResponse) ProtoReflect() protoreflect.Message { + mi := &file_ref_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CreateCommitTagResponse.ProtoReflect.Descriptor instead. +func (*CreateCommitTagResponse) Descriptor() ([]byte, []int) { + return file_ref_proto_rawDescGZIP(), []int{1} +} + +func (x *CreateCommitTagResponse) GetTag() *CommitTag { + if x != nil { + return x.Tag + } + return nil +} + +type DeleteTagRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Base *WriteRequest `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"` + TagName string `protobuf:"bytes,2,opt,name=tag_name,json=tagName,proto3" json:"tag_name,omitempty"` +} + +func (x *DeleteTagRequest) Reset() { + *x = DeleteTagRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_ref_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DeleteTagRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DeleteTagRequest) ProtoMessage() {} + +func (x *DeleteTagRequest) ProtoReflect() protoreflect.Message { + mi := &file_ref_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DeleteTagRequest.ProtoReflect.Descriptor instead. 
+func (*DeleteTagRequest) Descriptor() ([]byte, []int) { + return file_ref_proto_rawDescGZIP(), []int{2} +} + +func (x *DeleteTagRequest) GetBase() *WriteRequest { + if x != nil { + return x.Base + } + return nil +} + +func (x *DeleteTagRequest) GetTagName() string { + if x != nil { + return x.TagName + } + return "" +} + +type CreateBranchRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Base *WriteRequest `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"` + BranchName string `protobuf:"bytes,2,opt,name=branch_name,json=branchName,proto3" json:"branch_name,omitempty"` + Target string `protobuf:"bytes,3,opt,name=target,proto3" json:"target,omitempty"` +} + +func (x *CreateBranchRequest) Reset() { + *x = CreateBranchRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_ref_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CreateBranchRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CreateBranchRequest) ProtoMessage() {} + +func (x *CreateBranchRequest) ProtoReflect() protoreflect.Message { + mi := &file_ref_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CreateBranchRequest.ProtoReflect.Descriptor instead. +func (*CreateBranchRequest) Descriptor() ([]byte, []int) { + return file_ref_proto_rawDescGZIP(), []int{3} +} + +func (x *CreateBranchRequest) GetBase() *WriteRequest { + if x != nil { + return x.Base + } + return nil +} + +func (x *CreateBranchRequest) GetBranchName() string { + if x != nil { + return x.BranchName + } + return "" +} + +func (x *CreateBranchRequest) GetTarget() string { + if x != nil { + return x.Target + } + return "" +} + +type CreateBranchResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Branch *Branch `protobuf:"bytes,1,opt,name=branch,proto3" json:"branch,omitempty"` +} + +func (x *CreateBranchResponse) Reset() { + *x = CreateBranchResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_ref_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CreateBranchResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CreateBranchResponse) ProtoMessage() {} + +func (x *CreateBranchResponse) ProtoReflect() protoreflect.Message { + mi := &file_ref_proto_msgTypes[4] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CreateBranchResponse.ProtoReflect.Descriptor instead. 
+func (*CreateBranchResponse) Descriptor() ([]byte, []int) { + return file_ref_proto_rawDescGZIP(), []int{4} +} + +func (x *CreateBranchResponse) GetBranch() *Branch { + if x != nil { + return x.Branch + } + return nil +} + +type GetBranchRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Base *ReadRequest `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"` + BranchName string `protobuf:"bytes,2,opt,name=branch_name,json=branchName,proto3" json:"branch_name,omitempty"` +} + +func (x *GetBranchRequest) Reset() { + *x = GetBranchRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_ref_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetBranchRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetBranchRequest) ProtoMessage() {} + +func (x *GetBranchRequest) ProtoReflect() protoreflect.Message { + mi := &file_ref_proto_msgTypes[5] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetBranchRequest.ProtoReflect.Descriptor instead. +func (*GetBranchRequest) Descriptor() ([]byte, []int) { + return file_ref_proto_rawDescGZIP(), []int{5} +} + +func (x *GetBranchRequest) GetBase() *ReadRequest { + if x != nil { + return x.Base + } + return nil +} + +func (x *GetBranchRequest) GetBranchName() string { + if x != nil { + return x.BranchName + } + return "" +} + +type GetBranchResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Branch *Branch `protobuf:"bytes,1,opt,name=branch,proto3" json:"branch,omitempty"` +} + +func (x *GetBranchResponse) Reset() { + *x = GetBranchResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_ref_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetBranchResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetBranchResponse) ProtoMessage() {} + +func (x *GetBranchResponse) ProtoReflect() protoreflect.Message { + mi := &file_ref_proto_msgTypes[6] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetBranchResponse.ProtoReflect.Descriptor instead. 
+func (*GetBranchResponse) Descriptor() ([]byte, []int) { + return file_ref_proto_rawDescGZIP(), []int{6} +} + +func (x *GetBranchResponse) GetBranch() *Branch { + if x != nil { + return x.Branch + } + return nil +} + +type DeleteBranchRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Base *WriteRequest `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"` + BranchName string `protobuf:"bytes,2,opt,name=branch_name,json=branchName,proto3" json:"branch_name,omitempty"` + Force bool `protobuf:"varint,3,opt,name=force,proto3" json:"force,omitempty"` +} + +func (x *DeleteBranchRequest) Reset() { + *x = DeleteBranchRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_ref_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DeleteBranchRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DeleteBranchRequest) ProtoMessage() {} + +func (x *DeleteBranchRequest) ProtoReflect() protoreflect.Message { + mi := &file_ref_proto_msgTypes[7] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DeleteBranchRequest.ProtoReflect.Descriptor instead. +func (*DeleteBranchRequest) Descriptor() ([]byte, []int) { + return file_ref_proto_rawDescGZIP(), []int{7} +} + +func (x *DeleteBranchRequest) GetBase() *WriteRequest { + if x != nil { + return x.Base + } + return nil +} + +func (x *DeleteBranchRequest) GetBranchName() string { + if x != nil { + return x.BranchName + } + return "" +} + +func (x *DeleteBranchRequest) GetForce() bool { + if x != nil { + return x.Force + } + return false +} + +type DeleteBranchResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Sha string `protobuf:"bytes,1,opt,name=sha,proto3" json:"sha,omitempty"` +} + +func (x *DeleteBranchResponse) Reset() { + *x = DeleteBranchResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_ref_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DeleteBranchResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DeleteBranchResponse) ProtoMessage() {} + +func (x *DeleteBranchResponse) ProtoReflect() protoreflect.Message { + mi := &file_ref_proto_msgTypes[8] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DeleteBranchResponse.ProtoReflect.Descriptor instead. 
+func (*DeleteBranchResponse) Descriptor() ([]byte, []int) { + return file_ref_proto_rawDescGZIP(), []int{8} +} + +func (x *DeleteBranchResponse) GetSha() string { + if x != nil { + return x.Sha + } + return "" +} + +type ListBranchesRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Base *ReadRequest `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"` + IncludeCommit bool `protobuf:"varint,2,opt,name=include_commit,json=includeCommit,proto3" json:"include_commit,omitempty"` + Query string `protobuf:"bytes,3,opt,name=query,proto3" json:"query,omitempty"` + Sort ListBranchesRequest_SortOption `protobuf:"varint,4,opt,name=sort,proto3,enum=rpc.ListBranchesRequest_SortOption" json:"sort,omitempty"` + Order SortOrder `protobuf:"varint,5,opt,name=order,proto3,enum=rpc.SortOrder" json:"order,omitempty"` + Page int32 `protobuf:"varint,6,opt,name=page,proto3" json:"page,omitempty"` + PageSize int32 `protobuf:"varint,7,opt,name=pageSize,proto3" json:"pageSize,omitempty"` +} + +func (x *ListBranchesRequest) Reset() { + *x = ListBranchesRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_ref_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ListBranchesRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListBranchesRequest) ProtoMessage() {} + +func (x *ListBranchesRequest) ProtoReflect() protoreflect.Message { + mi := &file_ref_proto_msgTypes[9] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListBranchesRequest.ProtoReflect.Descriptor instead. 
+func (*ListBranchesRequest) Descriptor() ([]byte, []int) { + return file_ref_proto_rawDescGZIP(), []int{9} +} + +func (x *ListBranchesRequest) GetBase() *ReadRequest { + if x != nil { + return x.Base + } + return nil +} + +func (x *ListBranchesRequest) GetIncludeCommit() bool { + if x != nil { + return x.IncludeCommit + } + return false +} + +func (x *ListBranchesRequest) GetQuery() string { + if x != nil { + return x.Query + } + return "" +} + +func (x *ListBranchesRequest) GetSort() ListBranchesRequest_SortOption { + if x != nil { + return x.Sort + } + return ListBranchesRequest_Default +} + +func (x *ListBranchesRequest) GetOrder() SortOrder { + if x != nil { + return x.Order + } + return SortOrder_Default +} + +func (x *ListBranchesRequest) GetPage() int32 { + if x != nil { + return x.Page + } + return 0 +} + +func (x *ListBranchesRequest) GetPageSize() int32 { + if x != nil { + return x.PageSize + } + return 0 +} + +type ListBranchesResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Branch *Branch `protobuf:"bytes,1,opt,name=branch,proto3" json:"branch,omitempty"` +} + +func (x *ListBranchesResponse) Reset() { + *x = ListBranchesResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_ref_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ListBranchesResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListBranchesResponse) ProtoMessage() {} + +func (x *ListBranchesResponse) ProtoReflect() protoreflect.Message { + mi := &file_ref_proto_msgTypes[10] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListBranchesResponse.ProtoReflect.Descriptor instead. +func (*ListBranchesResponse) Descriptor() ([]byte, []int) { + return file_ref_proto_rawDescGZIP(), []int{10} +} + +func (x *ListBranchesResponse) GetBranch() *Branch { + if x != nil { + return x.Branch + } + return nil +} + +type Branch struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Sha string `protobuf:"bytes,2,opt,name=sha,proto3" json:"sha,omitempty"` + Commit *Commit `protobuf:"bytes,3,opt,name=commit,proto3" json:"commit,omitempty"` +} + +func (x *Branch) Reset() { + *x = Branch{} + if protoimpl.UnsafeEnabled { + mi := &file_ref_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Branch) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Branch) ProtoMessage() {} + +func (x *Branch) ProtoReflect() protoreflect.Message { + mi := &file_ref_proto_msgTypes[11] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Branch.ProtoReflect.Descriptor instead. 
+func (*Branch) Descriptor() ([]byte, []int) { + return file_ref_proto_rawDescGZIP(), []int{11} +} + +func (x *Branch) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *Branch) GetSha() string { + if x != nil { + return x.Sha + } + return "" +} + +func (x *Branch) GetCommit() *Commit { + if x != nil { + return x.Commit + } + return nil +} + +type ListCommitTagsRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Base *ReadRequest `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"` + IncludeCommit bool `protobuf:"varint,2,opt,name=include_commit,json=includeCommit,proto3" json:"include_commit,omitempty"` + Query string `protobuf:"bytes,3,opt,name=query,proto3" json:"query,omitempty"` + Sort ListCommitTagsRequest_SortOption `protobuf:"varint,4,opt,name=sort,proto3,enum=rpc.ListCommitTagsRequest_SortOption" json:"sort,omitempty"` + Order SortOrder `protobuf:"varint,5,opt,name=order,proto3,enum=rpc.SortOrder" json:"order,omitempty"` + Page int32 `protobuf:"varint,6,opt,name=page,proto3" json:"page,omitempty"` + PageSize int32 `protobuf:"varint,7,opt,name=pageSize,proto3" json:"pageSize,omitempty"` +} + +func (x *ListCommitTagsRequest) Reset() { + *x = ListCommitTagsRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_ref_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ListCommitTagsRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListCommitTagsRequest) ProtoMessage() {} + +func (x *ListCommitTagsRequest) ProtoReflect() protoreflect.Message { + mi := &file_ref_proto_msgTypes[12] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListCommitTagsRequest.ProtoReflect.Descriptor instead. 
+func (*ListCommitTagsRequest) Descriptor() ([]byte, []int) { + return file_ref_proto_rawDescGZIP(), []int{12} +} + +func (x *ListCommitTagsRequest) GetBase() *ReadRequest { + if x != nil { + return x.Base + } + return nil +} + +func (x *ListCommitTagsRequest) GetIncludeCommit() bool { + if x != nil { + return x.IncludeCommit + } + return false +} + +func (x *ListCommitTagsRequest) GetQuery() string { + if x != nil { + return x.Query + } + return "" +} + +func (x *ListCommitTagsRequest) GetSort() ListCommitTagsRequest_SortOption { + if x != nil { + return x.Sort + } + return ListCommitTagsRequest_Default +} + +func (x *ListCommitTagsRequest) GetOrder() SortOrder { + if x != nil { + return x.Order + } + return SortOrder_Default +} + +func (x *ListCommitTagsRequest) GetPage() int32 { + if x != nil { + return x.Page + } + return 0 +} + +func (x *ListCommitTagsRequest) GetPageSize() int32 { + if x != nil { + return x.PageSize + } + return 0 +} + +type ListCommitTagsResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Tag *CommitTag `protobuf:"bytes,1,opt,name=tag,proto3" json:"tag,omitempty"` +} + +func (x *ListCommitTagsResponse) Reset() { + *x = ListCommitTagsResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_ref_proto_msgTypes[13] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ListCommitTagsResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListCommitTagsResponse) ProtoMessage() {} + +func (x *ListCommitTagsResponse) ProtoReflect() protoreflect.Message { + mi := &file_ref_proto_msgTypes[13] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListCommitTagsResponse.ProtoReflect.Descriptor instead. 
+func (*ListCommitTagsResponse) Descriptor() ([]byte, []int) { + return file_ref_proto_rawDescGZIP(), []int{13} +} + +func (x *ListCommitTagsResponse) GetTag() *CommitTag { + if x != nil { + return x.Tag + } + return nil +} + +type CommitTag struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Sha string `protobuf:"bytes,2,opt,name=sha,proto3" json:"sha,omitempty"` + IsAnnotated bool `protobuf:"varint,3,opt,name=is_annotated,json=isAnnotated,proto3" json:"is_annotated,omitempty"` + Title string `protobuf:"bytes,4,opt,name=title,proto3" json:"title,omitempty"` + Message string `protobuf:"bytes,5,opt,name=message,proto3" json:"message,omitempty"` + Tagger *Signature `protobuf:"bytes,6,opt,name=tagger,proto3" json:"tagger,omitempty"` + Commit *Commit `protobuf:"bytes,7,opt,name=commit,proto3" json:"commit,omitempty"` +} + +func (x *CommitTag) Reset() { + *x = CommitTag{} + if protoimpl.UnsafeEnabled { + mi := &file_ref_proto_msgTypes[14] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CommitTag) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CommitTag) ProtoMessage() {} + +func (x *CommitTag) ProtoReflect() protoreflect.Message { + mi := &file_ref_proto_msgTypes[14] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CommitTag.ProtoReflect.Descriptor instead. +func (*CommitTag) Descriptor() ([]byte, []int) { + return file_ref_proto_rawDescGZIP(), []int{14} +} + +func (x *CommitTag) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *CommitTag) GetSha() string { + if x != nil { + return x.Sha + } + return "" +} + +func (x *CommitTag) GetIsAnnotated() bool { + if x != nil { + return x.IsAnnotated + } + return false +} + +func (x *CommitTag) GetTitle() string { + if x != nil { + return x.Title + } + return "" +} + +func (x *CommitTag) GetMessage() string { + if x != nil { + return x.Message + } + return "" +} + +func (x *CommitTag) GetTagger() *Signature { + if x != nil { + return x.Tagger + } + return nil +} + +func (x *CommitTag) GetCommit() *Commit { + if x != nil { + return x.Commit + } + return nil +} + +type GetRefRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Base *ReadRequest `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"` + RefName string `protobuf:"bytes,2,opt,name=ref_name,json=refName,proto3" json:"ref_name,omitempty"` + RefType RefType `protobuf:"varint,3,opt,name=ref_type,json=refType,proto3,enum=rpc.RefType" json:"ref_type,omitempty"` +} + +func (x *GetRefRequest) Reset() { + *x = GetRefRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_ref_proto_msgTypes[15] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetRefRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetRefRequest) ProtoMessage() {} + +func (x *GetRefRequest) ProtoReflect() protoreflect.Message { + mi := &file_ref_proto_msgTypes[15] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return 
mi.MessageOf(x) +} + +// Deprecated: Use GetRefRequest.ProtoReflect.Descriptor instead. +func (*GetRefRequest) Descriptor() ([]byte, []int) { + return file_ref_proto_rawDescGZIP(), []int{15} +} + +func (x *GetRefRequest) GetBase() *ReadRequest { + if x != nil { + return x.Base + } + return nil +} + +func (x *GetRefRequest) GetRefName() string { + if x != nil { + return x.RefName + } + return "" +} + +func (x *GetRefRequest) GetRefType() RefType { + if x != nil { + return x.RefType + } + return RefType_Undefined +} + +type GetRefResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Sha string `protobuf:"bytes,1,opt,name=sha,proto3" json:"sha,omitempty"` +} + +func (x *GetRefResponse) Reset() { + *x = GetRefResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_ref_proto_msgTypes[16] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetRefResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetRefResponse) ProtoMessage() {} + +func (x *GetRefResponse) ProtoReflect() protoreflect.Message { + mi := &file_ref_proto_msgTypes[16] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetRefResponse.ProtoReflect.Descriptor instead. +func (*GetRefResponse) Descriptor() ([]byte, []int) { + return file_ref_proto_rawDescGZIP(), []int{16} +} + +func (x *GetRefResponse) GetSha() string { + if x != nil { + return x.Sha + } + return "" +} + +type UpdateRefRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Base *WriteRequest `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"` + RefName string `protobuf:"bytes,2,opt,name=ref_name,json=refName,proto3" json:"ref_name,omitempty"` + RefType RefType `protobuf:"varint,3,opt,name=ref_type,json=refType,proto3,enum=rpc.RefType" json:"ref_type,omitempty"` + NewValue string `protobuf:"bytes,4,opt,name=new_value,json=newValue,proto3" json:"new_value,omitempty"` + OldValue string `protobuf:"bytes,5,opt,name=old_value,json=oldValue,proto3" json:"old_value,omitempty"` +} + +func (x *UpdateRefRequest) Reset() { + *x = UpdateRefRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_ref_proto_msgTypes[17] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *UpdateRefRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*UpdateRefRequest) ProtoMessage() {} + +func (x *UpdateRefRequest) ProtoReflect() protoreflect.Message { + mi := &file_ref_proto_msgTypes[17] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use UpdateRefRequest.ProtoReflect.Descriptor instead. 
+func (*UpdateRefRequest) Descriptor() ([]byte, []int) { + return file_ref_proto_rawDescGZIP(), []int{17} +} + +func (x *UpdateRefRequest) GetBase() *WriteRequest { + if x != nil { + return x.Base + } + return nil +} + +func (x *UpdateRefRequest) GetRefName() string { + if x != nil { + return x.RefName + } + return "" +} + +func (x *UpdateRefRequest) GetRefType() RefType { + if x != nil { + return x.RefType + } + return RefType_Undefined +} + +func (x *UpdateRefRequest) GetNewValue() string { + if x != nil { + return x.NewValue + } + return "" +} + +func (x *UpdateRefRequest) GetOldValue() string { + if x != nil { + return x.OldValue + } + return "" +} + +type UpdateRefResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *UpdateRefResponse) Reset() { + *x = UpdateRefResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_ref_proto_msgTypes[18] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *UpdateRefResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*UpdateRefResponse) ProtoMessage() {} + +func (x *UpdateRefResponse) ProtoReflect() protoreflect.Message { + mi := &file_ref_proto_msgTypes[18] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use UpdateRefResponse.ProtoReflect.Descriptor instead. +func (*UpdateRefResponse) Descriptor() ([]byte, []int) { + return file_ref_proto_rawDescGZIP(), []int{18} +} + +var File_ref_proto protoreflect.FileDescriptor + +var file_ref_proto_rawDesc = []byte{ + 0x0a, 0x09, 0x72, 0x65, 0x66, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x03, 0x72, 0x70, 0x63, + 0x1a, 0x0c, 0x73, 0x68, 0x61, 0x72, 0x65, 0x64, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0xd3, + 0x01, 0x0a, 0x16, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x54, + 0x61, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x25, 0x0a, 0x04, 0x62, 0x61, 0x73, + 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x57, 0x72, + 0x69, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, 0x04, 0x62, 0x61, 0x73, 0x65, + 0x12, 0x19, 0x0a, 0x08, 0x74, 0x61, 0x67, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x07, 0x74, 0x61, 0x67, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x74, + 0x61, 0x72, 0x67, 0x65, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x74, 0x61, 0x72, + 0x67, 0x65, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x04, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x25, 0x0a, + 0x06, 0x74, 0x61, 0x67, 0x67, 0x65, 0x72, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0d, 0x2e, + 0x72, 0x70, 0x63, 0x2e, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x52, 0x06, 0x74, 0x61, + 0x67, 0x67, 0x65, 0x72, 0x12, 0x1e, 0x0a, 0x0a, 0x74, 0x61, 0x67, 0x67, 0x65, 0x72, 0x44, 0x61, + 0x74, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0a, 0x74, 0x61, 0x67, 0x67, 0x65, 0x72, + 0x44, 0x61, 0x74, 0x65, 0x22, 0x3b, 0x0a, 0x17, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x43, 0x6f, + 0x6d, 0x6d, 0x69, 0x74, 0x54, 0x61, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, + 0x20, 0x0a, 0x03, 0x74, 0x61, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x72, + 0x70, 0x63, 0x2e, 0x43, 0x6f, 
0x6d, 0x6d, 0x69, 0x74, 0x54, 0x61, 0x67, 0x52, 0x03, 0x74, 0x61, + 0x67, 0x22, 0x54, 0x0a, 0x10, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x54, 0x61, 0x67, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x25, 0x0a, 0x04, 0x62, 0x61, 0x73, 0x65, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x57, 0x72, 0x69, 0x74, 0x65, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, 0x04, 0x62, 0x61, 0x73, 0x65, 0x12, 0x19, 0x0a, 0x08, + 0x74, 0x61, 0x67, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, + 0x74, 0x61, 0x67, 0x4e, 0x61, 0x6d, 0x65, 0x22, 0x75, 0x0a, 0x13, 0x43, 0x72, 0x65, 0x61, 0x74, + 0x65, 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x25, + 0x0a, 0x04, 0x62, 0x61, 0x73, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x72, + 0x70, 0x63, 0x2e, 0x57, 0x72, 0x69, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, + 0x04, 0x62, 0x61, 0x73, 0x65, 0x12, 0x1f, 0x0a, 0x0b, 0x62, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x5f, + 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x62, 0x72, 0x61, 0x6e, + 0x63, 0x68, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x22, 0x3b, + 0x0a, 0x14, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x23, 0x0a, 0x06, 0x62, 0x72, 0x61, 0x6e, 0x63, 0x68, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x42, 0x72, 0x61, + 0x6e, 0x63, 0x68, 0x52, 0x06, 0x62, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x22, 0x59, 0x0a, 0x10, 0x47, + 0x65, 0x74, 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, + 0x24, 0x0a, 0x04, 0x62, 0x61, 0x73, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, + 0x72, 0x70, 0x63, 0x2e, 0x52, 0x65, 0x61, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, + 0x04, 0x62, 0x61, 0x73, 0x65, 0x12, 0x1f, 0x0a, 0x0b, 0x62, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x5f, + 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x62, 0x72, 0x61, 0x6e, + 0x63, 0x68, 0x4e, 0x61, 0x6d, 0x65, 0x22, 0x38, 0x0a, 0x11, 0x47, 0x65, 0x74, 0x42, 0x72, 0x61, + 0x6e, 0x63, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x23, 0x0a, 0x06, 0x62, + 0x72, 0x61, 0x6e, 0x63, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x72, 0x70, + 0x63, 0x2e, 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x52, 0x06, 0x62, 0x72, 0x61, 0x6e, 0x63, 0x68, + 0x22, 0x73, 0x0a, 0x13, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x25, 0x0a, 0x04, 0x62, 0x61, 0x73, 0x65, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x57, 0x72, 0x69, 0x74, + 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, 0x04, 0x62, 0x61, 0x73, 0x65, 0x12, 0x1f, + 0x0a, 0x0b, 0x62, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x0a, 0x62, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x4e, 0x61, 0x6d, 0x65, 0x12, + 0x14, 0x0a, 0x05, 0x66, 0x6f, 0x72, 0x63, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x05, + 0x66, 0x6f, 0x72, 0x63, 0x65, 0x22, 0x28, 0x0a, 0x14, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x42, + 0x72, 0x61, 0x6e, 0x63, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x10, 0x0a, + 0x03, 0x73, 0x68, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 
0x09, 0x52, 0x03, 0x73, 0x68, 0x61, 0x22, + 0xb6, 0x02, 0x0a, 0x13, 0x4c, 0x69, 0x73, 0x74, 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x65, 0x73, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x24, 0x0a, 0x04, 0x62, 0x61, 0x73, 0x65, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x52, 0x65, 0x61, 0x64, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, 0x04, 0x62, 0x61, 0x73, 0x65, 0x12, 0x25, 0x0a, + 0x0e, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x64, 0x65, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0d, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x64, 0x65, 0x43, 0x6f, + 0x6d, 0x6d, 0x69, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x71, 0x75, 0x65, 0x72, 0x79, 0x18, 0x03, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x05, 0x71, 0x75, 0x65, 0x72, 0x79, 0x12, 0x37, 0x0a, 0x04, 0x73, 0x6f, + 0x72, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x23, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x4c, + 0x69, 0x73, 0x74, 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x2e, 0x53, 0x6f, 0x72, 0x74, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x04, 0x73, + 0x6f, 0x72, 0x74, 0x12, 0x24, 0x0a, 0x05, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x18, 0x05, 0x20, 0x01, + 0x28, 0x0e, 0x32, 0x0e, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x6f, 0x72, 0x74, 0x4f, 0x72, 0x64, + 0x65, 0x72, 0x52, 0x05, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x67, + 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x05, 0x52, 0x04, 0x70, 0x61, 0x67, 0x65, 0x12, 0x1a, 0x0a, + 0x08, 0x70, 0x61, 0x67, 0x65, 0x53, 0x69, 0x7a, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x05, 0x52, + 0x08, 0x70, 0x61, 0x67, 0x65, 0x53, 0x69, 0x7a, 0x65, 0x22, 0x2d, 0x0a, 0x0a, 0x53, 0x6f, 0x72, + 0x74, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x0b, 0x0a, 0x07, 0x44, 0x65, 0x66, 0x61, 0x75, + 0x6c, 0x74, 0x10, 0x00, 0x12, 0x08, 0x0a, 0x04, 0x4e, 0x61, 0x6d, 0x65, 0x10, 0x01, 0x12, 0x08, + 0x0a, 0x04, 0x44, 0x61, 0x74, 0x65, 0x10, 0x02, 0x22, 0x3b, 0x0a, 0x14, 0x4c, 0x69, 0x73, 0x74, + 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x12, 0x23, 0x0a, 0x06, 0x62, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x0b, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x52, 0x06, 0x62, + 0x72, 0x61, 0x6e, 0x63, 0x68, 0x22, 0x53, 0x0a, 0x06, 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x12, + 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, + 0x61, 0x6d, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x73, 0x68, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x03, 0x73, 0x68, 0x61, 0x12, 0x23, 0x0a, 0x06, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, + 0x69, 0x74, 0x52, 0x06, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x22, 0xba, 0x02, 0x0a, 0x15, 0x4c, + 0x69, 0x73, 0x74, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x54, 0x61, 0x67, 0x73, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x12, 0x24, 0x0a, 0x04, 0x62, 0x61, 0x73, 0x65, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x52, 0x65, 0x61, 0x64, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x52, 0x04, 0x62, 0x61, 0x73, 0x65, 0x12, 0x25, 0x0a, 0x0e, 0x69, 0x6e, + 0x63, 0x6c, 0x75, 0x64, 0x65, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x08, 0x52, 0x0d, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x64, 0x65, 0x43, 0x6f, 0x6d, 0x6d, 0x69, + 0x74, 0x12, 0x14, 0x0a, 0x05, 0x71, 0x75, 0x65, 0x72, 0x79, 0x18, 0x03, 0x20, 
0x01, 0x28, 0x09, + 0x52, 0x05, 0x71, 0x75, 0x65, 0x72, 0x79, 0x12, 0x39, 0x0a, 0x04, 0x73, 0x6f, 0x72, 0x74, 0x18, + 0x04, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x25, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x4c, 0x69, 0x73, 0x74, + 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x54, 0x61, 0x67, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x2e, 0x53, 0x6f, 0x72, 0x74, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x04, 0x73, 0x6f, + 0x72, 0x74, 0x12, 0x24, 0x0a, 0x05, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x18, 0x05, 0x20, 0x01, 0x28, + 0x0e, 0x32, 0x0e, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x6f, 0x72, 0x74, 0x4f, 0x72, 0x64, 0x65, + 0x72, 0x52, 0x05, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x67, 0x65, + 0x18, 0x06, 0x20, 0x01, 0x28, 0x05, 0x52, 0x04, 0x70, 0x61, 0x67, 0x65, 0x12, 0x1a, 0x0a, 0x08, + 0x70, 0x61, 0x67, 0x65, 0x53, 0x69, 0x7a, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x05, 0x52, 0x08, + 0x70, 0x61, 0x67, 0x65, 0x53, 0x69, 0x7a, 0x65, 0x22, 0x2d, 0x0a, 0x0a, 0x53, 0x6f, 0x72, 0x74, + 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x0b, 0x0a, 0x07, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, + 0x74, 0x10, 0x00, 0x12, 0x08, 0x0a, 0x04, 0x4e, 0x61, 0x6d, 0x65, 0x10, 0x01, 0x12, 0x08, 0x0a, + 0x04, 0x44, 0x61, 0x74, 0x65, 0x10, 0x02, 0x22, 0x3a, 0x0a, 0x16, 0x4c, 0x69, 0x73, 0x74, 0x43, + 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x54, 0x61, 0x67, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x12, 0x20, 0x0a, 0x03, 0x74, 0x61, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0e, + 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x54, 0x61, 0x67, 0x52, 0x03, + 0x74, 0x61, 0x67, 0x22, 0xd1, 0x01, 0x0a, 0x09, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x54, 0x61, + 0x67, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x73, 0x68, 0x61, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x03, 0x73, 0x68, 0x61, 0x12, 0x21, 0x0a, 0x0c, 0x69, 0x73, 0x5f, 0x61, 0x6e, + 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x65, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0b, 0x69, + 0x73, 0x41, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x65, 0x64, 0x12, 0x14, 0x0a, 0x05, 0x74, 0x69, + 0x74, 0x6c, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x74, 0x69, 0x74, 0x6c, 0x65, + 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x26, 0x0a, 0x06, 0x74, 0x61, + 0x67, 0x67, 0x65, 0x72, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x72, 0x70, 0x63, + 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x52, 0x06, 0x74, 0x61, 0x67, 0x67, + 0x65, 0x72, 0x12, 0x23, 0x0a, 0x06, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x18, 0x07, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x52, + 0x06, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x22, 0x79, 0x0a, 0x0d, 0x47, 0x65, 0x74, 0x52, 0x65, + 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x24, 0x0a, 0x04, 0x62, 0x61, 0x73, 0x65, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x52, 0x65, 0x61, + 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, 0x04, 0x62, 0x61, 0x73, 0x65, 0x12, 0x19, + 0x0a, 0x08, 0x72, 0x65, 0x66, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x07, 0x72, 0x65, 0x66, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x27, 0x0a, 0x08, 0x72, 0x65, 0x66, + 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x0c, 0x2e, 0x72, 0x70, + 0x63, 
0x2e, 0x52, 0x65, 0x66, 0x54, 0x79, 0x70, 0x65, 0x52, 0x07, 0x72, 0x65, 0x66, 0x54, 0x79, + 0x70, 0x65, 0x22, 0x22, 0x0a, 0x0e, 0x47, 0x65, 0x74, 0x52, 0x65, 0x66, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x73, 0x68, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x03, 0x73, 0x68, 0x61, 0x22, 0xb7, 0x01, 0x0a, 0x10, 0x55, 0x70, 0x64, 0x61, 0x74, + 0x65, 0x52, 0x65, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x25, 0x0a, 0x04, 0x62, + 0x61, 0x73, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x72, 0x70, 0x63, 0x2e, + 0x57, 0x72, 0x69, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, 0x04, 0x62, 0x61, + 0x73, 0x65, 0x12, 0x19, 0x0a, 0x08, 0x72, 0x65, 0x66, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x72, 0x65, 0x66, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x27, 0x0a, + 0x08, 0x72, 0x65, 0x66, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, + 0x0c, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x52, 0x65, 0x66, 0x54, 0x79, 0x70, 0x65, 0x52, 0x07, 0x72, + 0x65, 0x66, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x6e, 0x65, 0x77, 0x5f, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x6e, 0x65, 0x77, 0x56, 0x61, + 0x6c, 0x75, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x6f, 0x6c, 0x64, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x6f, 0x6c, 0x64, 0x56, 0x61, 0x6c, 0x75, 0x65, + 0x22, 0x13, 0x0a, 0x11, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x52, 0x65, 0x66, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x32, 0xe5, 0x04, 0x0a, 0x10, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, + 0x6e, 0x63, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x43, 0x0a, 0x0c, 0x43, 0x72, + 0x65, 0x61, 0x74, 0x65, 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x12, 0x18, 0x2e, 0x72, 0x70, 0x63, + 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x1a, 0x19, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, + 0x65, 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, + 0x3a, 0x0a, 0x09, 0x47, 0x65, 0x74, 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x12, 0x15, 0x2e, 0x72, + 0x70, 0x63, 0x2e, 0x47, 0x65, 0x74, 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x47, 0x65, 0x74, 0x42, 0x72, 0x61, + 0x6e, 0x63, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x43, 0x0a, 0x0c, 0x44, + 0x65, 0x6c, 0x65, 0x74, 0x65, 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x12, 0x18, 0x2e, 0x72, 0x70, + 0x63, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x19, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x44, 0x65, 0x6c, 0x65, + 0x74, 0x65, 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x12, 0x45, 0x0a, 0x0c, 0x4c, 0x69, 0x73, 0x74, 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x65, 0x73, + 0x12, 0x18, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x42, 0x72, 0x61, 0x6e, 0x63, + 0x68, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x19, 0x2e, 0x72, 0x70, 0x63, + 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x65, 0x73, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x30, 0x01, 0x12, 0x4b, 0x0a, 0x0e, 0x4c, 0x69, 0x73, 0x74, 0x43, + 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x54, 0x61, 0x67, 0x73, 0x12, 0x1a, 0x2e, 0x72, 0x70, 0x63, 0x2e, + 0x4c, 0x69, 0x73, 0x74, 0x43, 
0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x54, 0x61, 0x67, 0x73, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1b, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x4c, 0x69, 0x73, 0x74, + 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x54, 0x61, 0x67, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x30, 0x01, 0x12, 0x4c, 0x0a, 0x0f, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x43, 0x6f, + 0x6d, 0x6d, 0x69, 0x74, 0x54, 0x61, 0x67, 0x12, 0x1b, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x43, 0x72, + 0x65, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x54, 0x61, 0x67, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1c, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, + 0x65, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x54, 0x61, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x12, 0x3a, 0x0a, 0x09, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x54, 0x61, 0x67, 0x12, + 0x15, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x54, 0x61, 0x67, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x55, 0x70, 0x64, + 0x61, 0x74, 0x65, 0x52, 0x65, 0x66, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x31, + 0x0a, 0x06, 0x47, 0x65, 0x74, 0x52, 0x65, 0x66, 0x12, 0x12, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x47, + 0x65, 0x74, 0x52, 0x65, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x13, 0x2e, 0x72, + 0x70, 0x63, 0x2e, 0x47, 0x65, 0x74, 0x52, 0x65, 0x66, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x12, 0x3a, 0x0a, 0x09, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x52, 0x65, 0x66, 0x12, 0x15, + 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x52, 0x65, 0x66, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x55, 0x70, 0x64, 0x61, + 0x74, 0x65, 0x52, 0x65, 0x66, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, 0x27, 0x5a, + 0x25, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x68, 0x61, 0x72, 0x6e, + 0x65, 0x73, 0x73, 0x2f, 0x67, 0x69, 0x74, 0x6e, 0x65, 0x73, 0x73, 0x2f, 0x67, 0x69, 0x74, 0x72, + 0x70, 0x63, 0x2f, 0x72, 0x70, 0x63, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_ref_proto_rawDescOnce sync.Once + file_ref_proto_rawDescData = file_ref_proto_rawDesc +) + +func file_ref_proto_rawDescGZIP() []byte { + file_ref_proto_rawDescOnce.Do(func() { + file_ref_proto_rawDescData = protoimpl.X.CompressGZIP(file_ref_proto_rawDescData) + }) + return file_ref_proto_rawDescData +} + +var file_ref_proto_enumTypes = make([]protoimpl.EnumInfo, 2) +var file_ref_proto_msgTypes = make([]protoimpl.MessageInfo, 19) +var file_ref_proto_goTypes = []interface{}{ + (ListBranchesRequest_SortOption)(0), // 0: rpc.ListBranchesRequest.SortOption + (ListCommitTagsRequest_SortOption)(0), // 1: rpc.ListCommitTagsRequest.SortOption + (*CreateCommitTagRequest)(nil), // 2: rpc.CreateCommitTagRequest + (*CreateCommitTagResponse)(nil), // 3: rpc.CreateCommitTagResponse + (*DeleteTagRequest)(nil), // 4: rpc.DeleteTagRequest + (*CreateBranchRequest)(nil), // 5: rpc.CreateBranchRequest + (*CreateBranchResponse)(nil), // 6: rpc.CreateBranchResponse + (*GetBranchRequest)(nil), // 7: rpc.GetBranchRequest + (*GetBranchResponse)(nil), // 8: rpc.GetBranchResponse + (*DeleteBranchRequest)(nil), // 9: rpc.DeleteBranchRequest + (*DeleteBranchResponse)(nil), // 10: rpc.DeleteBranchResponse + (*ListBranchesRequest)(nil), // 11: rpc.ListBranchesRequest + (*ListBranchesResponse)(nil), // 12: rpc.ListBranchesResponse + (*Branch)(nil), // 13: rpc.Branch + (*ListCommitTagsRequest)(nil), // 14: rpc.ListCommitTagsRequest + 
(*ListCommitTagsResponse)(nil), // 15: rpc.ListCommitTagsResponse + (*CommitTag)(nil), // 16: rpc.CommitTag + (*GetRefRequest)(nil), // 17: rpc.GetRefRequest + (*GetRefResponse)(nil), // 18: rpc.GetRefResponse + (*UpdateRefRequest)(nil), // 19: rpc.UpdateRefRequest + (*UpdateRefResponse)(nil), // 20: rpc.UpdateRefResponse + (*WriteRequest)(nil), // 21: rpc.WriteRequest + (*Identity)(nil), // 22: rpc.Identity + (*ReadRequest)(nil), // 23: rpc.ReadRequest + (SortOrder)(0), // 24: rpc.SortOrder + (*Commit)(nil), // 25: rpc.Commit + (*Signature)(nil), // 26: rpc.Signature + (RefType)(0), // 27: rpc.RefType +} +var file_ref_proto_depIdxs = []int32{ + 21, // 0: rpc.CreateCommitTagRequest.base:type_name -> rpc.WriteRequest + 22, // 1: rpc.CreateCommitTagRequest.tagger:type_name -> rpc.Identity + 16, // 2: rpc.CreateCommitTagResponse.tag:type_name -> rpc.CommitTag + 21, // 3: rpc.DeleteTagRequest.base:type_name -> rpc.WriteRequest + 21, // 4: rpc.CreateBranchRequest.base:type_name -> rpc.WriteRequest + 13, // 5: rpc.CreateBranchResponse.branch:type_name -> rpc.Branch + 23, // 6: rpc.GetBranchRequest.base:type_name -> rpc.ReadRequest + 13, // 7: rpc.GetBranchResponse.branch:type_name -> rpc.Branch + 21, // 8: rpc.DeleteBranchRequest.base:type_name -> rpc.WriteRequest + 23, // 9: rpc.ListBranchesRequest.base:type_name -> rpc.ReadRequest + 0, // 10: rpc.ListBranchesRequest.sort:type_name -> rpc.ListBranchesRequest.SortOption + 24, // 11: rpc.ListBranchesRequest.order:type_name -> rpc.SortOrder + 13, // 12: rpc.ListBranchesResponse.branch:type_name -> rpc.Branch + 25, // 13: rpc.Branch.commit:type_name -> rpc.Commit + 23, // 14: rpc.ListCommitTagsRequest.base:type_name -> rpc.ReadRequest + 1, // 15: rpc.ListCommitTagsRequest.sort:type_name -> rpc.ListCommitTagsRequest.SortOption + 24, // 16: rpc.ListCommitTagsRequest.order:type_name -> rpc.SortOrder + 16, // 17: rpc.ListCommitTagsResponse.tag:type_name -> rpc.CommitTag + 26, // 18: rpc.CommitTag.tagger:type_name -> rpc.Signature + 25, // 19: rpc.CommitTag.commit:type_name -> rpc.Commit + 23, // 20: rpc.GetRefRequest.base:type_name -> rpc.ReadRequest + 27, // 21: rpc.GetRefRequest.ref_type:type_name -> rpc.RefType + 21, // 22: rpc.UpdateRefRequest.base:type_name -> rpc.WriteRequest + 27, // 23: rpc.UpdateRefRequest.ref_type:type_name -> rpc.RefType + 5, // 24: rpc.ReferenceService.CreateBranch:input_type -> rpc.CreateBranchRequest + 7, // 25: rpc.ReferenceService.GetBranch:input_type -> rpc.GetBranchRequest + 9, // 26: rpc.ReferenceService.DeleteBranch:input_type -> rpc.DeleteBranchRequest + 11, // 27: rpc.ReferenceService.ListBranches:input_type -> rpc.ListBranchesRequest + 14, // 28: rpc.ReferenceService.ListCommitTags:input_type -> rpc.ListCommitTagsRequest + 2, // 29: rpc.ReferenceService.CreateCommitTag:input_type -> rpc.CreateCommitTagRequest + 4, // 30: rpc.ReferenceService.DeleteTag:input_type -> rpc.DeleteTagRequest + 17, // 31: rpc.ReferenceService.GetRef:input_type -> rpc.GetRefRequest + 19, // 32: rpc.ReferenceService.UpdateRef:input_type -> rpc.UpdateRefRequest + 6, // 33: rpc.ReferenceService.CreateBranch:output_type -> rpc.CreateBranchResponse + 8, // 34: rpc.ReferenceService.GetBranch:output_type -> rpc.GetBranchResponse + 10, // 35: rpc.ReferenceService.DeleteBranch:output_type -> rpc.DeleteBranchResponse + 12, // 36: rpc.ReferenceService.ListBranches:output_type -> rpc.ListBranchesResponse + 15, // 37: rpc.ReferenceService.ListCommitTags:output_type -> rpc.ListCommitTagsResponse + 3, // 38: 
rpc.ReferenceService.CreateCommitTag:output_type -> rpc.CreateCommitTagResponse + 20, // 39: rpc.ReferenceService.DeleteTag:output_type -> rpc.UpdateRefResponse + 18, // 40: rpc.ReferenceService.GetRef:output_type -> rpc.GetRefResponse + 20, // 41: rpc.ReferenceService.UpdateRef:output_type -> rpc.UpdateRefResponse + 33, // [33:42] is the sub-list for method output_type + 24, // [24:33] is the sub-list for method input_type + 24, // [24:24] is the sub-list for extension type_name + 24, // [24:24] is the sub-list for extension extendee + 0, // [0:24] is the sub-list for field type_name +} + +func init() { file_ref_proto_init() } +func file_ref_proto_init() { + if File_ref_proto != nil { + return + } + file_shared_proto_init() + if !protoimpl.UnsafeEnabled { + file_ref_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CreateCommitTagRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_ref_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CreateCommitTagResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_ref_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DeleteTagRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_ref_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CreateBranchRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_ref_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CreateBranchResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_ref_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GetBranchRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_ref_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GetBranchResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_ref_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DeleteBranchRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_ref_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DeleteBranchResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_ref_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ListBranchesRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_ref_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ListBranchesResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + 
file_ref_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Branch); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_ref_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ListCommitTagsRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_ref_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ListCommitTagsResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_ref_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CommitTag); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_ref_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GetRefRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_ref_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GetRefResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_ref_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*UpdateRefRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_ref_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*UpdateRefResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_ref_proto_rawDesc, + NumEnums: 2, + NumMessages: 19, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_ref_proto_goTypes, + DependencyIndexes: file_ref_proto_depIdxs, + EnumInfos: file_ref_proto_enumTypes, + MessageInfos: file_ref_proto_msgTypes, + }.Build() + File_ref_proto = out.File + file_ref_proto_rawDesc = nil + file_ref_proto_goTypes = nil + file_ref_proto_depIdxs = nil +} diff --git a/gitrpc/rpc/ref_grpc.pb.go b/gitrpc/rpc/ref_grpc.pb.go new file mode 100644 index 0000000000..ea826ae403 --- /dev/null +++ b/gitrpc/rpc/ref_grpc.pb.go @@ -0,0 +1,448 @@ +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.2.0 +// - protoc v3.21.11 +// source: ref.proto + +package rpc + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.32.0 or later. +const _ = grpc.SupportPackageIsVersion7 + +// ReferenceServiceClient is the client API for ReferenceService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. 
+type ReferenceServiceClient interface { + CreateBranch(ctx context.Context, in *CreateBranchRequest, opts ...grpc.CallOption) (*CreateBranchResponse, error) + GetBranch(ctx context.Context, in *GetBranchRequest, opts ...grpc.CallOption) (*GetBranchResponse, error) + DeleteBranch(ctx context.Context, in *DeleteBranchRequest, opts ...grpc.CallOption) (*DeleteBranchResponse, error) + ListBranches(ctx context.Context, in *ListBranchesRequest, opts ...grpc.CallOption) (ReferenceService_ListBranchesClient, error) + ListCommitTags(ctx context.Context, in *ListCommitTagsRequest, opts ...grpc.CallOption) (ReferenceService_ListCommitTagsClient, error) + CreateCommitTag(ctx context.Context, in *CreateCommitTagRequest, opts ...grpc.CallOption) (*CreateCommitTagResponse, error) + DeleteTag(ctx context.Context, in *DeleteTagRequest, opts ...grpc.CallOption) (*UpdateRefResponse, error) + GetRef(ctx context.Context, in *GetRefRequest, opts ...grpc.CallOption) (*GetRefResponse, error) + UpdateRef(ctx context.Context, in *UpdateRefRequest, opts ...grpc.CallOption) (*UpdateRefResponse, error) +} + +type referenceServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewReferenceServiceClient(cc grpc.ClientConnInterface) ReferenceServiceClient { + return &referenceServiceClient{cc} +} + +func (c *referenceServiceClient) CreateBranch(ctx context.Context, in *CreateBranchRequest, opts ...grpc.CallOption) (*CreateBranchResponse, error) { + out := new(CreateBranchResponse) + err := c.cc.Invoke(ctx, "/rpc.ReferenceService/CreateBranch", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *referenceServiceClient) GetBranch(ctx context.Context, in *GetBranchRequest, opts ...grpc.CallOption) (*GetBranchResponse, error) { + out := new(GetBranchResponse) + err := c.cc.Invoke(ctx, "/rpc.ReferenceService/GetBranch", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *referenceServiceClient) DeleteBranch(ctx context.Context, in *DeleteBranchRequest, opts ...grpc.CallOption) (*DeleteBranchResponse, error) { + out := new(DeleteBranchResponse) + err := c.cc.Invoke(ctx, "/rpc.ReferenceService/DeleteBranch", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *referenceServiceClient) ListBranches(ctx context.Context, in *ListBranchesRequest, opts ...grpc.CallOption) (ReferenceService_ListBranchesClient, error) { + stream, err := c.cc.NewStream(ctx, &ReferenceService_ServiceDesc.Streams[0], "/rpc.ReferenceService/ListBranches", opts...) + if err != nil { + return nil, err + } + x := &referenceServiceListBranchesClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type ReferenceService_ListBranchesClient interface { + Recv() (*ListBranchesResponse, error) + grpc.ClientStream +} + +type referenceServiceListBranchesClient struct { + grpc.ClientStream +} + +func (x *referenceServiceListBranchesClient) Recv() (*ListBranchesResponse, error) { + m := new(ListBranchesResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *referenceServiceClient) ListCommitTags(ctx context.Context, in *ListCommitTagsRequest, opts ...grpc.CallOption) (ReferenceService_ListCommitTagsClient, error) { + stream, err := c.cc.NewStream(ctx, &ReferenceService_ServiceDesc.Streams[1], "/rpc.ReferenceService/ListCommitTags", opts...) 
+ if err != nil { + return nil, err + } + x := &referenceServiceListCommitTagsClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type ReferenceService_ListCommitTagsClient interface { + Recv() (*ListCommitTagsResponse, error) + grpc.ClientStream +} + +type referenceServiceListCommitTagsClient struct { + grpc.ClientStream +} + +func (x *referenceServiceListCommitTagsClient) Recv() (*ListCommitTagsResponse, error) { + m := new(ListCommitTagsResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *referenceServiceClient) CreateCommitTag(ctx context.Context, in *CreateCommitTagRequest, opts ...grpc.CallOption) (*CreateCommitTagResponse, error) { + out := new(CreateCommitTagResponse) + err := c.cc.Invoke(ctx, "/rpc.ReferenceService/CreateCommitTag", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *referenceServiceClient) DeleteTag(ctx context.Context, in *DeleteTagRequest, opts ...grpc.CallOption) (*UpdateRefResponse, error) { + out := new(UpdateRefResponse) + err := c.cc.Invoke(ctx, "/rpc.ReferenceService/DeleteTag", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *referenceServiceClient) GetRef(ctx context.Context, in *GetRefRequest, opts ...grpc.CallOption) (*GetRefResponse, error) { + out := new(GetRefResponse) + err := c.cc.Invoke(ctx, "/rpc.ReferenceService/GetRef", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *referenceServiceClient) UpdateRef(ctx context.Context, in *UpdateRefRequest, opts ...grpc.CallOption) (*UpdateRefResponse, error) { + out := new(UpdateRefResponse) + err := c.cc.Invoke(ctx, "/rpc.ReferenceService/UpdateRef", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ReferenceServiceServer is the server API for ReferenceService service. +// All implementations must embed UnimplementedReferenceServiceServer +// for forward compatibility +type ReferenceServiceServer interface { + CreateBranch(context.Context, *CreateBranchRequest) (*CreateBranchResponse, error) + GetBranch(context.Context, *GetBranchRequest) (*GetBranchResponse, error) + DeleteBranch(context.Context, *DeleteBranchRequest) (*DeleteBranchResponse, error) + ListBranches(*ListBranchesRequest, ReferenceService_ListBranchesServer) error + ListCommitTags(*ListCommitTagsRequest, ReferenceService_ListCommitTagsServer) error + CreateCommitTag(context.Context, *CreateCommitTagRequest) (*CreateCommitTagResponse, error) + DeleteTag(context.Context, *DeleteTagRequest) (*UpdateRefResponse, error) + GetRef(context.Context, *GetRefRequest) (*GetRefResponse, error) + UpdateRef(context.Context, *UpdateRefRequest) (*UpdateRefResponse, error) + mustEmbedUnimplementedReferenceServiceServer() +} + +// UnimplementedReferenceServiceServer must be embedded to have forward compatible implementations. 
+type UnimplementedReferenceServiceServer struct { +} + +func (UnimplementedReferenceServiceServer) CreateBranch(context.Context, *CreateBranchRequest) (*CreateBranchResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method CreateBranch not implemented") +} +func (UnimplementedReferenceServiceServer) GetBranch(context.Context, *GetBranchRequest) (*GetBranchResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetBranch not implemented") +} +func (UnimplementedReferenceServiceServer) DeleteBranch(context.Context, *DeleteBranchRequest) (*DeleteBranchResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method DeleteBranch not implemented") +} +func (UnimplementedReferenceServiceServer) ListBranches(*ListBranchesRequest, ReferenceService_ListBranchesServer) error { + return status.Errorf(codes.Unimplemented, "method ListBranches not implemented") +} +func (UnimplementedReferenceServiceServer) ListCommitTags(*ListCommitTagsRequest, ReferenceService_ListCommitTagsServer) error { + return status.Errorf(codes.Unimplemented, "method ListCommitTags not implemented") +} +func (UnimplementedReferenceServiceServer) CreateCommitTag(context.Context, *CreateCommitTagRequest) (*CreateCommitTagResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method CreateCommitTag not implemented") +} +func (UnimplementedReferenceServiceServer) DeleteTag(context.Context, *DeleteTagRequest) (*UpdateRefResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method DeleteTag not implemented") +} +func (UnimplementedReferenceServiceServer) GetRef(context.Context, *GetRefRequest) (*GetRefResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetRef not implemented") +} +func (UnimplementedReferenceServiceServer) UpdateRef(context.Context, *UpdateRefRequest) (*UpdateRefResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method UpdateRef not implemented") +} +func (UnimplementedReferenceServiceServer) mustEmbedUnimplementedReferenceServiceServer() {} + +// UnsafeReferenceServiceServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to ReferenceServiceServer will +// result in compilation errors. 
+type UnsafeReferenceServiceServer interface { + mustEmbedUnimplementedReferenceServiceServer() +} + +func RegisterReferenceServiceServer(s grpc.ServiceRegistrar, srv ReferenceServiceServer) { + s.RegisterService(&ReferenceService_ServiceDesc, srv) +} + +func _ReferenceService_CreateBranch_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateBranchRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ReferenceServiceServer).CreateBranch(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/rpc.ReferenceService/CreateBranch", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ReferenceServiceServer).CreateBranch(ctx, req.(*CreateBranchRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ReferenceService_GetBranch_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetBranchRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ReferenceServiceServer).GetBranch(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/rpc.ReferenceService/GetBranch", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ReferenceServiceServer).GetBranch(ctx, req.(*GetBranchRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ReferenceService_DeleteBranch_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteBranchRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ReferenceServiceServer).DeleteBranch(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/rpc.ReferenceService/DeleteBranch", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ReferenceServiceServer).DeleteBranch(ctx, req.(*DeleteBranchRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ReferenceService_ListBranches_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(ListBranchesRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(ReferenceServiceServer).ListBranches(m, &referenceServiceListBranchesServer{stream}) +} + +type ReferenceService_ListBranchesServer interface { + Send(*ListBranchesResponse) error + grpc.ServerStream +} + +type referenceServiceListBranchesServer struct { + grpc.ServerStream +} + +func (x *referenceServiceListBranchesServer) Send(m *ListBranchesResponse) error { + return x.ServerStream.SendMsg(m) +} + +func _ReferenceService_ListCommitTags_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(ListCommitTagsRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(ReferenceServiceServer).ListCommitTags(m, &referenceServiceListCommitTagsServer{stream}) +} + +type ReferenceService_ListCommitTagsServer interface { + Send(*ListCommitTagsResponse) error + grpc.ServerStream +} + +type referenceServiceListCommitTagsServer struct { + grpc.ServerStream +} + +func (x *referenceServiceListCommitTagsServer) Send(m *ListCommitTagsResponse) error { + return x.ServerStream.SendMsg(m) +} + +func _ReferenceService_CreateCommitTag_Handler(srv 
interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateCommitTagRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ReferenceServiceServer).CreateCommitTag(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/rpc.ReferenceService/CreateCommitTag", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ReferenceServiceServer).CreateCommitTag(ctx, req.(*CreateCommitTagRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ReferenceService_DeleteTag_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteTagRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ReferenceServiceServer).DeleteTag(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/rpc.ReferenceService/DeleteTag", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ReferenceServiceServer).DeleteTag(ctx, req.(*DeleteTagRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ReferenceService_GetRef_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetRefRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ReferenceServiceServer).GetRef(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/rpc.ReferenceService/GetRef", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ReferenceServiceServer).GetRef(ctx, req.(*GetRefRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ReferenceService_UpdateRef_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateRefRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ReferenceServiceServer).UpdateRef(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/rpc.ReferenceService/UpdateRef", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ReferenceServiceServer).UpdateRef(ctx, req.(*UpdateRefRequest)) + } + return interceptor(ctx, in, info, handler) +} + +// ReferenceService_ServiceDesc is the grpc.ServiceDesc for ReferenceService service. 
+// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var ReferenceService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "rpc.ReferenceService", + HandlerType: (*ReferenceServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateBranch", + Handler: _ReferenceService_CreateBranch_Handler, + }, + { + MethodName: "GetBranch", + Handler: _ReferenceService_GetBranch_Handler, + }, + { + MethodName: "DeleteBranch", + Handler: _ReferenceService_DeleteBranch_Handler, + }, + { + MethodName: "CreateCommitTag", + Handler: _ReferenceService_CreateCommitTag_Handler, + }, + { + MethodName: "DeleteTag", + Handler: _ReferenceService_DeleteTag_Handler, + }, + { + MethodName: "GetRef", + Handler: _ReferenceService_GetRef_Handler, + }, + { + MethodName: "UpdateRef", + Handler: _ReferenceService_UpdateRef_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "ListBranches", + Handler: _ReferenceService_ListBranches_Handler, + ServerStreams: true, + }, + { + StreamName: "ListCommitTags", + Handler: _ReferenceService_ListCommitTags_Handler, + ServerStreams: true, + }, + }, + Metadata: "ref.proto", +} diff --git a/gitrpc/rpc/repo.pb.go b/gitrpc/rpc/repo.pb.go new file mode 100644 index 0000000000..f847605bb5 --- /dev/null +++ b/gitrpc/rpc/repo.pb.go @@ -0,0 +1,3511 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.28.1 +// protoc v3.21.11 +// source: repo.proto + +package rpc + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type TreeNodeType int32 + +const ( + TreeNodeType_TreeNodeTypeTree TreeNodeType = 0 + TreeNodeType_TreeNodeTypeBlob TreeNodeType = 1 + TreeNodeType_TreeNodeTypeCommit TreeNodeType = 2 +) + +// Enum value maps for TreeNodeType. +var ( + TreeNodeType_name = map[int32]string{ + 0: "TreeNodeTypeTree", + 1: "TreeNodeTypeBlob", + 2: "TreeNodeTypeCommit", + } + TreeNodeType_value = map[string]int32{ + "TreeNodeTypeTree": 0, + "TreeNodeTypeBlob": 1, + "TreeNodeTypeCommit": 2, + } +) + +func (x TreeNodeType) Enum() *TreeNodeType { + p := new(TreeNodeType) + *p = x + return p +} + +func (x TreeNodeType) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (TreeNodeType) Descriptor() protoreflect.EnumDescriptor { + return file_repo_proto_enumTypes[0].Descriptor() +} + +func (TreeNodeType) Type() protoreflect.EnumType { + return &file_repo_proto_enumTypes[0] +} + +func (x TreeNodeType) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use TreeNodeType.Descriptor instead. +func (TreeNodeType) EnumDescriptor() ([]byte, []int) { + return file_repo_proto_rawDescGZIP(), []int{0} +} + +type TreeNodeMode int32 + +const ( + TreeNodeMode_TreeNodeModeFile TreeNodeMode = 0 + TreeNodeMode_TreeNodeModeSymlink TreeNodeMode = 1 + TreeNodeMode_TreeNodeModeExec TreeNodeMode = 2 + TreeNodeMode_TreeNodeModeTree TreeNodeMode = 3 + TreeNodeMode_TreeNodeModeCommit TreeNodeMode = 4 +) + +// Enum value maps for TreeNodeMode. 
+var ( + TreeNodeMode_name = map[int32]string{ + 0: "TreeNodeModeFile", + 1: "TreeNodeModeSymlink", + 2: "TreeNodeModeExec", + 3: "TreeNodeModeTree", + 4: "TreeNodeModeCommit", + } + TreeNodeMode_value = map[string]int32{ + "TreeNodeModeFile": 0, + "TreeNodeModeSymlink": 1, + "TreeNodeModeExec": 2, + "TreeNodeModeTree": 3, + "TreeNodeModeCommit": 4, + } +) + +func (x TreeNodeMode) Enum() *TreeNodeMode { + p := new(TreeNodeMode) + *p = x + return p +} + +func (x TreeNodeMode) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (TreeNodeMode) Descriptor() protoreflect.EnumDescriptor { + return file_repo_proto_enumTypes[1].Descriptor() +} + +func (TreeNodeMode) Type() protoreflect.EnumType { + return &file_repo_proto_enumTypes[1] +} + +func (x TreeNodeMode) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use TreeNodeMode.Descriptor instead. +func (TreeNodeMode) EnumDescriptor() ([]byte, []int) { + return file_repo_proto_rawDescGZIP(), []int{1} +} + +type HashType int32 + +const ( + HashType_HashTypeSHA256 HashType = 0 +) + +// Enum value maps for HashType. +var ( + HashType_name = map[int32]string{ + 0: "HashTypeSHA256", + } + HashType_value = map[string]int32{ + "HashTypeSHA256": 0, + } +) + +func (x HashType) Enum() *HashType { + p := new(HashType) + *p = x + return p +} + +func (x HashType) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (HashType) Descriptor() protoreflect.EnumDescriptor { + return file_repo_proto_enumTypes[2].Descriptor() +} + +func (HashType) Type() protoreflect.EnumType { + return &file_repo_proto_enumTypes[2] +} + +func (x HashType) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use HashType.Descriptor instead. +func (HashType) EnumDescriptor() ([]byte, []int) { + return file_repo_proto_rawDescGZIP(), []int{2} +} + +type HashAggregationType int32 + +const ( + HashAggregationType_HashAggregationTypeXOR HashAggregationType = 0 +) + +// Enum value maps for HashAggregationType. +var ( + HashAggregationType_name = map[int32]string{ + 0: "HashAggregationTypeXOR", + } + HashAggregationType_value = map[string]int32{ + "HashAggregationTypeXOR": 0, + } +) + +func (x HashAggregationType) Enum() *HashAggregationType { + p := new(HashAggregationType) + *p = x + return p +} + +func (x HashAggregationType) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (HashAggregationType) Descriptor() protoreflect.EnumDescriptor { + return file_repo_proto_enumTypes[3].Descriptor() +} + +func (HashAggregationType) Type() protoreflect.EnumType { + return &file_repo_proto_enumTypes[3] +} + +func (x HashAggregationType) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use HashAggregationType.Descriptor instead. 
+func (HashAggregationType) EnumDescriptor() ([]byte, []int) { + return file_repo_proto_rawDescGZIP(), []int{3} +} + +type CreateRepositoryRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Types that are assignable to Data: + // *CreateRepositoryRequest_Header + // *CreateRepositoryRequest_File + Data isCreateRepositoryRequest_Data `protobuf_oneof:"data"` +} + +func (x *CreateRepositoryRequest) Reset() { + *x = CreateRepositoryRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_repo_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CreateRepositoryRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CreateRepositoryRequest) ProtoMessage() {} + +func (x *CreateRepositoryRequest) ProtoReflect() protoreflect.Message { + mi := &file_repo_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CreateRepositoryRequest.ProtoReflect.Descriptor instead. +func (*CreateRepositoryRequest) Descriptor() ([]byte, []int) { + return file_repo_proto_rawDescGZIP(), []int{0} +} + +func (m *CreateRepositoryRequest) GetData() isCreateRepositoryRequest_Data { + if m != nil { + return m.Data + } + return nil +} + +func (x *CreateRepositoryRequest) GetHeader() *CreateRepositoryRequestHeader { + if x, ok := x.GetData().(*CreateRepositoryRequest_Header); ok { + return x.Header + } + return nil +} + +func (x *CreateRepositoryRequest) GetFile() *FileUpload { + if x, ok := x.GetData().(*CreateRepositoryRequest_File); ok { + return x.File + } + return nil +} + +type isCreateRepositoryRequest_Data interface { + isCreateRepositoryRequest_Data() +} + +type CreateRepositoryRequest_Header struct { + Header *CreateRepositoryRequestHeader `protobuf:"bytes,1,opt,name=header,proto3,oneof"` +} + +type CreateRepositoryRequest_File struct { + File *FileUpload `protobuf:"bytes,2,opt,name=file,proto3,oneof"` +} + +func (*CreateRepositoryRequest_Header) isCreateRepositoryRequest_Data() {} + +func (*CreateRepositoryRequest_File) isCreateRepositoryRequest_Data() {} + +type CreateRepositoryRequestHeader struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Base *WriteRequest `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"` + DefaultBranch string `protobuf:"bytes,2,opt,name=default_branch,json=defaultBranch,proto3" json:"default_branch,omitempty"` + Author *Identity `protobuf:"bytes,3,opt,name=author,proto3" json:"author,omitempty"` + AuthorDate int64 `protobuf:"varint,4,opt,name=authorDate,proto3" json:"authorDate,omitempty"` + Committer *Identity `protobuf:"bytes,5,opt,name=committer,proto3" json:"committer,omitempty"` + CommitterDate int64 `protobuf:"varint,6,opt,name=committerDate,proto3" json:"committerDate,omitempty"` +} + +func (x *CreateRepositoryRequestHeader) Reset() { + *x = CreateRepositoryRequestHeader{} + if protoimpl.UnsafeEnabled { + mi := &file_repo_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CreateRepositoryRequestHeader) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CreateRepositoryRequestHeader) ProtoMessage() {} + +func (x *CreateRepositoryRequestHeader) ProtoReflect() 
protoreflect.Message { + mi := &file_repo_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CreateRepositoryRequestHeader.ProtoReflect.Descriptor instead. +func (*CreateRepositoryRequestHeader) Descriptor() ([]byte, []int) { + return file_repo_proto_rawDescGZIP(), []int{1} +} + +func (x *CreateRepositoryRequestHeader) GetBase() *WriteRequest { + if x != nil { + return x.Base + } + return nil +} + +func (x *CreateRepositoryRequestHeader) GetDefaultBranch() string { + if x != nil { + return x.DefaultBranch + } + return "" +} + +func (x *CreateRepositoryRequestHeader) GetAuthor() *Identity { + if x != nil { + return x.Author + } + return nil +} + +func (x *CreateRepositoryRequestHeader) GetAuthorDate() int64 { + if x != nil { + return x.AuthorDate + } + return 0 +} + +func (x *CreateRepositoryRequestHeader) GetCommitter() *Identity { + if x != nil { + return x.Committer + } + return nil +} + +func (x *CreateRepositoryRequestHeader) GetCommitterDate() int64 { + if x != nil { + return x.CommitterDate + } + return 0 +} + +type CreateRepositoryResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *CreateRepositoryResponse) Reset() { + *x = CreateRepositoryResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_repo_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CreateRepositoryResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CreateRepositoryResponse) ProtoMessage() {} + +func (x *CreateRepositoryResponse) ProtoReflect() protoreflect.Message { + mi := &file_repo_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CreateRepositoryResponse.ProtoReflect.Descriptor instead. 
+func (*CreateRepositoryResponse) Descriptor() ([]byte, []int) { + return file_repo_proto_rawDescGZIP(), []int{2} +} + +type GetTreeNodeRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Base *ReadRequest `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"` + GitRef string `protobuf:"bytes,2,opt,name=git_ref,json=gitRef,proto3" json:"git_ref,omitempty"` + Path string `protobuf:"bytes,3,opt,name=path,proto3" json:"path,omitempty"` + IncludeLatestCommit bool `protobuf:"varint,4,opt,name=include_latest_commit,json=includeLatestCommit,proto3" json:"include_latest_commit,omitempty"` +} + +func (x *GetTreeNodeRequest) Reset() { + *x = GetTreeNodeRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_repo_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetTreeNodeRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetTreeNodeRequest) ProtoMessage() {} + +func (x *GetTreeNodeRequest) ProtoReflect() protoreflect.Message { + mi := &file_repo_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetTreeNodeRequest.ProtoReflect.Descriptor instead. +func (*GetTreeNodeRequest) Descriptor() ([]byte, []int) { + return file_repo_proto_rawDescGZIP(), []int{3} +} + +func (x *GetTreeNodeRequest) GetBase() *ReadRequest { + if x != nil { + return x.Base + } + return nil +} + +func (x *GetTreeNodeRequest) GetGitRef() string { + if x != nil { + return x.GitRef + } + return "" +} + +func (x *GetTreeNodeRequest) GetPath() string { + if x != nil { + return x.Path + } + return "" +} + +func (x *GetTreeNodeRequest) GetIncludeLatestCommit() bool { + if x != nil { + return x.IncludeLatestCommit + } + return false +} + +type GetTreeNodeResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Node *TreeNode `protobuf:"bytes,1,opt,name=node,proto3" json:"node,omitempty"` + Commit *Commit `protobuf:"bytes,2,opt,name=commit,proto3" json:"commit,omitempty"` +} + +func (x *GetTreeNodeResponse) Reset() { + *x = GetTreeNodeResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_repo_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetTreeNodeResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetTreeNodeResponse) ProtoMessage() {} + +func (x *GetTreeNodeResponse) ProtoReflect() protoreflect.Message { + mi := &file_repo_proto_msgTypes[4] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetTreeNodeResponse.ProtoReflect.Descriptor instead. 
+func (*GetTreeNodeResponse) Descriptor() ([]byte, []int) { + return file_repo_proto_rawDescGZIP(), []int{4} +} + +func (x *GetTreeNodeResponse) GetNode() *TreeNode { + if x != nil { + return x.Node + } + return nil +} + +func (x *GetTreeNodeResponse) GetCommit() *Commit { + if x != nil { + return x.Commit + } + return nil +} + +type ListTreeNodesRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Base *ReadRequest `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"` + GitRef string `protobuf:"bytes,2,opt,name=git_ref,json=gitRef,proto3" json:"git_ref,omitempty"` + Path string `protobuf:"bytes,3,opt,name=path,proto3" json:"path,omitempty"` +} + +func (x *ListTreeNodesRequest) Reset() { + *x = ListTreeNodesRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_repo_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ListTreeNodesRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListTreeNodesRequest) ProtoMessage() {} + +func (x *ListTreeNodesRequest) ProtoReflect() protoreflect.Message { + mi := &file_repo_proto_msgTypes[5] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListTreeNodesRequest.ProtoReflect.Descriptor instead. +func (*ListTreeNodesRequest) Descriptor() ([]byte, []int) { + return file_repo_proto_rawDescGZIP(), []int{5} +} + +func (x *ListTreeNodesRequest) GetBase() *ReadRequest { + if x != nil { + return x.Base + } + return nil +} + +func (x *ListTreeNodesRequest) GetGitRef() string { + if x != nil { + return x.GitRef + } + return "" +} + +func (x *ListTreeNodesRequest) GetPath() string { + if x != nil { + return x.Path + } + return "" +} + +type ListTreeNodesResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Node *TreeNode `protobuf:"bytes,1,opt,name=node,proto3" json:"node,omitempty"` +} + +func (x *ListTreeNodesResponse) Reset() { + *x = ListTreeNodesResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_repo_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ListTreeNodesResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListTreeNodesResponse) ProtoMessage() {} + +func (x *ListTreeNodesResponse) ProtoReflect() protoreflect.Message { + mi := &file_repo_proto_msgTypes[6] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListTreeNodesResponse.ProtoReflect.Descriptor instead. 
+func (*ListTreeNodesResponse) Descriptor() ([]byte, []int) { + return file_repo_proto_rawDescGZIP(), []int{6} +} + +func (x *ListTreeNodesResponse) GetNode() *TreeNode { + if x != nil { + return x.Node + } + return nil +} + +type TreeNode struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Type TreeNodeType `protobuf:"varint,1,opt,name=type,proto3,enum=rpc.TreeNodeType" json:"type,omitempty"` + Mode TreeNodeMode `protobuf:"varint,2,opt,name=mode,proto3,enum=rpc.TreeNodeMode" json:"mode,omitempty"` + Sha string `protobuf:"bytes,3,opt,name=sha,proto3" json:"sha,omitempty"` + Name string `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` + Path string `protobuf:"bytes,5,opt,name=path,proto3" json:"path,omitempty"` +} + +func (x *TreeNode) Reset() { + *x = TreeNode{} + if protoimpl.UnsafeEnabled { + mi := &file_repo_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *TreeNode) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*TreeNode) ProtoMessage() {} + +func (x *TreeNode) ProtoReflect() protoreflect.Message { + mi := &file_repo_proto_msgTypes[7] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use TreeNode.ProtoReflect.Descriptor instead. +func (*TreeNode) Descriptor() ([]byte, []int) { + return file_repo_proto_rawDescGZIP(), []int{7} +} + +func (x *TreeNode) GetType() TreeNodeType { + if x != nil { + return x.Type + } + return TreeNodeType_TreeNodeTypeTree +} + +func (x *TreeNode) GetMode() TreeNodeMode { + if x != nil { + return x.Mode + } + return TreeNodeMode_TreeNodeModeFile +} + +func (x *TreeNode) GetSha() string { + if x != nil { + return x.Sha + } + return "" +} + +func (x *TreeNode) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *TreeNode) GetPath() string { + if x != nil { + return x.Path + } + return "" +} + +type PathsDetailsRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Base *ReadRequest `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"` + GitRef string `protobuf:"bytes,2,opt,name=git_ref,json=gitRef,proto3" json:"git_ref,omitempty"` + Paths []string `protobuf:"bytes,3,rep,name=paths,proto3" json:"paths,omitempty"` +} + +func (x *PathsDetailsRequest) Reset() { + *x = PathsDetailsRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_repo_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *PathsDetailsRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PathsDetailsRequest) ProtoMessage() {} + +func (x *PathsDetailsRequest) ProtoReflect() protoreflect.Message { + mi := &file_repo_proto_msgTypes[8] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PathsDetailsRequest.ProtoReflect.Descriptor instead. 
+func (*PathsDetailsRequest) Descriptor() ([]byte, []int) { + return file_repo_proto_rawDescGZIP(), []int{8} +} + +func (x *PathsDetailsRequest) GetBase() *ReadRequest { + if x != nil { + return x.Base + } + return nil +} + +func (x *PathsDetailsRequest) GetGitRef() string { + if x != nil { + return x.GitRef + } + return "" +} + +func (x *PathsDetailsRequest) GetPaths() []string { + if x != nil { + return x.Paths + } + return nil +} + +type PathsDetailsResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + PathDetails []*PathDetails `protobuf:"bytes,1,rep,name=path_details,json=pathDetails,proto3" json:"path_details,omitempty"` +} + +func (x *PathsDetailsResponse) Reset() { + *x = PathsDetailsResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_repo_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *PathsDetailsResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PathsDetailsResponse) ProtoMessage() {} + +func (x *PathsDetailsResponse) ProtoReflect() protoreflect.Message { + mi := &file_repo_proto_msgTypes[9] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PathsDetailsResponse.ProtoReflect.Descriptor instead. +func (*PathsDetailsResponse) Descriptor() ([]byte, []int) { + return file_repo_proto_rawDescGZIP(), []int{9} +} + +func (x *PathsDetailsResponse) GetPathDetails() []*PathDetails { + if x != nil { + return x.PathDetails + } + return nil +} + +type PathDetails struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Path string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"` + LastCommit *Commit `protobuf:"bytes,2,opt,name=last_commit,json=lastCommit,proto3" json:"last_commit,omitempty"` + Size int64 `protobuf:"varint,3,opt,name=size,proto3" json:"size,omitempty"` +} + +func (x *PathDetails) Reset() { + *x = PathDetails{} + if protoimpl.UnsafeEnabled { + mi := &file_repo_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *PathDetails) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PathDetails) ProtoMessage() {} + +func (x *PathDetails) ProtoReflect() protoreflect.Message { + mi := &file_repo_proto_msgTypes[10] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PathDetails.ProtoReflect.Descriptor instead. 
+func (*PathDetails) Descriptor() ([]byte, []int) { + return file_repo_proto_rawDescGZIP(), []int{10} +} + +func (x *PathDetails) GetPath() string { + if x != nil { + return x.Path + } + return "" +} + +func (x *PathDetails) GetLastCommit() *Commit { + if x != nil { + return x.LastCommit + } + return nil +} + +func (x *PathDetails) GetSize() int64 { + if x != nil { + return x.Size + } + return 0 +} + +type GetCommitRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Base *ReadRequest `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"` + Sha string `protobuf:"bytes,2,opt,name=sha,proto3" json:"sha,omitempty"` +} + +func (x *GetCommitRequest) Reset() { + *x = GetCommitRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_repo_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetCommitRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetCommitRequest) ProtoMessage() {} + +func (x *GetCommitRequest) ProtoReflect() protoreflect.Message { + mi := &file_repo_proto_msgTypes[11] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetCommitRequest.ProtoReflect.Descriptor instead. +func (*GetCommitRequest) Descriptor() ([]byte, []int) { + return file_repo_proto_rawDescGZIP(), []int{11} +} + +func (x *GetCommitRequest) GetBase() *ReadRequest { + if x != nil { + return x.Base + } + return nil +} + +func (x *GetCommitRequest) GetSha() string { + if x != nil { + return x.Sha + } + return "" +} + +type GetCommitResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Commit *Commit `protobuf:"bytes,1,opt,name=commit,proto3" json:"commit,omitempty"` +} + +func (x *GetCommitResponse) Reset() { + *x = GetCommitResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_repo_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetCommitResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetCommitResponse) ProtoMessage() {} + +func (x *GetCommitResponse) ProtoReflect() protoreflect.Message { + mi := &file_repo_proto_msgTypes[12] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetCommitResponse.ProtoReflect.Descriptor instead. 
+func (*GetCommitResponse) Descriptor() ([]byte, []int) { + return file_repo_proto_rawDescGZIP(), []int{12} +} + +func (x *GetCommitResponse) GetCommit() *Commit { + if x != nil { + return x.Commit + } + return nil +} + +type ListCommitsRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Base *ReadRequest `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"` + GitRef string `protobuf:"bytes,2,opt,name=git_ref,json=gitRef,proto3" json:"git_ref,omitempty"` + After string `protobuf:"bytes,3,opt,name=after,proto3" json:"after,omitempty"` + Page int32 `protobuf:"varint,4,opt,name=page,proto3" json:"page,omitempty"` + Limit int32 `protobuf:"varint,5,opt,name=limit,proto3" json:"limit,omitempty"` + Path string `protobuf:"bytes,6,opt,name=path,proto3" json:"path,omitempty"` + Since int64 `protobuf:"varint,7,opt,name=since,proto3" json:"since,omitempty"` + Until int64 `protobuf:"varint,8,opt,name=until,proto3" json:"until,omitempty"` + Committer string `protobuf:"bytes,9,opt,name=committer,proto3" json:"committer,omitempty"` +} + +func (x *ListCommitsRequest) Reset() { + *x = ListCommitsRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_repo_proto_msgTypes[13] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ListCommitsRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListCommitsRequest) ProtoMessage() {} + +func (x *ListCommitsRequest) ProtoReflect() protoreflect.Message { + mi := &file_repo_proto_msgTypes[13] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListCommitsRequest.ProtoReflect.Descriptor instead. 
+func (*ListCommitsRequest) Descriptor() ([]byte, []int) { + return file_repo_proto_rawDescGZIP(), []int{13} +} + +func (x *ListCommitsRequest) GetBase() *ReadRequest { + if x != nil { + return x.Base + } + return nil +} + +func (x *ListCommitsRequest) GetGitRef() string { + if x != nil { + return x.GitRef + } + return "" +} + +func (x *ListCommitsRequest) GetAfter() string { + if x != nil { + return x.After + } + return "" +} + +func (x *ListCommitsRequest) GetPage() int32 { + if x != nil { + return x.Page + } + return 0 +} + +func (x *ListCommitsRequest) GetLimit() int32 { + if x != nil { + return x.Limit + } + return 0 +} + +func (x *ListCommitsRequest) GetPath() string { + if x != nil { + return x.Path + } + return "" +} + +func (x *ListCommitsRequest) GetSince() int64 { + if x != nil { + return x.Since + } + return 0 +} + +func (x *ListCommitsRequest) GetUntil() int64 { + if x != nil { + return x.Until + } + return 0 +} + +func (x *ListCommitsRequest) GetCommitter() string { + if x != nil { + return x.Committer + } + return "" +} + +type ListCommitsResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Commit *Commit `protobuf:"bytes,1,opt,name=commit,proto3" json:"commit,omitempty"` + RenameDetails []*RenameDetails `protobuf:"bytes,2,rep,name=rename_details,json=renameDetails,proto3" json:"rename_details,omitempty"` +} + +func (x *ListCommitsResponse) Reset() { + *x = ListCommitsResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_repo_proto_msgTypes[14] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ListCommitsResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListCommitsResponse) ProtoMessage() {} + +func (x *ListCommitsResponse) ProtoReflect() protoreflect.Message { + mi := &file_repo_proto_msgTypes[14] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListCommitsResponse.ProtoReflect.Descriptor instead. 
+func (*ListCommitsResponse) Descriptor() ([]byte, []int) { + return file_repo_proto_rawDescGZIP(), []int{14} +} + +func (x *ListCommitsResponse) GetCommit() *Commit { + if x != nil { + return x.Commit + } + return nil +} + +func (x *ListCommitsResponse) GetRenameDetails() []*RenameDetails { + if x != nil { + return x.RenameDetails + } + return nil +} + +type RenameDetails struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + OldPath string `protobuf:"bytes,1,opt,name=old_path,json=oldPath,proto3" json:"old_path,omitempty"` + NewPath string `protobuf:"bytes,2,opt,name=new_path,json=newPath,proto3" json:"new_path,omitempty"` + CommitShaBefore string `protobuf:"bytes,3,opt,name=commit_sha_before,json=commitShaBefore,proto3" json:"commit_sha_before,omitempty"` + CommitShaAfter string `protobuf:"bytes,4,opt,name=commit_sha_after,json=commitShaAfter,proto3" json:"commit_sha_after,omitempty"` +} + +func (x *RenameDetails) Reset() { + *x = RenameDetails{} + if protoimpl.UnsafeEnabled { + mi := &file_repo_proto_msgTypes[15] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *RenameDetails) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RenameDetails) ProtoMessage() {} + +func (x *RenameDetails) ProtoReflect() protoreflect.Message { + mi := &file_repo_proto_msgTypes[15] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RenameDetails.ProtoReflect.Descriptor instead. +func (*RenameDetails) Descriptor() ([]byte, []int) { + return file_repo_proto_rawDescGZIP(), []int{15} +} + +func (x *RenameDetails) GetOldPath() string { + if x != nil { + return x.OldPath + } + return "" +} + +func (x *RenameDetails) GetNewPath() string { + if x != nil { + return x.NewPath + } + return "" +} + +func (x *RenameDetails) GetCommitShaBefore() string { + if x != nil { + return x.CommitShaBefore + } + return "" +} + +func (x *RenameDetails) GetCommitShaAfter() string { + if x != nil { + return x.CommitShaAfter + } + return "" +} + +type GetBlobRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Base *ReadRequest `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"` + Sha string `protobuf:"bytes,2,opt,name=sha,proto3" json:"sha,omitempty"` + SizeLimit int64 `protobuf:"varint,3,opt,name=size_limit,json=sizeLimit,proto3" json:"size_limit,omitempty"` +} + +func (x *GetBlobRequest) Reset() { + *x = GetBlobRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_repo_proto_msgTypes[16] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetBlobRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetBlobRequest) ProtoMessage() {} + +func (x *GetBlobRequest) ProtoReflect() protoreflect.Message { + mi := &file_repo_proto_msgTypes[16] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetBlobRequest.ProtoReflect.Descriptor instead. 
+func (*GetBlobRequest) Descriptor() ([]byte, []int) { + return file_repo_proto_rawDescGZIP(), []int{16} +} + +func (x *GetBlobRequest) GetBase() *ReadRequest { + if x != nil { + return x.Base + } + return nil +} + +func (x *GetBlobRequest) GetSha() string { + if x != nil { + return x.Sha + } + return "" +} + +func (x *GetBlobRequest) GetSizeLimit() int64 { + if x != nil { + return x.SizeLimit + } + return 0 +} + +type GetBlobResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Types that are assignable to Data: + // *GetBlobResponse_Header + // *GetBlobResponse_Content + Data isGetBlobResponse_Data `protobuf_oneof:"data"` +} + +func (x *GetBlobResponse) Reset() { + *x = GetBlobResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_repo_proto_msgTypes[17] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetBlobResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetBlobResponse) ProtoMessage() {} + +func (x *GetBlobResponse) ProtoReflect() protoreflect.Message { + mi := &file_repo_proto_msgTypes[17] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetBlobResponse.ProtoReflect.Descriptor instead. +func (*GetBlobResponse) Descriptor() ([]byte, []int) { + return file_repo_proto_rawDescGZIP(), []int{17} +} + +func (m *GetBlobResponse) GetData() isGetBlobResponse_Data { + if m != nil { + return m.Data + } + return nil +} + +func (x *GetBlobResponse) GetHeader() *GetBlobResponseHeader { + if x, ok := x.GetData().(*GetBlobResponse_Header); ok { + return x.Header + } + return nil +} + +func (x *GetBlobResponse) GetContent() []byte { + if x, ok := x.GetData().(*GetBlobResponse_Content); ok { + return x.Content + } + return nil +} + +type isGetBlobResponse_Data interface { + isGetBlobResponse_Data() +} + +type GetBlobResponse_Header struct { + Header *GetBlobResponseHeader `protobuf:"bytes,1,opt,name=header,proto3,oneof"` +} + +type GetBlobResponse_Content struct { + Content []byte `protobuf:"bytes,2,opt,name=content,proto3,oneof"` +} + +func (*GetBlobResponse_Header) isGetBlobResponse_Data() {} + +func (*GetBlobResponse_Content) isGetBlobResponse_Data() {} + +type GetBlobResponseHeader struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Sha string `protobuf:"bytes,1,opt,name=sha,proto3" json:"sha,omitempty"` + Size int64 `protobuf:"varint,2,opt,name=size,proto3" json:"size,omitempty"` + ContentSize int64 `protobuf:"varint,3,opt,name=content_size,json=contentSize,proto3" json:"content_size,omitempty"` +} + +func (x *GetBlobResponseHeader) Reset() { + *x = GetBlobResponseHeader{} + if protoimpl.UnsafeEnabled { + mi := &file_repo_proto_msgTypes[18] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetBlobResponseHeader) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetBlobResponseHeader) ProtoMessage() {} + +func (x *GetBlobResponseHeader) ProtoReflect() protoreflect.Message { + mi := &file_repo_proto_msgTypes[18] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// 
Deprecated: Use GetBlobResponseHeader.ProtoReflect.Descriptor instead. +func (*GetBlobResponseHeader) Descriptor() ([]byte, []int) { + return file_repo_proto_rawDescGZIP(), []int{18} +} + +func (x *GetBlobResponseHeader) GetSha() string { + if x != nil { + return x.Sha + } + return "" +} + +func (x *GetBlobResponseHeader) GetSize() int64 { + if x != nil { + return x.Size + } + return 0 +} + +func (x *GetBlobResponseHeader) GetContentSize() int64 { + if x != nil { + return x.ContentSize + } + return 0 +} + +type GetSubmoduleRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Base *ReadRequest `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"` + GitRef string `protobuf:"bytes,2,opt,name=git_ref,json=gitRef,proto3" json:"git_ref,omitempty"` + Path string `protobuf:"bytes,3,opt,name=path,proto3" json:"path,omitempty"` +} + +func (x *GetSubmoduleRequest) Reset() { + *x = GetSubmoduleRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_repo_proto_msgTypes[19] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetSubmoduleRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetSubmoduleRequest) ProtoMessage() {} + +func (x *GetSubmoduleRequest) ProtoReflect() protoreflect.Message { + mi := &file_repo_proto_msgTypes[19] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetSubmoduleRequest.ProtoReflect.Descriptor instead. +func (*GetSubmoduleRequest) Descriptor() ([]byte, []int) { + return file_repo_proto_rawDescGZIP(), []int{19} +} + +func (x *GetSubmoduleRequest) GetBase() *ReadRequest { + if x != nil { + return x.Base + } + return nil +} + +func (x *GetSubmoduleRequest) GetGitRef() string { + if x != nil { + return x.GitRef + } + return "" +} + +func (x *GetSubmoduleRequest) GetPath() string { + if x != nil { + return x.Path + } + return "" +} + +type GetSubmoduleResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Submodule *Submodule `protobuf:"bytes,1,opt,name=submodule,proto3" json:"submodule,omitempty"` +} + +func (x *GetSubmoduleResponse) Reset() { + *x = GetSubmoduleResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_repo_proto_msgTypes[20] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetSubmoduleResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetSubmoduleResponse) ProtoMessage() {} + +func (x *GetSubmoduleResponse) ProtoReflect() protoreflect.Message { + mi := &file_repo_proto_msgTypes[20] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetSubmoduleResponse.ProtoReflect.Descriptor instead. 
+func (*GetSubmoduleResponse) Descriptor() ([]byte, []int) { + return file_repo_proto_rawDescGZIP(), []int{20} +} + +func (x *GetSubmoduleResponse) GetSubmodule() *Submodule { + if x != nil { + return x.Submodule + } + return nil +} + +type Submodule struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Url string `protobuf:"bytes,2,opt,name=url,proto3" json:"url,omitempty"` +} + +func (x *Submodule) Reset() { + *x = Submodule{} + if protoimpl.UnsafeEnabled { + mi := &file_repo_proto_msgTypes[21] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Submodule) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Submodule) ProtoMessage() {} + +func (x *Submodule) ProtoReflect() protoreflect.Message { + mi := &file_repo_proto_msgTypes[21] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Submodule.ProtoReflect.Descriptor instead. +func (*Submodule) Descriptor() ([]byte, []int) { + return file_repo_proto_rawDescGZIP(), []int{21} +} + +func (x *Submodule) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *Submodule) GetUrl() string { + if x != nil { + return x.Url + } + return "" +} + +type GetCommitDivergencesRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Base *ReadRequest `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"` + MaxCount int32 `protobuf:"varint,2,opt,name=max_count,json=maxCount,proto3" json:"max_count,omitempty"` + Requests []*CommitDivergenceRequest `protobuf:"bytes,3,rep,name=requests,proto3" json:"requests,omitempty"` +} + +func (x *GetCommitDivergencesRequest) Reset() { + *x = GetCommitDivergencesRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_repo_proto_msgTypes[22] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetCommitDivergencesRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetCommitDivergencesRequest) ProtoMessage() {} + +func (x *GetCommitDivergencesRequest) ProtoReflect() protoreflect.Message { + mi := &file_repo_proto_msgTypes[22] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetCommitDivergencesRequest.ProtoReflect.Descriptor instead. 
+func (*GetCommitDivergencesRequest) Descriptor() ([]byte, []int) { + return file_repo_proto_rawDescGZIP(), []int{22} +} + +func (x *GetCommitDivergencesRequest) GetBase() *ReadRequest { + if x != nil { + return x.Base + } + return nil +} + +func (x *GetCommitDivergencesRequest) GetMaxCount() int32 { + if x != nil { + return x.MaxCount + } + return 0 +} + +func (x *GetCommitDivergencesRequest) GetRequests() []*CommitDivergenceRequest { + if x != nil { + return x.Requests + } + return nil +} + +type CommitDivergenceRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + From string `protobuf:"bytes,1,opt,name=from,proto3" json:"from,omitempty"` + To string `protobuf:"bytes,2,opt,name=to,proto3" json:"to,omitempty"` +} + +func (x *CommitDivergenceRequest) Reset() { + *x = CommitDivergenceRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_repo_proto_msgTypes[23] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CommitDivergenceRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CommitDivergenceRequest) ProtoMessage() {} + +func (x *CommitDivergenceRequest) ProtoReflect() protoreflect.Message { + mi := &file_repo_proto_msgTypes[23] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CommitDivergenceRequest.ProtoReflect.Descriptor instead. +func (*CommitDivergenceRequest) Descriptor() ([]byte, []int) { + return file_repo_proto_rawDescGZIP(), []int{23} +} + +func (x *CommitDivergenceRequest) GetFrom() string { + if x != nil { + return x.From + } + return "" +} + +func (x *CommitDivergenceRequest) GetTo() string { + if x != nil { + return x.To + } + return "" +} + +type GetCommitDivergencesResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Divergences []*CommitDivergence `protobuf:"bytes,1,rep,name=divergences,proto3" json:"divergences,omitempty"` +} + +func (x *GetCommitDivergencesResponse) Reset() { + *x = GetCommitDivergencesResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_repo_proto_msgTypes[24] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetCommitDivergencesResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetCommitDivergencesResponse) ProtoMessage() {} + +func (x *GetCommitDivergencesResponse) ProtoReflect() protoreflect.Message { + mi := &file_repo_proto_msgTypes[24] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetCommitDivergencesResponse.ProtoReflect.Descriptor instead. 
+func (*GetCommitDivergencesResponse) Descriptor() ([]byte, []int) { + return file_repo_proto_rawDescGZIP(), []int{24} +} + +func (x *GetCommitDivergencesResponse) GetDivergences() []*CommitDivergence { + if x != nil { + return x.Divergences + } + return nil +} + +type CommitDivergence struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Ahead int32 `protobuf:"varint,1,opt,name=ahead,proto3" json:"ahead,omitempty"` + Behind int32 `protobuf:"varint,2,opt,name=behind,proto3" json:"behind,omitempty"` +} + +func (x *CommitDivergence) Reset() { + *x = CommitDivergence{} + if protoimpl.UnsafeEnabled { + mi := &file_repo_proto_msgTypes[25] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CommitDivergence) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CommitDivergence) ProtoMessage() {} + +func (x *CommitDivergence) ProtoReflect() protoreflect.Message { + mi := &file_repo_proto_msgTypes[25] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CommitDivergence.ProtoReflect.Descriptor instead. +func (*CommitDivergence) Descriptor() ([]byte, []int) { + return file_repo_proto_rawDescGZIP(), []int{25} +} + +func (x *CommitDivergence) GetAhead() int32 { + if x != nil { + return x.Ahead + } + return 0 +} + +func (x *CommitDivergence) GetBehind() int32 { + if x != nil { + return x.Behind + } + return 0 +} + +type DeleteRepositoryRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Base *WriteRequest `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"` +} + +func (x *DeleteRepositoryRequest) Reset() { + *x = DeleteRepositoryRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_repo_proto_msgTypes[26] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DeleteRepositoryRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DeleteRepositoryRequest) ProtoMessage() {} + +func (x *DeleteRepositoryRequest) ProtoReflect() protoreflect.Message { + mi := &file_repo_proto_msgTypes[26] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DeleteRepositoryRequest.ProtoReflect.Descriptor instead. 
+func (*DeleteRepositoryRequest) Descriptor() ([]byte, []int) { + return file_repo_proto_rawDescGZIP(), []int{26} +} + +func (x *DeleteRepositoryRequest) GetBase() *WriteRequest { + if x != nil { + return x.Base + } + return nil +} + +type DeleteRepositoryResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *DeleteRepositoryResponse) Reset() { + *x = DeleteRepositoryResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_repo_proto_msgTypes[27] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DeleteRepositoryResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DeleteRepositoryResponse) ProtoMessage() {} + +func (x *DeleteRepositoryResponse) ProtoReflect() protoreflect.Message { + mi := &file_repo_proto_msgTypes[27] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DeleteRepositoryResponse.ProtoReflect.Descriptor instead. +func (*DeleteRepositoryResponse) Descriptor() ([]byte, []int) { + return file_repo_proto_rawDescGZIP(), []int{27} +} + +type SyncRepositoryRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Base *WriteRequest `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"` + Source string `protobuf:"bytes,2,opt,name=source,proto3" json:"source,omitempty"` + CreateIfNotExists bool `protobuf:"varint,3,opt,name=create_if_not_exists,json=createIfNotExists,proto3" json:"create_if_not_exists,omitempty"` +} + +func (x *SyncRepositoryRequest) Reset() { + *x = SyncRepositoryRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_repo_proto_msgTypes[28] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SyncRepositoryRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SyncRepositoryRequest) ProtoMessage() {} + +func (x *SyncRepositoryRequest) ProtoReflect() protoreflect.Message { + mi := &file_repo_proto_msgTypes[28] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SyncRepositoryRequest.ProtoReflect.Descriptor instead. 
+func (*SyncRepositoryRequest) Descriptor() ([]byte, []int) { + return file_repo_proto_rawDescGZIP(), []int{28} +} + +func (x *SyncRepositoryRequest) GetBase() *WriteRequest { + if x != nil { + return x.Base + } + return nil +} + +func (x *SyncRepositoryRequest) GetSource() string { + if x != nil { + return x.Source + } + return "" +} + +func (x *SyncRepositoryRequest) GetCreateIfNotExists() bool { + if x != nil { + return x.CreateIfNotExists + } + return false +} + +type SyncRepositoryResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + DefaultBranch string `protobuf:"bytes,1,opt,name=default_branch,json=defaultBranch,proto3" json:"default_branch,omitempty"` +} + +func (x *SyncRepositoryResponse) Reset() { + *x = SyncRepositoryResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_repo_proto_msgTypes[29] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SyncRepositoryResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SyncRepositoryResponse) ProtoMessage() {} + +func (x *SyncRepositoryResponse) ProtoReflect() protoreflect.Message { + mi := &file_repo_proto_msgTypes[29] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SyncRepositoryResponse.ProtoReflect.Descriptor instead. +func (*SyncRepositoryResponse) Descriptor() ([]byte, []int) { + return file_repo_proto_rawDescGZIP(), []int{29} +} + +func (x *SyncRepositoryResponse) GetDefaultBranch() string { + if x != nil { + return x.DefaultBranch + } + return "" +} + +type HashRepositoryRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Base *ReadRequest `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"` + HashType HashType `protobuf:"varint,2,opt,name=hash_type,json=hashType,proto3,enum=rpc.HashType" json:"hash_type,omitempty"` + AggregationType HashAggregationType `protobuf:"varint,3,opt,name=aggregation_type,json=aggregationType,proto3,enum=rpc.HashAggregationType" json:"aggregation_type,omitempty"` +} + +func (x *HashRepositoryRequest) Reset() { + *x = HashRepositoryRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_repo_proto_msgTypes[30] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *HashRepositoryRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*HashRepositoryRequest) ProtoMessage() {} + +func (x *HashRepositoryRequest) ProtoReflect() protoreflect.Message { + mi := &file_repo_proto_msgTypes[30] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use HashRepositoryRequest.ProtoReflect.Descriptor instead. 
+func (*HashRepositoryRequest) Descriptor() ([]byte, []int) { + return file_repo_proto_rawDescGZIP(), []int{30} +} + +func (x *HashRepositoryRequest) GetBase() *ReadRequest { + if x != nil { + return x.Base + } + return nil +} + +func (x *HashRepositoryRequest) GetHashType() HashType { + if x != nil { + return x.HashType + } + return HashType_HashTypeSHA256 +} + +func (x *HashRepositoryRequest) GetAggregationType() HashAggregationType { + if x != nil { + return x.AggregationType + } + return HashAggregationType_HashAggregationTypeXOR +} + +type HashRepositoryResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Hash []byte `protobuf:"bytes,1,opt,name=hash,proto3" json:"hash,omitempty"` +} + +func (x *HashRepositoryResponse) Reset() { + *x = HashRepositoryResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_repo_proto_msgTypes[31] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *HashRepositoryResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*HashRepositoryResponse) ProtoMessage() {} + +func (x *HashRepositoryResponse) ProtoReflect() protoreflect.Message { + mi := &file_repo_proto_msgTypes[31] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use HashRepositoryResponse.ProtoReflect.Descriptor instead. +func (*HashRepositoryResponse) Descriptor() ([]byte, []int) { + return file_repo_proto_rawDescGZIP(), []int{31} +} + +func (x *HashRepositoryResponse) GetHash() []byte { + if x != nil { + return x.Hash + } + return nil +} + +type MergeBaseRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Base *ReadRequest `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"` + Ref1 string `protobuf:"bytes,2,opt,name=ref1,proto3" json:"ref1,omitempty"` + Ref2 string `protobuf:"bytes,3,opt,name=ref2,proto3" json:"ref2,omitempty"` +} + +func (x *MergeBaseRequest) Reset() { + *x = MergeBaseRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_repo_proto_msgTypes[32] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *MergeBaseRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*MergeBaseRequest) ProtoMessage() {} + +func (x *MergeBaseRequest) ProtoReflect() protoreflect.Message { + mi := &file_repo_proto_msgTypes[32] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use MergeBaseRequest.ProtoReflect.Descriptor instead. 
+func (*MergeBaseRequest) Descriptor() ([]byte, []int) { + return file_repo_proto_rawDescGZIP(), []int{32} +} + +func (x *MergeBaseRequest) GetBase() *ReadRequest { + if x != nil { + return x.Base + } + return nil +} + +func (x *MergeBaseRequest) GetRef1() string { + if x != nil { + return x.Ref1 + } + return "" +} + +func (x *MergeBaseRequest) GetRef2() string { + if x != nil { + return x.Ref2 + } + return "" +} + +type MergeBaseResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + MergeBaseSha string `protobuf:"bytes,1,opt,name=merge_base_sha,json=mergeBaseSha,proto3" json:"merge_base_sha,omitempty"` +} + +func (x *MergeBaseResponse) Reset() { + *x = MergeBaseResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_repo_proto_msgTypes[33] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *MergeBaseResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*MergeBaseResponse) ProtoMessage() {} + +func (x *MergeBaseResponse) ProtoReflect() protoreflect.Message { + mi := &file_repo_proto_msgTypes[33] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use MergeBaseResponse.ProtoReflect.Descriptor instead. +func (*MergeBaseResponse) Descriptor() ([]byte, []int) { + return file_repo_proto_rawDescGZIP(), []int{33} +} + +func (x *MergeBaseResponse) GetMergeBaseSha() string { + if x != nil { + return x.MergeBaseSha + } + return "" +} + +type FileContent struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Path string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"` + Content []byte `protobuf:"bytes,2,opt,name=content,proto3" json:"content,omitempty"` +} + +func (x *FileContent) Reset() { + *x = FileContent{} + if protoimpl.UnsafeEnabled { + mi := &file_repo_proto_msgTypes[34] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *FileContent) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*FileContent) ProtoMessage() {} + +func (x *FileContent) ProtoReflect() protoreflect.Message { + mi := &file_repo_proto_msgTypes[34] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use FileContent.ProtoReflect.Descriptor instead. 
+func (*FileContent) Descriptor() ([]byte, []int) { + return file_repo_proto_rawDescGZIP(), []int{34} +} + +func (x *FileContent) GetPath() string { + if x != nil { + return x.Path + } + return "" +} + +func (x *FileContent) GetContent() []byte { + if x != nil { + return x.Content + } + return nil +} + +type MatchFilesRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Base *ReadRequest `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"` + Ref string `protobuf:"bytes,2,opt,name=ref,proto3" json:"ref,omitempty"` + DirPath string `protobuf:"bytes,3,opt,name=dir_path,json=dirPath,proto3" json:"dir_path,omitempty"` + Pattern string `protobuf:"bytes,4,opt,name=pattern,proto3" json:"pattern,omitempty"` + MaxSize int32 `protobuf:"varint,5,opt,name=max_size,json=maxSize,proto3" json:"max_size,omitempty"` +} + +func (x *MatchFilesRequest) Reset() { + *x = MatchFilesRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_repo_proto_msgTypes[35] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *MatchFilesRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*MatchFilesRequest) ProtoMessage() {} + +func (x *MatchFilesRequest) ProtoReflect() protoreflect.Message { + mi := &file_repo_proto_msgTypes[35] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use MatchFilesRequest.ProtoReflect.Descriptor instead. +func (*MatchFilesRequest) Descriptor() ([]byte, []int) { + return file_repo_proto_rawDescGZIP(), []int{35} +} + +func (x *MatchFilesRequest) GetBase() *ReadRequest { + if x != nil { + return x.Base + } + return nil +} + +func (x *MatchFilesRequest) GetRef() string { + if x != nil { + return x.Ref + } + return "" +} + +func (x *MatchFilesRequest) GetDirPath() string { + if x != nil { + return x.DirPath + } + return "" +} + +func (x *MatchFilesRequest) GetPattern() string { + if x != nil { + return x.Pattern + } + return "" +} + +func (x *MatchFilesRequest) GetMaxSize() int32 { + if x != nil { + return x.MaxSize + } + return 0 +} + +type MatchFilesResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Files []*FileContent `protobuf:"bytes,1,rep,name=files,proto3" json:"files,omitempty"` +} + +func (x *MatchFilesResponse) Reset() { + *x = MatchFilesResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_repo_proto_msgTypes[36] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *MatchFilesResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*MatchFilesResponse) ProtoMessage() {} + +func (x *MatchFilesResponse) ProtoReflect() protoreflect.Message { + mi := &file_repo_proto_msgTypes[36] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use MatchFilesResponse.ProtoReflect.Descriptor instead. 
+func (*MatchFilesResponse) Descriptor() ([]byte, []int) { + return file_repo_proto_rawDescGZIP(), []int{36} +} + +func (x *MatchFilesResponse) GetFiles() []*FileContent { + if x != nil { + return x.Files + } + return nil +} + +type GeneratePipelineRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Base *ReadRequest `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"` +} + +func (x *GeneratePipelineRequest) Reset() { + *x = GeneratePipelineRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_repo_proto_msgTypes[37] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GeneratePipelineRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GeneratePipelineRequest) ProtoMessage() {} + +func (x *GeneratePipelineRequest) ProtoReflect() protoreflect.Message { + mi := &file_repo_proto_msgTypes[37] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GeneratePipelineRequest.ProtoReflect.Descriptor instead. +func (*GeneratePipelineRequest) Descriptor() ([]byte, []int) { + return file_repo_proto_rawDescGZIP(), []int{37} +} + +func (x *GeneratePipelineRequest) GetBase() *ReadRequest { + if x != nil { + return x.Base + } + return nil +} + +type GeneratePipelineResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + PipelineYaml []byte `protobuf:"bytes,1,opt,name=pipeline_yaml,json=pipelineYaml,proto3" json:"pipeline_yaml,omitempty"` +} + +func (x *GeneratePipelineResponse) Reset() { + *x = GeneratePipelineResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_repo_proto_msgTypes[38] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GeneratePipelineResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GeneratePipelineResponse) ProtoMessage() {} + +func (x *GeneratePipelineResponse) ProtoReflect() protoreflect.Message { + mi := &file_repo_proto_msgTypes[38] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GeneratePipelineResponse.ProtoReflect.Descriptor instead. 
+func (*GeneratePipelineResponse) Descriptor() ([]byte, []int) { + return file_repo_proto_rawDescGZIP(), []int{38} +} + +func (x *GeneratePipelineResponse) GetPipelineYaml() []byte { + if x != nil { + return x.PipelineYaml + } + return nil +} + +var File_repo_proto protoreflect.FileDescriptor + +var file_repo_proto_rawDesc = []byte{ + 0x0a, 0x0a, 0x72, 0x65, 0x70, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x03, 0x72, 0x70, + 0x63, 0x1a, 0x0c, 0x73, 0x68, 0x61, 0x72, 0x65, 0x64, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, + 0x86, 0x01, 0x0a, 0x17, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, + 0x74, 0x6f, 0x72, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x3c, 0x0a, 0x06, 0x68, + 0x65, 0x61, 0x64, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x72, 0x70, + 0x63, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, + 0x72, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x48, + 0x00, 0x52, 0x06, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x12, 0x25, 0x0a, 0x04, 0x66, 0x69, 0x6c, + 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x46, 0x69, + 0x6c, 0x65, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x48, 0x00, 0x52, 0x04, 0x66, 0x69, 0x6c, 0x65, + 0x42, 0x06, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x22, 0x87, 0x02, 0x0a, 0x1d, 0x43, 0x72, 0x65, + 0x61, 0x74, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x12, 0x25, 0x0a, 0x04, 0x62, 0x61, + 0x73, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x57, + 0x72, 0x69, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, 0x04, 0x62, 0x61, 0x73, + 0x65, 0x12, 0x25, 0x0a, 0x0e, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x5f, 0x62, 0x72, 0x61, + 0x6e, 0x63, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x64, 0x65, 0x66, 0x61, 0x75, + 0x6c, 0x74, 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x12, 0x25, 0x0a, 0x06, 0x61, 0x75, 0x74, 0x68, + 0x6f, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0d, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x49, + 0x64, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x52, 0x06, 0x61, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x12, + 0x1e, 0x0a, 0x0a, 0x61, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x44, 0x61, 0x74, 0x65, 0x18, 0x04, 0x20, + 0x01, 0x28, 0x03, 0x52, 0x0a, 0x61, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x44, 0x61, 0x74, 0x65, 0x12, + 0x2b, 0x0a, 0x09, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x72, 0x18, 0x05, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x0d, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x74, + 0x79, 0x52, 0x09, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x72, 0x12, 0x24, 0x0a, 0x0d, + 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x72, 0x44, 0x61, 0x74, 0x65, 0x18, 0x06, 0x20, + 0x01, 0x28, 0x03, 0x52, 0x0d, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x72, 0x44, 0x61, + 0x74, 0x65, 0x22, 0x1a, 0x0a, 0x18, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, 0x65, 0x70, 0x6f, + 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x9b, + 0x01, 0x0a, 0x12, 0x47, 0x65, 0x74, 0x54, 0x72, 0x65, 0x65, 0x4e, 0x6f, 0x64, 0x65, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x24, 0x0a, 0x04, 0x62, 0x61, 0x73, 0x65, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x52, 0x65, 0x61, 0x64, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, 0x04, 0x62, 0x61, 0x73, 0x65, 0x12, 0x17, 
0x0a, 0x07, 0x67, + 0x69, 0x74, 0x5f, 0x72, 0x65, 0x66, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x67, 0x69, + 0x74, 0x52, 0x65, 0x66, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x03, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x32, 0x0a, 0x15, 0x69, 0x6e, 0x63, 0x6c, + 0x75, 0x64, 0x65, 0x5f, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, + 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x13, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x64, 0x65, + 0x4c, 0x61, 0x74, 0x65, 0x73, 0x74, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x22, 0x5d, 0x0a, 0x13, + 0x47, 0x65, 0x74, 0x54, 0x72, 0x65, 0x65, 0x4e, 0x6f, 0x64, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x12, 0x21, 0x0a, 0x04, 0x6e, 0x6f, 0x64, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x0d, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x54, 0x72, 0x65, 0x65, 0x4e, 0x6f, 0x64, 0x65, + 0x52, 0x04, 0x6e, 0x6f, 0x64, 0x65, 0x12, 0x23, 0x0a, 0x06, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x43, 0x6f, 0x6d, + 0x6d, 0x69, 0x74, 0x52, 0x06, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x22, 0x69, 0x0a, 0x14, 0x4c, + 0x69, 0x73, 0x74, 0x54, 0x72, 0x65, 0x65, 0x4e, 0x6f, 0x64, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x12, 0x24, 0x0a, 0x04, 0x62, 0x61, 0x73, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x10, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x52, 0x65, 0x61, 0x64, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x52, 0x04, 0x62, 0x61, 0x73, 0x65, 0x12, 0x17, 0x0a, 0x07, 0x67, 0x69, 0x74, + 0x5f, 0x72, 0x65, 0x66, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x67, 0x69, 0x74, 0x52, + 0x65, 0x66, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x22, 0x3a, 0x0a, 0x15, 0x4c, 0x69, 0x73, 0x74, 0x54, 0x72, + 0x65, 0x65, 0x4e, 0x6f, 0x64, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, + 0x21, 0x0a, 0x04, 0x6e, 0x6f, 0x64, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0d, 0x2e, + 0x72, 0x70, 0x63, 0x2e, 0x54, 0x72, 0x65, 0x65, 0x4e, 0x6f, 0x64, 0x65, 0x52, 0x04, 0x6e, 0x6f, + 0x64, 0x65, 0x22, 0x92, 0x01, 0x0a, 0x08, 0x54, 0x72, 0x65, 0x65, 0x4e, 0x6f, 0x64, 0x65, 0x12, + 0x25, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x11, 0x2e, + 0x72, 0x70, 0x63, 0x2e, 0x54, 0x72, 0x65, 0x65, 0x4e, 0x6f, 0x64, 0x65, 0x54, 0x79, 0x70, 0x65, + 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x25, 0x0a, 0x04, 0x6d, 0x6f, 0x64, 0x65, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x0e, 0x32, 0x11, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x54, 0x72, 0x65, 0x65, 0x4e, + 0x6f, 0x64, 0x65, 0x4d, 0x6f, 0x64, 0x65, 0x52, 0x04, 0x6d, 0x6f, 0x64, 0x65, 0x12, 0x10, 0x0a, + 0x03, 0x73, 0x68, 0x61, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x73, 0x68, 0x61, 0x12, + 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, + 0x61, 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x05, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x22, 0x6a, 0x0a, 0x13, 0x50, 0x61, 0x74, 0x68, 0x73, + 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x24, + 0x0a, 0x04, 0x62, 0x61, 0x73, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x72, + 0x70, 0x63, 0x2e, 0x52, 0x65, 0x61, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, 0x04, + 0x62, 0x61, 0x73, 0x65, 0x12, 0x17, 0x0a, 0x07, 0x67, 0x69, 0x74, 0x5f, 0x72, 0x65, 0x66, 0x18, + 0x02, 
0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x67, 0x69, 0x74, 0x52, 0x65, 0x66, 0x12, 0x14, 0x0a, + 0x05, 0x70, 0x61, 0x74, 0x68, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x05, 0x70, 0x61, + 0x74, 0x68, 0x73, 0x22, 0x4b, 0x0a, 0x14, 0x50, 0x61, 0x74, 0x68, 0x73, 0x44, 0x65, 0x74, 0x61, + 0x69, 0x6c, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x33, 0x0a, 0x0c, 0x70, + 0x61, 0x74, 0x68, 0x5f, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, + 0x0b, 0x32, 0x10, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x50, 0x61, 0x74, 0x68, 0x44, 0x65, 0x74, 0x61, + 0x69, 0x6c, 0x73, 0x52, 0x0b, 0x70, 0x61, 0x74, 0x68, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, + 0x22, 0x63, 0x0a, 0x0b, 0x50, 0x61, 0x74, 0x68, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x12, + 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x70, + 0x61, 0x74, 0x68, 0x12, 0x2c, 0x0a, 0x0b, 0x6c, 0x61, 0x73, 0x74, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, + 0x69, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x43, + 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x52, 0x0a, 0x6c, 0x61, 0x73, 0x74, 0x43, 0x6f, 0x6d, 0x6d, 0x69, + 0x74, 0x12, 0x12, 0x0a, 0x04, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, + 0x04, 0x73, 0x69, 0x7a, 0x65, 0x22, 0x4a, 0x0a, 0x10, 0x47, 0x65, 0x74, 0x43, 0x6f, 0x6d, 0x6d, + 0x69, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x24, 0x0a, 0x04, 0x62, 0x61, 0x73, + 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x52, 0x65, + 0x61, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, 0x04, 0x62, 0x61, 0x73, 0x65, 0x12, + 0x10, 0x0a, 0x03, 0x73, 0x68, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x73, 0x68, + 0x61, 0x22, 0x38, 0x0a, 0x11, 0x47, 0x65, 0x74, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x23, 0x0a, 0x06, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x43, 0x6f, 0x6d, + 0x6d, 0x69, 0x74, 0x52, 0x06, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x22, 0xf1, 0x01, 0x0a, 0x12, + 0x4c, 0x69, 0x73, 0x74, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x12, 0x24, 0x0a, 0x04, 0x62, 0x61, 0x73, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x10, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x52, 0x65, 0x61, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x52, 0x04, 0x62, 0x61, 0x73, 0x65, 0x12, 0x17, 0x0a, 0x07, 0x67, 0x69, 0x74, 0x5f, + 0x72, 0x65, 0x66, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x67, 0x69, 0x74, 0x52, 0x65, + 0x66, 0x12, 0x14, 0x0a, 0x05, 0x61, 0x66, 0x74, 0x65, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x05, 0x61, 0x66, 0x74, 0x65, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x67, 0x65, 0x18, + 0x04, 0x20, 0x01, 0x28, 0x05, 0x52, 0x04, 0x70, 0x61, 0x67, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x6c, + 0x69, 0x6d, 0x69, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x05, 0x52, 0x05, 0x6c, 0x69, 0x6d, 0x69, + 0x74, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x69, 0x6e, 0x63, 0x65, 0x18, 0x07, + 0x20, 0x01, 0x28, 0x03, 0x52, 0x05, 0x73, 0x69, 0x6e, 0x63, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x75, + 0x6e, 0x74, 0x69, 0x6c, 0x18, 0x08, 0x20, 0x01, 0x28, 0x03, 0x52, 0x05, 0x75, 0x6e, 0x74, 0x69, + 0x6c, 0x12, 0x1c, 0x0a, 0x09, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x72, 0x18, 0x09, + 0x20, 0x01, 0x28, 0x09, 0x52, 
0x09, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x72, 0x22, + 0x75, 0x0a, 0x13, 0x4c, 0x69, 0x73, 0x74, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x73, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x23, 0x0a, 0x06, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x43, 0x6f, 0x6d, + 0x6d, 0x69, 0x74, 0x52, 0x06, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x12, 0x39, 0x0a, 0x0e, 0x72, + 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x5f, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x18, 0x02, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x52, 0x65, 0x6e, 0x61, 0x6d, 0x65, + 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x52, 0x0d, 0x72, 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x44, + 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x22, 0x9b, 0x01, 0x0a, 0x0d, 0x52, 0x65, 0x6e, 0x61, 0x6d, + 0x65, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x12, 0x19, 0x0a, 0x08, 0x6f, 0x6c, 0x64, 0x5f, + 0x70, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6f, 0x6c, 0x64, 0x50, + 0x61, 0x74, 0x68, 0x12, 0x19, 0x0a, 0x08, 0x6e, 0x65, 0x77, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6e, 0x65, 0x77, 0x50, 0x61, 0x74, 0x68, 0x12, 0x2a, + 0x0a, 0x11, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x5f, 0x73, 0x68, 0x61, 0x5f, 0x62, 0x65, 0x66, + 0x6f, 0x72, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, + 0x74, 0x53, 0x68, 0x61, 0x42, 0x65, 0x66, 0x6f, 0x72, 0x65, 0x12, 0x28, 0x0a, 0x10, 0x63, 0x6f, + 0x6d, 0x6d, 0x69, 0x74, 0x5f, 0x73, 0x68, 0x61, 0x5f, 0x61, 0x66, 0x74, 0x65, 0x72, 0x18, 0x04, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x53, 0x68, 0x61, 0x41, + 0x66, 0x74, 0x65, 0x72, 0x22, 0x67, 0x0a, 0x0e, 0x47, 0x65, 0x74, 0x42, 0x6c, 0x6f, 0x62, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x24, 0x0a, 0x04, 0x62, 0x61, 0x73, 0x65, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x52, 0x65, 0x61, 0x64, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, 0x04, 0x62, 0x61, 0x73, 0x65, 0x12, 0x10, 0x0a, 0x03, + 0x73, 0x68, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x73, 0x68, 0x61, 0x12, 0x1d, + 0x0a, 0x0a, 0x73, 0x69, 0x7a, 0x65, 0x5f, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x03, 0x20, 0x01, + 0x28, 0x03, 0x52, 0x09, 0x73, 0x69, 0x7a, 0x65, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x22, 0x6b, 0x0a, + 0x0f, 0x47, 0x65, 0x74, 0x42, 0x6c, 0x6f, 0x62, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x12, 0x34, 0x0a, 0x06, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x1a, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x47, 0x65, 0x74, 0x42, 0x6c, 0x6f, 0x62, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x48, 0x00, 0x52, 0x06, + 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x12, 0x1a, 0x0a, 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, + 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x48, 0x00, 0x52, 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x65, + 0x6e, 0x74, 0x42, 0x06, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x22, 0x60, 0x0a, 0x15, 0x47, 0x65, + 0x74, 0x42, 0x6c, 0x6f, 0x62, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x48, 0x65, 0x61, + 0x64, 0x65, 0x72, 0x12, 0x10, 0x0a, 0x03, 0x73, 0x68, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x03, 0x73, 0x68, 0x61, 0x12, 0x12, 0x0a, 0x04, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x03, 0x52, 0x04, 0x73, 0x69, 0x7a, 0x65, 0x12, 0x21, 0x0a, 0x0c, 0x63, 0x6f, 0x6e, + 0x74, 0x65, 0x6e, 0x74, 0x5f, 0x73, 0x69, 0x7a, 0x65, 
0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, + 0x0b, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x53, 0x69, 0x7a, 0x65, 0x22, 0x68, 0x0a, 0x13, + 0x47, 0x65, 0x74, 0x53, 0x75, 0x62, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x12, 0x24, 0x0a, 0x04, 0x62, 0x61, 0x73, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x10, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x52, 0x65, 0x61, 0x64, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x52, 0x04, 0x62, 0x61, 0x73, 0x65, 0x12, 0x17, 0x0a, 0x07, 0x67, 0x69, 0x74, + 0x5f, 0x72, 0x65, 0x66, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x67, 0x69, 0x74, 0x52, + 0x65, 0x66, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x22, 0x44, 0x0a, 0x14, 0x47, 0x65, 0x74, 0x53, 0x75, 0x62, + 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2c, + 0x0a, 0x09, 0x73, 0x75, 0x62, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x0e, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x75, 0x62, 0x6d, 0x6f, 0x64, 0x75, 0x6c, + 0x65, 0x52, 0x09, 0x73, 0x75, 0x62, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x22, 0x31, 0x0a, 0x09, + 0x53, 0x75, 0x62, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, + 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x10, 0x0a, + 0x03, 0x75, 0x72, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x75, 0x72, 0x6c, 0x22, + 0x9a, 0x01, 0x0a, 0x1b, 0x47, 0x65, 0x74, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x44, 0x69, 0x76, + 0x65, 0x72, 0x67, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, + 0x24, 0x0a, 0x04, 0x62, 0x61, 0x73, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, + 0x72, 0x70, 0x63, 0x2e, 0x52, 0x65, 0x61, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, + 0x04, 0x62, 0x61, 0x73, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x6d, 0x61, 0x78, 0x5f, 0x63, 0x6f, 0x75, + 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x08, 0x6d, 0x61, 0x78, 0x43, 0x6f, 0x75, + 0x6e, 0x74, 0x12, 0x38, 0x0a, 0x08, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x73, 0x18, 0x03, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x69, + 0x74, 0x44, 0x69, 0x76, 0x65, 0x72, 0x67, 0x65, 0x6e, 0x63, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x52, 0x08, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x73, 0x22, 0x3d, 0x0a, 0x17, + 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x44, 0x69, 0x76, 0x65, 0x72, 0x67, 0x65, 0x6e, 0x63, 0x65, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x66, 0x72, 0x6f, 0x6d, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x66, 0x72, 0x6f, 0x6d, 0x12, 0x0e, 0x0a, 0x02, 0x74, + 0x6f, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x74, 0x6f, 0x22, 0x57, 0x0a, 0x1c, 0x47, + 0x65, 0x74, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x44, 0x69, 0x76, 0x65, 0x72, 0x67, 0x65, 0x6e, + 0x63, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x37, 0x0a, 0x0b, 0x64, + 0x69, 0x76, 0x65, 0x72, 0x67, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x15, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x44, 0x69, 0x76, + 0x65, 0x72, 0x67, 0x65, 0x6e, 0x63, 0x65, 0x52, 0x0b, 0x64, 0x69, 0x76, 0x65, 0x72, 0x67, 0x65, + 0x6e, 0x63, 0x65, 0x73, 0x22, 0x40, 0x0a, 0x10, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x44, 0x69, + 0x76, 0x65, 0x72, 0x67, 0x65, 0x6e, 0x63, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x61, 
0x68, 0x65, 0x61, + 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x05, 0x61, 0x68, 0x65, 0x61, 0x64, 0x12, 0x16, + 0x0a, 0x06, 0x62, 0x65, 0x68, 0x69, 0x6e, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x06, + 0x62, 0x65, 0x68, 0x69, 0x6e, 0x64, 0x22, 0x40, 0x0a, 0x17, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, + 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x12, 0x25, 0x0a, 0x04, 0x62, 0x61, 0x73, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x11, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x57, 0x72, 0x69, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x52, 0x04, 0x62, 0x61, 0x73, 0x65, 0x22, 0x1a, 0x0a, 0x18, 0x44, 0x65, 0x6c, 0x65, + 0x74, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x87, 0x01, 0x0a, 0x15, 0x53, 0x79, 0x6e, 0x63, 0x52, 0x65, 0x70, + 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x25, + 0x0a, 0x04, 0x62, 0x61, 0x73, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x72, + 0x70, 0x63, 0x2e, 0x57, 0x72, 0x69, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, + 0x04, 0x62, 0x61, 0x73, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x2f, 0x0a, + 0x14, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x5f, 0x69, 0x66, 0x5f, 0x6e, 0x6f, 0x74, 0x5f, 0x65, + 0x78, 0x69, 0x73, 0x74, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x11, 0x63, 0x72, 0x65, + 0x61, 0x74, 0x65, 0x49, 0x66, 0x4e, 0x6f, 0x74, 0x45, 0x78, 0x69, 0x73, 0x74, 0x73, 0x22, 0x3f, + 0x0a, 0x16, 0x53, 0x79, 0x6e, 0x63, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x25, 0x0a, 0x0e, 0x64, 0x65, 0x66, 0x61, + 0x75, 0x6c, 0x74, 0x5f, 0x62, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x0d, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x22, + 0xae, 0x01, 0x0a, 0x15, 0x48, 0x61, 0x73, 0x68, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, + 0x72, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x24, 0x0a, 0x04, 0x62, 0x61, 0x73, + 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x52, 0x65, + 0x61, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, 0x04, 0x62, 0x61, 0x73, 0x65, 0x12, + 0x2a, 0x0a, 0x09, 0x68, 0x61, 0x73, 0x68, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x0e, 0x32, 0x0d, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x48, 0x61, 0x73, 0x68, 0x54, 0x79, 0x70, + 0x65, 0x52, 0x08, 0x68, 0x61, 0x73, 0x68, 0x54, 0x79, 0x70, 0x65, 0x12, 0x43, 0x0a, 0x10, 0x61, + 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x18, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x48, 0x61, 0x73, 0x68, + 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x79, 0x70, 0x65, 0x52, + 0x0f, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x79, 0x70, 0x65, + 0x22, 0x2c, 0x0a, 0x16, 0x48, 0x61, 0x73, 0x68, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, + 0x72, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x68, 0x61, + 0x73, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x68, 0x61, 0x73, 0x68, 0x22, 0x60, + 0x0a, 0x10, 0x4d, 0x65, 0x72, 0x67, 0x65, 0x42, 0x61, 0x73, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 
0x74, 0x12, 0x24, 0x0a, 0x04, 0x62, 0x61, 0x73, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x10, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x52, 0x65, 0x61, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x52, 0x04, 0x62, 0x61, 0x73, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x72, 0x65, 0x66, 0x31, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x72, 0x65, 0x66, 0x31, 0x12, 0x12, 0x0a, 0x04, + 0x72, 0x65, 0x66, 0x32, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x72, 0x65, 0x66, 0x32, + 0x22, 0x39, 0x0a, 0x11, 0x4d, 0x65, 0x72, 0x67, 0x65, 0x42, 0x61, 0x73, 0x65, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x24, 0x0a, 0x0e, 0x6d, 0x65, 0x72, 0x67, 0x65, 0x5f, 0x62, + 0x61, 0x73, 0x65, 0x5f, 0x73, 0x68, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x6d, + 0x65, 0x72, 0x67, 0x65, 0x42, 0x61, 0x73, 0x65, 0x53, 0x68, 0x61, 0x22, 0x3b, 0x0a, 0x0b, 0x46, + 0x69, 0x6c, 0x65, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, + 0x74, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x18, + 0x0a, 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, + 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x22, 0x9b, 0x01, 0x0a, 0x11, 0x4d, 0x61, 0x74, + 0x63, 0x68, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x24, + 0x0a, 0x04, 0x62, 0x61, 0x73, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x72, + 0x70, 0x63, 0x2e, 0x52, 0x65, 0x61, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, 0x04, + 0x62, 0x61, 0x73, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x72, 0x65, 0x66, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x03, 0x72, 0x65, 0x66, 0x12, 0x19, 0x0a, 0x08, 0x64, 0x69, 0x72, 0x5f, 0x70, 0x61, + 0x74, 0x68, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x64, 0x69, 0x72, 0x50, 0x61, 0x74, + 0x68, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x18, 0x04, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x07, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x12, 0x19, 0x0a, 0x08, 0x6d, + 0x61, 0x78, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x05, 0x52, 0x07, 0x6d, + 0x61, 0x78, 0x53, 0x69, 0x7a, 0x65, 0x22, 0x3c, 0x0a, 0x12, 0x4d, 0x61, 0x74, 0x63, 0x68, 0x46, + 0x69, 0x6c, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x26, 0x0a, 0x05, + 0x66, 0x69, 0x6c, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x72, 0x70, + 0x63, 0x2e, 0x46, 0x69, 0x6c, 0x65, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x52, 0x05, 0x66, + 0x69, 0x6c, 0x65, 0x73, 0x22, 0x3f, 0x0a, 0x17, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, + 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, + 0x24, 0x0a, 0x04, 0x62, 0x61, 0x73, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, + 0x72, 0x70, 0x63, 0x2e, 0x52, 0x65, 0x61, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, + 0x04, 0x62, 0x61, 0x73, 0x65, 0x22, 0x3f, 0x0a, 0x18, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, + 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x12, 0x23, 0x0a, 0x0d, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x79, 0x61, + 0x6d, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0c, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, + 0x6e, 0x65, 0x59, 0x61, 0x6d, 0x6c, 0x2a, 0x52, 0x0a, 0x0c, 0x54, 0x72, 0x65, 0x65, 0x4e, 0x6f, + 0x64, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x14, 0x0a, 0x10, 0x54, 0x72, 0x65, 0x65, 0x4e, 0x6f, + 0x64, 0x65, 0x54, 0x79, 0x70, 
0x65, 0x54, 0x72, 0x65, 0x65, 0x10, 0x00, 0x12, 0x14, 0x0a, 0x10, + 0x54, 0x72, 0x65, 0x65, 0x4e, 0x6f, 0x64, 0x65, 0x54, 0x79, 0x70, 0x65, 0x42, 0x6c, 0x6f, 0x62, + 0x10, 0x01, 0x12, 0x16, 0x0a, 0x12, 0x54, 0x72, 0x65, 0x65, 0x4e, 0x6f, 0x64, 0x65, 0x54, 0x79, + 0x70, 0x65, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x10, 0x02, 0x2a, 0x81, 0x01, 0x0a, 0x0c, 0x54, + 0x72, 0x65, 0x65, 0x4e, 0x6f, 0x64, 0x65, 0x4d, 0x6f, 0x64, 0x65, 0x12, 0x14, 0x0a, 0x10, 0x54, + 0x72, 0x65, 0x65, 0x4e, 0x6f, 0x64, 0x65, 0x4d, 0x6f, 0x64, 0x65, 0x46, 0x69, 0x6c, 0x65, 0x10, + 0x00, 0x12, 0x17, 0x0a, 0x13, 0x54, 0x72, 0x65, 0x65, 0x4e, 0x6f, 0x64, 0x65, 0x4d, 0x6f, 0x64, + 0x65, 0x53, 0x79, 0x6d, 0x6c, 0x69, 0x6e, 0x6b, 0x10, 0x01, 0x12, 0x14, 0x0a, 0x10, 0x54, 0x72, + 0x65, 0x65, 0x4e, 0x6f, 0x64, 0x65, 0x4d, 0x6f, 0x64, 0x65, 0x45, 0x78, 0x65, 0x63, 0x10, 0x02, + 0x12, 0x14, 0x0a, 0x10, 0x54, 0x72, 0x65, 0x65, 0x4e, 0x6f, 0x64, 0x65, 0x4d, 0x6f, 0x64, 0x65, + 0x54, 0x72, 0x65, 0x65, 0x10, 0x03, 0x12, 0x16, 0x0a, 0x12, 0x54, 0x72, 0x65, 0x65, 0x4e, 0x6f, + 0x64, 0x65, 0x4d, 0x6f, 0x64, 0x65, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x10, 0x04, 0x2a, 0x1e, + 0x0a, 0x08, 0x48, 0x61, 0x73, 0x68, 0x54, 0x79, 0x70, 0x65, 0x12, 0x12, 0x0a, 0x0e, 0x48, 0x61, + 0x73, 0x68, 0x54, 0x79, 0x70, 0x65, 0x53, 0x48, 0x41, 0x32, 0x35, 0x36, 0x10, 0x00, 0x2a, 0x31, + 0x0a, 0x13, 0x48, 0x61, 0x73, 0x68, 0x41, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1a, 0x0a, 0x16, 0x48, 0x61, 0x73, 0x68, 0x41, 0x67, 0x67, + 0x72, 0x65, 0x67, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x79, 0x70, 0x65, 0x58, 0x4f, 0x52, 0x10, + 0x00, 0x32, 0xc8, 0x08, 0x0a, 0x11, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, + 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x51, 0x0a, 0x10, 0x43, 0x72, 0x65, 0x61, 0x74, + 0x65, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x12, 0x1c, 0x2e, 0x72, 0x70, + 0x63, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, + 0x72, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1d, 0x2e, 0x72, 0x70, 0x63, 0x2e, + 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x28, 0x01, 0x12, 0x40, 0x0a, 0x0b, 0x47, 0x65, + 0x74, 0x54, 0x72, 0x65, 0x65, 0x4e, 0x6f, 0x64, 0x65, 0x12, 0x17, 0x2e, 0x72, 0x70, 0x63, 0x2e, + 0x47, 0x65, 0x74, 0x54, 0x72, 0x65, 0x65, 0x4e, 0x6f, 0x64, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x1a, 0x18, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x47, 0x65, 0x74, 0x54, 0x72, 0x65, 0x65, + 0x4e, 0x6f, 0x64, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x48, 0x0a, 0x0d, + 0x4c, 0x69, 0x73, 0x74, 0x54, 0x72, 0x65, 0x65, 0x4e, 0x6f, 0x64, 0x65, 0x73, 0x12, 0x19, 0x2e, + 0x72, 0x70, 0x63, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x54, 0x72, 0x65, 0x65, 0x4e, 0x6f, 0x64, 0x65, + 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1a, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x4c, + 0x69, 0x73, 0x74, 0x54, 0x72, 0x65, 0x65, 0x4e, 0x6f, 0x64, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x30, 0x01, 0x12, 0x43, 0x0a, 0x0c, 0x50, 0x61, 0x74, 0x68, 0x73, 0x44, + 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x12, 0x18, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x50, 0x61, 0x74, + 0x68, 0x73, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x1a, 0x19, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x50, 0x61, 0x74, 0x68, 0x73, 0x44, 0x65, 0x74, 0x61, + 0x69, 0x6c, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 
0x73, 0x65, 0x12, 0x43, 0x0a, 0x0c, 0x47, + 0x65, 0x74, 0x53, 0x75, 0x62, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x12, 0x18, 0x2e, 0x72, 0x70, + 0x63, 0x2e, 0x47, 0x65, 0x74, 0x53, 0x75, 0x62, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x19, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x47, 0x65, 0x74, 0x53, + 0x75, 0x62, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x12, 0x36, 0x0a, 0x07, 0x47, 0x65, 0x74, 0x42, 0x6c, 0x6f, 0x62, 0x12, 0x13, 0x2e, 0x72, 0x70, + 0x63, 0x2e, 0x47, 0x65, 0x74, 0x42, 0x6c, 0x6f, 0x62, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x1a, 0x14, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x47, 0x65, 0x74, 0x42, 0x6c, 0x6f, 0x62, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x30, 0x01, 0x12, 0x42, 0x0a, 0x0b, 0x4c, 0x69, 0x73, 0x74, + 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x73, 0x12, 0x17, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x4c, 0x69, + 0x73, 0x74, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x1a, 0x18, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x43, 0x6f, 0x6d, 0x6d, 0x69, + 0x74, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x30, 0x01, 0x12, 0x3a, 0x0a, 0x09, + 0x47, 0x65, 0x74, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x12, 0x15, 0x2e, 0x72, 0x70, 0x63, 0x2e, + 0x47, 0x65, 0x74, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x1a, 0x16, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x47, 0x65, 0x74, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x5b, 0x0a, 0x14, 0x47, 0x65, 0x74, 0x43, + 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x44, 0x69, 0x76, 0x65, 0x72, 0x67, 0x65, 0x6e, 0x63, 0x65, 0x73, + 0x12, 0x20, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x47, 0x65, 0x74, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, + 0x44, 0x69, 0x76, 0x65, 0x72, 0x67, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x1a, 0x21, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x47, 0x65, 0x74, 0x43, 0x6f, 0x6d, 0x6d, + 0x69, 0x74, 0x44, 0x69, 0x76, 0x65, 0x72, 0x67, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x4f, 0x0a, 0x10, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x52, + 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x12, 0x1c, 0x2e, 0x72, 0x70, 0x63, 0x2e, + 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1d, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x44, 0x65, + 0x6c, 0x65, 0x74, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x4b, 0x0a, 0x0e, 0x53, 0x79, 0x6e, 0x63, 0x52, 0x65, + 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x12, 0x1a, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, + 0x79, 0x6e, 0x63, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1b, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x79, 0x6e, 0x63, 0x52, + 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x22, 0x00, 0x12, 0x4b, 0x0a, 0x0e, 0x48, 0x61, 0x73, 0x68, 0x52, 0x65, 0x70, 0x6f, 0x73, + 0x69, 0x74, 0x6f, 0x72, 0x79, 0x12, 0x1a, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x48, 0x61, 0x73, 0x68, + 0x52, 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x1a, 0x1b, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x48, 0x61, 0x73, 0x68, 0x52, 0x65, 0x70, 0x6f, + 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 
0x65, 0x22, 0x00, + 0x12, 0x3a, 0x0a, 0x09, 0x4d, 0x65, 0x72, 0x67, 0x65, 0x42, 0x61, 0x73, 0x65, 0x12, 0x15, 0x2e, + 0x72, 0x70, 0x63, 0x2e, 0x4d, 0x65, 0x72, 0x67, 0x65, 0x42, 0x61, 0x73, 0x65, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x4d, 0x65, 0x72, 0x67, 0x65, + 0x42, 0x61, 0x73, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x3d, 0x0a, 0x0a, + 0x4d, 0x61, 0x74, 0x63, 0x68, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x12, 0x16, 0x2e, 0x72, 0x70, 0x63, + 0x2e, 0x4d, 0x61, 0x74, 0x63, 0x68, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x1a, 0x17, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x4d, 0x61, 0x74, 0x63, 0x68, 0x46, 0x69, + 0x6c, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x4f, 0x0a, 0x10, 0x47, + 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x12, + 0x1c, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x50, 0x69, + 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1d, 0x2e, + 0x72, 0x70, 0x63, 0x2e, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, + 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, 0x27, 0x5a, 0x25, + 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x68, 0x61, 0x72, 0x6e, 0x65, + 0x73, 0x73, 0x2f, 0x67, 0x69, 0x74, 0x6e, 0x65, 0x73, 0x73, 0x2f, 0x67, 0x69, 0x74, 0x72, 0x70, + 0x63, 0x2f, 0x72, 0x70, 0x63, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_repo_proto_rawDescOnce sync.Once + file_repo_proto_rawDescData = file_repo_proto_rawDesc +) + +func file_repo_proto_rawDescGZIP() []byte { + file_repo_proto_rawDescOnce.Do(func() { + file_repo_proto_rawDescData = protoimpl.X.CompressGZIP(file_repo_proto_rawDescData) + }) + return file_repo_proto_rawDescData +} + +var file_repo_proto_enumTypes = make([]protoimpl.EnumInfo, 4) +var file_repo_proto_msgTypes = make([]protoimpl.MessageInfo, 39) +var file_repo_proto_goTypes = []interface{}{ + (TreeNodeType)(0), // 0: rpc.TreeNodeType + (TreeNodeMode)(0), // 1: rpc.TreeNodeMode + (HashType)(0), // 2: rpc.HashType + (HashAggregationType)(0), // 3: rpc.HashAggregationType + (*CreateRepositoryRequest)(nil), // 4: rpc.CreateRepositoryRequest + (*CreateRepositoryRequestHeader)(nil), // 5: rpc.CreateRepositoryRequestHeader + (*CreateRepositoryResponse)(nil), // 6: rpc.CreateRepositoryResponse + (*GetTreeNodeRequest)(nil), // 7: rpc.GetTreeNodeRequest + (*GetTreeNodeResponse)(nil), // 8: rpc.GetTreeNodeResponse + (*ListTreeNodesRequest)(nil), // 9: rpc.ListTreeNodesRequest + (*ListTreeNodesResponse)(nil), // 10: rpc.ListTreeNodesResponse + (*TreeNode)(nil), // 11: rpc.TreeNode + (*PathsDetailsRequest)(nil), // 12: rpc.PathsDetailsRequest + (*PathsDetailsResponse)(nil), // 13: rpc.PathsDetailsResponse + (*PathDetails)(nil), // 14: rpc.PathDetails + (*GetCommitRequest)(nil), // 15: rpc.GetCommitRequest + (*GetCommitResponse)(nil), // 16: rpc.GetCommitResponse + (*ListCommitsRequest)(nil), // 17: rpc.ListCommitsRequest + (*ListCommitsResponse)(nil), // 18: rpc.ListCommitsResponse + (*RenameDetails)(nil), // 19: rpc.RenameDetails + (*GetBlobRequest)(nil), // 20: rpc.GetBlobRequest + (*GetBlobResponse)(nil), // 21: rpc.GetBlobResponse + (*GetBlobResponseHeader)(nil), // 22: rpc.GetBlobResponseHeader + (*GetSubmoduleRequest)(nil), // 23: rpc.GetSubmoduleRequest + (*GetSubmoduleResponse)(nil), // 24: rpc.GetSubmoduleResponse + (*Submodule)(nil), // 25: 
rpc.Submodule + (*GetCommitDivergencesRequest)(nil), // 26: rpc.GetCommitDivergencesRequest + (*CommitDivergenceRequest)(nil), // 27: rpc.CommitDivergenceRequest + (*GetCommitDivergencesResponse)(nil), // 28: rpc.GetCommitDivergencesResponse + (*CommitDivergence)(nil), // 29: rpc.CommitDivergence + (*DeleteRepositoryRequest)(nil), // 30: rpc.DeleteRepositoryRequest + (*DeleteRepositoryResponse)(nil), // 31: rpc.DeleteRepositoryResponse + (*SyncRepositoryRequest)(nil), // 32: rpc.SyncRepositoryRequest + (*SyncRepositoryResponse)(nil), // 33: rpc.SyncRepositoryResponse + (*HashRepositoryRequest)(nil), // 34: rpc.HashRepositoryRequest + (*HashRepositoryResponse)(nil), // 35: rpc.HashRepositoryResponse + (*MergeBaseRequest)(nil), // 36: rpc.MergeBaseRequest + (*MergeBaseResponse)(nil), // 37: rpc.MergeBaseResponse + (*FileContent)(nil), // 38: rpc.FileContent + (*MatchFilesRequest)(nil), // 39: rpc.MatchFilesRequest + (*MatchFilesResponse)(nil), // 40: rpc.MatchFilesResponse + (*GeneratePipelineRequest)(nil), // 41: rpc.GeneratePipelineRequest + (*GeneratePipelineResponse)(nil), // 42: rpc.GeneratePipelineResponse + (*FileUpload)(nil), // 43: rpc.FileUpload + (*WriteRequest)(nil), // 44: rpc.WriteRequest + (*Identity)(nil), // 45: rpc.Identity + (*ReadRequest)(nil), // 46: rpc.ReadRequest + (*Commit)(nil), // 47: rpc.Commit +} +var file_repo_proto_depIdxs = []int32{ + 5, // 0: rpc.CreateRepositoryRequest.header:type_name -> rpc.CreateRepositoryRequestHeader + 43, // 1: rpc.CreateRepositoryRequest.file:type_name -> rpc.FileUpload + 44, // 2: rpc.CreateRepositoryRequestHeader.base:type_name -> rpc.WriteRequest + 45, // 3: rpc.CreateRepositoryRequestHeader.author:type_name -> rpc.Identity + 45, // 4: rpc.CreateRepositoryRequestHeader.committer:type_name -> rpc.Identity + 46, // 5: rpc.GetTreeNodeRequest.base:type_name -> rpc.ReadRequest + 11, // 6: rpc.GetTreeNodeResponse.node:type_name -> rpc.TreeNode + 47, // 7: rpc.GetTreeNodeResponse.commit:type_name -> rpc.Commit + 46, // 8: rpc.ListTreeNodesRequest.base:type_name -> rpc.ReadRequest + 11, // 9: rpc.ListTreeNodesResponse.node:type_name -> rpc.TreeNode + 0, // 10: rpc.TreeNode.type:type_name -> rpc.TreeNodeType + 1, // 11: rpc.TreeNode.mode:type_name -> rpc.TreeNodeMode + 46, // 12: rpc.PathsDetailsRequest.base:type_name -> rpc.ReadRequest + 14, // 13: rpc.PathsDetailsResponse.path_details:type_name -> rpc.PathDetails + 47, // 14: rpc.PathDetails.last_commit:type_name -> rpc.Commit + 46, // 15: rpc.GetCommitRequest.base:type_name -> rpc.ReadRequest + 47, // 16: rpc.GetCommitResponse.commit:type_name -> rpc.Commit + 46, // 17: rpc.ListCommitsRequest.base:type_name -> rpc.ReadRequest + 47, // 18: rpc.ListCommitsResponse.commit:type_name -> rpc.Commit + 19, // 19: rpc.ListCommitsResponse.rename_details:type_name -> rpc.RenameDetails + 46, // 20: rpc.GetBlobRequest.base:type_name -> rpc.ReadRequest + 22, // 21: rpc.GetBlobResponse.header:type_name -> rpc.GetBlobResponseHeader + 46, // 22: rpc.GetSubmoduleRequest.base:type_name -> rpc.ReadRequest + 25, // 23: rpc.GetSubmoduleResponse.submodule:type_name -> rpc.Submodule + 46, // 24: rpc.GetCommitDivergencesRequest.base:type_name -> rpc.ReadRequest + 27, // 25: rpc.GetCommitDivergencesRequest.requests:type_name -> rpc.CommitDivergenceRequest + 29, // 26: rpc.GetCommitDivergencesResponse.divergences:type_name -> rpc.CommitDivergence + 44, // 27: rpc.DeleteRepositoryRequest.base:type_name -> rpc.WriteRequest + 44, // 28: rpc.SyncRepositoryRequest.base:type_name -> rpc.WriteRequest + 46, // 29: 
rpc.HashRepositoryRequest.base:type_name -> rpc.ReadRequest + 2, // 30: rpc.HashRepositoryRequest.hash_type:type_name -> rpc.HashType + 3, // 31: rpc.HashRepositoryRequest.aggregation_type:type_name -> rpc.HashAggregationType + 46, // 32: rpc.MergeBaseRequest.base:type_name -> rpc.ReadRequest + 46, // 33: rpc.MatchFilesRequest.base:type_name -> rpc.ReadRequest + 38, // 34: rpc.MatchFilesResponse.files:type_name -> rpc.FileContent + 46, // 35: rpc.GeneratePipelineRequest.base:type_name -> rpc.ReadRequest + 4, // 36: rpc.RepositoryService.CreateRepository:input_type -> rpc.CreateRepositoryRequest + 7, // 37: rpc.RepositoryService.GetTreeNode:input_type -> rpc.GetTreeNodeRequest + 9, // 38: rpc.RepositoryService.ListTreeNodes:input_type -> rpc.ListTreeNodesRequest + 12, // 39: rpc.RepositoryService.PathsDetails:input_type -> rpc.PathsDetailsRequest + 23, // 40: rpc.RepositoryService.GetSubmodule:input_type -> rpc.GetSubmoduleRequest + 20, // 41: rpc.RepositoryService.GetBlob:input_type -> rpc.GetBlobRequest + 17, // 42: rpc.RepositoryService.ListCommits:input_type -> rpc.ListCommitsRequest + 15, // 43: rpc.RepositoryService.GetCommit:input_type -> rpc.GetCommitRequest + 26, // 44: rpc.RepositoryService.GetCommitDivergences:input_type -> rpc.GetCommitDivergencesRequest + 30, // 45: rpc.RepositoryService.DeleteRepository:input_type -> rpc.DeleteRepositoryRequest + 32, // 46: rpc.RepositoryService.SyncRepository:input_type -> rpc.SyncRepositoryRequest + 34, // 47: rpc.RepositoryService.HashRepository:input_type -> rpc.HashRepositoryRequest + 36, // 48: rpc.RepositoryService.MergeBase:input_type -> rpc.MergeBaseRequest + 39, // 49: rpc.RepositoryService.MatchFiles:input_type -> rpc.MatchFilesRequest + 41, // 50: rpc.RepositoryService.GeneratePipeline:input_type -> rpc.GeneratePipelineRequest + 6, // 51: rpc.RepositoryService.CreateRepository:output_type -> rpc.CreateRepositoryResponse + 8, // 52: rpc.RepositoryService.GetTreeNode:output_type -> rpc.GetTreeNodeResponse + 10, // 53: rpc.RepositoryService.ListTreeNodes:output_type -> rpc.ListTreeNodesResponse + 13, // 54: rpc.RepositoryService.PathsDetails:output_type -> rpc.PathsDetailsResponse + 24, // 55: rpc.RepositoryService.GetSubmodule:output_type -> rpc.GetSubmoduleResponse + 21, // 56: rpc.RepositoryService.GetBlob:output_type -> rpc.GetBlobResponse + 18, // 57: rpc.RepositoryService.ListCommits:output_type -> rpc.ListCommitsResponse + 16, // 58: rpc.RepositoryService.GetCommit:output_type -> rpc.GetCommitResponse + 28, // 59: rpc.RepositoryService.GetCommitDivergences:output_type -> rpc.GetCommitDivergencesResponse + 31, // 60: rpc.RepositoryService.DeleteRepository:output_type -> rpc.DeleteRepositoryResponse + 33, // 61: rpc.RepositoryService.SyncRepository:output_type -> rpc.SyncRepositoryResponse + 35, // 62: rpc.RepositoryService.HashRepository:output_type -> rpc.HashRepositoryResponse + 37, // 63: rpc.RepositoryService.MergeBase:output_type -> rpc.MergeBaseResponse + 40, // 64: rpc.RepositoryService.MatchFiles:output_type -> rpc.MatchFilesResponse + 42, // 65: rpc.RepositoryService.GeneratePipeline:output_type -> rpc.GeneratePipelineResponse + 51, // [51:66] is the sub-list for method output_type + 36, // [36:51] is the sub-list for method input_type + 36, // [36:36] is the sub-list for extension type_name + 36, // [36:36] is the sub-list for extension extendee + 0, // [0:36] is the sub-list for field type_name +} + +func init() { file_repo_proto_init() } +func file_repo_proto_init() { + if File_repo_proto != nil { + return + } + 
file_shared_proto_init() + if !protoimpl.UnsafeEnabled { + file_repo_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CreateRepositoryRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_repo_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CreateRepositoryRequestHeader); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_repo_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CreateRepositoryResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_repo_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GetTreeNodeRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_repo_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GetTreeNodeResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_repo_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ListTreeNodesRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_repo_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ListTreeNodesResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_repo_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*TreeNode); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_repo_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*PathsDetailsRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_repo_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*PathsDetailsResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_repo_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*PathDetails); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_repo_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GetCommitRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_repo_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GetCommitResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_repo_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ListCommitsRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 
2: + return &v.unknownFields + default: + return nil + } + } + file_repo_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ListCommitsResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_repo_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*RenameDetails); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_repo_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GetBlobRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_repo_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GetBlobResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_repo_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GetBlobResponseHeader); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_repo_proto_msgTypes[19].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GetSubmoduleRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_repo_proto_msgTypes[20].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GetSubmoduleResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_repo_proto_msgTypes[21].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Submodule); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_repo_proto_msgTypes[22].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GetCommitDivergencesRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_repo_proto_msgTypes[23].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CommitDivergenceRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_repo_proto_msgTypes[24].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GetCommitDivergencesResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_repo_proto_msgTypes[25].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CommitDivergence); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_repo_proto_msgTypes[26].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DeleteRepositoryRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_repo_proto_msgTypes[27].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DeleteRepositoryResponse); i { + case 0: + return &v.state + case 1: + return 
&v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_repo_proto_msgTypes[28].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SyncRepositoryRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_repo_proto_msgTypes[29].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SyncRepositoryResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_repo_proto_msgTypes[30].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*HashRepositoryRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_repo_proto_msgTypes[31].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*HashRepositoryResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_repo_proto_msgTypes[32].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*MergeBaseRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_repo_proto_msgTypes[33].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*MergeBaseResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_repo_proto_msgTypes[34].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*FileContent); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_repo_proto_msgTypes[35].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*MatchFilesRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_repo_proto_msgTypes[36].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*MatchFilesResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_repo_proto_msgTypes[37].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GeneratePipelineRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_repo_proto_msgTypes[38].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GeneratePipelineResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + file_repo_proto_msgTypes[0].OneofWrappers = []interface{}{ + (*CreateRepositoryRequest_Header)(nil), + (*CreateRepositoryRequest_File)(nil), + } + file_repo_proto_msgTypes[17].OneofWrappers = []interface{}{ + (*GetBlobResponse_Header)(nil), + (*GetBlobResponse_Content)(nil), + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_repo_proto_rawDesc, + NumEnums: 4, + NumMessages: 39, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_repo_proto_goTypes, + DependencyIndexes: file_repo_proto_depIdxs, + EnumInfos: file_repo_proto_enumTypes, + 
MessageInfos: file_repo_proto_msgTypes, + }.Build() + File_repo_proto = out.File + file_repo_proto_rawDesc = nil + file_repo_proto_goTypes = nil + file_repo_proto_depIdxs = nil +} diff --git a/gitrpc/rpc/repo_grpc.pb.go b/gitrpc/rpc/repo_grpc.pb.go new file mode 100644 index 0000000000..18dd4255a7 --- /dev/null +++ b/gitrpc/rpc/repo_grpc.pb.go @@ -0,0 +1,725 @@ +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.2.0 +// - protoc v3.21.11 +// source: repo.proto + +package rpc + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.32.0 or later. +const _ = grpc.SupportPackageIsVersion7 + +// RepositoryServiceClient is the client API for RepositoryService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type RepositoryServiceClient interface { + CreateRepository(ctx context.Context, opts ...grpc.CallOption) (RepositoryService_CreateRepositoryClient, error) + GetTreeNode(ctx context.Context, in *GetTreeNodeRequest, opts ...grpc.CallOption) (*GetTreeNodeResponse, error) + ListTreeNodes(ctx context.Context, in *ListTreeNodesRequest, opts ...grpc.CallOption) (RepositoryService_ListTreeNodesClient, error) + PathsDetails(ctx context.Context, in *PathsDetailsRequest, opts ...grpc.CallOption) (*PathsDetailsResponse, error) + GetSubmodule(ctx context.Context, in *GetSubmoduleRequest, opts ...grpc.CallOption) (*GetSubmoduleResponse, error) + GetBlob(ctx context.Context, in *GetBlobRequest, opts ...grpc.CallOption) (RepositoryService_GetBlobClient, error) + ListCommits(ctx context.Context, in *ListCommitsRequest, opts ...grpc.CallOption) (RepositoryService_ListCommitsClient, error) + GetCommit(ctx context.Context, in *GetCommitRequest, opts ...grpc.CallOption) (*GetCommitResponse, error) + GetCommitDivergences(ctx context.Context, in *GetCommitDivergencesRequest, opts ...grpc.CallOption) (*GetCommitDivergencesResponse, error) + DeleteRepository(ctx context.Context, in *DeleteRepositoryRequest, opts ...grpc.CallOption) (*DeleteRepositoryResponse, error) + SyncRepository(ctx context.Context, in *SyncRepositoryRequest, opts ...grpc.CallOption) (*SyncRepositoryResponse, error) + HashRepository(ctx context.Context, in *HashRepositoryRequest, opts ...grpc.CallOption) (*HashRepositoryResponse, error) + MergeBase(ctx context.Context, in *MergeBaseRequest, opts ...grpc.CallOption) (*MergeBaseResponse, error) + MatchFiles(ctx context.Context, in *MatchFilesRequest, opts ...grpc.CallOption) (*MatchFilesResponse, error) + GeneratePipeline(ctx context.Context, in *GeneratePipelineRequest, opts ...grpc.CallOption) (*GeneratePipelineResponse, error) +} + +type repositoryServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewRepositoryServiceClient(cc grpc.ClientConnInterface) RepositoryServiceClient { + return &repositoryServiceClient{cc} +} + +func (c *repositoryServiceClient) CreateRepository(ctx context.Context, opts ...grpc.CallOption) (RepositoryService_CreateRepositoryClient, error) { + stream, err := c.cc.NewStream(ctx, &RepositoryService_ServiceDesc.Streams[0], "/rpc.RepositoryService/CreateRepository", opts...) 
+ if err != nil { + return nil, err + } + x := &repositoryServiceCreateRepositoryClient{stream} + return x, nil +} + +type RepositoryService_CreateRepositoryClient interface { + Send(*CreateRepositoryRequest) error + CloseAndRecv() (*CreateRepositoryResponse, error) + grpc.ClientStream +} + +type repositoryServiceCreateRepositoryClient struct { + grpc.ClientStream +} + +func (x *repositoryServiceCreateRepositoryClient) Send(m *CreateRepositoryRequest) error { + return x.ClientStream.SendMsg(m) +} + +func (x *repositoryServiceCreateRepositoryClient) CloseAndRecv() (*CreateRepositoryResponse, error) { + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + m := new(CreateRepositoryResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *repositoryServiceClient) GetTreeNode(ctx context.Context, in *GetTreeNodeRequest, opts ...grpc.CallOption) (*GetTreeNodeResponse, error) { + out := new(GetTreeNodeResponse) + err := c.cc.Invoke(ctx, "/rpc.RepositoryService/GetTreeNode", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *repositoryServiceClient) ListTreeNodes(ctx context.Context, in *ListTreeNodesRequest, opts ...grpc.CallOption) (RepositoryService_ListTreeNodesClient, error) { + stream, err := c.cc.NewStream(ctx, &RepositoryService_ServiceDesc.Streams[1], "/rpc.RepositoryService/ListTreeNodes", opts...) + if err != nil { + return nil, err + } + x := &repositoryServiceListTreeNodesClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type RepositoryService_ListTreeNodesClient interface { + Recv() (*ListTreeNodesResponse, error) + grpc.ClientStream +} + +type repositoryServiceListTreeNodesClient struct { + grpc.ClientStream +} + +func (x *repositoryServiceListTreeNodesClient) Recv() (*ListTreeNodesResponse, error) { + m := new(ListTreeNodesResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *repositoryServiceClient) PathsDetails(ctx context.Context, in *PathsDetailsRequest, opts ...grpc.CallOption) (*PathsDetailsResponse, error) { + out := new(PathsDetailsResponse) + err := c.cc.Invoke(ctx, "/rpc.RepositoryService/PathsDetails", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *repositoryServiceClient) GetSubmodule(ctx context.Context, in *GetSubmoduleRequest, opts ...grpc.CallOption) (*GetSubmoduleResponse, error) { + out := new(GetSubmoduleResponse) + err := c.cc.Invoke(ctx, "/rpc.RepositoryService/GetSubmodule", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *repositoryServiceClient) GetBlob(ctx context.Context, in *GetBlobRequest, opts ...grpc.CallOption) (RepositoryService_GetBlobClient, error) { + stream, err := c.cc.NewStream(ctx, &RepositoryService_ServiceDesc.Streams[2], "/rpc.RepositoryService/GetBlob", opts...) 
+ if err != nil { + return nil, err + } + x := &repositoryServiceGetBlobClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type RepositoryService_GetBlobClient interface { + Recv() (*GetBlobResponse, error) + grpc.ClientStream +} + +type repositoryServiceGetBlobClient struct { + grpc.ClientStream +} + +func (x *repositoryServiceGetBlobClient) Recv() (*GetBlobResponse, error) { + m := new(GetBlobResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *repositoryServiceClient) ListCommits(ctx context.Context, in *ListCommitsRequest, opts ...grpc.CallOption) (RepositoryService_ListCommitsClient, error) { + stream, err := c.cc.NewStream(ctx, &RepositoryService_ServiceDesc.Streams[3], "/rpc.RepositoryService/ListCommits", opts...) + if err != nil { + return nil, err + } + x := &repositoryServiceListCommitsClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type RepositoryService_ListCommitsClient interface { + Recv() (*ListCommitsResponse, error) + grpc.ClientStream +} + +type repositoryServiceListCommitsClient struct { + grpc.ClientStream +} + +func (x *repositoryServiceListCommitsClient) Recv() (*ListCommitsResponse, error) { + m := new(ListCommitsResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *repositoryServiceClient) GetCommit(ctx context.Context, in *GetCommitRequest, opts ...grpc.CallOption) (*GetCommitResponse, error) { + out := new(GetCommitResponse) + err := c.cc.Invoke(ctx, "/rpc.RepositoryService/GetCommit", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *repositoryServiceClient) GetCommitDivergences(ctx context.Context, in *GetCommitDivergencesRequest, opts ...grpc.CallOption) (*GetCommitDivergencesResponse, error) { + out := new(GetCommitDivergencesResponse) + err := c.cc.Invoke(ctx, "/rpc.RepositoryService/GetCommitDivergences", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *repositoryServiceClient) DeleteRepository(ctx context.Context, in *DeleteRepositoryRequest, opts ...grpc.CallOption) (*DeleteRepositoryResponse, error) { + out := new(DeleteRepositoryResponse) + err := c.cc.Invoke(ctx, "/rpc.RepositoryService/DeleteRepository", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *repositoryServiceClient) SyncRepository(ctx context.Context, in *SyncRepositoryRequest, opts ...grpc.CallOption) (*SyncRepositoryResponse, error) { + out := new(SyncRepositoryResponse) + err := c.cc.Invoke(ctx, "/rpc.RepositoryService/SyncRepository", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *repositoryServiceClient) HashRepository(ctx context.Context, in *HashRepositoryRequest, opts ...grpc.CallOption) (*HashRepositoryResponse, error) { + out := new(HashRepositoryResponse) + err := c.cc.Invoke(ctx, "/rpc.RepositoryService/HashRepository", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *repositoryServiceClient) MergeBase(ctx context.Context, in *MergeBaseRequest, opts ...grpc.CallOption) (*MergeBaseResponse, error) { + out := new(MergeBaseResponse) + err := c.cc.Invoke(ctx, "/rpc.RepositoryService/MergeBase", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *repositoryServiceClient) MatchFiles(ctx context.Context, in *MatchFilesRequest, opts ...grpc.CallOption) (*MatchFilesResponse, error) { + out := new(MatchFilesResponse) + err := c.cc.Invoke(ctx, "/rpc.RepositoryService/MatchFiles", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *repositoryServiceClient) GeneratePipeline(ctx context.Context, in *GeneratePipelineRequest, opts ...grpc.CallOption) (*GeneratePipelineResponse, error) { + out := new(GeneratePipelineResponse) + err := c.cc.Invoke(ctx, "/rpc.RepositoryService/GeneratePipeline", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// RepositoryServiceServer is the server API for RepositoryService service. +// All implementations must embed UnimplementedRepositoryServiceServer +// for forward compatibility +type RepositoryServiceServer interface { + CreateRepository(RepositoryService_CreateRepositoryServer) error + GetTreeNode(context.Context, *GetTreeNodeRequest) (*GetTreeNodeResponse, error) + ListTreeNodes(*ListTreeNodesRequest, RepositoryService_ListTreeNodesServer) error + PathsDetails(context.Context, *PathsDetailsRequest) (*PathsDetailsResponse, error) + GetSubmodule(context.Context, *GetSubmoduleRequest) (*GetSubmoduleResponse, error) + GetBlob(*GetBlobRequest, RepositoryService_GetBlobServer) error + ListCommits(*ListCommitsRequest, RepositoryService_ListCommitsServer) error + GetCommit(context.Context, *GetCommitRequest) (*GetCommitResponse, error) + GetCommitDivergences(context.Context, *GetCommitDivergencesRequest) (*GetCommitDivergencesResponse, error) + DeleteRepository(context.Context, *DeleteRepositoryRequest) (*DeleteRepositoryResponse, error) + SyncRepository(context.Context, *SyncRepositoryRequest) (*SyncRepositoryResponse, error) + HashRepository(context.Context, *HashRepositoryRequest) (*HashRepositoryResponse, error) + MergeBase(context.Context, *MergeBaseRequest) (*MergeBaseResponse, error) + MatchFiles(context.Context, *MatchFilesRequest) (*MatchFilesResponse, error) + GeneratePipeline(context.Context, *GeneratePipelineRequest) (*GeneratePipelineResponse, error) + mustEmbedUnimplementedRepositoryServiceServer() +} + +// UnimplementedRepositoryServiceServer must be embedded to have forward compatible implementations. 
+type UnimplementedRepositoryServiceServer struct { +} + +func (UnimplementedRepositoryServiceServer) CreateRepository(RepositoryService_CreateRepositoryServer) error { + return status.Errorf(codes.Unimplemented, "method CreateRepository not implemented") +} +func (UnimplementedRepositoryServiceServer) GetTreeNode(context.Context, *GetTreeNodeRequest) (*GetTreeNodeResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetTreeNode not implemented") +} +func (UnimplementedRepositoryServiceServer) ListTreeNodes(*ListTreeNodesRequest, RepositoryService_ListTreeNodesServer) error { + return status.Errorf(codes.Unimplemented, "method ListTreeNodes not implemented") +} +func (UnimplementedRepositoryServiceServer) PathsDetails(context.Context, *PathsDetailsRequest) (*PathsDetailsResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method PathsDetails not implemented") +} +func (UnimplementedRepositoryServiceServer) GetSubmodule(context.Context, *GetSubmoduleRequest) (*GetSubmoduleResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetSubmodule not implemented") +} +func (UnimplementedRepositoryServiceServer) GetBlob(*GetBlobRequest, RepositoryService_GetBlobServer) error { + return status.Errorf(codes.Unimplemented, "method GetBlob not implemented") +} +func (UnimplementedRepositoryServiceServer) ListCommits(*ListCommitsRequest, RepositoryService_ListCommitsServer) error { + return status.Errorf(codes.Unimplemented, "method ListCommits not implemented") +} +func (UnimplementedRepositoryServiceServer) GetCommit(context.Context, *GetCommitRequest) (*GetCommitResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetCommit not implemented") +} +func (UnimplementedRepositoryServiceServer) GetCommitDivergences(context.Context, *GetCommitDivergencesRequest) (*GetCommitDivergencesResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetCommitDivergences not implemented") +} +func (UnimplementedRepositoryServiceServer) DeleteRepository(context.Context, *DeleteRepositoryRequest) (*DeleteRepositoryResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method DeleteRepository not implemented") +} +func (UnimplementedRepositoryServiceServer) SyncRepository(context.Context, *SyncRepositoryRequest) (*SyncRepositoryResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method SyncRepository not implemented") +} +func (UnimplementedRepositoryServiceServer) HashRepository(context.Context, *HashRepositoryRequest) (*HashRepositoryResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method HashRepository not implemented") +} +func (UnimplementedRepositoryServiceServer) MergeBase(context.Context, *MergeBaseRequest) (*MergeBaseResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method MergeBase not implemented") +} +func (UnimplementedRepositoryServiceServer) MatchFiles(context.Context, *MatchFilesRequest) (*MatchFilesResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method MatchFiles not implemented") +} +func (UnimplementedRepositoryServiceServer) GeneratePipeline(context.Context, *GeneratePipelineRequest) (*GeneratePipelineResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GeneratePipeline not implemented") +} +func (UnimplementedRepositoryServiceServer) mustEmbedUnimplementedRepositoryServiceServer() {} + +// UnsafeRepositoryServiceServer may be embedded to opt out of forward compatibility for this service. 
+// Use of this interface is not recommended, as added methods to RepositoryServiceServer will +// result in compilation errors. +type UnsafeRepositoryServiceServer interface { + mustEmbedUnimplementedRepositoryServiceServer() +} + +func RegisterRepositoryServiceServer(s grpc.ServiceRegistrar, srv RepositoryServiceServer) { + s.RegisterService(&RepositoryService_ServiceDesc, srv) +} + +func _RepositoryService_CreateRepository_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(RepositoryServiceServer).CreateRepository(&repositoryServiceCreateRepositoryServer{stream}) +} + +type RepositoryService_CreateRepositoryServer interface { + SendAndClose(*CreateRepositoryResponse) error + Recv() (*CreateRepositoryRequest, error) + grpc.ServerStream +} + +type repositoryServiceCreateRepositoryServer struct { + grpc.ServerStream +} + +func (x *repositoryServiceCreateRepositoryServer) SendAndClose(m *CreateRepositoryResponse) error { + return x.ServerStream.SendMsg(m) +} + +func (x *repositoryServiceCreateRepositoryServer) Recv() (*CreateRepositoryRequest, error) { + m := new(CreateRepositoryRequest) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func _RepositoryService_GetTreeNode_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetTreeNodeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RepositoryServiceServer).GetTreeNode(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/rpc.RepositoryService/GetTreeNode", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RepositoryServiceServer).GetTreeNode(ctx, req.(*GetTreeNodeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RepositoryService_ListTreeNodes_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(ListTreeNodesRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(RepositoryServiceServer).ListTreeNodes(m, &repositoryServiceListTreeNodesServer{stream}) +} + +type RepositoryService_ListTreeNodesServer interface { + Send(*ListTreeNodesResponse) error + grpc.ServerStream +} + +type repositoryServiceListTreeNodesServer struct { + grpc.ServerStream +} + +func (x *repositoryServiceListTreeNodesServer) Send(m *ListTreeNodesResponse) error { + return x.ServerStream.SendMsg(m) +} + +func _RepositoryService_PathsDetails_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PathsDetailsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RepositoryServiceServer).PathsDetails(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/rpc.RepositoryService/PathsDetails", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RepositoryServiceServer).PathsDetails(ctx, req.(*PathsDetailsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RepositoryService_GetSubmodule_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetSubmoduleRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RepositoryServiceServer).GetSubmodule(ctx, in) + } 
+ info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/rpc.RepositoryService/GetSubmodule", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RepositoryServiceServer).GetSubmodule(ctx, req.(*GetSubmoduleRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RepositoryService_GetBlob_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(GetBlobRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(RepositoryServiceServer).GetBlob(m, &repositoryServiceGetBlobServer{stream}) +} + +type RepositoryService_GetBlobServer interface { + Send(*GetBlobResponse) error + grpc.ServerStream +} + +type repositoryServiceGetBlobServer struct { + grpc.ServerStream +} + +func (x *repositoryServiceGetBlobServer) Send(m *GetBlobResponse) error { + return x.ServerStream.SendMsg(m) +} + +func _RepositoryService_ListCommits_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(ListCommitsRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(RepositoryServiceServer).ListCommits(m, &repositoryServiceListCommitsServer{stream}) +} + +type RepositoryService_ListCommitsServer interface { + Send(*ListCommitsResponse) error + grpc.ServerStream +} + +type repositoryServiceListCommitsServer struct { + grpc.ServerStream +} + +func (x *repositoryServiceListCommitsServer) Send(m *ListCommitsResponse) error { + return x.ServerStream.SendMsg(m) +} + +func _RepositoryService_GetCommit_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetCommitRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RepositoryServiceServer).GetCommit(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/rpc.RepositoryService/GetCommit", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RepositoryServiceServer).GetCommit(ctx, req.(*GetCommitRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RepositoryService_GetCommitDivergences_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetCommitDivergencesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RepositoryServiceServer).GetCommitDivergences(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/rpc.RepositoryService/GetCommitDivergences", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RepositoryServiceServer).GetCommitDivergences(ctx, req.(*GetCommitDivergencesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RepositoryService_DeleteRepository_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteRepositoryRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RepositoryServiceServer).DeleteRepository(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/rpc.RepositoryService/DeleteRepository", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RepositoryServiceServer).DeleteRepository(ctx, req.(*DeleteRepositoryRequest)) + } + return 
interceptor(ctx, in, info, handler) +} + +func _RepositoryService_SyncRepository_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SyncRepositoryRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RepositoryServiceServer).SyncRepository(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/rpc.RepositoryService/SyncRepository", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RepositoryServiceServer).SyncRepository(ctx, req.(*SyncRepositoryRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RepositoryService_HashRepository_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(HashRepositoryRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RepositoryServiceServer).HashRepository(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/rpc.RepositoryService/HashRepository", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RepositoryServiceServer).HashRepository(ctx, req.(*HashRepositoryRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RepositoryService_MergeBase_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MergeBaseRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RepositoryServiceServer).MergeBase(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/rpc.RepositoryService/MergeBase", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RepositoryServiceServer).MergeBase(ctx, req.(*MergeBaseRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RepositoryService_MatchFiles_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MatchFilesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RepositoryServiceServer).MatchFiles(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/rpc.RepositoryService/MatchFiles", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RepositoryServiceServer).MatchFiles(ctx, req.(*MatchFilesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RepositoryService_GeneratePipeline_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GeneratePipelineRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RepositoryServiceServer).GeneratePipeline(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/rpc.RepositoryService/GeneratePipeline", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RepositoryServiceServer).GeneratePipeline(ctx, req.(*GeneratePipelineRequest)) + } + return interceptor(ctx, in, info, handler) +} + +// RepositoryService_ServiceDesc is the grpc.ServiceDesc for RepositoryService service. 
+// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var RepositoryService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "rpc.RepositoryService", + HandlerType: (*RepositoryServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetTreeNode", + Handler: _RepositoryService_GetTreeNode_Handler, + }, + { + MethodName: "PathsDetails", + Handler: _RepositoryService_PathsDetails_Handler, + }, + { + MethodName: "GetSubmodule", + Handler: _RepositoryService_GetSubmodule_Handler, + }, + { + MethodName: "GetCommit", + Handler: _RepositoryService_GetCommit_Handler, + }, + { + MethodName: "GetCommitDivergences", + Handler: _RepositoryService_GetCommitDivergences_Handler, + }, + { + MethodName: "DeleteRepository", + Handler: _RepositoryService_DeleteRepository_Handler, + }, + { + MethodName: "SyncRepository", + Handler: _RepositoryService_SyncRepository_Handler, + }, + { + MethodName: "HashRepository", + Handler: _RepositoryService_HashRepository_Handler, + }, + { + MethodName: "MergeBase", + Handler: _RepositoryService_MergeBase_Handler, + }, + { + MethodName: "MatchFiles", + Handler: _RepositoryService_MatchFiles_Handler, + }, + { + MethodName: "GeneratePipeline", + Handler: _RepositoryService_GeneratePipeline_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "CreateRepository", + Handler: _RepositoryService_CreateRepository_Handler, + ClientStreams: true, + }, + { + StreamName: "ListTreeNodes", + Handler: _RepositoryService_ListTreeNodes_Handler, + ServerStreams: true, + }, + { + StreamName: "GetBlob", + Handler: _RepositoryService_GetBlob_Handler, + ServerStreams: true, + }, + { + StreamName: "ListCommits", + Handler: _RepositoryService_ListCommits_Handler, + ServerStreams: true, + }, + }, + Metadata: "repo.proto", +} diff --git a/gitrpc/rpc/shared.pb.go b/gitrpc/rpc/shared.pb.go new file mode 100644 index 0000000000..ad563bf241 --- /dev/null +++ b/gitrpc/rpc/shared.pb.go @@ -0,0 +1,974 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.28.1 +// protoc v3.21.11 +// source: shared.proto + +package rpc + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type SortOrder int32 + +const ( + SortOrder_Default SortOrder = 0 + SortOrder_Asc SortOrder = 1 + SortOrder_Desc SortOrder = 2 +) + +// Enum value maps for SortOrder. +var ( + SortOrder_name = map[int32]string{ + 0: "Default", + 1: "Asc", + 2: "Desc", + } + SortOrder_value = map[string]int32{ + "Default": 0, + "Asc": 1, + "Desc": 2, + } +) + +func (x SortOrder) Enum() *SortOrder { + p := new(SortOrder) + *p = x + return p +} + +func (x SortOrder) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (SortOrder) Descriptor() protoreflect.EnumDescriptor { + return file_shared_proto_enumTypes[0].Descriptor() +} + +func (SortOrder) Type() protoreflect.EnumType { + return &file_shared_proto_enumTypes[0] +} + +func (x SortOrder) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use SortOrder.Descriptor instead. 
+func (SortOrder) EnumDescriptor() ([]byte, []int) { + return file_shared_proto_rawDescGZIP(), []int{0} +} + +type RefType int32 + +const ( + RefType_Undefined RefType = 0 + RefType_RefRaw RefType = 1 + RefType_RefBranch RefType = 2 + RefType_RefTag RefType = 3 + RefType_RefPullReqHead RefType = 4 + RefType_RefPullReqMerge RefType = 5 +) + +// Enum value maps for RefType. +var ( + RefType_name = map[int32]string{ + 0: "Undefined", + 1: "RefRaw", + 2: "RefBranch", + 3: "RefTag", + 4: "RefPullReqHead", + 5: "RefPullReqMerge", + } + RefType_value = map[string]int32{ + "Undefined": 0, + "RefRaw": 1, + "RefBranch": 2, + "RefTag": 3, + "RefPullReqHead": 4, + "RefPullReqMerge": 5, + } +) + +func (x RefType) Enum() *RefType { + p := new(RefType) + *p = x + return p +} + +func (x RefType) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (RefType) Descriptor() protoreflect.EnumDescriptor { + return file_shared_proto_enumTypes[1].Descriptor() +} + +func (RefType) Type() protoreflect.EnumType { + return &file_shared_proto_enumTypes[1] +} + +func (x RefType) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use RefType.Descriptor instead. +func (RefType) EnumDescriptor() ([]byte, []int) { + return file_shared_proto_rawDescGZIP(), []int{1} +} + +type ReadRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + RepoUid string `protobuf:"bytes,1,opt,name=repo_uid,json=repoUid,proto3" json:"repo_uid,omitempty"` +} + +func (x *ReadRequest) Reset() { + *x = ReadRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_shared_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ReadRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ReadRequest) ProtoMessage() {} + +func (x *ReadRequest) ProtoReflect() protoreflect.Message { + mi := &file_shared_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ReadRequest.ProtoReflect.Descriptor instead. 
+func (*ReadRequest) Descriptor() ([]byte, []int) { + return file_shared_proto_rawDescGZIP(), []int{0} +} + +func (x *ReadRequest) GetRepoUid() string { + if x != nil { + return x.RepoUid + } + return "" +} + +type WriteRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + RepoUid string `protobuf:"bytes,1,opt,name=repo_uid,json=repoUid,proto3" json:"repo_uid,omitempty"` + EnvVars []*EnvVar `protobuf:"bytes,2,rep,name=env_vars,json=envVars,proto3" json:"env_vars,omitempty"` + Actor *Identity `protobuf:"bytes,3,opt,name=actor,proto3" json:"actor,omitempty"` +} + +func (x *WriteRequest) Reset() { + *x = WriteRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_shared_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *WriteRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*WriteRequest) ProtoMessage() {} + +func (x *WriteRequest) ProtoReflect() protoreflect.Message { + mi := &file_shared_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use WriteRequest.ProtoReflect.Descriptor instead. +func (*WriteRequest) Descriptor() ([]byte, []int) { + return file_shared_proto_rawDescGZIP(), []int{1} +} + +func (x *WriteRequest) GetRepoUid() string { + if x != nil { + return x.RepoUid + } + return "" +} + +func (x *WriteRequest) GetEnvVars() []*EnvVar { + if x != nil { + return x.EnvVars + } + return nil +} + +func (x *WriteRequest) GetActor() *Identity { + if x != nil { + return x.Actor + } + return nil +} + +type EnvVar struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` +} + +func (x *EnvVar) Reset() { + *x = EnvVar{} + if protoimpl.UnsafeEnabled { + mi := &file_shared_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *EnvVar) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*EnvVar) ProtoMessage() {} + +func (x *EnvVar) ProtoReflect() protoreflect.Message { + mi := &file_shared_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use EnvVar.ProtoReflect.Descriptor instead. 
+func (*EnvVar) Descriptor() ([]byte, []int) { + return file_shared_proto_rawDescGZIP(), []int{2} +} + +func (x *EnvVar) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *EnvVar) GetValue() string { + if x != nil { + return x.Value + } + return "" +} + +type FileUpload struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Types that are assignable to Data: + // *FileUpload_Header + // *FileUpload_Chunk + Data isFileUpload_Data `protobuf_oneof:"data"` +} + +func (x *FileUpload) Reset() { + *x = FileUpload{} + if protoimpl.UnsafeEnabled { + mi := &file_shared_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *FileUpload) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*FileUpload) ProtoMessage() {} + +func (x *FileUpload) ProtoReflect() protoreflect.Message { + mi := &file_shared_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use FileUpload.ProtoReflect.Descriptor instead. +func (*FileUpload) Descriptor() ([]byte, []int) { + return file_shared_proto_rawDescGZIP(), []int{3} +} + +func (m *FileUpload) GetData() isFileUpload_Data { + if m != nil { + return m.Data + } + return nil +} + +func (x *FileUpload) GetHeader() *FileUploadHeader { + if x, ok := x.GetData().(*FileUpload_Header); ok { + return x.Header + } + return nil +} + +func (x *FileUpload) GetChunk() *Chunk { + if x, ok := x.GetData().(*FileUpload_Chunk); ok { + return x.Chunk + } + return nil +} + +type isFileUpload_Data interface { + isFileUpload_Data() +} + +type FileUpload_Header struct { + Header *FileUploadHeader `protobuf:"bytes,1,opt,name=header,proto3,oneof"` +} + +type FileUpload_Chunk struct { + Chunk *Chunk `protobuf:"bytes,2,opt,name=chunk,proto3,oneof"` +} + +func (*FileUpload_Header) isFileUpload_Data() {} + +func (*FileUpload_Chunk) isFileUpload_Data() {} + +type FileUploadHeader struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Path string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"` +} + +func (x *FileUploadHeader) Reset() { + *x = FileUploadHeader{} + if protoimpl.UnsafeEnabled { + mi := &file_shared_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *FileUploadHeader) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*FileUploadHeader) ProtoMessage() {} + +func (x *FileUploadHeader) ProtoReflect() protoreflect.Message { + mi := &file_shared_proto_msgTypes[4] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use FileUploadHeader.ProtoReflect.Descriptor instead. 
+func (*FileUploadHeader) Descriptor() ([]byte, []int) { + return file_shared_proto_rawDescGZIP(), []int{4} +} + +func (x *FileUploadHeader) GetPath() string { + if x != nil { + return x.Path + } + return "" +} + +type Chunk struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Eof bool `protobuf:"varint,1,opt,name=eof,proto3" json:"eof,omitempty"` + Data []byte `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` +} + +func (x *Chunk) Reset() { + *x = Chunk{} + if protoimpl.UnsafeEnabled { + mi := &file_shared_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Chunk) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Chunk) ProtoMessage() {} + +func (x *Chunk) ProtoReflect() protoreflect.Message { + mi := &file_shared_proto_msgTypes[5] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Chunk.ProtoReflect.Descriptor instead. +func (*Chunk) Descriptor() ([]byte, []int) { + return file_shared_proto_rawDescGZIP(), []int{5} +} + +func (x *Chunk) GetEof() bool { + if x != nil { + return x.Eof + } + return false +} + +func (x *Chunk) GetData() []byte { + if x != nil { + return x.Data + } + return nil +} + +type Commit struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Sha string `protobuf:"bytes,1,opt,name=sha,proto3" json:"sha,omitempty"` + Title string `protobuf:"bytes,2,opt,name=title,proto3" json:"title,omitempty"` + Message string `protobuf:"bytes,3,opt,name=message,proto3" json:"message,omitempty"` + Author *Signature `protobuf:"bytes,4,opt,name=author,proto3" json:"author,omitempty"` + Committer *Signature `protobuf:"bytes,5,opt,name=committer,proto3" json:"committer,omitempty"` +} + +func (x *Commit) Reset() { + *x = Commit{} + if protoimpl.UnsafeEnabled { + mi := &file_shared_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Commit) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Commit) ProtoMessage() {} + +func (x *Commit) ProtoReflect() protoreflect.Message { + mi := &file_shared_proto_msgTypes[6] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Commit.ProtoReflect.Descriptor instead. 
+func (*Commit) Descriptor() ([]byte, []int) { + return file_shared_proto_rawDescGZIP(), []int{6} +} + +func (x *Commit) GetSha() string { + if x != nil { + return x.Sha + } + return "" +} + +func (x *Commit) GetTitle() string { + if x != nil { + return x.Title + } + return "" +} + +func (x *Commit) GetMessage() string { + if x != nil { + return x.Message + } + return "" +} + +func (x *Commit) GetAuthor() *Signature { + if x != nil { + return x.Author + } + return nil +} + +func (x *Commit) GetCommitter() *Signature { + if x != nil { + return x.Committer + } + return nil +} + +type Signature struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Identity *Identity `protobuf:"bytes,1,opt,name=identity,proto3" json:"identity,omitempty"` + When int64 `protobuf:"varint,2,opt,name=when,proto3" json:"when,omitempty"` +} + +func (x *Signature) Reset() { + *x = Signature{} + if protoimpl.UnsafeEnabled { + mi := &file_shared_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Signature) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Signature) ProtoMessage() {} + +func (x *Signature) ProtoReflect() protoreflect.Message { + mi := &file_shared_proto_msgTypes[7] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Signature.ProtoReflect.Descriptor instead. +func (*Signature) Descriptor() ([]byte, []int) { + return file_shared_proto_rawDescGZIP(), []int{7} +} + +func (x *Signature) GetIdentity() *Identity { + if x != nil { + return x.Identity + } + return nil +} + +func (x *Signature) GetWhen() int64 { + if x != nil { + return x.When + } + return 0 +} + +type Identity struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Email string `protobuf:"bytes,2,opt,name=email,proto3" json:"email,omitempty"` +} + +func (x *Identity) Reset() { + *x = Identity{} + if protoimpl.UnsafeEnabled { + mi := &file_shared_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Identity) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Identity) ProtoMessage() {} + +func (x *Identity) ProtoReflect() protoreflect.Message { + mi := &file_shared_proto_msgTypes[8] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Identity.ProtoReflect.Descriptor instead. +func (*Identity) Descriptor() ([]byte, []int) { + return file_shared_proto_rawDescGZIP(), []int{8} +} + +func (x *Identity) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *Identity) GetEmail() string { + if x != nil { + return x.Email + } + return "" +} + +// PathNotFoundError is an error returned in the case a provided path is not found in the repo. +type PathNotFoundError struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // path is the path that wasn't found in the repo. 
+ Path string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"` +} + +func (x *PathNotFoundError) Reset() { + *x = PathNotFoundError{} + if protoimpl.UnsafeEnabled { + mi := &file_shared_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *PathNotFoundError) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PathNotFoundError) ProtoMessage() {} + +func (x *PathNotFoundError) ProtoReflect() protoreflect.Message { + mi := &file_shared_proto_msgTypes[9] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PathNotFoundError.ProtoReflect.Descriptor instead. +func (*PathNotFoundError) Descriptor() ([]byte, []int) { + return file_shared_proto_rawDescGZIP(), []int{9} +} + +func (x *PathNotFoundError) GetPath() string { + if x != nil { + return x.Path + } + return "" +} + +var File_shared_proto protoreflect.FileDescriptor + +var file_shared_proto_rawDesc = []byte{ + 0x0a, 0x0c, 0x73, 0x68, 0x61, 0x72, 0x65, 0x64, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x03, + 0x72, 0x70, 0x63, 0x22, 0x28, 0x0a, 0x0b, 0x52, 0x65, 0x61, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x12, 0x19, 0x0a, 0x08, 0x72, 0x65, 0x70, 0x6f, 0x5f, 0x75, 0x69, 0x64, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x72, 0x65, 0x70, 0x6f, 0x55, 0x69, 0x64, 0x22, 0x76, 0x0a, + 0x0c, 0x57, 0x72, 0x69, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x19, 0x0a, + 0x08, 0x72, 0x65, 0x70, 0x6f, 0x5f, 0x75, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x07, 0x72, 0x65, 0x70, 0x6f, 0x55, 0x69, 0x64, 0x12, 0x26, 0x0a, 0x08, 0x65, 0x6e, 0x76, 0x5f, + 0x76, 0x61, 0x72, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x72, 0x70, 0x63, + 0x2e, 0x45, 0x6e, 0x76, 0x56, 0x61, 0x72, 0x52, 0x07, 0x65, 0x6e, 0x76, 0x56, 0x61, 0x72, 0x73, + 0x12, 0x23, 0x0a, 0x05, 0x61, 0x63, 0x74, 0x6f, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x0d, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x52, 0x05, + 0x61, 0x63, 0x74, 0x6f, 0x72, 0x22, 0x32, 0x0a, 0x06, 0x45, 0x6e, 0x76, 0x56, 0x61, 0x72, 0x12, + 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, + 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x69, 0x0a, 0x0a, 0x46, 0x69, 0x6c, + 0x65, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x12, 0x2f, 0x0a, 0x06, 0x68, 0x65, 0x61, 0x64, 0x65, + 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x46, 0x69, + 0x6c, 0x65, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x48, 0x00, + 0x52, 0x06, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x12, 0x22, 0x0a, 0x05, 0x63, 0x68, 0x75, 0x6e, + 0x6b, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0a, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x43, 0x68, + 0x75, 0x6e, 0x6b, 0x48, 0x00, 0x52, 0x05, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x42, 0x06, 0x0a, 0x04, + 0x64, 0x61, 0x74, 0x61, 0x22, 0x26, 0x0a, 0x10, 0x46, 0x69, 0x6c, 0x65, 0x55, 0x70, 0x6c, 0x6f, + 0x61, 0x64, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x22, 0x2d, 0x0a, 0x05, + 0x43, 0x68, 0x75, 0x6e, 0x6b, 0x12, 0x10, 0x0a, 0x03, 
0x65, 0x6f, 0x66, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x08, 0x52, 0x03, 0x65, 0x6f, 0x66, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x22, 0xa0, 0x01, 0x0a, 0x06, + 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x73, 0x68, 0x61, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x03, 0x73, 0x68, 0x61, 0x12, 0x14, 0x0a, 0x05, 0x74, 0x69, 0x74, 0x6c, + 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x74, 0x69, 0x74, 0x6c, 0x65, 0x12, 0x18, + 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x26, 0x0a, 0x06, 0x61, 0x75, 0x74, 0x68, + 0x6f, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, + 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x52, 0x06, 0x61, 0x75, 0x74, 0x68, 0x6f, 0x72, + 0x12, 0x2c, 0x0a, 0x09, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x72, 0x18, 0x05, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x74, + 0x75, 0x72, 0x65, 0x52, 0x09, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x74, 0x65, 0x72, 0x22, 0x4a, + 0x0a, 0x09, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x12, 0x29, 0x0a, 0x08, 0x69, + 0x64, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0d, 0x2e, + 0x72, 0x70, 0x63, 0x2e, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x52, 0x08, 0x69, 0x64, + 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x12, 0x12, 0x0a, 0x04, 0x77, 0x68, 0x65, 0x6e, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x03, 0x52, 0x04, 0x77, 0x68, 0x65, 0x6e, 0x22, 0x34, 0x0a, 0x08, 0x49, 0x64, + 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x6d, + 0x61, 0x69, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x6d, 0x61, 0x69, 0x6c, + 0x22, 0x27, 0x0a, 0x11, 0x50, 0x61, 0x74, 0x68, 0x4e, 0x6f, 0x74, 0x46, 0x6f, 0x75, 0x6e, 0x64, + 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x2a, 0x2b, 0x0a, 0x09, 0x53, 0x6f, 0x72, + 0x74, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x12, 0x0b, 0x0a, 0x07, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, + 0x74, 0x10, 0x00, 0x12, 0x07, 0x0a, 0x03, 0x41, 0x73, 0x63, 0x10, 0x01, 0x12, 0x08, 0x0a, 0x04, + 0x44, 0x65, 0x73, 0x63, 0x10, 0x02, 0x2a, 0x68, 0x0a, 0x07, 0x52, 0x65, 0x66, 0x54, 0x79, 0x70, + 0x65, 0x12, 0x0d, 0x0a, 0x09, 0x55, 0x6e, 0x64, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x64, 0x10, 0x00, + 0x12, 0x0a, 0x0a, 0x06, 0x52, 0x65, 0x66, 0x52, 0x61, 0x77, 0x10, 0x01, 0x12, 0x0d, 0x0a, 0x09, + 0x52, 0x65, 0x66, 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x10, 0x02, 0x12, 0x0a, 0x0a, 0x06, 0x52, + 0x65, 0x66, 0x54, 0x61, 0x67, 0x10, 0x03, 0x12, 0x12, 0x0a, 0x0e, 0x52, 0x65, 0x66, 0x50, 0x75, + 0x6c, 0x6c, 0x52, 0x65, 0x71, 0x48, 0x65, 0x61, 0x64, 0x10, 0x04, 0x12, 0x13, 0x0a, 0x0f, 0x52, + 0x65, 0x66, 0x50, 0x75, 0x6c, 0x6c, 0x52, 0x65, 0x71, 0x4d, 0x65, 0x72, 0x67, 0x65, 0x10, 0x05, + 0x42, 0x27, 0x5a, 0x25, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x68, + 0x61, 0x72, 0x6e, 0x65, 0x73, 0x73, 0x2f, 0x67, 0x69, 0x74, 0x6e, 0x65, 0x73, 0x73, 0x2f, 0x67, + 0x69, 0x74, 0x72, 0x70, 0x63, 0x2f, 0x72, 0x70, 0x63, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x33, +} + +var ( + file_shared_proto_rawDescOnce sync.Once + 
file_shared_proto_rawDescData = file_shared_proto_rawDesc +) + +func file_shared_proto_rawDescGZIP() []byte { + file_shared_proto_rawDescOnce.Do(func() { + file_shared_proto_rawDescData = protoimpl.X.CompressGZIP(file_shared_proto_rawDescData) + }) + return file_shared_proto_rawDescData +} + +var file_shared_proto_enumTypes = make([]protoimpl.EnumInfo, 2) +var file_shared_proto_msgTypes = make([]protoimpl.MessageInfo, 10) +var file_shared_proto_goTypes = []interface{}{ + (SortOrder)(0), // 0: rpc.SortOrder + (RefType)(0), // 1: rpc.RefType + (*ReadRequest)(nil), // 2: rpc.ReadRequest + (*WriteRequest)(nil), // 3: rpc.WriteRequest + (*EnvVar)(nil), // 4: rpc.EnvVar + (*FileUpload)(nil), // 5: rpc.FileUpload + (*FileUploadHeader)(nil), // 6: rpc.FileUploadHeader + (*Chunk)(nil), // 7: rpc.Chunk + (*Commit)(nil), // 8: rpc.Commit + (*Signature)(nil), // 9: rpc.Signature + (*Identity)(nil), // 10: rpc.Identity + (*PathNotFoundError)(nil), // 11: rpc.PathNotFoundError +} +var file_shared_proto_depIdxs = []int32{ + 4, // 0: rpc.WriteRequest.env_vars:type_name -> rpc.EnvVar + 10, // 1: rpc.WriteRequest.actor:type_name -> rpc.Identity + 6, // 2: rpc.FileUpload.header:type_name -> rpc.FileUploadHeader + 7, // 3: rpc.FileUpload.chunk:type_name -> rpc.Chunk + 9, // 4: rpc.Commit.author:type_name -> rpc.Signature + 9, // 5: rpc.Commit.committer:type_name -> rpc.Signature + 10, // 6: rpc.Signature.identity:type_name -> rpc.Identity + 7, // [7:7] is the sub-list for method output_type + 7, // [7:7] is the sub-list for method input_type + 7, // [7:7] is the sub-list for extension type_name + 7, // [7:7] is the sub-list for extension extendee + 0, // [0:7] is the sub-list for field type_name +} + +func init() { file_shared_proto_init() } +func file_shared_proto_init() { + if File_shared_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_shared_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ReadRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_shared_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*WriteRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_shared_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*EnvVar); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_shared_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*FileUpload); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_shared_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*FileUploadHeader); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_shared_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Chunk); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_shared_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Commit); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields 
+ default: + return nil + } + } + file_shared_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Signature); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_shared_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Identity); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_shared_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*PathNotFoundError); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + file_shared_proto_msgTypes[3].OneofWrappers = []interface{}{ + (*FileUpload_Header)(nil), + (*FileUpload_Chunk)(nil), + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_shared_proto_rawDesc, + NumEnums: 2, + NumMessages: 10, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_shared_proto_goTypes, + DependencyIndexes: file_shared_proto_depIdxs, + EnumInfos: file_shared_proto_enumTypes, + MessageInfos: file_shared_proto_msgTypes, + }.Build() + File_shared_proto = out.File + file_shared_proto_rawDesc = nil + file_shared_proto_goTypes = nil + file_shared_proto_depIdxs = nil +} diff --git a/gitrpc/server/config.go b/gitrpc/server/config.go new file mode 100644 index 0000000000..c3c10af114 --- /dev/null +++ b/gitrpc/server/config.go @@ -0,0 +1,89 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package server + +import ( + "errors" + "time" +) + +const ( + ModeInMemory = "inmemory" + ModeRedis = "redis" + ModeNone = "none" +) + +// Config represents the configuration for the gitrpc server. +type Config struct { + // Bind specifies the addr used to bind the grpc server. + Bind string `envconfig:"GITRPC_SERVER_BIND" default:":3001"` + // GitRoot specifies the directory containing git related data (e.g. repos, ...) + GitRoot string `envconfig:"GITRPC_SERVER_GIT_ROOT"` + // TmpDir (optional) specifies the directory for temporary data (e.g. repo clones, ...) + TmpDir string `envconfig:"GITRPC_SERVER_TMP_DIR"` + // GitHookPath points to the binary used as git server hook. + GitHookPath string `envconfig:"GITRPC_SERVER_GIT_HOOK_PATH"` + + HTTP struct { + Bind string `envconfig:"GITRPC_SERVER_HTTP_BIND" default:":4001"` + } + MaxConnAge time.Duration `envconfig:"GITRPC_SERVER_MAX_CONN_AGE" default:"630720000s"` + MaxConnAgeGrace time.Duration `envconfig:"GITRPC_SERVER_MAX_CONN_AGE_GRACE" default:"630720000s"` + + // LastCommitCache holds configuration options for the last commit cache. + LastCommitCache struct { + // Mode determines where the cache will be. Valid values are "inmemory" (default), "redis" or "none". 
+ Mode string `envconfig:"GITRPC_LAST_COMMIT_CACHE_MODE" default:"inmemory"` + + // DurationSeconds defines cache duration in seconds of last commit, default=12h. + DurationSeconds int `envconfig:"GITRPC_LAST_COMMIT_CACHE_SECONDS" default:"43200"` + } + + Redis struct { + Endpoint string `envconfig:"GITRPC_REDIS_ENDPOINT" default:"localhost:6379"` + MaxRetries int `envconfig:"GITRPC_REDIS_MAX_RETRIES" default:"3"` + MinIdleConnections int `envconfig:"GITRPC_REDIS_MIN_IDLE_CONNECTIONS" default:"0"` + Password string `envconfig:"GITRPC_REDIS_PASSWORD"` + SentinelMode bool `envconfig:"GITRPC_REDIS_USE_SENTINEL" default:"false"` + SentinelMaster string `envconfig:"GITRPC_REDIS_SENTINEL_MASTER"` + SentinelEndpoint string `envconfig:"GITRPC_REDIS_SENTINEL_ENDPOINT"` + } +} + +func (c *Config) Validate() error { + if c == nil { + return errors.New("config is required") + } + if c.Bind == "" { + return errors.New("config.Bind is required") + } + if c.GitRoot == "" { + return errors.New("config.GitRoot is required") + } + if c.GitHookPath == "" { + return errors.New("config.GitHookPath is required") + } + if c.MaxConnAge == 0 { + return errors.New("config.MaxConnAge is required") + } + if c.MaxConnAgeGrace == 0 { + return errors.New("config.MaxConnAgeGrace is required") + } + if m := c.LastCommitCache.Mode; m != "" && m != ModeInMemory && m != ModeRedis && m != ModeNone { + return errors.New("config.LastCommitCache.Mode has unsupported value") + } + + return nil +} diff --git a/gitrpc/server/cron/clean_slate_data.go b/gitrpc/server/cron/clean_slate_data.go new file mode 100644 index 0000000000..7606893e5f --- /dev/null +++ b/gitrpc/server/cron/clean_slate_data.go @@ -0,0 +1,58 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package cron + +import ( + "context" + "os" + "path" + "path/filepath" + + "github.com/harness/gitness/gitrpc/server" + + "github.com/rs/zerolog/log" +) + +// cleanupRepoGraveyard cleanups repository graveyard. 
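+// It removes every entry found in the graveyard directory; failures to delete an
+// individual entry are logged and skipped so one bad entry does not block the rest,
+// and the loop stops early if the context gets cancelled.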
+func cleanupRepoGraveyard(ctx context.Context, graveyardpath string) error { + logger := log.Ctx(ctx) + repolist, err := os.ReadDir(graveyardpath) + if err != nil { + logger.Warn().Err(err).Msgf("failed to read repos graveyard directory %s", graveyardpath) + return err + } + for _, repo := range repolist { + // exit early if context is cancelled + if ctx.Err() != nil { + return ctx.Err() + } + if err := os.RemoveAll(path.Join(graveyardpath, repo.Name())); err != nil { + logger.Error().Err(err).Msgf("failed to remove repository %s from graveyard", repo.Name()) + } else { + logger.Info().Msgf("repository %s removed from graveyard", repo.Name()) + } + } + return nil +} + +func AddAllGitRPCCronJobs(cm *Manager, gitrpcconfig server.Config) error { + // periodic repository graveyard cleanup + graveyardpath := filepath.Join(gitrpcconfig.GitRoot, server.ReposGraveyardSubdirName) + err := cm.NewCronTask(Nightly, func(ctx context.Context) error { return cleanupRepoGraveyard(ctx, graveyardpath) }) + if err != nil { + return err + } + return nil +} diff --git a/gitrpc/server/cron/clean_slate_data_test.go b/gitrpc/server/cron/clean_slate_data_test.go new file mode 100644 index 0000000000..285179c7ea --- /dev/null +++ b/gitrpc/server/cron/clean_slate_data_test.go @@ -0,0 +1,44 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package cron + +import ( + "context" + "os" + "testing" +) + +func TestCleanupRepoGraveyardFunc(t *testing.T) { + ctx := context.Background() + tmpDir := t.TempDir() + // create a dummy repository + testRepo, _ := os.MkdirTemp(tmpDir, "TestRepo100") + err := cleanupRepoGraveyard(ctx, tmpDir) + if err != nil { + t.Error("cleanupRepoGraveyard failed") + } + if _, err := os.Stat(testRepo); !os.IsNotExist(err) { + t.Error("cleanupRepoGraveyard failed to remove the directory") + } +} + +func TestCleanupRepoGraveyardEmpty(t *testing.T) { + ctx := context.Background() + tmpDir := t.TempDir() + err := cleanupRepoGraveyard(ctx, tmpDir) + if err != nil { + t.Error("cleanupRepoGraveyard failed") + } +} diff --git a/gitrpc/server/cron/manager.go b/gitrpc/server/cron/manager.go new file mode 100644 index 0000000000..504b962e87 --- /dev/null +++ b/gitrpc/server/cron/manager.go @@ -0,0 +1,94 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+
+package cron
+
+import (
+	"context"
+	"errors"
+	"fmt"
+
+	cron "github.com/robfig/cron/v3"
+	"github.com/rs/zerolog/log"
+)
+
+// Format: seconds minute(0-59) hour(0-23) day of month(1-31) month(1-12) day of week(0-6).
+const (
+	Hourly = "0 0 * * * *" // once an hour at minute 0
+	Nightly = "0 0 0 * * *" // once a day at midnight
+	Weekly = "0 0 0 * * 0" // once a week on Sun midnight
+	Monthly = "0 0 0 1 * *" // once a month on the first day of the month
+	EverySecond = "* * * * * *" // every second (for testing)
+)
+
+var ErrFatal = errors.New("fatal error occurred")
+
+type Manager struct {
+	c *cron.Cron
+	ctx context.Context
+	cancel context.CancelFunc
+	fatal chan error
+}
+
+// NewManager creates a cron manager.
+func NewManager() *Manager {
+	return &Manager{
+		c: cron.New(cron.WithSeconds()),
+		fatal: make(chan error),
+	}
+}
+
+// NewCronTask adds a new job function to the cron scheduler using the given spec.
+func (c *Manager) NewCronTask(spec string, job func(ctx context.Context) error) error {
+	_, err := c.c.AddFunc(spec, func() {
+		jerr := job(c.ctx)
+		if jerr != nil { // check different severity of errors
+			log.Ctx(c.ctx).Error().Err(jerr).Msg("gitrpc cron job failed")
+
+			if errors.Is(jerr, ErrFatal) {
+				c.fatal <- jerr
+				return
+			}
+		}
+	})
+	if err != nil {
+		return fmt.Errorf("gitrpc cron manager failed to add cron job function: %w", err)
+	}
+	return nil
+}
+
+// Run runs the cron scheduler, or is a no-op if it is already running.
+func (c *Manager) Run(ctx context.Context) error {
+	c.ctx, c.cancel = context.WithCancel(ctx)
+	var err error
+	go func() {
+		select {
+		case <-ctx.Done():
+			err = fmt.Errorf("context done: %w", ctx.Err())
+		case fErr := <-c.fatal:
+			err = fmt.Errorf("fatal error occurred: %w", fErr)
+		}
+
+		// stop scheduling of new jobs.
+		// NOTE: doesn't wait for running jobs, but c.Run() does, and we don't have to wait here
+		_ = c.c.Stop()
+
+		// cancel running jobs (redundant for ctx.Done(), but makes code simpler)
+		c.cancel()
+	}()
+
+	c.c.Run()
+	close(c.fatal)
+	return err
+}
diff --git a/gitrpc/server/cron/manager_test.go b/gitrpc/server/cron/manager_test.go
new file mode 100644
index 0000000000..32c1b0bb49
--- /dev/null
+++ b/gitrpc/server/cron/manager_test.go
@@ -0,0 +1,117 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package cron
+
+import (
+	"context"
+	"errors"
+	"fmt"
+	"testing"
+	"time"
+)
+
+func run(ctx context.Context, cmngr *Manager) chan error {
+	cron := make(chan error)
+	go func() {
+		cron <- cmngr.Run(ctx)
+	}()
+	return cron
+}
+
+func TestCronManagerFatalErr(t *testing.T) {
+	cmngr := NewManager()
+	ctx, cancel := context.WithCancel(context.Background())
+	defer cancel()
+	_ = cmngr.NewCronTask(EverySecond, func(ctx context.Context) error {
+		return fmt.Errorf("inner: %w", ErrFatal)
+	})
+	select {
+	case ferr := <-run(ctx, cmngr):
+		if ferr == nil {
+			t.Error("Cronmanager failed to receive fatal error")
+		}
+	case <-time.After(2 * time.Second):
+		t.Error("Cronmanager failed to stop after a fatal error")
+	}
+}
+
+func TestCronManagerNonFatalErr(t *testing.T) {
+	cmngr := NewManager()
+	ctx, cancel := context.WithCancel(context.Background())
+	defer cancel()
+	_ = cmngr.NewCronTask(EverySecond, func(ctx context.Context) error {
+		return errors.New("dummy error")
+	})
+	select {
+	case ferr := <-run(ctx, cmngr):
+		if ferr != nil {
+			t.Error("Cronmanager failed at a non-fatal error")
+		}
+	case <-time.After(1500 * time.Millisecond):
+		// cron manager should keep running
+	}
+}
+func TestCronManagerNewTask(t *testing.T) {
+	cmngr := NewManager()
+	ctx, cancel := context.WithCancel(context.Background())
+	defer cancel()
+	a := 0
+	// add a task
+	_ = cmngr.NewCronTask(EverySecond, func(ctx context.Context) error {
+		a = 1
+		return nil
+	})
+
+	select {
+	case cerr := <-run(ctx, cmngr):
+		if cerr != nil {
+			t.Error("Cronmanager failed at Run:", cerr)
+		}
+	case <-time.After(1500 * time.Millisecond):
+		if a != 1 {
+			t.Error("Cronmanager failed to run the task")
+		}
+	}
+}
+
+func TestCronManagerStopOnCtxCancel(t *testing.T) {
+	cmngr := NewManager()
+	ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
+	defer cancel()
+
+	_ = cmngr.NewCronTask(EverySecond, func(ctx context.Context) error {
+		cancel()
+		return nil
+	})
+	err := cmngr.Run(ctx)
+	if !errors.Is(err, context.Canceled) {
+		t.Error("Cronmanager failed to stop after ctx got canceled ", err)
+	}
+}
+
+func TestCronManagerStopOnCtxTimeout(t *testing.T) {
+	cmngr := NewManager()
+	ctx, cancel := context.WithTimeout(context.Background(), 1*time.Second)
+	defer cancel()
+
+	_ = cmngr.NewCronTask(EverySecond, func(ctx context.Context) error {
+		time.Sleep(5 * time.Second)
+		return nil
+	})
+	err := cmngr.Run(ctx)
+	if !errors.Is(err, context.DeadlineExceeded) {
+		t.Error("Cronmanager failed to stop after ctx timeout", err)
+	}
+}
diff --git a/gitrpc/server/cron/wire.go b/gitrpc/server/cron/wire.go
new file mode 100644
index 0000000000..396136cfc5
--- /dev/null
+++ b/gitrpc/server/cron/wire.go
@@ -0,0 +1,30 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package cron
+
+import (
+	"github.com/harness/gitness/gitrpc/server"
+
+	"github.com/google/wire"
+)
+
+// WireSet provides a wire set for this package.
+var WireSet = wire.NewSet(ProvideManager) + +func ProvideManager(gitrpcconfig server.Config) *Manager { + cmngr := NewManager() + _ = AddAllGitRPCCronJobs(cmngr, gitrpcconfig) + return cmngr +} diff --git a/gitrpc/server/http.go b/gitrpc/server/http.go new file mode 100644 index 0000000000..eddab46cc3 --- /dev/null +++ b/gitrpc/server/http.go @@ -0,0 +1,254 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package server + +import ( + "bytes" + "compress/gzip" + "fmt" + "io" + "net/http" + "path/filepath" + "regexp" + "strconv" + "strings" + + gitnesshttp "github.com/harness/gitness/http" + + "code.gitea.io/gitea/modules/git" + "github.com/go-chi/chi" + "github.com/go-chi/chi/middleware" + "github.com/rs/zerolog/hlog" + "github.com/rs/zerolog/log" +) + +const ( + PathParamRepoUID = "repoUID" +) + +var ( + safeGitProtocolHeader = regexp.MustCompile(`^[0-9a-zA-Z]+=[0-9a-zA-Z]+(:[0-9a-zA-Z]+=[0-9a-zA-Z]+)*$`) +) + +// HTTPServer exposes the gitrpc rest api. +type HTTPServer struct { + *gitnesshttp.Server +} + +func NewHTTPServer(config Config) (*HTTPServer, error) { + if err := config.Validate(); err != nil { + return nil, fmt.Errorf("configuration is invalid: %w", err) + } + + reposRoot := filepath.Join(config.GitRoot, repoSubdirName) + + return &HTTPServer{ + gitnesshttp.NewServer( + gitnesshttp.Config{ + Addr: config.HTTP.Bind, + }, + handleHTTP(reposRoot), + ), + }, nil +} + +func handleHTTP(reposRoot string) http.Handler { + r := chi.NewRouter() + + // Apply common api middleware. + r.Use(middleware.NoCache) + r.Use(middleware.Recoverer) + + // configure logging middleware. + log := log.Logger.With().Logger() + r.Use(hlog.NewHandler(log)) + r.Use(hlog.URLHandler("http.url")) + r.Use(hlog.MethodHandler("http.method")) + r.Use(HLogRequestIDHandler()) + r.Use(HLogAccessLogHandler()) + + r.Route(fmt.Sprintf("/{%s}", PathParamRepoUID), func(r chi.Router) { + r.Get("/info/refs", handleHTTPInfoRefs(reposRoot)) + r.Handle("/git-upload-pack", handleHTTPUploadPack(reposRoot)) + + // push is not supported + r.Post("/git-receive-pack", func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusNotImplemented) + _, _ = w.Write([]byte("receive pack is not supported by this endpoint")) + }) + }) + + return r +} + +func handleHTTPInfoRefs(reposRoot string) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + // Clients MUST NOT reuse or revalidate a cached response. + // Servers MUST include sufficient Cache-Control headers to prevent caching of the response. 
+ // https://git-scm.com/docs/http-protocol + setHeaderNoCache(w) + + repoUID := chi.URLParam(r, PathParamRepoUID) + repoPath := getFullPathForRepo(reposRoot, repoUID) + gitProtocol := r.Header.Get("Git-Protocol") + service := getServiceType(r) + + log.Ctx(ctx).Trace().Msgf( + "handleHTTPInfoRefs for git service: '%s', protocol: '%s', path: '%s'", + service, + gitProtocol, + repoPath, + ) + + w.Header().Set("Content-Type", fmt.Sprintf("application/x-git-%s-advertisement", service)) + + // NOTE: Don't include os.Environ() as we don't have control over it - define everything explicitly + environ := []string{} + if gitProtocol != "" { + environ = append(environ, "GIT_PROTOCOL="+gitProtocol) + } + + stdOut := &bytes.Buffer{} + if err := git.NewCommand(ctx, service, "--stateless-rpc", "--advertise-refs", "."). + Run(&git.RunOpts{ + Env: environ, + Dir: repoPath, + Stdout: stdOut, + }); err != nil { + w.WriteHeader(http.StatusInternalServerError) + + log.Ctx(ctx).Error().Err(err).Msgf("failed running git command") + + return + } + if _, err := w.Write(packetWrite("# service=git-" + service + "\n")); err != nil { + w.WriteHeader(http.StatusInternalServerError) + + log.Ctx(ctx).Error().Err(err).Msgf("failed writing packet line") + + return + } + + if _, err := w.Write([]byte("0000")); err != nil { + w.WriteHeader(http.StatusInternalServerError) + + log.Ctx(ctx).Error().Err(err).Msgf("failed writing end of response") + + return + } + + if _, err := io.Copy(w, stdOut); err != nil { + w.WriteHeader(http.StatusInternalServerError) + + log.Ctx(ctx).Warn().Err(err).Msgf("failed copying response body") + + return + } + } +} + +func handleHTTPUploadPack(reposRoot string) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + const service = "upload-pack" + repoUID := chi.URLParam(r, PathParamRepoUID) + repoPath := getFullPathForRepo(reposRoot, repoUID) + gitProtocol := r.Header.Get("Git-Protocol") + + log.Ctx(ctx).Trace().Msgf( + "handleHTTPUploadPack for git service: '%s', protocol: '%s', path: '%s'", + service, + gitProtocol, + repoPath, + ) + + w.Header().Set("Content-Type", fmt.Sprintf("application/x-git-%s-result", service)) + + var err error + reqBody := r.Body + + // Handle GZIP. 
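+		// Clients may send the upload-pack request body gzip-compressed (Content-Encoding: gzip);
+		// in that case wrap the body in a gzip reader so git receives the raw stream.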
+ if r.Header.Get("Content-Encoding") == "gzip" { + reqBody, err = gzip.NewReader(reqBody) + if err != nil { + w.WriteHeader(http.StatusInternalServerError) + log.Ctx(ctx).Error().Err(err).Msgf("failed gziping response body") + return + } + } + + // NOTE: Don't include os.Environ() as we don't have control over it - define everything explicitly + environ := []string{} + // set this for allow pre-receive and post-receive execute + environ = append(environ, "SSH_ORIGINAL_COMMAND="+service) + if gitProtocol != "" && safeGitProtocolHeader.MatchString(gitProtocol) { + environ = append(environ, "GIT_PROTOCOL="+gitProtocol) + } + + var ( + stderr bytes.Buffer + ) + cmd := git.NewCommand(ctx, service, "--stateless-rpc", repoPath) + cmd.SetDescription(fmt.Sprintf("%s %s %s [repo_path: %s]", git.GitExecutable, service, "--stateless-rpc", repoPath)) + err = cmd.Run(&git.RunOpts{ + Dir: repoPath, + Env: environ, + Stdout: w, + Stdin: reqBody, + Stderr: &stderr, + UseContextTimeout: true, + }) + if err != nil { + log.Ctx(ctx).Error().Err(err).Msgf("Failed to serve RPC(%s) in %s: %v - %s", service, repoPath, err, stderr.String()) + w.WriteHeader(http.StatusInternalServerError) + } + } +} + +func setHeaderNoCache(w http.ResponseWriter) { + w.Header().Set("Expires", "Fri, 01 Jan 1980 00:00:00 GMT") + w.Header().Set("Pragma", "no-cache") + w.Header().Set("Cache-Control", "no-cache, max-age=0, must-revalidate") +} + +func getServiceType(r *http.Request) string { + serviceType := r.URL.Query().Get("service") + if !strings.HasPrefix(serviceType, "git-") { + return "" + } + return strings.Replace(serviceType, "git-", "", 1) +} + +// getFullPathForRepo returns the full path of a repo given the root dir of repos and the uid of the repo. +// NOTE: Split repos into subfolders using their prefix to distribute repos across a set of folders. +// TODO: Use common function between grpc and git server +func getFullPathForRepo(reposRoot, uid string) string { + // ASSUMPTION: repoUID is of lenth at least 4 - otherwise we have trouble either way. + return filepath.Join( + reposRoot, // root folder + uid[0:2], // first subfolder + uid[2:4], // second subfolder + fmt.Sprintf("%s.%s", uid[4:], "git"), // remainder with .git + ) +} + +func packetWrite(str string) []byte { + s := strconv.FormatInt(int64(len(str)+4), 16) + if len(s)%4 != 0 { + s = strings.Repeat("0", 4-len(s)%4) + s + } + return []byte(s + str) +} diff --git a/gitrpc/server/http_log.go b/gitrpc/server/http_log.go new file mode 100644 index 0000000000..00f8f92c67 --- /dev/null +++ b/gitrpc/server/http_log.go @@ -0,0 +1,80 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package server + +import ( + "net/http" + "time" + + "github.com/harness/gitness/gitrpc" + "github.com/harness/gitness/gitrpc/internal/middleware" + + "github.com/rs/xid" + "github.com/rs/zerolog" + "github.com/rs/zerolog/hlog" +) + +const ( + // TODO: use single constant with githook / gitness + requestIDHeader = "X-Request-Id" +) + +// HLogRequestIDHandler provides a middleware that injects request_id into the logging and execution context. +// It prefers the X-Request-Id header, if that doesn't exist it creates a new request id similar to zerolog. +func HLogRequestIDHandler() func(http.Handler) http.Handler { + return func(h http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + + // read requestID from header (or create new one if none exists) + var reqID string + if reqIDs, ok := r.Header[requestIDHeader]; ok && len(reqIDs) > 0 && len(reqIDs[0]) > 0 { + reqID = reqIDs[0] + } else { + // similar to zerolog requestID generation + reqID = xid.New().String() + } + + // add requestID to context for internal usage + gitrpc client! + ctx = middleware.WithRequestID(ctx, reqID) + ctx = gitrpc.WithRequestID(ctx, reqID) + + // update logging context with request ID + log := zerolog.Ctx(ctx) + log.UpdateContext(func(c zerolog.Context) zerolog.Context { + return c.Str("request_id", reqID) + }) + + // write request ID to response headers + w.Header().Set(requestIDHeader, reqID) + + // continue serving request + h.ServeHTTP(w, r.WithContext(ctx)) + }) + } +} + +// HLogAccessLogHandler provides an hlog based middleware that logs access logs. +func HLogAccessLogHandler() func(http.Handler) http.Handler { + return hlog.AccessHandler( + func(r *http.Request, status, size int, duration time.Duration) { + hlog.FromRequest(r).Info(). + Int("http.status_code", status). + Int("http.response_size_bytes", size). + Dur("http.elapsed_ms", duration). + Msg("http request completed.") + }, + ) +} diff --git a/gitrpc/server/server.go b/gitrpc/server/server.go new file mode 100644 index 0000000000..422f799270 --- /dev/null +++ b/gitrpc/server/server.go @@ -0,0 +1,142 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package server + +import ( + "errors" + "fmt" + "net" + "os" + "path/filepath" + + "github.com/harness/gitness/gitrpc/internal/middleware" + "github.com/harness/gitness/gitrpc/internal/service" + "github.com/harness/gitness/gitrpc/internal/storage" + "github.com/harness/gitness/gitrpc/rpc" + + grpc_middleware "github.com/grpc-ecosystem/go-grpc-middleware" + grpc_recovery "github.com/grpc-ecosystem/go-grpc-middleware/recovery" + "google.golang.org/grpc" + "google.golang.org/grpc/keepalive" +) + +const ( + repoSubdirName = "repos" + ReposGraveyardSubdirName = "cleanup" +) + +type GRPCServer struct { + *grpc.Server + Bind string +} + +func NewServer(config Config, adapter service.GitAdapter) (*GRPCServer, error) { + if err := config.Validate(); err != nil { + return nil, fmt.Errorf("configuration is invalid: %w", err) + } + // Create repos folder + reposRoot := filepath.Join(config.GitRoot, repoSubdirName) + if _, err := os.Stat(reposRoot); errors.Is(err, os.ErrNotExist) { + if err = os.MkdirAll(reposRoot, 0o700); err != nil { + return nil, err + } + } + + // interceptors + errIntc := middleware.NewErrInterceptor() + logIntc := middleware.NewLogInterceptor() + + s := grpc.NewServer( + grpc.UnaryInterceptor(grpc_middleware.ChainUnaryServer( + grpc_recovery.UnaryServerInterceptor(), + logIntc.UnaryInterceptor(), + errIntc.UnaryInterceptor(), + )), + grpc.StreamInterceptor(grpc_middleware.ChainStreamServer( + grpc_recovery.StreamServerInterceptor(), + logIntc.StreamInterceptor(), + errIntc.StreamInterceptor(), + )), + grpc.KeepaliveParams(keepalive.ServerParameters{ + MaxConnectionAge: config.MaxConnAge, + MaxConnectionAgeGrace: config.MaxConnAgeGrace, + }), + ) + store := storage.NewLocalStore() + // create a temp dir for deleted repositories + // this dir should get cleaned up peridocally if it's not empty + reposGraveyard := filepath.Join(config.GitRoot, ReposGraveyardSubdirName) + if _, errdir := os.Stat(reposGraveyard); os.IsNotExist(errdir) { + if errdir = os.MkdirAll(reposGraveyard, 0o700); errdir != nil { + return nil, errdir + } + } + // initialize services + repoService, err := service.NewRepositoryService(adapter, store, reposRoot, config.TmpDir, + config.GitHookPath, reposGraveyard) + if err != nil { + return nil, err + } + refService, err := service.NewReferenceService(adapter, reposRoot, config.TmpDir) + if err != nil { + return nil, err + } + httpService, err := service.NewHTTPService(adapter, reposRoot) + if err != nil { + return nil, err + } + commitFilesService, err := service.NewCommitFilesService(adapter, reposRoot, config.TmpDir) + if err != nil { + return nil, err + } + diffService, err := service.NewDiffService(adapter, reposRoot, config.TmpDir) + if err != nil { + return nil, err + } + mergeService, err := service.NewMergeService(adapter, reposRoot, config.TmpDir) + if err != nil { + return nil, err + } + blameService := service.NewBlameService(adapter, reposRoot) + pushService := service.NewPushService(adapter, reposRoot) + + // register services + rpc.RegisterRepositoryServiceServer(s, repoService) + rpc.RegisterReferenceServiceServer(s, refService) + rpc.RegisterSmartHTTPServiceServer(s, httpService) + rpc.RegisterCommitFilesServiceServer(s, commitFilesService) + rpc.RegisterDiffServiceServer(s, diffService) + rpc.RegisterMergeServiceServer(s, mergeService) + rpc.RegisterBlameServiceServer(s, blameService) + rpc.RegisterPushServiceServer(s, pushService) + + return &GRPCServer{ + Server: s, + Bind: config.Bind, + }, nil +} + +func (s *GRPCServer) Start() error { + 
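+	// Bind a TCP listener on the configured address and serve gRPC connections on it.
+	// This call blocks until the server is stopped or the listener fails.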
lis, err := net.Listen("tcp", s.Bind) + if err != nil { + return err + } + return s.Server.Serve(lis) +} + +func (s *GRPCServer) Stop() error { + s.Server.GracefulStop() + return nil +} diff --git a/gitrpc/server/wire.go b/gitrpc/server/wire.go new file mode 100644 index 0000000000..2a0f3fbbf2 --- /dev/null +++ b/gitrpc/server/wire.go @@ -0,0 +1,74 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package server + +import ( + "time" + + "github.com/harness/gitness/cache" + "github.com/harness/gitness/gitrpc/internal/gitea" + "github.com/harness/gitness/gitrpc/internal/service" + "github.com/harness/gitness/gitrpc/internal/types" + + "github.com/go-redis/redis/v8" + "github.com/google/wire" +) + +// WireSet provides a wire set for this package. +var WireSet = wire.NewSet( + ProvideServer, + ProvideHTTPServer, + ProvideGITAdapter, + ProvideGoGitRepoProvider, + ProvideLastCommitCache, +) + +func ProvideGoGitRepoProvider() *gitea.GoGitRepoProvider { + const objectCacheSize = 16 << 20 // 16MiB + return gitea.NewGoGitRepoProvider(objectCacheSize, 15*time.Minute) +} + +func ProvideLastCommitCache( + config Config, + redisClient redis.UniversalClient, + repoProvider *gitea.GoGitRepoProvider, +) cache.Cache[gitea.CommitEntryKey, *types.Commit] { + cacheDuration := time.Duration(config.LastCommitCache.DurationSeconds) * time.Second + + if config.LastCommitCache.Mode == ModeNone || cacheDuration < time.Second { + return gitea.NoLastCommitCache(repoProvider) + } + + if config.LastCommitCache.Mode == ModeRedis && redisClient != nil { + return gitea.NewRedisLastCommitCache(redisClient, cacheDuration, repoProvider) + } + + return gitea.NewInMemoryLastCommitCache(cacheDuration, repoProvider) +} + +func ProvideGITAdapter( + repoProvider *gitea.GoGitRepoProvider, + lastCommitCache cache.Cache[gitea.CommitEntryKey, *types.Commit], +) (service.GitAdapter, error) { + return gitea.New(repoProvider, lastCommitCache) +} + +func ProvideServer(config Config, adapter service.GitAdapter) (*GRPCServer, error) { + return NewServer(config, adapter) +} + +func ProvideHTTPServer(config Config) (*HTTPServer, error) { + return NewHTTPServer(config) +} diff --git a/gitrpc/smarthttp.go b/gitrpc/smarthttp.go new file mode 100644 index 0000000000..6e5fbb5edb --- /dev/null +++ b/gitrpc/smarthttp.go @@ -0,0 +1,180 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package gitrpc + +import ( + "context" + "errors" + "fmt" + "io" + + "github.com/harness/gitness/gitrpc/internal/streamio" + "github.com/harness/gitness/gitrpc/rpc" + + "github.com/rs/zerolog/log" +) + +type InfoRefsParams struct { + ReadParams + Service string + Options []string // (key, value) pair + GitProtocol string +} + +func (c *Client) GetInfoRefs(ctx context.Context, w io.Writer, params *InfoRefsParams) error { + if w == nil { + return errors.New("writer cannot be nil") + } + if params == nil { + return ErrNoParamsProvided + } + stream, err := c.httpService.InfoRefs(ctx, &rpc.InfoRefsRequest{ + Base: mapToRPCReadRequest(params.ReadParams), + Service: params.Service, + GitConfigOptions: params.Options, + GitProtocol: params.GitProtocol, + }) + if err != nil { + return fmt.Errorf("error initializing GetInfoRefs() stream: %w", err) + } + + var ( + response *rpc.InfoRefsResponse + ) + for { + response, err = stream.Recv() + if errors.Is(err, io.EOF) { + break + } + if err != nil { + return fmt.Errorf("GetInfoRefs() error receiving stream bytes: %w", err) + } + _, err = w.Write(response.GetData()) + if err != nil { + return fmt.Errorf("GetInfoRefs() error: %w", err) + } + } + + return nil +} + +type ServicePackParams struct { + *ReadParams + *WriteParams + Service string + GitProtocol string + Data io.ReadCloser + Options []string // (key, value) pair +} + +func (c *Client) ServicePack(ctx context.Context, w io.Writer, params *ServicePackParams) error { + if w == nil { + return errors.New("writer cannot be nil") + } + if params == nil { + return ErrNoParamsProvided + } + + log := log.Ctx(ctx) + + // create request (depends on service whether we need readparams or writeparams) + // TODO: can we solve this nicer? expose two methods instead? + request := &rpc.ServicePackRequest{ + Service: params.Service, + GitConfigOptions: params.Options, + GitProtocol: params.GitProtocol, + } + switch params.Service { + case rpc.ServiceUploadPack: + if params.ReadParams == nil { + return errors.New("upload-pack requires ReadParams") + } + request.Base = &rpc.ServicePackRequest_ReadBase{ + ReadBase: mapToRPCReadRequest(*params.ReadParams), + } + case rpc.ServiceReceivePack: + if params.WriteParams == nil { + return errors.New("receive-pack requires WriteParams") + } + request.Base = &rpc.ServicePackRequest_WriteBase{ + WriteBase: mapToRPCWriteRequest(*params.WriteParams), + } + default: + return fmt.Errorf("unsupported service provided: %s", params.Service) + } + + stream, err := c.httpService.ServicePack(ctx) + if err != nil { + return err + } + + log.Debug().Msgf("Start service pack '%s' with options '%v'.", + params.Service, params.Options) + + // send basic information + if err = stream.Send(request); err != nil { + return err + } + + log.Debug().Msg("Send request stream.") + + // send body as stream + stdout := streamio.NewWriter(func(p []byte) error { + return stream.Send(&rpc.ServicePackRequest{ + Data: p, + }) + }) + + _, err = io.Copy(stdout, params.Data) + if err != nil { + return fmt.Errorf("PostUploadPack() error copying reader: %w", err) + } + + log.Debug().Msg("completed sending request stream.") + + if err = stream.CloseSend(); err != nil { + return fmt.Errorf("PostUploadPack() error closing the stream: %w", err) + } + + log.Debug().Msg("start receiving response stream.") + + // when we are done with inputs then we should expect + // git data + var ( + response *rpc.ServicePackResponse + ) + for { + response, err = stream.Recv() + if errors.Is(err, io.EOF) { + log.Debug().Msg("received 
end of response stream.") + break + } + if err != nil { + return processRPCErrorf(err, "PostUploadPack() error receiving stream bytes") + } + if response.GetData() == nil { + return fmt.Errorf("PostUploadPack() data is nil") + } + + _, err = w.Write(response.GetData()) + if err != nil { + return fmt.Errorf("PostUploadPack() error writing response data: %w", err) + } + } + + log.Debug().Msg("completed service pack.") + + return nil +} diff --git a/gitrpc/sort.go b/gitrpc/sort.go new file mode 100644 index 0000000000..abd927a33a --- /dev/null +++ b/gitrpc/sort.go @@ -0,0 +1,23 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gitrpc + +type SortOrder int + +const ( + SortOrderDefault SortOrder = iota + SortOrderAsc = iota + SortOrderDesc +) diff --git a/gitrpc/stream.go b/gitrpc/stream.go new file mode 100644 index 0000000000..0a470ff73c --- /dev/null +++ b/gitrpc/stream.go @@ -0,0 +1,52 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gitrpc + +import "io" + +// StreamReader is a helper utility to ease reading from streaming channel pair (the data and the error channel). +type StreamReader[T any] struct { + chData <-chan T + chErr <-chan error +} + +// NewStreamReader creates new StreamReader. +func NewStreamReader[T any](chData <-chan T, chErr <-chan error) *StreamReader[T] { + return &StreamReader[T]{ + chData: chData, + chErr: chErr, + } +} + +// Next returns the next element or error. +// In case the end has been reached, an io.EOF is returned. +func (str *StreamReader[T]) Next() (T, error) { + var null T + + select { + case data, ok := <-str.chData: + if !ok { + return null, io.EOF + } + + return data, nil + case err, ok := <-str.chErr: + if !ok { + return null, io.EOF + } + + return null, err + } +} diff --git a/gitrpc/submodule.go b/gitrpc/submodule.go new file mode 100644 index 0000000000..59c982cd08 --- /dev/null +++ b/gitrpc/submodule.go @@ -0,0 +1,61 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package gitrpc + +import ( + "context" + "fmt" + + "github.com/harness/gitness/gitrpc/rpc" +) + +type GetSubmoduleParams struct { + ReadParams + // GitREF is a git reference (branch / tag / commit SHA) + GitREF string + Path string +} + +type GetSubmoduleOutput struct { + Submodule Submodule +} +type Submodule struct { + Name string + URL string +} + +func (c *Client) GetSubmodule(ctx context.Context, params *GetSubmoduleParams) (*GetSubmoduleOutput, error) { + if params == nil { + return nil, ErrNoParamsProvided + } + resp, err := c.repoService.GetSubmodule(ctx, &rpc.GetSubmoduleRequest{ + Base: mapToRPCReadRequest(params.ReadParams), + GitRef: params.GitREF, + Path: params.Path, + }) + if err != nil { + return nil, processRPCErrorf(err, "failed to get submodule from server") + } + if resp.GetSubmodule() == nil { + return nil, fmt.Errorf("rpc submodule is nil") + } + + return &GetSubmoduleOutput{ + Submodule: Submodule{ + Name: resp.GetSubmodule().Name, + URL: resp.GetSubmodule().Url, + }, + }, nil +} diff --git a/gitrpc/tag.go b/gitrpc/tag.go new file mode 100644 index 0000000000..8c07eb4a36 --- /dev/null +++ b/gitrpc/tag.go @@ -0,0 +1,210 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gitrpc + +import ( + "context" + "errors" + "fmt" + "io" + "time" + + "github.com/harness/gitness/gitrpc/rpc" + + "github.com/rs/zerolog/log" +) + +type TagSortOption int + +const ( + TagSortOptionDefault TagSortOption = iota + TagSortOptionName + TagSortOptionDate +) + +type ListCommitTagsParams struct { + ReadParams + IncludeCommit bool + Query string + Sort TagSortOption + Order SortOrder + Page int32 + PageSize int32 +} + +type ListCommitTagsOutput struct { + Tags []CommitTag +} + +type CommitTag struct { + Name string + SHA string + IsAnnotated bool + Title string + Message string + Tagger *Signature + Commit *Commit +} + +type CreateCommitTagParams struct { + WriteParams + Name string + + // Target is the commit (or points to the commit) the new tag will be pointing to. 
+ Target string + + // Message is the optional message the tag will be created with - if the message is empty + // the tag will be lightweight, otherwise it'll be annotated + Message string + + // Tagger overwrites the git author used in case the tag is annotated + // (optional, default: actor) + Tagger *Identity + // TaggerDate overwrites the git author date used in case the tag is annotated + // (optional, default: current time on server) + TaggerDate *time.Time +} + +func (p *CreateCommitTagParams) Validate() error { + if p == nil { + return ErrNoParamsProvided + } + + if p.Name == "" { + return errors.New("tag name cannot be empty") + } + if p.Target == "" { + return errors.New("target cannot be empty") + } + + return nil +} + +type CreateCommitTagOutput struct { + CommitTag +} + +type DeleteTagParams struct { + WriteParams + Name string +} + +func (p DeleteTagParams) Validate() error { + if p.Name == "" { + return errors.New("tag name cannot be empty") + } + return nil +} + +func (c *Client) ListCommitTags(ctx context.Context, params *ListCommitTagsParams) (*ListCommitTagsOutput, error) { + if params == nil { + return nil, ErrNoParamsProvided + } + + stream, err := c.refService.ListCommitTags(ctx, &rpc.ListCommitTagsRequest{ + Base: mapToRPCReadRequest(params.ReadParams), + IncludeCommit: params.IncludeCommit, + Query: params.Query, + Sort: mapToRPCListCommitTagsSortOption(params.Sort), + Order: mapToRPCSortOrder(params.Order), + Page: params.Page, + PageSize: params.PageSize, + }) + if err != nil { + return nil, fmt.Errorf("failed to start stream for tags: %w", err) + } + + // NOTE: don't use PageSize as initial slice capacity - as that theoretically could be MaxInt + output := &ListCommitTagsOutput{ + Tags: make([]CommitTag, 0, 16), + } + for { + var next *rpc.ListCommitTagsResponse + next, err = stream.Recv() + if errors.Is(err, io.EOF) { + log.Ctx(ctx).Debug().Msg("received end of stream") + break + } + if err != nil { + return nil, processRPCErrorf(err, "received unexpected error from server") + } + if next.GetTag() == nil { + return nil, fmt.Errorf("expected tag message") + } + + var tag *CommitTag + tag, err = mapRPCCommitTag(next.GetTag()) + if err != nil { + return nil, fmt.Errorf("failed to map rpc tag: %w", err) + } + + output.Tags = append(output.Tags, *tag) + } + + err = stream.CloseSend() + if err != nil { + return nil, fmt.Errorf("failed to close stream") + } + + return output, nil +} +func (c *Client) CreateCommitTag(ctx context.Context, params *CreateCommitTagParams) (*CreateCommitTagOutput, error) { + err := params.Validate() + + if err != nil { + return nil, err + } + + resp, err := c.refService.CreateCommitTag(ctx, &rpc.CreateCommitTagRequest{ + Base: mapToRPCWriteRequest(params.WriteParams), + Target: params.Target, + TagName: params.Name, + Message: params.Message, + Tagger: mapToRPCIdentityOptional(params.Tagger), + TaggerDate: mapToRPCTimeOptional(params.TaggerDate), + }) + + if err != nil { + return nil, processRPCErrorf(err, "Failed to create tag %s", params.Name) + } + + var commitTag *CommitTag + commitTag, err = mapRPCCommitTag(resp.GetTag()) + if err != nil { + return nil, fmt.Errorf("failed to map rpc tag: %w", err) + } + + return &CreateCommitTagOutput{ + CommitTag: *commitTag, + }, nil +} + +func (c *Client) DeleteTag(ctx context.Context, params *DeleteTagParams) error { + err := params.Validate() + + if err != nil { + return err + } + + _, err = c.refService.DeleteTag(ctx, &rpc.DeleteTagRequest{ + Base: mapToRPCWriteRequest(params.WriteParams), + 
TagName: params.Name,
+	})
+
+	if err != nil {
+		return processRPCErrorf(err, "Failed to delete tag %s", params.Name)
+	}
+	return nil
+}
diff --git a/gitrpc/tree.go b/gitrpc/tree.go
new file mode 100644
index 0000000000..cb52d8be06
--- /dev/null
+++ b/gitrpc/tree.go
@@ -0,0 +1,202 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package gitrpc
+
+import (
+	"context"
+	"errors"
+	"fmt"
+	"io"
+
+	"github.com/harness/gitness/gitrpc/rpc"
+
+	"github.com/rs/zerolog/log"
+)
+
+// TreeNodeType specifies the different types of nodes in a git tree.
+// IMPORTANT: has to be consistent with rpc.TreeNodeType (proto).
+type TreeNodeType string
+
+const (
+	TreeNodeTypeTree   TreeNodeType = "tree"
+	TreeNodeTypeBlob   TreeNodeType = "blob"
+	TreeNodeTypeCommit TreeNodeType = "commit"
+)
+
+// TreeNodeMode specifies the different modes of a node in a git tree.
+// IMPORTANT: has to be consistent with rpc.TreeNodeMode (proto).
+type TreeNodeMode string
+
+const (
+	TreeNodeModeFile    TreeNodeMode = "file"
+	TreeNodeModeSymlink TreeNodeMode = "symlink"
+	TreeNodeModeExec    TreeNodeMode = "exec"
+	TreeNodeModeTree    TreeNodeMode = "tree"
+	TreeNodeModeCommit  TreeNodeMode = "commit"
+)
+
+type TreeNode struct {
+	Type TreeNodeType
+	Mode TreeNodeMode
+	SHA  string
+	Name string
+	Path string
+}
+
+type ListTreeNodeParams struct {
+	ReadParams
+	// GitREF is a git reference (branch / tag / commit SHA)
+	GitREF              string
+	Path                string
+	IncludeLatestCommit bool
+}
+
+type ListTreeNodeOutput struct {
+	Nodes []TreeNode
+}
+
+type GetTreeNodeParams struct {
+	ReadParams
+	// GitREF is a git reference (branch / tag / commit SHA)
+	GitREF              string
+	Path                string
+	IncludeLatestCommit bool
+}
+
+type GetTreeNodeOutput struct {
+	Node   TreeNode
+	Commit *Commit
+}
+
+func (c *Client) GetTreeNode(ctx context.Context, params *GetTreeNodeParams) (*GetTreeNodeOutput, error) {
+	if params == nil {
+		return nil, ErrNoParamsProvided
+	}
+	resp, err := c.repoService.GetTreeNode(ctx, &rpc.GetTreeNodeRequest{
+		Base:                mapToRPCReadRequest(params.ReadParams),
+		GitRef:              params.GitREF,
+		Path:                params.Path,
+		IncludeLatestCommit: params.IncludeLatestCommit,
+	})
+	if err != nil {
+		return nil, processRPCErrorf(err, "failed to get tree node from server")
+	}
+
+	node, err := mapRPCTreeNode(resp.GetNode())
+	if err != nil {
+		return nil, fmt.Errorf("failed to map rpc node: %w", err)
+	}
+
+	var commit *Commit
+	if resp.GetCommit() != nil {
+		commit, err = mapRPCCommit(resp.GetCommit())
+		if err != nil {
+			return nil, fmt.Errorf("failed to map rpc commit: %w", err)
+		}
+	}
+
+	return &GetTreeNodeOutput{
+		Node:   node,
+		Commit: commit,
+	}, nil
+}
+
+func (c *Client) ListTreeNodes(ctx context.Context, params *ListTreeNodeParams) (*ListTreeNodeOutput, error) {
+	if params == nil {
+		return nil, ErrNoParamsProvided
+	}
+	stream, err := c.repoService.ListTreeNodes(ctx, &rpc.ListTreeNodesRequest{
+		Base:   mapToRPCReadRequest(params.ReadParams),
+		GitRef: params.GitREF,
+		Path:   params.Path,
+	})
+	if err != nil
{ + return nil, fmt.Errorf("failed to start stream for tree nodes: %w", err) + } + + nodes := make([]TreeNode, 0, 16) + for { + var next *rpc.ListTreeNodesResponse + next, err = stream.Recv() + if errors.Is(err, io.EOF) { + log.Ctx(ctx).Debug().Msg("received end of stream") + break + } + if err != nil { + return nil, processRPCErrorf(err, "received unexpected error from server") + } + + var node TreeNode + node, err = mapRPCTreeNode(next.GetNode()) + if err != nil { + return nil, fmt.Errorf("failed to map rpc node: %w", err) + } + + nodes = append(nodes, node) + } + + return &ListTreeNodeOutput{ + Nodes: nodes, + }, nil +} + +type PathsDetailsParams struct { + ReadParams + GitREF string + Paths []string +} + +type PathsDetailsOutput struct { + Details []PathDetails +} + +type PathDetails struct { + Path string `json:"path"` + LastCommit *Commit `json:"last_commit,omitempty"` + Size int64 `json:"size,omitempty"` +} + +func (c *Client) PathsDetails(ctx context.Context, params PathsDetailsParams) (PathsDetailsOutput, error) { + response, err := c.repoService.PathsDetails(ctx, &rpc.PathsDetailsRequest{ + Base: mapToRPCReadRequest(params.ReadParams), + GitRef: params.GitREF, + Paths: params.Paths, + }) + if err != nil { + return PathsDetailsOutput{}, processRPCErrorf(err, "failed to get paths details") + } + + details := make([]PathDetails, len(response.PathDetails)) + for i, pathDetail := range response.PathDetails { + var lastCommit *Commit + + if pathDetail.LastCommit != nil { + lastCommit, err = mapRPCCommit(pathDetail.LastCommit) + if err != nil { + return PathsDetailsOutput{}, fmt.Errorf("failed to map last commit: %w", err) + } + } + + details[i] = PathDetails{ + Path: pathDetail.Path, + Size: pathDetail.Size, + LastCommit: lastCommit, + } + } + + return PathsDetailsOutput{ + Details: details, + }, nil +} diff --git a/gitrpc/upload.go b/gitrpc/upload.go new file mode 100644 index 0000000000..1cc2f1c674 --- /dev/null +++ b/gitrpc/upload.go @@ -0,0 +1,111 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
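As a usage illustration for the tree-listing client API above, a small sketch that prints the entries of a path on a ref. The package name, the "main" branch name, and the way the *gitrpc.Client and ReadParams values are obtained are assumptions; neither is constructed in this excerpt.

package example

import (
	"context"
	"fmt"

	"github.com/harness/gitness/gitrpc"
)

// printTree lists the tree entries of path at the given ref and prints their
// type, mode, SHA and path. Illustrative sketch only.
func printTree(ctx context.Context, client *gitrpc.Client, read gitrpc.ReadParams, path string) error {
	out, err := client.ListTreeNodes(ctx, &gitrpc.ListTreeNodeParams{
		ReadParams: read,
		GitREF:     "main", // assumed ref name
		Path:       path,
	})
	if err != nil {
		return err
	}
	for _, n := range out.Nodes {
		fmt.Printf("%-6s %-7s %s %s\n", n.Type, n.Mode, n.SHA, n.Path)
	}
	return nil
}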
+ +package gitrpc + +import ( + "bytes" + "context" + "errors" + "fmt" + "io" + + "github.com/harness/gitness/gitrpc/rpc" + + "github.com/rs/zerolog/log" +) + +const ( + // TODO: this should be configurable + FileTransferChunkSize = 1024 +) + +type File struct { + Path string + Content []byte +} + +func uploadFile( + ctx context.Context, + file File, + chunkSize int, + send func(*rpc.FileUpload) error, +) error { + log := log.Ctx(ctx) + + log.Info().Msgf("start sending %v", file.Path) + + // send filename message + header := &rpc.FileUpload{ + Data: &rpc.FileUpload_Header{ + Header: &rpc.FileUploadHeader{ + Path: file.Path, + }, + }, + } + if err := send(header); err != nil { + return fmt.Errorf("failed to send file upload header: %w", err) + } + + err := sendChunks(file.Content, chunkSize, func(c *rpc.Chunk) error { + return send(&rpc.FileUpload{ + Data: &rpc.FileUpload_Chunk{ + Chunk: c, + }, + }) + }) + if err != nil { + return fmt.Errorf("failed to send file data: %w", err) + } + + log.Info().Msgf("completed sending %v", file.Path) + + return nil +} + +func sendChunks( + content []byte, + chunkSize int, + send func(*rpc.Chunk) error) error { + buffer := make([]byte, chunkSize) + reader := bytes.NewReader(content) + + for { + n, err := reader.Read(buffer) + if errors.Is(err, io.EOF) { + err = send(&rpc.Chunk{ + Eof: true, + Data: buffer[:n], + }) + if err != nil { + return err + } + + break + } + if err != nil { + return fmt.Errorf("cannot read buffer: %w", err) + } + + err = send(&rpc.Chunk{ + Eof: false, + Data: buffer[:n], + }) + if err != nil { + return err + } + } + + return nil +} diff --git a/gitrpc/validate.go b/gitrpc/validate.go new file mode 100644 index 0000000000..2cac491188 --- /dev/null +++ b/gitrpc/validate.go @@ -0,0 +1,27 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gitrpc + +import "regexp" + +var matchCommitSHA = regexp.MustCompile("^[0-9a-f]+$") + +func ValidateCommitSHA(commitSHA string) bool { + if len(commitSHA) != 40 && len(commitSHA) != 64 { + return false + } + + return matchCommitSHA.MatchString(commitSHA) +} diff --git a/gitrpc/wire.go b/gitrpc/wire.go new file mode 100644 index 0000000000..63f71f9d5a --- /dev/null +++ b/gitrpc/wire.go @@ -0,0 +1,26 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gitrpc + +import "github.com/google/wire" + +// WireSet provides a wire set for this package. 
+var WireSet = wire.NewSet( + ProvideClient, +) + +func ProvideClient(config Config) (Interface, error) { + return New(config) +} diff --git a/go.mod b/go.mod new file mode 100644 index 0000000000..91cc84166e --- /dev/null +++ b/go.mod @@ -0,0 +1,169 @@ +module github.com/harness/gitness + +go 1.19 + +replace github.com/docker/docker => github.com/docker/engine v17.12.0-ce-rc1.0.20200309214505-aa6a9891b09c+incompatible + +require ( + code.gitea.io/gitea v1.17.2 + github.com/Masterminds/squirrel v1.5.1 + github.com/adrg/xdg v0.3.2 + github.com/aws/aws-sdk-go v1.44.322 + github.com/coreos/go-semver v0.3.0 + github.com/dchest/uniuri v0.0.0-20200228104902-7aecb25e1fe5 + github.com/dgrijalva/jwt-go v3.2.0+incompatible + github.com/drone-runners/drone-runner-docker v1.8.4-0.20230919202034-23803f6b38c2 + github.com/drone/drone-go v1.7.1 + github.com/drone/drone-yaml v1.2.3 + github.com/drone/funcmap v0.0.0-20190918184546-d4ef6e88376d + github.com/drone/go-convert v0.0.0-20230919093251-7104c3bcc635 + github.com/drone/go-generate v0.0.0-20230920014042-6085ee5c9522 + github.com/drone/go-scm v1.31.2 + github.com/drone/runner-go v1.12.0 + github.com/drone/spec v0.0.0-20230919004456-7455b8913ff5 + github.com/go-chi/chi v1.5.4 + github.com/go-chi/cors v1.2.1 + github.com/go-redis/redis/v8 v8.11.5 + github.com/go-redsync/redsync/v4 v4.7.1 + github.com/golang-jwt/jwt v3.2.2+incompatible + github.com/golang/mock v1.6.0 + github.com/google/go-cmp v0.5.9 + github.com/google/wire v0.5.0 + github.com/gorhill/cronexpr v0.0.0-20180427100037-88b0669f7d75 + github.com/gotidy/ptr v1.4.0 + github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 + github.com/guregu/null v4.0.0+incompatible + github.com/harness/go-rbac v0.0.0-20230829014129-c9b217856ea2 + github.com/hashicorp/go-multierror v1.1.1 + github.com/jmoiron/sqlx v1.3.3 + github.com/joho/godotenv v1.3.0 + github.com/kelseyhightower/envconfig v1.4.0 + github.com/lib/pq v1.10.5 + github.com/maragudk/migrate v0.4.1 + github.com/matoous/go-nanoid v1.5.0 + github.com/matoous/go-nanoid/v2 v2.0.0 + github.com/mattn/go-isatty v0.0.17 + github.com/mattn/go-sqlite3 v1.14.12 + github.com/pkg/errors v0.9.1 + github.com/robfig/cron/v3 v3.0.0 + github.com/rs/xid v1.4.0 + github.com/rs/zerolog v1.29.0 + github.com/sercand/kuberesolver/v5 v5.1.0 + github.com/sirupsen/logrus v1.9.0 + github.com/stretchr/testify v1.8.4 + github.com/swaggest/openapi-go v0.2.23 + github.com/swaggest/swgui v1.4.2 + github.com/unrolled/secure v1.0.8 + go.uber.org/multierr v1.8.0 + golang.org/x/crypto v0.13.0 + golang.org/x/exp v0.0.0-20230108222341-4b8118a2686a + golang.org/x/sync v0.3.0 + golang.org/x/term v0.12.0 + golang.org/x/text v0.13.0 + google.golang.org/grpc v1.55.0 + google.golang.org/protobuf v1.30.0 + gopkg.in/alecthomas/kingpin.v2 v2.2.6 +) + +require ( + cloud.google.com/go v0.110.0 // indirect + cloud.google.com/go/compute v1.18.0 // indirect + cloud.google.com/go/compute/metadata v0.2.3 // indirect + dario.cat/mergo v1.0.0 // indirect + github.com/99designs/httpsignatures-go v0.0.0-20170731043157-88528bf4ca7e // indirect + github.com/antonmedv/expr v1.15.2 // indirect + github.com/beorn7/perks v1.0.1 // indirect + github.com/bmatcuk/doublestar v1.3.4 // indirect + github.com/buildkite/yaml v2.1.0+incompatible // indirect + github.com/cloudflare/circl v1.3.3 // indirect + github.com/containerd/containerd v1.3.4 // indirect + github.com/cyphar/filepath-securejoin v0.2.4 // indirect + github.com/docker/distribution v2.7.1+incompatible // indirect + github.com/docker/docker 
v0.0.0-00010101000000-000000000000 // indirect + github.com/docker/go-connections v0.3.0 // indirect + github.com/docker/go-units v0.5.0 // indirect + github.com/drone/envsubst v1.0.3 // indirect + github.com/fsnotify/fsnotify v1.6.0 // indirect + github.com/ghodss/yaml v1.0.0 // indirect + github.com/gogo/protobuf v1.3.2 // indirect + github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect + github.com/google/uuid v1.3.1 // indirect + github.com/googleapis/enterprise-certificate-proxy v0.2.3 // indirect + github.com/googleapis/gax-go/v2 v2.7.0 // indirect + github.com/jmespath/go-jmespath v0.4.0 // indirect + github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect + github.com/natessilva/dag v0.0.0-20180124060714-7194b8dcc5c4 // indirect + github.com/opencontainers/go-digest v1.0.0-rc1 // indirect + github.com/opencontainers/image-spec v1.0.1 // indirect + github.com/pjbgf/sha1cd v0.3.0 // indirect + github.com/prometheus/client_golang v1.15.1 // indirect + github.com/prometheus/client_model v0.3.0 // indirect + github.com/prometheus/common v0.42.0 // indirect + github.com/prometheus/procfs v0.9.0 // indirect + github.com/skeema/knownhosts v1.2.0 // indirect + go.opencensus.io v0.24.0 // indirect + golang.org/x/oauth2 v0.6.0 // indirect + google.golang.org/api v0.110.0 // indirect + google.golang.org/appengine v1.6.7 // indirect +) + +require ( + github.com/go-logr/logr v1.2.4 + github.com/go-logr/stdr v1.2.2 // indirect + github.com/go-logr/zerologr v1.2.3 + github.com/mattn/go-colorable v0.1.13 // indirect +) + +require ( + cloud.google.com/go/profiler v0.3.1 + github.com/Microsoft/go-winio v0.6.1 // indirect + github.com/ProtonMail/go-crypto v0.0.0-20230828082145-3c4c8a2d2371 // indirect + github.com/acomagu/bufpipe v1.0.4 // indirect + github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 // indirect + github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137 // indirect + github.com/cespare/xxhash/v2 v2.2.0 // indirect + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect + github.com/djherbis/buffer v1.2.0 // indirect + github.com/djherbis/nio/v3 v3.0.1 // indirect + github.com/emirpasic/gods v1.18.1 // indirect + github.com/go-enry/go-enry/v2 v2.8.2 // indirect + github.com/go-enry/go-oniguruma v1.2.1 // indirect + github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect + github.com/go-git/go-billy/v5 v5.5.0 + github.com/go-git/go-git/v5 v5.9.0 + github.com/gobwas/glob v0.2.3 // indirect + github.com/golang-jwt/jwt/v4 v4.4.1 // indirect + github.com/golang/protobuf v1.5.3 // indirect + github.com/google/pprof v0.0.0-20221103000818-d260c55eee4c // indirect + github.com/google/subcommands v1.2.0 // indirect + github.com/hashicorp/errwrap v1.0.0 // indirect + github.com/hashicorp/go-version v1.4.0 // indirect + github.com/jackc/pgx/v4 v4.12.0 // indirect + github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect + github.com/json-iterator/go v1.1.12 // indirect + github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 // indirect + github.com/kevinburke/ssh_config v1.2.0 // indirect + github.com/lann/builder v0.0.0-20180802200727-47ae307949d0 // indirect + github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0 // indirect + github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect + github.com/modern-go/reflect2 v1.0.2 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + 
github.com/sergi/go-diff v1.3.1 // indirect + github.com/swaggest/jsonschema-go v0.3.40 // indirect + github.com/swaggest/refl v1.1.0 // indirect + github.com/vearutop/statigz v1.1.5 // indirect + github.com/xanzy/ssh-agent v0.3.3 // indirect + github.com/yuin/goldmark v1.4.13 // indirect + go.uber.org/atomic v1.10.0 // indirect + golang.org/x/mod v0.12.0 // indirect + golang.org/x/net v0.15.0 // indirect + golang.org/x/sys v0.12.0 // indirect + golang.org/x/tools v0.13.0 // indirect + google.golang.org/genproto v0.0.0-20230306155012-7f2fa6fef1f4 // indirect + gopkg.in/ini.v1 v1.66.4 // indirect + gopkg.in/warnings.v0 v0.1.2 // indirect + gopkg.in/yaml.v2 v2.4.0 + gopkg.in/yaml.v3 v3.0.1 // indirect + strk.kbt.io/projects/go/libravatar v0.0.0-20191008002943-06d1c002b251 // indirect +) diff --git a/go.sum b/go.sum new file mode 100644 index 0000000000..296fc7c51e --- /dev/null +++ b/go.sum @@ -0,0 +1,1033 @@ +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.110.0 h1:Zc8gqp3+a9/Eyph2KDmcGaPtbKRIoqq4YTlL4NMD0Ys= +cloud.google.com/go v0.110.0/go.mod h1:SJnCLqQ0FCFGSZMUNUf84MV3Aia54kn7pi8st7tMzaY= +cloud.google.com/go/compute v1.18.0 h1:FEigFqoDbys2cvFkZ9Fjq4gnHBP55anJ0yQyau2f9oY= +cloud.google.com/go/compute v1.18.0/go.mod h1:1X7yHxec2Ga+Ss6jPyjxRxpu2uu7PLgsOVXvgU0yacs= +cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY= +cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA= +cloud.google.com/go/iam v0.12.0 h1:DRtTY29b75ciH6Ov1PHb4/iat2CLCvrOm40Q0a6DFpE= +cloud.google.com/go/longrunning v0.4.1 h1:v+yFJOfKC3yZdY6ZUI933pIYdhyhV8S3NpWrXWmg7jM= +cloud.google.com/go/profiler v0.3.1 h1:b5got9Be9Ia0HVvyt7PavWxXEht15B9lWnigdvHtxOc= +cloud.google.com/go/profiler v0.3.1/go.mod h1:GsG14VnmcMFQ9b+kq71wh3EKMZr3WRMgLzNiFRpW7tE= +cloud.google.com/go/storage v1.28.1 h1:F5QDG5ChchaAVQhINh24U99OWHURqrW8OmQcGKXcbgI= +code.gitea.io/gitea v1.17.2 h1:NRcVr07jF+za4d0NZZlJXeCuQK5FfHMtjPDjq4u3UiY= +code.gitea.io/gitea v1.17.2/go.mod h1:sovminOoSsc8IC2T29rX9+MmaboHTu8QDEvJjaSqIXg= +dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk= +dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= +docker.io/go-docker v1.0.0/go.mod h1:7tiAn5a0LFmjbPDbyTPOaTTOuG1ZRNXdPA6RvKY+fpY= +github.com/99designs/basicauth-go v0.0.0-20160802081356-2a93ba0f464d/go.mod h1:3cARGAK9CfW3HoxCy1a0G4TKrdiKke8ftOMEOHyySYs= +github.com/99designs/httpsignatures-go v0.0.0-20170731043157-88528bf4ca7e h1:rl2Aq4ZODqTDkeSqQBy+fzpZPamacO1Srp8zq7jf2Sc= +github.com/99designs/httpsignatures-go v0.0.0-20170731043157-88528bf4ca7e/go.mod h1:Xa6lInWHNQnuWoF0YPSsx+INFA9qk7/7pTjwb3PInkY= +github.com/Azure/go-ansiterm v0.0.0-20170929234023-d6e3b3328b78 h1:w+iIsaOQNcT7OZ575w+acHgRric5iCyQh+xv+KJ4HB8= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible/go.mod h1:r7JcOSlj0wfOMncg0iLm8Leh48TZaKVeNIfJntJ2wa0= +github.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs= +github.com/Masterminds/squirrel v1.5.1 h1:kWAKlLLJFxZG7N2E0mBMNWVp5AuUX+JUrnhFN74Eg+w= +github.com/Masterminds/squirrel v1.5.1/go.mod h1:NNaOrjSoIDfDA40n7sr2tPNZRfjzjA400rg+riTZj10= +github.com/Microsoft/go-winio v0.4.11/go.mod h1:VhR8bwka0BXejwEJY73c50VrPtXAaKcyvVC4A4RozmA= 
+github.com/Microsoft/go-winio v0.5.2/go.mod h1:WpS1mjBmmwHBEWmogvA2mj8546UReBk4v8QkMxJ6pZY= +github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow= +github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM= +github.com/ProtonMail/go-crypto v0.0.0-20230828082145-3c4c8a2d2371 h1:kkhsdkhsCvIsutKu5zLMgWtgh9YxGCNAw8Ad8hjwfYg= +github.com/ProtonMail/go-crypto v0.0.0-20230828082145-3c4c8a2d2371/go.mod h1:EjAoLdwvbIOoOQr3ihjnSoLZRtE8azugULFRteWMNc0= +github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWXgklEdEo= +github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI= +github.com/VividCortex/gohistogram v1.0.0/go.mod h1:Pf5mBqqDxYaXu3hDrrU+w6nw50o/4+TcAqDqk/vUH7g= +github.com/acomagu/bufpipe v1.0.4 h1:e3H4WUzM3npvo5uv95QuJM3cQspFNtFBzvJ2oNjKIDQ= +github.com/acomagu/bufpipe v1.0.4/go.mod h1:mxdxdup/WdsKVreO5GpW4+M/1CE2sMG4jeGJ2sYmHc4= +github.com/adrg/xdg v0.3.2 h1:GUSGQ5pHdev83AYhDSS1A/CX+0JIsxbiWtow2DSA+RU= +github.com/adrg/xdg v0.3.2/go.mod h1:7I2hH/IT30IsupOpKZ5ue7/qNi3CoKzD6tL3HwpaRMQ= +github.com/afex/hystrix-go v0.0.0-20180502004556-fa1af6a1f4f5/go.mod h1:SkGFH1ia65gfNATL8TAiHDNxPzPdmEL5uirI2Uyuz6c= +github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 h1:JYp7IbQjafoB+tBA3gMyHYHrpOtNuDiK/uB5uXxq5wM= +github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137 h1:s6gZFSlWYmbqAuRjVTiNNhvNRfY2Wxp9nhfyel4rklc= +github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137/go.mod h1:OMCwj8VM1Kc9e19TLln2VL61YJF0x1XFtfdL4JdbSyE= +github.com/andybalholm/brotli v1.0.3/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig= +github.com/andybalholm/brotli v1.0.4 h1:V7DdXeJtZscaqfNuAdSRuRFzuiKlHSC/Zh3zl9qY3JY= +github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8= +github.com/antonmedv/expr v1.15.2 h1:afFXpDWIC2n3bF+kTZE1JvFo+c34uaM3sTqh8z0xfdU= +github.com/antonmedv/expr v1.15.2/go.mod h1:0E/6TxnOlRNp81GMzX9QfDPAmHo2Phg00y4JUv1ihsE= +github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ= +github.com/apache/thrift v0.13.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ= +github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= +github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= +github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= +github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio= +github.com/aryann/difflib v0.0.0-20170710044230-e206f873d14a/go.mod h1:DAHtR1m6lCRdSC2Tm3DSWRPvIPr6xNKyeHdqDQSQT+A= +github.com/aws/aws-lambda-go v1.13.3/go.mod h1:4UKl9IzQMoD+QF79YdCuzCwp8VbmG4VAQwij/eHl5CU= +github.com/aws/aws-sdk-go v1.27.0/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= +github.com/aws/aws-sdk-go v1.44.322 
h1:7JfwifGRGQMHd99PvfXqxBaZsjuRaOF6e3X9zRx2uYo= +github.com/aws/aws-sdk-go v1.44.322/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI= +github.com/aws/aws-sdk-go-v2 v0.18.0/go.mod h1:JWVYvqSMppoMJC0x5wdwiImzgXTI9FuZwxzkQq9wy+g= +github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= +github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= +github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= +github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= +github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= +github.com/bmatcuk/doublestar v1.1.1/go.mod h1:UD6OnuiIn0yFxxA2le/rnRU1G4RaI4UvFv1sNto9p6w= +github.com/bmatcuk/doublestar v1.3.4 h1:gPypJ5xD31uhX6Tf54sDPUOBXTqKH4c9aPY66CyQrS0= +github.com/bmatcuk/doublestar v1.3.4/go.mod h1:wiQtGV+rzVYxB7WIlirSN++5HPtPlXEo9MEoZQC/PmE= +github.com/bool64/dev v0.1.41/go.mod h1:cTHiTDNc8EewrQPy3p1obNilpMpdmlUesDkFTF2zRWU= +github.com/bool64/dev v0.1.42/go.mod h1:cTHiTDNc8EewrQPy3p1obNilpMpdmlUesDkFTF2zRWU= +github.com/bool64/dev v0.2.22 h1:YJFKBRKplkt+0Emq/5Xk1Z5QRmMNzc1UOJkR3rxJksA= +github.com/bool64/shared v0.1.5 h1:fp3eUhBsrSjNCQPcSdQqZxxh9bBwrYiZ+zOKFkM0/2E= +github.com/buildkite/yaml v2.1.0+incompatible h1:xirI+ql5GzfikVNDmt+yeiXpf/v1Gt03qXTtT5WXdr8= +github.com/buildkite/yaml v2.1.0+incompatible/go.mod h1:UoU8vbcwu1+vjZq01+KrpSeLBgQQIjL/H7Y6KwikUrI= +github.com/bwesterb/go-ristretto v1.2.3/go.mod h1:fUIoIZaG73pV5biE2Blr2xEzDoMj7NFEuV9ekS419A0= +github.com/casbin/casbin/v2 v2.1.2/go.mod h1:YcPU1XXisHhLzuxH9coDNf2FbKpjGlbCg3n9yuLkIJQ= +github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM= +github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= +github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= +github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= +github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= +github.com/clbanning/x2j v0.0.0-20191024224557-825249438eec/go.mod h1:jMjuTZXRI4dUb/I5gc9Hdhagfvm9+RyrPryS/auMzxE= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/cloudflare/circl v1.3.3 h1:fE/Qz0QdIGqeWfnwq0RE0R7MI51s0M2E4Ga9kq5AEMs= +github.com/cloudflare/circl v1.3.3/go.mod h1:5XYMA4rFBvNIrhs50XuiBJ15vF2pZn4nnUKZrLbUZFA= +github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= +github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ= +github.com/cockroachdb/datadriven v0.0.0-20190809214429-80d97fb3cbaa/go.mod h1:zn76sxSg3SzpJ0PPJaLDCu+Bu0Lg3sKTORVIj19EIF8= +github.com/codahale/hdrhistogram v0.0.0-20161010025455-3a0bb77429bd/go.mod h1:sE/e/2PUdi/liOCUjSTXgM1o87ZssimdTWN964YiIeI= +github.com/containerd/containerd v1.3.4 h1:3o0smo5SKY7H6AJCmJhsnCjR2/V2T8VmiHt7seN2/kI= +github.com/containerd/containerd v1.3.4/go.mod 
h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA= +github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= +github.com/coreos/go-semver v0.3.0 h1:wkHLiw0WNATZnSG7epLsujiMCgPAc9xhjJ4tgnAxmfM= +github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= +github.com/coreos/go-systemd v0.0.0-20180511133405-39ca1b05acc7/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/coreos/go-systemd v0.0.0-20190719114852-fd7a80b32e1f/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/coreos/go-systemd/v22 v22.3.3-0.20220203105225-a9a7ef127534/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= +github.com/coreos/pkg v0.0.0-20160727233714-3ac0863d7acf/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= +github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= +github.com/cyphar/filepath-securejoin v0.2.4 h1:Ugdm7cg7i6ZK6x3xDF1oEu1nfkyfH53EtKeQYTC3kyg= +github.com/cyphar/filepath-securejoin v0.2.4/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dchest/uniuri v0.0.0-20200228104902-7aecb25e1fe5 h1:RAV05c0xOkJ3dZGS0JFybxFKZ2WMLabgx3uXnd7rpGs= +github.com/dchest/uniuri v0.0.0-20200228104902-7aecb25e1fe5/go.mod h1:GgB8SF9nRG+GqaDtLcwJZsQFhcogVCJ79j4EdT0c2V4= +github.com/dgrijalva/jwt-go v3.2.0+incompatible h1:7qlOGliEKZXTDg6OTjfoBKDXWrumCAMpl/TFQ4/5kLM= +github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= +github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78= +github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc= +github.com/djherbis/buffer v1.1.0/go.mod h1:VwN8VdFkMY0DCALdY8o00d3IZ6Amz/UNVMWcSaJT44o= +github.com/djherbis/buffer v1.2.0 h1:PH5Dd2ss0C7CRRhQCZ2u7MssF+No9ide8Ye71nPHcrQ= +github.com/djherbis/buffer v1.2.0/go.mod h1:fjnebbZjCUpPinBRD+TDwXSOeNQ7fPQWLfGQqiAiUyE= +github.com/djherbis/nio/v3 v3.0.1 h1:6wxhnuppteMa6RHA4L81Dq7ThkZH8SwnDzXDYy95vB4= +github.com/djherbis/nio/v3 v3.0.1/go.mod h1:Ng4h80pbZFMla1yKzm61cF0tqqilXZYrogmWgZxOcmg= +github.com/docker/distribution v0.0.0-20170726174610-edc3ab29cdff/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= +github.com/docker/distribution v2.7.1+incompatible h1:a5mlkVzth6W5A4fOsS3D2EO5BUmsJpcB+cRlLU7cSug= +github.com/docker/distribution v2.7.1+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= +github.com/docker/engine v17.12.0-ce-rc1.0.20200309214505-aa6a9891b09c+incompatible h1:hx8H7MbcmXUXAmphQuA/XB7CfSzX4DRrNuHFvfK9aIQ= +github.com/docker/engine v17.12.0-ce-rc1.0.20200309214505-aa6a9891b09c+incompatible/go.mod h1:3CPr2caMgTHxxIAZgEMd3uLYPDlRvPqCpyeRf6ncPcY= +github.com/docker/go-connections v0.3.0 h1:3lOnM9cSzgGwx8VfK/NGOW5fLQ0GjIlCkaktF+n1M6o= +github.com/docker/go-connections v0.3.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec= +github.com/docker/go-units v0.3.3/go.mod 
h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= +github.com/docker/go-units v0.4.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= +github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= +github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= +github.com/drone-runners/drone-runner-docker v1.8.4-0.20230919202034-23803f6b38c2 h1:NmYT2bCmacG9mFWql0hKlJ7KOjfXDlusrpWbjUDYKoA= +github.com/drone-runners/drone-runner-docker v1.8.4-0.20230919202034-23803f6b38c2/go.mod h1:iXTCJv+tESfI/ggWZwinI2ZAzHTGS+Ic5A9gcUElTns= +github.com/drone/drone-go v1.7.1 h1:ZX+3Rs8YHUSUQ5mkuMLmm1zr1ttiiE2YGNxF3AnyDKw= +github.com/drone/drone-go v1.7.1/go.mod h1:fxCf9jAnXDZV1yDr0ckTuWd1intvcQwfJmTRpTZ1mXg= +github.com/drone/drone-runtime v1.0.7-0.20190729202838-87c84080f4a1/go.mod h1:+osgwGADc/nyl40J0fdsf8Z09bgcBZXvXXnLOY48zYs= +github.com/drone/drone-yaml v1.2.3 h1:SWzLmzr8ARhbtw1WsVDENa8WFY2Pi9l0FVMfafVUWz8= +github.com/drone/drone-yaml v1.2.3/go.mod h1:QsqliFK8nG04AHFN9tTn9XJomRBQHD4wcejWW1uz/10= +github.com/drone/envsubst v1.0.2/go.mod h1:bkZbnc/2vh1M12Ecn7EYScpI4YGYU0etwLJICOWi8Z0= +github.com/drone/envsubst v1.0.3 h1:PCIBwNDYjs50AsLZPYdfhSATKaRg/FJmDc2D6+C2x8g= +github.com/drone/envsubst v1.0.3/go.mod h1:N2jZmlMufstn1KEqvbHjw40h1KyTmnVzHcSc9bFiJ2g= +github.com/drone/funcmap v0.0.0-20190918184546-d4ef6e88376d h1:/IO7UVVu191Jc0DajV4cDVoO+91cuppvgxg2MZl+AXI= +github.com/drone/funcmap v0.0.0-20190918184546-d4ef6e88376d/go.mod h1:Hph0/pT6ZxbujnE1Z6/08p5I0XXuOsppqF6NQlGOK0E= +github.com/drone/go-convert v0.0.0-20230919093251-7104c3bcc635 h1:qQX+U2iEm4X2FcmBzxZwZgz8gLpUTa6lBB1vBBCV9Oo= +github.com/drone/go-convert v0.0.0-20230919093251-7104c3bcc635/go.mod h1:PyCDcuAhGF6W0VJ6qMmlM47dsSyGv/zDiMqeJxMFuGM= +github.com/drone/go-generate v0.0.0-20230920014042-6085ee5c9522 h1:i3EfRpr/eYifK9w0ninT3xHAthkS4NTQjLX0/zDIsy4= +github.com/drone/go-generate v0.0.0-20230920014042-6085ee5c9522/go.mod h1:eTfy716efMJgVvk/ZkRvitaXY2UuytfqDjxclFMeLdQ= +github.com/drone/go-scm v1.31.2 h1:6hZxf0aETV17830fMCPrgcA4y8j/8Gdfy0xEdInUeqQ= +github.com/drone/go-scm v1.31.2/go.mod h1:DFIJJjhMj0TSXPz+0ni4nyZ9gtTtC40Vh/TGRugtyWw= +github.com/drone/runner-go v1.12.0 h1:zUjDj9ylsJ4n4Mvy4znddq/Z4EBzcUXzTltpzokKtgs= +github.com/drone/runner-go v1.12.0/go.mod h1:vu4pPPYDoeN6vdYQAY01GGGsAIW4aLganJNaa8Fx8zE= +github.com/drone/signal v1.0.0/go.mod h1:S8t92eFT0g4WUgEc/LxG+LCuiskpMNsG0ajAMGnyZpc= +github.com/drone/spec v0.0.0-20230919004456-7455b8913ff5 h1:NgAseJNQpJE3XtgJUPu4x7x5fcBjqZ3oKHDJfwBYdWk= +github.com/drone/spec v0.0.0-20230919004456-7455b8913ff5/go.mod h1:KyQZA9qwuscbbM7yTrtZg25Wammoc5GKwaRem8kDA5k= +github.com/dustin/go-humanize v0.0.0-20171111073723-bb3d318650d4/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= +github.com/eapache/go-resiliency v1.1.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs= +github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU= +github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I= +github.com/edsrzf/mmap-go v1.0.0/go.mod h1:YO35OhQPt3KJa3ryjFM5Bs14WD66h8eGKpfaBNrHW5M= +github.com/elazarl/goproxy v0.0.0-20230808193330-2592e75ae04a h1:mATvB/9r/3gvcejNsXKSkQ6lcIaNec2nyfOdlTBR2lU= +github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc= +github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ= +github.com/envoyproxy/go-control-plane v0.6.9/go.mod 
h1:SBwIajubJHhxtWwsL9s8ss4safvEdbitLhGGK48rN6g= +github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= +github.com/franela/goblin v0.0.0-20200105215937-c9ffbefa60db/go.mod h1:7dvUGVsVBjqR7JHJk0brhHOZYGmfBYOrK0ZhYMEtBr4= +github.com/franela/goreq v0.0.0-20171204163338-bcd34c9993f8/go.mod h1:ZhphrRTfi2rbfLwlschooIH4+wKKDR4Pdxhh+TRoA20= +github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= +github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= +github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY= +github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw= +github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk= +github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/gliderlabs/ssh v0.3.5 h1:OcaySEmAQJgyYcArR+gGGTHCyE7nvhEMTlYY+Dp8CpY= +github.com/go-chi/chi v1.5.4 h1:QHdzF2szwjqVV4wmByUnTcsbIg7UGaQ0tPF2t5GcAIs= +github.com/go-chi/chi v1.5.4/go.mod h1:uaf8YgoFazUOkPBG7fxPftUylNumIev9awIWOENIuEg= +github.com/go-chi/cors v1.2.1 h1:xEC8UT3Rlp2QuWNEr4Fs/c2EAGVKBwy/1vHx3bppil4= +github.com/go-chi/cors v1.2.1/go.mod h1:sSbTewc+6wYHBBCW7ytsFSn836hqM7JxpglAy2Vzc58= +github.com/go-enry/go-enry/v2 v2.8.2 h1:uiGmC+3K8sVd/6DOe2AOJEOihJdqda83nPyJNtMR8RI= +github.com/go-enry/go-enry/v2 v2.8.2/go.mod h1:GVzIiAytiS5uT/QiuakK7TF1u4xDab87Y8V5EJRpsIQ= +github.com/go-enry/go-oniguruma v1.2.1 h1:k8aAMuJfMrqm/56SG2lV9Cfti6tC4x8673aHCcBk+eo= +github.com/go-enry/go-oniguruma v1.2.1/go.mod h1:bWDhYP+S6xZQgiRL7wlTScFYBe023B6ilRZbCAD5Hf4= +github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 h1:+zs/tPmkDkHx3U66DAb0lQFJrpS6731Oaa12ikc+DiI= +github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod h1:an3vInlBmSxCcxctByoQdvwPiA7DTK7jaaFDBTtu0ic= +github.com/go-git/go-billy/v5 v5.5.0 h1:yEY4yhzCDuMGSv83oGxiBotRzhwhNr8VZyphhiu+mTU= +github.com/go-git/go-billy/v5 v5.5.0/go.mod h1:hmexnoNsr2SJU1Ju67OaNz5ASJY3+sHgFRpCtpDCKow= +github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20230305113008-0c11038e723f h1:Pz0DHeFij3XFhoBRGUDPzSJ+w2UcK5/0JvF8DRI58r8= +github.com/go-git/go-git/v5 v5.9.0 h1:cD9SFA7sHVRdJ7AYck1ZaAa/yeuBvGPxwXDL8cxrObY= +github.com/go-git/go-git/v5 v5.9.0/go.mod h1:RKIqga24sWdMGZF+1Ekv9kylsDz6LzdTSI2s/OsZWE0= +github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/kit v0.10.0/go.mod h1:xUsJbQ/Fp4kEt7AFgCuvyX4a71u8h9jB8tj/ORgOZ7o= +github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= +github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= +github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= +github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.2.4 h1:g01GSCwiDw2xSZfjJ2/T9M+S6pFdcNtFYsp+Y43HYDQ= +github.com/go-logr/logr v1.2.4/go.mod 
h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/go-logr/zerologr v1.2.3 h1:up5N9vcH9Xck3jJkXzgyOxozT14R47IyDODz8LM1KSs= +github.com/go-logr/zerologr v1.2.3/go.mod h1:BxwGo7y5zgSHYR1BjbnHPyF/5ZjVKfKxAZANVu6E8Ho= +github.com/go-redis/redis v6.15.9+incompatible h1:K0pv1D7EQUjfyoMql+r/jZqCLizCGKFlFgcHWWmHQjg= +github.com/go-redis/redis v6.15.9+incompatible/go.mod h1:NAIEuMOZ/fxfXJIrKDQDz8wamY7mA7PouImQ2Jvg6kA= +github.com/go-redis/redis/v7 v7.4.0 h1:7obg6wUoj05T0EpY0o8B59S9w5yeMWql7sw2kwNW1x4= +github.com/go-redis/redis/v7 v7.4.0/go.mod h1:JDNMw23GTyLNC4GZu9njt15ctBQVn7xjRfnwdHj/Dcg= +github.com/go-redis/redis/v8 v8.11.4/go.mod h1:2Z2wHZXdQpCDXEGzqMockDpNyYvi2l4Pxt6RJr792+w= +github.com/go-redis/redis/v8 v8.11.5 h1:AcZZR7igkdvfVmQTPnu9WE37LRrO/YrBH5zWyjDC0oI= +github.com/go-redis/redis/v8 v8.11.5/go.mod h1:gREzHqY1hg6oD9ngVRbLStwAWKhA0FEgq8Jd4h5lpwo= +github.com/go-redis/redis/v9 v9.0.0-beta.2 h1:ZSr84TsnQyKMAg8gnV+oawuQezeJR11/09THcWCQzr4= +github.com/go-redis/redis/v9 v9.0.0-beta.2/go.mod h1:Bldcd/M/bm9HbnNPi/LUtYBSD8ttcZYBMupwMXhdU0o= +github.com/go-redsync/redsync/v4 v4.7.1 h1:j5rmHCdN5qCEWp5oA2XEbGwtD4LZblqkhbcjCUsfNhs= +github.com/go-redsync/redsync/v4 v4.7.1/go.mod h1:IxV3sygNwjOERTXrj3XvNMSb1tgNgic8GvM8alwnWcM= +github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= +github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= +github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE= +github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE= +github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y= +github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8= +github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= +github.com/gofrs/uuid v3.2.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= +github.com/gofrs/uuid v4.0.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= +github.com/gogo/googleapis v1.1.0/go.mod h1:gf4bu3Q80BeJ6H1S1vYPm8/ELATdvryBaNFGgqEef3s= +github.com/gogo/protobuf v0.0.0-20170307180453-100ba4e88506/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4= +github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/golang-jwt/jwt v3.2.2+incompatible h1:IfV12K8xAKAnZqdXVzCZ+TOjboZ2keLg81eXfW3O+oY= +github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I= +github.com/golang-jwt/jwt/v4 v4.4.1 h1:pC5DB52sCeK48Wlb9oPcdhnjkz1TKt1D/P7WKJ0kUcQ= +github.com/golang-jwt/jwt/v4 v4.4.1/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod 
h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/groupcache v0.0.0-20160516000752-02826c3e7903/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc= +github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= +github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= +github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg= +github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/gomodule/redigo v1.8.2 h1:H5XSIre1MB5NbPYFp+i1NBbb5qN1W8Y8YAQoAYbkm8k= +github.com/gomodule/redigo v1.8.2/go.mod h1:P9dn9mFrCBvWhGE1wpxx6fgq7BAeLBk+UUUzlpkBYO0= +github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.5/go.mod 
h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= +github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/gofuzz v0.0.0-20170612174753-24818f796faf/go.mod h1:HP5RmnzzSNb993RKQDq4+1A4ia9nllfqcQFTQJedwGI= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20221103000818-d260c55eee4c h1:lvddKcYTQ545ADhBujtIJmqQrZBDsGo7XIMbAQe/sNY= +github.com/google/pprof v0.0.0-20221103000818-d260c55eee4c/go.mod h1:dDKJzRmX4S37WGHujM7tX//fmj1uioxKzKxz3lo4HJo= +github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/subcommands v1.0.1/go.mod h1:ZjhPrFU+Olkh9WazFPsl27BQ4UPiG37m3yTrtFlrHVk= +github.com/google/subcommands v1.2.0 h1:vWQspBTo2nEqTUFita5/KeEWlUL8kQObDFbub/EN9oE= +github.com/google/subcommands v1.2.0/go.mod h1:ZjhPrFU+Olkh9WazFPsl27BQ4UPiG37m3yTrtFlrHVk= +github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.3.1 h1:KjJaJ9iWZ3jOFZIf1Lqf4laDRCasjl0BCmnEGxkdLb4= +github.com/google/uuid v1.3.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/wire v0.5.0 h1:I7ELFeVBr3yfPIcc8+MWvrjk+3VjbcSzoXm3JVa+jD8= +github.com/google/wire v0.5.0/go.mod h1:ngWDr9Qvq3yZA10YrxfyGELY/AFWGVpy9c1LTRi1EoU= +github.com/googleapis/enterprise-certificate-proxy v0.2.3 h1:yk9/cqRKtT9wXZSsRH9aurXEpJX+U6FLtpYTdC3R06k= +github.com/googleapis/enterprise-certificate-proxy v0.2.3/go.mod h1:AwSRAtLfXpU5Nm3pW+v7rGDHp09LsPtGY9MduiEsR9k= +github.com/googleapis/gax-go/v2 v2.7.0 h1:IcsPKeInNvYi7eqSaDjiZqDDKu5rsmunY0Y1YupQSSQ= +github.com/googleapis/gax-go/v2 v2.7.0/go.mod h1:TEop28CZZQ2y+c0VxMUmu1lV+fQx57QpBWsYpwqHJx8= +github.com/googleapis/gnostic v0.2.0/go.mod h1:sJBsCZ4ayReDTBIg8b9dl28c5xFWyhBTVRp3pOg5EKY= +github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= +github.com/gorhill/cronexpr v0.0.0-20180427100037-88b0669f7d75 h1:f0n1xnMSmBLzVfsMMvriDyA75NB/oBgILX2GcHXIQzY= +github.com/gorhill/cronexpr v0.0.0-20180427100037-88b0669f7d75/go.mod h1:g2644b03hfBX9Ov0ZBDgXXens4rxSxmqFBbhvKv2yVA= +github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg= +github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= +github.com/gorilla/mux v1.7.3/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= +github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI= +github.com/gorilla/websocket v0.0.0-20170926233335-4201258b820c/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= +github.com/gotidy/ptr v1.4.0 h1:7++suUs+HNHMnyz6/AW3SE+4EnBhupPSQTSI7QNijVc= +github.com/gotidy/ptr v1.4.0/go.mod h1:MjRBG6/IETiiZGWI8LrRtISXEji+8b/jigmj2q0mEyM= +github.com/gregjones/httpcache v0.0.0-20181110185634-c63ab54fda8f/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA= +github.com/grpc-ecosystem/go-grpc-middleware v1.0.1-0.20190118093823-f849b5445de4/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= 
+github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 h1:+9834+KizmvFV7pXQGSXQTsaWhq2GjuNUt0aUU0YBYw= +github.com/grpc-ecosystem/go-grpc-middleware v1.3.0/go.mod h1:z0ButlSOZa5vEBq9m2m2hlwIgKw+rp3sdCBRoJY+30Y= +github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= +github.com/grpc-ecosystem/grpc-gateway v1.9.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= +github.com/guregu/null v4.0.0+incompatible h1:4zw0ckM7ECd6FNNddc3Fu4aty9nTlpkkzH7dPn4/4Gw= +github.com/guregu/null v4.0.0+incompatible/go.mod h1:ePGpQaN9cw0tj45IR5E5ehMvsFlLlQZAkkOXZurJ3NM= +github.com/h2non/gock v1.0.9 h1:17gCehSo8ZOgEsFKpQgqHiR7VLyjxdAG3lkhVvO9QZU= +github.com/h2non/gock v1.0.9/go.mod h1:CZMcB0Lg5IWnr9bF79pPMg9WeV6WumxQiUJ1UvdO1iE= +github.com/harness/go-rbac v0.0.0-20230829014129-c9b217856ea2 h1:M1Jd2uEKl4YW9g/6vzN1qo06d5dshYYdwxlhOTUSnh4= +github.com/harness/go-rbac v0.0.0-20230829014129-c9b217856ea2/go.mod h1:uGgBgSZPgyygG5rWzoYsKIQ8TM4zt5yQq9nreznWvOI= +github.com/hashicorp/consul/api v1.3.0/go.mod h1:MmDNSzIMUjNpY/mQ398R4bk2FnqQLoPndWW5VkKPlCE= +github.com/hashicorp/consul/sdk v0.3.0/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8= +github.com/hashicorp/errwrap v1.0.0 h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA= +github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= +github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= +github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= +github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= +github.com/hashicorp/go-multierror v1.1.0/go.mod h1:spPvp8C1qA32ftKqdAHm4hHTbPw+vmowP0z+KUhOZdA= +github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= +github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= +github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU= +github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= +github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4= +github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-version v1.2.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= +github.com/hashicorp/go-version v1.4.0 h1:aAQzgqIrRKRa7w75CKpbBxYsmUoPjzVm1W59ca1L0J4= +github.com/hashicorp/go-version v1.4.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= +github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90= +github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= +github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ= +github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I= +github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc= +github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= +github.com/hudl/fargo 
v1.3.0/go.mod h1:y3CKSmjA+wD2gak7sUSXTAoopbhU08POFhmITJgmKTg= +github.com/iancoleman/orderedmap v0.2.0 h1:sq1N/TFpYH++aViPcaKjys3bDClUEU7s5B+z6jq8pNA= +github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/imdario/mergo v0.3.6/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= +github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= +github.com/influxdata/influxdb1-client v0.0.0-20191209144304-8bf82d3c094d/go.mod h1:qj24IKcXYK6Iy9ceXlo3Tc+vtHo9lIhSX5JddghvEPo= +github.com/jackc/chunkreader v1.0.0 h1:4s39bBR8ByfqH+DKm8rQA3E1LHZWB9XWcrz8fqaZbe0= +github.com/jackc/chunkreader v1.0.0/go.mod h1:RT6O25fNZIuasFJRyZ4R/Y2BbhasbmZXF9QQ7T3kePo= +github.com/jackc/chunkreader/v2 v2.0.0/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk= +github.com/jackc/chunkreader/v2 v2.0.1 h1:i+RDz65UE+mmpjTfyz0MoVTnzeYxroil2G82ki7MGG8= +github.com/jackc/chunkreader/v2 v2.0.1/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk= +github.com/jackc/pgconn v0.0.0-20190420214824-7e0022ef6ba3/go.mod h1:jkELnwuX+w9qN5YIfX0fl88Ehu4XC3keFuOJJk9pcnA= +github.com/jackc/pgconn v0.0.0-20190824142844-760dd75542eb/go.mod h1:lLjNuW/+OfW9/pnVKPazfWOgNfH2aPem8YQ7ilXGvJE= +github.com/jackc/pgconn v0.0.0-20190831204454-2fabfa3c18b7/go.mod h1:ZJKsE/KZfsUgOEh9hBm+xYTstcNHg7UPMVJqRfQxq4s= +github.com/jackc/pgconn v1.4.0/go.mod h1:Y2O3ZDF0q4mMacyWV3AstPJpeHXWGEetiFttmq5lahk= +github.com/jackc/pgconn v1.5.0/go.mod h1:QeD3lBfpTFe8WUnPZWN5KY/mB8FGMIYRdd8P8Jr0fAI= +github.com/jackc/pgconn v1.5.1-0.20200601181101-fa742c524853/go.mod h1:QeD3lBfpTFe8WUnPZWN5KY/mB8FGMIYRdd8P8Jr0fAI= +github.com/jackc/pgconn v1.8.0/go.mod h1:1C2Pb36bGIP9QHGBYCjnyhqu7Rv3sGshaQUvmfGIB/o= +github.com/jackc/pgconn v1.8.1/go.mod h1:JV6m6b6jhjdmzchES0drzCcYcAHS1OPD5xu3OZ/lE2g= +github.com/jackc/pgconn v1.9.0 h1:gqibKSTJup/ahCsNKyMZAniPuZEfIqfXFc8FOWVYR+Q= +github.com/jackc/pgconn v1.9.0/go.mod h1:YctiPyvzfU11JFxoXokUOOKQXQmDMoJL9vJzHH8/2JY= +github.com/jackc/pgio v1.0.0 h1:g12B9UwVnzGhueNavwioyEEpAmqMe1E/BN9ES+8ovkE= +github.com/jackc/pgio v1.0.0/go.mod h1:oP+2QK2wFfUWgr+gxjoBH9KGBb31Eio69xUb0w5bYf8= +github.com/jackc/pgmock v0.0.0-20190831213851-13a1b77aafa2/go.mod h1:fGZlG77KXmcq05nJLRkk0+p82V8B8Dw8KN2/V9c/OAE= +github.com/jackc/pgmock v0.0.0-20201204152224-4fe30f7445fd/go.mod h1:hrBW0Enj2AZTNpt/7Y5rr2xe/9Mn757Wtb2xeBzPv2c= +github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= +github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= +github.com/jackc/pgproto3 v1.1.0 h1:FYYE4yRw+AgI8wXIinMlNjBbp/UitDJwfj5LqqewP1A= +github.com/jackc/pgproto3 v1.1.0/go.mod h1:eR5FA3leWg7p9aeAqi37XOTgTIbkABlvcPB3E5rlc78= +github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190420180111-c116219b62db/go.mod h1:bhq50y+xrl9n5mRYyCBFKkpRVTLYJVWeCc+mEAI3yXA= +github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190609003834-432c2951c711/go.mod h1:uH0AWtUmuShn0bcesswc4aBTWGvw0cAxIJp+6OB//Wg= +github.com/jackc/pgproto3/v2 v2.0.0-rc3/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM= +github.com/jackc/pgproto3/v2 v2.0.0-rc3.0.20190831210041-4c03ce451f29/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM= +github.com/jackc/pgproto3/v2 v2.0.1/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= +github.com/jackc/pgproto3/v2 v2.0.6/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= +github.com/jackc/pgproto3/v2 v2.1.0/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= +github.com/jackc/pgproto3/v2 
v2.1.1 h1:7PQ/4gLoqnl87ZxL7xjO0DR5gYuviDCZxQJsUlFW1eI= +github.com/jackc/pgproto3/v2 v2.1.1/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= +github.com/jackc/pgservicefile v0.0.0-20200307190119-3430c5407db8/go.mod h1:vsD4gTJCa9TptPL8sPkXrLZ+hDuNrZCnj29CQpr4X1E= +github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b h1:C8S2+VttkHFdOOCXJe+YGfa4vHYwlt4Zx+IVXQ97jYg= +github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b/go.mod h1:vsD4gTJCa9TptPL8sPkXrLZ+hDuNrZCnj29CQpr4X1E= +github.com/jackc/pgtype v0.0.0-20190421001408-4ed0de4755e0/go.mod h1:hdSHsc1V01CGwFsrv11mJRHWJ6aifDLfdV3aVjFF0zg= +github.com/jackc/pgtype v0.0.0-20190824184912-ab885b375b90/go.mod h1:KcahbBH1nCMSo2DXpzsoWOAfFkdEtEJpPbVLq8eE+mc= +github.com/jackc/pgtype v0.0.0-20190828014616-a8802b16cc59/go.mod h1:MWlu30kVJrUS8lot6TQqcg7mtthZ9T0EoIBFiJcmcyw= +github.com/jackc/pgtype v1.2.0/go.mod h1:5m2OfMh1wTK7x+Fk952IDmI4nw3nPrvtQdM0ZT4WpC0= +github.com/jackc/pgtype v1.3.1-0.20200510190516-8cd94a14c75a/go.mod h1:vaogEUkALtxZMCH411K+tKzNpwzCKU+AnPzBKZ+I+Po= +github.com/jackc/pgtype v1.3.1-0.20200606141011-f6355165a91c/go.mod h1:cvk9Bgu/VzJ9/lxTO5R5sf80p0DiucVtN7ZxvaC4GmQ= +github.com/jackc/pgtype v1.7.0/go.mod h1:ZnHF+rMePVqDKaOfJVI4Q8IVvAQMryDlDkZnKOI75BE= +github.com/jackc/pgtype v1.8.0 h1:iFVCcVhYlw0PulYCVoguRGm0SE9guIcPcccnLzHj8bA= +github.com/jackc/pgtype v1.8.0/go.mod h1:PqDKcEBtllAtk/2p6z6SHdXW5UB+MhE75tUol2OKexE= +github.com/jackc/pgx/v4 v4.0.0-20190420224344-cc3461e65d96/go.mod h1:mdxmSJJuR08CZQyj1PVQBHy9XOp5p8/SHH6a0psbY9Y= +github.com/jackc/pgx/v4 v4.0.0-20190421002000-1b8f0016e912/go.mod h1:no/Y67Jkk/9WuGR0JG/JseM9irFbnEPbuWV2EELPNuM= +github.com/jackc/pgx/v4 v4.0.0-pre1.0.20190824185557-6972a5742186/go.mod h1:X+GQnOEnf1dqHGpw7JmHqHc1NxDoalibchSk9/RWuDc= +github.com/jackc/pgx/v4 v4.5.0/go.mod h1:EpAKPLdnTorwmPUUsqrPxy5fphV18j9q3wrfRXgo+kA= +github.com/jackc/pgx/v4 v4.6.1-0.20200510190926-94ba730bb1e9/go.mod h1:t3/cdRQl6fOLDxqtlyhe9UWgfIi9R8+8v8GKV5TRA/o= +github.com/jackc/pgx/v4 v4.6.1-0.20200606145419-4e5062306904/go.mod h1:ZDaNWkt9sW1JMiNn0kdYBaLelIhw7Pg4qd+Vk6tw7Hg= +github.com/jackc/pgx/v4 v4.11.0/go.mod h1:i62xJgdrtVDsnL3U8ekyrQXEwGNTRoG7/8r+CIdYfcc= +github.com/jackc/pgx/v4 v4.12.0 h1:xiP3TdnkwyslWNp77yE5XAPfxAsU9RMFDe0c1SwN8h4= +github.com/jackc/pgx/v4 v4.12.0/go.mod h1:fE547h6VulLPA3kySjfnSG/e2D861g/50JlVUa/ub60= +github.com/jackc/puddle v0.0.0-20190413234325-e4ced69a3a2b/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jackc/puddle v0.0.0-20190608224051-11cab39313c9/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jackc/puddle v1.1.0/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jackc/puddle v1.1.1/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jackc/puddle v1.1.3/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A= +github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo= +github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= +github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg= +github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= +github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8= +github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod 
h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= +github.com/jmoiron/sqlx v1.3.3 h1:j82X0bf7oQ27XeqxicSZsTU5suPwKElg3oyxNn43iTk= +github.com/jmoiron/sqlx v1.3.3/go.mod h1:2BljVx/86SuTyjE+aPYlHCTNvZrnJXghYGpNiXLBMCQ= +github.com/joho/godotenv v1.3.0 h1:Zjp+RcGpHhGlrMbJzXTrZZPrWj+1vfm90La1wgB6Bhc= +github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg= +github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo= +github.com/json-iterator/go v1.1.5/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= +github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= +github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.8/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= +github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= +github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= +github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 h1:Z9n2FFNUXsshfwJMBgNA0RU6/i7WVaAegv3PtuIHPMs= +github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8= +github.com/kelseyhightower/envconfig v1.4.0 h1:Im6hONhd3pLkfDFsbRgu68RDNkGF1r3dvMUtDTo2cv8= +github.com/kelseyhightower/envconfig v1.4.0/go.mod h1:cccZRl6mQpaq41TPp5QxidR+Sa3axMbJDNb//FQX6Gg= +github.com/kevinburke/ssh_config v1.2.0 h1:x584FjTGwHzMwvHx18PXxbBVzfnxogHaAReU4gf13a4= +github.com/kevinburke/ssh_config v1.2.0/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM= +github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q= +github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/lann/builder v0.0.0-20180802200727-47ae307949d0 h1:SOEGU9fKiNWd/HOJuq6+3iTQz8KNCLtVX6idSoTLdUw= +github.com/lann/builder v0.0.0-20180802200727-47ae307949d0/go.mod h1:dXGbAdH5GtBTC4WfIxhKZfyBF/HBFgRZSWwZ9g/He9o= +github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0 h1:P6pPBnrTSX3DEVR4fDembhRWSsG5rVo6hYhAB/ADZrk= +github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0/go.mod h1:vmVJ0l/dxyfGW6FmdpVm2joNMFikkuWg0EoCKLGUMNw= +github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.1.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq 
v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.3.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.10.2/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/lib/pq v1.10.5 h1:J+gdV2cUmX7ZqL2B0lFcW0m+egaHC2V3lpO8nWxyYiQ= +github.com/lib/pq v1.10.5/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/lightstep/lightstep-tracer-common/golang/gogo v0.0.0-20190605223551-bc2310a04743/go.mod h1:qklhhLq1aX+mtWk9cPHPzaBjWImj5ULL6C7HFJtXQMM= +github.com/lightstep/lightstep-tracer-go v0.18.1/go.mod h1:jlF1pusYV4pidLvZ+XD0UBX0ZE6WURAspgAczcDHrL4= +github.com/lyft/protoc-gen-validate v0.0.13/go.mod h1:XbGvPuh87YZc5TdIa2/I4pLk0QoUACkjt2znoq26NVQ= +github.com/maragudk/migrate v0.4.1 h1:oAY8bCyaHIreLj3ar9b6cf7PSqOZsCkKXHU8Yn1bkb4= +github.com/maragudk/migrate v0.4.1/go.mod h1:vhmL4s+Xz75KU6DPZWRfqb45YyqjYQfcXliA1DsYzvY= +github.com/matoous/go-nanoid v1.5.0 h1:VRorl6uCngneC4oUQqOYtO3S0H5QKFtKuKycFG3euek= +github.com/matoous/go-nanoid v1.5.0/go.mod h1:zyD2a71IubI24efhpvkJz+ZwfwagzgSO6UNiFsZKN7U= +github.com/matoous/go-nanoid/v2 v2.0.0 h1:d19kur2QuLeHmJBkvYkFdhFBzLoo1XVm2GgTpL+9Tj0= +github.com/matoous/go-nanoid/v2 v2.0.0/go.mod h1:FtS4aGPVfEkxKxhdWPAspZpZSh1cOjtM7Ej/So3hR0g= +github.com/matryer/is v1.2.0/go.mod h1:2fLPjFQM9rhQ15aVEtbuwhJinnOqrmgXPNdZsdwlWXA= +github.com/matryer/is v1.4.0 h1:sosSmIWwkYITGrxZ25ULNDeKiMNzFSr4V/eqBQP0PeE= +github.com/matryer/is v1.4.0/go.mod h1:8I/i5uYgLzgsgEloJE1U6xx5HkBQpAZvepWuujKwMRU= +github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= +github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ= +github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= +github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= +github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= +github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= +github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= +github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= +github.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.9/go.mod h1:YNRxwqDuOph6SZLI9vUUz6OYw3QyUt7WiY2yME+cCiQ= +github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= +github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= +github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= +github.com/mattn/go-isatty v0.0.17 h1:BTarxUcIeDqL27Mc+vyvdWYSL28zpIhv3RoTdsLMPng= +github.com/mattn/go-isatty v0.0.17/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= +github.com/mattn/go-runewidth v0.0.2/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= +github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= +github.com/mattn/go-sqlite3 v1.14.7/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= +github.com/mattn/go-sqlite3 v1.14.12 h1:TJ1bhYJPV44phC+IMu1u2K/i5RriLTPe+yc68XDJ1Z0= +github.com/mattn/go-sqlite3 
v1.14.12/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= +github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= +github.com/matttproud/golang_protobuf_extensions v1.0.4 h1:mmDVorXM7PCGKw94cs5zkfA9PSy5pEvNWRP0ET0TIVo= +github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4= +github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= +github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc= +github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= +github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg= +github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY= +github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= +github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/natessilva/dag v0.0.0-20180124060714-7194b8dcc5c4 h1:dnMxwus89s86tI8rcGVp2HwZzlz7c5o92VOy7dSckBQ= +github.com/natessilva/dag v0.0.0-20180124060714-7194b8dcc5c4/go.mod h1:cojhOHk1gbMeklOyDP2oKKLftefXoJreOQGOrXk+Z38= +github.com/nats-io/jwt v0.3.0/go.mod h1:fRYCDE99xlTsqUzISS1Bi75UBJ6ljOJQOAAu5VglpSg= +github.com/nats-io/jwt v0.3.2/go.mod h1:/euKqTS1ZD+zzjYrY7pseZrTtWQSjujC7xjPc8wL6eU= +github.com/nats-io/nats-server/v2 v2.1.2/go.mod h1:Afk+wRZqkMQs/p45uXdrVLuab3gwv3Z8C4HTBu8GD/k= +github.com/nats-io/nats.go v1.9.1/go.mod h1:ZjDU1L/7fJ09jvUSRVBR2e7+RnLiiIQyqyzEE/Zbp4w= +github.com/nats-io/nkeys v0.1.0/go.mod h1:xpnFELMwJABBLVhffcfd1MZx6VsNRFpEugbxziKVo7w= +github.com/nats-io/nkeys v0.1.3/go.mod h1:xpnFELMwJABBLVhffcfd1MZx6VsNRFpEugbxziKVo7w= +github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c= +github.com/nbio/st v0.0.0-20140626010706-e9e8d9816f32/go.mod h1:9wM+0iRr9ahx58uYLpLIr5fm8diHn0JbqRycJi6w0Ms= +github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= +github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE= +github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= +github.com/oklog/oklog v0.3.2/go.mod h1:FCV+B7mhrz4o+ueLpx+KqkyXRGMWOYEvfiXtdGtbWGs= +github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA= +github.com/olekukonko/tablewriter 
v0.0.0-20170122224234-a0225b3f23b5/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo= +github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v1.10.1/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= +github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vvnwo0= +github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE= +github.com/onsi/ginkgo v1.16.5/go.mod h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042wbpU= +github.com/onsi/ginkgo/v2 v2.1.3/go.mod h1:vw5CSIxN1JObi/U8gcbwft7ZxR2dgaR70JSE3/PpL4c= +github.com/onsi/ginkgo/v2 v2.1.4/go.mod h1:um6tUpWM/cxCK3/FK8BXqEiUMUwRgSM4JXG47RKZmLU= +github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= +github.com/onsi/gomega v1.7.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= +github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= +github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= +github.com/onsi/gomega v1.16.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY= +github.com/onsi/gomega v1.17.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY= +github.com/onsi/gomega v1.19.0/go.mod h1:LY+I3pBVzYsTBU1AnDwOSxaYi9WoWiqgwooUqq9yPro= +github.com/onsi/gomega v1.20.0/go.mod h1:DtrZpjmvpn2mPm4YWQa0/ALMDj9v4YxLgojwPeREyVo= +github.com/onsi/gomega v1.27.10 h1:naR28SdDFlqrG6kScpT8VWpu1xWY5nJRCF3XaYyBjhI= +github.com/op/go-logging v0.0.0-20160315200505-970db520ece7/go.mod h1:HzydrMdWErDVzsI23lYNej1Htcns9BCg93Dk0bBINWk= +github.com/opencontainers/go-digest v1.0.0-rc1 h1:WzifXhOVOEOuFYOJAW6aQqW0TooG2iki3E3Ii+WN7gQ= +github.com/opencontainers/go-digest v1.0.0-rc1/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s= +github.com/opencontainers/image-spec v1.0.1 h1:JMemWkRwHx4Zj+fVxWoMCFm/8sYGGrUVojFA6h/TRcI= +github.com/opencontainers/image-spec v1.0.1/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0= +github.com/opentracing-contrib/go-observer v0.0.0-20170622124052-a52f23424492/go.mod h1:Ngi6UdF0k5OKD5t5wlmGhe/EDKPoUM3BXZSSfIuJbis= +github.com/opentracing/basictracer-go v1.0.0/go.mod h1:QfBfYuafItcjQuMwinw9GhYKwFXS9KnPs5lxoYwgW74= +github.com/opentracing/opentracing-go v1.0.2/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= +github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= +github.com/openzipkin-contrib/zipkin-go-opentracing v0.4.5/go.mod h1:/wsWhb9smxSfWAKL3wpBW7V8scJMt8N8gnaMCS9E/cA= +github.com/openzipkin/zipkin-go v0.1.6/go.mod h1:QgAqvLzwWbR/WpD4A3cGpPtJrZXNIiJc5AZX7/PBEpw= +github.com/openzipkin/zipkin-go v0.2.1/go.mod h1:NaW6tEwdmWMaCDZzg8sh+IBNOxHMPnhQw8ySjnjRyN4= +github.com/openzipkin/zipkin-go v0.2.2/go.mod h1:NaW6tEwdmWMaCDZzg8sh+IBNOxHMPnhQw8ySjnjRyN4= +github.com/pact-foundation/pact-go v1.0.4/go.mod h1:uExwJY4kCzNPcHRj+hCR/HBbOOIwwtUjcrb0b5/5kLM= +github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= +github.com/pborman/uuid v1.2.0/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k= +github.com/performancecopilot/speed v3.0.0+incompatible/go.mod h1:/CLtqpZ5gBg1M9iaPbIdPPGyKcA8hKdoy6hAWba7Yac= +github.com/petar/GoLLRB v0.0.0-20130427215148-53be0d36a84c/go.mod h1:HUpKUBZnpzkdx0kD/+Yfuft+uD3zHGtXF/XJB14TUr4= +github.com/peterbourgon/diskv 
v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU= +github.com/pierrec/lz4 v1.0.2-0.20190131084431-473cd7ce01a1/go.mod h1:3/3N9NVKO0jef7pBehbT1qWhCMrIgbYNnFAZCqQ5LRc= +github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= +github.com/pjbgf/sha1cd v0.3.0 h1:4D5XXmUUBUl/xQ6IjCkEAbqXskkq/4O7LmGn0AqMDs4= +github.com/pjbgf/sha1cd v0.3.0/go.mod h1:nZ1rrWOcGJ5uZgEEVL1VUM9iRQiZvWdbZjkKyFzPPsI= +github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/profile v1.2.1/go.mod h1:hJw3o1OdXxsrSjjVksARp5W95eeEaEfptyVZyv6JUPA= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= +github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang v0.9.3-0.20190127221311-3c4408c8b829/go.mod h1:p2iRAGwDERtqlqzRXnrOVns+ignqQo//hLXqYxZYVNs= +github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= +github.com/prometheus/client_golang v1.3.0/go.mod h1:hJaj2vgQTGQmVCsAACORcieXFeDPbaTKGT+JTgUa3og= +github.com/prometheus/client_golang v1.15.1 h1:8tXpTmJbyH5lydzFPoxSIJ0J46jdh3tylbvM1xCv0LI= +github.com/prometheus/client_golang v1.15.1/go.mod h1:e9yaBhRPU2pPNsZwE+JdQl0KEt1N9XgF6zxWmaC0xOk= +github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190115171406-56726106282f/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.1.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.3.0 h1:UBgGFHqYdG/TPFD1B1ogZywDqEkwp3fBMvqdiQ7Xew4= +github.com/prometheus/client_model v0.3.0/go.mod h1:LDGWKZIo7rky3hgvBe+caln+Dr3dPggB5dvjtD7w9+w= +github.com/prometheus/common v0.2.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.7.0/go.mod h1:DjGbpBbp5NYNiECxcL/VnbXCCaQpKd3tt26CguLLsqA= +github.com/prometheus/common v0.42.0 h1:EKsfXEYo4JpWMHH5cg+KOUWeuJSov1Id8zGR8eeI1YM= +github.com/prometheus/common v0.42.0/go.mod h1:xBwqVerjNdUDjgODMpudtOMwlOwf2SaTr1yjz4b7Zbc= +github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= +github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A= +github.com/prometheus/procfs v0.9.0 h1:wzCHvIvM5SxWqYvwgVL7yJY8Lz3PKn49KQtpgMYJfhI= +github.com/prometheus/procfs 
v0.9.0/go.mod h1:+pB4zwohETzFnmlpe6yd2lSc+0/46IYZRB/chUwxUZY= +github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= +github.com/robfig/cron/v3 v3.0.0 h1:kQ6Cb7aHOHTSzNVNEhmp8EcWKLb4CbiMW9h9VyIhO4E= +github.com/robfig/cron/v3 v3.0.0/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro= +github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M= +github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ= +github.com/rs/xid v1.4.0 h1:qd7wPTDkN6KQx2VmMBLrpHkiyQwgFXRnkOLacUiaSNY= +github.com/rs/xid v1.4.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= +github.com/rs/zerolog v1.13.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU= +github.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThCjNc= +github.com/rs/zerolog v1.29.0 h1:Zes4hju04hjbvkVkOhdl2HpZa+0PmVwigmo8XoORE5w= +github.com/rs/zerolog v1.29.0/go.mod h1:NILgTygv/Uej1ra5XxGf82ZFSLk58MFGAUS2o6usyD0= +github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= +github.com/samuel/go-zookeeper v0.0.0-20190923202752-2cc03de413da/go.mod h1:gi+0XIa01GRL2eRQVjQkKGqKF3SF9vZR/HnPullcV2E= +github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= +github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= +github.com/sercand/kuberesolver/v5 v5.1.0 h1:YLqreB1vUFbZHSidcqI5ChMp+RIRmop0brQOeUFWiRw= +github.com/sercand/kuberesolver/v5 v5.1.0/go.mod h1:Fs1KbKhVRnB2aDWN12NjKCB+RgYMWZJ294T3BtmVCpQ= +github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= +github.com/sergi/go-diff v1.3.1 h1:xkr+Oxo4BOQKmkn/B9eMK0g5Kg/983T9DqqPHwYqD+8= +github.com/sergi/go-diff v1.3.1/go.mod h1:aMJSSKb2lpPvRNec0+w3fl7LP9IOFzdc9Pa4NFbPK1I= +github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4= +github.com/shopspring/decimal v0.0.0-20200227202807-02e2044944cc/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= +github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= +github.com/shurcooL/httpfs v0.0.0-20190707220628-8d4bc4ba7749/go.mod h1:ZY1cvUeJuFPAdZ/B6v7RHavJWZn2YPVFQ1OSXhCGOkg= +github.com/shurcooL/httpgzip v0.0.0-20190720172056-320755c1c1b0/go.mod h1:919LwcH0M7/W4fcZ0/jy0qGght1GIhqyS/EgWGH2j5Q= +github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= +github.com/shurcooL/vfsgen v0.0.0-20181202132449-6a9ea43bcacd/go.mod h1:TrYk7fJVaAttu97ZZKrO9UbRa8izdowaMIZcxYMbVaw= +github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= +github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= +github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= +github.com/sirupsen/logrus v1.9.0 h1:trlNQbNUG3OdDrDil03MCb1H2o9nJ1x4/5LYw7byDE0= +github.com/sirupsen/logrus v1.9.0/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= 
+github.com/skeema/knownhosts v1.2.0 h1:h9r9cf0+u7wSE+M183ZtMGgOJKiL96brpaz5ekfJCpM= +github.com/skeema/knownhosts v1.2.0/go.mod h1:g4fPeYpque7P0xefxtGzV81ihjC8sX2IqpAoNkjxbMo= +github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= +github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= +github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM= +github.com/sony/gobreaker v0.4.1/go.mod h1:ZKptC7FHNvhBz7dN2LGjPVBz2sZJmc0/PkyDJOjmxWY= +github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= +github.com/spf13/pflag v1.0.1/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/streadway/amqp v0.0.0-20190404075320-75d898a42a94/go.mod h1:AZpEONHx3DKn8O/DFsRAY58/XVQiIPMTMB1SddzLXVw= +github.com/streadway/amqp v0.0.0-20190827072141-edfb9018d271/go.mod h1:AZpEONHx3DKn8O/DFsRAY58/XVQiIPMTMB1SddzLXVw= +github.com/streadway/handy v0.0.0-20190108123426-d5acb3125c2a/go.mod h1:qNTQ5P5JnDBl6z3cMAg/SywNDC5ABu5ApDIw6lUbRmI= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= +github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/stvp/tempredis v0.0.0-20181119212430-b82af8480203 h1:QVqDTf3h2WHt08YuiTGPZLls0Wq99X9bWd0Q5ZSBesM= +github.com/stvp/tempredis v0.0.0-20181119212430-b82af8480203/go.mod h1:oqN97ltKNihBbwlX8dLpwxCl3+HnXKV/R0e+sRLd9C8= +github.com/swaggest/assertjson v1.7.0 h1:SKw5Rn0LQs6UvmGrIdaKQbMR1R3ncXm5KNon+QJ7jtw= +github.com/swaggest/jsonschema-go v0.3.40 h1:9EqQ9RvtdW69xfYODmyEKWOSZ12x5eiK+wGD2EVh/L4= +github.com/swaggest/jsonschema-go v0.3.40/go.mod h1:ipIOmoFP64QyRUgyPyU/P9tayq2m2TlvUhyZHrhe3S4= +github.com/swaggest/openapi-go v0.2.23 h1:DYUezSTyw180z1bL51wUnalYYbTMwHBjp1Itvji8/rs= +github.com/swaggest/openapi-go v0.2.23/go.mod h1:T1Koc6EAFAvnCI1MUqOOPDniqGzZy6dOiHtA/j54k14= +github.com/swaggest/refl v1.1.0 h1:a+9a75Kv6ciMozPjVbOfcVTEQe81t2R3emvaD9oGQGc= +github.com/swaggest/refl v1.1.0/go.mod h1:g3Qa6ki0A/L2yxiuUpT+cuBURuRaltF5SDQpg1kMZSY= +github.com/swaggest/swgui v1.4.2 
h1:6AT8ICO0+t6WpbIFsACf5vBmviVX0sqspNbZLoe6vgw= +github.com/swaggest/swgui v1.4.2/go.mod h1:xWDsT2h8obEoGHzX/a6FRClUOS8NvkICyInhi7s3fN8= +github.com/tmc/grpc-websocket-proxy v0.0.0-20170815181823-89b8d40f7ca8/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= +github.com/unrolled/secure v1.0.8 h1:JaMvKbe4CRt8oyxVXn+xY+6jlqd7pyJNSVkmsBxxQsM= +github.com/unrolled/secure v1.0.8/go.mod h1:fO+mEan+FLB0CdEnHf6Q4ZZVNqG+5fuLFnP8p0BXDPI= +github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA= +github.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= +github.com/urfave/negroni v1.0.0 h1:kIimOitoypq34K7TG7DUaJ9kq/N4Ofuwi1sjz0KipXc= +github.com/urfave/negroni v1.0.0/go.mod h1:Meg73S6kFm/4PpbYdq35yYWoCZ9mS/YSx+lKnmiohz4= +github.com/vearutop/statigz v1.1.5 h1:qWvRgXFsseWVTFCkIvwHQPpaLNf9WI0+dDJE7I9432o= +github.com/vearutop/statigz v1.1.5/go.mod h1:czAv7iXgPv/s+xsgXpVEhhD0NSOQ4wZPgmM/n7LANDI= +github.com/vinzenz/yaml v0.0.0-20170920082545-91409cdd725d/go.mod h1:mb5taDqMnJiZNRQ3+02W2IFG+oEz1+dTuCXkp4jpkfo= +github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM= +github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw= +github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= +github.com/yudai/gojsondiff v1.0.0 h1:27cbfqXLVEJ1o8I6v3y9lg8Ydm53EKqHXAOMxEGlCOA= +github.com/yudai/golcs v0.0.0-20170316035057-ecda9a501e82 h1:BHyfKlQyqbsFN5p3IfnEUduWvb9is428/nNb5L3U01M= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +github.com/yuin/goldmark v1.4.0/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +github.com/yuin/goldmark v1.4.13 h1:fVcFKWvrslecOb/tg+Cc05dkeYx540o0FuFt3nUVDoE= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q= +go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= +go.etcd.io/etcd v0.0.0-20191023171146-3cf2f69b5738/go.mod h1:dnLIgRNXwCJa5e+c6mIZCrds/GIG4ncV9HhK5PX7jPg= +go.opencensus.io v0.20.1/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk= +go.opencensus.io v0.20.2/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk= +go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= +go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= +go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= +go.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= +go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= +go.uber.org/atomic v1.10.0 h1:9qC72Qh0+3MqyJbAn8YU5xVq1frD8bn3JtD2oXtafVQ= +go.uber.org/atomic v1.10.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0= +go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= +go.uber.org/multierr v1.3.0/go.mod 
h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4= +go.uber.org/multierr v1.5.0/go.mod h1:FeouvMocqHpRaaGuG9EjoKcStLC43Zu/fmqdUMPcKYU= +go.uber.org/multierr v1.8.0 h1:dg6GjLku4EH+249NNmoIciG9N/jURbDG+pFlTkhzIC8= +go.uber.org/multierr v1.8.0/go.mod h1:7EAYxJLBy9rStEaz58O2t4Uvip6FSURkq8/ppBp95ak= +go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee/go.mod h1:vJERXedbb3MVM5f9Ejo0C68/HhF8uaILCdgjnY+goOA= +go.uber.org/zap v1.9.1/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= +go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= +go.uber.org/zap v1.13.0/go.mod h1:zwrFLgMcdUuIBviXEYEH1YKNaOBnKXsx2IPda5bBwHM= +golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190411191339-88737f569e3a/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190621222207-cc06ce4a13d4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190911031432-227b76d455e7/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200323165209-0ec3e9974c59/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20201203163018-be400aefbc4c/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= +golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= +golang.org/x/crypto v0.0.0-20210513164829-c07d793c2f9a/go.mod h1:P+XmwS30IXTQdn5tA2iutPOUgjI07+tq3H3K9MVA1s8= +golang.org/x/crypto v0.0.0-20210616213533-5ff15b29337e/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.3.1-0.20221117191849-2c476679df9a/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4= +golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU= +golang.org/x/crypto v0.13.0 h1:mvySKfSWJ+UKUii46M40LOvyWfN0s2U+46/jDd0e6Ck= +golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc= +golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20230108222341-4b8118a2686a h1:tlXy25amD5A7gOfbXdqCGN5k8ESEed/Ee1E5RcrYnqU= +golang.org/x/exp v0.0.0-20230108222341-4b8118a2686a/go.mod h1:CxIveKay+FTh1D0yPZemJVgC/95VzuuOLq5Qi4xnoYc= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint 
v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.12.0 h1:rmsUpXtvNzj340zd98LZ4KntptpfRHwpFOHG188oHXc= +golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181005035420-146acd28ed58/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190125091013-d26f9f9a57f3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190923162816-aa69164e4478/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net 
v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= +golang.org/x/net v0.0.0-20210428140749-89ef3d95e781/go.mod h1:OJAsFXCWl8Ukc7SiCT/9KSuxbyM7479/AVlXFRxuMCk= +golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20211105192438-b53810dc28af/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220425223048-2871e0cb64e4/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco= +golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY= +golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= +golang.org/x/net v0.15.0 h1:ugBLEUaxABaB5AJqW9enI0ACdci2RUd4eP51NTBvuJ8= +golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 v0.0.0-20181203162652-d668ce993890/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.6.0 h1:Lh8GPgSKBfWSwFvtuWOfeI3aAAnbXTSutYxJiOJFgIw= +golang.org/x/oauth2 v0.6.0/go.mod h1:ycmewcwgD4Rpr3eZJLSB4Kyyljb3qDh40vJ8STE5HKw= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.3.0 h1:ftCYgMx6zT/asHUrPw8BLLscYtGznsLAnjq5RH9P66E= +golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= +golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod 
h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181005133103-4497e2df6f9e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190403152447-81d4e9dc473e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191010194322-b09406accb47/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191220142924-d4481acd189f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys 
v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220319134239-a9b59b0215f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220422013727-9388b58f7150/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.12.0 h1:CM0HF96J0hcLAwsHPJZjfdNzs0gftsLfgKt57wWHJ0o= +golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc= +golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U= +golang.org/x/term v0.12.0 h1:/ZfYdc3zq+q02Rv9vGqTeSItdzZTSNDmfTi0mBAuidU= +golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text 
v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k= +golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= +golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20220411224347-583f2d630306 h1:+gHMid33q6pen7kv9xvT+JRinntgeXO2AeZVd0AWD3w= +golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190422233926-fe54fb35175b/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190425163242-31fd60d6bfdc/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190823170909-c4a336ef6a2f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20200103221440-774c71fcf114/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.7/go.mod h1:LGqMHiF4EqQNHR1JncWGqT5BVaXmza+X+BDGol+dOxo= +golang.org/x/tools v0.1.10/go.mod h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= +golang.org/x/tools v0.13.0 h1:Iey4qkscZuv0VvIt8E0neZjtPVQFSc870HQ448QgEmQ= +golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= +golang.org/x/xerrors 
v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 h1:H2TDz8ibqkAF6YGhCdN3jS9O0/s90v0rJh3X/OLHEUk= +google.golang.org/api v0.3.1/go.mod h1:6wY9I6uQWHQ8EM57III9mq/AjF+i8G65rmVagqKMtkk= +google.golang.org/api v0.110.0 h1:l+rh0KYUooe9JGbGVx71tbFo4SMbMTXK3I3ia2QSEeU= +google.golang.org/api v0.110.0/go.mod h1:7FC4Vvx1Mooxh8C5HWjzZHcavuS2f6pmJpZx60ca7iI= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.2.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.3.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c= +google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190530194941-fb225487d101/go.mod h1:z3L6/3dTEVtUr6QSP8miRzeRqwQOioJ9I66odjN4I7s= +google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20200423170343-7949de9c1215/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= +google.golang.org/genproto v0.0.0-20230306155012-7f2fa6fef1f4 h1:DdoeryqhaXp1LtT/emMP1BRJPHHKFi5akj/nbx/zNTA= +google.golang.org/genproto v0.0.0-20230306155012-7f2fa6fef1f4/go.mod h1:NWraEVixdDnqcqQ30jipen1STv2r/n24Wb7twVTGR4s= +google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs= +google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.20.0/go.mod h1:chYK+tFQF0nDUGJgXMSgLCQk3phJEuONr2DCgLDdAQM= +google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= +google.golang.org/grpc v1.21.0/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= +google.golang.org/grpc v1.22.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.23.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= +google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.0/go.mod 
h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= +google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= +google.golang.org/grpc v1.55.0 h1:3Oj82/tFSCeUrRTg/5E/7d/W5A1tj6Ky1ABAuZuv5ag= +google.golang.org/grpc v1.55.0/go.mod h1:iYEXKGkEBhg1PjZQvoYEVPTDkHo1/bjTnfwTeGONTY8= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= +google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= +google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +google.golang.org/protobuf v1.30.0 h1:kPPoIgf3TsEvrm0PFe15JQ+570QVxYzEvvHqChK+cng= +google.golang.org/protobuf v1.30.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +gopkg.in/alecthomas/kingpin.v2 v2.2.6 h1:jMFz6MfLP0/4fUyZle81rXUoxOBFi19VUFKVDOQfozc= +gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/cheggaaa/pb.v1 v1.0.25/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= +gopkg.in/gcfg.v1 v1.2.3/go.mod h1:yesOnuUOFQAhST5vPY4nbZsb/huCgGGXlipJsBn0b3o= +gopkg.in/inconshreveable/log15.v2 v2.0.0-20180818164646-67afb5ed74ec/go.mod h1:aPpfJ7XW+gOuirDoZ8gHhLh3kZ1B08FtV2bbmy7Jv3s= +gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= +gopkg.in/ini.v1 v1.66.4 h1:SsAcf+mM7mRZo2nJNGt8mZCjG8ZRaNGMURJw7BsIST4= +gopkg.in/ini.v1 v1.66.4/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo= +gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= +gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= +gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME= +gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI= 
+gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74= +gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gotest.tools v2.2.0+incompatible h1:VsBPFP1AI068pPrMxtb/S8Zkgf9xEmTLJjfM+P5UIEo= +honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= +k8s.io/api v0.0.0-20181130031204-d04500c8c3dd/go.mod h1:iuAfoD4hCxJ8Onx9kaTIt30j7jUFS00AXQi6QMi99vA= +k8s.io/apimachinery v0.0.0-20181201231028-18a5ff3097b4/go.mod h1:ccL7Eh7zubPUSh9A3USN90/OzHNSVN6zxzde07TDCL0= +k8s.io/client-go v9.0.0+incompatible/go.mod h1:7vJpHMYJwNQCWgzmNV+VYUl1zCObLyodBc8nIyt8L5s= +k8s.io/klog v0.1.0/go.mod h1:Gq+BEi5rUBO/HRz0bTSXDUcqjScdoY3a9IHpCEIOOfk= +sigs.k8s.io/yaml v1.1.0/go.mod h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o= +sourcegraph.com/sourcegraph/appdash v0.0.0-20190731080439-ebfcffb1b5c0/go.mod h1:hI742Nqp5OhwiqlzhgfbWU4mW4yO10fP+LoT9WOswdU= +strk.kbt.io/projects/go/libravatar v0.0.0-20191008002943-06d1c002b251 h1:mUcz5b3FJbP5Cvdq7Khzn6J9OCUQJaBwgBkCR+MOwSs= +strk.kbt.io/projects/go/libravatar v0.0.0-20191008002943-06d1c002b251/go.mod h1:FJGmPh3vz9jSos1L/F91iAgnC/aejc0wIIrF2ZwJxdY= diff --git a/http/server.go b/http/server.go new file mode 100644 index 0000000000..8ae58eaa40 --- /dev/null +++ b/http/server.go @@ -0,0 +1,166 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package http + +import ( + "context" + "crypto/tls" + "net/http" + "time" + + "golang.org/x/crypto/acme/autocert" + "golang.org/x/sync/errgroup" +) + +const ( + // DefaultReadHeaderTimeout defines the default timeout for reading headers. + DefaultReadHeaderTimeout = 2 * time.Second +) + +// Config defines the config of an http server. +// TODO: expose via options? 
+type Config struct { + Acme bool + Addr string + Cert string + Key string + Host string + ReadHeaderTimeout time.Duration +} + +// Server is a wrapper around http.Server that exposes different async ListenAndServe methods +// that return corresponding ShutdownFunctions. +type Server struct { + config Config + handler http.Handler +} + +// ShutdownFunction defines a function that is called to shutdown the server. +type ShutdownFunction func(context.Context) error + +func NewServer(config Config, handler http.Handler) *Server { + if config.ReadHeaderTimeout == 0 { + config.ReadHeaderTimeout = DefaultReadHeaderTimeout + } + + return &Server{ + config: config, + handler: handler, + } +} + +// ListenAndServe initializes a server to respond to HTTP network requests. +func (s *Server) ListenAndServe() (*errgroup.Group, ShutdownFunction) { + if s.config.Acme { + return s.listenAndServeAcme() + } else if s.config.Key != "" { + return s.listenAndServeTLS() + } + return s.listenAndServe() +} + +func (s *Server) listenAndServe() (*errgroup.Group, ShutdownFunction) { + var g errgroup.Group + s1 := &http.Server{ + Addr: s.config.Addr, + ReadHeaderTimeout: s.config.ReadHeaderTimeout, + Handler: s.handler, + } + g.Go(func() error { + return s1.ListenAndServe() + }) + + return &g, s1.Shutdown +} + +func (s *Server) listenAndServeTLS() (*errgroup.Group, ShutdownFunction) { + var g errgroup.Group + s1 := &http.Server{ + Addr: ":http", + ReadHeaderTimeout: s.config.ReadHeaderTimeout, + Handler: http.HandlerFunc(redirect), + } + s2 := &http.Server{ + Addr: ":https", + ReadHeaderTimeout: s.config.ReadHeaderTimeout, + Handler: s.handler, + } + g.Go(func() error { + return s1.ListenAndServe() + }) + g.Go(func() error { + return s2.ListenAndServeTLS( + s.config.Cert, + s.config.Key, + ) + }) + + return &g, func(ctx context.Context) error { + var sg errgroup.Group + sg.Go(func() error { + return s1.Shutdown(ctx) + }) + sg.Go(func() error { + return s2.Shutdown(ctx) + }) + return sg.Wait() + } +} + +func (s Server) listenAndServeAcme() (*errgroup.Group, ShutdownFunction) { + var g errgroup.Group + m := &autocert.Manager{ + Cache: autocert.DirCache(".cache"), + Prompt: autocert.AcceptTOS, + HostPolicy: autocert.HostWhitelist(s.config.Host), + } + s1 := &http.Server{ + Addr: ":http", + ReadHeaderTimeout: s.config.ReadHeaderTimeout, + Handler: m.HTTPHandler(nil), + } + s2 := &http.Server{ + Addr: ":https", + Handler: s.handler, + ReadHeaderTimeout: s.config.ReadHeaderTimeout, + TLSConfig: &tls.Config{ + MinVersion: tls.VersionTLS12, + GetCertificate: m.GetCertificate, + NextProtos: []string{"h2", "http/1.1"}, + }, + } + g.Go(func() error { + return s1.ListenAndServe() + }) + g.Go(func() error { + return s2.ListenAndServeTLS("", "") + }) + + return &g, func(ctx context.Context) error { + var sg errgroup.Group + sg.Go(func() error { + return s1.Shutdown(ctx) + }) + sg.Go(func() error { + return s2.Shutdown(ctx) + }) + return sg.Wait() + } +} + +func redirect(w http.ResponseWriter, req *http.Request) { + target := "https://" + req.Host + req.URL.Path + http.Redirect(w, req, target, http.StatusTemporaryRedirect) +} diff --git a/internal/api/api.go b/internal/api/api.go new file mode 100644 index 0000000000..21dd451401 --- /dev/null +++ b/internal/api/api.go @@ -0,0 +1,15 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
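// Usage sketch for the Server wrapper above (illustrative only; the handler,
// the ":3000" address and the 10s grace period are assumptions, not values
// defined in this package):
//
//	srv := NewServer(Config{Addr: ":3000"}, handler)
//	g, shutdown := srv.ListenAndServe()
//
//	// ... later, on a termination signal: stop accepting new connections and
//	// give in-flight requests a bounded amount of time to complete.
//	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
//	defer cancel()
//	_ = shutdown(ctx)
//	_ = g.Wait() // ListenAndServe returns http.ErrServerClosed after a clean shutdown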
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package api diff --git a/internal/api/auth/auth.go b/internal/api/auth/auth.go new file mode 100644 index 0000000000..ee85fa38fc --- /dev/null +++ b/internal/api/auth/auth.go @@ -0,0 +1,117 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package auth + +import ( + "context" + "fmt" + + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/internal/auth/authz" + "github.com/harness/gitness/internal/paths" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/pkg/errors" + "github.com/rs/zerolog/log" +) + +var ( + ErrNotAuthenticated = errors.New("not authenticated") + ErrNotAuthorized = errors.New("not authorized") + ErrParentResourceTypeUnknown = errors.New("Unknown parent resource type") + ErrPrincipalTypeUnknown = errors.New("Unknown principal type") +) + +// Check checks if a resource specific permission is granted for the current auth session in the scope. +// Returns nil if the permission is granted, otherwise returns an error. +// NotAuthenticated, NotAuthorized, or any underlying error. +func Check(ctx context.Context, authorizer authz.Authorizer, session *auth.Session, + scope *types.Scope, resource *types.Resource, permission enum.Permission, +) error { + if session == nil { + return ErrNotAuthenticated + } + + authorized, err := authorizer.Check( + ctx, + session, + scope, + resource, + permission) + if err != nil { + return err + } + + if !authorized { + return ErrNotAuthorized + } + + return nil +} + +// CheckChild checks if a resource specific permission is granted for the current auth session +// in the scope of a parent. +// Returns nil if the permission is granted, otherwise returns an error. +// NotAuthenticated, NotAuthorized, or any underlying error. +func CheckChild(ctx context.Context, authorizer authz.Authorizer, session *auth.Session, + spaceStore store.SpaceStore, repoStore store.RepoStore, parentType enum.ParentResourceType, parentID int64, + resourceType enum.ResourceType, resourceName string, permission enum.Permission) error { + scope, err := getScopeForParent(ctx, spaceStore, repoStore, parentType, parentID) + if err != nil { + return err + } + + resource := &types.Resource{ + Type: resourceType, + Name: resourceName, + } + + return Check(ctx, authorizer, session, scope, resource, permission) +} + +// getScopeForParent Returns the scope for a given resource parent (space or repo). 
+func getScopeForParent(ctx context.Context, spaceStore store.SpaceStore, repoStore store.RepoStore, + parentType enum.ParentResourceType, parentID int64) (*types.Scope, error) { + // TODO: Can this be done cleaner? + switch parentType { + case enum.ParentResourceTypeSpace: + space, err := spaceStore.Find(ctx, parentID) + if err != nil { + return nil, fmt.Errorf("parent space not found: %w", err) + } + + return &types.Scope{SpacePath: space.Path}, nil + + case enum.ParentResourceTypeRepo: + repo, err := repoStore.Find(ctx, parentID) + if err != nil { + return nil, fmt.Errorf("parent repo not found: %w", err) + } + + spacePath, repoName, err := paths.DisectLeaf(repo.Path) + if err != nil { + return nil, errors.Wrapf(err, "Failed to disect path '%s'", repo.Path) + } + + return &types.Scope{SpacePath: spacePath, Repo: repoName}, nil + + default: + log.Ctx(ctx).Debug().Msgf("Unsupported parent type encountered: '%s'", parentType) + + return nil, ErrParentResourceTypeUnknown + } +} diff --git a/internal/api/auth/connector.go b/internal/api/auth/connector.go new file mode 100644 index 0000000000..ae571e3763 --- /dev/null +++ b/internal/api/auth/connector.go @@ -0,0 +1,45 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package auth + +import ( + "context" + + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/internal/auth/authz" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +// CheckConnector checks if a repo specific permission is granted for the current auth session +// in the scope of its parent. +// Returns nil if the permission is granted, otherwise returns an error. +// NotAuthenticated, NotAuthorized, or any underlying error. +func CheckConnector( + ctx context.Context, + authorizer authz.Authorizer, + session *auth.Session, + parentPath, + uid string, + permission enum.Permission, +) error { + scope := &types.Scope{SpacePath: parentPath} + resource := &types.Resource{ + Type: enum.ResourceTypeConnector, + Name: uid, + } + + return Check(ctx, authorizer, session, scope, resource, permission) +} diff --git a/internal/api/auth/pipeline.go b/internal/api/auth/pipeline.go new file mode 100644 index 0000000000..3ca1ce3e7c --- /dev/null +++ b/internal/api/auth/pipeline.go @@ -0,0 +1,45 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
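// Minimal sketch of how a caller might consume the sentinel errors above
// (the mapping to HTTP status codes is an assumption of the example, not
// something this package prescribes):
//
//	err := CheckConnector(ctx, authorizer, session, space.Path, uid, enum.PermissionConnectorView)
//	switch {
//	case errors.Is(err, ErrNotAuthenticated):
//		// no valid session -> respond with 401
//	case errors.Is(err, ErrNotAuthorized):
//		// authenticated, but the permission is missing in this scope -> 403
//	case err != nil:
//		// authorizer or store failure -> 500
//	}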
+ +package auth + +import ( + "context" + + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/internal/auth/authz" + "github.com/harness/gitness/internal/paths" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/pkg/errors" +) + +// CheckPipeline checks if a pipeline specific permission is granted for the current auth session +// in the scope of the parent. +// Returns nil if the permission is granted, otherwise returns an error. +// NotAuthenticated, NotAuthorized, or any underlying error. +func CheckPipeline(ctx context.Context, authorizer authz.Authorizer, session *auth.Session, + repoPath string, pipelineUID string, permission enum.Permission) error { + spacePath, repoName, err := paths.DisectLeaf(repoPath) + if err != nil { + return errors.Wrapf(err, "Failed to disect path '%s'", repoPath) + } + scope := &types.Scope{SpacePath: spacePath, Repo: repoName} + resource := &types.Resource{ + Type: enum.ResourceTypePipeline, + Name: pipelineUID, + } + return Check(ctx, authorizer, session, scope, resource, permission) +} diff --git a/internal/api/auth/repo.go b/internal/api/auth/repo.go new file mode 100644 index 0000000000..05723d1fcf --- /dev/null +++ b/internal/api/auth/repo.go @@ -0,0 +1,52 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package auth + +import ( + "context" + + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/internal/auth/authz" + "github.com/harness/gitness/internal/paths" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/pkg/errors" +) + +// CheckRepo checks if a repo specific permission is granted for the current auth session +// in the scope of its parent. +// Returns nil if the permission is granted, otherwise returns an error. +// NotAuthenticated, NotAuthorized, or any underlying error. +func CheckRepo(ctx context.Context, authorizer authz.Authorizer, session *auth.Session, + repo *types.Repository, permission enum.Permission, orPublic bool, +) error { + if orPublic && repo.IsPublic { + return nil + } + + parentSpace, name, err := paths.DisectLeaf(repo.Path) + if err != nil { + return errors.Wrapf(err, "Failed to disect path '%s'", repo.Path) + } + + scope := &types.Scope{SpacePath: parentSpace} + resource := &types.Resource{ + Type: enum.ResourceTypeRepo, + Name: name, + } + + return Check(ctx, authorizer, session, scope, resource, permission) +} diff --git a/internal/api/auth/secret.go b/internal/api/auth/secret.go new file mode 100644 index 0000000000..a8d47013f5 --- /dev/null +++ b/internal/api/auth/secret.go @@ -0,0 +1,39 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
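// Sketch of the orPublic flag of CheckRepo in practice (illustrative only;
// both permissions are ones used elsewhere in this change):
//
//	// read path: a public repo is readable even without a session.
//	err := CheckRepo(ctx, authorizer, session, repo, enum.PermissionRepoView, true)
//
//	// reporting a commit check: never fall back to the public flag.
//	err = CheckRepo(ctx, authorizer, session, repo, enum.PermissionRepoReportCommitCheck, false)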
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package auth + +import ( + "context" + + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/internal/auth/authz" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +// CheckSecret checks if a repo specific permission is granted for the current auth session +// in the scope of its parent. +// Returns nil if the permission is granted, otherwise returns an error. +// NotAuthenticated, NotAuthorized, or any underlying error. +func CheckSecret(ctx context.Context, authorizer authz.Authorizer, session *auth.Session, + parentPath, uid string, permission enum.Permission) error { + scope := &types.Scope{SpacePath: parentPath} + resource := &types.Resource{ + Type: enum.ResourceTypeSecret, + Name: uid, + } + + return Check(ctx, authorizer, session, scope, resource, permission) +} diff --git a/internal/api/auth/service.go b/internal/api/auth/service.go new file mode 100644 index 0000000000..230cfc9cdd --- /dev/null +++ b/internal/api/auth/service.go @@ -0,0 +1,40 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package auth + +import ( + "context" + + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/internal/auth/authz" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +// CheckService checks if a service specific permission is granted for the current auth session. +// Returns nil if the permission is granted, otherwise returns an error. +// NotAuthenticated, NotAuthorized, or any underlying error. +func CheckService(ctx context.Context, authorizer authz.Authorizer, session *auth.Session, + svc *types.Service, permission enum.Permission, +) error { + // a service exists outside any scope + scope := &types.Scope{} + resource := &types.Resource{ + Type: enum.ResourceTypeService, + Name: svc.UID, + } + + return Check(ctx, authorizer, session, scope, resource, permission) +} diff --git a/internal/api/auth/service_account.go b/internal/api/auth/service_account.go new file mode 100644 index 0000000000..f529ac9044 --- /dev/null +++ b/internal/api/auth/service_account.go @@ -0,0 +1,37 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package auth + +import ( + "context" + + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/internal/auth/authz" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/types/enum" +) + +// CheckServiceAccount checks if a service account specific permission is granted for the current auth session +// in the scope of the parent. +// Returns nil if the permission is granted, otherwise returns an error. +// NotAuthenticated, NotAuthorized, or any underlying error. +func CheckServiceAccount(ctx context.Context, authorizer authz.Authorizer, session *auth.Session, + spaceStore store.SpaceStore, repoStore store.RepoStore, parentType enum.ParentResourceType, parentID int64, + saUID string, permission enum.Permission, +) error { + return CheckChild(ctx, authorizer, session, + spaceStore, repoStore, parentType, parentID, + enum.ResourceTypeServiceAccount, saUID, permission) +} diff --git a/internal/api/auth/space.go b/internal/api/auth/space.go new file mode 100644 index 0000000000..5a6b605055 --- /dev/null +++ b/internal/api/auth/space.go @@ -0,0 +1,52 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package auth + +import ( + "context" + + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/internal/auth/authz" + "github.com/harness/gitness/internal/paths" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/pkg/errors" +) + +// CheckSpace checks if a space specific permission is granted for the current auth session +// in the scope of its parent. +// Returns nil if the permission is granted, otherwise returns an error. +// NotAuthenticated, NotAuthorized, or any underlying error. +func CheckSpace(ctx context.Context, authorizer authz.Authorizer, session *auth.Session, + space *types.Space, permission enum.Permission, orPublic bool, +) error { + if orPublic && space.IsPublic { + return nil + } + + parentSpace, name, err := paths.DisectLeaf(space.Path) + if err != nil { + return errors.Wrapf(err, "Failed to disect path '%s'", space.Path) + } + + scope := &types.Scope{SpacePath: parentSpace} + resource := &types.Resource{ + Type: enum.ResourceTypeSpace, + Name: name, + } + + return Check(ctx, authorizer, session, scope, resource, permission) +} diff --git a/internal/api/auth/template.go b/internal/api/auth/template.go new file mode 100644 index 0000000000..479bda08b2 --- /dev/null +++ b/internal/api/auth/template.go @@ -0,0 +1,39 @@ +// Copyright 2023 Harness, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package auth + +import ( + "context" + + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/internal/auth/authz" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +// CheckTemplate checks if a repo specific permission is granted for the current auth session +// in the scope of its parent. +// Returns nil if the permission is granted, otherwise returns an error. +// NotAuthenticated, NotAuthorized, or any underlying error. +func CheckTemplate(ctx context.Context, authorizer authz.Authorizer, session *auth.Session, + parentPath, uid string, permission enum.Permission) error { + scope := &types.Scope{SpacePath: parentPath} + resource := &types.Resource{ + Type: enum.ResourceTypeTemplate, + Name: uid, + } + + return Check(ctx, authorizer, session, scope, resource, permission) +} diff --git a/internal/api/auth/user.go b/internal/api/auth/user.go new file mode 100644 index 0000000000..81b8c10d21 --- /dev/null +++ b/internal/api/auth/user.go @@ -0,0 +1,40 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package auth + +import ( + "context" + + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/internal/auth/authz" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +// CheckUser checks if a user specific permission is granted for the current auth session. +// Returns nil if the permission is granted, otherwise returns an error. +// NotAuthenticated, NotAuthorized, or any underlying error. +func CheckUser(ctx context.Context, authorizer authz.Authorizer, session *auth.Session, + user *types.User, permission enum.Permission, +) error { + // a user exists outside any scope + scope := &types.Scope{} + resource := &types.Resource{ + Type: enum.ResourceTypeUser, + Name: user.UID, + } + + return Check(ctx, authorizer, session, scope, resource, permission) +} diff --git a/internal/api/controller/check/check_list.go b/internal/api/controller/check/check_list.go new file mode 100644 index 0000000000..dc6144934e --- /dev/null +++ b/internal/api/controller/check/check_list.go @@ -0,0 +1,61 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package check + +import ( + "context" + "fmt" + + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/store/database/dbtx" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +// ListChecks returns an array of status check results for a commit in a repository. +func (c *Controller) ListChecks( + ctx context.Context, + session *auth.Session, + repoRef string, + commitSHA string, + opts types.CheckListOptions, +) ([]types.Check, int, error) { + repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoView) + if err != nil { + return nil, 0, fmt.Errorf("failed to acquire access to repo: %w", err) + } + + var checks []types.Check + var count int + + err = dbtx.New(c.db).WithTx(ctx, func(ctx context.Context) (err error) { + count, err = c.checkStore.Count(ctx, repo.ID, commitSHA, opts) + if err != nil { + return fmt.Errorf("failed to count status check results for repo=%s: %w", repo.UID, err) + } + + checks, err = c.checkStore.List(ctx, repo.ID, commitSHA, opts) + if err != nil { + return fmt.Errorf("failed to list status check results for repo=%s: %w", repo.UID, err) + } + + return nil + }) + if err != nil { + return nil, 0, err + } + + return checks, count, nil +} diff --git a/internal/api/controller/check/check_report.go b/internal/api/controller/check/check_report.go new file mode 100644 index 0000000000..f960f7000b --- /dev/null +++ b/internal/api/controller/check/check_report.go @@ -0,0 +1,181 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package check + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "regexp" + "time" + + "github.com/harness/gitness/gitrpc" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +type ReportInput struct { + CheckUID string `json:"check_uid"` + Status enum.CheckStatus `json:"status"` + Summary string `json:"summary"` + Link string `json:"link"` + Payload types.CheckPayload `json:"payload"` +} + +var regexpCheckUID = "^[a-zA-Z_][0-9a-zA-Z-_.$]{0,127}$" +var matcherCheckUID = regexp.MustCompile(regexpCheckUID) + +// Validate validates and sanitizes the ReportInput data.
+func (in *ReportInput) Validate() error { + if in.CheckUID == "" { + return usererror.BadRequest("Status check UID is missing") + } + + if !matcherCheckUID.MatchString(in.CheckUID) { + return usererror.BadRequestf("Status check UID must match the regular expression: %s", regexpCheckUID) + } + + _, ok := in.Status.Sanitize() + if !ok { + return usererror.BadRequest("Invalid value provided for status check status") + } + + payloadKind, ok := in.Payload.Kind.Sanitize() + if !ok { + return usererror.BadRequest("Invalid value provided for the payload type") + } + in.Payload.Kind = payloadKind + + switch in.Payload.Kind { + case enum.CheckPayloadKindEmpty: + // the default payload kind (empty) does not support the payload data: clear it here + in.Payload.Version = "" + in.Payload.Data = []byte("{}") + + if in.Link == "" { // the link is mandatory as there is nothing in the payload + return usererror.BadRequest("Link is missing") + } + + case enum.CheckPayloadKindRaw, enum.CheckPayloadKindMarkdown: + // the text payload kinds (raw and markdown) do not support the version + if in.Payload.Version != "" { + return usererror.BadRequestf("Payload version must be empty for the payload kind '%s'", + in.Payload.Kind) + } + + payloadDataJSON, err := sanitizeJSONPayload(in.Payload.Data, &types.CheckPayloadText{}) + if err != nil { + return err + } + + in.Payload.Data = payloadDataJSON + + case enum.CheckPayloadKindPipeline: + return usererror.BadRequest("Kind cannot be pipeline for external checks") + + } + + return nil +} + +func sanitizeJSONPayload(source json.RawMessage, data any) (json.RawMessage, error) { + if len(source) == 0 { + return json.Marshal(data) // marshal the empty object + } + + decoder := json.NewDecoder(bytes.NewReader(source)) + decoder.DisallowUnknownFields() + + if err := decoder.Decode(&data); err != nil { + return nil, usererror.BadRequestf("Payload data doesn't match the required format: %s", err.Error()) + } + + buffer := bytes.NewBuffer(nil) + buffer.Grow(512) + + encoder := json.NewEncoder(buffer) + encoder.SetEscapeHTML(false) + if err := encoder.Encode(data); err != nil { + return nil, fmt.Errorf("failed to sanitize json payload: %w", err) + } + + result := buffer.Bytes() + + if result[len(result)-1] == '\n' { + result = result[:len(result)-1] + } + + return result, nil +} + +// Report modifies an existing or creates a new (if none yet exists) status check report for a specific commit. 
+func (c *Controller) Report( + ctx context.Context, + session *auth.Session, + repoRef string, + commitSHA string, + in *ReportInput, + metadata map[string]string, +) (*types.Check, error) { + repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoReportCommitCheck) + if err != nil { + return nil, fmt.Errorf("failed to acquire access to repo: %w", err) + } + + if errValidate := in.Validate(); errValidate != nil { + return nil, errValidate + } + + if !gitrpc.ValidateCommitSHA(commitSHA) { + return nil, usererror.BadRequest("invalid commit SHA provided") + } + + _, err = c.gitRPCClient.GetCommit(ctx, &gitrpc.GetCommitParams{ + ReadParams: gitrpc.ReadParams{RepoUID: repo.GitUID}, + SHA: commitSHA, + }) + if err != nil { + return nil, fmt.Errorf("failed to find commit sha=%s: %w", commitSHA, err) + } + + now := time.Now().UnixMilli() + + metadataJSON, _ := json.Marshal(metadata) + + statusCheckReport := &types.Check{ + CreatedBy: session.Principal.ID, + Created: now, + Updated: now, + RepoID: repo.ID, + CommitSHA: commitSHA, + UID: in.CheckUID, + Status: in.Status, + Summary: in.Summary, + Link: in.Link, + Payload: in.Payload, + Metadata: metadataJSON, + ReportedBy: *session.Principal.ToPrincipalInfo(), + } + + err = c.checkStore.Upsert(ctx, statusCheckReport) + if err != nil { + return nil, fmt.Errorf("failed to upsert status check result for repo=%s: %w", repo.UID, err) + } + + return statusCheckReport, nil +} diff --git a/internal/api/controller/check/controller.go b/internal/api/controller/check/controller.go new file mode 100644 index 0000000000..866af76278 --- /dev/null +++ b/internal/api/controller/check/controller.go @@ -0,0 +1,74 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License.
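// Illustrative ReportInput that satisfies the Validate rules above (the UID,
// summary and link are made-up example values; enum.CheckStatusSuccess is
// assumed to be one of the sanitized status constants):
//
//	in := &ReportInput{
//		CheckUID: "ci-lint",                          // must match ^[a-zA-Z_][0-9a-zA-Z-_.$]{0,127}$
//		Status:   enum.CheckStatusSuccess,
//		Summary:  "lint passed",
//		Link:     "https://ci.example.com/builds/42", // required because the payload kind is empty
//		Payload:  types.CheckPayload{Kind: enum.CheckPayloadKindEmpty},
//	}
//	// For the empty kind, Validate() clears Payload.Version and resets Payload.Data to "{}".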
+ +package check + +import ( + "context" + "fmt" + + "github.com/harness/gitness/gitrpc" + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/internal/auth/authz" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/jmoiron/sqlx" +) + +type Controller struct { + db *sqlx.DB + authorizer authz.Authorizer + repoStore store.RepoStore + checkStore store.CheckStore + gitRPCClient gitrpc.Interface +} + +func NewController( + db *sqlx.DB, + authorizer authz.Authorizer, + repoStore store.RepoStore, + checkStore store.CheckStore, + gitRPCClient gitrpc.Interface, +) *Controller { + return &Controller{ + db: db, + authorizer: authorizer, + repoStore: repoStore, + checkStore: checkStore, + gitRPCClient: gitRPCClient, + } +} + +func (c *Controller) getRepoCheckAccess(ctx context.Context, + session *auth.Session, repoRef string, reqPermission enum.Permission, +) (*types.Repository, error) { + if repoRef == "" { + return nil, usererror.BadRequest("A valid repository reference must be provided.") + } + + repo, err := c.repoStore.FindByRef(ctx, repoRef) + if err != nil { + return nil, fmt.Errorf("failed to find repository: %w", err) + } + + if err = apiauth.CheckRepo(ctx, c.authorizer, session, repo, reqPermission, false); err != nil { + return nil, fmt.Errorf("access check failed: %w", err) + } + + return repo, nil +} diff --git a/internal/api/controller/check/wire.go b/internal/api/controller/check/wire.go new file mode 100644 index 0000000000..234926af39 --- /dev/null +++ b/internal/api/controller/check/wire.go @@ -0,0 +1,45 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package check + +import ( + "github.com/harness/gitness/gitrpc" + "github.com/harness/gitness/internal/auth/authz" + "github.com/harness/gitness/internal/store" + + "github.com/google/wire" + "github.com/jmoiron/sqlx" +) + +// WireSet provides a wire set for this package. +var WireSet = wire.NewSet( + ProvideController, +) + +func ProvideController( + db *sqlx.DB, + authorizer authz.Authorizer, + repoStore store.RepoStore, + checkStore store.CheckStore, + rpcClient gitrpc.Interface, +) *Controller { + return NewController( + db, + authorizer, + repoStore, + checkStore, + rpcClient, + ) +} diff --git a/internal/api/controller/connector/controller.go b/internal/api/controller/connector/controller.go new file mode 100644 index 0000000000..9e21be5891 --- /dev/null +++ b/internal/api/controller/connector/controller.go @@ -0,0 +1,47 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package connector + +import ( + "github.com/harness/gitness/internal/auth/authz" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/types/check" + + "github.com/jmoiron/sqlx" +) + +type Controller struct { + db *sqlx.DB + uidCheck check.PathUID + connectorStore store.ConnectorStore + authorizer authz.Authorizer + spaceStore store.SpaceStore +} + +func NewController( + db *sqlx.DB, + uidCheck check.PathUID, + authorizer authz.Authorizer, + connectorStore store.ConnectorStore, + spaceStore store.SpaceStore, +) *Controller { + return &Controller{ + db: db, + uidCheck: uidCheck, + connectorStore: connectorStore, + authorizer: authorizer, + spaceStore: spaceStore, + } +} diff --git a/internal/api/controller/connector/create.go b/internal/api/controller/connector/create.go new file mode 100644 index 0000000000..ec3fce7630 --- /dev/null +++ b/internal/api/controller/connector/create.go @@ -0,0 +1,96 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package connector + +import ( + "context" + "fmt" + "strconv" + "strings" + "time" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/check" + "github.com/harness/gitness/types/enum" +) + +var ( + // errConnectorRequiresParent if the user tries to create a connector without a parent space. 
+ errConnectorRequiresParent = usererror.BadRequest( + "Parent space required - standalone connectors are not supported.") +) + +type CreateInput struct { + Description string `json:"description"` + SpaceRef string `json:"space_ref"` // Ref of the parent space + UID string `json:"uid"` + Type string `json:"type"` + Data string `json:"data"` +} + +func (c *Controller) Create( + ctx context.Context, + session *auth.Session, + in *CreateInput, +) (*types.Connector, error) { + if err := c.sanitizeCreateInput(in); err != nil { + return nil, fmt.Errorf("failed to sanitize input: %w", err) + } + parentSpace, err := c.spaceStore.FindByRef(ctx, in.SpaceRef) + if err != nil { + return nil, fmt.Errorf("failed to find parent by ref: %w", err) + } + + err = apiauth.CheckConnector(ctx, c.authorizer, session, parentSpace.Path, in.UID, enum.PermissionConnectorEdit) + if err != nil { + return nil, err + } + + now := time.Now().UnixMilli() + connector := &types.Connector{ + Description: in.Description, + Data: in.Data, + Type: in.Type, + SpaceID: parentSpace.ID, + UID: in.UID, + Created: now, + Updated: now, + Version: 0, + } + err = c.connectorStore.Create(ctx, connector) + if err != nil { + return nil, fmt.Errorf("connector creation failed: %w", err) + } + + return connector, nil +} + +func (c *Controller) sanitizeCreateInput(in *CreateInput) error { + parentRefAsID, err := strconv.ParseInt(in.SpaceRef, 10, 64) + + if (err == nil && parentRefAsID <= 0) || len(strings.TrimSpace(in.SpaceRef)) == 0 { + return errConnectorRequiresParent + } + + if err := c.uidCheck(in.UID, false); err != nil { + return err + } + + in.Description = strings.TrimSpace(in.Description) + return check.Description(in.Description) +} diff --git a/internal/api/controller/connector/delete.go b/internal/api/controller/connector/delete.go new file mode 100644 index 0000000000..4a669f9e40 --- /dev/null +++ b/internal/api/controller/connector/delete.go @@ -0,0 +1,41 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License.
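// Illustrative input for Create above (the space path, UID, type and data are
// made-up example values; the connector type and data are stored as-is in this
// change). SpaceRef may be a space path or a space ID, but must not be empty:
//
//	in := &CreateInput{
//		SpaceRef:    "root/infra",
//		UID:         "github-bot",
//		Type:        "github",
//		Description: "connector used by CI pipelines",
//		Data:        `{"token": "<secret-ref>"}`,
//	}
//	connector, err := c.Create(ctx, session, in) // c is a *Controller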
+ +package connector + +import ( + "context" + "fmt" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types/enum" +) + +func (c *Controller) Delete(ctx context.Context, session *auth.Session, spaceRef string, uid string) error { + space, err := c.spaceStore.FindByRef(ctx, spaceRef) + if err != nil { + return fmt.Errorf("failed to find space: %w", err) + } + + err = apiauth.CheckConnector(ctx, c.authorizer, session, space.Path, uid, enum.PermissionConnectorDelete) + if err != nil { + return fmt.Errorf("failed to authorize: %w", err) + } + err = c.connectorStore.DeleteByUID(ctx, space.ID, uid) + if err != nil { + return fmt.Errorf("could not delete connector: %w", err) + } + return nil +} diff --git a/internal/api/controller/connector/find.go b/internal/api/controller/connector/find.go new file mode 100644 index 0000000000..5e2d5d6079 --- /dev/null +++ b/internal/api/controller/connector/find.go @@ -0,0 +1,46 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package connector + +import ( + "context" + "fmt" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +func (c *Controller) Find( + ctx context.Context, + session *auth.Session, + spaceRef string, + uid string, +) (*types.Connector, error) { + space, err := c.spaceStore.FindByRef(ctx, spaceRef) + if err != nil { + return nil, fmt.Errorf("failed to find space: %w", err) + } + err = apiauth.CheckConnector(ctx, c.authorizer, session, space.Path, uid, enum.PermissionConnectorView) + if err != nil { + return nil, fmt.Errorf("failed to authorize: %w", err) + } + connector, err := c.connectorStore.FindByUID(ctx, space.ID, uid) + if err != nil { + return nil, fmt.Errorf("failed to find connector: %w", err) + } + return connector, nil +} diff --git a/internal/api/controller/connector/update.go b/internal/api/controller/connector/update.go new file mode 100644 index 0000000000..102903a962 --- /dev/null +++ b/internal/api/controller/connector/update.go @@ -0,0 +1,90 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+
+package connector
+
+import (
+    "context"
+    "fmt"
+    "strings"
+
+    apiauth "github.com/harness/gitness/internal/api/auth"
+    "github.com/harness/gitness/internal/auth"
+    "github.com/harness/gitness/types"
+    "github.com/harness/gitness/types/check"
+    "github.com/harness/gitness/types/enum"
+)
+
+// UpdateInput is used for updating a connector.
+type UpdateInput struct {
+    UID         *string `json:"uid"`
+    Description *string `json:"description"`
+    Data        *string `json:"data"`
+}
+
+func (c *Controller) Update(
+    ctx context.Context,
+    session *auth.Session,
+    spaceRef string,
+    uid string,
+    in *UpdateInput,
+) (*types.Connector, error) {
+    space, err := c.spaceStore.FindByRef(ctx, spaceRef)
+    if err != nil {
+        return nil, fmt.Errorf("failed to find space: %w", err)
+    }
+
+    err = apiauth.CheckConnector(ctx, c.authorizer, session, space.Path, uid, enum.PermissionConnectorEdit)
+    if err != nil {
+        return nil, fmt.Errorf("failed to authorize: %w", err)
+    }
+
+    if err := c.sanitizeUpdateInput(in); err != nil {
+        return nil, fmt.Errorf("failed to sanitize input: %w", err)
+    }
+
+    connector, err := c.connectorStore.FindByUID(ctx, space.ID, uid)
+    if err != nil {
+        return nil, fmt.Errorf("failed to find connector: %w", err)
+    }
+
+    return c.connectorStore.UpdateOptLock(ctx, connector, func(original *types.Connector) error {
+        if in.UID != nil {
+            original.UID = *in.UID
+        }
+        if in.Description != nil {
+            original.Description = *in.Description
+        }
+        if in.Data != nil {
+            original.Data = *in.Data
+        }
+
+        return nil
+    })
+}
+
+func (c *Controller) sanitizeUpdateInput(in *UpdateInput) error {
+    if in.UID != nil {
+        if err := c.uidCheck(*in.UID, false); err != nil {
+            return err
+        }
+    }
+
+    if in.Description != nil {
+        *in.Description = strings.TrimSpace(*in.Description)
+        if err := check.Description(*in.Description); err != nil {
+            return err
+        }
+    }
+
+    // TODO: Validate Data
+
+    return nil
+}
diff --git a/internal/api/controller/connector/wire.go b/internal/api/controller/connector/wire.go
new file mode 100644
index 0000000000..fede552311
--- /dev/null
+++ b/internal/api/controller/connector/wire.go
@@ -0,0 +1,38 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package connector
+
+import (
+    "github.com/harness/gitness/internal/auth/authz"
+    "github.com/harness/gitness/internal/store"
+    "github.com/harness/gitness/types/check"
+
+    "github.com/google/wire"
+    "github.com/jmoiron/sqlx"
+)
+
+// WireSet provides a wire set for this package.
+var WireSet = wire.NewSet(
+    ProvideController,
+)
+
+func ProvideController(db *sqlx.DB,
+    uidCheck check.PathUID,
+    connectorStore store.ConnectorStore,
+    authorizer authz.Authorizer,
+    spaceStore store.SpaceStore,
+) *Controller {
+    return NewController(db, uidCheck, authorizer, connectorStore, spaceStore)
+}
diff --git a/internal/api/controller/execution/cancel.go b/internal/api/controller/execution/cancel.go
new file mode 100644
index 0000000000..c172dcb27e
--- /dev/null
+++ b/internal/api/controller/execution/cancel.go
@@ -0,0 +1,67 @@
+// Copyright 2023 Harness, Inc.
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package execution + +import ( + "context" + "fmt" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/internal/pipeline/checks" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + "github.com/rs/zerolog/log" +) + +func (c *Controller) Cancel( + ctx context.Context, + session *auth.Session, + repoRef string, + pipelineUID string, + executionNum int64, +) (*types.Execution, error) { + repo, err := c.repoStore.FindByRef(ctx, repoRef) + if err != nil { + return nil, fmt.Errorf("failed to find repo by ref: %w", err) + } + err = apiauth.CheckPipeline(ctx, c.authorizer, session, repo.Path, pipelineUID, enum.PermissionPipelineExecute) + if err != nil { + return nil, fmt.Errorf("failed to authorize: %w", err) + } + + pipeline, err := c.pipelineStore.FindByUID(ctx, repo.ID, pipelineUID) + if err != nil { + return nil, fmt.Errorf("failed to find pipeline: %w", err) + } + + execution, err := c.executionStore.FindByNumber(ctx, pipeline.ID, executionNum) + if err != nil { + return nil, fmt.Errorf("failed to find execution %d: %w", executionNum, err) + } + + err = c.canceler.Cancel(ctx, repo, execution) + if err != nil { + return nil, fmt.Errorf("unable to cancel execution: %w", err) + } + + // Write to the checks store, log and ignore on errors + err = checks.Write(ctx, c.checkStore, execution, pipeline) + if err != nil { + log.Ctx(ctx).Warn().Err(err).Msg("could not update status check") + } + + return execution, nil +} diff --git a/internal/api/controller/execution/controller.go b/internal/api/controller/execution/controller.go new file mode 100644 index 0000000000..e5b44dbcd0 --- /dev/null +++ b/internal/api/controller/execution/controller.go @@ -0,0 +1,64 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
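Cancel above treats the status-check update as best effort: the primary operation has already succeeded, so a failure on the secondary write is only logged and not returned. A generic, self-contained sketch of that pattern, purely for illustration (the stub functions are made up):

```go
package main

import (
	"errors"
	"log"
)

func cancelExecution() error  { return nil }                          // primary operation (stub)
func writeStatusCheck() error { return errors.New("store offline") } // secondary write (stub)

func main() {
	if err := cancelExecution(); err != nil {
		log.Fatalf("cancel failed: %v", err) // primary failures are surfaced to the caller
	}
	if err := writeStatusCheck(); err != nil {
		// Secondary failures are logged and swallowed so the caller still gets a success.
		log.Printf("warning: could not update status check: %v", err)
	}
	log.Println("execution canceled")
}
```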
+
+package execution
+
+import (
+    "github.com/harness/gitness/internal/auth/authz"
+    "github.com/harness/gitness/internal/pipeline/canceler"
+    "github.com/harness/gitness/internal/pipeline/commit"
+    "github.com/harness/gitness/internal/pipeline/triggerer"
+    "github.com/harness/gitness/internal/store"
+
+    "github.com/jmoiron/sqlx"
+)
+
+type Controller struct {
+    db             *sqlx.DB
+    authorizer     authz.Authorizer
+    executionStore store.ExecutionStore
+    checkStore     store.CheckStore
+    canceler       canceler.Canceler
+    commitService  commit.CommitService
+    triggerer      triggerer.Triggerer
+    repoStore      store.RepoStore
+    stageStore     store.StageStore
+    pipelineStore  store.PipelineStore
+}
+
+func NewController(
+    db *sqlx.DB,
+    authorizer authz.Authorizer,
+    executionStore store.ExecutionStore,
+    checkStore store.CheckStore,
+    canceler canceler.Canceler,
+    commitService commit.CommitService,
+    triggerer triggerer.Triggerer,
+    repoStore store.RepoStore,
+    stageStore store.StageStore,
+    pipelineStore store.PipelineStore,
+) *Controller {
+    return &Controller{
+        db:             db,
+        authorizer:     authorizer,
+        executionStore: executionStore,
+        checkStore:     checkStore,
+        canceler:       canceler,
+        commitService:  commitService,
+        triggerer:      triggerer,
+        repoStore:      repoStore,
+        stageStore:     stageStore,
+        pipelineStore:  pipelineStore,
+    }
+}
diff --git a/internal/api/controller/execution/create.go b/internal/api/controller/execution/create.go
new file mode 100644
index 0000000000..3174e548a2
--- /dev/null
+++ b/internal/api/controller/execution/create.go
@@ -0,0 +1,90 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package execution
+
+import (
+    "context"
+    "fmt"
+
+    apiauth "github.com/harness/gitness/internal/api/auth"
+    "github.com/harness/gitness/internal/auth"
+    "github.com/harness/gitness/internal/pipeline/triggerer"
+    "github.com/harness/gitness/types"
+    "github.com/harness/gitness/types/enum"
+
+    "github.com/drone/go-scm/scm"
+)
+
+func (c *Controller) Create(
+    ctx context.Context,
+    session *auth.Session,
+    repoRef string,
+    pipelineUID string,
+    branch string,
+) (*types.Execution, error) {
+    repo, err := c.repoStore.FindByRef(ctx, repoRef)
+    if err != nil {
+        return nil, fmt.Errorf("failed to find repo by ref: %w", err)
+    }
+    err = apiauth.CheckPipeline(ctx, c.authorizer, session, repo.Path,
+        pipelineUID, enum.PermissionPipelineExecute)
+    if err != nil {
+        return nil, fmt.Errorf("failed to authorize: %w", err)
+    }
+
+    pipeline, err := c.pipelineStore.FindByUID(ctx, repo.ID, pipelineUID)
+    if err != nil {
+        return nil, fmt.Errorf("failed to find pipeline: %w", err)
+    }
+
+    // If the branch is empty, use the default branch specified in the pipeline.
+    // If that is also empty, use the repo default branch.
+    if branch == "" {
+        branch = pipeline.DefaultBranch
+        if branch == "" {
+            branch = repo.DefaultBranch
+        }
+    }
+    // expand the branch to a git reference.
+    ref := scm.ExpandRef(branch, "refs/heads")
+
+    // Fetch the commit information from the commits service.
+ commit, err := c.commitService.FindRef(ctx, repo, ref) + if err != nil { + return nil, fmt.Errorf("failed to fetch commit: %w", err) + } + + // Create manual hook for execution. + hook := &triggerer.Hook{ + Trigger: session.Principal.UID, // who/what triggered the build, different from commit author + AuthorLogin: commit.Author.Identity.Name, + TriggeredBy: session.Principal.ID, + AuthorName: commit.Author.Identity.Name, + AuthorEmail: commit.Author.Identity.Email, + Ref: ref, + Message: commit.Message, + Title: "", // we expect this to be empty. + Before: commit.SHA, + After: commit.SHA, + Sender: session.Principal.UID, + Source: branch, + Target: branch, + Params: map[string]string{}, + Timestamp: commit.Author.When.UnixMilli(), + } + + // Trigger the execution + return c.triggerer.Trigger(ctx, pipeline, hook) +} diff --git a/internal/api/controller/execution/delete.go b/internal/api/controller/execution/delete.go new file mode 100644 index 0000000000..429991ca35 --- /dev/null +++ b/internal/api/controller/execution/delete.go @@ -0,0 +1,51 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package execution + +import ( + "context" + "fmt" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types/enum" +) + +func (c *Controller) Delete( + ctx context.Context, + session *auth.Session, + repoRef string, + pipelineUID string, + executionNum int64, +) error { + repo, err := c.repoStore.FindByRef(ctx, repoRef) + if err != nil { + return fmt.Errorf("failed to find repo by ref: %w", err) + } + err = apiauth.CheckPipeline(ctx, c.authorizer, session, repo.Path, pipelineUID, enum.PermissionPipelineDelete) + if err != nil { + return fmt.Errorf("failed to authorize: %w", err) + } + + pipeline, err := c.pipelineStore.FindByUID(ctx, repo.ID, pipelineUID) + if err != nil { + return fmt.Errorf("failed to find pipeline: %w", err) + } + err = c.executionStore.Delete(ctx, pipeline.ID, executionNum) + if err != nil { + return fmt.Errorf("could not delete execution: %w", err) + } + return nil +} diff --git a/internal/api/controller/execution/find.go b/internal/api/controller/execution/find.go new file mode 100644 index 0000000000..f62b238f85 --- /dev/null +++ b/internal/api/controller/execution/find.go @@ -0,0 +1,63 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
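Create above expands a plain branch name into a fully qualified git reference (via scm.ExpandRef from go-scm) before resolving the commit and building the manual trigger hook. A dependency-free sketch of that expansion step; the local helper only mirrors how the call is used here and is not the library implementation:

```go
package main

import (
	"fmt"
	"strings"
)

// expandRef mimics the ref expansion used above: a short branch or tag name gains
// the given prefix, while an already fully qualified ref is left untouched.
func expandRef(name, prefix string) string {
	if strings.HasPrefix(name, "refs/") {
		return name
	}
	return strings.TrimSuffix(prefix, "/") + "/" + name
}

func main() {
	fmt.Println(expandRef("main", "refs/heads"))            // refs/heads/main
	fmt.Println(expandRef("refs/heads/main", "refs/heads")) // refs/heads/main (unchanged)
	fmt.Println(expandRef("v1.0.0", "refs/tags"))           // refs/tags/v1.0.0
}
```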
+ +package execution + +import ( + "context" + "fmt" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +func (c *Controller) Find( + ctx context.Context, + session *auth.Session, + repoRef string, + pipelineUID string, + executionNum int64, +) (*types.Execution, error) { + repo, err := c.repoStore.FindByRef(ctx, repoRef) + if err != nil { + return nil, fmt.Errorf("failed to find repo by ref: %w", err) + } + err = apiauth.CheckPipeline(ctx, c.authorizer, session, repo.Path, pipelineUID, enum.PermissionPipelineView) + if err != nil { + return nil, fmt.Errorf("failed to authorize: %w", err) + } + + pipeline, err := c.pipelineStore.FindByUID(ctx, repo.ID, pipelineUID) + if err != nil { + return nil, fmt.Errorf("failed to find pipeline: %w", err) + } + + execution, err := c.executionStore.FindByNumber(ctx, pipeline.ID, executionNum) + if err != nil { + return nil, fmt.Errorf("failed to find execution %d: %w", executionNum, err) + } + + stages, err := c.stageStore.ListWithSteps(ctx, execution.ID) + if err != nil { + return nil, fmt.Errorf("could not query stage information for execution %d: %w", + executionNum, err) + } + + // Add stages information to the execution + execution.Stages = stages + + return execution, nil +} diff --git a/internal/api/controller/execution/list.go b/internal/api/controller/execution/list.go new file mode 100644 index 0000000000..e7d0fd4afa --- /dev/null +++ b/internal/api/controller/execution/list.go @@ -0,0 +1,70 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
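Find above returns the execution together with its stages (and their steps) loaded via ListWithSteps. A self-contained sketch of consuming that nested shape; the struct fields below are illustrative stand-ins, not the actual types.Execution definition:

```go
package main

import "fmt"

// Illustrative stand-ins for the nested execution/stage/step shape.
type step struct {
	Name   string
	Status string
}

type stage struct {
	Name  string
	Steps []step
}

type execution struct {
	Number int64
	Stages []stage
}

func main() {
	exec := execution{
		Number: 7,
		Stages: []stage{
			{Name: "build", Steps: []step{{Name: "compile", Status: "success"}}},
			{Name: "test", Steps: []step{{Name: "unit", Status: "running"}}},
		},
	}
	for _, st := range exec.Stages {
		for _, s := range st.Steps {
			fmt.Printf("execution #%d / %s / %s: %s\n", exec.Number, st.Name, s.Name, s.Status)
		}
	}
}
```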
+package execution + +import ( + "context" + "fmt" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/store/database/dbtx" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +func (c *Controller) List( + ctx context.Context, + session *auth.Session, + repoRef string, + pipelineUID string, + pagination types.Pagination, +) ([]*types.Execution, int64, error) { + repo, err := c.repoStore.FindByRef(ctx, repoRef) + if err != nil { + return nil, 0, fmt.Errorf("failed to find repo by ref: %w", err) + } + + err = apiauth.CheckPipeline(ctx, c.authorizer, session, repo.Path, pipelineUID, enum.PermissionPipelineView) + if err != nil { + return nil, 0, fmt.Errorf("failed to authorize: %w", err) + } + + pipeline, err := c.pipelineStore.FindByUID(ctx, repo.ID, pipelineUID) + if err != nil { + return nil, 0, fmt.Errorf("failed to find pipeline: %w", err) + } + + var count int64 + var executions []*types.Execution + + err = dbtx.New(c.db).WithTx(ctx, func(ctx context.Context) (err error) { + count, err = c.executionStore.Count(ctx, pipeline.ID) + if err != nil { + return fmt.Errorf("failed to count child executions: %w", err) + } + + executions, err = c.executionStore.List(ctx, pipeline.ID, pagination) + if err != nil { + return fmt.Errorf("failed to list child executions: %w", err) + } + + return + }, dbtx.TxDefaultReadOnly) + if err != nil { + return executions, count, fmt.Errorf("failed to fetch list: %w", err) + } + + return executions, count, nil +} diff --git a/internal/api/controller/execution/wire.go b/internal/api/controller/execution/wire.go new file mode 100644 index 0000000000..420b28a7d0 --- /dev/null +++ b/internal/api/controller/execution/wire.go @@ -0,0 +1,46 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package execution + +import ( + "github.com/harness/gitness/internal/auth/authz" + "github.com/harness/gitness/internal/pipeline/canceler" + "github.com/harness/gitness/internal/pipeline/commit" + "github.com/harness/gitness/internal/pipeline/triggerer" + "github.com/harness/gitness/internal/store" + + "github.com/google/wire" + "github.com/jmoiron/sqlx" +) + +// WireSet provides a wire set for this package. 
+var WireSet = wire.NewSet(
+    ProvideController,
+)
+
+func ProvideController(db *sqlx.DB,
+    authorizer authz.Authorizer,
+    executionStore store.ExecutionStore,
+    checkStore store.CheckStore,
+    canceler canceler.Canceler,
+    commitService commit.CommitService,
+    triggerer triggerer.Triggerer,
+    repoStore store.RepoStore,
+    stageStore store.StageStore,
+    pipelineStore store.PipelineStore,
+) *Controller {
+    return NewController(db, authorizer, executionStore, checkStore,
+        canceler, commitService, triggerer, repoStore, stageStore, pipelineStore)
+}
diff --git a/internal/api/controller/githook/controller.go b/internal/api/controller/githook/controller.go
new file mode 100644
index 0000000000..ec5accfd14
--- /dev/null
+++ b/internal/api/controller/githook/controller.go
@@ -0,0 +1,93 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package githook
+
+import (
+    "context"
+    "fmt"
+
+    "github.com/harness/gitness/internal/api/usererror"
+    "github.com/harness/gitness/internal/auth"
+    "github.com/harness/gitness/internal/auth/authz"
+    eventsgit "github.com/harness/gitness/internal/events/git"
+    "github.com/harness/gitness/internal/store"
+    "github.com/harness/gitness/types"
+    "github.com/harness/gitness/types/enum"
+
+    "github.com/jmoiron/sqlx"
+)
+
+// ServerHookOutput represents the output of server hook api calls.
+// TODO: support non-error messages (once we need it).
+type ServerHookOutput struct {
+    // Error contains the user facing error (like "branch is protected", ...).
+    Error *string `json:"error,omitempty"`
+}
+
+// ReferenceUpdate represents an update of a git reference.
+type ReferenceUpdate struct {
+    // Ref is the full name of the reference that got updated.
+    Ref string `json:"ref"`
+    // Old is the old commit hash (before the update).
+    Old string `json:"old"`
+    // New is the new commit hash (after the update).
+    New string `json:"new"`
+}
+
+// BaseInput contains the base input for any githook api call.
+type BaseInput struct {
+    RepoID      int64 `json:"repo_id"`
+    PrincipalID int64 `json:"principal_id"`
+}
+
+type Controller struct {
+    db             *sqlx.DB
+    authorizer     authz.Authorizer
+    principalStore store.PrincipalStore
+    repoStore      store.RepoStore
+    gitReporter    *eventsgit.Reporter
+}
+
+func NewController(
+    db *sqlx.DB,
+    authorizer authz.Authorizer,
+    principalStore store.PrincipalStore,
+    repoStore store.RepoStore,
+    gitReporter *eventsgit.Reporter,
+) *Controller {
+    return &Controller{
+        db:             db,
+        authorizer:     authorizer,
+        principalStore: principalStore,
+        repoStore:      repoStore,
+        gitReporter:    gitReporter,
+    }
+}
+
+func (c *Controller) getRepoCheckAccess(ctx context.Context,
+    _ *auth.Session, repoID int64, _ enum.Permission) (*types.Repository, error) {
+    if repoID < 1 {
+        return nil, usererror.BadRequest("A valid repository reference must be provided.")
+    }
+
+    repo, err := c.repoStore.Find(ctx, repoID)
+    if err != nil {
+        return nil, fmt.Errorf("failed to find repo with id %d: %w", repoID, err)
+    }
+
+    // TODO: execute permission check.
block anything but gitness service? + + return repo, nil +} diff --git a/internal/api/controller/githook/post_receive.go b/internal/api/controller/githook/post_receive.go new file mode 100644 index 0000000000..8f6871cd31 --- /dev/null +++ b/internal/api/controller/githook/post_receive.go @@ -0,0 +1,140 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package githook + +import ( + "context" + "fmt" + "strings" + + "github.com/harness/gitness/githook" + "github.com/harness/gitness/internal/auth" + events "github.com/harness/gitness/internal/events/git" + "github.com/harness/gitness/types" +) + +const ( + // gitReferenceNamePrefixBranch is the prefix of references of type branch. + gitReferenceNamePrefixBranch = "refs/heads/" + + // gitReferenceNamePrefixTag is the prefix of references of type tag. + gitReferenceNamePrefixTag = "refs/tags/" +) + +// PostReceive executes the post-receive hook for a git repository. +func (c *Controller) PostReceive( + ctx context.Context, + session *auth.Session, + repoID int64, + principalID int64, + in *githook.PostReceiveInput, +) (*githook.Output, error) { + if in == nil { + return nil, fmt.Errorf("input is nil") + } + + // report ref events (best effort) + c.reportReferenceEvents(ctx, repoID, principalID, in) + + return &githook.Output{}, nil +} + +// reportReferenceEvents is reporting reference events to the event system. +// NOTE: keep best effort for now as it doesn't change the outcome of the git operation. +// TODO: in the future we might want to think about propagating errors so user is aware of events not being triggered. 
+func (c *Controller) reportReferenceEvents( + ctx context.Context, + repoID int64, + principalID int64, + in *githook.PostReceiveInput, +) { + for _, refUpdate := range in.RefUpdates { + switch { + case strings.HasPrefix(refUpdate.Ref, gitReferenceNamePrefixBranch): + c.reportBranchEvent(ctx, repoID, principalID, refUpdate) + case strings.HasPrefix(refUpdate.Ref, gitReferenceNamePrefixTag): + c.reportTagEvent(ctx, repoID, principalID, refUpdate) + default: + // Ignore any other references in post-receive + } + } +} + +func (c *Controller) reportBranchEvent( + ctx context.Context, + repoID int64, + principalID int64, + branchUpdate githook.ReferenceUpdate, +) { + switch { + case branchUpdate.Old == types.NilSHA: + c.gitReporter.BranchCreated(ctx, &events.BranchCreatedPayload{ + RepoID: repoID, + PrincipalID: principalID, + Ref: branchUpdate.Ref, + SHA: branchUpdate.New, + }) + case branchUpdate.New == types.NilSHA: + c.gitReporter.BranchDeleted(ctx, &events.BranchDeletedPayload{ + RepoID: repoID, + PrincipalID: principalID, + Ref: branchUpdate.Ref, + SHA: branchUpdate.Old, + }) + default: + c.gitReporter.BranchUpdated(ctx, &events.BranchUpdatedPayload{ + RepoID: repoID, + PrincipalID: principalID, + Ref: branchUpdate.Ref, + OldSHA: branchUpdate.Old, + NewSHA: branchUpdate.New, + Forced: false, // TODO: data not available yet + }) + } +} + +func (c *Controller) reportTagEvent( + ctx context.Context, + repoID int64, + principalID int64, + tagUpdate githook.ReferenceUpdate, +) { + switch { + case tagUpdate.Old == types.NilSHA: + c.gitReporter.TagCreated(ctx, &events.TagCreatedPayload{ + RepoID: repoID, + PrincipalID: principalID, + Ref: tagUpdate.Ref, + SHA: tagUpdate.New, + }) + case tagUpdate.New == types.NilSHA: + c.gitReporter.TagDeleted(ctx, &events.TagDeletedPayload{ + RepoID: repoID, + PrincipalID: principalID, + Ref: tagUpdate.Ref, + SHA: tagUpdate.Old, + }) + default: + c.gitReporter.TagUpdated(ctx, &events.TagUpdatedPayload{ + RepoID: repoID, + PrincipalID: principalID, + Ref: tagUpdate.Ref, + OldSHA: tagUpdate.Old, + NewSHA: tagUpdate.New, + // tags can only be force updated! + Forced: true, + }) + } +} diff --git a/internal/api/controller/githook/pre_receive.go b/internal/api/controller/githook/pre_receive.go new file mode 100644 index 0000000000..c690c73f93 --- /dev/null +++ b/internal/api/controller/githook/pre_receive.go @@ -0,0 +1,69 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package githook + +import ( + "context" + "fmt" + + "github.com/harness/gitness/githook" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/gotidy/ptr" +) + +// PreReceive executes the pre-receive hook for a git repository. 
+func (c *Controller) PreReceive(
+    ctx context.Context,
+    session *auth.Session,
+    repoID int64,
+    principalID int64,
+    in *githook.PreReceiveInput,
+) (*githook.Output, error) {
+    if in == nil {
+        return nil, fmt.Errorf("input is nil")
+    }
+
+    repo, err := c.getRepoCheckAccess(ctx, session, repoID, enum.PermissionRepoEdit)
+    if err != nil {
+        return nil, err
+    }
+
+    branchOutput := c.blockDefaultBranchDeletion(repo, in)
+    if branchOutput != nil {
+        return branchOutput, nil
+    }
+
+    // TODO: Branch Protection, Block non-branch/tag refs (?), ...
+
+    return &githook.Output{}, nil
+}
+
+func (c *Controller) blockDefaultBranchDeletion(repo *types.Repository,
+    in *githook.PreReceiveInput) *githook.Output {
+    repoDefaultBranchRef := gitReferenceNamePrefixBranch + repo.DefaultBranch
+
+    for _, refUpdate := range in.RefUpdates {
+        if refUpdate.New == types.NilSHA && refUpdate.Ref == repoDefaultBranchRef {
+            return &githook.Output{
+                Error: ptr.String(usererror.ErrDefaultBranchCantBeDeleted.Error()),
+            }
+        }
+    }
+    return nil
+}
diff --git a/internal/api/controller/githook/update.go b/internal/api/controller/githook/update.go
new file mode 100644
index 0000000000..f84028aea8
--- /dev/null
+++ b/internal/api/controller/githook/update.go
@@ -0,0 +1,40 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package githook
+
+import (
+    "context"
+    "fmt"
+
+    "github.com/harness/gitness/githook"
+    "github.com/harness/gitness/internal/auth"
+)
+
+// Update executes the update hook for a git repository.
+func (c *Controller) Update(
+    ctx context.Context,
+    session *auth.Session,
+    repoID int64,
+    principalID int64,
+    in *githook.UpdateInput,
+) (*githook.Output, error) {
+    if in == nil {
+        return nil, fmt.Errorf("input is nil")
+    }
+
+    // We currently don't have any update action (nothing planned as of now).
+
+    return &githook.Output{}, nil
+}
diff --git a/internal/api/controller/githook/wire.go b/internal/api/controller/githook/wire.go
new file mode 100644
index 0000000000..bc1828449a
--- /dev/null
+++ b/internal/api/controller/githook/wire.go
@@ -0,0 +1,34 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package githook
+
+import (
+    "github.com/harness/gitness/internal/auth/authz"
+    eventsgit "github.com/harness/gitness/internal/events/git"
+    "github.com/harness/gitness/internal/store"
+
+    "github.com/google/wire"
+    "github.com/jmoiron/sqlx"
+)
+
+// WireSet provides a wire set for this package.
+var WireSet = wire.NewSet( + ProvideController, +) + +func ProvideController(db *sqlx.DB, authorizer authz.Authorizer, principalStore store.PrincipalStore, + repoStore store.RepoStore, gitReporter *eventsgit.Reporter) *Controller { + return NewController(db, authorizer, principalStore, repoStore, gitReporter) +} diff --git a/internal/api/controller/logs/controller.go b/internal/api/controller/logs/controller.go new file mode 100644 index 0000000000..21f5a9768e --- /dev/null +++ b/internal/api/controller/logs/controller.go @@ -0,0 +1,59 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package logs + +import ( + "github.com/harness/gitness/internal/auth/authz" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/livelog" + + "github.com/jmoiron/sqlx" +) + +type Controller struct { + db *sqlx.DB + authorizer authz.Authorizer + executionStore store.ExecutionStore + repoStore store.RepoStore + pipelineStore store.PipelineStore + stageStore store.StageStore + stepStore store.StepStore + logStore store.LogStore + logStream livelog.LogStream +} + +func NewController( + db *sqlx.DB, + authorizer authz.Authorizer, + executionStore store.ExecutionStore, + repoStore store.RepoStore, + pipelineStore store.PipelineStore, + stageStore store.StageStore, + stepStore store.StepStore, + logStore store.LogStore, + logStream livelog.LogStream, +) *Controller { + return &Controller{ + db: db, + authorizer: authorizer, + executionStore: executionStore, + repoStore: repoStore, + pipelineStore: pipelineStore, + stageStore: stageStore, + stepStore: stepStore, + logStore: logStore, + logStream: logStream, + } +} diff --git a/internal/api/controller/logs/find.go b/internal/api/controller/logs/find.go new file mode 100644 index 0000000000..0510fe348f --- /dev/null +++ b/internal/api/controller/logs/find.go @@ -0,0 +1,83 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
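The logs controller above holds two backends: a LogStore for the persisted output of finished steps and a LogStream for tailing steps that are still running. A dependency-free sketch of that split, with toy types used purely for illustration:

```go
package main

import "fmt"

// persisted logs: complete output, fetched once (toy stand-in for a log store).
type logStore map[int64]string

// live logs: lines arrive over a channel while the step runs (toy stand-in for a log stream).
func liveTail() <-chan string {
	ch := make(chan string, 2)
	ch <- "cloning repository..."
	ch <- "running tests..."
	close(ch)
	return ch
}

func main() {
	store := logStore{42: "step finished: all tests passed"}

	// Finished step: read from the store.
	fmt.Println(store[42])

	// Running step: consume the stream until it is closed.
	for line := range liveTail() {
		fmt.Println(line)
	}
}
```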
+ +package logs + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/livelog" + "github.com/harness/gitness/types/enum" +) + +func (c *Controller) Find( + ctx context.Context, + session *auth.Session, + repoRef string, + pipelineUID string, + executionNum int64, + stageNum int, + stepNum int, +) ([]*livelog.Line, error) { + repo, err := c.repoStore.FindByRef(ctx, repoRef) + if err != nil { + return nil, fmt.Errorf("failed to find repo by ref: %w", err) + } + err = apiauth.CheckPipeline(ctx, c.authorizer, session, repo.Path, pipelineUID, enum.PermissionPipelineView) + if err != nil { + return nil, fmt.Errorf("failed to authorize pipeline: %w", err) + } + + pipeline, err := c.pipelineStore.FindByUID(ctx, repo.ID, pipelineUID) + if err != nil { + return nil, fmt.Errorf("failed to find pipeline: %w", err) + } + + execution, err := c.executionStore.FindByNumber(ctx, pipeline.ID, executionNum) + if err != nil { + return nil, fmt.Errorf("failed to find execution: %w", err) + } + + stage, err := c.stageStore.FindByNumber(ctx, execution.ID, stageNum) + if err != nil { + return nil, fmt.Errorf("failed to find stage: %w", err) + } + + step, err := c.stepStore.FindByNumber(ctx, stage.ID, stepNum) + if err != nil { + return nil, fmt.Errorf("failed to find step: %w", err) + } + + rc, err := c.logStore.Find(ctx, step.ID) + if err != nil { + return nil, fmt.Errorf("could not find logs: %w", err) + } + defer rc.Close() + + lines := []*livelog.Line{} + buf := new(bytes.Buffer) + buf.ReadFrom(rc) + + err = json.Unmarshal(buf.Bytes(), &lines) + if err != nil { + return nil, fmt.Errorf("could not unmarshal logs: %w", err) + } + + return lines, nil +} diff --git a/internal/api/controller/logs/tail.go b/internal/api/controller/logs/tail.go new file mode 100644 index 0000000000..41071a80e4 --- /dev/null +++ b/internal/api/controller/logs/tail.go @@ -0,0 +1,66 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
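Find above reads the whole persisted blob for a step and unmarshals it as a single JSON array of log lines. A minimal standalone sketch of that decode step; the field names and JSON tags below are assumptions for illustration, not the exact livelog.Line definition:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// line is an illustrative stand-in for a persisted log line.
type line struct {
	Number  int    `json:"pos"`
	Message string `json:"out"`
}

func main() {
	// Persisted step logs, stored as one JSON array.
	blob := []byte(`[{"pos":0,"out":"cloning repository..."},{"pos":1,"out":"build succeeded"}]`)

	var lines []*line
	if err := json.Unmarshal(blob, &lines); err != nil {
		panic(err)
	}
	for _, l := range lines {
		fmt.Printf("%d: %s\n", l.Number, l.Message)
	}
}
```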
+ +package logs + +import ( + "context" + "fmt" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/livelog" + "github.com/harness/gitness/types/enum" +) + +func (c *Controller) Tail( + ctx context.Context, + session *auth.Session, + repoRef string, + pipelineUID string, + executionNum int64, + stageNum int, + stepNum int, +) (<-chan *livelog.Line, <-chan error, error) { + repo, err := c.repoStore.FindByRef(ctx, repoRef) + if err != nil { + return nil, nil, fmt.Errorf("failed to find repo by ref: %w", err) + } + err = apiauth.CheckPipeline(ctx, c.authorizer, session, repo.Path, pipelineUID, enum.PermissionPipelineView) + if err != nil { + return nil, nil, fmt.Errorf("failed to authorize pipeline: %w", err) + } + pipeline, err := c.pipelineStore.FindByUID(ctx, repo.ID, pipelineUID) + if err != nil { + return nil, nil, fmt.Errorf("failed to find pipeline: %w", err) + } + + execution, err := c.executionStore.FindByNumber(ctx, pipeline.ID, executionNum) + if err != nil { + return nil, nil, fmt.Errorf("failed to find execution: %w", err) + } + + stage, err := c.stageStore.FindByNumber(ctx, execution.ID, stageNum) + if err != nil { + return nil, nil, fmt.Errorf("failed to find stage: %w", err) + } + + step, err := c.stepStore.FindByNumber(ctx, stage.ID, stepNum) + if err != nil { + return nil, nil, fmt.Errorf("failed to find step: %w", err) + } + + linec, errc := c.logStream.Tail(ctx, step.ID) + return linec, errc, nil +} diff --git a/internal/api/controller/logs/wire.go b/internal/api/controller/logs/wire.go new file mode 100644 index 0000000000..395e12d0c2 --- /dev/null +++ b/internal/api/controller/logs/wire.go @@ -0,0 +1,43 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package logs + +import ( + "github.com/harness/gitness/internal/auth/authz" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/livelog" + + "github.com/google/wire" + "github.com/jmoiron/sqlx" +) + +// WireSet provides a wire set for this package. +var WireSet = wire.NewSet( + ProvideController, +) + +func ProvideController(db *sqlx.DB, + authorizer authz.Authorizer, + executionStore store.ExecutionStore, + repoStore store.RepoStore, + pipelineStore store.PipelineStore, + stageStore store.StageStore, + stepStore store.StepStore, + logStore store.LogStore, + logStream livelog.LogStream, +) *Controller { + return NewController(db, authorizer, executionStore, repoStore, + pipelineStore, stageStore, stepStore, logStore, logStream) +} diff --git a/internal/api/controller/pipeline/controller.go b/internal/api/controller/pipeline/controller.go new file mode 100644 index 0000000000..f4a0112678 --- /dev/null +++ b/internal/api/controller/pipeline/controller.go @@ -0,0 +1,51 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pipeline + +import ( + "github.com/harness/gitness/internal/auth/authz" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/types/check" + + "github.com/jmoiron/sqlx" +) + +type Controller struct { + defaultBranch string + db *sqlx.DB + uidCheck check.PathUID + repoStore store.RepoStore + triggerStore store.TriggerStore + authorizer authz.Authorizer + pipelineStore store.PipelineStore +} + +func NewController( + db *sqlx.DB, + uidCheck check.PathUID, + authorizer authz.Authorizer, + repoStore store.RepoStore, + triggerStore store.TriggerStore, + pipelineStore store.PipelineStore, +) *Controller { + return &Controller{ + db: db, + uidCheck: uidCheck, + repoStore: repoStore, + triggerStore: triggerStore, + authorizer: authorizer, + pipelineStore: pipelineStore, + } +} diff --git a/internal/api/controller/pipeline/create.go b/internal/api/controller/pipeline/create.go new file mode 100644 index 0000000000..1e4206a5de --- /dev/null +++ b/internal/api/controller/pipeline/create.go @@ -0,0 +1,134 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pipeline + +import ( + "context" + "fmt" + "strings" + "time" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/check" + "github.com/harness/gitness/types/enum" + + "github.com/rs/zerolog/log" +) + +var ( + // errPipelineRequiresParent is returned if the user tries to create a pipeline without a parent space. + errPipelineRequiresParent = usererror.BadRequest( + "Parent space required - standalone pipelines are not supported.") + + // errPipelineRequiresConfigPath is returned if the user tries to create a pipeline with an empty config path. 
+ errPipelineRequiresConfigPath = usererror.BadRequest( + "Pipeline requires a config path.") +) + +type CreateInput struct { + Description string `json:"description"` + UID string `json:"uid"` + Disabled bool `json:"disabled"` + DefaultBranch string `json:"default_branch"` + ConfigPath string `json:"config_path"` +} + +func (c *Controller) Create( + ctx context.Context, + session *auth.Session, + repoRef string, + in *CreateInput, +) (*types.Pipeline, error) { + repo, err := c.repoStore.FindByRef(ctx, repoRef) + if err != nil { + return nil, fmt.Errorf("failed to find repo by ref: %w", err) + } + + err = apiauth.CheckPipeline(ctx, c.authorizer, session, repo.Path, "", enum.PermissionPipelineEdit) + if err != nil { + return nil, fmt.Errorf("failed to authorize pipeline: %w", err) + } + + if err := c.sanitizeCreateInput(in); err != nil { + return nil, fmt.Errorf("failed to sanitize input: %w", err) + } + + var pipeline *types.Pipeline + now := time.Now().UnixMilli() + pipeline = &types.Pipeline{ + Description: in.Description, + RepoID: repo.ID, + UID: in.UID, + Disabled: in.Disabled, + CreatedBy: session.Principal.ID, + Seq: 0, + DefaultBranch: in.DefaultBranch, + ConfigPath: in.ConfigPath, + Created: now, + Updated: now, + Version: 0, + } + err = c.pipelineStore.Create(ctx, pipeline) + if err != nil { + return nil, fmt.Errorf("pipeline creation failed: %w", err) + } + + // Try to create a default trigger on pipeline creation. + // Default trigger operations are set on pull request created, reopened or updated. + // We log an error on failure but don't fail the op. + trigger := &types.Trigger{ + Description: "auto-created trigger on pipeline creation", + Created: now, + Updated: now, + PipelineID: pipeline.ID, + RepoID: pipeline.RepoID, + CreatedBy: session.Principal.ID, + UID: "default", + Actions: []enum.TriggerAction{enum.TriggerActionPullReqCreated, + enum.TriggerActionPullReqReopened, enum.TriggerActionPullReqBranchUpdated}, + Disabled: false, + Version: 0, + } + err = c.triggerStore.Create(ctx, trigger) + if err != nil { + log.Ctx(ctx).Err(err).Msg("failed to create auto trigger on pipeline creation") + } + + return pipeline, nil +} + +func (c *Controller) sanitizeCreateInput(in *CreateInput) error { + if err := c.uidCheck(in.UID, false); err != nil { + return err + } + + in.Description = strings.TrimSpace(in.Description) + if err := check.Description(in.Description); err != nil { + return err + } + + if in.DefaultBranch == "" { + in.DefaultBranch = c.defaultBranch + } + + if in.ConfigPath == "" { + return errPipelineRequiresConfigPath + } + + return nil +} diff --git a/internal/api/controller/pipeline/delete.go b/internal/api/controller/pipeline/delete.go new file mode 100644 index 0000000000..640a5209fd --- /dev/null +++ b/internal/api/controller/pipeline/delete.go @@ -0,0 +1,42 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
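For reference, the JSON body that the pipeline CreateInput above expects would look roughly like the output of this sketch. The struct here only mirrors the exported fields for illustration and is not the type used by the server; the example values are made up:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// createInput mirrors the pipeline CreateInput fields shown above (illustrative copy).
type createInput struct {
	Description   string `json:"description"`
	UID           string `json:"uid"`
	Disabled      bool   `json:"disabled"`
	DefaultBranch string `json:"default_branch"`
	ConfigPath    string `json:"config_path"`
}

func main() {
	in := createInput{
		Description:   "build and test on every PR",
		UID:           "ci",
		DefaultBranch: "main",              // optional: falls back to a default when empty
		ConfigPath:    ".harness/ci.yaml",  // required: an empty path is rejected
	}
	body, err := json.MarshalIndent(in, "", "  ")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(body))
}
```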
+ +package pipeline + +import ( + "context" + "fmt" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types/enum" +) + +func (c *Controller) Delete(ctx context.Context, session *auth.Session, repoRef string, uid string) error { + repo, err := c.repoStore.FindByRef(ctx, repoRef) + if err != nil { + return fmt.Errorf("failed to find repo by ref: %w", err) + } + + err = apiauth.CheckPipeline(ctx, c.authorizer, session, repo.Path, uid, enum.PermissionPipelineDelete) + if err != nil { + return fmt.Errorf("failed to authorize pipeline: %w", err) + } + + err = c.pipelineStore.DeleteByUID(ctx, repo.ID, uid) + if err != nil { + return fmt.Errorf("could not delete pipeline: %w", err) + } + return nil +} diff --git a/internal/api/controller/pipeline/find.go b/internal/api/controller/pipeline/find.go new file mode 100644 index 0000000000..886db7c6e5 --- /dev/null +++ b/internal/api/controller/pipeline/find.go @@ -0,0 +1,42 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pipeline + +import ( + "context" + "fmt" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +func (c *Controller) Find( + ctx context.Context, + session *auth.Session, + repoRef string, + uid string, +) (*types.Pipeline, error) { + repo, err := c.repoStore.FindByRef(ctx, repoRef) + if err != nil { + return nil, fmt.Errorf("failed to find repo by ref: %w", err) + } + err = apiauth.CheckPipeline(ctx, c.authorizer, session, repo.Path, uid, enum.PermissionPipelineView) + if err != nil { + return nil, fmt.Errorf("failed to authorize pipeline: %w", err) + } + return c.pipelineStore.FindByUID(ctx, repo.ID, uid) +} diff --git a/internal/api/controller/pipeline/update.go b/internal/api/controller/pipeline/update.go new file mode 100644 index 0000000000..0db615d680 --- /dev/null +++ b/internal/api/controller/pipeline/update.go @@ -0,0 +1,96 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
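The update endpoints in this patch (the connector update above and the pipeline update that follows) persist changes through an UpdateOptLock helper on the store. The store implementation is not part of this diff, but the usual shape of such an optimistic-lock retry loop is sketched here with toy types, purely for illustration:

```go
package main

import (
	"errors"
	"fmt"
)

type pipeline struct {
	UID     string
	Version int64
}

var errVersionConflict = errors.New("version conflict")

// db simulates a store row that only accepts writes carrying the current version.
var db = pipeline{UID: "ci", Version: 3}

func update(p pipeline) error {
	if p.Version != db.Version {
		return errVersionConflict // someone else updated the row in the meantime
	}
	p.Version++
	db = p
	return nil
}

// updateOptLock re-reads the row and re-applies mutateFn until the versioned write succeeds.
func updateOptLock(mutateFn func(p *pipeline) error) (pipeline, error) {
	for {
		current := db // re-read the latest version
		if err := mutateFn(&current); err != nil {
			return pipeline{}, err
		}
		if err := update(current); err == nil {
			return db, nil
		} else if !errors.Is(err, errVersionConflict) {
			return pipeline{}, err
		}
		// on conflict: loop and retry against the fresh version
	}
}

func main() {
	out, err := updateOptLock(func(p *pipeline) error {
		p.UID = "ci-renamed"
		return nil
	})
	fmt.Println(out, err) // {ci-renamed 4} <nil>
}
```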
+
+package pipeline
+
+import (
+    "context"
+    "fmt"
+    "strings"
+
+    apiauth "github.com/harness/gitness/internal/api/auth"
+    "github.com/harness/gitness/internal/auth"
+    "github.com/harness/gitness/types"
+    "github.com/harness/gitness/types/check"
+    "github.com/harness/gitness/types/enum"
+)
+
+type UpdateInput struct {
+    UID         *string `json:"uid"`
+    Description *string `json:"description"`
+    Disabled    *bool   `json:"disabled"`
+    ConfigPath  *string `json:"config_path"`
+}
+
+func (c *Controller) Update(
+    ctx context.Context,
+    session *auth.Session,
+    repoRef string,
+    uid string,
+    in *UpdateInput,
+) (*types.Pipeline, error) {
+    repo, err := c.repoStore.FindByRef(ctx, repoRef)
+    if err != nil {
+        return nil, fmt.Errorf("failed to find repo by ref: %w", err)
+    }
+    err = apiauth.CheckPipeline(ctx, c.authorizer, session, repo.Path, uid, enum.PermissionPipelineEdit)
+    if err != nil {
+        return nil, fmt.Errorf("failed to authorize pipeline: %w", err)
+    }
+
+    if err := c.sanitizeUpdateInput(in); err != nil {
+        return nil, fmt.Errorf("failed to sanitize input: %w", err)
+    }
+
+    pipeline, err := c.pipelineStore.FindByUID(ctx, repo.ID, uid)
+    if err != nil {
+        return nil, fmt.Errorf("failed to find pipeline: %w", err)
+    }
+
+    return c.pipelineStore.UpdateOptLock(ctx, pipeline, func(pipeline *types.Pipeline) error {
+        if in.UID != nil {
+            pipeline.UID = *in.UID
+        }
+        if in.Description != nil {
+            pipeline.Description = *in.Description
+        }
+        if in.ConfigPath != nil {
+            pipeline.ConfigPath = *in.ConfigPath
+        }
+        if in.Disabled != nil {
+            pipeline.Disabled = *in.Disabled
+        }
+
+        return nil
+    })
+}
+
+func (c *Controller) sanitizeUpdateInput(in *UpdateInput) error {
+    if in.UID != nil {
+        if err := c.uidCheck(*in.UID, false); err != nil {
+            return err
+        }
+    }
+
+    if in.Description != nil {
+        *in.Description = strings.TrimSpace(*in.Description)
+        if err := check.Description(*in.Description); err != nil {
+            return err
+        }
+    }
+
+    if in.ConfigPath != nil {
+        if *in.ConfigPath == "" {
+            return errPipelineRequiresConfigPath
+        }
+    }
+
+    return nil
+}
diff --git a/internal/api/controller/pipeline/wire.go b/internal/api/controller/pipeline/wire.go
new file mode 100644
index 0000000000..f79c436653
--- /dev/null
+++ b/internal/api/controller/pipeline/wire.go
@@ -0,0 +1,40 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package pipeline
+
+import (
+    "github.com/harness/gitness/internal/auth/authz"
+    "github.com/harness/gitness/internal/store"
+    "github.com/harness/gitness/types/check"
+
+    "github.com/google/wire"
+    "github.com/jmoiron/sqlx"
+)
+
+// WireSet provides a wire set for this package.
+var WireSet = wire.NewSet( + ProvideController, +) + +func ProvideController(db *sqlx.DB, + uidCheck check.PathUID, + repoStore store.RepoStore, + triggerStore store.TriggerStore, + authorizer authz.Authorizer, + pipelineStore store.PipelineStore, +) *Controller { + return NewController(db, uidCheck, authorizer, + repoStore, triggerStore, pipelineStore) +} diff --git a/internal/api/controller/plugin/controller.go b/internal/api/controller/plugin/controller.go new file mode 100644 index 0000000000..6287791128 --- /dev/null +++ b/internal/api/controller/plugin/controller.go @@ -0,0 +1,36 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package plugin + +import ( + "github.com/harness/gitness/internal/store" + + "github.com/jmoiron/sqlx" +) + +type Controller struct { + db *sqlx.DB + pluginStore store.PluginStore +} + +func NewController( + db *sqlx.DB, + pluginStore store.PluginStore, +) *Controller { + return &Controller{ + db: db, + pluginStore: pluginStore, + } +} diff --git a/internal/api/controller/plugin/list.go b/internal/api/controller/plugin/list.go new file mode 100644 index 0000000000..6230654c81 --- /dev/null +++ b/internal/api/controller/plugin/list.go @@ -0,0 +1,46 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +package plugin + +import ( + "context" + "fmt" + + "github.com/harness/gitness/types" +) + +// List lists all the global plugins. +// Since this just lists the schema of plugins, it does not require any +// specific authorization. Plugins are available globally so they are not +// associated with any space. +func (c *Controller) List( + ctx context.Context, + filter types.ListQueryFilter, +) ([]*types.Plugin, int64, error) { + + plugins, err := c.pluginStore.List(ctx, filter) + if err != nil { + return nil, 0, fmt.Errorf("failed to list plugins: %w", err) + } + + if len(plugins) < filter.Size { + return plugins, int64(len(plugins)), nil + } + count, err := c.pluginStore.Count(ctx, filter) + if err != nil { + return nil, 0, fmt.Errorf("failed to count plugins: %w", err) + } + + return plugins, count, nil +} diff --git a/internal/api/controller/plugin/wire.go b/internal/api/controller/plugin/wire.go new file mode 100644 index 0000000000..55be3bc875 --- /dev/null +++ b/internal/api/controller/plugin/wire.go @@ -0,0 +1,33 @@ +// Copyright 2023 Harness, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package plugin + +import ( + "github.com/harness/gitness/internal/store" + + "github.com/google/wire" + "github.com/jmoiron/sqlx" +) + +// WireSet provides a wire set for this package. +var WireSet = wire.NewSet( + ProvideController, +) + +func ProvideController(db *sqlx.DB, + pluginStore store.PluginStore, +) *Controller { + return NewController(db, pluginStore) +} diff --git a/internal/api/controller/principal/controller.go b/internal/api/controller/principal/controller.go new file mode 100644 index 0000000000..b5b48ec808 --- /dev/null +++ b/internal/api/controller/principal/controller.go @@ -0,0 +1,29 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package principal + +import ( + "github.com/harness/gitness/internal/store" +) + +type controller struct { + principalStore store.PrincipalStore +} + +func newController(principalStore store.PrincipalStore) *controller { + return &controller{ + principalStore: principalStore, + } +} diff --git a/internal/api/controller/principal/interface.go b/internal/api/controller/principal/interface.go new file mode 100644 index 0000000000..22118fece5 --- /dev/null +++ b/internal/api/controller/principal/interface.go @@ -0,0 +1,28 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package principal + +import ( + "context" + + "github.com/harness/gitness/types" +) + +// Controller interface provides an abstraction that allows to have different implementations of +// principal related information. +type Controller interface { + // List lists the principals based on the provided filter. 
+ List(ctx context.Context, opts *types.PrincipalFilter) ([]*types.PrincipalInfo, error) +} diff --git a/internal/api/controller/principal/search.go b/internal/api/controller/principal/search.go new file mode 100644 index 0000000000..95527c92dc --- /dev/null +++ b/internal/api/controller/principal/search.go @@ -0,0 +1,36 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package principal + +import ( + "context" + + "github.com/harness/gitness/types" +) + +func (c controller) List(ctx context.Context, opts *types.PrincipalFilter) ( + []*types.PrincipalInfo, error) { + principals, err := c.principalStore.List(ctx, opts) + if err != nil { + return nil, err + } + + pInfoUsers := make([]*types.PrincipalInfo, len(principals)) + for i := range principals { + pInfoUsers[i] = principals[i].ToPrincipalInfo() + } + + return pInfoUsers, nil +} diff --git a/internal/api/controller/principal/wire.go b/internal/api/controller/principal/wire.go new file mode 100644 index 0000000000..954a19c004 --- /dev/null +++ b/internal/api/controller/principal/wire.go @@ -0,0 +1,30 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package principal + +import ( + "github.com/harness/gitness/internal/store" + + "github.com/google/wire" +) + +// WireSet provides a wire set for this package. +var WireSet = wire.NewSet( + ProvideController, +) + +func ProvideController(principalStore store.PrincipalStore) Controller { + return newController(principalStore) +} diff --git a/internal/api/controller/pullreq/activity_list.go b/internal/api/controller/pullreq/activity_list.go new file mode 100644 index 0000000000..2ea8e6bb2c --- /dev/null +++ b/internal/api/controller/pullreq/activity_list.go @@ -0,0 +1,67 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package pullreq + +import ( + "context" + "fmt" + + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +// ActivityList returns a list of pull request activities +// from the provided repository and pull request number. +func (c *Controller) ActivityList( + ctx context.Context, + session *auth.Session, + repoRef string, + prNum int64, + filter *types.PullReqActivityFilter, +) ([]*types.PullReqActivity, int64, error) { + repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoView) + if err != nil { + return nil, 0, fmt.Errorf("failed to acquire access to repo: %w", err) + } + + pr, err := c.pullreqStore.FindByNumber(ctx, repo.ID, prNum) + if err != nil { + return nil, 0, fmt.Errorf("failed to find pull request by number: %w", err) + } + + list, err := c.activityStore.List(ctx, pr.ID, filter) + if err != nil { + return nil, 0, fmt.Errorf("failed to list pull request activities: %w", err) + } + + // the function returns deleted comments, but it removes their content + for _, act := range list { + if act.Deleted != nil { + act.Text = "" + } + } + + if filter.Limit == 0 { + return list, int64(len(list)), nil + } + + count, err := c.activityStore.Count(ctx, pr.ID, filter) + if err != nil { + return nil, 0, fmt.Errorf("failed to count pull request activities: %w", err) + } + + return list, count, nil +} diff --git a/internal/api/controller/pullreq/comment_create.go b/internal/api/controller/pullreq/comment_create.go new file mode 100644 index 0000000000..62fdf8a067 --- /dev/null +++ b/internal/api/controller/pullreq/comment_create.go @@ -0,0 +1,309 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pullreq + +import ( + "context" + "errors" + "fmt" + "time" + + "github.com/harness/gitness/gitrpc" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/store" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/rs/zerolog/log" +) + +type CommentCreateInput struct { + // ParentID is set only for replies + ParentID int64 `json:"parent_id"` + // Text is comment text + Text string `json:"text"` + // Used only for code comments + TargetCommitSHA string `json:"target_commit_sha"` + SourceCommitSHA string `json:"source_commit_sha"` + Path string `json:"path"` + LineStart int `json:"line_start"` + LineStartNew bool `json:"line_start_new"` + LineEnd int `json:"line_end"` + LineEndNew bool `json:"line_end_new"` +} + +func (in *CommentCreateInput) IsReply() bool { + return in.ParentID != 0 +} + +func (in *CommentCreateInput) IsCodeComment() bool { + return in.SourceCommitSHA != "" +} + +func (in *CommentCreateInput) Validate() error { + // TODO: Validate Text size.
+ + if in.SourceCommitSHA == "" && in.TargetCommitSHA == "" { + return nil // not a code comment + } + + if in.SourceCommitSHA == "" || in.TargetCommitSHA == "" { + return usererror.BadRequest("for code comments source commit SHA and target commit SHA must be provided") + } + + if in.ParentID != 0 { + return usererror.BadRequest("can't create a reply that is a code comment") + } + + if in.Path == "" { + return usererror.BadRequest("code comment requires file path") + } + + if in.LineStart <= 0 || in.LineEnd <= 0 { + return usererror.BadRequest("code comments require line numbers") + } + + return nil +} + +// CommentCreate creates a new pull request comment (pull request activity, type=comment/code-comment). +// +//nolint:gocognit,funlen // refactor if needed +func (c *Controller) CommentCreate( + ctx context.Context, + session *auth.Session, + repoRef string, + prNum int64, + in *CommentCreateInput, +) (*types.PullReqActivity, error) { + repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoView) + if err != nil { + return nil, fmt.Errorf("failed to acquire access to repo: %w", err) + } + + pr, err := c.pullreqStore.FindByNumber(ctx, repo.ID, prNum) + if err != nil { + return nil, fmt.Errorf("failed to find pull request by number: %w", err) + } + + if errValidate := in.Validate(); errValidate != nil { + return nil, errValidate + } + + act := getCommentActivity(session, pr, in) + + switch { + case in.IsCodeComment(): + var cut gitrpc.DiffCutOutput + + cut, err = c.gitRPCClient.DiffCut(ctx, &gitrpc.DiffCutParams{ + ReadParams: gitrpc.ReadParams{RepoUID: repo.GitUID}, + SourceCommitSHA: in.SourceCommitSHA, + SourceBranch: pr.SourceBranch, + TargetCommitSHA: in.TargetCommitSHA, + TargetBranch: pr.TargetBranch, + Path: in.Path, + LineStart: in.LineStart, + LineStartNew: in.LineStartNew, + LineEnd: in.LineEnd, + LineEndNew: in.LineEndNew, + }) + if gitrpc.ErrorStatus(err) == gitrpc.StatusNotFound || gitrpc.ErrorStatus(err) == gitrpc.StatusPathNotFound { + return nil, usererror.BadRequest(gitrpc.ErrorMessage(err)) + } + if err != nil { + return nil, err + } + + setAsCodeComment(act, cut, in.Path, in.SourceCommitSHA) + _ = act.SetPayload(&types.PullRequestActivityPayloadCodeComment{ + Title: cut.LinesHeader, + Lines: cut.Lines, + LineStartNew: in.LineStartNew, + LineEndNew: in.LineEndNew, + }) + + err = c.writeActivity(ctx, pr, act) + + // Migrate the comment if necessary... Note: we still need to return the code comment as is. + needsNewLineMigrate := in.SourceCommitSHA != cut.LatestSourceSHA + needsOldLineMigrate := pr.MergeBaseSHA != cut.MergeBaseSHA + if err == nil && (needsNewLineMigrate || needsOldLineMigrate) { + comments := []*types.CodeComment{act.AsCodeComment()} + + if needsNewLineMigrate { + c.codeCommentMigrator.MigrateNew(ctx, repo.GitUID, cut.LatestSourceSHA, comments) + } + if needsOldLineMigrate { + c.codeCommentMigrator.MigrateOld(ctx, repo.GitUID, cut.MergeBaseSHA, comments) + } + + if errMigrateUpdate := c.codeCommentView.UpdateAll(ctx, comments); errMigrateUpdate != nil { + // non-critical error + log.Ctx(ctx).Err(errMigrateUpdate). 
+ Msgf("failed to migrate code comment to the latest source/merge-base commit SHA") + } + } + case in.ParentID != 0: + var parentAct *types.PullReqActivity + parentAct, err = c.checkIsReplyable(ctx, pr, in.ParentID) + if err != nil { + return nil, err + } + + act.ParentID = &parentAct.ID + act.Kind = parentAct.Kind + _ = act.SetPayload(types.PullRequestActivityPayloadComment{}) + + err = c.writeReplyActivity(ctx, parentAct, act) + default: + _ = act.SetPayload(types.PullRequestActivityPayloadComment{}) + err = c.writeActivity(ctx, pr, act) + } + if err != nil { + return nil, fmt.Errorf("failed to create comment: %w", err) + } + + pr, err = c.pullreqStore.UpdateOptLock(ctx, pr, func(pr *types.PullReq) error { + pr.CommentCount++ + if act.IsBlocking() { + pr.UnresolvedCount++ + } + return nil + }) + if err != nil { + // non-critical error + log.Ctx(ctx).Err(err).Msgf("failed to increment pull request comment counters") + } + + if err = c.sseStreamer.Publish(ctx, repo.ParentID, enum.SSETypePullrequesUpdated, pr); err != nil { + log.Ctx(ctx).Warn().Msg("failed to publish PR changed event") + } + + return act, nil +} + +func (c *Controller) checkIsReplyable(ctx context.Context, + pr *types.PullReq, parentID int64) (*types.PullReqActivity, error) { + // make sure the parent comment exists, belongs to the same PR and isn't itself a reply + parentAct, err := c.activityStore.Find(ctx, parentID) + if errors.Is(err, store.ErrResourceNotFound) || parentAct == nil { + return nil, usererror.BadRequest("Parent pull request activity not found.") + } + if err != nil { + return nil, fmt.Errorf("failed to find parent pull request activity: %w", err) + } + + if parentAct.PullReqID != pr.ID || parentAct.RepoID != pr.TargetRepoID { + return nil, usererror.BadRequest("Parent pull request activity doesn't belong to the same pull request.") + } + + if !parentAct.IsReplyable() { + return nil, usererror.BadRequest("Can't create a reply to the specified entry.") + } + + return parentAct, nil +} + +// writeActivity updates the PR's activity sequence number (using the optimistic locking mechanism), +// sets the correct Order value and writes the activity to the database. +// Even if the writing fails, the updating of the sequence number can succeed. +func (c *Controller) writeActivity(ctx context.Context, pr *types.PullReq, act *types.PullReqActivity) error { + prUpd, err := c.pullreqStore.UpdateActivitySeq(ctx, pr) + if err != nil { + return fmt.Errorf("failed to get pull request activity number: %w", err) + } + + *pr = *prUpd // update the pull request object + + act.Order = prUpd.ActivitySeq + + err = c.activityStore.Create(ctx, act) + if err != nil { + return fmt.Errorf("failed to create pull request activity: %w", err) + } + + return nil +} + +// writeReplyActivity updates the parent activity's reply sequence number (using the optimistic locking mechanism), +// sets the correct Order and SubOrder values and writes the activity to the database. +// Even if the writing fails, the updating of the sequence number can succeed. 
+func (c *Controller) writeReplyActivity(ctx context.Context, parent, act *types.PullReqActivity) error { + parentUpd, err := c.activityStore.UpdateOptLock(ctx, parent, func(act *types.PullReqActivity) error { + act.ReplySeq++ + return nil + }) + if err != nil { + return fmt.Errorf("failed to get pull request activity number: %w", err) + } + + *parent = *parentUpd // update the parent pull request activity object + + act.Order = parentUpd.Order + act.SubOrder = parentUpd.ReplySeq + + err = c.activityStore.Create(ctx, act) + if err != nil { + return fmt.Errorf("failed to create pull request activity: %w", err) + } + + return nil +} + +func getCommentActivity(session *auth.Session, pr *types.PullReq, in *CommentCreateInput) *types.PullReqActivity { + now := time.Now().UnixMilli() + act := &types.PullReqActivity{ + ID: 0, // Will be populated in the data layer + Version: 0, + CreatedBy: session.Principal.ID, + Created: now, + Updated: now, + Edited: now, + Deleted: nil, + ParentID: nil, // Will be filled in CommentCreate + RepoID: pr.TargetRepoID, + PullReqID: pr.ID, + Order: 0, // Will be filled in writeActivity/writeReplyActivity + SubOrder: 0, // Will be filled in writeReplyActivity + ReplySeq: 0, + Type: enum.PullReqActivityTypeComment, + Kind: enum.PullReqActivityKindComment, + Text: in.Text, + Metadata: nil, + ResolvedBy: nil, + Resolved: nil, + Author: *session.Principal.ToPrincipalInfo(), + } + + return act +} + +func setAsCodeComment(a *types.PullReqActivity, cut gitrpc.DiffCutOutput, path, sourceCommitSHA string) { + var falseBool bool + a.Type = enum.PullReqActivityTypeCodeComment + a.Kind = enum.PullReqActivityKindChangeComment + a.CodeComment = &types.CodeCommentFields{ + Outdated: falseBool, + MergeBaseSHA: cut.MergeBaseSHA, + SourceSHA: sourceCommitSHA, + Path: path, + LineNew: cut.Header.NewLine, + SpanNew: cut.Header.NewSpan, + LineOld: cut.Header.OldLine, + SpanOld: cut.Header.OldSpan, + } +} diff --git a/internal/api/controller/pullreq/comment_delete.go b/internal/api/controller/pullreq/comment_delete.go new file mode 100644 index 0000000000..c4e09d06b5 --- /dev/null +++ b/internal/api/controller/pullreq/comment_delete.go @@ -0,0 +1,84 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pullreq + +import ( + "context" + "fmt" + "time" + + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/rs/zerolog/log" +) + +// CommentDelete deletes a pull request comment. 
+func (c *Controller) CommentDelete( + ctx context.Context, + session *auth.Session, + repoRef string, + prNum int64, + commentID int64, +) error { + repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoView) + if err != nil { + return fmt.Errorf("failed to acquire access to repo: %w", err) + } + + pr, err := c.pullreqStore.FindByNumber(ctx, repo.ID, prNum) + if err != nil { + return fmt.Errorf("failed to find pull request by number: %w", err) + } + + act, err := c.getCommentCheckEditAccess(ctx, session, pr, commentID) + if err != nil { + return fmt.Errorf("failed to get comment: %w", err) + } + + if act.Deleted != nil { + return nil + } + + isBlocking := act.IsBlocking() + + _, err = c.activityStore.UpdateOptLock(ctx, act, func(act *types.PullReqActivity) error { + now := time.Now().UnixMilli() + act.Deleted = &now + return nil + }) + if err != nil { + return fmt.Errorf("failed to mark comment as deleted: %w", err) + } + + _, err = c.pullreqStore.UpdateOptLock(ctx, pr, func(pr *types.PullReq) error { + pr.CommentCount-- + if isBlocking { + pr.UnresolvedCount-- + } + return nil + }) + if err != nil { + // non-critical error + log.Ctx(ctx).Err(err).Msgf("failed to decrement pull request comment counters") + } + + if err = c.sseStreamer.Publish(ctx, repo.ParentID, enum.SSETypePullrequesUpdated, pr); err != nil { + log.Ctx(ctx).Warn().Msg("failed to publish PR changed event") + } + + return nil +} diff --git a/internal/api/controller/pullreq/comment_status.go b/internal/api/controller/pullreq/comment_status.go new file mode 100644 index 0000000000..cfec92a51d --- /dev/null +++ b/internal/api/controller/pullreq/comment_status.go @@ -0,0 +1,133 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pullreq + +import ( + "context" + "fmt" + "time" + + "github.com/harness/gitness/internal/api/controller" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/rs/zerolog/log" +) + +type CommentStatusInput struct { + Status enum.PullReqCommentStatus `json:"status"` +} + +func (in *CommentStatusInput) Validate() error { + _, ok := in.Status.Sanitize() + if !ok { + return usererror.BadRequest("Invalid value provided for comment status") + } + + return nil +} + +func (in *CommentStatusInput) hasChanges(act *types.PullReqActivity, userID int64) bool { + // clearing resolved + if in.Status == enum.PullReqCommentStatusActive { + return act.Resolved != nil + } + // setting resolved + return act.Resolved == nil || act.ResolvedBy == nil || *act.ResolvedBy != userID +} + +// CommentStatus updates a pull request comment status. 
+func (c *Controller) CommentStatus( + ctx context.Context, + session *auth.Session, + repoRef string, + prNum int64, + commentID int64, + in *CommentStatusInput, +) (*types.PullReqActivity, error) { + repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoView) + if err != nil { + return nil, fmt.Errorf("failed to acquire access to repo: %w", err) + } + + var pr *types.PullReq + var act *types.PullReqActivity + + err = controller.TxOptLock(ctx, c.db, func(ctx context.Context) error { + pr, err = c.pullreqStore.FindByNumber(ctx, repo.ID, prNum) + if err != nil { + return fmt.Errorf("failed to find pull request by number: %w", err) + } + + if errValidate := in.Validate(); errValidate != nil { + return errValidate + } + + act, err = c.getCommentCheckChangeStatusAccess(ctx, pr, commentID) + if err != nil { + return fmt.Errorf("failed to get comment: %w", err) + } + + if !in.hasChanges(act, session.Principal.ID) { + return nil + } + + act.Resolved = nil + act.ResolvedBy = nil + + now := time.Now().UnixMilli() + + if in.Status != enum.PullReqCommentStatusActive { + // In the future if we add more comment resolved statuses + // we'll add the ResolvedReason field and put the reason there. + // For now, the nullable timestamp field/db-column "Resolved" tells the status (active/resolved). + act.Resolved = &now + act.ResolvedBy = &session.Principal.ID + } + + err = c.activityStore.Update(ctx, act) + if err != nil { + return fmt.Errorf("failed to update status of pull request activity: %w", err) + } + + // Here we deliberately use the transaction and counting the unresolved comments, + // rather than optimistic locking and incrementing/decrementing the counter. + // The idea is that if the counter ever goes out of sync, this would be the place where we get it back in sync. + unresolvedCount, err := c.activityStore.CountUnresolved(ctx, pr.ID) + if err != nil { + return fmt.Errorf("failed to count unresolved comments: %w", err) + } + + pr.UnresolvedCount = unresolvedCount + + err = c.pullreqStore.Update(ctx, pr) + if err != nil { + return fmt.Errorf("failed to update pull request's unresolved comment count: %w", err) + } + + return nil + }) + if err != nil { + return nil, err + } + + if err = c.sseStreamer.Publish(ctx, repo.ParentID, enum.SSETypePullrequesUpdated, pr); err != nil { + log.Ctx(ctx).Warn().Msg("failed to publish PR changed event") + } + + return act, nil +} diff --git a/internal/api/controller/pullreq/comment_update.go b/internal/api/controller/pullreq/comment_update.go new file mode 100644 index 0000000000..c0a0b1e4c3 --- /dev/null +++ b/internal/api/controller/pullreq/comment_update.go @@ -0,0 +1,89 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package pullreq + +import ( + "context" + "fmt" + "time" + + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/rs/zerolog/log" +) + +type CommentUpdateInput struct { + Text string `json:"text"` +} + +func (in *CommentUpdateInput) Validate() error { + // TODO: Check Text length + return nil +} + +func (in *CommentUpdateInput) hasChanges(act *types.PullReqActivity) bool { + return in.Text != act.Text +} + +// CommentUpdate updates a pull request comment. +func (c *Controller) CommentUpdate( + ctx context.Context, + session *auth.Session, + repoRef string, + prNum int64, + commentID int64, + in *CommentUpdateInput, +) (*types.PullReqActivity, error) { + repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoView) + if err != nil { + return nil, fmt.Errorf("failed to acquire access to repo: %w", err) + } + + pr, err := c.pullreqStore.FindByNumber(ctx, repo.ID, prNum) + if err != nil { + return nil, fmt.Errorf("failed to find pull request by number: %w", err) + } + + if errValidate := in.Validate(); errValidate != nil { + return nil, errValidate + } + + act, err := c.getCommentCheckEditAccess(ctx, session, pr, commentID) + if err != nil { + return nil, fmt.Errorf("failed to get comment: %w", err) + } + + if !in.hasChanges(act) { + return act, nil + } + + act, err = c.activityStore.UpdateOptLock(ctx, act, func(act *types.PullReqActivity) error { + now := time.Now().UnixMilli() + act.Edited = now + act.Text = in.Text + return nil + }) + if err != nil { + return nil, fmt.Errorf("failed to update comment: %w", err) + } + + if err = c.sseStreamer.Publish(ctx, repo.ParentID, enum.SSETypePullrequesUpdated, pr); err != nil { + log.Ctx(ctx).Warn().Msg("failed to publish PR changed event") + } + + return act, nil +} diff --git a/internal/api/controller/pullreq/controller.go b/internal/api/controller/pullreq/controller.go new file mode 100644 index 0000000000..6b1bb4bc41 --- /dev/null +++ b/internal/api/controller/pullreq/controller.go @@ -0,0 +1,248 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package pullreq + +import ( + "context" + "fmt" + + "github.com/harness/gitness/gitrpc" + gitrpcenum "github.com/harness/gitness/gitrpc/enum" + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/internal/auth/authz" + pullreqevents "github.com/harness/gitness/internal/events/pullreq" + "github.com/harness/gitness/internal/services/codecomments" + "github.com/harness/gitness/internal/services/pullreq" + "github.com/harness/gitness/internal/sse" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/internal/url" + "github.com/harness/gitness/lock" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/jmoiron/sqlx" +) + +type Controller struct { + db *sqlx.DB + urlProvider *url.Provider + authorizer authz.Authorizer + pullreqStore store.PullReqStore + activityStore store.PullReqActivityStore + codeCommentView store.CodeCommentView + reviewStore store.PullReqReviewStore + reviewerStore store.PullReqReviewerStore + repoStore store.RepoStore + principalStore store.PrincipalStore + fileViewStore store.PullReqFileViewStore + gitRPCClient gitrpc.Interface + eventReporter *pullreqevents.Reporter + mtxManager lock.MutexManager + codeCommentMigrator *codecomments.Migrator + pullreqService *pullreq.Service + sseStreamer sse.Streamer +} + +func NewController( + db *sqlx.DB, + urlProvider *url.Provider, + authorizer authz.Authorizer, + pullreqStore store.PullReqStore, + pullreqActivityStore store.PullReqActivityStore, + codeCommentView store.CodeCommentView, + pullreqReviewStore store.PullReqReviewStore, + pullreqReviewerStore store.PullReqReviewerStore, + repoStore store.RepoStore, + principalStore store.PrincipalStore, + fileViewStore store.PullReqFileViewStore, + gitRPCClient gitrpc.Interface, + eventReporter *pullreqevents.Reporter, + mtxManager lock.MutexManager, + codeCommentMigrator *codecomments.Migrator, + pullreqService *pullreq.Service, + sseStreamer sse.Streamer, +) *Controller { + return &Controller{ + db: db, + urlProvider: urlProvider, + authorizer: authorizer, + pullreqStore: pullreqStore, + activityStore: pullreqActivityStore, + codeCommentView: codeCommentView, + reviewStore: pullreqReviewStore, + reviewerStore: pullreqReviewerStore, + repoStore: repoStore, + principalStore: principalStore, + fileViewStore: fileViewStore, + gitRPCClient: gitRPCClient, + codeCommentMigrator: codeCommentMigrator, + eventReporter: eventReporter, + mtxManager: mtxManager, + pullreqService: pullreqService, + sseStreamer: sseStreamer, + } +} + +func (c *Controller) verifyBranchExistence(ctx context.Context, + repo *types.Repository, branch string, +) (string, error) { + if branch == "" { + return "", usererror.BadRequest("branch name can't be empty") + } + + ref, err := c.gitRPCClient.GetRef(ctx, + gitrpc.GetRefParams{ + ReadParams: gitrpc.ReadParams{RepoUID: repo.GitUID}, + Name: branch, + Type: gitrpcenum.RefTypeBranch, + }) + if gitrpc.ErrorStatus(err) == gitrpc.StatusNotFound { + return "", usererror.BadRequest( + fmt.Sprintf("branch %s does not exist in the repository %s", branch, repo.UID)) + } + if err != nil { + return "", fmt.Errorf( + "failed to check existence of the branch %s in the repository %s: %w", + branch, repo.UID, err) + } + + return ref.SHA, nil +} + +func (c *Controller) getRepoCheckAccess(ctx context.Context, + session *auth.Session, repoRef string, reqPermission enum.Permission, +) 
(*types.Repository, error) { + if repoRef == "" { + return nil, usererror.BadRequest("A valid repository reference must be provided.") + } + + repo, err := c.repoStore.FindByRef(ctx, repoRef) + if err != nil { + return nil, fmt.Errorf("failed to find repository: %w", err) + } + + if repo.Importing { + return nil, usererror.BadRequest("Repository import is in progress.") + } + + if err = apiauth.CheckRepo(ctx, c.authorizer, session, repo, reqPermission, false); err != nil { + return nil, fmt.Errorf("access check failed: %w", err) + } + + return repo, nil +} + +func (c *Controller) getCommentCheckModifyAccess(ctx context.Context, + pr *types.PullReq, commentID int64, +) (*types.PullReqActivity, error) { + if commentID <= 0 { + return nil, usererror.BadRequest("A valid comment ID must be provided.") + } + + comment, err := c.activityStore.Find(ctx, commentID) + if err != nil { + return nil, fmt.Errorf("failed to find comment by ID: %w", err) + } + + if comment == nil { + return nil, usererror.ErrNotFound + } + + if comment.Deleted != nil || comment.RepoID != pr.TargetRepoID || comment.PullReqID != pr.ID { + return nil, usererror.ErrNotFound + } + + if comment.Kind == enum.PullReqActivityKindSystem { + return nil, usererror.BadRequest("Can't update a comment created by the system.") + } + + if comment.Type != enum.PullReqActivityTypeComment && comment.Type != enum.PullReqActivityTypeCodeComment { + return nil, usererror.BadRequest("Only comments and code comments can be edited.") + } + + return comment, nil +} + +func (c *Controller) getCommentCheckEditAccess(ctx context.Context, + session *auth.Session, pr *types.PullReq, commentID int64, +) (*types.PullReqActivity, error) { + comment, err := c.getCommentCheckModifyAccess(ctx, pr, commentID) + if err != nil { + return nil, err + } + + if comment.CreatedBy != session.Principal.ID { + return nil, usererror.BadRequest("Only own comments may be updated.") + } + + return comment, nil +} + +func (c *Controller) getCommentCheckChangeStatusAccess(ctx context.Context, + pr *types.PullReq, commentID int64, +) (*types.PullReqActivity, error) { + comment, err := c.getCommentCheckModifyAccess(ctx, pr, commentID) + if err != nil { + return nil, err + } + + if comment.SubOrder != 0 { + return nil, usererror.BadRequest("Can't change status of replies.") + } + + return comment, nil +} + +func (c *Controller) checkIfAlreadyExists(ctx context.Context, + targetRepoID, sourceRepoID int64, targetBranch, sourceBranch string, +) error { + existing, err := c.pullreqStore.List(ctx, &types.PullReqFilter{ + SourceRepoID: sourceRepoID, + SourceBranch: sourceBranch, + TargetRepoID: targetRepoID, + TargetBranch: targetBranch, + States: []enum.PullReqState{enum.PullReqStateOpen}, + Size: 1, + Sort: enum.PullReqSortNumber, + Order: enum.OrderAsc, + }) + if err != nil { + return fmt.Errorf("failed to get existing pull requests: %w", err) + } + if len(existing) > 0 { + return usererror.ConflictWithPayload( + "a pull request for this target and source branch already exists", + map[string]any{ + "type": "pr already exists", + "number": existing[0].Number, + "title": existing[0].Title, + }, + ) + } + + return nil +} + +func eventBase(pr *types.PullReq, principal *types.Principal) pullreqevents.Base { + return pullreqevents.Base{ + PullReqID: pr.ID, + SourceRepoID: pr.SourceRepoID, + TargetRepoID: pr.TargetRepoID, + Number: pr.Number, + PrincipalID: principal.ID, + } +} diff --git a/internal/api/controller/pullreq/file_view_add.go b/internal/api/controller/pullreq/file_view_add.go 
new file mode 100644 index 0000000000..e4feace797 --- /dev/null +++ b/internal/api/controller/pullreq/file_view_add.go @@ -0,0 +1,164 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pullreq + +import ( + "context" + "fmt" + "time" + + "github.com/harness/gitness/gitrpc" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +type FileViewAddInput struct { + Path string `json:"path"` + CommitSHA string `json:"commit_sha"` +} + +func (f *FileViewAddInput) Validate() error { + if f.Path == "" { + return usererror.BadRequest("path can't be empty") + } + if !gitrpc.ValidateCommitSHA(f.CommitSHA) { + return usererror.BadRequest("commit_sha is invalid") + } + + return nil +} + +// FileViewAdd marks a file as viewed. +// NOTE: +// We take the commit SHA from the user to ensure we mark as viewed only what the user actually sees. +// The downside is that the caller could provide a SHA that never was part of the PR in the first place. +// We can't block against that with our current data, as the existence of force push makes it impossible to verify +// whether the commit ever was part of the PR - it would require us to store the full pr.SourceSHA history. 
+func (c *Controller) FileViewAdd( + ctx context.Context, + session *auth.Session, + repoRef string, + prNum int64, + in *FileViewAddInput, +) (*types.PullReqFileView, error) { + if err := in.Validate(); err != nil { + return nil, err + } + + repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoView) + if err != nil { + return nil, fmt.Errorf("failed to acquire access to repo: %w", err) + } + + pr, err := c.pullreqStore.FindByNumber(ctx, repo.ID, prNum) + if err != nil { + return nil, fmt.Errorf("failed to find pull request by number: %w", err) + } + + // retrieve file from both provided SHA and mergeBaseSHA to validate user input + // TODO: Add GITRPC call to get multiple tree nodes at once + + inNode, err := c.gitRPCClient.GetTreeNode(ctx, &gitrpc.GetTreeNodeParams{ + ReadParams: gitrpc.CreateRPCReadParams(repo), + GitREF: in.CommitSHA, + Path: in.Path, + IncludeLatestCommit: false, + }) + if err != nil && gitrpc.ErrorStatus(err) != gitrpc.StatusPathNotFound { + return nil, fmt.Errorf( + "failed to get tree node '%s' for provided sha '%s' from gitrpc: %w", + in.Path, + in.CommitSHA, + err, + ) + } + + // ensure provided path actually points to a blob or commit (submodule) + if inNode != nil && + inNode.Node.Type != gitrpc.TreeNodeTypeBlob && + inNode.Node.Type != gitrpc.TreeNodeTypeCommit { + return nil, usererror.BadRequestf("Provided path '%s' doesn't point to a file.", in.Path) + } + + mergeBaseNode, err := c.gitRPCClient.GetTreeNode(ctx, &gitrpc.GetTreeNodeParams{ + ReadParams: gitrpc.CreateRPCReadParams(repo), + GitREF: pr.MergeBaseSHA, + Path: in.Path, + IncludeLatestCommit: false, + }) + if err != nil && gitrpc.ErrorStatus(err) != gitrpc.StatusPathNotFound { + return nil, fmt.Errorf( + "failed to get tree node '%s' for MergeBaseSHA '%s' from gitrpc: %w", + in.Path, + pr.MergeBaseSHA, + err, + ) + } + + // fail the call in case the file doesn't exist in either, or in case it didn't change. + // NOTE: There is a RARE chance if the user provides an old SHA AND there's a new mergeBaseSHA + // which now already contains the changes, that we return an error saying there are no changes + // (even though with the old merge base there were). Effectively, it would lead to the users + // 'file viewed' resetting when the user refreshes the page - we are okay with that. + if inNode == nil && mergeBaseNode == nil { + return nil, usererror.BadRequestf( + "File '%s' neither found for merge-base '%s' nor for provided sha '%s'.", + in.Path, + pr.MergeBaseSHA, + in.CommitSHA, + ) + } + if inNode != nil && mergeBaseNode != nil && inNode.Node.SHA == mergeBaseNode.Node.SHA { + return nil, usererror.BadRequestf( + "File '%s' is not part of changes between merge-base '%s' and provided sha '%s'.", + in.Path, + pr.MergeBaseSHA, + in.CommitSHA, + ) + } + + // in case of deleted file set sha to nilsha - that's how git diff treats it, too. + sha := types.NilSHA + if inNode != nil { + sha = inNode.Node.SHA + } + + now := time.Now().UnixMilli() + fileView := &types.PullReqFileView{ + PullReqID: pr.ID, + PrincipalID: session.Principal.ID, + + Path: in.Path, + SHA: sha, + + // always add as non-obsolete, even if the file view is derived from a non-latest commit sha. + // The file sha ensures that the user's review is out of date in case the file changed in the meanwhile. + // And in the rare case of the file having changed and changed back between the two commits, + // the content the reviewer just approved on is the same, so it's actually good to not mark it as obsolete. 
+ Obsolete: false, + + Created: now, + Updated: now, + } + + err = c.fileViewStore.Upsert(ctx, fileView) + if err != nil { + return nil, fmt.Errorf("failed to upsert file view information in db: %w", err) + } + + return fileView, nil +} diff --git a/internal/api/controller/pullreq/file_view_delete.go b/internal/api/controller/pullreq/file_view_delete.go new file mode 100644 index 0000000000..a400113b31 --- /dev/null +++ b/internal/api/controller/pullreq/file_view_delete.go @@ -0,0 +1,54 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pullreq + +import ( + "context" + "fmt" + + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types/enum" +) + +// FileViewDelete removes a file from being marked as viewed. +func (c *Controller) FileViewDelete( + ctx context.Context, + session *auth.Session, + repoRef string, + prNum int64, + filePath string, +) error { + repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoView) + if err != nil { + return fmt.Errorf("failed to acquire access to repo: %w", err) + } + + pr, err := c.pullreqStore.FindByNumber(ctx, repo.ID, prNum) + if err != nil { + return fmt.Errorf("failed to find pull request by number: %w", err) + } + + if filePath == "" { + return usererror.BadRequest("file path can't be empty") + } + + err = c.fileViewStore.DeleteByFileForPrincipal(ctx, pr.ID, session.Principal.ID, filePath) + if err != nil { + return fmt.Errorf("failed to delete file view entry in db: %w", err) + } + + return nil +} diff --git a/internal/api/controller/pullreq/file_view_list.go b/internal/api/controller/pullreq/file_view_list.go new file mode 100644 index 0000000000..8b12f2855f --- /dev/null +++ b/internal/api/controller/pullreq/file_view_list.go @@ -0,0 +1,49 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pullreq + +import ( + "context" + "fmt" + + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +// FileViewList lists all files of the PR marked as viewed for the user. 
+func (c *Controller) FileViewList( + ctx context.Context, + session *auth.Session, + repoRef string, + prNum int64, +) ([]*types.PullReqFileView, error) { + repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoView) + if err != nil { + return nil, fmt.Errorf("failed to acquire access to repo: %w", err) + } + + pr, err := c.pullreqStore.FindByNumber(ctx, repo.ID, prNum) + if err != nil { + return nil, fmt.Errorf("failed to find pull request by number: %w", err) + } + + fileViews, err := c.fileViewStore.List(ctx, pr.ID, session.Principal.ID) + if err != nil { + return nil, fmt.Errorf("failed to read file view entries for user from db: %w", err) + } + + return fileViews, nil +} diff --git a/internal/api/controller/pullreq/locks.go b/internal/api/controller/pullreq/locks.go new file mode 100644 index 0000000000..7355de11ab --- /dev/null +++ b/internal/api/controller/pullreq/locks.go @@ -0,0 +1,29 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pullreq + +import ( + "strconv" + + "github.com/harness/gitness/lock" +) + +func (c *Controller) newMutexForPR(repoUID string, pr int64, options ...lock.Option) (lock.Mutex, error) { + key := repoUID + "/pulls" + if pr != 0 { + key += "/" + strconv.FormatInt(pr, 10) + } + return c.mtxManager.NewMutex(key, append(options, lock.WithNamespace("repo"))...) +} diff --git a/internal/api/controller/pullreq/mapper.go b/internal/api/controller/pullreq/mapper.go new file mode 100644 index 0000000000..3a9110e64b --- /dev/null +++ b/internal/api/controller/pullreq/mapper.go @@ -0,0 +1,27 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pullreq + +import ( + "github.com/harness/gitness/gitrpc" + "github.com/harness/gitness/types" +) + +func rpcIdentityFromPrincipal(p types.Principal) *gitrpc.Identity { + return &gitrpc.Identity{ + Name: p.DisplayName, + Email: p.Email, + } +} diff --git a/internal/api/controller/pullreq/merge.go b/internal/api/controller/pullreq/merge.go new file mode 100644 index 0000000000..5425db840c --- /dev/null +++ b/internal/api/controller/pullreq/merge.go @@ -0,0 +1,208 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pullreq + +import ( + "context" + "fmt" + "time" + + "github.com/harness/gitness/gitrpc" + gitrpcenum "github.com/harness/gitness/gitrpc/enum" + "github.com/harness/gitness/internal/api/controller" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/internal/bootstrap" + pullreqevents "github.com/harness/gitness/internal/events/pullreq" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/rs/zerolog/log" +) + +type MergeInput struct { + Method enum.MergeMethod `json:"method"` + SourceSHA string `json:"source_sha"` +} + +// Merge merges the pull request. + +//nolint:gocognit +func (c *Controller) Merge( + ctx context.Context, + session *auth.Session, + repoRef string, + pullreqNum int64, + in *MergeInput, +) (types.MergeResponse, error) { + var ( + sha string + pr *types.PullReq + ) + + method, ok := in.Method.Sanitize() + if !ok { + return types.MergeResponse{}, usererror.BadRequest( + fmt.Sprintf("wrong merge method type: %s", in.Method)) + } + in.Method = method + + targetRepo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoEdit) + if err != nil { + return types.MergeResponse{}, fmt.Errorf("failed to acquire access to target repo: %w", err) + } + + // if two merge requests come in at the same time, the mutex will lock the first + // one and make the second one wait. Once the first one is done, the second one + // continues with the latest data from the db (state merged) and returns an error + // that the pr is already merged. + mutex, err := c.newMutexForPR(targetRepo.GitUID, 0) // 0 means lock all PRs for this repo + if err != nil { + return types.MergeResponse{}, err + } + err = mutex.Lock(ctx) + if err != nil { + return types.MergeResponse{}, err + } + defer func() { + _ = mutex.Unlock(ctx) + }() + + pr, err = c.pullreqStore.FindByNumber(ctx, targetRepo.ID, pullreqNum) + if err != nil { + return types.MergeResponse{}, fmt.Errorf("failed to get pull request by number: %w", err) + } + + if pr.Merged != nil { + return types.MergeResponse{}, usererror.BadRequest("Pull request already merged") + } + + if pr.State != enum.PullReqStateOpen { + return types.MergeResponse{}, usererror.BadRequest("Pull request must be open") + } + + if pr.IsDraft { + return types.MergeResponse{}, usererror.BadRequest( + "Draft pull requests can't be merged. Clear the draft flag first.", + ) + } + + reviewers, err := c.reviewerStore.List(ctx, pr.ID) + if err != nil { + return types.MergeResponse{}, fmt.Errorf("failed to load list of reviewers: %w", err) + } + + // TODO: We need to extend this section. A review decision might be for an older commit. + // TODO: Repository admin users should be able to override this and proceed with the merge.
+ for _, reviewer := range reviewers { + if reviewer.ReviewDecision == enum.PullReqReviewDecisionChangeReq { + return types.MergeResponse{}, usererror.BadRequest("At least one reviewer still requests changes.") + } + } + + sourceRepo := targetRepo + if pr.SourceRepoID != pr.TargetRepoID { + sourceRepo, err = c.repoStore.Find(ctx, pr.SourceRepoID) + if err != nil { + return types.MergeResponse{}, fmt.Errorf("failed to get source repository: %w", err) + } + } + + var writeParams gitrpc.WriteParams + writeParams, err = controller.CreateRPCWriteParams(ctx, c.urlProvider, session, targetRepo) + if err != nil { + return types.MergeResponse{}, fmt.Errorf("failed to create RPC write params: %w", err) + } + + // TODO: for forking merge title might be different? + var mergeTitle string + if in.Method == enum.MergeMethod(gitrpcenum.MergeMethodSquash) { + mergeTitle = fmt.Sprintf("%s (#%d)", pr.Title, pr.Number) + } else { + mergeTitle = fmt.Sprintf("Merge branch '%s' of %s (#%d)", pr.SourceBranch, sourceRepo.Path, pr.Number) + } + + now := time.Now() + var mergeOutput gitrpc.MergeOutput + mergeOutput, err = c.gitRPCClient.Merge(ctx, &gitrpc.MergeParams{ + WriteParams: writeParams, + BaseBranch: pr.TargetBranch, + HeadRepoUID: sourceRepo.GitUID, + HeadBranch: pr.SourceBranch, + Title: mergeTitle, + Message: "", + Committer: rpcIdentityFromPrincipal(bootstrap.NewSystemServiceSession().Principal), + CommitterDate: &now, + Author: rpcIdentityFromPrincipal(session.Principal), + AuthorDate: &now, + RefType: gitrpcenum.RefTypeBranch, + RefName: pr.TargetBranch, + HeadExpectedSHA: in.SourceSHA, + Method: gitrpcenum.MergeMethod(in.Method), + }) + if err != nil { + if gitrpc.ErrorStatus(err) == gitrpc.StatusNotMergeable { + return types.MergeResponse{ + ConflictFiles: gitrpc.AsConflictFilesError(err), + }, nil + } + return types.MergeResponse{}, fmt.Errorf("merge check execution failed: %w", err) + } + + pr, err = c.pullreqStore.UpdateOptLock(ctx, pr, func(pr *types.PullReq) error { + pr.State = enum.PullReqStateMerged + + now := time.Now().UnixMilli() + pr.Merged = &now + pr.MergedBy = &session.Principal.ID + pr.MergeMethod = &in.Method + + // update all Merge specific information (might be empty if previous merge check failed) + pr.MergeCheckStatus = enum.MergeCheckStatusMergeable + pr.MergeTargetSHA = &mergeOutput.BaseSHA + pr.MergeBaseSHA = mergeOutput.MergeBaseSHA + pr.MergeSHA = &mergeOutput.MergeSHA + pr.MergeConflicts = nil + + pr.ActivitySeq++ // because we need to write the activity entry + return nil + }) + if err != nil { + return types.MergeResponse{}, fmt.Errorf("failed to update pull request: %w", err) + } + + activityPayload := &types.PullRequestActivityPayloadMerge{ + MergeMethod: in.Method, + MergeSHA: mergeOutput.MergeSHA, + TargetSHA: mergeOutput.BaseSHA, + SourceSHA: mergeOutput.HeadSHA, + } + if _, errAct := c.activityStore.CreateWithPayload(ctx, pr, session.Principal.ID, activityPayload); errAct != nil { + // non-critical error + log.Ctx(ctx).Err(errAct).Msgf("failed to write pull req merge activity") + } + + c.eventReporter.Merged(ctx, &pullreqevents.MergedPayload{ + Base: eventBase(pr, &session.Principal), + MergeMethod: in.Method, + MergeSHA: mergeOutput.MergeSHA, + TargetSHA: mergeOutput.BaseSHA, + SourceSHA: mergeOutput.HeadSHA, + }) + + return types.MergeResponse{ + SHA: sha, + }, nil +} diff --git a/internal/api/controller/pullreq/pr_commits.go b/internal/api/controller/pullreq/pr_commits.go new file mode 100644 index 0000000000..31d6d18684 --- /dev/null +++ 
b/internal/api/controller/pullreq/pr_commits.go @@ -0,0 +1,71 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pullreq + +import ( + "context" + "fmt" + + "github.com/harness/gitness/gitrpc" + "github.com/harness/gitness/internal/api/controller" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +// Commits lists all commits from pr head branch. +func (c *Controller) Commits( + ctx context.Context, + session *auth.Session, + repoRef string, + pullreqNum int64, + filter *types.PaginationFilter, +) ([]types.Commit, error) { + repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoView) + if err != nil { + return nil, fmt.Errorf("failed to acquire access to repo: %w", err) + } + + pr, err := c.pullreqStore.FindByNumber(ctx, repo.ID, pullreqNum) + if err != nil { + return nil, fmt.Errorf("failed to get pull request by number: %w", err) + } + + gitRef := pr.SourceSHA + afterRef := pr.MergeBaseSHA + + rpcOut, err := c.gitRPCClient.ListCommits(ctx, &gitrpc.ListCommitsParams{ + ReadParams: gitrpc.CreateRPCReadParams(repo), + GitREF: gitRef, + After: afterRef, + Page: int32(filter.Page), + Limit: int32(filter.Limit), + }) + if err != nil { + return nil, err + } + + commits := make([]types.Commit, len(rpcOut.Commits)) + for i := range rpcOut.Commits { + var commit *types.Commit + commit, err = controller.MapCommit(&rpcOut.Commits[i]) + if err != nil { + return nil, fmt.Errorf("failed to map commit: %w", err) + } + commits[i] = *commit + } + + return commits, nil +} diff --git a/internal/api/controller/pullreq/pr_create.go b/internal/api/controller/pullreq/pr_create.go new file mode 100644 index 0000000000..67b0cb17a5 --- /dev/null +++ b/internal/api/controller/pullreq/pr_create.go @@ -0,0 +1,167 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package pullreq + +import ( + "context" + "fmt" + "strings" + "time" + + "github.com/harness/gitness/gitrpc" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/internal/auth" + pullreqevents "github.com/harness/gitness/internal/events/pullreq" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/rs/zerolog/log" +) + +type CreateInput struct { + IsDraft bool `json:"is_draft"` + + Title string `json:"title"` + Description string `json:"description"` + + SourceRepoRef string `json:"source_repo_ref"` + SourceBranch string `json:"source_branch"` + TargetBranch string `json:"target_branch"` +} + +// Create creates a new pull request. +func (c *Controller) Create( + ctx context.Context, + session *auth.Session, + repoRef string, + in *CreateInput, +) (*types.PullReq, error) { + in.Title = strings.TrimSpace(in.Title) + if in.Title == "" { + return nil, usererror.BadRequest("pull request title can't be empty") + } + + targetRepo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoPush) + if err != nil { + return nil, fmt.Errorf("failed to acquire access to target repo: %w", err) + } + + sourceRepo := targetRepo + if in.SourceRepoRef != "" { + sourceRepo, err = c.getRepoCheckAccess(ctx, session, in.SourceRepoRef, enum.PermissionRepoView) + if err != nil { + return nil, fmt.Errorf("failed to acquire access to source repo: %w", err) + } + } + + if sourceRepo.ID == targetRepo.ID && in.TargetBranch == in.SourceBranch { + return nil, usererror.BadRequest("target and source branch can't be the same") + } + + var sourceSHA string + + if sourceSHA, err = c.verifyBranchExistence(ctx, sourceRepo, in.SourceBranch); err != nil { + return nil, err + } + + if _, err = c.verifyBranchExistence(ctx, targetRepo, in.TargetBranch); err != nil { + return nil, err + } + + if err = c.checkIfAlreadyExists(ctx, targetRepo.ID, sourceRepo.ID, in.TargetBranch, in.SourceBranch); err != nil { + return nil, err + } + + mergeBaseResult, err := c.gitRPCClient.MergeBase(ctx, gitrpc.MergeBaseParams{ + ReadParams: gitrpc.ReadParams{RepoUID: sourceRepo.GitUID}, + Ref1: in.SourceBranch, + Ref2: in.TargetBranch, + }) + if err != nil { + return nil, fmt.Errorf("failed to find merge base: %w", err) + } + + mergeBaseSHA := mergeBaseResult.MergeBaseSHA + + if mergeBaseSHA == sourceSHA { + return nil, usererror.BadRequest("The source branch doesn't contain any new commits") + } + + targetRepo, err = c.repoStore.UpdateOptLock(ctx, targetRepo, func(repo *types.Repository) error { + repo.PullReqSeq++ + return nil + }) + if err != nil { + return nil, fmt.Errorf("failed to acquire PullReqSeq number: %w", err) + } + + pr := newPullReq(session, targetRepo.PullReqSeq, sourceRepo, targetRepo, in, sourceSHA, mergeBaseSHA) + + err = c.pullreqStore.Create(ctx, pr) + if err != nil { + return nil, fmt.Errorf("pullreq creation failed: %w", err) + } + + c.eventReporter.Created(ctx, &pullreqevents.CreatedPayload{ + Base: eventBase(pr, &session.Principal), + SourceBranch: in.SourceBranch, + TargetBranch: in.TargetBranch, + SourceSHA: sourceSHA, + }) + + if err = c.sseStreamer.Publish(ctx, targetRepo.ParentID, enum.SSETypePullrequesUpdated, pr); err != nil { + log.Ctx(ctx).Warn().Msg("failed to publish PR changed event") + } + + return pr, nil +} + +// newPullReq creates a new pull request object.
+func newPullReq( + session *auth.Session, + number int64, + sourceRepo *types.Repository, + targetRepo *types.Repository, + in *CreateInput, + sourceSHA, mergeBaseSHA string, +) *types.PullReq { + now := time.Now().UnixMilli() + return &types.PullReq{ + ID: 0, // the ID will be populated in the data layer + Version: 0, + Number: number, + CreatedBy: session.Principal.ID, + Created: now, + Updated: now, + Edited: now, + State: enum.PullReqStateOpen, + IsDraft: in.IsDraft, + Title: in.Title, + Description: in.Description, + SourceRepoID: sourceRepo.ID, + SourceBranch: in.SourceBranch, + SourceSHA: sourceSHA, + TargetRepoID: targetRepo.ID, + TargetBranch: in.TargetBranch, + ActivitySeq: 0, + MergedBy: nil, + Merged: nil, + MergeCheckStatus: enum.MergeCheckStatusUnchecked, + MergeMethod: nil, + MergeBaseSHA: mergeBaseSHA, + Author: *session.Principal.ToPrincipalInfo(), + Merger: nil, + } +} diff --git a/internal/api/controller/pullreq/pr_find.go b/internal/api/controller/pullreq/pr_find.go new file mode 100644 index 0000000000..01f7da9527 --- /dev/null +++ b/internal/api/controller/pullreq/pr_find.go @@ -0,0 +1,65 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pullreq + +import ( + "context" + "fmt" + + "github.com/harness/gitness/gitrpc" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +// Find returns a pull request from the provided repository. +func (c *Controller) Find( + ctx context.Context, + session *auth.Session, + repoRef string, + pullreqNum int64, +) (*types.PullReq, error) { + if pullreqNum <= 0 { + return nil, usererror.BadRequest("A valid pull request number must be provided.") + } + + repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoView) + if err != nil { + return nil, fmt.Errorf("failed to acquire access to the repo: %w", err) + } + + pr, err := c.pullreqStore.FindByNumber(ctx, repo.ID, pullreqNum) + if err != nil { + return nil, err + } + + headRef := pr.SourceSHA + baseRef := pr.MergeBaseSHA + + output, err := c.gitRPCClient.DiffStats(ctx, &gitrpc.DiffParams{ + ReadParams: gitrpc.CreateRPCReadParams(repo), + BaseRef: baseRef, + HeadRef: headRef, + }) + if err != nil { + return nil, err + } + + pr.Stats.DiffStats.Commits = output.Commits + pr.Stats.DiffStats.FilesChanged = output.FilesChanged + + return pr, nil +} diff --git a/internal/api/controller/pullreq/pr_list.go b/internal/api/controller/pullreq/pr_list.go new file mode 100644 index 0000000000..e0086d2c03 --- /dev/null +++ b/internal/api/controller/pullreq/pr_list.go @@ -0,0 +1,78 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pullreq + +import ( + "context" + "fmt" + + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/store/database/dbtx" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +// List returns a list of pull requests from the provided repository. +func (c *Controller) List( + ctx context.Context, + session *auth.Session, + repoRef string, + filter *types.PullReqFilter, +) ([]*types.PullReq, int64, error) { + repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoView) + if err != nil { + return nil, 0, fmt.Errorf("failed to acquire access to target repo: %w", err) + } + + if filter.SourceRepoRef == repoRef { + filter.SourceRepoID = repo.ID + } else if filter.SourceRepoRef != "" { + var sourceRepo *types.Repository + sourceRepo, err = c.getRepoCheckAccess(ctx, session, filter.SourceRepoRef, enum.PermissionRepoView) + if err != nil { + return nil, 0, fmt.Errorf("failed to acquire access to source repo: %w", err) + } + filter.SourceRepoID = sourceRepo.ID + } + + var list []*types.PullReq + var count int64 + + filter.TargetRepoID = repo.ID + + err = dbtx.New(c.db).WithTx(ctx, func(ctx context.Context) error { + list, err = c.pullreqStore.List(ctx, filter) + if err != nil { + return fmt.Errorf("failed to list pull requests: %w", err) + } + + if filter.Page == 1 && len(list) < filter.Size { + count = int64(len(list)) + return nil + } + + count, err = c.pullreqStore.Count(ctx, filter) + if err != nil { + return fmt.Errorf("failed to count pull requests: %w", err) + } + + return nil + }, dbtx.TxDefaultReadOnly) + if err != nil { + return nil, 0, err + } + + return list, count, nil +} diff --git a/internal/api/controller/pullreq/pr_recheck.go b/internal/api/controller/pullreq/pr_recheck.go new file mode 100644 index 0000000000..21ee8ce738 --- /dev/null +++ b/internal/api/controller/pullreq/pr_recheck.go @@ -0,0 +1,43 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pullreq + +import ( + "context" + "fmt" + + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types/enum" +) + +// Recheck re-checks all system PR checks (mergeability check, ...). 
+func (c *Controller) Recheck( + ctx context.Context, + session *auth.Session, + repoRef string, + prNum int64, +) error { + repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoPush) + if err != nil { + return fmt.Errorf("failed to acquire access to repo: %w", err) + } + + err = c.pullreqService.UpdateMergeDataIfRequired(ctx, repo.ID, prNum) + if err != nil { + return fmt.Errorf("failed to refresh merge data: %w", err) + } + + return nil +} diff --git a/internal/api/controller/pullreq/pr_state.go b/internal/api/controller/pullreq/pr_state.go new file mode 100644 index 0000000000..9fa190976e --- /dev/null +++ b/internal/api/controller/pullreq/pr_state.go @@ -0,0 +1,199 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pullreq + +import ( + "context" + "fmt" + "strings" + "time" + + "github.com/harness/gitness/gitrpc" + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/internal/auth" + pullreqevents "github.com/harness/gitness/internal/events/pullreq" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/rs/zerolog/log" +) + +type StateInput struct { + State enum.PullReqState `json:"state"` + IsDraft bool `json:"is_draft"` + Message string `json:"message"` +} + +func (in *StateInput) Check() error { + state, ok := in.State.Sanitize() // Sanitize will pass through also merged state, so we must check later for it. + if !ok { + return usererror.BadRequest(fmt.Sprintf("Allowed states are: %s and %s", + enum.PullReqStateOpen, enum.PullReqStateClosed)) + } + + in.State = state + in.Message = strings.TrimSpace(in.Message) + + if in.State == enum.PullReqStateMerged { + return usererror.BadRequest("Pull requests can't be merged with this API") + } + + // TODO: Need to check the length of the message string + + return nil +} + +// State updates the pull request's current state. 
+// +//nolint:gocognit,funlen +func (c *Controller) State(ctx context.Context, + session *auth.Session, repoRef string, pullreqNum int64, in *StateInput, +) (*types.PullReq, error) { + if err := in.Check(); err != nil { + return nil, err + } + + targetRepo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoPush) + if err != nil { + return nil, fmt.Errorf("failed to acquire access to target repo: %w", err) + } + + pr, err := c.pullreqStore.FindByNumber(ctx, targetRepo.ID, pullreqNum) + if err != nil { + return nil, fmt.Errorf("failed to get pull request by number: %w", err) + } + + sourceRepo := targetRepo + if pr.SourceRepoID != pr.TargetRepoID { + sourceRepo, err = c.repoStore.Find(ctx, pr.SourceRepoID) + if err != nil { + return nil, fmt.Errorf("failed to get source repo by id: %w", err) + } + + if err = apiauth.CheckRepo(ctx, c.authorizer, session, sourceRepo, + enum.PermissionRepoView, false); err != nil { + return nil, fmt.Errorf("failed to acquire access to source repo: %w", err) + } + } + + if pr.State == enum.PullReqStateMerged { + return nil, usererror.BadRequest("Merged pull requests can't be modified.") + } + + if pr.State == in.State && in.IsDraft == pr.IsDraft { + return pr, nil // no changes are necessary: state is the same and is_draft hasn't change + } + + oldState := pr.State + oldDraft := pr.IsDraft + + type change int + const ( + changeReopen change = iota + 1 + changeClose + ) + + var sourceSHA string + var mergeBaseSHA string + var stateChange change + + //nolint:nestif // refactor if needed + if pr.State != enum.PullReqStateOpen && in.State == enum.PullReqStateOpen { + if sourceSHA, err = c.verifyBranchExistence(ctx, sourceRepo, pr.SourceBranch); err != nil { + return nil, err + } + + if _, err = c.verifyBranchExistence(ctx, targetRepo, pr.TargetBranch); err != nil { + return nil, err + } + + err = c.checkIfAlreadyExists(ctx, pr.TargetRepoID, pr.SourceRepoID, pr.TargetBranch, pr.SourceBranch) + if err != nil { + return nil, err + } + + var mergeBaseResult gitrpc.MergeBaseOutput + + mergeBaseResult, err = c.gitRPCClient.MergeBase(ctx, gitrpc.MergeBaseParams{ + ReadParams: gitrpc.ReadParams{RepoUID: sourceRepo.GitUID}, + Ref1: pr.SourceBranch, + Ref2: pr.TargetBranch, + }) + if err != nil { + return nil, fmt.Errorf("failed to find merge base: %w", err) + } + + mergeBaseSHA = mergeBaseResult.MergeBaseSHA + + stateChange = changeReopen + } else if pr.State == enum.PullReqStateOpen && in.State != enum.PullReqStateOpen { + stateChange = changeClose + } + + pr, err = c.pullreqStore.UpdateOptLock(ctx, pr, func(pr *types.PullReq) error { + pr.State = in.State + pr.IsDraft = in.IsDraft + pr.Edited = time.Now().UnixMilli() + + switch stateChange { + case changeClose: + // clear all merge (check) related fields + pr.MergeCheckStatus = enum.MergeCheckStatusUnchecked + pr.MergeSHA = nil + pr.MergeConflicts = nil + case changeReopen: + pr.SourceSHA = sourceSHA + pr.MergeBaseSHA = mergeBaseSHA + } + + pr.ActivitySeq++ // because we need to add the activity entry + return nil + }) + if err != nil { + return nil, fmt.Errorf("failed to update pull request: %w", err) + } + + payload := &types.PullRequestActivityPayloadStateChange{ + Old: oldState, + New: pr.State, + OldDraft: oldDraft, + NewDraft: pr.IsDraft, + Message: in.Message, + } + if _, errAct := c.activityStore.CreateWithPayload(ctx, pr, session.Principal.ID, payload); errAct != nil { + // non-critical error + log.Ctx(ctx).Err(errAct).Msgf("failed to write pull request activity after state change") + } + + 
switch stateChange { + case changeReopen: + c.eventReporter.Reopened(ctx, &pullreqevents.ReopenedPayload{ + Base: eventBase(pr, &session.Principal), + SourceSHA: sourceSHA, + MergeBaseSHA: mergeBaseSHA, + }) + case changeClose: + c.eventReporter.Closed(ctx, &pullreqevents.ClosedPayload{ + Base: eventBase(pr, &session.Principal), + }) + } + + if err = c.sseStreamer.Publish(ctx, targetRepo.ParentID, enum.SSETypePullrequesUpdated, pr); err != nil { + log.Ctx(ctx).Warn().Msg("failed to publish PR changed event") + } + + return pr, nil +} diff --git a/internal/api/controller/pullreq/pr_update.go b/internal/api/controller/pullreq/pr_update.go new file mode 100644 index 0000000000..56f220c7fa --- /dev/null +++ b/internal/api/controller/pullreq/pr_update.go @@ -0,0 +1,118 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pullreq + +import ( + "context" + "fmt" + "strings" + "time" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/rs/zerolog/log" +) + +type UpdateInput struct { + Title string `json:"title"` + Description string `json:"description"` +} + +func (in *UpdateInput) Check() error { + in.Title = strings.TrimSpace(in.Title) + if in.Title == "" { + return usererror.BadRequest("pull request title can't be empty") + } + + in.Description = strings.TrimSpace(in.Description) + + // TODO: Check the length of the input strings + + return nil +} + +// Update updates an pull request. 
+func (c *Controller) Update(ctx context.Context, + session *auth.Session, repoRef string, pullreqNum int64, in *UpdateInput, +) (*types.PullReq, error) { + if err := in.Check(); err != nil { + return nil, err + } + + targetRepo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoPush) + if err != nil { + return nil, fmt.Errorf("failed to acquire access to target repo: %w", err) + } + + pr, err := c.pullreqStore.FindByNumber(ctx, targetRepo.ID, pullreqNum) + if err != nil { + return nil, fmt.Errorf("failed to get pull request by number: %w", err) + } + + if pr.SourceRepoID != pr.TargetRepoID { + var sourceRepo *types.Repository + + sourceRepo, err = c.repoStore.Find(ctx, pr.SourceRepoID) + if err != nil { + return nil, fmt.Errorf("failed to get source repo by id: %w", err) + } + + if err = apiauth.CheckRepo(ctx, c.authorizer, session, sourceRepo, + enum.PermissionRepoView, false); err != nil { + return nil, fmt.Errorf("failed to acquire access to source repo: %w", err) + } + } + + if pr.Title == in.Title && pr.Description == in.Description { + return pr, nil + } + + needToWriteActivity := in.Title != pr.Title + oldTitle := pr.Title + + pr, err = c.pullreqStore.UpdateOptLock(ctx, pr, func(pr *types.PullReq) error { + pr.Title = in.Title + pr.Description = in.Description + pr.Edited = time.Now().UnixMilli() + if needToWriteActivity { + pr.ActivitySeq++ + } + return nil + }) + if err != nil { + return pr, fmt.Errorf("failed to update pull request: %w", err) + } + + if needToWriteActivity { + payload := &types.PullRequestActivityPayloadTitleChange{ + Old: oldTitle, + New: pr.Title, + } + if _, errAct := c.activityStore.CreateWithPayload(ctx, pr, session.Principal.ID, payload); errAct != nil { + // non-critical error + log.Ctx(ctx).Err(errAct).Msgf("failed to write pull request activity after title change") + } + } + + if err = c.sseStreamer.Publish(ctx, targetRepo.ParentID, enum.SSETypePullrequesUpdated, pr); err != nil { + log.Ctx(ctx).Warn().Msg("failed to publish PR changed event") + } + + return pr, nil +} diff --git a/internal/api/controller/pullreq/review_submit.go b/internal/api/controller/pullreq/review_submit.go new file mode 100644 index 0000000000..96166431e0 --- /dev/null +++ b/internal/api/controller/pullreq/review_submit.go @@ -0,0 +1,182 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
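Before the review submission code, a caller sketch for the Update handler above. Again, not part of the patch: ctrl, session, repoRef and prNum are assumed as in the earlier sketch. Only the title and description can be changed through this endpoint; state transitions go through Controller.State, and a title-change activity is recorded only when the title actually differs.

package example

import (
    "context"
    "fmt"

    "github.com/harness/gitness/internal/api/controller/pullreq"
    "github.com/harness/gitness/internal/auth"
)

// retitlePR updates a pull request's title and description via the controller.
func retitlePR(ctx context.Context, ctrl *pullreq.Controller, session *auth.Session,
    repoRef string, prNum int64) error {
    pr, err := ctrl.Update(ctx, session, repoRef, prNum, &pullreq.UpdateInput{
        Title:       "Fix webhook retry backoff",
        Description: "Retries now use exponential backoff with jitter.",
    })
    if err != nil {
        return fmt.Errorf("updating pull request failed: %w", err)
    }
    fmt.Printf("PR #%d updated (version %d)\n", pr.Number, pr.Version)
    return nil
}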
+ +package pullreq + +import ( + "context" + "errors" + "fmt" + "strings" + "time" + + "github.com/harness/gitness/gitrpc" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/store" + "github.com/harness/gitness/store/database/dbtx" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/rs/zerolog/log" +) + +type ReviewSubmitInput struct { + CommitSHA string `json:"commit_sha"` + Decision enum.PullReqReviewDecision `json:"decision"` + Message string `json:"message"` +} + +func (in *ReviewSubmitInput) Validate() error { + if in.CommitSHA == "" { + return usererror.BadRequest("CommitSHA is a mandatory field") + } + + decision, ok := in.Decision.Sanitize() + if !ok || decision == enum.PullReqReviewDecisionPending { + msg := fmt.Sprintf("Decision must be: %q, %q or %q.", + enum.PullReqReviewDecisionApproved, + enum.PullReqReviewDecisionChangeReq, + enum.PullReqReviewDecisionReviewed) + return usererror.BadRequest(msg) + } + + in.Decision = decision + in.Message = strings.TrimSpace(in.Message) + + // TODO: Check the length of the message string + + return nil +} + +// ReviewSubmit creates a new pull request review. +func (c *Controller) ReviewSubmit( + ctx context.Context, + session *auth.Session, + repoRef string, + prNum int64, + in *ReviewSubmitInput, +) (*types.PullReqReview, error) { + if err := in.Validate(); err != nil { + return nil, err + } + + repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoView) + if err != nil { + return nil, fmt.Errorf("failed to acquire access to repo: %w", err) + } + + pr, err := c.pullreqStore.FindByNumber(ctx, repo.ID, prNum) + if err != nil { + return nil, fmt.Errorf("failed to find pull request by number: %w", err) + } + + if pr.CreatedBy == session.Principal.ID { + return nil, usererror.BadRequest("Can't submit review to own pull requests.") + } + + commit, err := c.gitRPCClient.GetCommit(ctx, &gitrpc.GetCommitParams{ + ReadParams: gitrpc.ReadParams{RepoUID: repo.GitUID}, + SHA: in.CommitSHA, + }) + if err != nil { + return nil, fmt.Errorf("failed to get git branch sha: %w", err) + } + + commitSHA := commit.Commit.SHA + + var review *types.PullReqReview + + err = dbtx.New(c.db).WithTx(ctx, func(ctx context.Context) error { + now := time.Now().UnixMilli() + review = &types.PullReqReview{ + ID: 0, + CreatedBy: session.Principal.ID, + Created: now, + Updated: now, + PullReqID: pr.ID, + Decision: in.Decision, + SHA: commitSHA, + } + + err = c.reviewStore.Create(ctx, review) + if err != nil { + return err + } + + _, err = c.updateReviewer(ctx, session, pr, review, commitSHA) + return err + }) + if err != nil { + return nil, err + } + + err = func() error { + if pr, err = c.pullreqStore.UpdateActivitySeq(ctx, pr); err != nil { + return fmt.Errorf("failed to increment pull request activity sequence: %w", err) + } + + payload := &types.PullRequestActivityPayloadReviewSubmit{ + CommitSHA: commitSHA, + Message: in.Message, + Decision: in.Decision, + } + _, err = c.activityStore.CreateWithPayload(ctx, pr, session.Principal.ID, payload) + return err + }() + if err != nil { + // non-critical error + log.Ctx(ctx).Err(err).Msgf("failed to write pull request activity after review submit") + } + + return review, nil +} + +// updateReviewer updates pull request reviewer object. 
+func (c *Controller) updateReviewer(ctx context.Context, session *auth.Session,
+    pr *types.PullReq, review *types.PullReqReview, sha string) (*types.PullReqReviewer, error) {
+    reviewer, err := c.reviewerStore.Find(ctx, pr.ID, session.Principal.ID)
+    if err != nil && !errors.Is(err, store.ErrResourceNotFound) {
+        return nil, err
+    }
+
+    if reviewer != nil {
+        reviewer.LatestReviewID = &review.ID
+        reviewer.ReviewDecision = review.Decision
+        reviewer.SHA = sha
+        err = c.reviewerStore.Update(ctx, reviewer)
+    } else {
+        now := time.Now().UnixMilli()
+        reviewer = &types.PullReqReviewer{
+            PullReqID: pr.ID,
+            PrincipalID: session.Principal.ID,
+            CreatedBy: session.Principal.ID,
+            Created: now,
+            Updated: now,
+            RepoID: pr.TargetRepoID,
+            Type: enum.PullReqReviewerTypeSelfAssigned,
+            LatestReviewID: &review.ID,
+            ReviewDecision: review.Decision,
+            SHA: sha,
+            Reviewer: types.PrincipalInfo{},
+            AddedBy: types.PrincipalInfo{},
+        }
+        err = c.reviewerStore.Create(ctx, reviewer)
+    }
+    if err != nil {
+        return nil, fmt.Errorf("failed to create/update reviewer: %w", err)
+    }
+
+    return reviewer, nil
+}
diff --git a/internal/api/controller/pullreq/reviewer_add.go b/internal/api/controller/pullreq/reviewer_add.go
new file mode 100644
index 0000000000..7b685206e2
--- /dev/null
+++ b/internal/api/controller/pullreq/reviewer_add.go
@@ -0,0 +1,138 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package pullreq
+
+import (
+    "context"
+    "errors"
+    "fmt"
+    "time"
+
+    apiauth "github.com/harness/gitness/internal/api/auth"
+    "github.com/harness/gitness/internal/api/usererror"
+    "github.com/harness/gitness/internal/auth"
+    "github.com/harness/gitness/store"
+    "github.com/harness/gitness/store/database/dbtx"
+    "github.com/harness/gitness/types"
+    "github.com/harness/gitness/types/enum"
+
+    "github.com/rs/zerolog/log"
+)
+
+type ReviewerAddInput struct {
+    ReviewerID int64 `json:"reviewer_id"`
+}
+
+// ReviewerAdd adds a new reviewer to the pull request.
+func (c *Controller) ReviewerAdd( + ctx context.Context, + session *auth.Session, + repoRef string, + prNum int64, + in *ReviewerAddInput, +) (*types.PullReqReviewer, error) { + repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoEdit) + if err != nil { + return nil, fmt.Errorf("failed to acquire access to repo: %w", err) + } + + pr, err := c.pullreqStore.FindByNumber(ctx, repo.ID, prNum) + if err != nil { + return nil, fmt.Errorf("failed to find pull request by number: %w", err) + } + + if in.ReviewerID == 0 { + return nil, usererror.BadRequest("Must specify reviewer ID.") + } + + if in.ReviewerID == pr.CreatedBy { + return nil, usererror.BadRequest("Pull request author can't be added as a reviewer.") + } + + addedByInfo := session.Principal.ToPrincipalInfo() + + var reviewerType enum.PullReqReviewerType + switch session.Principal.ID { + case pr.CreatedBy: + reviewerType = enum.PullReqReviewerTypeRequested + case in.ReviewerID: + reviewerType = enum.PullReqReviewerTypeSelfAssigned + default: + reviewerType = enum.PullReqReviewerTypeAssigned + } + + reviewerInfo := addedByInfo + if reviewerType != enum.PullReqReviewerTypeSelfAssigned { + var reviewerPrincipal *types.Principal + reviewerPrincipal, err = c.principalStore.Find(ctx, in.ReviewerID) + if err != nil { + return nil, err + } + + reviewerInfo = reviewerPrincipal.ToPrincipalInfo() + + // TODO: To check the reviewer's access to the repo we create a dummy session object. Fix it. + if err = apiauth.CheckRepo(ctx, c.authorizer, &auth.Session{ + Principal: *reviewerPrincipal, + Metadata: nil, + }, repo, enum.PermissionRepoView, false); err != nil { + log.Ctx(ctx).Info().Msgf("Reviewer principal: %s access error: %s", reviewerInfo.UID, err) + return nil, usererror.BadRequest("The reviewer doesn't have enough permissions for the repository.") + } + } + + var reviewer *types.PullReqReviewer + + err = dbtx.New(c.db).WithTx(ctx, func(ctx context.Context) error { + reviewer, err = c.reviewerStore.Find(ctx, pr.ID, in.ReviewerID) + if err != nil && !errors.Is(err, store.ErrResourceNotFound) { + return err + } + + if reviewer != nil { + return nil + } + + reviewer = newPullReqReviewer(session, pr, repo, reviewerInfo, addedByInfo, reviewerType, in) + + return c.reviewerStore.Create(ctx, reviewer) + }) + if err != nil { + return nil, fmt.Errorf("failed to create pull request reviewer: %w", err) + } + + return reviewer, err +} + +// newPullReqReviewer creates new pull request reviewer object. +func newPullReqReviewer(session *auth.Session, pullReq *types.PullReq, + repo *types.Repository, reviewerInfo, addedByInfo *types.PrincipalInfo, + reviewerType enum.PullReqReviewerType, in *ReviewerAddInput) *types.PullReqReviewer { + now := time.Now().UnixMilli() + return &types.PullReqReviewer{ + PullReqID: pullReq.ID, + PrincipalID: in.ReviewerID, + CreatedBy: session.Principal.ID, + Created: now, + Updated: now, + RepoID: repo.ID, + Type: reviewerType, + LatestReviewID: nil, + ReviewDecision: enum.PullReqReviewDecisionPending, + SHA: "", + Reviewer: *reviewerInfo, + AddedBy: *addedByInfo, + } +} diff --git a/internal/api/controller/pullreq/reviewer_delete.go b/internal/api/controller/pullreq/reviewer_delete.go new file mode 100644 index 0000000000..6888d49c02 --- /dev/null +++ b/internal/api/controller/pullreq/reviewer_delete.go @@ -0,0 +1,43 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pullreq + +import ( + "context" + "fmt" + + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types/enum" +) + +// ReviewerDelete deletes reviewer from the reviewerlist for the given PR. +func (c *Controller) ReviewerDelete(ctx context.Context, session *auth.Session, + repoRef string, prNum, reviewerID int64) error { + repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoEdit) + if err != nil { + return fmt.Errorf("failed to acquire access to repo: %w", err) + } + + pr, err := c.pullreqStore.FindByNumber(ctx, repo.ID, prNum) + if err != nil { + return fmt.Errorf("failed to find pull request: %w", err) + } + + err = c.reviewerStore.Delete(ctx, pr.ID, reviewerID) + if err != nil { + return fmt.Errorf("failed to delete reviewer: %w", err) + } + return nil +} diff --git a/internal/api/controller/pullreq/reviewer_list.go b/internal/api/controller/pullreq/reviewer_list.go new file mode 100644 index 0000000000..6795032e93 --- /dev/null +++ b/internal/api/controller/pullreq/reviewer_list.go @@ -0,0 +1,45 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pullreq + +import ( + "context" + "fmt" + + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +// ReviewerList returns reviewer list for the pull request. +func (c *Controller) ReviewerList( + ctx context.Context, + session *auth.Session, + repoRef string, + prNum int64) ([]*types.PullReqReviewer, error) { + repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoView) + if err != nil { + return nil, fmt.Errorf("failed to acquire access to repo: %w", err) + } + + pr, err := c.pullreqStore.FindByNumber(ctx, repo.ID, prNum) + if err != nil { + return nil, fmt.Errorf("failed to find pull request by number: %w", err) + } + + reviewers, err := c.reviewerStore.List(ctx, pr.ID) + + return reviewers, err +} diff --git a/internal/api/controller/pullreq/wire.go b/internal/api/controller/pullreq/wire.go new file mode 100644 index 0000000000..2ebb8a0682 --- /dev/null +++ b/internal/api/controller/pullreq/wire.go @@ -0,0 +1,53 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pullreq + +import ( + "github.com/harness/gitness/gitrpc" + "github.com/harness/gitness/internal/auth/authz" + pullreqevents "github.com/harness/gitness/internal/events/pullreq" + "github.com/harness/gitness/internal/services/codecomments" + "github.com/harness/gitness/internal/services/pullreq" + "github.com/harness/gitness/internal/sse" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/internal/url" + "github.com/harness/gitness/lock" + + "github.com/google/wire" + "github.com/jmoiron/sqlx" +) + +// WireSet provides a wire set for this package. +var WireSet = wire.NewSet( + ProvideController, +) + +func ProvideController(db *sqlx.DB, urlProvider *url.Provider, authorizer authz.Authorizer, + pullReqStore store.PullReqStore, pullReqActivityStore store.PullReqActivityStore, + codeCommentsView store.CodeCommentView, + pullReqReviewStore store.PullReqReviewStore, pullReqReviewerStore store.PullReqReviewerStore, + repoStore store.RepoStore, principalStore store.PrincipalStore, fileViewStore store.PullReqFileViewStore, + rpcClient gitrpc.Interface, eventReporter *pullreqevents.Reporter, + mtxManager lock.MutexManager, codeCommentMigrator *codecomments.Migrator, + pullreqService *pullreq.Service, sseStreamer sse.Streamer, +) *Controller { + return NewController(db, urlProvider, authorizer, + pullReqStore, pullReqActivityStore, + codeCommentsView, + pullReqReviewStore, pullReqReviewerStore, + repoStore, principalStore, fileViewStore, + rpcClient, eventReporter, + mtxManager, codeCommentMigrator, pullreqService, sseStreamer) +} diff --git a/internal/api/controller/repo/blame.go b/internal/api/controller/repo/blame.go new file mode 100644 index 0000000000..8a405bf519 --- /dev/null +++ b/internal/api/controller/repo/blame.go @@ -0,0 +1,61 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
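Before the repo controller files, a short note on how the WireSet above is typically consumed: an application-level injector composes the per-package provider sets and lets wire generate the constructor calls, resolving ProvideController's dependencies (stores, gitrpc client, event reporter, and so on) from providers contributed by the other sets. The sketch below is illustrative only; the surrounding set names are not part of this patch.

package example

import (
    "github.com/google/wire"

    "github.com/harness/gitness/internal/api/controller/pullreq"
)

// appSet composes provider sets from individual controller packages.
var appSet = wire.NewSet(
    pullreq.WireSet,
    // repo.WireSet, space.WireSet, ... (other controller sets, omitted here)
)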
+
+package repo
+
+import (
+    "context"
+    "strings"
+
+    "github.com/harness/gitness/gitrpc"
+    "github.com/harness/gitness/internal/api/usererror"
+    "github.com/harness/gitness/internal/auth"
+    "github.com/harness/gitness/types"
+    "github.com/harness/gitness/types/enum"
+)
+
+func (c *Controller) Blame(ctx context.Context,
+    session *auth.Session,
+    repoRef, gitRef, path string,
+    lineFrom, lineTo int,
+) (types.Stream[*gitrpc.BlamePart], error) {
+    path = strings.TrimSpace(path)
+    if path == "" {
+        return nil, usererror.BadRequest("File path needs to be specified.")
+    }
+
+    if lineTo > 0 && lineFrom > lineTo {
+        return nil, usererror.BadRequest("Line range must be valid.")
+    }
+
+    repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoView, true)
+    if err != nil {
+        return nil, err
+    }
+
+    if gitRef == "" {
+        gitRef = repo.DefaultBranch
+    }
+
+    reader := gitrpc.NewStreamReader(
+        c.gitRPCClient.Blame(ctx, &gitrpc.BlameParams{
+            ReadParams: CreateRPCReadParams(repo),
+            GitRef: gitRef,
+            Path: path,
+            LineFrom: lineFrom,
+            LineTo: lineTo,
+        }))
+
+    return reader, nil
+}
diff --git a/internal/api/controller/repo/commit.go b/internal/api/controller/repo/commit.go
new file mode 100644
index 0000000000..005d26ee4d
--- /dev/null
+++ b/internal/api/controller/repo/commit.go
@@ -0,0 +1,110 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package repo
+
+import (
+    "context"
+    "encoding/base64"
+    "fmt"
+    "time"
+
+    "github.com/harness/gitness/gitrpc"
+    "github.com/harness/gitness/internal/auth"
+    "github.com/harness/gitness/internal/bootstrap"
+    "github.com/harness/gitness/types/enum"
+)
+
+// CommitFileAction holds file operation data.
+type CommitFileAction struct {
+    Action gitrpc.FileAction `json:"action"`
+    Path string `json:"path"`
+    Payload string `json:"payload"`
+    Encoding enum.ContentEncodingType `json:"encoding"`
+    SHA string `json:"sha"`
+}
+
+// CommitFilesOptions holds the data for file operations.
+type CommitFilesOptions struct {
+    Title string `json:"title"`
+    Message string `json:"message"`
+    Branch string `json:"branch"`
+    NewBranch string `json:"new_branch"`
+    Actions []CommitFileAction `json:"actions"`
+}
+
+// CommitFilesResponse holds the commit ID.
+type CommitFilesResponse struct { + CommitID string `json:"commit_id"` +} + +func (c *Controller) CommitFiles(ctx context.Context, + session *auth.Session, + repoRef string, + in *CommitFilesOptions, +) (CommitFilesResponse, error) { + repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoPush, false) + if err != nil { + return CommitFilesResponse{}, err + } + + actions := make([]gitrpc.CommitFileAction, len(in.Actions)) + for i, action := range in.Actions { + var rawPayload []byte + switch action.Encoding { + case enum.ContentEncodingTypeBase64: + rawPayload, err = base64.StdEncoding.DecodeString(action.Payload) + if err != nil { + return CommitFilesResponse{}, fmt.Errorf("failed to decode base64 payload: %w", err) + } + case enum.ContentEncodingTypeUTF8: + fallthrough + default: + // by default we treat content as is + rawPayload = []byte(action.Payload) + } + + actions[i] = gitrpc.CommitFileAction{ + Action: action.Action, + Path: action.Path, + Payload: rawPayload, + SHA: action.SHA, + } + } + + writeParams, err := CreateRPCWriteParams(ctx, c.urlProvider, session, repo) + if err != nil { + return CommitFilesResponse{}, fmt.Errorf("failed to create RPC write params: %w", err) + } + + now := time.Now() + commit, err := c.gitRPCClient.CommitFiles(ctx, &gitrpc.CommitFilesParams{ + WriteParams: writeParams, + Title: in.Title, + Message: in.Message, + Branch: in.Branch, + NewBranch: in.NewBranch, + Actions: actions, + Committer: rpcIdentityFromPrincipal(bootstrap.NewSystemServiceSession().Principal), + CommitterDate: &now, + Author: rpcIdentityFromPrincipal(session.Principal), + AuthorDate: &now, + }) + if err != nil { + return CommitFilesResponse{}, err + } + return CommitFilesResponse{ + CommitID: commit.CommitID, + }, nil +} diff --git a/internal/api/controller/repo/content_get.go b/internal/api/controller/repo/content_get.go new file mode 100644 index 0000000000..8b3cf127b0 --- /dev/null +++ b/internal/api/controller/repo/content_get.go @@ -0,0 +1,296 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package repo + +import ( + "context" + "encoding/base64" + "fmt" + "io" + + "github.com/harness/gitness/gitrpc" + "github.com/harness/gitness/internal/api/controller" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +const ( + // maxGetContentFileSize specifies the maximum number of bytes a file content response contains. + // If a file is any larger, the content is truncated. 
+ maxGetContentFileSize = 1 << 22 // 4 MB +) + +type ContentType string + +const ( + ContentTypeFile ContentType = "file" + ContentTypeDir ContentType = "dir" + ContentTypeSymlink ContentType = "symlink" + ContentTypeSubmodule ContentType = "submodule" +) + +type ContentInfo struct { + Type ContentType `json:"type"` + SHA string `json:"sha"` + Name string `json:"name"` + Path string `json:"path"` + LatestCommit *types.Commit `json:"latest_commit,omitempty"` +} + +type GetContentOutput struct { + ContentInfo + Content Content `json:"content"` +} + +// Content restricts the possible types of content returned by the api. +type Content interface { + isContent() +} + +type FileContent struct { + Encoding enum.ContentEncodingType `json:"encoding"` + Data string `json:"data"` + Size int64 `json:"size"` + DataSize int64 `json:"data_size"` +} + +func (c *FileContent) isContent() {} + +type SymlinkContent struct { + Target string `json:"target"` + Size int64 `json:"size"` +} + +func (c *SymlinkContent) isContent() {} + +type DirContent struct { + Entries []ContentInfo `json:"entries"` +} + +func (c *DirContent) isContent() {} + +type SubmoduleContent struct { + URL string `json:"url"` + CommitSHA string `json:"commit_sha"` +} + +func (c *SubmoduleContent) isContent() {} + +// GetContent finds the content of the repo at the given path. +// If no gitRef is provided, the content is retrieved from the default branch. +func (c *Controller) GetContent(ctx context.Context, + session *auth.Session, + repoRef string, + gitRef string, + repoPath string, + includeLatestCommit bool, +) (*GetContentOutput, error) { + repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoView, true) + if err != nil { + return nil, err + } + + // set gitRef to default branch in case an empty reference was provided + if gitRef == "" { + gitRef = repo.DefaultBranch + } + + // create read params once + readParams := CreateRPCReadParams(repo) + + treeNodeOutput, err := c.gitRPCClient.GetTreeNode(ctx, &gitrpc.GetTreeNodeParams{ + ReadParams: readParams, + GitREF: gitRef, + Path: repoPath, + IncludeLatestCommit: includeLatestCommit, + }) + if err != nil { + return nil, fmt.Errorf("failed to read tree node: %w", err) + } + + info, err := mapToContentInfo(treeNodeOutput.Node, treeNodeOutput.Commit, includeLatestCommit) + if err != nil { + return nil, err + } + + var content Content + switch info.Type { + case ContentTypeDir: + content, err = c.getDirContent(ctx, readParams, gitRef, repoPath, includeLatestCommit) + case ContentTypeFile: + content, err = c.getFileContent(ctx, readParams, info.SHA) + case ContentTypeSymlink: + content, err = c.getSymlinkContent(ctx, readParams, info.SHA) + case ContentTypeSubmodule: + content, err = c.getSubmoduleContent(ctx, readParams, gitRef, repoPath, info.SHA) + default: + err = fmt.Errorf("unknown tree node type '%s'", treeNodeOutput.Node.Type) + } + + if err != nil { + return nil, err + } + + return &GetContentOutput{ + ContentInfo: info, + Content: content, + }, nil +} + +func (c *Controller) getSubmoduleContent(ctx context.Context, + readParams gitrpc.ReadParams, + gitRef string, + repoPath string, + commitSHA string, +) (*SubmoduleContent, error) { + output, err := c.gitRPCClient.GetSubmodule(ctx, &gitrpc.GetSubmoduleParams{ + ReadParams: readParams, + GitREF: gitRef, + Path: repoPath, + }) + if err != nil { + // TODO: handle not found error + // This requires gitrpc to also return notfound though! 
+ return nil, fmt.Errorf("failed to get submodule: %w", err) + } + + return &SubmoduleContent{ + URL: output.Submodule.URL, + CommitSHA: commitSHA, + }, nil +} + +func (c *Controller) getFileContent(ctx context.Context, + readParams gitrpc.ReadParams, + blobSHA string, +) (*FileContent, error) { + output, err := c.gitRPCClient.GetBlob(ctx, &gitrpc.GetBlobParams{ + ReadParams: readParams, + SHA: blobSHA, + SizeLimit: maxGetContentFileSize, + }) + if err != nil { + return nil, fmt.Errorf("failed to get file content: %w", err) + } + + content, err := io.ReadAll(output.Content) + if err != nil { + return nil, fmt.Errorf("failed to read blob content: %w", err) + } + + return &FileContent{ + Size: output.Size, + DataSize: output.ContentSize, + Encoding: enum.ContentEncodingTypeBase64, + Data: base64.StdEncoding.EncodeToString(content), + }, nil +} + +func (c *Controller) getSymlinkContent(ctx context.Context, + readParams gitrpc.ReadParams, + blobSHA string, +) (*SymlinkContent, error) { + output, err := c.gitRPCClient.GetBlob(ctx, &gitrpc.GetBlobParams{ + ReadParams: readParams, + SHA: blobSHA, + SizeLimit: maxGetContentFileSize, // TODO: do we need to guard against too big symlinks? + }) + if err != nil { + // TODO: handle not found error + // This requires gitrpc to also return notfound though! + return nil, fmt.Errorf("failed to get symlink: %w", err) + } + + content, err := io.ReadAll(output.Content) + if err != nil { + return nil, fmt.Errorf("failed to read blob content: %w", err) + } + + return &SymlinkContent{ + Size: output.Size, + Target: string(content), + }, nil +} + +func (c *Controller) getDirContent(ctx context.Context, + readParams gitrpc.ReadParams, + gitRef string, + repoPath string, + includeLatestCommit bool, +) (*DirContent, error) { + output, err := c.gitRPCClient.ListTreeNodes(ctx, &gitrpc.ListTreeNodeParams{ + ReadParams: readParams, + GitREF: gitRef, + Path: repoPath, + IncludeLatestCommit: includeLatestCommit, + }) + if err != nil { + // TODO: handle not found error + // This requires gitrpc to also return notfound though! 
+ return nil, fmt.Errorf("failed to get content of dir: %w", err) + } + + entries := make([]ContentInfo, len(output.Nodes)) + for i, node := range output.Nodes { + entries[i], err = mapToContentInfo(node, nil, false) + if err != nil { + return nil, err + } + } + + return &DirContent{ + Entries: entries, + }, nil +} + +func mapToContentInfo(node gitrpc.TreeNode, commit *gitrpc.Commit, includeLatestCommit bool) (ContentInfo, error) { + typ, err := mapNodeModeToContentType(node.Mode) + if err != nil { + return ContentInfo{}, err + } + + res := ContentInfo{ + Type: typ, + SHA: node.SHA, + Name: node.Name, + Path: node.Path, + } + + // parse commit only if available + if commit != nil && includeLatestCommit { + res.LatestCommit, err = controller.MapCommit(commit) + if err != nil { + return ContentInfo{}, err + } + } + + return res, nil +} + +func mapNodeModeToContentType(m gitrpc.TreeNodeMode) (ContentType, error) { + switch m { + case gitrpc.TreeNodeModeFile, gitrpc.TreeNodeModeExec: + return ContentTypeFile, nil + case gitrpc.TreeNodeModeSymlink: + return ContentTypeSymlink, nil + case gitrpc.TreeNodeModeCommit: + return ContentTypeSubmodule, nil + case gitrpc.TreeNodeModeTree: + return ContentTypeDir, nil + default: + return ContentTypeFile, fmt.Errorf("unsupported tree node mode '%s'", m) + } +} diff --git a/internal/api/controller/repo/content_paths_details.go b/internal/api/controller/repo/content_paths_details.go new file mode 100644 index 0000000000..9a2ae01cab --- /dev/null +++ b/internal/api/controller/repo/content_paths_details.go @@ -0,0 +1,77 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package repo + +import ( + "context" + + "github.com/harness/gitness/gitrpc" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types/enum" +) + +type PathsDetailsInput struct { + Paths []string `json:"paths"` +} + +type PathsDetailsOutput struct { + Details []gitrpc.PathDetails `json:"details"` +} + +// PathsDetails finds the additional info about the provided paths of the repo. +// If no gitRef is provided, the content is retrieved from the default branch. 
+func (c *Controller) PathsDetails(ctx context.Context, + session *auth.Session, + repoRef string, + gitRef string, + input PathsDetailsInput, +) (PathsDetailsOutput, error) { + repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoView, true) + if err != nil { + return PathsDetailsOutput{}, err + } + + if len(input.Paths) == 0 { + return PathsDetailsOutput{}, nil + } + + const maxInputPaths = 50 + if len(input.Paths) > maxInputPaths { + return PathsDetailsOutput{}, + usererror.BadRequestf("maximum number of elements in the Paths array is %d", maxInputPaths) + } + + // set gitRef to default branch in case an empty reference was provided + if gitRef == "" { + gitRef = repo.DefaultBranch + } + + // create read params once + readParams := CreateRPCReadParams(repo) + + result, err := c.gitRPCClient.PathsDetails(ctx, gitrpc.PathsDetailsParams{ + ReadParams: readParams, + GitREF: gitRef, + Paths: input.Paths, + }) + if err != nil { + return PathsDetailsOutput{}, err + } + + return PathsDetailsOutput{ + Details: result.Details, + }, nil +} diff --git a/internal/api/controller/repo/controller.go b/internal/api/controller/repo/controller.go new file mode 100644 index 0000000000..7d4900b765 --- /dev/null +++ b/internal/api/controller/repo/controller.go @@ -0,0 +1,151 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
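Before the repo controller definition, a caller sketch for the PathsDetails handler above. Not part of the patch: ctrl here is assumed to be a wired *repo.Controller. Passing an empty gitRef falls back to the repository's default branch, and the handler rejects requests with more than 50 paths.

package example

import (
    "context"
    "fmt"

    "github.com/harness/gitness/internal/api/controller/repo"
    "github.com/harness/gitness/internal/auth"
)

// describePaths asks for additional details about a few paths on the
// repository's default branch (gitRef left empty).
func describePaths(ctx context.Context, ctrl *repo.Controller, session *auth.Session,
    repoRef string) error {
    out, err := ctrl.PathsDetails(ctx, session, repoRef, "", repo.PathsDetailsInput{
        Paths: []string{"README.md", "go.mod"},
    })
    if err != nil {
        return fmt.Errorf("paths details failed: %w", err)
    }
    fmt.Printf("received details for %d paths\n", len(out.Details))
    return nil
}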
+ +package repo + +import ( + "context" + "fmt" + "strconv" + "strings" + + "github.com/harness/gitness/gitrpc" + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/internal/auth/authz" + "github.com/harness/gitness/internal/githook" + "github.com/harness/gitness/internal/services/importer" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/internal/url" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/check" + "github.com/harness/gitness/types/enum" + + "github.com/jmoiron/sqlx" +) + +type Controller struct { + defaultBranch string + db *sqlx.DB + urlProvider *url.Provider + uidCheck check.PathUID + authorizer authz.Authorizer + repoStore store.RepoStore + spaceStore store.SpaceStore + pipelineStore store.PipelineStore + principalStore store.PrincipalStore + gitRPCClient gitrpc.Interface + importer *importer.Repository +} + +func NewController( + defaultBranch string, + db *sqlx.DB, + urlProvider *url.Provider, + uidCheck check.PathUID, + authorizer authz.Authorizer, + repoStore store.RepoStore, + spaceStore store.SpaceStore, + pipelineStore store.PipelineStore, + principalStore store.PrincipalStore, + gitRPCClient gitrpc.Interface, + importer *importer.Repository, +) *Controller { + return &Controller{ + defaultBranch: defaultBranch, + db: db, + urlProvider: urlProvider, + uidCheck: uidCheck, + authorizer: authorizer, + repoStore: repoStore, + spaceStore: spaceStore, + pipelineStore: pipelineStore, + principalStore: principalStore, + gitRPCClient: gitRPCClient, + importer: importer, + } +} + +// getRepoCheckAccess fetches an active repo (not one that is currently being imported) +// and checks if the current user has permission to access it. +func (c *Controller) getRepoCheckAccess( + ctx context.Context, + session *auth.Session, + repoRef string, + reqPermission enum.Permission, + orPublic bool, +) (*types.Repository, error) { + if repoRef == "" { + return nil, usererror.BadRequest("A valid repository reference must be provided.") + } + + repo, err := c.repoStore.FindByRef(ctx, repoRef) + if err != nil { + return nil, fmt.Errorf("failed to find repository: %w", err) + } + + if repo.Importing { + return nil, usererror.BadRequest("Repository import is in progress.") + } + + if err = apiauth.CheckRepo(ctx, c.authorizer, session, repo, reqPermission, orPublic); err != nil { + return nil, fmt.Errorf("access check failed: %w", err) + } + + return repo, nil +} + +// CreateRPCWriteParams creates base write parameters for gitrpc write operations. +// IMPORTANT: session & repo are assumed to be not nil! +func CreateRPCWriteParams(ctx context.Context, urlProvider *url.Provider, + session *auth.Session, repo *types.Repository) (gitrpc.WriteParams, error) { + // generate envars (add everything githook CLI needs for execution) + envVars, err := githook.GenerateEnvironmentVariables( + ctx, + urlProvider.GetAPIBaseURLInternal(), + repo.ID, + session.Principal.ID, + false, + ) + if err != nil { + return gitrpc.WriteParams{}, fmt.Errorf("failed to generate git hook environment variables: %w", err) + } + + return gitrpc.WriteParams{ + Actor: gitrpc.Identity{ + Name: session.Principal.DisplayName, + Email: session.Principal.Email, + }, + RepoUID: repo.GitUID, + EnvVars: envVars, + }, nil +} + +// CreateRPCReadParams creates base read parameters for gitrpc read operations. +// IMPORTANT: repo is assumed to be not nil! 
+func CreateRPCReadParams(repo *types.Repository) gitrpc.ReadParams { + return gitrpc.ReadParams{ + RepoUID: repo.GitUID, + } +} + +func (c *Controller) validateParentRef(parentRef string) error { + parentRefAsID, err := strconv.ParseInt(parentRef, 10, 64) + if (err == nil && parentRefAsID <= 0) || (len(strings.TrimSpace(parentRef)) == 0) { + return errRepositoryRequiresParent + } + + return nil +} diff --git a/internal/api/controller/repo/create.go b/internal/api/controller/repo/create.go new file mode 100644 index 0000000000..c8c5ce029d --- /dev/null +++ b/internal/api/controller/repo/create.go @@ -0,0 +1,227 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package repo + +import ( + "bytes" + "context" + "fmt" + "strings" + "time" + + "github.com/harness/gitness/gitrpc" + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/internal/bootstrap" + "github.com/harness/gitness/internal/githook" + "github.com/harness/gitness/resources" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/check" + "github.com/harness/gitness/types/enum" + + "github.com/rs/zerolog/log" +) + +var ( + // errRepositoryRequiresParent if the user tries to create a repo without a parent space. + errRepositoryRequiresParent = usererror.BadRequest( + "Parent space required - standalone repositories are not supported.") +) + +type CreateInput struct { + ParentRef string `json:"parent_ref"` + UID string `json:"uid"` + DefaultBranch string `json:"default_branch"` + Description string `json:"description"` + IsPublic bool `json:"is_public"` + ForkID int64 `json:"fork_id"` + Readme bool `json:"readme"` + License string `json:"license"` + GitIgnore string `json:"git_ignore"` +} + +// Create creates a new repository. 
+func (c *Controller) Create(ctx context.Context, session *auth.Session, in *CreateInput) (*types.Repository, error) { + parentSpace, err := c.getSpaceCheckAuthRepoCreation(ctx, session, in.ParentRef) + if err != nil { + return nil, err + } + + if err := c.sanitizeCreateInput(in); err != nil { + return nil, fmt.Errorf("failed to sanitize input: %w", err) + } + + gitRPCResp, err := c.createGitRPCRepository(ctx, session, in) + if err != nil { + return nil, fmt.Errorf("error creating repository on GitRPC: %w", err) + } + + now := time.Now().UnixMilli() + repo := &types.Repository{ + Version: 0, + ParentID: parentSpace.ID, + UID: in.UID, + GitUID: gitRPCResp.UID, + Description: in.Description, + IsPublic: in.IsPublic, + CreatedBy: session.Principal.ID, + Created: now, + Updated: now, + ForkID: in.ForkID, + DefaultBranch: in.DefaultBranch, + } + err = c.repoStore.Create(ctx, repo) + if err != nil { + if dErr := c.DeleteGitRPCRepositories(ctx, session, repo); dErr != nil { + log.Ctx(ctx).Warn().Err(dErr).Msg("gitrpc failed to delete repo for cleanup") + } + return nil, fmt.Errorf("failed to create repository in storage: %w", err) + } + + // backfil GitURL + repo.GitURL = c.urlProvider.GenerateRepoCloneURL(repo.Path) + + return repo, nil +} + +func (c *Controller) getSpaceCheckAuthRepoCreation( + ctx context.Context, + session *auth.Session, + parentRef string, +) (*types.Space, error) { + space, err := c.spaceStore.FindByRef(ctx, parentRef) + if err != nil { + return nil, fmt.Errorf("parent space not found: %w", err) + } + + // create is a special case - check permission without specific resource + scope := &types.Scope{SpacePath: space.Path} + resource := &types.Resource{ + Type: enum.ResourceTypeRepo, + Name: "", + } + + err = apiauth.Check(ctx, c.authorizer, session, scope, resource, enum.PermissionRepoEdit) + if err != nil { + return nil, fmt.Errorf("auth check failed: %w", err) + } + + return space, nil +} + +func (c *Controller) sanitizeCreateInput(in *CreateInput) error { + if err := c.validateParentRef(in.ParentRef); err != nil { + return err + } + + if err := c.uidCheck(in.UID, false); err != nil { + return err + } + + in.Description = strings.TrimSpace(in.Description) + if err := check.Description(in.Description); err != nil { + return err + } + + if in.DefaultBranch == "" { + in.DefaultBranch = c.defaultBranch + } + + return nil +} + +func (c *Controller) createGitRPCRepository(ctx context.Context, session *auth.Session, + in *CreateInput) (*gitrpc.CreateRepositoryOutput, error) { + var ( + err error + content []byte + ) + files := make([]gitrpc.File, 0, 3) // readme, gitignore, licence + if in.Readme { + content = createReadme(in.UID, in.Description) + files = append(files, gitrpc.File{ + Path: "README.md", + Content: content, + }) + } + if in.License != "" && in.License != "none" { + content, err = resources.ReadLicense(in.License) + if err != nil { + return nil, fmt.Errorf("failed to read license '%s': %w", in.License, err) + } + files = append(files, gitrpc.File{ + Path: "LICENSE", + Content: content, + }) + } + if in.GitIgnore != "" { + content, err = resources.ReadGitIgnore(in.GitIgnore) + if err != nil { + return nil, fmt.Errorf("failed to read git ignore '%s': %w", in.GitIgnore, err) + } + files = append(files, gitrpc.File{ + Path: ".gitignore", + Content: content, + }) + } + + // generate envars (add everything githook CLI needs for execution) + envVars, err := githook.GenerateEnvironmentVariables( + ctx, + c.urlProvider.GetAPIBaseURLInternal(), + 0, + session.Principal.ID, + 
true, + ) + if err != nil { + return nil, fmt.Errorf("failed to generate git hook environment variables: %w", err) + } + + actor := rpcIdentityFromPrincipal(session.Principal) + committer := rpcIdentityFromPrincipal(bootstrap.NewSystemServiceSession().Principal) + now := time.Now() + resp, err := c.gitRPCClient.CreateRepository(ctx, &gitrpc.CreateRepositoryParams{ + Actor: *actor, + EnvVars: envVars, + DefaultBranch: in.DefaultBranch, + Files: files, + Author: actor, + AuthorDate: &now, + Committer: committer, + CommitterDate: &now, + }) + if err != nil { + return nil, fmt.Errorf("failed to create repo on gitrpc: %w", err) + } + + return resp, nil +} + +func createReadme(name, description string) []byte { + content := bytes.Buffer{} + content.WriteString("# " + name + "\n") + if description != "" { + content.WriteString(description) + } + return content.Bytes() +} + +func rpcIdentityFromPrincipal(p types.Principal) *gitrpc.Identity { + return &gitrpc.Identity{ + Name: p.DisplayName, + Email: p.Email, + } +} diff --git a/internal/api/controller/repo/create_branch.go b/internal/api/controller/repo/create_branch.go new file mode 100644 index 0000000000..a7f6df0831 --- /dev/null +++ b/internal/api/controller/repo/create_branch.go @@ -0,0 +1,71 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package repo + +import ( + "context" + "fmt" + + "github.com/harness/gitness/gitrpc" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types/enum" +) + +// CreateBranchInput used for branch creation apis. +type CreateBranchInput struct { + Name string `json:"name"` + + // Target is the commit (or points to the commit) the new branch will be pointing to. + // If no target is provided, the branch points to the same commit as the default branch of the repo. + Target string `json:"target"` +} + +// CreateBranch creates a new branch for a repo. 
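+//
+// An illustrative request body (field names follow the json tags of
+// CreateBranchInput; the values are examples only):
+//
+//	{
+//	  "name": "feature/my-change",
+//	  "target": "main"
+//	}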
+func (c *Controller) CreateBranch(ctx context.Context, + session *auth.Session, + repoRef string, + in *CreateBranchInput, +) (*Branch, error) { + repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoPush, false) + if err != nil { + return nil, err + } + + // set target to default branch in case no target was provided + if in.Target == "" { + in.Target = repo.DefaultBranch + } + + writeParams, err := CreateRPCWriteParams(ctx, c.urlProvider, session, repo) + if err != nil { + return nil, fmt.Errorf("failed to create RPC write params: %w", err) + } + + rpcOut, err := c.gitRPCClient.CreateBranch(ctx, &gitrpc.CreateBranchParams{ + WriteParams: writeParams, + BranchName: in.Name, + Target: in.Target, + }) + if err != nil { + return nil, err + } + + branch, err := mapBranch(rpcOut.Branch) + if err != nil { + return nil, fmt.Errorf("failed to map branch: %w", err) + } + + return &branch, nil +} diff --git a/internal/api/controller/repo/create_commit_tag.go b/internal/api/controller/repo/create_commit_tag.go new file mode 100644 index 0000000000..0adec5df79 --- /dev/null +++ b/internal/api/controller/repo/create_commit_tag.go @@ -0,0 +1,79 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package repo + +import ( + "context" + "fmt" + "time" + + "github.com/harness/gitness/gitrpc" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types/enum" +) + +// CreateCommitTagInput used for tag creation apis. +type CreateCommitTagInput struct { + Name string `json:"name"` + // Target is the commit (or points to the commit) the new tag will be pointing to. + // If no target is provided, the tag points to the same commit as the default branch of the repo. + Target string `json:"target"` + + // Message is the optional message the tag will be created with - if the message is empty + // the tag will be lightweight, otherwise it'll be annotated. + Message string `json:"message"` +} + +// CreateCommitTag creates a new tag for a repo. 
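+//
+// An illustrative request body (values are examples only; an empty message
+// creates a lightweight tag, a non-empty message an annotated tag):
+//
+//	{
+//	  "name": "v1.0.0",
+//	  "target": "main",
+//	  "message": "first release"
+//	}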
+func (c *Controller) CreateCommitTag(ctx context.Context, + session *auth.Session, + repoRef string, + in *CreateCommitTagInput, +) (*CommitTag, error) { + repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoPush, false) + if err != nil { + return nil, err + } + + // set target to default branch in case no branch or commit was provided + if in.Target == "" { + in.Target = repo.DefaultBranch + } + + writeParams, err := CreateRPCWriteParams(ctx, c.urlProvider, session, repo) + if err != nil { + return nil, fmt.Errorf("failed to create RPC write params: %w", err) + } + + now := time.Now() + rpcOut, err := c.gitRPCClient.CreateCommitTag(ctx, &gitrpc.CreateCommitTagParams{ + WriteParams: writeParams, + Name: in.Name, + Target: in.Target, + Message: in.Message, + Tagger: rpcIdentityFromPrincipal(session.Principal), + TaggerDate: &now, + }) + + if err != nil { + return nil, err + } + commitTag, err := mapCommitTag(rpcOut.CommitTag) + + if err != nil { + return nil, fmt.Errorf("failed to map tag received from service output: %w", err) + } + return &commitTag, nil +} diff --git a/internal/api/controller/repo/delete.go b/internal/api/controller/repo/delete.go new file mode 100644 index 0000000000..3742d6bfe9 --- /dev/null +++ b/internal/api/controller/repo/delete.go @@ -0,0 +1,85 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package repo + +import ( + "context" + "fmt" + + "github.com/harness/gitness/gitrpc" + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/rs/zerolog/log" +) + +// Delete deletes a repo. +func (c *Controller) Delete(ctx context.Context, session *auth.Session, repoRef string) error { + // note: can't use c.getRepoCheckAccess because import job for repositories being imported must be cancelled. 
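+	// The flow below: resolve the repo, check delete permission explicitly,
+	// cancel a running import if there is one, then remove both the gitrpc
+	// repository and the database record via DeleteNoAuth.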
+	repo, err := c.repoStore.FindByRef(ctx, repoRef)
+	if err != nil {
+		return err
+	}
+
+	if err = apiauth.CheckRepo(ctx, c.authorizer, session, repo, enum.PermissionRepoDelete, false); err != nil {
+		return err
+	}
+
+	if repo.Importing {
+		err = c.importer.Cancel(ctx, repo)
+		if err != nil {
+			return fmt.Errorf("failed to cancel repository import: %w", err)
+		}
+
+		return c.DeleteNoAuth(ctx, session, repo)
+	}
+
+	log.Ctx(ctx).Info().Msgf("Delete request received for repo %s, id: %d", repo.Path, repo.ID)
+
+	return c.DeleteNoAuth(ctx, session, repo)
+}
+
+func (c *Controller) DeleteNoAuth(ctx context.Context, session *auth.Session, repo *types.Repository) error {
+	if err := c.DeleteGitRPCRepositories(ctx, session, repo); err != nil {
+		return err
+	}
+
+	if err := c.repoStore.Delete(ctx, repo.ID); err != nil {
+		return err
+	}
+	return nil
+}
+
+func (c *Controller) DeleteGitRPCRepositories(ctx context.Context, session *auth.Session, repo *types.Repository) error {
+	writeParams, err := CreateRPCWriteParams(ctx, c.urlProvider, session, repo)
+	if err != nil {
+		return fmt.Errorf("failed to create RPC write params: %w", err)
+	}
+
+	err = c.gitRPCClient.DeleteRepository(ctx, &gitrpc.DeleteRepositoryParams{
+		WriteParams: writeParams,
+	})
+
+	// deletion should not fail if dir does not exist in repos dir
+	if gitrpc.ErrorStatus(err) == gitrpc.StatusNotFound {
+		log.Ctx(ctx).Warn().Msgf("gitrpc repo %s does not exist", repo.GitUID)
+	} else if err != nil {
+		// deletion has failed before removing(rename) the repo dir
+		return fmt.Errorf("gitrpc failed to delete repo %s: %w", repo.GitUID, err)
+	}
+	return nil
+}
diff --git a/internal/api/controller/repo/delete_branch.go b/internal/api/controller/repo/delete_branch.go
new file mode 100644
index 0000000000..68c7dc339b
--- /dev/null
+++ b/internal/api/controller/repo/delete_branch.go
@@ -0,0 +1,60 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package repo
+
+import (
+	"context"
+	"fmt"
+
+	"github.com/harness/gitness/gitrpc"
+	"github.com/harness/gitness/internal/api/usererror"
+	"github.com/harness/gitness/internal/auth"
+	"github.com/harness/gitness/types/enum"
+)
+
+// DeleteBranch deletes a repo branch.
+func (c *Controller) DeleteBranch(ctx context.Context,
+	session *auth.Session,
+	repoRef string,
+	branchName string,
+) error {
+	repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoPush, false)
+	if err != nil {
+		return err
+	}
+
+	// make sure user isn't deleting the default branch
+	// ASSUMPTION: lower layer calls explicit branch api
+	// and 'refs/heads/branch1' would fail if 'branch1' exists.
+	// TODO: Add functional test to ensure the scenario is covered!
+ if branchName == repo.DefaultBranch { + return usererror.ErrDefaultBranchCantBeDeleted + } + + writeParams, err := CreateRPCWriteParams(ctx, c.urlProvider, session, repo) + if err != nil { + return fmt.Errorf("failed to create RPC write params: %w", err) + } + + err = c.gitRPCClient.DeleteBranch(ctx, &gitrpc.DeleteBranchParams{ + WriteParams: writeParams, + BranchName: branchName, + }) + if err != nil { + return err + } + + return nil +} diff --git a/internal/api/controller/repo/delete_tag.go b/internal/api/controller/repo/delete_tag.go new file mode 100644 index 0000000000..e7207103a3 --- /dev/null +++ b/internal/api/controller/repo/delete_tag.go @@ -0,0 +1,50 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package repo + +import ( + "context" + "fmt" + + "github.com/harness/gitness/gitrpc" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types/enum" +) + +// DeleteTag deletes a tag from the repo. +func (c *Controller) DeleteTag(ctx context.Context, + session *auth.Session, + repoRef, + tagName string, +) error { + repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoPush, false) + if err != nil { + return err + } + + writeParams, err := CreateRPCWriteParams(ctx, c.urlProvider, session, repo) + if err != nil { + return fmt.Errorf("failed to create RPC write params: %w", err) + } + + err = c.gitRPCClient.DeleteTag(ctx, &gitrpc.DeleteTagParams{ + Name: tagName, + WriteParams: writeParams, + }) + if err != nil { + return err + } + return nil +} diff --git a/internal/api/controller/repo/diff.go b/internal/api/controller/repo/diff.go new file mode 100644 index 0000000000..b66eb3a898 --- /dev/null +++ b/internal/api/controller/repo/diff.go @@ -0,0 +1,160 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package repo + +import ( + "context" + "io" + "strings" + + "github.com/harness/gitness/gitrpc" + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +func (c *Controller) RawDiff( + ctx context.Context, + session *auth.Session, + repoRef string, + path string, + w io.Writer, +) error { + repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoView, true) + if err != nil { + return err + } + + info, err := parseDiffPath(path) + if err != nil { + return err + } + + return c.gitRPCClient.RawDiff(ctx, &gitrpc.DiffParams{ + ReadParams: CreateRPCReadParams(repo), + BaseRef: info.BaseRef, + HeadRef: info.HeadRef, + MergeBase: info.MergeBase, + }, w) +} + +func (c *Controller) CommitDiff( + ctx context.Context, + session *auth.Session, + repoRef string, + sha string, + w io.Writer, +) error { + repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoView, true) + if err != nil { + return err + } + + return c.gitRPCClient.CommitDiff(ctx, &gitrpc.GetCommitParams{ + ReadParams: CreateRPCReadParams(repo), + SHA: sha, + }, w) +} + +type CompareInfo struct { + BaseRef string + HeadRef string + MergeBase bool +} + +func parseDiffPath(path string) (CompareInfo, error) { + infos := strings.SplitN(path, "...", 2) + if len(infos) != 2 { + infos = strings.SplitN(path, "..", 2) + } + if len(infos) != 2 { + return CompareInfo{}, usererror.BadRequestf("invalid format \"%s\"", path) + } + return CompareInfo{ + BaseRef: infos[0], + HeadRef: infos[1], + MergeBase: strings.Contains(path, "..."), + }, nil +} + +func (c *Controller) DiffStats( + ctx context.Context, + session *auth.Session, + repoRef string, + path string, +) (types.DiffStats, error) { + repo, err := c.repoStore.FindByRef(ctx, repoRef) + if err != nil { + return types.DiffStats{}, err + } + + if err = apiauth.CheckRepo(ctx, c.authorizer, session, repo, enum.PermissionRepoView, false); err != nil { + return types.DiffStats{}, err + } + + info, err := parseDiffPath(path) + if err != nil { + return types.DiffStats{}, err + } + + output, err := c.gitRPCClient.DiffStats(ctx, &gitrpc.DiffParams{ + ReadParams: gitrpc.CreateRPCReadParams(repo), + BaseRef: info.BaseRef, + HeadRef: info.HeadRef, + MergeBase: info.MergeBase, + }) + if err != nil { + return types.DiffStats{}, err + } + + return types.DiffStats{ + Commits: output.Commits, + FilesChanged: output.FilesChanged, + }, nil +} + +func (c *Controller) Diff( + ctx context.Context, + session *auth.Session, + repoRef string, + path string, + includePatch bool, +) (types.Stream[*gitrpc.FileDiff], error) { + repo, err := c.repoStore.FindByRef(ctx, repoRef) + if err != nil { + return nil, err + } + + if err = apiauth.CheckRepo(ctx, c.authorizer, session, repo, enum.PermissionRepoView, false); err != nil { + return nil, err + } + + info, err := parseDiffPath(path) + if err != nil { + return nil, err + } + + reader := gitrpc.NewStreamReader(c.gitRPCClient.Diff(ctx, &gitrpc.DiffParams{ + ReadParams: gitrpc.CreateRPCReadParams(repo), + BaseRef: info.BaseRef, + HeadRef: info.HeadRef, + MergeBase: info.MergeBase, + IncludePatch: includePatch, + })) + + return reader, nil +} diff --git a/internal/api/controller/repo/find.go b/internal/api/controller/repo/find.go new file mode 100644 index 0000000000..dafa5fe574 --- /dev/null +++ b/internal/api/controller/repo/find.go @@ -0,0 +1,42 @@ +// 
Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package repo + +import ( + "context" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +// Find finds a repo. +func (c *Controller) Find(ctx context.Context, session *auth.Session, repoRef string) (*types.Repository, error) { + // note: can't use c.getRepoCheckAccess because even repositories that are currently being imported can be fetched. + repo, err := c.repoStore.FindByRef(ctx, repoRef) + if err != nil { + return nil, err + } + + if err = apiauth.CheckRepo(ctx, c.authorizer, session, repo, enum.PermissionRepoView, true); err != nil { + return nil, err + } + + // backfill clone url + repo.GitURL = c.urlProvider.GenerateRepoCloneURL(repo.Path) + + return repo, nil +} diff --git a/internal/api/controller/repo/get_branch.go b/internal/api/controller/repo/get_branch.go new file mode 100644 index 0000000000..0eb4f69af6 --- /dev/null +++ b/internal/api/controller/repo/get_branch.go @@ -0,0 +1,51 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package repo + +import ( + "context" + "fmt" + + "github.com/harness/gitness/gitrpc" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types/enum" +) + +// GetBranch gets a repo branch. +func (c *Controller) GetBranch(ctx context.Context, + session *auth.Session, + repoRef string, + branchName string, +) (*Branch, error) { + repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoView, true) + if err != nil { + return nil, err + } + + rpcOut, err := c.gitRPCClient.GetBranch(ctx, &gitrpc.GetBranchParams{ + ReadParams: CreateRPCReadParams(repo), + BranchName: branchName, + }) + if err != nil { + return nil, fmt.Errorf("failed to get branch from gitrpc: %w", err) + } + + branch, err := mapBranch(rpcOut.Branch) + if err != nil { + return nil, fmt.Errorf("failed to map branch: %w", err) + } + + return &branch, nil +} diff --git a/internal/api/controller/repo/get_commit.go b/internal/api/controller/repo/get_commit.go new file mode 100644 index 0000000000..30e4f61dbe --- /dev/null +++ b/internal/api/controller/repo/get_commit.go @@ -0,0 +1,54 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package repo
+
+import (
+	"context"
+	"fmt"
+
+	"github.com/harness/gitness/gitrpc"
+	"github.com/harness/gitness/internal/api/controller"
+	"github.com/harness/gitness/internal/auth"
+	"github.com/harness/gitness/types"
+	"github.com/harness/gitness/types/enum"
+)
+
+// GetCommit gets a repo commit.
+func (c *Controller) GetCommit(ctx context.Context,
+	session *auth.Session,
+	repoRef string,
+	sha string,
+) (*types.Commit, error) {
+	repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoView, true)
+	if err != nil {
+		return nil, err
+	}
+
+	rpcOut, err := c.gitRPCClient.GetCommit(ctx, &gitrpc.GetCommitParams{
+		ReadParams: CreateRPCReadParams(repo),
+		SHA: sha,
+	})
+	if err != nil {
+		return nil, fmt.Errorf("failed to get commit from gitrpc: %w", err)
+	}
+
+	rpcCommit := rpcOut.Commit
+	commit, err := controller.MapCommit(&rpcCommit)
+	if err != nil {
+		return nil, fmt.Errorf("failed to map commit: %w", err)
+	}
+
+	return commit, nil
+}
diff --git a/internal/api/controller/repo/get_commit_divergences.go b/internal/api/controller/repo/get_commit_divergences.go
new file mode 100644
index 0000000000..713fa29643
--- /dev/null
+++ b/internal/api/controller/repo/get_commit_divergences.go
@@ -0,0 +1,101 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package repo
+
+import (
+	"context"
+
+	"github.com/harness/gitness/gitrpc"
+	"github.com/harness/gitness/internal/api/request"
+	"github.com/harness/gitness/internal/api/usererror"
+	"github.com/harness/gitness/internal/auth"
+	"github.com/harness/gitness/types/enum"
+)
+
+type GetCommitDivergencesInput struct {
+	// MaxCount restricts the maximum number of diverging commits that are counted.
+	// IMPORTANT: This restricts the total commit count, so a (5, 18) restricted to 10 will return (0, 10)
+	MaxCount int32 `json:"max_count"`
+	Requests []CommitDivergenceRequest `json:"requests"`
+}
+
+// CommitDivergenceRequest contains the refs for which the diverging commits should be counted.
+type CommitDivergenceRequest struct {
+	// From is the ref from which the counting of the diverging commits starts.
+	From string `json:"from"`
+	// To is the ref at which the counting of the diverging commits ends.
+	// If the value is empty the divergence is calculated to the default branch of the repo.
+	To string `json:"to"`
+}
+
+// CommitDivergence contains the information of the count of diverging commits between two refs.
+type CommitDivergence struct {
+	// Ahead is the count of commits the 'From' ref is ahead of the 'To' ref.
+	Ahead int32 `json:"ahead"`
+	// Behind is the count of commits the 'From' ref is behind the 'To' ref.
+	Behind int32 `json:"behind"`
+}
+
+// GetCommitDivergences returns the commit divergences between reference pairs.
+func (c *Controller) GetCommitDivergences(ctx context.Context,
+	session *auth.Session,
+	repoRef string,
+	in *GetCommitDivergencesInput,
+) ([]CommitDivergence, error) {
+	repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoView, true)
+	if err != nil {
+		return nil, err
+	}
+
+	// if no requests were provided return an empty list
+	if in == nil || len(in.Requests) == 0 {
+		return []CommitDivergence{}, nil
+	}
+
+	// if num of requests > page max return error
+	if len(in.Requests) > request.PerPageMax {
+		return nil, usererror.ErrRequestTooLarge
+	}
+
+	// map to rpc params
+	options := &gitrpc.GetCommitDivergencesParams{
+		ReadParams: CreateRPCReadParams(repo),
+		MaxCount: in.MaxCount,
+		Requests: make([]gitrpc.CommitDivergenceRequest, len(in.Requests)),
+	}
+	for i := range in.Requests {
+		options.Requests[i].From = in.Requests[i].From
+		options.Requests[i].To = in.Requests[i].To
+		// backfill default branch if no 'to' was provided
+		if len(options.Requests[i].To) == 0 {
+			options.Requests[i].To = repo.DefaultBranch
+		}
+	}
+
+	// TODO: We should cache the responses as times can reach multiple seconds
+	rpcOutput, err := c.gitRPCClient.GetCommitDivergences(ctx, options)
+	if err != nil {
+		return nil, err
+	}
+
+	// map to output type
+	divergences := make([]CommitDivergence, len(rpcOutput.Divergences))
+	for i := range rpcOutput.Divergences {
+		divergences[i].Ahead = rpcOutput.Divergences[i].Ahead
+		divergences[i].Behind = rpcOutput.Divergences[i].Behind
+	}
+
+	return divergences, nil
+}
diff --git a/internal/api/controller/repo/import.go b/internal/api/controller/repo/import.go
new file mode 100644
index 0000000000..d9f4b65c6c
--- /dev/null
+++ b/internal/api/controller/repo/import.go
@@ -0,0 +1,88 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package repo
+
+import (
+	"context"
+	"fmt"
+
+	"github.com/harness/gitness/internal/auth"
+	"github.com/harness/gitness/internal/services/importer"
+	"github.com/harness/gitness/store/database/dbtx"
+	"github.com/harness/gitness/types"
+)
+
+type ImportInput struct {
+	ParentRef string `json:"parent_ref"`
+	UID string `json:"uid"`
+	Description string `json:"description"`
+
+	Provider importer.Provider `json:"provider"`
+	ProviderRepo string `json:"provider_repo"`
+}
+
+// Import creates a new empty repository and starts git import to it from a remote repository.
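+//
+// A minimal usage sketch (values are illustrative; the Provider value is
+// defined by the importer package and omitted here):
+//
+//	repo, err := ctrl.Import(ctx, session, &ImportInput{
+//		ParentRef:    "my-space",
+//		UID:          "my-repo",
+//		ProviderRepo: "some-org/some-repo",
+//	})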
+func (c *Controller) Import(ctx context.Context, session *auth.Session, in *ImportInput) (*types.Repository, error) {
+	parentSpace, err := c.getSpaceCheckAuthRepoCreation(ctx, session, in.ParentRef)
+	if err != nil {
+		return nil, err
+	}
+
+	err = c.sanitizeImportInput(in)
+	if err != nil {
+		return nil, fmt.Errorf("failed to sanitize input: %w", err)
+	}
+
+	remoteRepository, err := importer.LoadRepositoryFromProvider(ctx, in.Provider, in.ProviderRepo)
+	if err != nil {
+		return nil, err
+	}
+
+	var repo *types.Repository
+	err = dbtx.New(c.db).WithTx(ctx, func(ctx context.Context) error {
+		repo = remoteRepository.ToRepo(parentSpace.ID, in.UID, in.Description, &session.Principal)
+
+		err = c.repoStore.Create(ctx, repo)
+		if err != nil {
+			return fmt.Errorf("failed to create repository in storage: %w", err)
+		}
+
+		err = c.importer.Run(ctx, in.Provider, repo, remoteRepository.CloneURL)
+		if err != nil {
+			return fmt.Errorf("failed to start import repository job: %w", err)
+		}
+
+		return nil
+	})
+	if err != nil {
+		return nil, err
+	}
+
+	repo.GitURL = c.urlProvider.GenerateRepoCloneURL(repo.Path)
+
+	return repo, nil
+}
+
+func (c *Controller) sanitizeImportInput(in *ImportInput) error {
+	if err := c.validateParentRef(in.ParentRef); err != nil {
+		return err
+	}
+
+	if err := c.uidCheck(in.UID, false); err != nil {
+		return err
+	}
+
+	return nil
+}
diff --git a/internal/api/controller/repo/import_cancel.go b/internal/api/controller/repo/import_cancel.go
new file mode 100644
index 0000000000..eec36ef4d9
--- /dev/null
+++ b/internal/api/controller/repo/import_cancel.go
@@ -0,0 +1,51 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package repo
+
+import (
+	"context"
+	"fmt"
+
+	apiauth "github.com/harness/gitness/internal/api/auth"
+	"github.com/harness/gitness/internal/api/usererror"
+	"github.com/harness/gitness/internal/auth"
+	"github.com/harness/gitness/types/enum"
+)
+
+// ImportCancel cancels a repository import.
+func (c *Controller) ImportCancel(ctx context.Context,
+	session *auth.Session,
+	repoRef string,
+) error {
+	// note: can't use c.getRepoCheckAccess because this needs to fetch a repo being imported.
+	repo, err := c.repoStore.FindByRef(ctx, repoRef)
+	if err != nil {
+		return err
+	}
+
+	if err = apiauth.CheckRepo(ctx, c.authorizer, session, repo, enum.PermissionRepoDelete, false); err != nil {
+		return err
+	}
+
+	if !repo.Importing {
+		return usererror.BadRequest("repository is not being imported")
+	}
+
+	if err = c.importer.Cancel(ctx, repo); err != nil {
+		return fmt.Errorf("failed to cancel repository import: %w", err)
+	}
+
+	return c.DeleteNoAuth(ctx, session, repo)
+}
diff --git a/internal/api/controller/repo/import_progress.go b/internal/api/controller/repo/import_progress.go
new file mode 100644
index 0000000000..0d283d558c
--- /dev/null
+++ b/internal/api/controller/repo/import_progress.go
@@ -0,0 +1,54 @@
+// Copyright 2023 Harness, Inc.
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package repo + +import ( + "context" + "errors" + "fmt" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/internal/services/importer" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +// ImportProgress returns progress of the import job. +func (c *Controller) ImportProgress(ctx context.Context, + session *auth.Session, + repoRef string, +) (types.JobProgress, error) { + // note: can't use c.getRepoCheckAccess because this needs to fetch a repo being imported. + repo, err := c.repoStore.FindByRef(ctx, repoRef) + if err != nil { + return types.JobProgress{}, err + } + + if err = apiauth.CheckRepo(ctx, c.authorizer, session, repo, enum.PermissionRepoView, false); err != nil { + return types.JobProgress{}, err + } + + progress, err := c.importer.GetProgress(ctx, repo) + if errors.Is(err, importer.ErrNotFound) { + return types.JobProgress{}, usererror.NotFound("No recent or ongoing import found for repository.") + } + if err != nil { + return types.JobProgress{}, fmt.Errorf("failed to retrieve import progress: %w", err) + } + + return progress, err +} diff --git a/internal/api/controller/repo/list_branches.go b/internal/api/controller/repo/list_branches.go new file mode 100644 index 0000000000..72b3e7895c --- /dev/null +++ b/internal/api/controller/repo/list_branches.go @@ -0,0 +1,112 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package repo + +import ( + "context" + "fmt" + + "github.com/harness/gitness/gitrpc" + "github.com/harness/gitness/internal/api/controller" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +type Branch struct { + Name string `json:"name"` + SHA string `json:"sha"` + Commit *types.Commit `json:"commit,omitempty"` +} + +// ListBranches lists the branches of a repo. 
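+//
+// A minimal usage sketch (assumes a wired-up Controller and session;
+// the filter values are examples only):
+//
+//	branches, err := ctrl.ListBranches(ctx, session, "my-space/my-repo", true,
+//		&types.BranchFilter{Page: 1, Size: 30})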
+func (c *Controller) ListBranches(ctx context.Context, + session *auth.Session, + repoRef string, + includeCommit bool, + filter *types.BranchFilter, +) ([]Branch, error) { + repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoView, true) + if err != nil { + return nil, err + } + + rpcOut, err := c.gitRPCClient.ListBranches(ctx, &gitrpc.ListBranchesParams{ + ReadParams: CreateRPCReadParams(repo), + IncludeCommit: includeCommit, + Query: filter.Query, + Sort: mapToRPCBranchSortOption(filter.Sort), + Order: mapToRPCSortOrder(filter.Order), + Page: int32(filter.Page), + PageSize: int32(filter.Size), + }) + if err != nil { + return nil, err + } + + branches := make([]Branch, len(rpcOut.Branches)) + for i := range rpcOut.Branches { + branches[i], err = mapBranch(rpcOut.Branches[i]) + if err != nil { + return nil, fmt.Errorf("failed to map branch: %w", err) + } + } + + return branches, nil +} + +func mapToRPCBranchSortOption(o enum.BranchSortOption) gitrpc.BranchSortOption { + switch o { + case enum.BranchSortOptionDate: + return gitrpc.BranchSortOptionDate + case enum.BranchSortOptionName: + return gitrpc.BranchSortOptionName + case enum.BranchSortOptionDefault: + return gitrpc.BranchSortOptionDefault + default: + // no need to error out - just use default for sorting + return gitrpc.BranchSortOptionDefault + } +} + +func mapToRPCSortOrder(o enum.Order) gitrpc.SortOrder { + switch o { + case enum.OrderAsc: + return gitrpc.SortOrderAsc + case enum.OrderDesc: + return gitrpc.SortOrderDesc + case enum.OrderDefault: + return gitrpc.SortOrderDefault + default: + // no need to error out - just use default for sorting + return gitrpc.SortOrderDefault + } +} + +func mapBranch(b gitrpc.Branch) (Branch, error) { + var commit *types.Commit + if b.Commit != nil { + var err error + commit, err = controller.MapCommit(b.Commit) + if err != nil { + return Branch{}, err + } + } + return Branch{ + Name: b.Name, + SHA: b.SHA, + Commit: commit, + }, nil +} diff --git a/internal/api/controller/repo/list_commit_tags.go b/internal/api/controller/repo/list_commit_tags.go new file mode 100644 index 0000000000..f76e03ef0b --- /dev/null +++ b/internal/api/controller/repo/list_commit_tags.go @@ -0,0 +1,116 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package repo + +import ( + "context" + "fmt" + + "github.com/harness/gitness/gitrpc" + "github.com/harness/gitness/internal/api/controller" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +type CommitTag struct { + Name string `json:"name"` + SHA string `json:"sha"` + IsAnnotated bool `json:"is_annotated"` + Title string `json:"title,omitempty"` + Message string `json:"message,omitempty"` + Tagger *types.Signature `json:"tagger,omitempty"` + Commit *types.Commit `json:"commit,omitempty"` +} + +// ListCommitTags lists the commit tags of a repo. 
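+//
+// A minimal usage sketch (assumes a wired-up Controller and session;
+// the filter values are examples only):
+//
+//	tags, err := ctrl.ListCommitTags(ctx, session, "my-space/my-repo", true,
+//		&types.TagFilter{Page: 1, Size: 30})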
+func (c *Controller) ListCommitTags(ctx context.Context, + session *auth.Session, + repoRef string, + includeCommit bool, + filter *types.TagFilter, +) ([]CommitTag, error) { + repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoView, true) + if err != nil { + return nil, err + } + + rpcOut, err := c.gitRPCClient.ListCommitTags(ctx, &gitrpc.ListCommitTagsParams{ + ReadParams: CreateRPCReadParams(repo), + IncludeCommit: includeCommit, + Query: filter.Query, + Sort: mapToRPCTagSortOption(filter.Sort), + Order: mapToRPCSortOrder(filter.Order), + Page: int32(filter.Page), + PageSize: int32(filter.Size), + }) + if err != nil { + return nil, err + } + + tags := make([]CommitTag, len(rpcOut.Tags)) + for i := range rpcOut.Tags { + tags[i], err = mapCommitTag(rpcOut.Tags[i]) + if err != nil { + return nil, fmt.Errorf("failed to map CommitTag: %w", err) + } + } + + return tags, nil +} + +func mapToRPCTagSortOption(o enum.TagSortOption) gitrpc.TagSortOption { + switch o { + case enum.TagSortOptionDate: + return gitrpc.TagSortOptionDate + case enum.TagSortOptionName: + return gitrpc.TagSortOptionName + case enum.TagSortOptionDefault: + return gitrpc.TagSortOptionDefault + default: + // no need to error out - just use default for sorting + return gitrpc.TagSortOptionDefault + } +} + +func mapCommitTag(t gitrpc.CommitTag) (CommitTag, error) { + var commit *types.Commit + if t.Commit != nil { + var err error + commit, err = controller.MapCommit(t.Commit) + if err != nil { + return CommitTag{}, err + } + } + + var tagger *types.Signature + if t.Tagger != nil { + var err error + tagger, err = controller.MapSignature(t.Tagger) + if err != nil { + return CommitTag{}, err + } + } + + return CommitTag{ + Name: t.Name, + SHA: t.SHA, + IsAnnotated: t.IsAnnotated, + Title: t.Title, + Message: t.Message, + Tagger: tagger, + Commit: commit, + }, nil +} diff --git a/internal/api/controller/repo/list_commits.go b/internal/api/controller/repo/list_commits.go new file mode 100644 index 0000000000..81eef11ba0 --- /dev/null +++ b/internal/api/controller/repo/list_commits.go @@ -0,0 +1,83 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package repo + +import ( + "context" + "fmt" + + "github.com/harness/gitness/gitrpc" + "github.com/harness/gitness/internal/api/controller" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +// ListCommits lists the commits of a repo. 
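+//
+// A minimal usage sketch (assumes a wired-up Controller and session; an empty
+// gitRef falls back to the default branch, filter values are examples only):
+//
+//	res, err := ctrl.ListCommits(ctx, session, "my-space/my-repo", "",
+//		&types.CommitFilter{Page: 1, Limit: 20})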
+func (c *Controller) ListCommits(ctx context.Context, + session *auth.Session, + repoRef string, + gitRef string, + filter *types.CommitFilter, +) (types.ListCommitResponse, error) { + repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoView, true) + if err != nil { + return types.ListCommitResponse{}, err + } + + // set gitRef to default branch in case an empty reference was provided + if gitRef == "" { + gitRef = repo.DefaultBranch + } + + rpcOut, err := c.gitRPCClient.ListCommits(ctx, &gitrpc.ListCommitsParams{ + ReadParams: CreateRPCReadParams(repo), + GitREF: gitRef, + After: filter.After, + Page: int32(filter.Page), + Limit: int32(filter.Limit), + Path: filter.Path, + Since: filter.Since, + Until: filter.Until, + Committer: filter.Committer, + }) + if err != nil { + return types.ListCommitResponse{}, err + } + + commits := make([]types.Commit, len(rpcOut.Commits)) + for i := range rpcOut.Commits { + var commit *types.Commit + commit, err = controller.MapCommit(&rpcOut.Commits[i]) + if err != nil { + return types.ListCommitResponse{}, fmt.Errorf("failed to map commit: %w", err) + } + commits[i] = *commit + } + + renameDetailList := make([]types.RenameDetails, len(rpcOut.RenameDetails)) + for i := range rpcOut.RenameDetails { + renameDetails := controller.MapRenameDetails(rpcOut.RenameDetails[i]) + if renameDetails == nil { + return types.ListCommitResponse{}, fmt.Errorf("rename details was nil") + } + renameDetailList[i] = *renameDetails + } + return types.ListCommitResponse{ + Commits: commits, + RenameDetails: renameDetailList, + TotalCommits: rpcOut.TotalCommits, + }, nil +} diff --git a/internal/api/controller/repo/list_pipelines.go b/internal/api/controller/repo/list_pipelines.go new file mode 100644 index 0000000000..827cac5f36 --- /dev/null +++ b/internal/api/controller/repo/list_pipelines.go @@ -0,0 +1,68 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package repo + +import ( + "context" + "fmt" + + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/store/database/dbtx" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +// ListPipelines lists the pipelines under a repository. 
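+//
+// A minimal usage sketch (assumes a wired-up Controller and session; the
+// latest flag switches between List and ListLatest on the pipeline store):
+//
+//	pipelines, total, err := ctrl.ListPipelines(ctx, session, "my-space/my-repo",
+//		false, types.ListQueryFilter{})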
+func (c *Controller) ListPipelines(
+	ctx context.Context,
+	session *auth.Session,
+	repoRef string,
+	latest bool,
+	filter types.ListQueryFilter,
+) ([]*types.Pipeline, int64, error) {
+	repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoView, true)
+	if err != nil {
+		return nil, 0, err
+	}
+
+	var count int64
+	var pipelines []*types.Pipeline
+
+	err = dbtx.New(c.db).WithTx(ctx, func(ctx context.Context) (err error) {
+		count, err = c.pipelineStore.Count(ctx, repo.ID, filter)
+		if err != nil {
+			return fmt.Errorf("failed to count pipelines: %w", err)
+		}
+
+		if !latest {
+			pipelines, err = c.pipelineStore.List(ctx, repo.ID, filter)
+			if err != nil {
+				return fmt.Errorf("failed to list pipelines: %w", err)
+			}
+		} else {
+			pipelines, err = c.pipelineStore.ListLatest(ctx, repo.ID, filter)
+			if err != nil {
+				return fmt.Errorf("failed to list latest pipelines: %w", err)
+			}
+		}
+
+		return
+	}, dbtx.TxDefaultReadOnly)
+	if err != nil {
+		return pipelines, count, fmt.Errorf("failed to list pipelines: %w", err)
+	}
+
+	return pipelines, count, nil
+}
diff --git a/internal/api/controller/repo/list_service_accounts.go b/internal/api/controller/repo/list_service_accounts.go
new file mode 100644
index 0000000000..f761aafd2e
--- /dev/null
+++ b/internal/api/controller/repo/list_service_accounts.go
@@ -0,0 +1,37 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package repo
+
+import (
+	"context"
+
+	"github.com/harness/gitness/internal/auth"
+	"github.com/harness/gitness/types"
+	"github.com/harness/gitness/types/enum"
+)
+
+// ListServiceAccounts lists the service accounts of a repo.
+
+func (c *Controller) ListServiceAccounts(ctx context.Context,
+	session *auth.Session,
+	repoRef string,
+) ([]*types.ServiceAccount, error) {
+	repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoView, false)
+	if err != nil {
+		return nil, err
+	}
+
+	return c.principalStore.ListServiceAccounts(ctx, enum.ParentResourceTypeRepo, repo.ID)
+}
diff --git a/internal/api/controller/repo/merge_check.go b/internal/api/controller/repo/merge_check.go
new file mode 100644
index 0000000000..c9804c5dfc
--- /dev/null
+++ b/internal/api/controller/repo/merge_check.go
@@ -0,0 +1,71 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+ +package repo + +import ( + "context" + "fmt" + + "github.com/harness/gitness/gitrpc" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types/enum" +) + +type MergeCheck struct { + Mergeable bool `json:"mergeable"` + ConflictFiles []string `json:"conflict_files,omitempty"` +} + +func (c *Controller) MergeCheck( + ctx context.Context, + session *auth.Session, + repoRef string, + diffPath string, +) (MergeCheck, error) { + repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoView, false) + if err != nil { + return MergeCheck{}, err + } + + info, err := parseDiffPath(diffPath) + if err != nil { + return MergeCheck{}, err + } + + writeParams, err := CreateRPCWriteParams(ctx, c.urlProvider, session, repo) + if err != nil { + return MergeCheck{}, fmt.Errorf("failed to create rpc write params: %w", err) + } + + _, err = c.gitRPCClient.Merge(ctx, &gitrpc.MergeParams{ + WriteParams: writeParams, + BaseBranch: info.BaseRef, + HeadRepoUID: writeParams.RepoUID, // forks are not supported for now + HeadBranch: info.HeadRef, + }) + if err != nil { + if gitrpc.ErrorStatus(err) == gitrpc.StatusNotMergeable { + return MergeCheck{ + Mergeable: false, + ConflictFiles: gitrpc.AsConflictFilesError(err), + }, nil + } + return MergeCheck{}, fmt.Errorf("merge check execution failed: %w", err) + } + + return MergeCheck{ + Mergeable: true, + }, nil +} diff --git a/internal/api/controller/repo/move.go b/internal/api/controller/repo/move.go new file mode 100644 index 0000000000..73f283725a --- /dev/null +++ b/internal/api/controller/repo/move.go @@ -0,0 +1,94 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package repo + +import ( + "context" + "fmt" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +// MoveInput is used for moving a repo. +type MoveInput struct { + UID *string `json:"uid"` +} + +func (i *MoveInput) hasChanges(repo *types.Repository) bool { + if i.UID != nil && *i.UID != repo.UID { + return true + } + + return false +} + +// Move moves a repository to a new space uid. +// TODO: Add support for moving to other parents and aliases. 
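+//
+// An illustrative request body (uid is currently the only supported field;
+// a null or omitted uid leaves the repository unchanged):
+//
+//	{
+//	  "uid": "new-repo-name"
+//	}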
+//
+//nolint:gocognit // refactor if needed
+func (c *Controller) Move(ctx context.Context,
+	session *auth.Session,
+	repoRef string,
+	in *MoveInput,
+) (*types.Repository, error) {
+	repo, err := c.repoStore.FindByRef(ctx, repoRef)
+	if err != nil {
+		return nil, err
+	}
+
+	if repo.Importing {
+		return nil, usererror.BadRequest("can't move a repo that is being imported")
+	}
+
+	if err = apiauth.CheckRepo(ctx, c.authorizer, session, repo, enum.PermissionRepoEdit, false); err != nil {
+		return nil, err
+	}
+
+	if !in.hasChanges(repo) {
+		return repo, nil
+	}
+
+	if err = c.sanitizeMoveInput(in); err != nil {
+		return nil, fmt.Errorf("failed to sanitize input: %w", err)
+	}
+
+	repo, err = c.repoStore.UpdateOptLock(ctx, repo, func(r *types.Repository) error {
+		if in.UID != nil {
+			r.UID = *in.UID
+		}
+		return nil
+	})
+	if err != nil {
+		return nil, fmt.Errorf("failed to update repo: %w", err)
+	}
+
+	repo.GitURL = c.urlProvider.GenerateRepoCloneURL(repo.Path)
+
+	return repo, nil
+}
+
+func (c *Controller) sanitizeMoveInput(in *MoveInput) error {
+	if in.UID != nil {
+		if err := c.uidCheck(*in.UID, false); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
diff --git a/internal/api/controller/repo/pipeline_generate.go b/internal/api/controller/repo/pipeline_generate.go
new file mode 100644
index 0000000000..4a0fafc230
--- /dev/null
+++ b/internal/api/controller/repo/pipeline_generate.go
@@ -0,0 +1,45 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package repo
+
+import (
+	"context"
+	"fmt"
+
+	"github.com/harness/gitness/gitrpc"
+	"github.com/harness/gitness/internal/auth"
+	"github.com/harness/gitness/types/enum"
+)
+
+// PipelineGenerate returns an automatically generated pipeline YAML for a repository.
+func (c *Controller) PipelineGenerate(
+	ctx context.Context,
+	session *auth.Session,
+	repoRef string,
+) ([]byte, error) {
+	repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoView, true)
+	if err != nil {
+		return nil, err
+	}
+
+	result, err := c.gitRPCClient.GeneratePipeline(ctx, &gitrpc.GeneratePipelineParams{
+		ReadParams: CreateRPCReadParams(repo),
+	})
+	if err != nil {
+		return nil, fmt.Errorf("failed to generate pipeline: %w", err)
+	}
+
+	return result.PipelineYAML, nil
+}
diff --git a/internal/api/controller/repo/raw.go b/internal/api/controller/repo/raw.go
new file mode 100644
index 0000000000..aade598824
--- /dev/null
+++ b/internal/api/controller/repo/raw.go
@@ -0,0 +1,75 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and +// limitations under the License. + +package repo + +import ( + "context" + "fmt" + "io" + + "github.com/harness/gitness/gitrpc" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types/enum" +) + +// Raw finds the file of the repo at the given path and returns its raw content. +// If no gitRef is provided, the content is retrieved from the default branch. +func (c *Controller) Raw(ctx context.Context, + session *auth.Session, + repoRef string, + gitRef string, + repoPath string, +) (io.Reader, int64, error) { + repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoView, true) + if err != nil { + return nil, 0, err + } + + // set gitRef to default branch in case an empty reference was provided + if gitRef == "" { + gitRef = repo.DefaultBranch + } + + // create read params once + readParams := CreateRPCReadParams(repo) + treeNodeOutput, err := c.gitRPCClient.GetTreeNode(ctx, &gitrpc.GetTreeNodeParams{ + ReadParams: readParams, + GitREF: gitRef, + Path: repoPath, + IncludeLatestCommit: false, + }) + if err != nil { + return nil, 0, fmt.Errorf("failed to read tree node: %w", err) + } + + // viewing Raw content is only supported for blob content + if treeNodeOutput.Node.Type != gitrpc.TreeNodeTypeBlob { + return nil, 0, usererror.BadRequestf( + "Object in '%s' at '/%s' is of type '%s'. Only objects of type %s support raw viewing.", + gitRef, repoPath, treeNodeOutput.Node.Type, gitrpc.TreeNodeTypeBlob) + } + + blobReader, err := c.gitRPCClient.GetBlob(ctx, &gitrpc.GetBlobParams{ + ReadParams: readParams, + SHA: treeNodeOutput.Node.SHA, + SizeLimit: 0, // no size limit, we stream whatever data there is + }) + if err != nil { + return nil, 0, fmt.Errorf("failed to read blob from gitrpc: %w", err) + } + + return blobReader.Content, blobReader.ContentSize, nil +} diff --git a/internal/api/controller/repo/update.go b/internal/api/controller/repo/update.go new file mode 100644 index 0000000000..5aba21a6f0 --- /dev/null +++ b/internal/api/controller/repo/update.go @@ -0,0 +1,88 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package repo + +import ( + "context" + "fmt" + "strings" + + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/check" + "github.com/harness/gitness/types/enum" +) + +// UpdateInput is used for updating a repo. +type UpdateInput struct { + Description *string `json:"description"` + IsPublic *bool `json:"is_public"` +} + +func (in *UpdateInput) hasChanges(repo *types.Repository) bool { + return (in.Description != nil && *in.Description != repo.Description) || + (in.IsPublic != nil && *in.IsPublic != repo.IsPublic) +} + +// Update updates a repository. 
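+//
+// An illustrative request body (values are examples only; fields that are
+// omitted or null are left unchanged):
+//
+//	{
+//	  "description": "updated description",
+//	  "is_public": false
+//	}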
+func (c *Controller) Update(ctx context.Context, + session *auth.Session, + repoRef string, + in *UpdateInput, +) (*types.Repository, error) { + repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoEdit, false) + if err != nil { + return nil, err + } + + if !in.hasChanges(repo) { + return repo, nil + } + + if err = sanitizeUpdateInput(in); err != nil { + return nil, fmt.Errorf("failed to sanitize input: %w", err) + } + + repo, err = c.repoStore.UpdateOptLock(ctx, repo, func(repo *types.Repository) error { + // update values only if provided + if in.Description != nil { + repo.Description = *in.Description + } + if in.IsPublic != nil { + repo.IsPublic = *in.IsPublic + } + + return nil + }) + if err != nil { + return nil, err + } + + // backfill repo url + repo.GitURL = c.urlProvider.GenerateRepoCloneURL(repo.Path) + + return repo, nil +} + +func sanitizeUpdateInput(in *UpdateInput) error { + if in.Description != nil { + *in.Description = strings.TrimSpace(*in.Description) + if err := check.Description(*in.Description); err != nil { + return err + } + } + + return nil +} diff --git a/internal/api/controller/repo/wire.go b/internal/api/controller/repo/wire.go new file mode 100644 index 0000000000..ff3b2c126b --- /dev/null +++ b/internal/api/controller/repo/wire.go @@ -0,0 +1,45 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package repo + +import ( + "github.com/harness/gitness/gitrpc" + "github.com/harness/gitness/internal/auth/authz" + "github.com/harness/gitness/internal/services/importer" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/internal/url" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/check" + + "github.com/google/wire" + "github.com/jmoiron/sqlx" +) + +// WireSet provides a wire set for this package. +var WireSet = wire.NewSet( + ProvideController, +) + +func ProvideController(config *types.Config, db *sqlx.DB, urlProvider *url.Provider, + uidCheck check.PathUID, authorizer authz.Authorizer, repoStore store.RepoStore, + spaceStore store.SpaceStore, pipelineStore store.PipelineStore, + principalStore store.PrincipalStore, rpcClient gitrpc.Interface, + importer *importer.Repository, +) *Controller { + return NewController(config.Git.DefaultBranch, db, urlProvider, + uidCheck, authorizer, repoStore, + spaceStore, pipelineStore, principalStore, rpcClient, + importer) +} diff --git a/internal/api/controller/secret/controller.go b/internal/api/controller/secret/controller.go new file mode 100644 index 0000000000..f8deb0a465 --- /dev/null +++ b/internal/api/controller/secret/controller.go @@ -0,0 +1,51 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package secret + +import ( + "github.com/harness/gitness/encrypt" + "github.com/harness/gitness/internal/auth/authz" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/types/check" + + "github.com/jmoiron/sqlx" +) + +type Controller struct { + db *sqlx.DB + uidCheck check.PathUID + encrypter encrypt.Encrypter + secretStore store.SecretStore + authorizer authz.Authorizer + spaceStore store.SpaceStore +} + +func NewController( + db *sqlx.DB, + uidCheck check.PathUID, + authorizer authz.Authorizer, + encrypter encrypt.Encrypter, + secretStore store.SecretStore, + spaceStore store.SpaceStore, +) *Controller { + return &Controller{ + db: db, + uidCheck: uidCheck, + encrypter: encrypter, + secretStore: secretStore, + authorizer: authorizer, + spaceStore: spaceStore, + } +} diff --git a/internal/api/controller/secret/create.go b/internal/api/controller/secret/create.go new file mode 100644 index 0000000000..d33cf38872 --- /dev/null +++ b/internal/api/controller/secret/create.go @@ -0,0 +1,126 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package secret + +import ( + "context" + "fmt" + "strconv" + "strings" + "time" + + "github.com/harness/gitness/encrypt" + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/check" + "github.com/harness/gitness/types/enum" +) + +var ( + // errSecretRequiresParent if the user tries to create a secret without a parent space. 
+	errSecretRequiresParent = usererror.BadRequest(
+		"Parent space required - standalone secrets are not supported.")
+)
+
+type CreateInput struct {
+	Description string `json:"description"`
+	SpaceRef string `json:"space_ref"` // Ref of the parent space
+	UID string `json:"uid"`
+	Data string `json:"data"`
+}
+
+func (c *Controller) Create(ctx context.Context, session *auth.Session, in *CreateInput) (*types.Secret, error) {
+	parentSpace, err := c.spaceStore.FindByRef(ctx, in.SpaceRef)
+	if err != nil {
+		return nil, fmt.Errorf("failed to find parent by ref: %w", err)
+	}
+
+	err = apiauth.CheckSecret(ctx, c.authorizer, session, parentSpace.Path, in.UID, enum.PermissionSecretEdit)
+	if err != nil {
+		return nil, err
+	}
+
+	if err := c.sanitizeCreateInput(in); err != nil {
+		return nil, fmt.Errorf("failed to sanitize input: %w", err)
+	}
+
+	var secret *types.Secret
+	now := time.Now().UnixMilli()
+	secret = &types.Secret{
+		CreatedBy: session.Principal.ID,
+		Description: in.Description,
+		Data: in.Data,
+		SpaceID: parentSpace.ID,
+		UID: in.UID,
+		Created: now,
+		Updated: now,
+		Version: 0,
+	}
+	secret, err = enc(c.encrypter, secret)
+	if err != nil {
+		return nil, fmt.Errorf("could not encrypt secret: %w", err)
+	}
+	err = c.secretStore.Create(ctx, secret)
+	if err != nil {
+		return nil, fmt.Errorf("secret creation failed: %w", err)
+	}
+
+	return secret, nil
+}
+
+func (c *Controller) sanitizeCreateInput(in *CreateInput) error {
+	parentRefAsID, err := strconv.ParseInt(in.SpaceRef, 10, 64)
+
+	if (err == nil && parentRefAsID <= 0) || (len(strings.TrimSpace(in.SpaceRef)) == 0) {
+		return errSecretRequiresParent
+	}
+
+	if err := c.uidCheck(in.UID, false); err != nil {
+		return err
+	}
+
+	in.Description = strings.TrimSpace(in.Description)
+	return check.Description(in.Description)
+}
+
+// helper function returns the same secret with encrypted data.
+func enc(encrypt encrypt.Encrypter, secret *types.Secret) (*types.Secret, error) {
+	if secret == nil {
+		return nil, fmt.Errorf("cannot encrypt a nil secret")
+	}
+	s := *secret
+	ciphertext, err := encrypt.Encrypt(secret.Data)
+	if err != nil {
+		return nil, err
+	}
+	s.Data = string(ciphertext)
+	return &s, nil
+}
+
+// helper function returns the same secret with decrypted data.
+func dec(encrypt encrypt.Encrypter, secret *types.Secret) (*types.Secret, error) {
+	if secret == nil {
+		return nil, fmt.Errorf("cannot decrypt a nil secret")
+	}
+	s := *secret
+	plaintext, err := encrypt.Decrypt([]byte(secret.Data))
+	if err != nil {
+		return nil, err
+	}
+	s.Data = plaintext
+	return &s, nil
+}
diff --git a/internal/api/controller/secret/delete.go b/internal/api/controller/secret/delete.go
new file mode 100644
index 0000000000..5287537a46
--- /dev/null
+++ b/internal/api/controller/secret/delete.go
@@ -0,0 +1,41 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+ +package secret + +import ( + "context" + "fmt" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types/enum" +) + +func (c *Controller) Delete(ctx context.Context, session *auth.Session, spaceRef string, uid string) error { + space, err := c.spaceStore.FindByRef(ctx, spaceRef) + if err != nil { + return fmt.Errorf("failed to find space: %w", err) + } + + err = apiauth.CheckSecret(ctx, c.authorizer, session, space.Path, uid, enum.PermissionSecretDelete) + if err != nil { + return fmt.Errorf("failed to authorize: %w", err) + } + err = c.secretStore.DeleteByUID(ctx, space.ID, uid) + if err != nil { + return fmt.Errorf("could not delete secret: %w", err) + } + return nil +} diff --git a/internal/api/controller/secret/find.go b/internal/api/controller/secret/find.go new file mode 100644 index 0000000000..0fdd2c47f3 --- /dev/null +++ b/internal/api/controller/secret/find.go @@ -0,0 +1,50 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package secret + +import ( + "context" + "fmt" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +func (c *Controller) Find( + ctx context.Context, + session *auth.Session, + spaceRef string, + uid string, +) (*types.Secret, error) { + space, err := c.spaceStore.FindByRef(ctx, spaceRef) + if err != nil { + return nil, fmt.Errorf("failed to find space: %w", err) + } + err = apiauth.CheckSecret(ctx, c.authorizer, session, space.Path, uid, enum.PermissionSecretView) + if err != nil { + return nil, fmt.Errorf("failed to authorize: %w", err) + } + secret, err := c.secretStore.FindByUID(ctx, space.ID, uid) + if err != nil { + return nil, fmt.Errorf("failed to find secret: %w", err) + } + secret, err = dec(c.encrypter, secret) + if err != nil { + return nil, fmt.Errorf("could not decrypt secret: %w", err) + } + return secret, nil +} diff --git a/internal/api/controller/secret/update.go b/internal/api/controller/secret/update.go new file mode 100644 index 0000000000..7bf0f53e5d --- /dev/null +++ b/internal/api/controller/secret/update.go @@ -0,0 +1,92 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+
+package secret
+
+import (
+	"context"
+	"fmt"
+	"strings"
+
+	apiauth "github.com/harness/gitness/internal/api/auth"
+	"github.com/harness/gitness/internal/auth"
+	"github.com/harness/gitness/types"
+	"github.com/harness/gitness/types/check"
+	"github.com/harness/gitness/types/enum"
+)
+
+// UpdateInput is used for updating a secret.
+type UpdateInput struct {
+	UID *string `json:"uid"`
+	Description *string `json:"description"`
+	Data *string `json:"data"`
+}
+
+func (c *Controller) Update(
+	ctx context.Context,
+	session *auth.Session,
+	spaceRef string,
+	uid string,
+	in *UpdateInput,
+) (*types.Secret, error) {
+	space, err := c.spaceStore.FindByRef(ctx, spaceRef)
+	if err != nil {
+		return nil, fmt.Errorf("failed to find space: %w", err)
+	}
+
+	err = apiauth.CheckSecret(ctx, c.authorizer, session, space.Path, uid, enum.PermissionSecretEdit)
+	if err != nil {
+		return nil, fmt.Errorf("failed to authorize: %w", err)
+	}
+
+	secret, err := c.secretStore.FindByUID(ctx, space.ID, uid)
+	if err != nil {
+		return nil, fmt.Errorf("failed to find secret: %w", err)
+	}
+
+	return c.secretStore.UpdateOptLock(ctx, secret, func(original *types.Secret) error {
+		if in.UID != nil {
+			original.UID = *in.UID
+		}
+		if in.Description != nil {
+			original.Description = *in.Description
+		}
+		if in.Data != nil {
+			data, err := c.encrypter.Encrypt(*in.Data)
+			if err != nil {
+				return fmt.Errorf("could not encrypt secret: %w", err)
+			}
+			original.Data = string(data)
+		}
+
+		return nil
+	})
+}
+
+func (c *Controller) sanitizeUpdateInput(in *UpdateInput) error {
+	if in.UID != nil {
+		if err := c.uidCheck(*in.UID, false); err != nil {
+			return err
+		}
+	}
+
+	if in.Description != nil {
+		*in.Description = strings.TrimSpace(*in.Description)
+		if err := check.Description(*in.Description); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
diff --git a/internal/api/controller/secret/wire.go b/internal/api/controller/secret/wire.go
new file mode 100644
index 0000000000..d5b37a4556
--- /dev/null
+++ b/internal/api/controller/secret/wire.go
@@ -0,0 +1,40 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package secret
+
+import (
+	"github.com/harness/gitness/encrypt"
+	"github.com/harness/gitness/internal/auth/authz"
+	"github.com/harness/gitness/internal/store"
+	"github.com/harness/gitness/types/check"
+
+	"github.com/google/wire"
+	"github.com/jmoiron/sqlx"
+)
+
+// WireSet provides a wire set for this package.
+var WireSet = wire.NewSet( + ProvideController, +) + +func ProvideController(db *sqlx.DB, + uidCheck check.PathUID, + encrypter encrypt.Encrypter, + secretStore store.SecretStore, + authorizer authz.Authorizer, + spaceStore store.SpaceStore, +) *Controller { + return NewController(db, uidCheck, authorizer, encrypter, secretStore, spaceStore) +} diff --git a/internal/api/controller/service/controller.go b/internal/api/controller/service/controller.go new file mode 100644 index 0000000000..b874a778b9 --- /dev/null +++ b/internal/api/controller/service/controller.go @@ -0,0 +1,44 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package service + +import ( + "context" + + "github.com/harness/gitness/internal/auth/authz" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/check" +) + +type Controller struct { + principalUIDCheck check.PrincipalUID + authorizer authz.Authorizer + principalStore store.PrincipalStore +} + +func NewController(principalUIDCheck check.PrincipalUID, authorizer authz.Authorizer, + principalStore store.PrincipalStore) *Controller { + return &Controller{ + principalUIDCheck: principalUIDCheck, + authorizer: authorizer, + principalStore: principalStore, + } +} + +func findServiceFromUID(ctx context.Context, + principalStore store.PrincipalStore, serviceUID string) (*types.Service, error) { + return principalStore.FindServiceByUID(ctx, serviceUID) +} diff --git a/internal/api/controller/service/create.go b/internal/api/controller/service/create.go new file mode 100644 index 0000000000..56d791ef94 --- /dev/null +++ b/internal/api/controller/service/create.go @@ -0,0 +1,98 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package service + +import ( + "context" + "fmt" + "strings" + "time" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/check" + "github.com/harness/gitness/types/enum" + + "github.com/dchest/uniuri" +) + +// CreateInput is the input used for create operations. +type CreateInput struct { + UID string `json:"uid"` + Email string `json:"email"` + DisplayName string `json:"display_name"` +} + +// Create creates a new service. 
+func (c *Controller) Create(ctx context.Context, session *auth.Session, in *CreateInput) (*types.Service, error) { + // Ensure principal has required permissions (service is global, no explicit resource) + scope := &types.Scope{} + resource := &types.Resource{ + Type: enum.ResourceTypeService, + } + if err := apiauth.Check(ctx, c.authorizer, session, scope, resource, enum.PermissionServiceCreate); err != nil { + return nil, err + } + + return c.CreateNoAuth(ctx, in, false) +} + +/* + * CreateNoAuth creates a new service without auth checks. + * WARNING: Never call as part of user flow. + * + * Note: take admin separately to avoid potential vulnerabilities for user calls. + */ +func (c *Controller) CreateNoAuth(ctx context.Context, in *CreateInput, admin bool) (*types.Service, error) { + if err := c.sanitizeCreateInput(in); err != nil { + return nil, fmt.Errorf("invalid input: %w", err) + } + + svc := &types.Service{ + UID: in.UID, + Email: in.Email, + DisplayName: in.DisplayName, + Admin: admin, + Salt: uniuri.NewLen(uniuri.UUIDLen), + Created: time.Now().UnixMilli(), + Updated: time.Now().UnixMilli(), + } + + err := c.principalStore.CreateService(ctx, svc) + if err != nil { + return nil, err + } + + return svc, nil +} + +func (c *Controller) sanitizeCreateInput(in *CreateInput) error { + if err := c.principalUIDCheck(in.UID); err != nil { + return err + } + + in.Email = strings.TrimSpace(in.Email) + if err := check.Email(in.Email); err != nil { + return err + } + + in.DisplayName = strings.TrimSpace(in.DisplayName) + if err := check.DisplayName(in.DisplayName); err != nil { + return err + } + + return nil +} diff --git a/internal/api/controller/service/delete.go b/internal/api/controller/service/delete.go new file mode 100644 index 0000000000..588dd6b154 --- /dev/null +++ b/internal/api/controller/service/delete.go @@ -0,0 +1,41 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package service + +import ( + "context" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types/enum" +) + +/* + * Delete deletes a service. + */ +func (c *Controller) Delete(ctx context.Context, session *auth.Session, + serviceUID string) error { + svc, err := findServiceFromUID(ctx, c.principalStore, serviceUID) + if err != nil { + return err + } + + // Ensure principal has required permissions on parent + if err = apiauth.CheckService(ctx, c.authorizer, session, svc, enum.PermissionServiceDelete); err != nil { + return err + } + + return c.principalStore.DeleteService(ctx, svc.ID) +} diff --git a/internal/api/controller/service/find.go b/internal/api/controller/service/find.go new file mode 100644 index 0000000000..c0dc20b21c --- /dev/null +++ b/internal/api/controller/service/find.go @@ -0,0 +1,48 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package service + +import ( + "context" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +// Find tries to find the provided service. +func (c *Controller) Find(ctx context.Context, session *auth.Session, + serviceUID string) (*types.Service, error) { + svc, err := c.FindNoAuth(ctx, serviceUID) + if err != nil { + return nil, err + } + + // Ensure principal has required permissions on parent. + if err = apiauth.CheckService(ctx, c.authorizer, session, svc, enum.PermissionServiceView); err != nil { + return nil, err + } + + return svc, nil +} + +/* + * FindNoAuth finds a service without auth checks. + * WARNING: Never call as part of user flow. + */ +func (c *Controller) FindNoAuth(ctx context.Context, serviceUID string) (*types.Service, error) { + return findServiceFromUID(ctx, c.principalStore, serviceUID) +} diff --git a/internal/api/controller/service/list.go b/internal/api/controller/service/list.go new file mode 100644 index 0000000000..b18d85c260 --- /dev/null +++ b/internal/api/controller/service/list.go @@ -0,0 +1,49 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package service + +import ( + "context" + "fmt" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +// List lists all services of the system. +func (c *Controller) List(ctx context.Context, session *auth.Session) (int64, []*types.Service, error) { + // Ensure principal has required permissions (service is global, no explicit resource) + scope := &types.Scope{} + resource := &types.Resource{ + Type: enum.ResourceTypeService, + } + if err := apiauth.Check(ctx, c.authorizer, session, scope, resource, enum.PermissionServiceView); err != nil { + return 0, nil, err + } + + count, err := c.principalStore.CountServices(ctx) + if err != nil { + return 0, nil, fmt.Errorf("failed to count services: %w", err) + } + + repos, err := c.principalStore.ListServices(ctx) + if err != nil { + return 0, nil, fmt.Errorf("failed to list services: %w", err) + } + + return count, repos, nil +} diff --git a/internal/api/controller/service/update.go b/internal/api/controller/service/update.go new file mode 100644 index 0000000000..019cd63697 --- /dev/null +++ b/internal/api/controller/service/update.go @@ -0,0 +1,87 @@ +// Copyright 2023 Harness, Inc. 
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package service
+
+import (
+	"context"
+	"fmt"
+	"strings"
+	"time"
+
+	apiauth "github.com/harness/gitness/internal/api/auth"
+	"github.com/harness/gitness/internal/auth"
+	"github.com/harness/gitness/types"
+	"github.com/harness/gitness/types/check"
+	"github.com/harness/gitness/types/enum"
+
+	"github.com/gotidy/ptr"
+)
+
+// UpdateInput stores the information needed to update an existing service.
+type UpdateInput struct {
+	Email *string `json:"email"`
+	DisplayName *string `json:"display_name"`
+}
+
+// Update updates the provided service.
+func (c *Controller) Update(ctx context.Context, session *auth.Session,
+	serviceUID string, in *UpdateInput) (*types.Service, error) {
+	svc, err := findServiceFromUID(ctx, c.principalStore, serviceUID)
+	if err != nil {
+		return nil, err
+	}
+
+	// Ensure principal has required permissions on parent.
+	if err = apiauth.CheckService(ctx, c.authorizer, session, svc, enum.PermissionServiceEdit); err != nil {
+		return nil, err
+	}
+
+	if err = c.sanitizeUpdateInput(in); err != nil {
+		return nil, fmt.Errorf("invalid input: %w", err)
+	}
+
+	if in.Email != nil {
+		svc.Email = ptr.ToString(in.Email)
+	}
+	if in.DisplayName != nil {
+		svc.DisplayName = ptr.ToString(in.DisplayName)
+	}
+	svc.Updated = time.Now().UnixMilli()
+
+	err = c.principalStore.UpdateService(ctx, svc)
+	if err != nil {
+		return nil, err
+	}
+
+	return svc, nil
+}
+
+func (c *Controller) sanitizeUpdateInput(in *UpdateInput) error {
+	if in.Email != nil {
+		*in.Email = strings.TrimSpace(*in.Email)
+		if err := check.Email(*in.Email); err != nil {
+			return err
+		}
+	}
+
+	if in.DisplayName != nil {
+		*in.DisplayName = strings.TrimSpace(*in.DisplayName)
+		if err := check.DisplayName(*in.DisplayName); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
diff --git a/internal/api/controller/service/update_admin.go b/internal/api/controller/service/update_admin.go
new file mode 100644
index 0000000000..cb87cae048
--- /dev/null
+++ b/internal/api/controller/service/update_admin.go
@@ -0,0 +1,49 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package service
+
+import (
+	"context"
+	"time"
+
+	apiauth "github.com/harness/gitness/internal/api/auth"
+	"github.com/harness/gitness/internal/auth"
+	"github.com/harness/gitness/types"
+	"github.com/harness/gitness/types/enum"
+)
+
+// UpdateAdmin updates the admin state of a service.
+func (c *Controller) UpdateAdmin(ctx context.Context, session *auth.Session, + serviceUID string, admin bool) (*types.Service, error) { + sbc, err := findServiceFromUID(ctx, c.principalStore, serviceUID) + if err != nil { + return nil, err + } + + // Ensure principal has required permissions on parent. + if err = apiauth.CheckService(ctx, c.authorizer, session, sbc, enum.PermissionServiceEditAdmin); err != nil { + return nil, err + } + + sbc.Admin = admin + sbc.Updated = time.Now().UnixMilli() + + err = c.principalStore.UpdateService(ctx, sbc) + if err != nil { + return nil, err + } + + return sbc, nil +} diff --git a/internal/api/controller/service/wire.go b/internal/api/controller/service/wire.go new file mode 100644 index 0000000000..edf28fec3d --- /dev/null +++ b/internal/api/controller/service/wire.go @@ -0,0 +1,33 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package service + +import ( + "github.com/harness/gitness/internal/auth/authz" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/types/check" + + "github.com/google/wire" +) + +// WireSet provides a wire set for this package. +var WireSet = wire.NewSet( + NewController, +) + +func ProvideController(principalUIDCheck check.PrincipalUID, authorizer authz.Authorizer, + principalStore store.PrincipalStore) *Controller { + return NewController(principalUIDCheck, authorizer, principalStore) +} diff --git a/internal/api/controller/serviceaccount/controller.go b/internal/api/controller/serviceaccount/controller.go new file mode 100644 index 0000000000..8905386b58 --- /dev/null +++ b/internal/api/controller/serviceaccount/controller.go @@ -0,0 +1,51 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package serviceaccount + +import ( + "context" + + "github.com/harness/gitness/internal/auth/authz" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/check" +) + +type Controller struct { + principalUIDCheck check.PrincipalUID + authorizer authz.Authorizer + principalStore store.PrincipalStore + spaceStore store.SpaceStore + repoStore store.RepoStore + tokenStore store.TokenStore +} + +func NewController(principalUIDCheck check.PrincipalUID, authorizer authz.Authorizer, + principalStore store.PrincipalStore, spaceStore store.SpaceStore, repoStore store.RepoStore, + tokenStore store.TokenStore) *Controller { + return &Controller{ + principalUIDCheck: principalUIDCheck, + authorizer: authorizer, + principalStore: principalStore, + spaceStore: spaceStore, + repoStore: repoStore, + tokenStore: tokenStore, + } +} + +func findServiceAccountFromUID(ctx context.Context, + principalStore store.PrincipalStore, saUID string) (*types.ServiceAccount, error) { + return principalStore.FindServiceAccountByUID(ctx, saUID) +} diff --git a/internal/api/controller/serviceaccount/create.go b/internal/api/controller/serviceaccount/create.go new file mode 100644 index 0000000000..f447f2b981 --- /dev/null +++ b/internal/api/controller/serviceaccount/create.go @@ -0,0 +1,134 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package serviceaccount + +import ( + "context" + "fmt" + "strings" + "time" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/check" + "github.com/harness/gitness/types/enum" + + "github.com/dchest/uniuri" + gonanoid "github.com/matoous/go-nanoid/v2" +) + +var ( + serviceAccountUIDAlphabet = "abcdefghijklmnopqrstuvwxyz0123456789" + serviceAccountUIDLength = 16 +) + +type CreateInput struct { + Email string `json:"email"` + DisplayName string `json:"display_name"` + ParentType enum.ParentResourceType `json:"parent_type"` + ParentID int64 `json:"parent_id"` +} + +// Create creates a new service account. +func (c *Controller) Create(ctx context.Context, session *auth.Session, + in *CreateInput) (*types.ServiceAccount, error) { + // Ensure principal has required permissions on parent (ensures that parent exists) + // since it's a create, we use don't pass a resource name. + if err := apiauth.CheckServiceAccount(ctx, c.authorizer, session, c.spaceStore, c.repoStore, + in.ParentType, in.ParentID, "", enum.PermissionServiceAccountCreate); err != nil { + return nil, err + } + + uid, err := generateServiceAccountUID(in.ParentType, in.ParentID) + if err != nil { + return nil, fmt.Errorf("failed to generate service account UID: %w", err) + } + + // TODO: There's a chance of duplicate error - we should retry? + return c.CreateNoAuth(ctx, in, uid) +} + +/* + * CreateNoAuth creates a new service account without auth checks. + * WARNING: Never call as part of user flow. 
+ * + * Note: take uid separately to allow internally created non-random uids. + */ +func (c *Controller) CreateNoAuth(ctx context.Context, + in *CreateInput, uid string) (*types.ServiceAccount, error) { + if err := c.sanitizeCreateInput(in, uid); err != nil { + return nil, fmt.Errorf("invalid input: %w", err) + } + + sa := &types.ServiceAccount{ + UID: uid, + Email: in.Email, + DisplayName: in.DisplayName, + Salt: uniuri.NewLen(uniuri.UUIDLen), + Created: time.Now().UnixMilli(), + Updated: time.Now().UnixMilli(), + ParentType: in.ParentType, + ParentID: in.ParentID, + } + + err := c.principalStore.CreateServiceAccount(ctx, sa) + if err != nil { + return nil, err + } + + return sa, nil +} + +func (c *Controller) sanitizeCreateInput(in *CreateInput, uid string) error { + if err := c.principalUIDCheck(uid); err != nil { + return err + } + + in.Email = strings.TrimSpace(in.Email) + if err := check.Email(in.Email); err != nil { + return err + } + + in.DisplayName = strings.TrimSpace(in.DisplayName) + if err := check.DisplayName(in.DisplayName); err != nil { + return err + } + + if err := check.ServiceAccountParent(in.ParentType, in.ParentID); err != nil { + return err + } + + return nil +} + +// generateServiceAccountUID generates a new unique UID for a service account +// NOTE: +// This method generates 36^10 = ~8*10^24 unique UIDs per parent. +// This should be enough for a very low chance of duplications. +// +// NOTE: +// We generate it automatically to ensure unique UIDs on principals. +// The downside is that they don't have very userfriendly handlers - though that should be okay for service accounts. +// The other option would be take it as an input, but a globally unique uid of a service account +// which itself is scoped to a space / repo might be weird. +func generateServiceAccountUID(parentType enum.ParentResourceType, parentID int64) (string, error) { + nid, err := gonanoid.Generate(serviceAccountUIDAlphabet, serviceAccountUIDLength) + if err != nil { + return "", err + } + + return fmt.Sprintf("sa-%s-%d-%s", string(parentType), parentID, nid), nil +} diff --git a/internal/api/controller/serviceaccount/create_token.go b/internal/api/controller/serviceaccount/create_token.go new file mode 100644 index 0000000000..c270cadb51 --- /dev/null +++ b/internal/api/controller/serviceaccount/create_token.go @@ -0,0 +1,71 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package serviceaccount + +import ( + "context" + "time" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/internal/token" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/check" + "github.com/harness/gitness/types/enum" +) + +type CreateTokenInput struct { + UID string `json:"uid"` + Lifetime *time.Duration `json:"lifetime"` +} + +// CreateToken creates a new service account access token. 
+func (c *Controller) CreateToken( + ctx context.Context, + session *auth.Session, + saUID string, + in *CreateTokenInput, +) (*types.TokenResponse, error) { + sa, err := findServiceAccountFromUID(ctx, c.principalStore, saUID) + if err != nil { + return nil, err + } + + if err = check.UID(in.UID); err != nil { + return nil, err + } + if err = check.TokenLifetime(in.Lifetime, true); err != nil { + return nil, err + } + + // Ensure principal has required permissions on parent (ensures that parent exists) + if err = apiauth.CheckServiceAccount(ctx, c.authorizer, session, c.spaceStore, c.repoStore, + sa.ParentType, sa.ParentID, sa.UID, enum.PermissionServiceAccountEdit); err != nil { + return nil, err + } + token, jwtToken, err := token.CreateSAT( + ctx, + c.tokenStore, + &session.Principal, + sa, + in.UID, + in.Lifetime, + ) + if err != nil { + return nil, err + } + + return &types.TokenResponse{Token: *token, AccessToken: jwtToken}, nil +} diff --git a/internal/api/controller/serviceaccount/delete.go b/internal/api/controller/serviceaccount/delete.go new file mode 100644 index 0000000000..8282354ad8 --- /dev/null +++ b/internal/api/controller/serviceaccount/delete.go @@ -0,0 +1,48 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package serviceaccount + +import ( + "context" + "fmt" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types/enum" +) + +// Delete deletes a service account. +func (c *Controller) Delete(ctx context.Context, session *auth.Session, + saUID string) error { + sa, err := findServiceAccountFromUID(ctx, c.principalStore, saUID) + if err != nil { + return err + } + + // Ensure principal has required permissions on parent (ensures that parent exists) + if err = apiauth.CheckServiceAccount(ctx, c.authorizer, session, c.spaceStore, c.repoStore, + sa.ParentType, sa.ParentID, sa.UID, enum.PermissionServiceAccountDelete); err != nil { + return err + } + + // delete all tokens (okay if we fail after - user intends to delete service account anyway) + // TODO: cascading delete? + err = c.tokenStore.DeleteForPrincipal(ctx, sa.ID) + if err != nil { + return fmt.Errorf("failed to delete tokens for service account: %w", err) + } + + return c.principalStore.DeleteServiceAccount(ctx, sa.ID) +} diff --git a/internal/api/controller/serviceaccount/delete_token.go b/internal/api/controller/serviceaccount/delete_token.go new file mode 100644 index 0000000000..0d9170ef97 --- /dev/null +++ b/internal/api/controller/serviceaccount/delete_token.go @@ -0,0 +1,56 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package serviceaccount + +import ( + "context" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types/enum" + + "github.com/rs/zerolog/log" +) + +// DeleteToken deletes a token of a service account. +func (c *Controller) DeleteToken(ctx context.Context, session *auth.Session, + saUID string, tokenUID string) error { + sa, err := findServiceAccountFromUID(ctx, c.principalStore, saUID) + if err != nil { + return err + } + + // Ensure principal has required permissions on parent (ensures that parent exists) + if err = apiauth.CheckServiceAccount(ctx, c.authorizer, session, c.spaceStore, c.repoStore, + sa.ParentType, sa.ParentID, sa.UID, enum.PermissionServiceAccountEdit); err != nil { + return err + } + + token, err := c.tokenStore.FindByUID(ctx, sa.ID, tokenUID) + if err != nil { + return err + } + + // Ensure sat belongs to service account + if token.Type != enum.TokenTypeSAT || token.PrincipalID != sa.ID { + log.Warn().Msg("Principal tried to delete token that doesn't belong to the service account") + + // throw a not found error - no need for user to know about token? + return usererror.ErrNotFound + } + + return c.tokenStore.Delete(ctx, token.ID) +} diff --git a/internal/api/controller/serviceaccount/find.go b/internal/api/controller/serviceaccount/find.go new file mode 100644 index 0000000000..2fadc5e8b6 --- /dev/null +++ b/internal/api/controller/serviceaccount/find.go @@ -0,0 +1,49 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package serviceaccount + +import ( + "context" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +// Find tries to find the provided service account. +func (c *Controller) Find(ctx context.Context, session *auth.Session, + saUID string) (*types.ServiceAccount, error) { + sa, err := c.FindNoAuth(ctx, saUID) + if err != nil { + return nil, err + } + + // Ensure principal has required permissions on parent (ensures that parent exists) + if err = apiauth.CheckServiceAccount(ctx, c.authorizer, session, c.spaceStore, c.repoStore, + sa.ParentType, sa.ParentID, sa.UID, enum.PermissionServiceAccountView); err != nil { + return nil, err + } + + return sa, nil +} + +/* + * FindNoAuth finds a service account without auth checks. + * WARNING: Never call as part of user flow. 
+ */ +func (c *Controller) FindNoAuth(ctx context.Context, saUID string) (*types.ServiceAccount, error) { + return findServiceAccountFromUID(ctx, c.principalStore, saUID) +} diff --git a/internal/api/controller/serviceaccount/list_token.go b/internal/api/controller/serviceaccount/list_token.go new file mode 100644 index 0000000000..f1f2ba89a6 --- /dev/null +++ b/internal/api/controller/serviceaccount/list_token.go @@ -0,0 +1,41 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package serviceaccount + +import ( + "context" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +// ListTokens lists all tokens of a service account. +func (c *Controller) ListTokens(ctx context.Context, session *auth.Session, + saUID string) ([]*types.Token, error) { + sa, err := findServiceAccountFromUID(ctx, c.principalStore, saUID) + if err != nil { + return nil, err + } + + // Ensure principal has required permissions on parent (ensures that parent exists) + if err = apiauth.CheckServiceAccount(ctx, c.authorizer, session, c.spaceStore, c.repoStore, + sa.ParentType, sa.ParentID, sa.UID, enum.PermissionServiceAccountView); err != nil { + return nil, err + } + + return c.tokenStore.List(ctx, sa.ID, enum.TokenTypeSAT) +} diff --git a/internal/api/controller/serviceaccount/wire.go b/internal/api/controller/serviceaccount/wire.go new file mode 100644 index 0000000000..5c2063d576 --- /dev/null +++ b/internal/api/controller/serviceaccount/wire.go @@ -0,0 +1,34 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package serviceaccount + +import ( + "github.com/harness/gitness/internal/auth/authz" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/types/check" + + "github.com/google/wire" +) + +// WireSet provides a wire set for this package. 
+var WireSet = wire.NewSet( + NewController, +) + +func ProvideController(principalUIDCheck check.PrincipalUID, authorizer authz.Authorizer, + principalStore store.PrincipalStore, spaceStore store.SpaceStore, repoStore store.RepoStore, + tokenStore store.TokenStore) *Controller { + return NewController(principalUIDCheck, authorizer, principalStore, spaceStore, repoStore, tokenStore) +} diff --git a/internal/api/controller/space/controller.go b/internal/api/controller/space/controller.go new file mode 100644 index 0000000000..69bd2d4d7e --- /dev/null +++ b/internal/api/controller/space/controller.go @@ -0,0 +1,86 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package space + +import ( + "github.com/harness/gitness/internal/api/controller/repo" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/internal/auth/authz" + "github.com/harness/gitness/internal/services/exporter" + "github.com/harness/gitness/internal/services/importer" + "github.com/harness/gitness/internal/sse" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/internal/url" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/check" + + "github.com/jmoiron/sqlx" +) + +var ( + // TODO (Nested Spaces): Remove once full support is added + errNestedSpacesNotSupported = usererror.BadRequestf("Nested spaces are not supported.") +) + +type Controller struct { + nestedSpacesEnabled bool + + db *sqlx.DB + urlProvider *url.Provider + sseStreamer sse.Streamer + uidCheck check.PathUID + authorizer authz.Authorizer + spacePathStore store.SpacePathStore + pipelineStore store.PipelineStore + secretStore store.SecretStore + connectorStore store.ConnectorStore + templateStore store.TemplateStore + spaceStore store.SpaceStore + repoStore store.RepoStore + principalStore store.PrincipalStore + repoCtrl *repo.Controller + membershipStore store.MembershipStore + importer *importer.Repository + exporter *exporter.Repository +} + +func NewController(config *types.Config, db *sqlx.DB, urlProvider *url.Provider, + sseStreamer sse.Streamer, uidCheck check.PathUID, authorizer authz.Authorizer, + spacePathStore store.SpacePathStore, pipelineStore store.PipelineStore, secretStore store.SecretStore, + connectorStore store.ConnectorStore, templateStore store.TemplateStore, spaceStore store.SpaceStore, + repoStore store.RepoStore, principalStore store.PrincipalStore, repoCtrl *repo.Controller, + membershipStore store.MembershipStore, importer *importer.Repository, exporter *exporter.Repository, +) *Controller { + return &Controller{ + nestedSpacesEnabled: config.NestedSpacesEnabled, + db: db, + urlProvider: urlProvider, + sseStreamer: sseStreamer, + uidCheck: uidCheck, + authorizer: authorizer, + spacePathStore: spacePathStore, + pipelineStore: pipelineStore, + secretStore: secretStore, + connectorStore: connectorStore, + templateStore: templateStore, + spaceStore: spaceStore, + repoStore: repoStore, + principalStore: principalStore, + 
repoCtrl: repoCtrl, + membershipStore: membershipStore, + importer: importer, + exporter: exporter, + } +} diff --git a/internal/api/controller/space/create.go b/internal/api/controller/space/create.go new file mode 100644 index 0000000000..2991809e86 --- /dev/null +++ b/internal/api/controller/space/create.go @@ -0,0 +1,211 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package space + +import ( + "context" + "fmt" + "strconv" + "strings" + "time" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/internal/bootstrap" + "github.com/harness/gitness/internal/paths" + "github.com/harness/gitness/store/database/dbtx" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/check" + "github.com/harness/gitness/types/enum" +) + +var ( + errParentIDNegative = usererror.BadRequest( + "Parent ID has to be either zero for a root space or greater than zero for a child space.") +) + +type CreateInput struct { + ParentRef string `json:"parent_ref"` + UID string `json:"uid"` + Description string `json:"description"` + IsPublic bool `json:"is_public"` +} + +// Create creates a new space. +// +//nolint:gocognit // refactor if required +func (c *Controller) Create( + ctx context.Context, + session *auth.Session, + in *CreateInput, +) (*types.Space, error) { + parentID, err := c.getSpaceCheckAuthSpaceCreation(ctx, session, in.ParentRef) + if err != nil { + return nil, err + } + + if err := c.sanitizeCreateInput(in); err != nil { + return nil, fmt.Errorf("failed to sanitize input: %w", err) + } + var space *types.Space + err = dbtx.New(c.db).WithTx(ctx, func(ctx context.Context) error { + space, err = c.createSpaceInnerInTX(ctx, session, parentID, in) + return err + }) + if err != nil { + return nil, err + } + + return space, nil +} + +func (c *Controller) createSpaceInnerInTX( + ctx context.Context, + session *auth.Session, + parentID int64, + in *CreateInput, +) (*types.Space, error) { + spacePath := in.UID + if parentID > 0 { + // (re-)read parent path in transaction to ensure correctness + parentPath, err := c.spacePathStore.FindPrimaryBySpaceID(ctx, parentID) + if err != nil { + return nil, fmt.Errorf("failed to find primary path for parent '%d': %w", parentID, err) + } + spacePath = paths.Concatinate(parentPath.Value, in.UID) + + // ensure path is within accepted depth! 
+ err = check.PathDepth(spacePath, true) + if err != nil { + return nil, fmt.Errorf("path is invalid: %w", err) + } + } + + now := time.Now().UnixMilli() + space := &types.Space{ + Version: 0, + ParentID: parentID, + UID: in.UID, + Description: in.Description, + IsPublic: in.IsPublic, + Path: spacePath, + CreatedBy: session.Principal.ID, + Created: now, + Updated: now, + } + err := c.spaceStore.Create(ctx, space) + if err != nil { + return nil, fmt.Errorf("space creation failed: %w", err) + } + + pathSegment := &types.SpacePathSegment{ + UID: space.UID, + IsPrimary: true, + SpaceID: space.ID, + ParentID: parentID, + CreatedBy: space.CreatedBy, + Created: now, + Updated: now, + } + err = c.spacePathStore.InsertSegment(ctx, pathSegment) + if err != nil { + return nil, fmt.Errorf("failed to insert primary path segment: %w", err) + } + + // add space membership to top level space only (as the user doesn't have inherited permissions already) + if parentID == 0 { + membership := &types.Membership{ + MembershipKey: types.MembershipKey{ + SpaceID: space.ID, + PrincipalID: session.Principal.ID, + }, + Role: enum.MembershipRoleSpaceOwner, + + // membership has been created by the system + CreatedBy: bootstrap.NewSystemServiceSession().Principal.ID, + Created: now, + Updated: now, + } + err = c.membershipStore.Create(ctx, membership) + if err != nil { + return nil, fmt.Errorf("failed to make user owner of the space: %w", err) + } + } + + return space, nil +} + +func (c *Controller) getSpaceCheckAuthSpaceCreation( + ctx context.Context, + session *auth.Session, + parentRef string, +) (int64, error) { + parentRefAsID, err := strconv.ParseInt(parentRef, 10, 64) + if (parentRefAsID <= 0 && err == nil) || (len(strings.TrimSpace(parentRef)) == 0) { + // TODO: Restrict top level space creation - should be move to authorizer? + if session == nil { + return 0, fmt.Errorf("anonymous user not allowed to create top level spaces: %w", usererror.ErrUnauthorized) + } + + return 0, nil + } + + parentSpace, err := c.spaceStore.FindByRef(ctx, parentRef) + if err != nil { + return 0, fmt.Errorf("failed to get parent space: %w", err) + } + + // create is a special case - check permission without specific resource + scope := &types.Scope{SpacePath: parentSpace.Path} + resource := &types.Resource{ + Type: enum.ResourceTypeSpace, + Name: "", + } + if err = apiauth.Check(ctx, c.authorizer, session, scope, resource, enum.PermissionSpaceCreate); err != nil { + return 0, fmt.Errorf("authorization failed: %w", err) + } + + return parentSpace.ID, nil + +} + +func (c *Controller) sanitizeCreateInput(in *CreateInput) error { + if len(in.ParentRef) > 0 && !c.nestedSpacesEnabled { + // TODO (Nested Spaces): Remove once support is added + return errNestedSpacesNotSupported + } + + parentRefAsID, err := strconv.ParseInt(in.ParentRef, 10, 64) + if err == nil && parentRefAsID < 0 { + return errParentIDNegative + } + + isRoot := false + if (err == nil && parentRefAsID == 0) || (len(strings.TrimSpace(in.ParentRef)) == 0) { + isRoot = true + } + + if err := c.uidCheck(in.UID, isRoot); err != nil { + return err + } + + in.Description = strings.TrimSpace(in.Description) + if err := check.Description(in.Description); err != nil { + return err + } + + return nil +} diff --git a/internal/api/controller/space/delete.go b/internal/api/controller/space/delete.go new file mode 100644 index 0000000000..ecd4f8adf6 --- /dev/null +++ b/internal/api/controller/space/delete.go @@ -0,0 +1,93 @@ +// Copyright 2023 Harness, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package space + +import ( + "context" + "fmt" + "math" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +// Delete deletes a space. +func (c *Controller) Delete(ctx context.Context, session *auth.Session, spaceRef string) error { + space, err := c.spaceStore.FindByRef(ctx, spaceRef) + if err != nil { + return err + } + if err = apiauth.CheckSpace(ctx, c.authorizer, session, space, enum.PermissionSpaceDelete, false); err != nil { + return err + } + + return c.DeleteNoAuth(ctx, session, space.ID) +} + +// DeleteNoAuth deletes the space - no authorization is verified. +// WARNING this is meant for internal calls only. +func (c *Controller) DeleteNoAuth(ctx context.Context, session *auth.Session, spaceID int64) error { + filter := &types.SpaceFilter{ + Page: 1, + Size: math.MaxInt, + Query: "", + Order: enum.OrderAsc, + Sort: enum.SpaceAttrNone, + } + subSpaces, _, err := c.ListSpacesNoAuth(ctx, spaceID, filter) + if err != nil { + return fmt.Errorf("failed to list space %d sub spaces: %w", spaceID, err) + } + for _, space := range subSpaces { + err = c.DeleteNoAuth(ctx, session, space.ID) + if err != nil { + return fmt.Errorf("failed to delete space %d: %w", space.ID, err) + } + } + err = c.deleteRepositoriesNoAuth(ctx, session, spaceID) + if err != nil { + return fmt.Errorf("failed to delete repositories of space %d: %w", spaceID, err) + } + err = c.spaceStore.Delete(ctx, spaceID) + if err != nil { + return fmt.Errorf("spaceStore failed to delete space %d: %w", spaceID, err) + } + return nil +} + +// deleteRepositoriesNoAuth deletes all repositories in a space - no authorization is verified. +// WARNING this is meant for internal calls only. +func (c *Controller) deleteRepositoriesNoAuth(ctx context.Context, session *auth.Session, spaceID int64) error { + filter := &types.RepoFilter{ + Page: 1, + Size: int(math.MaxInt), + Query: "", + Order: enum.OrderAsc, + Sort: enum.RepoAttrNone, + } + repos, _, err := c.ListRepositoriesNoAuth(ctx, spaceID, filter) + if err != nil { + return fmt.Errorf("failed to list space repositories: %w", err) + } + for _, repo := range repos { + err = c.repoCtrl.DeleteNoAuth(ctx, session, repo) + if err != nil { + return fmt.Errorf("failed to delete repository %d: %w", repo.ID, err) + } + } + return nil +} diff --git a/internal/api/controller/space/events.go b/internal/api/controller/space/events.go new file mode 100644 index 0000000000..ac0375de95 --- /dev/null +++ b/internal/api/controller/space/events.go @@ -0,0 +1,112 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package space + +import ( + "context" + "encoding/json" + "fmt" + "io" + "time" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/internal/writer" + "github.com/harness/gitness/types/enum" + + "github.com/rs/zerolog/log" +) + +var ( + pingInterval = 30 * time.Second + tailMaxTime = 2 * time.Hour +) + +func (c *Controller) Events( + ctx context.Context, + session *auth.Session, + spaceRef string, + w writer.WriterFlusher, +) error { + space, err := c.spaceStore.FindByRef(ctx, spaceRef) + if err != nil { + return fmt.Errorf("failed to find space ref: %w", err) + } + + if err = apiauth.CheckSpace(ctx, c.authorizer, session, space, enum.PermissionSpaceView, true); err != nil { + return fmt.Errorf("failed to authorize stream: %w", err) + } + + ctx, ctxCancel := context.WithTimeout(ctx, tailMaxTime) + defer ctxCancel() + + io.WriteString(w, ": ping\n\n") + w.Flush() + + eventStream, errorStream, sseCancel := c.sseStreamer.Stream(ctx, space.ID) + defer func() { + uerr := sseCancel(ctx) + if uerr != nil { + log.Ctx(ctx).Warn().Err(uerr).Msgf("failed to cancel sse stream for space '%s'", space.Path) + } + }() + // could not get error channel + if errorStream == nil { + io.WriteString(w, "event: error\ndata: eof\n\n") + w.Flush() + return fmt.Errorf("could not get error channel") + } + pingTimer := time.NewTimer(pingInterval) + defer pingTimer.Stop() + + enc := json.NewEncoder(w) +L: + for { + // ensure timer is stopped before resetting (see documentation) + if !pingTimer.Stop() { + // in this specific case the timer's channel could be both, empty or full + select { + case <-pingTimer.C: + default: + } + } + pingTimer.Reset(pingInterval) + select { + case <-ctx.Done(): + log.Debug().Msg("events: stream cancelled") + break L + case err := <-errorStream: + log.Err(err).Msg("events: received error in the tail channel") + break L + case <-pingTimer.C: + // if time b/w messages takes longer, send a ping + io.WriteString(w, ": ping\n\n") + w.Flush() + case event := <-eventStream: + io.WriteString(w, fmt.Sprintf("event: %s\n", event.Type)) + io.WriteString(w, "data: ") + enc.Encode(event.Data) + // NOTE: enc.Encode is ending the data with a new line, only add one more + // Source: https://cs.opensource.google/go/go/+/refs/tags/go1.21.1:src/encoding/json/stream.go;l=220 + io.WriteString(w, "\n") + w.Flush() + } + } + + io.WriteString(w, "event: error\ndata: eof\n\n") + w.Flush() + log.Debug().Msg("events: stream closed") + return nil +} diff --git a/internal/api/controller/space/export.go b/internal/api/controller/space/export.go new file mode 100644 index 0000000000..58731aa3a4 --- /dev/null +++ b/internal/api/controller/space/export.go @@ -0,0 +1,110 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
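// Illustrative sketch, not part of this patch: the Events handler above emits
// standard server-sent events -- ": ping" comment lines as keep-alives,
// "event: <type>" / "data: <json>" pairs for space events, and a final
// "event: error" / "data: eof" when the stream ends. A minimal consumer might
// look like the following; the URL is an assumption for illustration, the
// actual route is registered elsewhere.

package main

import (
	"bufio"
	"fmt"
	"net/http"
	"strings"
)

func main() {
	resp, err := http.Get("http://localhost:3000/api/v1/spaces/my-space/events")
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer resp.Body.Close()

	scanner := bufio.NewScanner(resp.Body)
	event := ""
	for scanner.Scan() {
		line := scanner.Text()
		switch {
		case strings.HasPrefix(line, ":"):
			// keep-alive comment line, ignore
		case strings.HasPrefix(line, "event: "):
			event = strings.TrimPrefix(line, "event: ")
		case strings.HasPrefix(line, "data: "):
			fmt.Printf("%s -> %s\n", event, strings.TrimPrefix(line, "data: "))
		}
	}
}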
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package space + +import ( + "context" + "errors" + "fmt" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/internal/services/exporter" + "github.com/harness/gitness/store/database/dbtx" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +type ExportInput struct { + AccountId string `json:"accountId"` + OrgIdentifier string `json:"orgIdentifier"` + ProjectIdentifier string `json:"projectIdentifier"` + Token string `json:"token"` +} + +// Export creates a new empty repository in harness code and does git push to it. +func (c *Controller) Export(ctx context.Context, session *auth.Session, spaceRef string, in *ExportInput) error { + space, err := c.spaceStore.FindByRef(ctx, spaceRef) + if err != nil { + return err + } + + if err = apiauth.CheckSpace(ctx, c.authorizer, session, space, enum.PermissionSpaceEdit, false); err != nil { + return err + } + + err = c.sanitizeExportInput(in) + if err != nil { + return fmt.Errorf("failed to sanitize input: %w", err) + } + + providerInfo := &exporter.HarnessCodeInfo{ + AccountId: in.AccountId, + ProjectIdentifier: in.ProjectIdentifier, + OrgIdentifier: in.OrgIdentifier, + Token: in.Token, + } + + var repos []*types.Repository + page := 1 + for { + reposInPage, err := c.repoStore.List(ctx, space.ID, &types.RepoFilter{Size: 200, Page: page, Order: enum.OrderDesc}) + if err != nil { + return err + } + if len(reposInPage) == 0 { + break + } + page += 1 + repos = append(repos, reposInPage...) 
+ } + + err = dbtx.New(c.db).WithTx(ctx, func(ctx context.Context) error { + err = c.exporter.RunManyForSpace(ctx, space.ID, repos, providerInfo) + if errors.Is(err, exporter.ErrJobRunning) { + return usererror.ConflictWithPayload("export already in progress") + } + if err != nil { + return fmt.Errorf("failed to start export repository job: %w", err) + } + return nil + }) + if err != nil { + return err + } + + return nil +} + +func (c *Controller) sanitizeExportInput(in *ExportInput) error { + if in.AccountId == "" { + return usererror.BadRequest("account id must be provided") + } + + if in.OrgIdentifier == "" { + return usererror.BadRequest("organization identifier must be provided") + } + + if in.ProjectIdentifier == "" { + return usererror.BadRequest("project identifier must be provided") + } + + if in.Token == "" { + return usererror.BadRequest("token for harness code must be provided") + } + + return nil +} diff --git a/internal/api/controller/space/export_progress.go b/internal/api/controller/space/export_progress.go new file mode 100644 index 0000000000..5b76db7822 --- /dev/null +++ b/internal/api/controller/space/export_progress.go @@ -0,0 +1,43 @@ +package space + +import ( + "context" + "fmt" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/internal/services/exporter" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + "github.com/pkg/errors" +) + +type ExportProgressOutput struct { + Repos []types.JobProgress `json:"repos"` +} + +// ExportProgress returns progress of the export job. +func (c *Controller) ExportProgress(ctx context.Context, + session *auth.Session, + spaceRef string, +) (ExportProgressOutput, error) { + space, err := c.spaceStore.FindByRef(ctx, spaceRef) + if err != nil { + return ExportProgressOutput{}, err + } + + if err = apiauth.CheckSpace(ctx, c.authorizer, session, space, enum.PermissionSpaceView, false); err != nil { + return ExportProgressOutput{}, err + } + + progress, err := c.exporter.GetProgressForSpace(ctx, space.ID) + if errors.Is(err, exporter.ErrNotFound) { + return ExportProgressOutput{}, usererror.NotFound("No recent or ongoing export found for space.") + } + if err != nil { + return ExportProgressOutput{}, fmt.Errorf("failed to retrieve export progress: %w", err) + } + + return ExportProgressOutput{Repos: progress}, nil +} diff --git a/internal/api/controller/space/find.go b/internal/api/controller/space/find.go new file mode 100644 index 0000000000..62dd58ef01 --- /dev/null +++ b/internal/api/controller/space/find.go @@ -0,0 +1,40 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package space + +import ( + "context" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +/* +* Find finds a space. 
+ */ +func (c *Controller) Find(ctx context.Context, session *auth.Session, spaceRef string) (*types.Space, error) { + space, err := c.spaceStore.FindByRef(ctx, spaceRef) + if err != nil { + return nil, err + } + + if err = apiauth.CheckSpace(ctx, c.authorizer, session, space, enum.PermissionSpaceView, true); err != nil { + return nil, err + } + + return space, nil +} diff --git a/internal/api/controller/space/import.go b/internal/api/controller/space/import.go new file mode 100644 index 0000000000..58299ea030 --- /dev/null +++ b/internal/api/controller/space/import.go @@ -0,0 +1,95 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package space + +import ( + "context" + "fmt" + + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/internal/services/importer" + "github.com/harness/gitness/store/database/dbtx" + "github.com/harness/gitness/types" +) + +type ImportInput struct { + CreateInput + Provider importer.Provider `json:"provider"` + ProviderSpace string `json:"provider_space"` +} + +// Import creates new space and starts import of all repositories from the remote provider's space into it. +func (c *Controller) Import(ctx context.Context, session *auth.Session, in *ImportInput) (*types.Space, error) { + parentSpaceID, err := c.getSpaceCheckAuthSpaceCreation(ctx, session, in.ParentRef) + if err != nil { + return nil, err + } + + if in.UID == "" { + in.UID = in.ProviderSpace + } + + err = c.sanitizeCreateInput(&in.CreateInput) + if err != nil { + return nil, fmt.Errorf("failed to sanitize input: %w", err) + } + + remoteRepositories, err := importer.LoadRepositoriesFromProviderSpace(ctx, in.Provider, in.ProviderSpace) + if err != nil { + return nil, err + } + + if len(remoteRepositories) == 0 { + return nil, usererror.BadRequestf("found no repositories at %s", in.ProviderSpace) + } + + repoIDs := make([]int64, len(remoteRepositories)) + cloneURLs := make([]string, len(remoteRepositories)) + + var space *types.Space + err = dbtx.New(c.db).WithTx(ctx, func(ctx context.Context) error { + space, err = c.createSpaceInnerInTX(ctx, session, parentSpaceID, &in.CreateInput) + if err != nil { + return err + } + + for i, remoteRepository := range remoteRepositories { + repo := remoteRepository.ToRepo( + space.ID, remoteRepository.UID, "", &session.Principal) + + err = c.repoStore.Create(ctx, repo) + if err != nil { + return fmt.Errorf("failed to create repository in storage: %w", err) + } + + repoIDs[i] = repo.ID + cloneURLs[i] = remoteRepository.CloneURL + } + + jobGroupID := fmt.Sprintf("space-import-%d", space.ID) + err = c.importer.RunMany(ctx, jobGroupID, in.Provider, repoIDs, cloneURLs) + if err != nil { + return fmt.Errorf("failed to start import repository jobs: %w", err) + } + + return nil + }) + if err != nil { + return nil, err + } + + return space, nil +} diff --git a/internal/api/controller/space/list_connectors.go b/internal/api/controller/space/list_connectors.go 
new file mode 100644 index 0000000000..91dbcc1a35 --- /dev/null +++ b/internal/api/controller/space/list_connectors.go @@ -0,0 +1,54 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +package space + +import ( + "context" + "fmt" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +// ListConnectors lists the connectors in a space. +func (c *Controller) ListConnectors( + ctx context.Context, + session *auth.Session, + spaceRef string, + filter types.ListQueryFilter, +) ([]*types.Connector, int64, error) { + space, err := c.spaceStore.FindByRef(ctx, spaceRef) + if err != nil { + return nil, 0, fmt.Errorf("failed to find parent space: %w", err) + } + + err = apiauth.CheckSpace(ctx, c.authorizer, session, space, enum.PermissionConnectorView, false) + if err != nil { + return nil, 0, fmt.Errorf("could not authorize: %w", err) + } + + count, err := c.connectorStore.Count(ctx, space.ID, filter) + if err != nil { + return nil, 0, fmt.Errorf("failed to count connectors in space: %w", err) + } + + connectors, err := c.connectorStore.List(ctx, space.ID, filter) + if err != nil { + return nil, 0, fmt.Errorf("failed to list connectors: %w", err) + } + + return connectors, count, nil +} diff --git a/internal/api/controller/space/list_repositories.go b/internal/api/controller/space/list_repositories.go new file mode 100644 index 0000000000..001674e9f1 --- /dev/null +++ b/internal/api/controller/space/list_repositories.go @@ -0,0 +1,63 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package space + +import ( + "context" + "fmt" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +// ListRepositories lists the repositories of a space. 
+func (c *Controller) ListRepositories(ctx context.Context, session *auth.Session, + spaceRef string, filter *types.RepoFilter) ([]*types.Repository, int64, error) { + space, err := c.spaceStore.FindByRef(ctx, spaceRef) + if err != nil { + return nil, 0, err + } + + if err = apiauth.CheckSpace(ctx, c.authorizer, session, space, enum.PermissionRepoView, true); err != nil { + return nil, 0, err + } + return c.ListRepositoriesNoAuth(ctx, space.ID, filter) +} + +// ListRepositoriesNoAuth list repositories WITHOUT checking for PermissionRepoView. +func (c *Controller) ListRepositoriesNoAuth( + ctx context.Context, + spaceID int64, + filter *types.RepoFilter, +) ([]*types.Repository, int64, error) { + count, err := c.repoStore.Count(ctx, spaceID, filter) + if err != nil { + return nil, 0, fmt.Errorf("failed to count child repos: %w", err) + } + + repos, err := c.repoStore.List(ctx, spaceID, filter) + if err != nil { + return nil, 0, fmt.Errorf("failed to list child repos: %w", err) + } + + // backfill URLs + for _, repo := range repos { + repo.GitURL = c.urlProvider.GenerateRepoCloneURL(repo.Path) + } + + return repos, count, nil +} diff --git a/internal/api/controller/space/list_secrets.go b/internal/api/controller/space/list_secrets.go new file mode 100644 index 0000000000..31f70ab1d6 --- /dev/null +++ b/internal/api/controller/space/list_secrets.go @@ -0,0 +1,64 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +package space + +import ( + "context" + "fmt" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/store/database/dbtx" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +// ListSecrets lists the secrets in a space. 
+func (c *Controller) ListSecrets( + ctx context.Context, + session *auth.Session, + spaceRef string, + filter types.ListQueryFilter, +) ([]*types.Secret, int64, error) { + space, err := c.spaceStore.FindByRef(ctx, spaceRef) + if err != nil { + return nil, 0, fmt.Errorf("failed to find parent space: %w", err) + } + + err = apiauth.CheckSpace(ctx, c.authorizer, session, space, enum.PermissionSecretView, false) + if err != nil { + return nil, 0, fmt.Errorf("could not authorize: %w", err) + } + + var count int64 + var secrets []*types.Secret + + err = dbtx.New(c.db).WithTx(ctx, func(ctx context.Context) (err error) { + count, err = c.secretStore.Count(ctx, space.ID, filter) + if err != nil { + return fmt.Errorf("failed to count child executions: %w", err) + } + + secrets, err = c.secretStore.List(ctx, space.ID, filter) + if err != nil { + return fmt.Errorf("failed to list child executions: %w", err) + } + return + }, dbtx.TxDefaultReadOnly) + if err != nil { + return secrets, count, fmt.Errorf("failed to list secrets: %w", err) + } + + return secrets, count, nil +} diff --git a/internal/api/controller/space/list_service_accounts.go b/internal/api/controller/space/list_service_accounts.go new file mode 100644 index 0000000000..a81456a2d0 --- /dev/null +++ b/internal/api/controller/space/list_service_accounts.go @@ -0,0 +1,41 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package space + +import ( + "context" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +/* +* ListServiceAccounts lists the service accounts of a space. + */ +func (c *Controller) ListServiceAccounts(ctx context.Context, session *auth.Session, + spaceRef string) ([]*types.ServiceAccount, error) { + space, err := c.spaceStore.FindByRef(ctx, spaceRef) + if err != nil { + return nil, err + } + + if err = apiauth.CheckSpace(ctx, c.authorizer, session, space, enum.PermissionServiceAccountView, false); err != nil { + return nil, err + } + + return c.principalStore.ListServiceAccounts(ctx, enum.ParentResourceTypeSpace, space.ID) +} diff --git a/internal/api/controller/space/list_spaces.go b/internal/api/controller/space/list_spaces.go new file mode 100644 index 0000000000..d26602521f --- /dev/null +++ b/internal/api/controller/space/list_spaces.go @@ -0,0 +1,72 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
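// Illustrative sketch, not part of this patch: ListSecrets above runs Count
// and List inside a single read-only transaction so the reported total and
// the returned page come from one consistent snapshot. The same idea
// expressed with the standard library's database/sql (hypothetical table and
// column names) looks roughly like this:

package listing

import (
	"context"
	"database/sql"
)

// pageWithTotal reads one page of secret UIDs plus the total count for a
// space, both inside the same read-only transaction.
func pageWithTotal(ctx context.Context, db *sql.DB, spaceID int64, limit, offset int) ([]string, int64, error) {
	tx, err := db.BeginTx(ctx, &sql.TxOptions{ReadOnly: true})
	if err != nil {
		return nil, 0, err
	}
	defer tx.Rollback() // nothing to commit for reads; this just releases the tx

	var total int64
	if err := tx.QueryRowContext(ctx,
		"SELECT COUNT(*) FROM secrets WHERE secret_space_id = $1", spaceID).Scan(&total); err != nil {
		return nil, 0, err
	}

	rows, err := tx.QueryContext(ctx,
		"SELECT secret_uid FROM secrets WHERE secret_space_id = $1 ORDER BY secret_uid LIMIT $2 OFFSET $3",
		spaceID, limit, offset)
	if err != nil {
		return nil, 0, err
	}
	defer rows.Close()

	var uids []string
	for rows.Next() {
		var uid string
		if err := rows.Scan(&uid); err != nil {
			return nil, 0, err
		}
		uids = append(uids, uid)
	}
	return uids, total, rows.Err()
}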
+// See the License for the specific language governing permissions and +// limitations under the License. + +package space + +import ( + "context" + "fmt" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/store/database/dbtx" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +// ListSpaces lists the child spaces of a space. +func (c *Controller) ListSpaces(ctx context.Context, + session *auth.Session, + spaceRef string, + filter *types.SpaceFilter, +) ([]*types.Space, int64, error) { + space, err := c.spaceStore.FindByRef(ctx, spaceRef) + if err != nil { + return nil, 0, err + } + + if err = apiauth.CheckSpace(ctx, c.authorizer, session, space, enum.PermissionSpaceView, true); err != nil { + return nil, 0, err + } + return c.ListSpacesNoAuth(ctx, space.ID, filter) +} + +// ListSpacesNoAuth lists spaces WITHOUT checking PermissionSpaceView. +func (c *Controller) ListSpacesNoAuth( + ctx context.Context, + spaceID int64, + filter *types.SpaceFilter, +) ([]*types.Space, int64, error) { + var spaces []*types.Space + var count int64 + + err := dbtx.New(c.db).WithTx(ctx, func(ctx context.Context) (err error) { + count, err = c.spaceStore.Count(ctx, spaceID, filter) + if err != nil { + return fmt.Errorf("failed to count child spaces: %w", err) + } + + spaces, err = c.spaceStore.List(ctx, spaceID, filter) + if err != nil { + return fmt.Errorf("failed to list child spaces: %w", err) + } + + return nil + }, dbtx.TxDefaultReadOnly) + if err != nil { + return nil, 0, err + } + + return spaces, count, nil +} diff --git a/internal/api/controller/space/list_templates.go b/internal/api/controller/space/list_templates.go new file mode 100644 index 0000000000..1079db9511 --- /dev/null +++ b/internal/api/controller/space/list_templates.go @@ -0,0 +1,54 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +package space + +import ( + "context" + "fmt" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +// ListTemplates lists the templates in a space. 
+func (c *Controller) ListTemplates(
+ ctx context.Context,
+ session *auth.Session,
+ spaceRef string,
+ filter types.ListQueryFilter,
+) ([]*types.Template, int64, error) {
+ space, err := c.spaceStore.FindByRef(ctx, spaceRef)
+ if err != nil {
+ return nil, 0, fmt.Errorf("failed to find parent space: %w", err)
+ }
+
+ err = apiauth.CheckSpace(ctx, c.authorizer, session, space, enum.PermissionTemplateView, false)
+ if err != nil {
+ return nil, 0, fmt.Errorf("could not authorize: %w", err)
+ }
+
+ count, err := c.templateStore.Count(ctx, space.ID, filter)
+ if err != nil {
+ return nil, 0, fmt.Errorf("failed to count templates in the space: %w", err)
+ }
+
+ templates, err := c.templateStore.List(ctx, space.ID, filter)
+ if err != nil {
+ return nil, 0, fmt.Errorf("failed to list templates: %w", err)
+ }
+
+ return templates, count, nil
+}
diff --git a/internal/api/controller/space/membership_add.go b/internal/api/controller/space/membership_add.go
new file mode 100644
index 0000000000..0eac2ab4c9
--- /dev/null
+++ b/internal/api/controller/space/membership_add.go
@@ -0,0 +1,110 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package space
+
+import (
+ "context"
+ "fmt"
+ "time"
+
+ apiauth "github.com/harness/gitness/internal/api/auth"
+ "github.com/harness/gitness/internal/api/usererror"
+ "github.com/harness/gitness/internal/auth"
+ "github.com/harness/gitness/store"
+ "github.com/harness/gitness/types"
+ "github.com/harness/gitness/types/enum"
+
+ "github.com/pkg/errors"
+)
+
+type MembershipAddInput struct {
+ UserUID string `json:"user_uid"`
+ Role enum.MembershipRole `json:"role"`
+}
+
+func (in *MembershipAddInput) Validate() error {
+ if in.UserUID == "" {
+ return usererror.BadRequest("UserUID must be provided")
+ }
+
+ if in.Role == "" {
+ return usererror.BadRequest("Role must be provided")
+ }
+
+ role, ok := in.Role.Sanitize()
+ if !ok {
+ msg := fmt.Sprintf("Provided role '%s' is not supported. Valid values are: %v",
+ in.Role, enum.MembershipRoles)
+ return usererror.BadRequest(msg)
+ }
+
+ in.Role = role
+
+ return nil
+}
+
+// MembershipAdd adds a new membership to a space.
+func (c *Controller) MembershipAdd(ctx context.Context, + session *auth.Session, + spaceRef string, + in *MembershipAddInput, +) (*types.MembershipUser, error) { + space, err := c.spaceStore.FindByRef(ctx, spaceRef) + if err != nil { + return nil, err + } + + if err = apiauth.CheckSpace(ctx, c.authorizer, session, space, enum.PermissionSpaceEdit, false); err != nil { + return nil, err + } + + err = in.Validate() + if err != nil { + return nil, err + } + + user, err := c.principalStore.FindUserByUID(ctx, in.UserUID) + if errors.Is(err, store.ErrResourceNotFound) { + return nil, usererror.BadRequestf("User '%s' not found", in.UserUID) + } else if err != nil { + return nil, fmt.Errorf("failed to find the user: %w", err) + } + + now := time.Now().UnixMilli() + + membership := types.Membership{ + MembershipKey: types.MembershipKey{ + SpaceID: space.ID, + PrincipalID: user.ID, + }, + CreatedBy: session.Principal.ID, + Created: now, + Updated: now, + Role: in.Role, + } + + err = c.membershipStore.Create(ctx, &membership) + if err != nil { + return nil, fmt.Errorf("failed to create new membership: %w", err) + } + + result := &types.MembershipUser{ + Membership: membership, + Principal: *user.ToPrincipalInfo(), + AddedBy: *session.Principal.ToPrincipalInfo(), + } + + return result, nil +} diff --git a/internal/api/controller/space/membership_delete.go b/internal/api/controller/space/membership_delete.go new file mode 100644 index 0000000000..e3b18b9132 --- /dev/null +++ b/internal/api/controller/space/membership_delete.go @@ -0,0 +1,56 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package space + +import ( + "context" + "fmt" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +// MembershipDelete removes an existing membership from a space. +func (c *Controller) MembershipDelete(ctx context.Context, + session *auth.Session, + spaceRef string, + userUID string, +) error { + space, err := c.spaceStore.FindByRef(ctx, spaceRef) + if err != nil { + return err + } + + if err = apiauth.CheckSpace(ctx, c.authorizer, session, space, enum.PermissionSpaceEdit, false); err != nil { + return err + } + + user, err := c.principalStore.FindUserByUID(ctx, userUID) + if err != nil { + return fmt.Errorf("failed to find user by uid: %w", err) + } + + err = c.membershipStore.Delete(ctx, types.MembershipKey{ + SpaceID: space.ID, + PrincipalID: user.ID, + }) + if err != nil { + return fmt.Errorf("failed to delete user membership: %w", err) + } + + return nil +} diff --git a/internal/api/controller/space/membership_list.go b/internal/api/controller/space/membership_list.go new file mode 100644 index 0000000000..1fcd528444 --- /dev/null +++ b/internal/api/controller/space/membership_list.go @@ -0,0 +1,69 @@ +// Copyright 2023 Harness, Inc. 
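// Illustrative sketch, not part of this patch: MembershipAddInput.Validate
// above normalises the requested role via Role.Sanitize() and rejects
// anything outside the known membership roles. A stripped-down,
// self-contained version of that check is shown below; the role names are
// illustrative stand-ins for enum.MembershipRoles.

package main

import (
	"fmt"
	"strings"
)

var knownRoles = []string{"reader", "executor", "contributor", "space_owner"}

// sanitizeRole trims and lower-cases the input and accepts it only if it
// matches a known role, mirroring the sanitize-then-reject flow above.
func sanitizeRole(in string) (string, bool) {
	role := strings.ToLower(strings.TrimSpace(in))
	for _, known := range knownRoles {
		if role == known {
			return role, true
		}
	}
	return "", false
}

func main() {
	for _, r := range []string{"Reader", "admin"} {
		if role, ok := sanitizeRole(r); ok {
			fmt.Printf("%q accepted as %q\n", r, role)
		} else {
			fmt.Printf("%q is not supported; valid values: %v\n", r, knownRoles)
		}
	}
}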
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package space
+
+import (
+ "context"
+ "fmt"
+
+ apiauth "github.com/harness/gitness/internal/api/auth"
+ "github.com/harness/gitness/internal/auth"
+ "github.com/harness/gitness/store/database/dbtx"
+ "github.com/harness/gitness/types"
+ "github.com/harness/gitness/types/enum"
+)
+
+// MembershipList lists all space memberships.
+func (c *Controller) MembershipList(ctx context.Context,
+ session *auth.Session,
+ spaceRef string,
+ filter types.MembershipUserFilter,
+) ([]types.MembershipUser, int64, error) {
+ space, err := c.spaceStore.FindByRef(ctx, spaceRef)
+ if err != nil {
+ return nil, 0, err
+ }
+
+ if err = apiauth.CheckSpace(ctx, c.authorizer, session, space, enum.PermissionSpaceView, false); err != nil {
+ return nil, 0, err
+ }
+
+ var memberships []types.MembershipUser
+ var membershipsCount int64
+
+ err = dbtx.New(c.db).WithTx(ctx, func(ctx context.Context) error {
+ memberships, err = c.membershipStore.ListUsers(ctx, space.ID, filter)
+ if err != nil {
+ return fmt.Errorf("failed to list memberships for space: %w", err)
+ }
+
+ if filter.Page == 1 && len(memberships) < filter.Size {
+ membershipsCount = int64(len(memberships))
+ return nil
+ }
+
+ membershipsCount, err = c.membershipStore.CountUsers(ctx, space.ID, filter)
+ if err != nil {
+ return fmt.Errorf("failed to count memberships for space: %w", err)
+ }
+
+ return nil
+ }, dbtx.TxDefaultReadOnly)
+ if err != nil {
+ return nil, 0, err
+ }
+
+ return memberships, membershipsCount, nil
+}
diff --git a/internal/api/controller/space/membership_update.go b/internal/api/controller/space/membership_update.go
new file mode 100644
index 0000000000..8363969240
--- /dev/null
+++ b/internal/api/controller/space/membership_update.go
@@ -0,0 +1,95 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package space
+
+import (
+ "context"
+ "fmt"
+
+ apiauth "github.com/harness/gitness/internal/api/auth"
+ "github.com/harness/gitness/internal/api/usererror"
+ "github.com/harness/gitness/internal/auth"
+ "github.com/harness/gitness/types"
+ "github.com/harness/gitness/types/enum"
+)
+
+type MembershipUpdateInput struct {
+ Role enum.MembershipRole `json:"role"`
+}
+
+func (in *MembershipUpdateInput) Validate() error {
+ if in.Role == "" {
+ return usererror.BadRequest("Role must be provided")
+ }
+
+ role, ok := in.Role.Sanitize()
+ if !ok {
+ msg := fmt.Sprintf("Provided role '%s' is not supported. 
Valid values are: %v", + in.Role, enum.MembershipRoles) + return usererror.BadRequest(msg) + } + + in.Role = role + + return nil +} + +// MembershipUpdate changes the role of an existing membership. +func (c *Controller) MembershipUpdate(ctx context.Context, + session *auth.Session, + spaceRef string, + userUID string, + in *MembershipUpdateInput, +) (*types.MembershipUser, error) { + space, err := c.spaceStore.FindByRef(ctx, spaceRef) + if err != nil { + return nil, err + } + + if err = apiauth.CheckSpace(ctx, c.authorizer, session, space, enum.PermissionSpaceEdit, false); err != nil { + return nil, err + } + + err = in.Validate() + if err != nil { + return nil, err + } + + user, err := c.principalStore.FindUserByUID(ctx, userUID) + if err != nil { + return nil, fmt.Errorf("failed to find user by uid: %w", err) + } + + membership, err := c.membershipStore.FindUser(ctx, types.MembershipKey{ + SpaceID: space.ID, + PrincipalID: user.ID, + }) + if err != nil { + return nil, fmt.Errorf("failed to find membership for update: %w", err) + } + + if membership.Role == in.Role { + return membership, nil + } + + membership.Role = in.Role + + err = c.membershipStore.Update(ctx, &membership.Membership) + if err != nil { + return nil, fmt.Errorf("failed to update membership") + } + + return membership, nil +} diff --git a/internal/api/controller/space/move.go b/internal/api/controller/space/move.go new file mode 100644 index 0000000000..2f7c5a2102 --- /dev/null +++ b/internal/api/controller/space/move.go @@ -0,0 +1,134 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package space + +import ( + "context" + "fmt" + "time" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/store/database/dbtx" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +// MoveInput is used for moving a space. +type MoveInput struct { + UID *string `json:"uid"` +} + +func (i *MoveInput) hasChanges(space *types.Space) bool { + if i.UID != nil && *i.UID != space.UID { + return true + } + + return false +} + +// Move moves a space to a new UID. +// TODO: Add support for moving to other parents and alias. 
+// +//nolint:gocognit // refactor if needed +func (c *Controller) Move( + ctx context.Context, + session *auth.Session, + spaceRef string, + in *MoveInput, +) (*types.Space, error) { + space, err := c.spaceStore.FindByRef(ctx, spaceRef) + if err != nil { + return nil, err + } + + if err = apiauth.CheckSpace(ctx, c.authorizer, session, space, enum.PermissionSpaceEdit, false); err != nil { + return nil, err + } + + if err = c.sanitizeMoveInput(in, space.ParentID == 0); err != nil { + return nil, fmt.Errorf("failed to sanitize input: %w", err) + } + + // exit early if there are no changes + if !in.hasChanges(space) { + return space, nil + } + + if err = c.moveInner( + ctx, + session, + space, + in.UID, + ); err != nil { + return nil, err + } + + return space, nil +} + +func (c *Controller) sanitizeMoveInput(in *MoveInput, isRoot bool) error { + if in.UID != nil { + if err := c.uidCheck(*in.UID, isRoot); err != nil { + return err + } + } + + return nil +} + +func (c *Controller) moveInner( + ctx context.Context, + session *auth.Session, + space *types.Space, + inUID *string, +) error { + return dbtx.New(c.db).WithTx(ctx, func(ctx context.Context) error { + // delete old primary segment + err := c.spacePathStore.DeletePrimarySegment(ctx, space.ID) + if err != nil { + return fmt.Errorf("failed to delete primary path segment: %w", err) + } + + // update space with move inputs + if inUID != nil { + space.UID = *inUID + } + + // add new primary segment using updated space data + now := time.Now().UnixMilli() + newPrimarySegment := &types.SpacePathSegment{ + ParentID: space.ParentID, + UID: space.UID, + SpaceID: space.ID, + IsPrimary: true, + CreatedBy: session.Principal.ID, + Created: now, + Updated: now, + } + err = c.spacePathStore.InsertSegment(ctx, newPrimarySegment) + if err != nil { + return fmt.Errorf("failed to create new primary path segment: %w", err) + } + + // update space itself + err = c.spaceStore.Update(ctx, space) + if err != nil { + return fmt.Errorf("failed to update the space in the db: %w", err) + } + + return nil + }) +} diff --git a/internal/api/controller/space/update.go b/internal/api/controller/space/update.go new file mode 100644 index 0000000000..6b08faf49c --- /dev/null +++ b/internal/api/controller/space/update.go @@ -0,0 +1,87 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package space + +import ( + "context" + "fmt" + "strings" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/check" + "github.com/harness/gitness/types/enum" +) + +// UpdateInput is used for updating a space. 
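// Illustrative sketch, not part of this patch: Move above renames a space by
// swapping its primary path segment inside one transaction -- delete the old
// primary segment, insert a new one carrying the updated UID, then persist
// the space itself. A toy, in-memory version of that swap (hypothetical
// types, no real transaction):

package main

import "fmt"

type segment struct {
	spaceID int64
	uid     string
	primary bool
}

type store struct{ segments []segment }

// swapPrimary removes the current primary segment of a space and records a
// new one with the updated UID, mirroring the ordering used in moveInner.
func (s *store) swapPrimary(spaceID int64, newUID string) {
	kept := s.segments[:0]
	for _, seg := range s.segments {
		if seg.spaceID == spaceID && seg.primary {
			continue // drop the old primary segment
		}
		kept = append(kept, seg)
	}
	s.segments = append(kept, segment{spaceID: spaceID, uid: newUID, primary: true})
}

func main() {
	s := &store{segments: []segment{{spaceID: 7, uid: "old-name", primary: true}}}
	s.swapPrimary(7, "new-name")
	fmt.Printf("%+v\n", s.segments)
}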
+type UpdateInput struct { + Description *string `json:"description"` + IsPublic *bool `json:"is_public"` +} + +func (in *UpdateInput) hasChanges(space *types.Space) bool { + return (in.Description != nil && *in.Description != space.Description) || + (in.IsPublic != nil && *in.IsPublic != space.IsPublic) +} + +// Update updates a space. +func (c *Controller) Update(ctx context.Context, session *auth.Session, + spaceRef string, in *UpdateInput) (*types.Space, error) { + space, err := c.spaceStore.FindByRef(ctx, spaceRef) + if err != nil { + return nil, err + } + + if err = apiauth.CheckSpace(ctx, c.authorizer, session, space, enum.PermissionSpaceEdit, false); err != nil { + return nil, err + } + + if !in.hasChanges(space) { + return space, nil + } + + if err = sanitizeUpdateInput(in); err != nil { + return nil, fmt.Errorf("failed to sanitize input: %w", err) + } + + space, err = c.spaceStore.UpdateOptLock(ctx, space, func(space *types.Space) error { + // update values only if provided + if in.Description != nil { + space.Description = *in.Description + } + if in.IsPublic != nil { + space.IsPublic = *in.IsPublic + } + + return nil + }) + if err != nil { + return nil, err + } + + return space, nil +} + +func sanitizeUpdateInput(in *UpdateInput) error { + if in.Description != nil { + *in.Description = strings.TrimSpace(*in.Description) + if err := check.Description(*in.Description); err != nil { + return err + } + } + + return nil +} diff --git a/internal/api/controller/space/wire.go b/internal/api/controller/space/wire.go new file mode 100644 index 0000000000..6ab9522216 --- /dev/null +++ b/internal/api/controller/space/wire.go @@ -0,0 +1,49 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package space + +import ( + "github.com/harness/gitness/internal/api/controller/repo" + "github.com/harness/gitness/internal/auth/authz" + "github.com/harness/gitness/internal/services/exporter" + "github.com/harness/gitness/internal/services/importer" + "github.com/harness/gitness/internal/sse" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/internal/url" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/check" + + "github.com/google/wire" + "github.com/jmoiron/sqlx" +) + +// WireSet provides a wire set for this package. 
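// Illustrative sketch, not part of this patch: UpdateInput above uses pointer
// fields so that nil means "leave unchanged", and hasChanges() short-circuits
// no-op requests before touching the store. The same nil-means-unchanged
// pattern in isolation (hypothetical structs):

package main

import "fmt"

type space struct {
	Description string
	IsPublic    bool
}

type updateInput struct {
	Description *string
	IsPublic    *bool
}

// apply copies only the provided fields and reports whether anything changed.
func apply(s *space, in updateInput) bool {
	changed := false
	if in.Description != nil && *in.Description != s.Description {
		s.Description = *in.Description
		changed = true
	}
	if in.IsPublic != nil && *in.IsPublic != s.IsPublic {
		s.IsPublic = *in.IsPublic
		changed = true
	}
	return changed
}

func main() {
	s := &space{Description: "old"}
	desc := "new"
	fmt.Println(apply(s, updateInput{Description: &desc}), *s) // true {new false}
}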
+var WireSet = wire.NewSet( + ProvideController, +) + +func ProvideController(config *types.Config, db *sqlx.DB, urlProvider *url.Provider, sseStreamer sse.Streamer, + uidCheck check.PathUID, authorizer authz.Authorizer, spacePathStore store.SpacePathStore, + pipelineStore store.PipelineStore, secretStore store.SecretStore, + connectorStore store.ConnectorStore, templateStore store.TemplateStore, + spaceStore store.SpaceStore, repoStore store.RepoStore, principalStore store.PrincipalStore, + repoCtrl *repo.Controller, membershipStore store.MembershipStore, importer *importer.Repository, exporter *exporter.Repository, +) *Controller { + return NewController(config, db, urlProvider, sseStreamer, uidCheck, authorizer, + spacePathStore, pipelineStore, secretStore, + connectorStore, templateStore, + spaceStore, repoStore, principalStore, + repoCtrl, membershipStore, importer, exporter) +} diff --git a/internal/api/controller/system/controller.go b/internal/api/controller/system/controller.go new file mode 100644 index 0000000000..b7ba3b766b --- /dev/null +++ b/internal/api/controller/system/controller.go @@ -0,0 +1,43 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package system + +import ( + "context" + + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/types" +) + +type Controller struct { + principalStore store.PrincipalStore + config *types.Config +} + +func NewController(principalStore store.PrincipalStore, config *types.Config) *Controller { + return &Controller{ + principalStore: principalStore, + config: config, + } +} + +func (c *Controller) IsUserSignupAllowed(ctx context.Context) (bool, error) { + usrCount, err := c.principalStore.CountUsers(ctx, &types.UserFilter{}) + if err != nil { + return false, err + } + + return usrCount == 0 || c.config.UserSignupEnabled, nil +} diff --git a/internal/api/controller/system/wire.go b/internal/api/controller/system/wire.go new file mode 100644 index 0000000000..a80c85cada --- /dev/null +++ b/internal/api/controller/system/wire.go @@ -0,0 +1,31 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package system + +import ( + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/types" + + "github.com/google/wire" +) + +// WireSet provides a wire set for this package. 
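// Illustrative sketch, not part of this patch: IsUserSignupAllowed above
// permits signup when the instance has no users yet (first-user bootstrap) or
// when signup is enabled in the config. The rule in isolation:

package main

import "fmt"

// signupAllowed mirrors the check above: the very first user may always sign
// up; afterwards the config flag decides.
func signupAllowed(userCount int64, signupEnabled bool) bool {
	return userCount == 0 || signupEnabled
}

func main() {
	fmt.Println(signupAllowed(0, false)) // true: bootstrap the first user
	fmt.Println(signupAllowed(5, false)) // false: signup disabled
	fmt.Println(signupAllowed(5, true))  // true: signup enabled
}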
+var WireSet = wire.NewSet( + NewController, +) + +func ProvideController(principalStore store.PrincipalStore, config *types.Config) *Controller { + return NewController(principalStore, config) +} diff --git a/internal/api/controller/template/controller.go b/internal/api/controller/template/controller.go new file mode 100644 index 0000000000..06c723b1ce --- /dev/null +++ b/internal/api/controller/template/controller.go @@ -0,0 +1,47 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package template + +import ( + "github.com/harness/gitness/internal/auth/authz" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/types/check" + + "github.com/jmoiron/sqlx" +) + +type Controller struct { + db *sqlx.DB + uidCheck check.PathUID + templateStore store.TemplateStore + authorizer authz.Authorizer + spaceStore store.SpaceStore +} + +func NewController( + db *sqlx.DB, + uidCheck check.PathUID, + authorizer authz.Authorizer, + templateStore store.TemplateStore, + spaceStore store.SpaceStore, +) *Controller { + return &Controller{ + db: db, + uidCheck: uidCheck, + templateStore: templateStore, + authorizer: authorizer, + spaceStore: spaceStore, + } +} diff --git a/internal/api/controller/template/create.go b/internal/api/controller/template/create.go new file mode 100644 index 0000000000..cf76dca98f --- /dev/null +++ b/internal/api/controller/template/create.go @@ -0,0 +1,93 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package template + +import ( + "context" + "fmt" + "strconv" + "strings" + "time" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/check" + "github.com/harness/gitness/types/enum" +) + +var ( + // errTemplateRequiresParent if the user tries to create a template without a parent space. 
+ errTemplateRequiresParent = usererror.BadRequest( + "Parent space required - standalone templates are not supported.") +) + +type CreateInput struct { + Description string `json:"description"` + SpaceRef string `json:"space_ref"` // Ref of the parent space + UID string `json:"uid"` + Type string `json:"type"` + Data string `json:"data"` +} + +func (c *Controller) Create(ctx context.Context, session *auth.Session, in *CreateInput) (*types.Template, error) { + if err := c.sanitizeCreateInput(in); err != nil { + return nil, fmt.Errorf("failed to sanitize input: %w", err) + } + + parentSpace, err := c.spaceStore.FindByRef(ctx, in.SpaceRef) + if err != nil { + return nil, fmt.Errorf("failed to find parent by ref: %w", err) + } + + err = apiauth.CheckTemplate(ctx, c.authorizer, session, parentSpace.Path, in.UID, enum.PermissionTemplateEdit) + if err != nil { + return nil, err + } + + var template *types.Template + now := time.Now().UnixMilli() + template = &types.Template{ + Description: in.Description, + Data: in.Data, + SpaceID: parentSpace.ID, + UID: in.UID, + Created: now, + Updated: now, + Version: 0, + } + err = c.templateStore.Create(ctx, template) + if err != nil { + return nil, fmt.Errorf("template creation failed: %w", err) + } + + return template, nil +} + +func (c *Controller) sanitizeCreateInput(in *CreateInput) error { + parentRefAsID, err := strconv.ParseInt(in.SpaceRef, 10, 64) + + if (err == nil && parentRefAsID <= 0) || (len(strings.TrimSpace(in.SpaceRef)) == 0) { + return errTemplateRequiresParent + } + + if err := c.uidCheck(in.UID, false); err != nil { + return err + } + + in.Description = strings.TrimSpace(in.Description) + return check.Description(in.Description) +} diff --git a/internal/api/controller/template/delete.go b/internal/api/controller/template/delete.go new file mode 100644 index 0000000000..51deeda886 --- /dev/null +++ b/internal/api/controller/template/delete.go @@ -0,0 +1,41 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
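// Illustrative sketch, not part of this patch: the template
// sanitizeCreateInput above rejects templates without a usable parent space:
// an empty space_ref, or a numeric ref <= 0, fails with the "parent space
// required" error. That guard in isolation (hypothetical helper name):

package main

import (
	"errors"
	"fmt"
	"strconv"
	"strings"
)

var errRequiresParent = errors.New("parent space required - standalone templates are not supported")

// checkParentRef mirrors the guard above.
func checkParentRef(spaceRef string) error {
	id, err := strconv.ParseInt(spaceRef, 10, 64)
	if (err == nil && id <= 0) || strings.TrimSpace(spaceRef) == "" {
		return errRequiresParent
	}
	return nil
}

func main() {
	fmt.Println(checkParentRef(""))         // parent space required ...
	fmt.Println(checkParentRef("0"))        // parent space required ...
	fmt.Println(checkParentRef("my-space")) // <nil>
}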
+ +package template + +import ( + "context" + "fmt" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types/enum" +) + +func (c *Controller) Delete(ctx context.Context, session *auth.Session, spaceRef string, uid string) error { + space, err := c.spaceStore.FindByRef(ctx, spaceRef) + if err != nil { + return fmt.Errorf("failed to find space: %w", err) + } + + err = apiauth.CheckTemplate(ctx, c.authorizer, session, space.Path, uid, enum.PermissionTemplateDelete) + if err != nil { + return fmt.Errorf("failed to authorize: %w", err) + } + err = c.templateStore.DeleteByUID(ctx, space.ID, uid) + if err != nil { + return fmt.Errorf("could not delete template: %w", err) + } + return nil +} diff --git a/internal/api/controller/template/find.go b/internal/api/controller/template/find.go new file mode 100644 index 0000000000..006293d48a --- /dev/null +++ b/internal/api/controller/template/find.go @@ -0,0 +1,46 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package template + +import ( + "context" + "fmt" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +func (c *Controller) Find( + ctx context.Context, + session *auth.Session, + spaceRef string, + uid string, +) (*types.Template, error) { + space, err := c.spaceStore.FindByRef(ctx, spaceRef) + if err != nil { + return nil, fmt.Errorf("failed to find space: %w", err) + } + err = apiauth.CheckTemplate(ctx, c.authorizer, session, space.Path, uid, enum.PermissionTemplateView) + if err != nil { + return nil, fmt.Errorf("failed to authorize: %w", err) + } + template, err := c.templateStore.FindByUID(ctx, space.ID, uid) + if err != nil { + return nil, fmt.Errorf("failed to find template: %w", err) + } + return template, nil +} diff --git a/internal/api/controller/template/update.go b/internal/api/controller/template/update.go new file mode 100644 index 0000000000..9147c3e5b1 --- /dev/null +++ b/internal/api/controller/template/update.go @@ -0,0 +1,90 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package template + +import ( + "context" + "fmt" + "strings" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/check" + "github.com/harness/gitness/types/enum" +) + +// UpdateInput is used for updating a template. +type UpdateInput struct { + UID *string `json:"uid"` + Description *string `json:"description"` + Data *string `json:"data"` +} + +func (c *Controller) Update( + ctx context.Context, + session *auth.Session, + spaceRef string, + uid string, + in *UpdateInput, +) (*types.Template, error) { + space, err := c.spaceStore.FindByRef(ctx, spaceRef) + if err != nil { + return nil, fmt.Errorf("failed to find space: %w", err) + } + + err = apiauth.CheckTemplate(ctx, c.authorizer, session, space.Path, uid, enum.PermissionTemplateEdit) + if err != nil { + return nil, fmt.Errorf("failed to authorize: %w", err) + } + + template, err := c.templateStore.FindByUID(ctx, space.ID, uid) + if err != nil { + return nil, fmt.Errorf("failed to find template: %w", err) + } + + return c.templateStore.UpdateOptLock(ctx, template, func(original *types.Template) error { + if in.UID != nil { + original.UID = *in.UID + } + if in.Description != nil { + original.Description = *in.Description + } + if in.Data != nil { + original.Data = *in.Data + } + + return nil + }) +} + +func (c *Controller) sanitizeUpdateInput(in *UpdateInput) error { + if in.UID != nil { + if err := c.uidCheck(*in.UID, false); err != nil { + return err + } + } + + if in.Description != nil { + *in.Description = strings.TrimSpace(*in.Description) + if err := check.Description(*in.Description); err != nil { + return err + } + } + + // TODO: Validate Data + + return nil +} diff --git a/internal/api/controller/template/wire.go b/internal/api/controller/template/wire.go new file mode 100644 index 0000000000..dc2f704285 --- /dev/null +++ b/internal/api/controller/template/wire.go @@ -0,0 +1,38 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package template + +import ( + "github.com/harness/gitness/internal/auth/authz" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/types/check" + + "github.com/google/wire" + "github.com/jmoiron/sqlx" +) + +// WireSet provides a wire set for this package. +var WireSet = wire.NewSet( + ProvideController, +) + +func ProvideController(db *sqlx.DB, + uidCheck check.PathUID, + templateStore store.TemplateStore, + authorizer authz.Authorizer, + spaceStore store.SpaceStore, +) *Controller { + return NewController(db, uidCheck, authorizer, templateStore, spaceStore) +} diff --git a/internal/api/controller/trigger/common.go b/internal/api/controller/trigger/common.go new file mode 100644 index 0000000000..c58e798b91 --- /dev/null +++ b/internal/api/controller/trigger/common.go @@ -0,0 +1,68 @@ +// Copyright 2023 Harness, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package trigger + +import ( + "github.com/harness/gitness/types/check" + "github.com/harness/gitness/types/enum" +) + +const ( + // triggerMaxSecretLength defines the max allowed length of a trigger secret. + // TODO: Check whether this is sufficient for other SCM providers once we + // add support. For now it's good to have a limit and increase if needed. + triggerMaxSecretLength = 4096 +) + +// checkSecret validates the secret of a trigger. +func checkSecret(secret string) error { + if len(secret) > triggerMaxSecretLength { + return check.NewValidationErrorf("The secret of a trigger can be at most %d characters long.", + triggerMaxSecretLength) + } + + return nil +} + +// checkActions validates the trigger actions. +func checkActions(actions []enum.TriggerAction) error { + // ignore duplicates here, should be deduplicated later + for _, action := range actions { + if _, ok := action.Sanitize(); !ok { + return check.NewValidationErrorf("The provided trigger action '%s' is invalid.", action) + } + } + + return nil +} + +// deduplicateActions de-duplicates the actions provided by in the trigger. +func deduplicateActions(in []enum.TriggerAction) []enum.TriggerAction { + if len(in) == 0 { + return []enum.TriggerAction{} + } + + actionSet := make(map[enum.TriggerAction]struct{}) + out := make([]enum.TriggerAction, 0, len(in)) + for _, action := range in { + if _, ok := actionSet[action]; ok { + continue + } + actionSet[action] = struct{}{} + out = append(out, action) + } + + return out +} diff --git a/internal/api/controller/trigger/controller.go b/internal/api/controller/trigger/controller.go new file mode 100644 index 0000000000..fd86158f3f --- /dev/null +++ b/internal/api/controller/trigger/controller.go @@ -0,0 +1,50 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
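// Illustrative sketch of the trigger validation helpers above: checkActions
// tolerates duplicates and only rejects values that enum.TriggerAction.Sanitize
// does not recognize; deduplicateActions then drops repeats while preserving
// first-seen order. The literal action values below are placeholders, not
// necessarily valid enum.TriggerAction constants:
//
//	in := []enum.TriggerAction{"branch_updated", "branch_updated", "tag_created"}
//	if err := checkActions(in); err != nil {
//		// at least one entry is not a known trigger action
//	}
//	actions := deduplicateActions(in) // -> ["branch_updated", "tag_created"], order preserved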
+ +package trigger + +import ( + "github.com/harness/gitness/internal/auth/authz" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/types/check" + + "github.com/jmoiron/sqlx" +) + +type Controller struct { + db *sqlx.DB + authorizer authz.Authorizer + triggerStore store.TriggerStore + uidCheck check.PathUID + pipelineStore store.PipelineStore + repoStore store.RepoStore +} + +func NewController( + db *sqlx.DB, + authorizer authz.Authorizer, + triggerStore store.TriggerStore, + uidCheck check.PathUID, + pipelineStore store.PipelineStore, + repoStore store.RepoStore, +) *Controller { + return &Controller{ + db: db, + authorizer: authorizer, + triggerStore: triggerStore, + uidCheck: uidCheck, + pipelineStore: pipelineStore, + repoStore: repoStore, + } +} diff --git a/internal/api/controller/trigger/create.go b/internal/api/controller/trigger/create.go new file mode 100644 index 0000000000..aa2baa05db --- /dev/null +++ b/internal/api/controller/trigger/create.go @@ -0,0 +1,103 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package trigger + +import ( + "context" + "fmt" + "time" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/check" + "github.com/harness/gitness/types/enum" +) + +// TODO: Add more as needed. +type CreateInput struct { + Description string `json:"description"` + UID string `json:"uid"` + Secret string `json:"secret"` + Disabled bool `json:"disabled"` + Actions []enum.TriggerAction `json:"actions"` +} + +func (c *Controller) Create( + ctx context.Context, + session *auth.Session, + repoRef string, + pipelineUID string, + in *CreateInput, +) (*types.Trigger, error) { + repo, err := c.repoStore.FindByRef(ctx, repoRef) + if err != nil { + return nil, fmt.Errorf("failed to find repo by ref: %w", err) + } + // Trigger permissions are associated with pipeline permissions. If a user has permissions + // to edit the pipeline, they will have permissions to create a trigger as well. 
+ err = apiauth.CheckPipeline(ctx, c.authorizer, session, repo.Path, pipelineUID, enum.PermissionPipelineEdit) + if err != nil { + return nil, fmt.Errorf("failed to authorize pipeline: %w", err) + } + + err = c.checkCreateInput(in) + if err != nil { + return nil, fmt.Errorf("invalid input: %w", err) + } + + pipeline, err := c.pipelineStore.FindByUID(ctx, repo.ID, pipelineUID) + if err != nil { + return nil, fmt.Errorf("failed to find pipeline: %w", err) + } + + now := time.Now().UnixMilli() + trigger := &types.Trigger{ + Description: in.Description, + Disabled: in.Disabled, + Secret: in.Secret, + CreatedBy: session.Principal.ID, + RepoID: repo.ID, + Actions: deduplicateActions(in.Actions), + UID: in.UID, + PipelineID: pipeline.ID, + Created: now, + Updated: now, + Version: 0, + } + err = c.triggerStore.Create(ctx, trigger) + if err != nil { + return nil, fmt.Errorf("trigger creation failed: %w", err) + } + + return trigger, nil +} + +func (c *Controller) checkCreateInput(in *CreateInput) error { + if err := check.Description(in.Description); err != nil { + return err + } + if err := checkSecret(in.Secret); err != nil { + return err + } + if err := checkActions(in.Actions); err != nil { + return err + } + if err := c.uidCheck(in.UID, false); err != nil { + return err + } + + return nil +} diff --git a/internal/api/controller/trigger/delete.go b/internal/api/controller/trigger/delete.go new file mode 100644 index 0000000000..cc24a996ac --- /dev/null +++ b/internal/api/controller/trigger/delete.go @@ -0,0 +1,54 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package trigger + +import ( + "context" + "fmt" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types/enum" +) + +func (c *Controller) Delete( + ctx context.Context, + session *auth.Session, + repoRef string, + pipelineUID string, + triggerUID string, +) error { + repo, err := c.repoStore.FindByRef(ctx, repoRef) + if err != nil { + return fmt.Errorf("failed to find repo by ref: %w", err) + } + // Trigger permissions are associated with pipeline permissions. If a user has permissions + // to edit the pipeline, they will have permissions to remove a trigger as well. 
+ err = apiauth.CheckPipeline(ctx, c.authorizer, session, repo.Path, pipelineUID, enum.PermissionPipelineEdit) + if err != nil { + return fmt.Errorf("failed to authorize pipeline: %w", err) + } + + pipeline, err := c.pipelineStore.FindByUID(ctx, repo.ID, pipelineUID) + if err != nil { + return fmt.Errorf("failed to find pipeline: %w", err) + } + + err = c.triggerStore.DeleteByUID(ctx, pipeline.ID, triggerUID) + if err != nil { + return fmt.Errorf("could not delete trigger: %w", err) + } + return nil +} diff --git a/internal/api/controller/trigger/find.go b/internal/api/controller/trigger/find.go new file mode 100644 index 0000000000..11bdb2bdcd --- /dev/null +++ b/internal/api/controller/trigger/find.go @@ -0,0 +1,54 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package trigger + +import ( + "context" + "fmt" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +func (c *Controller) Find( + ctx context.Context, + session *auth.Session, + repoRef string, + pipelineUID string, + triggerUID string, +) (*types.Trigger, error) { + repo, err := c.repoStore.FindByRef(ctx, repoRef) + if err != nil { + return nil, fmt.Errorf("failed to find repo by ref: %w", err) + } + err = apiauth.CheckPipeline(ctx, c.authorizer, session, repo.Path, pipelineUID, enum.PermissionPipelineView) + if err != nil { + return nil, fmt.Errorf("failed to authorize pipeline: %w", err) + } + + pipeline, err := c.pipelineStore.FindByUID(ctx, repo.ID, pipelineUID) + if err != nil { + return nil, fmt.Errorf("failed to find pipeline: %w", err) + } + + trigger, err := c.triggerStore.FindByUID(ctx, pipeline.ID, triggerUID) + if err != nil { + return nil, fmt.Errorf("failed to find trigger %s: %w", triggerUID, err) + } + + return trigger, nil +} diff --git a/internal/api/controller/trigger/list.go b/internal/api/controller/trigger/list.go new file mode 100644 index 0000000000..061fbcff2d --- /dev/null +++ b/internal/api/controller/trigger/list.go @@ -0,0 +1,60 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
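// Illustrative usage sketch: triggers are always addressed through their pipeline.
// The controller resolves repoRef -> repo, authorizes against the pipeline
// (PermissionPipelineEdit for Delete, PermissionPipelineView for Find), resolves
// pipelineUID -> pipeline, and only then looks the trigger up by
// (pipeline.ID, triggerUID). A hypothetical caller, assuming a wired-up
// *trigger.Controller named triggerCtrl and invented identifiers:
//
//	trg, err := triggerCtrl.Find(ctx, session, "rootspace/myrepo", "build", "on-push")
//	if err != nil {
//		// repo, pipeline or trigger not found, or pipeline view permission denied
//	} else {
//		_ = trg.Actions // the deduplicated actions stored at creation time
//	}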
+package trigger + +import ( + "context" + "fmt" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +func (c *Controller) List( + ctx context.Context, + session *auth.Session, + repoRef string, + pipelineUID string, + filter types.ListQueryFilter, +) ([]*types.Trigger, int64, error) { + repo, err := c.repoStore.FindByRef(ctx, repoRef) + if err != nil { + return nil, 0, fmt.Errorf("failed to find repo by ref: %w", err) + } + // Trigger permissions are associated with pipeline permissions. If a user has permissions + // to view the pipeline, they will have permissions to list triggers as well. + err = apiauth.CheckPipeline(ctx, c.authorizer, session, repo.Path, pipelineUID, enum.PermissionPipelineView) + if err != nil { + return nil, 0, fmt.Errorf("failed to authorize pipeline: %w", err) + } + + pipeline, err := c.pipelineStore.FindByUID(ctx, repo.ID, pipelineUID) + if err != nil { + return nil, 0, fmt.Errorf("failed to find pipeline: %w", err) + } + + count, err := c.triggerStore.Count(ctx, pipeline.ID, filter) + if err != nil { + return nil, 0, fmt.Errorf("failed to count triggers in space: %w", err) + } + + triggers, err := c.triggerStore.List(ctx, pipeline.ID, filter) + if err != nil { + return nil, 0, fmt.Errorf("failed to list triggers: %w", err) + } + + return triggers, count, nil +} diff --git a/internal/api/controller/trigger/update.go b/internal/api/controller/trigger/update.go new file mode 100644 index 0000000000..46b4a12bde --- /dev/null +++ b/internal/api/controller/trigger/update.go @@ -0,0 +1,120 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package trigger + +import ( + "context" + "fmt" + "strings" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/check" + "github.com/harness/gitness/types/enum" +) + +// UpdateInput is used for updating a trigger. +type UpdateInput struct { + Description *string `json:"description"` + UID *string `json:"uid"` + Actions []enum.TriggerAction `json:"actions"` + Secret *string `json:"secret"` + Disabled *bool `json:"disabled"` // can be nil, so keeping it a pointer +} + +func (c *Controller) Update( + ctx context.Context, + session *auth.Session, + repoRef string, + pipelineUID string, + triggerUID string, + in *UpdateInput) (*types.Trigger, error) { + repo, err := c.repoStore.FindByRef(ctx, repoRef) + if err != nil { + return nil, fmt.Errorf("failed to find repo by ref: %w", err) + } + // Trigger permissions are associated with pipeline permissions. If a user has permissions + // to edit the pipeline, they will have permissions to edit the trigger as well. 
+ err = apiauth.CheckPipeline(ctx, c.authorizer, session, repo.Path, pipelineUID, enum.PermissionPipelineEdit) + if err != nil { + return nil, fmt.Errorf("failed to authorize pipeline: %w", err) + } + + err = c.checkUpdateInput(in) + if err != nil { + return nil, fmt.Errorf("invalid input: %w", err) + } + + pipeline, err := c.pipelineStore.FindByUID(ctx, repo.ID, pipelineUID) + if err != nil { + return nil, fmt.Errorf("failed to find pipeline: %w", err) + } + + trigger, err := c.triggerStore.FindByUID(ctx, pipeline.ID, triggerUID) + if err != nil { + return nil, fmt.Errorf("failed to find trigger: %w", err) + } + + return c.triggerStore.UpdateOptLock(ctx, + trigger, func(original *types.Trigger) error { + if in.UID != nil { + original.UID = *in.UID + } + if in.Description != nil { + original.Description = *in.Description + } + if in.Actions != nil { + original.Actions = deduplicateActions(in.Actions) + } + if in.Secret != nil { + original.Secret = *in.Secret + } + if in.Disabled != nil { + original.Disabled = *in.Disabled + } + + return nil + }) +} + +func (c *Controller) checkUpdateInput(in *UpdateInput) error { + if in.UID != nil { + if err := c.uidCheck(*in.UID, false); err != nil { + return err + } + } + + if in.Description != nil { + *in.Description = strings.TrimSpace(*in.Description) + if err := check.Description(*in.Description); err != nil { + return err + } + } + + if in.Secret != nil { + if err := checkSecret(*in.Secret); err != nil { + return err + } + } + + if in.Actions != nil { + if err := checkActions(in.Actions); err != nil { + return err + } + } + + return nil +} diff --git a/internal/api/controller/trigger/wire.go b/internal/api/controller/trigger/wire.go new file mode 100644 index 0000000000..5783c14ca8 --- /dev/null +++ b/internal/api/controller/trigger/wire.go @@ -0,0 +1,39 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package trigger + +import ( + "github.com/harness/gitness/internal/auth/authz" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/types/check" + + "github.com/google/wire" + "github.com/jmoiron/sqlx" +) + +// WireSet provides a wire set for this package. +var WireSet = wire.NewSet( + ProvideController, +) + +func ProvideController(db *sqlx.DB, + authorizer authz.Authorizer, + triggerStore store.TriggerStore, + uidCheck check.PathUID, + pipelineStore store.PipelineStore, + repoStore store.RepoStore, +) *Controller { + return NewController(db, authorizer, triggerStore, uidCheck, pipelineStore, repoStore) +} diff --git a/internal/api/controller/tx.go b/internal/api/controller/tx.go new file mode 100644 index 0000000000..220bcf5a4d --- /dev/null +++ b/internal/api/controller/tx.go @@ -0,0 +1,52 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package controller + +import ( + "context" + "errors" + + "github.com/harness/gitness/store" + "github.com/harness/gitness/store/database/dbtx" + + "github.com/jmoiron/sqlx" +) + +type TxOptionRetryCount int + +// TxOptLock runs the provided function inside a database transaction. If optimistic lock error occurs +// during the operation, the function will retry the whole transaction again (to the maximum of 5 times, +// but this can be overridden by providing an additional TxOptionRetryCount option). +func TxOptLock(ctx context.Context, + db *sqlx.DB, + txFn func(ctx context.Context) error, + opts ...interface{}, +) (err error) { + tries := 5 + for _, opt := range opts { + if n, ok := opt.(TxOptionRetryCount); ok { + tries = int(n) + } + } + + for try := 0; try < tries; try++ { + err = dbtx.New(db).WithTx(ctx, txFn, opts...) + if !errors.Is(err, store.ErrVersionConflict) { + break + } + } + + return +} diff --git a/internal/api/controller/user/controller.go b/internal/api/controller/user/controller.go new file mode 100644 index 0000000000..64d9a338ea --- /dev/null +++ b/internal/api/controller/user/controller.go @@ -0,0 +1,73 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
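// Illustrative usage sketch for TxOptLock above: the function re-runs the whole
// transaction whenever the callback surfaces store.ErrVersionConflict, so the
// callback has to re-read the row it mutates on every attempt. The
// pipelineStore.Find/Update calls below are hypothetical stand-ins for any store
// method that enforces a Version column:
//
//	err := controller.TxOptLock(ctx, db, func(ctx context.Context) error {
//		p, err := pipelineStore.Find(ctx, pipelineID) // re-read inside every attempt
//		if err != nil {
//			return err
//		}
//		p.Seq++
//		return pipelineStore.Update(ctx, p) // fails with store.ErrVersionConflict if p is stale
//	}, controller.TxOptionRetryCount(3)) // optional override of the default 5 attempts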
+ +package user + +import ( + "context" + + "github.com/harness/gitness/internal/auth/authz" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/check" + "github.com/harness/gitness/types/enum" + + "github.com/jmoiron/sqlx" + "golang.org/x/crypto/bcrypt" +) + +type Controller struct { + db *sqlx.DB + principalUIDCheck check.PrincipalUID + authorizer authz.Authorizer + principalStore store.PrincipalStore + tokenStore store.TokenStore + membershipStore store.MembershipStore +} + +func NewController( + db *sqlx.DB, + principalUIDCheck check.PrincipalUID, + authorizer authz.Authorizer, + principalStore store.PrincipalStore, + tokenStore store.TokenStore, + membershipStore store.MembershipStore, +) *Controller { + return &Controller{ + db: db, + principalUIDCheck: principalUIDCheck, + authorizer: authorizer, + principalStore: principalStore, + tokenStore: tokenStore, + membershipStore: membershipStore, + } +} + +var hashPassword = bcrypt.GenerateFromPassword + +func findUserFromUID(ctx context.Context, + principalStore store.PrincipalStore, userUID string, +) (*types.User, error) { + return principalStore.FindUserByUID(ctx, userUID) +} + +func findUserFromEmail(ctx context.Context, + principalStore store.PrincipalStore, email string, +) (*types.User, error) { + return principalStore.FindUserByEmail(ctx, email) +} + +func isUserTokenType(tokenType enum.TokenType) bool { + return tokenType == enum.TokenTypePAT || tokenType == enum.TokenTypeSession +} diff --git a/internal/api/controller/user/create.go b/internal/api/controller/user/create.go new file mode 100644 index 0000000000..716c5ce45d --- /dev/null +++ b/internal/api/controller/user/create.go @@ -0,0 +1,125 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package user + +import ( + "context" + "fmt" + "strings" + "time" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/check" + "github.com/harness/gitness/types/enum" + + "github.com/dchest/uniuri" + "golang.org/x/crypto/bcrypt" +) + +// CreateInput is the input used for create operations. +// On purpose don't expose admin, has to be enabled explicitly. +type CreateInput struct { + UID string `json:"uid"` + Email string `json:"email"` + DisplayName string `json:"display_name"` + Password string `json:"password"` +} + +// Create creates a new user. 
+func (c *Controller) Create(ctx context.Context, session *auth.Session, in *CreateInput) (*types.User, error) { + // Ensure principal has required permissions (user is global, no explicit resource) + scope := &types.Scope{} + resource := &types.Resource{ + Type: enum.ResourceTypeUser, + } + if err := apiauth.Check(ctx, c.authorizer, session, scope, resource, enum.PermissionUserCreate); err != nil { + return nil, err + } + + return c.CreateNoAuth(ctx, in, false) +} + +/* + * CreateNoAuth creates a new user without auth checks. + * WARNING: Never call as part of user flow. + * + * Note: take admin separately to avoid potential vulnerabilities for user calls. + */ +func (c *Controller) CreateNoAuth(ctx context.Context, in *CreateInput, admin bool) (*types.User, error) { + if err := c.sanitizeCreateInput(in); err != nil { + return nil, fmt.Errorf("invalid input: %w", err) + } + + hash, err := hashPassword([]byte(in.Password), bcrypt.DefaultCost) + if err != nil { + return nil, fmt.Errorf("failed to create hash: %w", err) + } + + user := &types.User{ + UID: in.UID, + DisplayName: in.DisplayName, + Email: in.Email, + Password: string(hash), + Salt: uniuri.NewLen(uniuri.UUIDLen), + Created: time.Now().UnixMilli(), + Updated: time.Now().UnixMilli(), + Admin: admin, + } + + err = c.principalStore.CreateUser(ctx, user) + if err != nil { + return nil, err + } + + uCount, err := c.principalStore.CountUsers(ctx, &types.UserFilter{}) + if err != nil { + return nil, err + } + + // first 'user' principal will be admin by default. + if uCount == 1 { + user.Admin = true + err = c.principalStore.UpdateUser(ctx, user) + if err != nil { + return nil, err + } + } + + return user, nil +} + +func (c *Controller) sanitizeCreateInput(in *CreateInput) error { + if err := c.principalUIDCheck(in.UID); err != nil { + return err + } + + in.Email = strings.TrimSpace(in.Email) + if err := check.Email(in.Email); err != nil { + return err + } + + in.DisplayName = strings.TrimSpace(in.DisplayName) + if err := check.DisplayName(in.DisplayName); err != nil { + return err + } + + if err := check.Password(in.Password); err != nil { + return err + } + + return nil +} diff --git a/internal/api/controller/user/create_access_token.go b/internal/api/controller/user/create_access_token.go new file mode 100644 index 0000000000..5e2333d57e --- /dev/null +++ b/internal/api/controller/user/create_access_token.go @@ -0,0 +1,73 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package user + +import ( + "context" + "time" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/internal/token" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/check" + "github.com/harness/gitness/types/enum" +) + +type CreateTokenInput struct { + UID string `json:"uid"` + Lifetime *time.Duration `json:"lifetime"` +} + +/* + * CreateToken creates a new user access token. 
+ */ +func (c *Controller) CreateAccessToken( + ctx context.Context, + session *auth.Session, + userUID string, + in *CreateTokenInput, +) (*types.TokenResponse, error) { + user, err := findUserFromUID(ctx, c.principalStore, userUID) + if err != nil { + return nil, err + } + + // Ensure principal has required permissions on parent + if err = apiauth.CheckUser(ctx, c.authorizer, session, user, enum.PermissionUserEdit); err != nil { + return nil, err + } + + if err = check.UID(in.UID); err != nil { + return nil, err + } + if err = check.TokenLifetime(in.Lifetime, true); err != nil { + return nil, err + } + + token, jwtToken, err := token.CreatePAT( + ctx, + c.tokenStore, + &session.Principal, + user, + in.UID, + in.Lifetime, + ) + if err != nil { + return nil, err + } + + return &types.TokenResponse{Token: *token, AccessToken: jwtToken}, nil +} diff --git a/internal/api/controller/user/delete.go b/internal/api/controller/user/delete.go new file mode 100644 index 0000000000..8f3fe8145a --- /dev/null +++ b/internal/api/controller/user/delete.go @@ -0,0 +1,61 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package user + +import ( + "context" + "fmt" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +// Delete deletes a user. +func (c *Controller) Delete(ctx context.Context, session *auth.Session, + userUID string) error { + user, err := findUserFromUID(ctx, c.principalStore, userUID) + if err != nil { + return err + } + + // Fail if the user being deleted is the only admin in DB + if user.Admin { + admUsrCount, err := c.principalStore.CountUsers(ctx, &types.UserFilter{Admin: true}) + if err != nil { + return fmt.Errorf("failed to check admin user count: %w", err) + } + + if admUsrCount == 1 { + return usererror.BadRequest("cannot delete the only admin user") + } + } + + // Ensure principal has required permissions on parent + if err = apiauth.CheckUser(ctx, c.authorizer, session, user, enum.PermissionUserDelete); err != nil { + return err + } + + // delete all tokens (okay if we fail after - user intended to be deleted anyway) + // TODO: cascading delete? + err = c.tokenStore.DeleteForPrincipal(ctx, user.ID) + if err != nil { + return fmt.Errorf("failed to delete tokens for user: %w", err) + } + + return c.principalStore.DeleteUser(ctx, user.ID) +} diff --git a/internal/api/controller/user/delete_token.go b/internal/api/controller/user/delete_token.go new file mode 100644 index 0000000000..f5e2d056f2 --- /dev/null +++ b/internal/api/controller/user/delete_token.go @@ -0,0 +1,63 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package user + +import ( + "context" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types/enum" + + "github.com/rs/zerolog/log" +) + +/* + * DeleteToken deletes a token of a user. + */ +func (c *Controller) DeleteToken(ctx context.Context, session *auth.Session, + userUID string, tokenType enum.TokenType, tokenUID string) error { + user, err := findUserFromUID(ctx, c.principalStore, userUID) + if err != nil { + return err + } + + // Ensure principal has required permissions on parent. + if err = apiauth.CheckUser(ctx, c.authorizer, session, user, enum.PermissionUserEdit); err != nil { + return err + } + + token, err := c.tokenStore.FindByUID(ctx, user.ID, tokenUID) + if err != nil { + return err + } + + // Ensure token type matches the requested type and is a valid user token type + if !isUserTokenType(token.Type) || token.Type != tokenType { + // throw a not found error - no need for user to know about token. + return usererror.ErrNotFound + } + + // Ensure token belongs to user. + if token.PrincipalID != user.ID { + log.Warn().Msg("Principal tried to delete token that doesn't belong to the user") + + // throw a not found error - no need for user to know about token. + return usererror.ErrNotFound + } + + return c.tokenStore.Delete(ctx, token.ID) +} diff --git a/internal/api/controller/user/find.go b/internal/api/controller/user/find.go new file mode 100644 index 0000000000..ebd567d64c --- /dev/null +++ b/internal/api/controller/user/find.go @@ -0,0 +1,50 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package user + +import ( + "context" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +/* + * Find tries to find the provided user. + */ +func (c *Controller) Find(ctx context.Context, session *auth.Session, + userUID string) (*types.User, error) { + user, err := c.FindNoAuth(ctx, userUID) + if err != nil { + return nil, err + } + + // Ensure principal has required permissions on parent. + if err = apiauth.CheckUser(ctx, c.authorizer, session, user, enum.PermissionUserView); err != nil { + return nil, err + } + + return user, nil +} + +/* + * FindNoAuth finds a user without auth checks. + * WARNING: Never call as part of user flow. 
+ */ +func (c *Controller) FindNoAuth(ctx context.Context, userUID string) (*types.User, error) { + return findUserFromUID(ctx, c.principalStore, userUID) +} diff --git a/internal/api/controller/user/find_email.go b/internal/api/controller/user/find_email.go new file mode 100644 index 0000000000..49131dc252 --- /dev/null +++ b/internal/api/controller/user/find_email.go @@ -0,0 +1,42 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package user + +import ( + "context" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +/* + * FindEmail tries to find the provided user using email. + */ +func (c *Controller) FindEmail(ctx context.Context, session *auth.Session, + email string) (*types.User, error) { + user, err := findUserFromEmail(ctx, c.principalStore, email) + if err != nil { + return nil, err + } + + // Ensure principal has required permissions on parent. + if err = apiauth.CheckUser(ctx, c.authorizer, session, user, enum.PermissionUserView); err != nil { + return nil, err + } + + return user, nil +} diff --git a/internal/api/controller/user/list.go b/internal/api/controller/user/list.go new file mode 100644 index 0000000000..7551100bd9 --- /dev/null +++ b/internal/api/controller/user/list.go @@ -0,0 +1,52 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package user + +import ( + "context" + "fmt" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +/* + * List lists all users of the system. 
+ */ +func (c *Controller) List(ctx context.Context, session *auth.Session, + filter *types.UserFilter) ([]*types.User, int64, error) { + // Ensure principal has required permissions (user is global, no explicit resource) + scope := &types.Scope{} + resource := &types.Resource{ + Type: enum.ResourceTypeUser, + } + if err := apiauth.Check(ctx, c.authorizer, session, scope, resource, enum.PermissionUserView); err != nil { + return nil, 0, err + } + + count, err := c.principalStore.CountUsers(ctx, filter) + if err != nil { + return nil, 0, fmt.Errorf("failed to count users: %w", err) + } + + repos, err := c.principalStore.ListUsers(ctx, filter) + if err != nil { + return nil, 0, fmt.Errorf("failed to list users: %w", err) + } + + return repos, count, nil +} diff --git a/internal/api/controller/user/list_tokens.go b/internal/api/controller/user/list_tokens.go new file mode 100644 index 0000000000..375dce2d76 --- /dev/null +++ b/internal/api/controller/user/list_tokens.go @@ -0,0 +1,47 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package user + +import ( + "context" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +/* + * ListTokens lists all tokens of a user. + */ +func (c *Controller) ListTokens(ctx context.Context, session *auth.Session, + userUID string, tokenType enum.TokenType) ([]*types.Token, error) { + user, err := findUserFromUID(ctx, c.principalStore, userUID) + if err != nil { + return nil, err + } + + // Ensure principal has required permissions on parent. + if err = apiauth.CheckUser(ctx, c.authorizer, session, user, enum.PermissionUserView); err != nil { + return nil, err + } + + if !isUserTokenType(tokenType) { + return nil, usererror.ErrBadRequest + } + + return c.tokenStore.List(ctx, user.ID, tokenType) +} diff --git a/internal/api/controller/user/login.go b/internal/api/controller/user/login.go new file mode 100644 index 0000000000..7acd0e1291 --- /dev/null +++ b/internal/api/controller/user/login.go @@ -0,0 +1,90 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package user + +import ( + "context" + "crypto/rand" + "errors" + "fmt" + "math/big" + "time" + + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/internal/token" + "github.com/harness/gitness/store" + "github.com/harness/gitness/types" + + "github.com/rs/zerolog/log" + "golang.org/x/crypto/bcrypt" +) + +type LoginInput struct { + LoginIdentifier string `json:"login_identifier"` + Password string `json:"password"` +} + +/* + * Login attempts to login as a specific user - returns the session token if successful. + */ +func (c *Controller) Login(ctx context.Context, session *auth.Session, + in *LoginInput) (*types.TokenResponse, error) { + // no auth check required, password is used for it. + + user, err := findUserFromUID(ctx, c.principalStore, in.LoginIdentifier) + if errors.Is(err, store.ErrResourceNotFound) { + user, err = findUserFromEmail(ctx, c.principalStore, in.LoginIdentifier) + } + + // always return not found for security reasons. + if err != nil { + log.Ctx(ctx).Debug().Err(err). + Str("user_uid", in.LoginIdentifier). + Msgf("failed to retrieve user during login.") + return nil, usererror.ErrNotFound + } + + err = bcrypt.CompareHashAndPassword( + []byte(user.Password), + []byte(in.Password), + ) + if err != nil { + log.Debug().Err(err). + Str("user_uid", user.UID). + Msg("invalid password") + + return nil, usererror.ErrNotFound + } + + tokenUID, err := generateSessionTokenUID() + if err != nil { + return nil, err + } + token, jwtToken, err := token.CreateUserSession(ctx, c.tokenStore, user, tokenUID) + if err != nil { + return nil, err + } + + return &types.TokenResponse{Token: *token, AccessToken: jwtToken}, nil +} + +func generateSessionTokenUID() (string, error) { + r, err := rand.Int(rand.Reader, big.NewInt(10000)) + if err != nil { + return "", fmt.Errorf("failed to generate random number: %w", err) + } + return fmt.Sprintf("login-%d-%04d", time.Now().Unix(), r.Int64()), nil +} diff --git a/internal/api/controller/user/logout.go b/internal/api/controller/user/logout.go new file mode 100644 index 0000000000..6599e0fd2b --- /dev/null +++ b/internal/api/controller/user/logout.go @@ -0,0 +1,59 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package user + +import ( + "context" + "errors" + "fmt" + + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types/enum" +) + +var () + +// Logout searches for the user's token present in the request and proceeds to delete it. +// If no user was present, a usererror.ErrUnauthorized is returned. 
+func (c *Controller) Logout(ctx context.Context, session *auth.Session) error { + var ( + tokenID int64 + tokenType enum.TokenType + ) + + if session == nil { + return usererror.ErrUnauthorized + } + + switch t := session.Metadata.(type) { + case *auth.TokenMetadata: + tokenID = t.TokenID + tokenType = t.TokenType + default: + return errors.New("provided jwt doesn't support logout") + } + + if tokenType != enum.TokenTypeSession { + return usererror.BadRequestf("unsupported logout token type %v", tokenType) + } + + err := c.tokenStore.Delete(ctx, tokenID) + if err != nil { + return fmt.Errorf("failed to delete token from store: %w", err) + } + + return nil +} diff --git a/internal/api/controller/user/membership_spaces.go b/internal/api/controller/user/membership_spaces.go new file mode 100644 index 0000000000..112d0cc1ab --- /dev/null +++ b/internal/api/controller/user/membership_spaces.go @@ -0,0 +1,70 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package user + +import ( + "context" + "fmt" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/store/database/dbtx" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +// MembershipSpaces lists all spaces in which the user is a member. +func (c *Controller) MembershipSpaces(ctx context.Context, + session *auth.Session, + userUID string, + filter types.MembershipSpaceFilter, +) ([]types.MembershipSpace, int64, error) { + user, err := findUserFromUID(ctx, c.principalStore, userUID) + if err != nil { + return nil, 0, fmt.Errorf("failed to find user by UID: %w", err) + } + + // Ensure principal has required permissions. + if err = apiauth.CheckUser(ctx, c.authorizer, session, user, enum.PermissionUserView); err != nil { + return nil, 0, err + } + + var membershipSpaces []types.MembershipSpace + var membershipsCount int64 + + err = dbtx.New(c.db).WithTx(ctx, func(ctx context.Context) error { + membershipSpaces, err = c.membershipStore.ListSpaces(ctx, user.ID, filter) + if err != nil { + return fmt.Errorf("failed to list membership spaces for user: %w", err) + } + + if filter.Page == 1 && len(membershipSpaces) < filter.Size { + membershipsCount = int64(len(membershipSpaces)) + return nil + } + + membershipsCount, err = c.membershipStore.CountSpaces(ctx, user.ID, filter) + if err != nil { + return fmt.Errorf("failed to count memberships for user: %w", err) + } + + return nil + }, dbtx.TxDefaultReadOnly) + if err != nil { + return nil, 0, err + } + + return membershipSpaces, membershipsCount, nil +} diff --git a/internal/api/controller/user/register.go b/internal/api/controller/user/register.go new file mode 100644 index 0000000000..0e3e7a6b26 --- /dev/null +++ b/internal/api/controller/user/register.go @@ -0,0 +1,64 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package user + +import ( + "context" + "fmt" + + "github.com/harness/gitness/internal/api/controller/system" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/internal/token" + "github.com/harness/gitness/types" +) + +type RegisterInput struct { + Email string `json:"email"` + DisplayName string `json:"display_name"` + UID string `json:"uid"` + Password string `json:"password"` +} + +// Register creates a new user and returns a new session token on success. +// This doesn't require auth, but has limited functionalities (unable to create admin user for example). +func (c *Controller) Register(ctx context.Context, sysCtrl *system.Controller, + in *RegisterInput) (*types.TokenResponse, error) { + signUpAllowed, err := sysCtrl.IsUserSignupAllowed(ctx) + if err != nil { + return nil, err + } + + if !signUpAllowed { + return nil, usererror.Forbidden("User sign-up is disabled") + } + + user, err := c.CreateNoAuth(ctx, &CreateInput{ + UID: in.UID, + Email: in.Email, + DisplayName: in.DisplayName, + Password: in.Password, + }, false) + if err != nil { + return nil, fmt.Errorf("failed to create user: %w", err) + } + + // TODO: how should we name session tokens? + token, jwtToken, err := token.CreateUserSession(ctx, c.tokenStore, user, "register") + if err != nil { + return nil, fmt.Errorf("failed to create token after successful user creation: %w", err) + } + + return &types.TokenResponse{Token: *token, AccessToken: jwtToken}, nil +} diff --git a/internal/api/controller/user/update.go b/internal/api/controller/user/update.go new file mode 100644 index 0000000000..655013dcfa --- /dev/null +++ b/internal/api/controller/user/update.go @@ -0,0 +1,102 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package user + +import ( + "context" + "fmt" + "strings" + "time" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/check" + "github.com/harness/gitness/types/enum" + + "golang.org/x/crypto/bcrypt" +) + +// UpdateInput store infos to update an existing user. +type UpdateInput struct { + Email *string `json:"email"` + Password *string `json:"password"` + DisplayName *string `json:"display_name"` +} + +// Update updates the provided user. 
+func (c *Controller) Update(ctx context.Context, session *auth.Session, + userUID string, in *UpdateInput) (*types.User, error) { + user, err := findUserFromUID(ctx, c.principalStore, userUID) + if err != nil { + return nil, err + } + + // Ensure principal has required permissions on parent. + if err = apiauth.CheckUser(ctx, c.authorizer, session, user, enum.PermissionUserEdit); err != nil { + return nil, err + } + + if err = c.sanitizeUpdateInput(in); err != nil { + return nil, fmt.Errorf("invalid input: %w", err) + } + + if in.DisplayName != nil { + user.DisplayName = *in.DisplayName + } + if in.Email != nil { + user.Email = *in.Email + } + if in.Password != nil { + var hash []byte + hash, err = hashPassword([]byte(*in.Password), bcrypt.DefaultCost) + if err != nil { + return nil, fmt.Errorf("failed to hash password: %w", err) + } + user.Password = string(hash) + } + user.Updated = time.Now().UnixMilli() + + err = c.principalStore.UpdateUser(ctx, user) + if err != nil { + return nil, err + } + + return user, nil +} + +func (c *Controller) sanitizeUpdateInput(in *UpdateInput) error { + if in.Email != nil { + *in.Email = strings.TrimSpace(*in.Email) + if err := check.Email(*in.Email); err != nil { + return err + } + } + + if in.DisplayName != nil { + *in.DisplayName = strings.TrimSpace(*in.DisplayName) + if err := check.DisplayName(*in.DisplayName); err != nil { + return err + } + } + + if in.Password != nil { + if err := check.Password(*in.Password); err != nil { + return err + } + } + + return nil +} diff --git a/internal/api/controller/user/update_admin.go b/internal/api/controller/user/update_admin.go new file mode 100644 index 0000000000..31ea90e636 --- /dev/null +++ b/internal/api/controller/user/update_admin.go @@ -0,0 +1,68 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package user + +import ( + "context" + "fmt" + "time" + + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +type UpdateAdminInput struct { + Admin bool `json:"admin"` +} + +// UpdateAdmin updates the admin state of a user. +func (c *Controller) UpdateAdmin(ctx context.Context, session *auth.Session, + userUID string, request *UpdateAdminInput) (*types.User, error) { + user, err := findUserFromUID(ctx, c.principalStore, userUID) + if err != nil { + return nil, err + } + + // Ensure principal has required permissions on parent. + if err = apiauth.CheckUser(ctx, c.authorizer, session, user, enum.PermissionUserEditAdmin); err != nil { + return nil, err + } + + // Fail if the user being updated is the only admin in DB. 
+	if !request.Admin && user.Admin {
+		admUsrCount, err := c.principalStore.CountUsers(ctx, &types.UserFilter{Admin: true})
+		if err != nil {
+			return nil, fmt.Errorf("failed to check admin user count: %w", err)
+		}
+
+		if admUsrCount == 1 {
+			return nil, usererror.BadRequest("system requires at least one admin user")
+		}
+	}
+
+	user.Admin = request.Admin
+	user.Updated = time.Now().UnixMilli()
+
+	err = c.principalStore.UpdateUser(ctx, user)
+	if err != nil {
+		return nil, err
+	}
+
+	return user, nil
+}
diff --git a/internal/api/controller/user/wire.go b/internal/api/controller/user/wire.go
new file mode 100644
index 0000000000..038cb52be0
--- /dev/null
+++ b/internal/api/controller/user/wire.go
@@ -0,0 +1,46 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//	http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package user
+
+import (
+	"github.com/harness/gitness/internal/auth/authz"
+	"github.com/harness/gitness/internal/store"
+	"github.com/harness/gitness/types/check"
+
+	"github.com/google/wire"
+	"github.com/jmoiron/sqlx"
+)
+
+// WireSet provides a wire set for this package.
+var WireSet = wire.NewSet(
+	ProvideController,
+)
+
+func ProvideController(
+	db *sqlx.DB,
+	principalUIDCheck check.PrincipalUID,
+	authorizer authz.Authorizer,
+	principalStore store.PrincipalStore,
+	tokenStore store.TokenStore,
+	membershipStore store.MembershipStore,
+) *Controller {
+	return NewController(
+		db,
+		principalUIDCheck,
+		authorizer,
+		principalStore,
+		tokenStore,
+		membershipStore)
+}
diff --git a/internal/api/controller/util.go b/internal/api/controller/util.go
new file mode 100644
index 0000000000..c1694ca132
--- /dev/null
+++ b/internal/api/controller/util.go
@@ -0,0 +1,107 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//	http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package controller
+
+import (
+	"context"
+	"fmt"
+
+	"github.com/harness/gitness/gitrpc"
+	"github.com/harness/gitness/internal/auth"
+	"github.com/harness/gitness/internal/githook"
+	"github.com/harness/gitness/internal/url"
+	"github.com/harness/gitness/types"
+)
+
+// TODO: this file should be in gitrpc package and should accept
+// params as interface (contract)
+
+// CreateRPCWriteParams creates base write parameters for gitrpc write operations.
+// IMPORTANT: session & repo are assumed to be not nil!
+// TODO: this is duplicate function from repo controller, we need to see where this
+// function will be best fit.
+func CreateRPCWriteParams(ctx context.Context, urlProvider *url.Provider, + session *auth.Session, repo *types.Repository) (gitrpc.WriteParams, error) { + // generate envars (add everything githook CLI needs for execution) + envVars, err := githook.GenerateEnvironmentVariables( + ctx, + urlProvider.GetAPIBaseURLInternal(), + repo.ID, + session.Principal.ID, + false, + ) + if err != nil { + return gitrpc.WriteParams{}, fmt.Errorf("failed to generate git hook environment variables: %w", err) + } + + return gitrpc.WriteParams{ + Actor: gitrpc.Identity{ + Name: session.Principal.DisplayName, + Email: session.Principal.Email, + }, + RepoUID: repo.GitUID, + EnvVars: envVars, + }, nil +} + +func MapCommit(c *gitrpc.Commit) (*types.Commit, error) { + if c == nil { + return nil, fmt.Errorf("commit is nil") + } + + author, err := MapSignature(&c.Author) + if err != nil { + return nil, fmt.Errorf("failed to map author: %w", err) + } + + committer, err := MapSignature(&c.Committer) + if err != nil { + return nil, fmt.Errorf("failed to map committer: %w", err) + } + + return &types.Commit{ + SHA: c.SHA, + Title: c.Title, + Message: c.Message, + Author: *author, + Committer: *committer, + }, nil +} + +func MapRenameDetails(c *gitrpc.RenameDetails) *types.RenameDetails { + if c == nil { + return nil + } + return &types.RenameDetails{ + OldPath: c.OldPath, + NewPath: c.NewPath, + CommitShaBefore: c.CommitShaBefore, + CommitShaAfter: c.CommitShaAfter, + } +} + +func MapSignature(s *gitrpc.Signature) (*types.Signature, error) { + if s == nil { + return nil, fmt.Errorf("signature is nil") + } + + return &types.Signature{ + Identity: types.Identity{ + Name: s.Identity.Name, + Email: s.Identity.Email, + }, + When: s.When, + }, nil +} diff --git a/internal/api/controller/webhook/common.go b/internal/api/controller/webhook/common.go new file mode 100644 index 0000000000..20deee0d05 --- /dev/null +++ b/internal/api/controller/webhook/common.go @@ -0,0 +1,113 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package webhook + +import ( + "net" + "net/url" + + "github.com/harness/gitness/types/check" + "github.com/harness/gitness/types/enum" +) + +const ( + // webhookMaxURLLength defines the max allowed length of a webhook URL. + webhookMaxURLLength = 2048 + // webhookMaxSecretLength defines the max allowed length of a webhook secret. + webhookMaxSecretLength = 4096 +) + +// checkURL validates the url of a webhook. 
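The mapping helpers above are straightforward field-by-field translations from gitrpc types to the API types. A short sketch of MapCommit with placeholder commit data (timestamps are omitted, so only zero values are mapped for When):

package example

import (
	"fmt"

	"github.com/harness/gitness/gitrpc"
	"github.com/harness/gitness/internal/api/controller"
)

func mapCommitSketch() {
	commit, err := controller.MapCommit(&gitrpc.Commit{
		SHA:   "0a1b2c3",
		Title: "Fix flaky webhook test",
		Author: gitrpc.Signature{
			Identity: gitrpc.Identity{Name: "Jane", Email: "jane@example.com"},
		},
		Committer: gitrpc.Signature{
			Identity: gitrpc.Identity{Name: "Jane", Email: "jane@example.com"},
		},
	})
	if err != nil {
		panic(err)
	}
	// The nested gitrpc signatures end up as types.Signature values on the commit.
	fmt.Println(commit.SHA, commit.Author.Identity.Name) // 0a1b2c3 Jane
}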
+func checkURL(rawURL string, allowLoopback bool, allowPrivateNetwork bool) error {
+	// check URL
+	if len(rawURL) > webhookMaxURLLength {
+		return check.NewValidationErrorf("The URL of a webhook can be at most %d characters long.",
+			webhookMaxURLLength)
+	}
+
+	parsedURL, err := url.Parse(rawURL)
+	if err != nil {
+		return check.NewValidationErrorf("The provided webhook URL is invalid: %s", err)
+	}
+
+	host := parsedURL.Hostname()
+	if host == "" {
+		return check.NewValidationError("The URL of a webhook has to have a non-empty host.")
+	}
+
+	// basic validation for loopback / private network addresses (only a sanity check to give the user an early error)
+	// IMPORTANT: during webhook execution loopback / private network addresses are blocked (handles DNS resolution)
+
+	if host == "localhost" {
+		return check.NewValidationError("localhost is not allowed.")
+	}
+
+	if ip := net.ParseIP(host); ip != nil {
+		if !allowLoopback && ip.IsLoopback() {
+			return check.NewValidationError("Loopback IP addresses are not allowed.")
+		}
+
+		if !allowPrivateNetwork && ip.IsPrivate() {
+			return check.NewValidationError("Private IP addresses are not allowed.")
+		}
+	}
+
+	if parsedURL.Scheme != "http" && parsedURL.Scheme != "https" {
+		return check.NewValidationError("The scheme of a webhook must be either http or https.")
+	}
+
+	return nil
+}
+
+// checkSecret validates the secret of a webhook.
+func checkSecret(secret string) error {
+	if len(secret) > webhookMaxSecretLength {
+		return check.NewValidationErrorf("The secret of a webhook can be at most %d characters long.",
+			webhookMaxSecretLength)
+	}
+
+	return nil
+}
+
+// checkTriggers validates the triggers of a webhook.
+func checkTriggers(triggers []enum.WebhookTrigger) error {
+	// ignore duplicates here, should be deduplicated later
+	for _, trigger := range triggers {
+		if _, ok := trigger.Sanitize(); !ok {
+			return check.NewValidationErrorf("The provided webhook trigger '%s' is invalid.", trigger)
+		}
+	}
+
+	return nil
+}
+
+// deduplicateTriggers de-duplicates the triggers provided by the user.
+func deduplicateTriggers(in []enum.WebhookTrigger) []enum.WebhookTrigger {
+	if len(in) == 0 {
+		return []enum.WebhookTrigger{}
+	}
+
+	triggerSet := make(map[enum.WebhookTrigger]bool, len(in))
+	out := make([]enum.WebhookTrigger, 0, len(in))
+	for _, trigger := range in {
+		if triggerSet[trigger] {
+			continue
+		}
+		triggerSet[trigger] = true
+		out = append(out, trigger)
+	}
+
+	return out
+}
diff --git a/internal/api/controller/webhook/controller.go b/internal/api/controller/webhook/controller.go
new file mode 100644
index 0000000000..b4c72c1870
--- /dev/null
+++ b/internal/api/controller/webhook/controller.go
@@ -0,0 +1,87 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
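A test-style sketch of the checkURL behaviour above (in-package, since the helper is unexported); the URLs are placeholders and only illustrate the loopback and scheme rules:

package webhook

import "testing"

func TestCheckURLSketch(t *testing.T) {
	// Loopback addresses are rejected unless explicitly allowed.
	if err := checkURL("http://127.0.0.1/hook", false, false); err == nil {
		t.Error("expected loopback address to be rejected")
	}
	if err := checkURL("http://127.0.0.1/hook", true, false); err != nil {
		t.Errorf("expected loopback address to pass when allowed, got: %v", err)
	}

	// Only http and https schemes are accepted.
	if err := checkURL("ftp://example.com/hook", false, false); err == nil {
		t.Error("expected non-http(s) scheme to be rejected")
	}
}

These checks are only an early convenience for the user; as the comment above notes, the real blocking happens again at execution time, after DNS resolution.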
+ +package webhook + +import ( + "context" + "fmt" + + "github.com/harness/gitness/encrypt" + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/internal/auth/authz" + "github.com/harness/gitness/internal/services/webhook" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/jmoiron/sqlx" +) + +type Controller struct { + allowLoopback bool + allowPrivateNetwork bool + + db *sqlx.DB + authorizer authz.Authorizer + webhookStore store.WebhookStore + webhookExecutionStore store.WebhookExecutionStore + repoStore store.RepoStore + webhookService *webhook.Service + encrypter encrypt.Encrypter +} + +func NewController( + allowLoopback bool, + allowPrivateNetwork bool, + db *sqlx.DB, + authorizer authz.Authorizer, + webhookStore store.WebhookStore, + webhookExecutionStore store.WebhookExecutionStore, + repoStore store.RepoStore, + webhookService *webhook.Service, + encrypter encrypt.Encrypter, +) *Controller { + return &Controller{ + allowLoopback: allowLoopback, + allowPrivateNetwork: allowPrivateNetwork, + db: db, + authorizer: authorizer, + webhookStore: webhookStore, + webhookExecutionStore: webhookExecutionStore, + repoStore: repoStore, + webhookService: webhookService, + encrypter: encrypter, + } +} + +func (c *Controller) getRepoCheckAccess(ctx context.Context, + session *auth.Session, repoRef string, reqPermission enum.Permission) (*types.Repository, error) { + if repoRef == "" { + return nil, usererror.BadRequest("A valid repository reference must be provided.") + } + + repo, err := c.repoStore.FindByRef(ctx, repoRef) + if err != nil { + return nil, fmt.Errorf("failed to find repo: %w", err) + } + + if err = apiauth.CheckRepo(ctx, c.authorizer, session, repo, reqPermission, false); err != nil { + return nil, fmt.Errorf("failed to verify authorization: %w", err) + } + + return repo, nil +} diff --git a/internal/api/controller/webhook/create.go b/internal/api/controller/webhook/create.go new file mode 100644 index 0000000000..76f461e278 --- /dev/null +++ b/internal/api/controller/webhook/create.go @@ -0,0 +1,112 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package webhook + +import ( + "context" + "fmt" + "time" + + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/check" + "github.com/harness/gitness/types/enum" +) + +type CreateInput struct { + DisplayName string `json:"display_name"` + Description string `json:"description"` + URL string `json:"url"` + Secret string `json:"secret"` + Enabled bool `json:"enabled"` + Insecure bool `json:"insecure"` + Triggers []enum.WebhookTrigger `json:"triggers"` +} + +// Create creates a new webhook. 
+func (c *Controller) Create( + ctx context.Context, + session *auth.Session, + repoRef string, + in *CreateInput, + internal bool, +) (*types.Webhook, error) { + now := time.Now().UnixMilli() + + repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoEdit) + if err != nil { + return nil, err + } + + // validate input + err = checkCreateInput(in, c.allowLoopback, c.allowPrivateNetwork || internal) + if err != nil { + return nil, err + } + + encryptedSecret, err := c.encrypter.Encrypt(in.Secret) + if err != nil { + return nil, fmt.Errorf("failed to encrypt webhook secret: %w", err) + } + + // create new webhook object + hook := &types.Webhook{ + ID: 0, // the ID will be populated in the data layer + Version: 0, // the Version will be populated in the data layer + CreatedBy: session.Principal.ID, + Created: now, + Updated: now, + ParentID: repo.ID, + ParentType: enum.WebhookParentRepo, + Internal: internal, + + // user input + DisplayName: in.DisplayName, + Description: in.Description, + URL: in.URL, + Secret: string(encryptedSecret), + Enabled: in.Enabled, + Insecure: in.Insecure, + Triggers: deduplicateTriggers(in.Triggers), + LatestExecutionResult: nil, + } + + err = c.webhookStore.Create(ctx, hook) + if err != nil { + return nil, err + } + + return hook, nil +} + +func checkCreateInput(in *CreateInput, allowLoopback bool, allowPrivateNetwork bool) error { + if err := check.DisplayName(in.DisplayName); err != nil { + return err + } + if err := check.Description(in.Description); err != nil { + return err + } + if err := checkURL(in.URL, allowLoopback, allowPrivateNetwork); err != nil { + return err + } + if err := checkSecret(in.Secret); err != nil { + return err + } + if err := checkTriggers(in.Triggers); err != nil { + return err + } + + return nil +} diff --git a/internal/api/controller/webhook/delete.go b/internal/api/controller/webhook/delete.go new file mode 100644 index 0000000000..255b5faca5 --- /dev/null +++ b/internal/api/controller/webhook/delete.go @@ -0,0 +1,44 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package webhook + +import ( + "context" + + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types/enum" +) + +// Delete deletes an existing webhook. +func (c *Controller) Delete( + ctx context.Context, + session *auth.Session, + repoRef string, + webhookID int64, +) error { + repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoEdit) + if err != nil { + return err + } + + // get the webhook and ensure it belongs to us + webhook, err := c.getWebhookVerifyOwnership(ctx, repo.ID, webhookID) + if err != nil { + return err + } + + // delete webhook + return c.webhookStore.Delete(ctx, webhook.ID) +} diff --git a/internal/api/controller/webhook/find.go b/internal/api/controller/webhook/find.go new file mode 100644 index 0000000000..1424096bee --- /dev/null +++ b/internal/api/controller/webhook/find.go @@ -0,0 +1,59 @@ +// Copyright 2023 Harness, Inc. 
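A minimal sketch of creating a repository webhook through the controller shown above; ctrl, ctx and session are assumed to come from the application's wiring, and the repository reference, URL and secret are placeholders (triggers are omitted here, see enum.WebhookTrigger for the concrete values):

package example

import (
	"context"

	"github.com/harness/gitness/internal/api/controller/webhook"
	"github.com/harness/gitness/internal/auth"
	"github.com/harness/gitness/types"
)

func createCIWebhook(ctx context.Context, ctrl *webhook.Controller, session *auth.Session) (*types.Webhook, error) {
	// The plain-text secret is encrypted by the controller before it is stored.
	return ctrl.Create(ctx, session, "myspace/myrepo", &webhook.CreateInput{
		DisplayName: "ci-notify",
		Description: "notify the CI system about pushes",
		URL:         "https://ci.example.com/hook",
		Secret:      "s3cr3t",
		Enabled:     true,
	}, false /* not an internal webhook */)
}

Note that only the encrypted secret reaches the webhook store; the plain text is never persisted.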
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package webhook + +import ( + "context" + "fmt" + + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +// Find finds a webhook from the provided repository. +func (c *Controller) Find( + ctx context.Context, + session *auth.Session, + repoRef string, + webhookID int64, +) (*types.Webhook, error) { + repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoView) + if err != nil { + return nil, err + } + + return c.getWebhookVerifyOwnership(ctx, repo.ID, webhookID) +} + +func (c *Controller) getWebhookVerifyOwnership(ctx context.Context, repoID int64, + webhookID int64) (*types.Webhook, error) { + if webhookID <= 0 { + return nil, usererror.BadRequest("A valid webhook ID must be provided.") + } + + webhook, err := c.webhookStore.Find(ctx, webhookID) + if err != nil { + return nil, fmt.Errorf("failed to find webhook with id %d: %w", webhookID, err) + } + + // ensure the webhook actually belongs to the repo + if webhook.ParentType != enum.WebhookParentRepo || webhook.ParentID != repoID { + return nil, fmt.Errorf("webhook doesn't belong to requested repo. Returning error %w", usererror.ErrNotFound) + } + + return webhook, nil +} diff --git a/internal/api/controller/webhook/find_execution.go b/internal/api/controller/webhook/find_execution.go new file mode 100644 index 0000000000..85ae788745 --- /dev/null +++ b/internal/api/controller/webhook/find_execution.go @@ -0,0 +1,72 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package webhook + +import ( + "context" + "fmt" + + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +// FindExecution finds a webhook execution. 
+func (c *Controller) FindExecution( + ctx context.Context, + session *auth.Session, + repoRef string, + webhookID int64, + webhookExecutionID int64, +) (*types.WebhookExecution, error) { + repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoView) + if err != nil { + return nil, err + } + + // get the webhook and ensure it belongs to us + webhook, err := c.getWebhookVerifyOwnership(ctx, repo.ID, webhookID) + if err != nil { + return nil, err + } + // get the webhook execution and ensure it belongs to us + webhookExecution, err := c.getWebhookExecutionVerifyOwnership(ctx, webhook.ID, webhookExecutionID) + if err != nil { + return nil, err + } + + return webhookExecution, nil +} + +func (c *Controller) getWebhookExecutionVerifyOwnership(ctx context.Context, webhookID int64, + webhookExecutionID int64) (*types.WebhookExecution, error) { + if webhookExecutionID <= 0 { + return nil, usererror.BadRequest("A valid webhook execution ID must be provided.") + } + + webhookExecution, err := c.webhookExecutionStore.Find(ctx, webhookExecutionID) + if err != nil { + return nil, fmt.Errorf("failed to find webhook execution with id %d: %w", webhookExecutionID, err) + } + + // ensure the webhook execution actually belongs to the webhook + if webhookID != webhookExecution.WebhookID { + return nil, fmt.Errorf("webhook execution doesn't belong to requested webhook. Returning error %w", + usererror.ErrNotFound) + } + + return webhookExecution, nil +} diff --git a/internal/api/controller/webhook/list.go b/internal/api/controller/webhook/list.go new file mode 100644 index 0000000000..8566173163 --- /dev/null +++ b/internal/api/controller/webhook/list.go @@ -0,0 +1,49 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package webhook + +import ( + "context" + "fmt" + + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +// List returns the webhooks from the provided repository. 
+func (c *Controller) List( + ctx context.Context, + session *auth.Session, + repoRef string, + filter *types.WebhookFilter, +) ([]*types.Webhook, int64, error) { + repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoView) + if err != nil { + return nil, 0, err + } + + count, err := c.webhookStore.Count(ctx, enum.WebhookParentRepo, repo.ID, filter) + if err != nil { + return nil, 0, fmt.Errorf("failed to count webhooks for repo with id %d: %w", repo.ID, err) + } + + webhooks, err := c.webhookStore.List(ctx, enum.WebhookParentRepo, repo.ID, filter) + if err != nil { + return nil, 0, fmt.Errorf("failed to list webhooks for repo with id %d: %w", repo.ID, err) + } + + return webhooks, count, nil +} diff --git a/internal/api/controller/webhook/list_executions.go b/internal/api/controller/webhook/list_executions.go new file mode 100644 index 0000000000..52101992c4 --- /dev/null +++ b/internal/api/controller/webhook/list_executions.go @@ -0,0 +1,52 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package webhook + +import ( + "context" + "fmt" + + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +// ListExecutions returns the executions of the webhook. +func (c *Controller) ListExecutions( + ctx context.Context, + session *auth.Session, + repoRef string, + webhookID int64, + filter *types.WebhookExecutionFilter, +) ([]*types.WebhookExecution, error) { + repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoView) + if err != nil { + return nil, err + } + + // get the webhook and ensure it belongs to us + webhook, err := c.getWebhookVerifyOwnership(ctx, repo.ID, webhookID) + if err != nil { + return nil, err + } + + // get webhook executions + webhookExecutions, err := c.webhookExecutionStore.ListForWebhook(ctx, webhook.ID, filter) + if err != nil { + return nil, fmt.Errorf("failed to list webhook executions for webhook %d: %w", webhook.ID, err) + } + + return webhookExecutions, nil +} diff --git a/internal/api/controller/webhook/retrigger_execution.go b/internal/api/controller/webhook/retrigger_execution.go new file mode 100644 index 0000000000..7f8ce24cd9 --- /dev/null +++ b/internal/api/controller/webhook/retrigger_execution.go @@ -0,0 +1,67 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
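The list endpoints above follow a count-then-list pattern so the handlers can emit pagination headers alongside the page of results. A sketch of calling it directly; the empty filter is a placeholder, in the handlers it is parsed from the request query:

package example

import (
	"context"
	"fmt"

	"github.com/harness/gitness/internal/api/controller/webhook"
	"github.com/harness/gitness/internal/auth"
	"github.com/harness/gitness/types"
)

func printWebhookCount(ctx context.Context, ctrl *webhook.Controller, session *auth.Session) error {
	hooks, total, err := ctrl.List(ctx, session, "myspace/myrepo", &types.WebhookFilter{})
	if err != nil {
		return err
	}
	fmt.Printf("repo has %d webhooks in total, this page holds %d\n", total, len(hooks))
	return nil
}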
+
+package webhook
+
+import (
+	"context"
+	"fmt"
+
+	"github.com/harness/gitness/internal/auth"
+	"github.com/harness/gitness/types"
+	"github.com/harness/gitness/types/enum"
+
+	"github.com/rs/zerolog/log"
+)
+
+// RetriggerExecution retriggers an existing webhook execution.
+func (c *Controller) RetriggerExecution(
+	ctx context.Context,
+	session *auth.Session,
+	repoRef string,
+	webhookID int64,
+	webhookExecutionID int64,
+) (*types.WebhookExecution, error) {
+	repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoEdit)
+	if err != nil {
+		return nil, fmt.Errorf("failed to acquire access to the repo: %w", err)
+	}
+
+	// get the webhook and ensure it belongs to us
+	webhook, err := c.getWebhookVerifyOwnership(ctx, repo.ID, webhookID)
+	if err != nil {
+		return nil, err
+	}
+
+	// get the webhook execution and ensure it belongs to us
+	webhookExecution, err := c.getWebhookExecutionVerifyOwnership(ctx, webhook.ID, webhookExecutionID)
+	if err != nil {
+		return nil, err
+	}
+
+	// retrigger the execution ...
+	executionResult, err := c.webhookService.RetriggerWebhookExecution(ctx, webhookExecution.ID)
+	if err != nil {
+		return nil, fmt.Errorf("failed to retrigger webhook execution: %w", err)
+	}
+
+	// log execution error so we have the necessary debug information if needed
+	if executionResult.Err != nil {
+		log.Ctx(ctx).Warn().Err(executionResult.Err).Msgf(
+			"retrigger of webhook %d execution %d (new id: %d) had an error",
+			webhook.ID, webhookExecution.ID, executionResult.Execution.ID)
+	}
+
+	return executionResult.Execution, nil
+}
diff --git a/internal/api/controller/webhook/update.go b/internal/api/controller/webhook/update.go
new file mode 100644
index 0000000000..5141483790
--- /dev/null
+++ b/internal/api/controller/webhook/update.go
@@ -0,0 +1,123 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package webhook
+
+import (
+	"context"
+	"fmt"
+
+	"github.com/harness/gitness/internal/auth"
+	"github.com/harness/gitness/types"
+	"github.com/harness/gitness/types/check"
+	"github.com/harness/gitness/types/enum"
+)
+
+type UpdateInput struct {
+	DisplayName *string               `json:"display_name"`
+	Description *string               `json:"description"`
+	URL         *string               `json:"url"`
+	Secret      *string               `json:"secret"`
+	Enabled     *bool                 `json:"enabled"`
+	Insecure    *bool                 `json:"insecure"`
+	Triggers    []enum.WebhookTrigger `json:"triggers"`
+}
+
+// Update updates an existing webhook.
+func (c *Controller) Update( + ctx context.Context, + session *auth.Session, + repoRef string, + webhookID int64, + in *UpdateInput, +) (*types.Webhook, error) { + repo, err := c.getRepoCheckAccess(ctx, session, repoRef, enum.PermissionRepoEdit) + if err != nil { + return nil, err + } + + // get the hook and ensure it belongs to us + hook, err := c.getWebhookVerifyOwnership(ctx, repo.ID, webhookID) + if err != nil { + return nil, err + } + + // validate input + if err = checkUpdateInput(in, c.allowLoopback, c.allowPrivateNetwork); err != nil { + return nil, err + } + + // update webhook struct (only for values that are provided) + if in.DisplayName != nil { + hook.DisplayName = *in.DisplayName + } + if in.Description != nil { + hook.Description = *in.Description + } + if in.URL != nil { + hook.URL = *in.URL + } + if in.Secret != nil { + encryptedSecret, err := c.encrypter.Encrypt(*in.Secret) + if err != nil { + return nil, fmt.Errorf("failed to encrypt webhook secret: %w", err) + } + hook.Secret = string(encryptedSecret) + } + if in.Enabled != nil { + hook.Enabled = *in.Enabled + } + if in.Insecure != nil { + hook.Insecure = *in.Insecure + } + if in.Triggers != nil { + hook.Triggers = deduplicateTriggers(in.Triggers) + } + + if err = c.webhookStore.Update(ctx, hook); err != nil { + return nil, err + } + + return hook, nil +} + +func checkUpdateInput(in *UpdateInput, allowLoopback bool, allowPrivateNetwork bool) error { + if in.DisplayName != nil { + if err := check.DisplayName(*in.DisplayName); err != nil { + return err + } + } + if in.Description != nil { + if err := check.Description(*in.Description); err != nil { + return err + } + } + if in.URL != nil { + if err := checkURL(*in.URL, allowLoopback, allowPrivateNetwork); err != nil { + return err + } + } + if in.Secret != nil { + if err := checkSecret(*in.Secret); err != nil { + return err + } + } + if in.Triggers != nil { + if err := checkTriggers(in.Triggers); err != nil { + return err + } + } + + return nil +} diff --git a/internal/api/controller/webhook/wire.go b/internal/api/controller/webhook/wire.go new file mode 100644 index 0000000000..d06a2288c9 --- /dev/null +++ b/internal/api/controller/webhook/wire.go @@ -0,0 +1,37 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package webhook + +import ( + "github.com/harness/gitness/encrypt" + "github.com/harness/gitness/internal/auth/authz" + "github.com/harness/gitness/internal/services/webhook" + "github.com/harness/gitness/internal/store" + + "github.com/google/wire" + "github.com/jmoiron/sqlx" +) + +// WireSet provides a wire set for this package. 
+var WireSet = wire.NewSet(
+	ProvideController,
+)
+
+func ProvideController(config webhook.Config, db *sqlx.DB, authorizer authz.Authorizer,
+	webhookStore store.WebhookStore, webhookExecutionStore store.WebhookExecutionStore,
+	repoStore store.RepoStore, webhookService *webhook.Service, encrypter encrypt.Encrypter) *Controller {
+	return NewController(config.AllowLoopback, config.AllowPrivateNetwork,
+		db, authorizer, webhookStore, webhookExecutionStore, repoStore, webhookService, encrypter)
+}
diff --git a/internal/api/handler/account/cookie.go b/internal/api/handler/account/cookie.go
new file mode 100644
index 0000000000..43983e429f
--- /dev/null
+++ b/internal/api/handler/account/cookie.go
@@ -0,0 +1,63 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package account
+
+import (
+	"errors"
+	"net/http"
+	"time"
+
+	"github.com/harness/gitness/types"
+)
+
+func includeTokenCookie(
+	r *http.Request, w http.ResponseWriter,
+	tokenResponse *types.TokenResponse,
+	cookieName string,
+) {
+	cookie := newEmptyTokenCookie(r, cookieName)
+	cookie.Value = tokenResponse.AccessToken
+	if tokenResponse.Token.ExpiresAt != nil {
+		cookie.Expires = time.UnixMilli(*tokenResponse.Token.ExpiresAt)
+	}
+
+	http.SetCookie(w, cookie)
+}
+
+func deleteTokenCookieIfPresent(r *http.Request, w http.ResponseWriter, cookieName string) {
+	// if no token is present in the cookies, nothing to do.
+	// No other error type is expected here, and even if there is, attempt best-effort deletion anyway.
+	_, err := r.Cookie(cookieName)
+	if errors.Is(err, http.ErrNoCookie) {
+		return
+	}
+
+	cookie := newEmptyTokenCookie(r, cookieName)
+	cookie.Value = ""
+	cookie.Expires = time.UnixMilli(0) // this effectively tells the browser to delete the cookie
+
+	http.SetCookie(w, cookie)
+}
+
+func newEmptyTokenCookie(r *http.Request, cookieName string) *http.Cookie {
+	return &http.Cookie{
+		Name:     cookieName,
+		SameSite: http.SameSiteStrictMode,
+		HttpOnly: true,
+		Path:     "/",
+		Domain:   r.URL.Hostname(),
+		Secure:   r.URL.Scheme == "https",
+	}
+}
diff --git a/internal/api/handler/account/login.go b/internal/api/handler/account/login.go
new file mode 100644
index 0000000000..4cefbae208
--- /dev/null
+++ b/internal/api/handler/account/login.go
@@ -0,0 +1,52 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
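A test-style sketch of the cookie handling above: deletion works by re-sending the cookie with an expiry in the past, which tells the browser to drop it. The standard httptest package stands in for a real request/response pair, and the cookie name is arbitrary:

package account

import (
	"net/http"
	"net/http/httptest"
	"testing"
	"time"
)

func TestDeleteTokenCookieSketch(t *testing.T) {
	r := httptest.NewRequest(http.MethodPost, "https://gitness.example.com/api/v1/logout", nil)
	r.AddCookie(&http.Cookie{Name: "token", Value: "some-jwt"})

	w := httptest.NewRecorder()
	deleteTokenCookieIfPresent(r, w, "token")

	cookies := w.Result().Cookies()
	if len(cookies) != 1 {
		t.Fatalf("expected one replacement cookie, got %d", len(cookies))
	}
	if !cookies[0].Expires.Before(time.Now()) {
		t.Errorf("expected the replacement cookie to be expired, got %v", cookies[0].Expires)
	}
}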
+ +package account + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/controller/user" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleLogin returns an http.HandlerFunc that authenticates +// the user and returns an authentication token on success. +func HandleLogin(userCtrl *user.Controller, cookieName string) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + in := new(user.LoginInput) + err := json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid request body: %s.", err) + return + } + + tokenResponse, err := userCtrl.Login(ctx, session, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + if cookieName != "" { + includeTokenCookie(r, w, tokenResponse, cookieName) + } + + render.JSON(w, http.StatusOK, tokenResponse) + } +} diff --git a/internal/api/handler/account/login_test.go b/internal/api/handler/account/login_test.go new file mode 100644 index 0000000000..61c50b1f7b --- /dev/null +++ b/internal/api/handler/account/login_test.go @@ -0,0 +1,33 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package account + +import "testing" + +func TestLogin(t *testing.T) { + t.Skip() +} + +func TestLogin_NotFound(t *testing.T) { + t.Skip() +} + +func TestLogin_BcryptError(t *testing.T) { + t.Skip() +} + +func TestLogin_TokenError(t *testing.T) { + t.Skip() +} diff --git a/internal/api/handler/account/logout.go b/internal/api/handler/account/logout.go new file mode 100644 index 0000000000..54b1ed32a5 --- /dev/null +++ b/internal/api/handler/account/logout.go @@ -0,0 +1,46 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package account + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/user" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleLogout returns a http.HandlerFunc that deletes the +// user token being used in the respective request and logs the user out. 
+func HandleLogout(userCtrl *user.Controller, cookieName string) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + err := userCtrl.Logout(ctx, session) + + // best effort delete cookie even in case of errors, to avoid clients being unable to remove the cookie. + // WARNING: It could be that the cookie is removed even though the token is still there in the DB. + // However, we have APIs to list and delete session tokens, and expiry time is usually short. + deleteTokenCookieIfPresent(r, w, cookieName) + + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.DeleteSuccessful(w) + } +} diff --git a/internal/api/handler/account/register.go b/internal/api/handler/account/register.go new file mode 100644 index 0000000000..b5d15c4bad --- /dev/null +++ b/internal/api/handler/account/register.go @@ -0,0 +1,58 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package account + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/controller/system" + "github.com/harness/gitness/internal/api/controller/user" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleRegister returns an http.HandlerFunc that processes an http.Request +// to register the named user account with the system. +func HandleRegister(userCtrl *user.Controller, sysCtrl *system.Controller, cookieName string) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + + includeCookie, err := request.GetIncludeCookieFromQueryOrDefault(r, false) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + in := new(user.RegisterInput) + err = json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid request body: %s.", err) + return + } + + tokenResponse, err := userCtrl.Register(ctx, sysCtrl, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + if includeCookie { + includeTokenCookie(r, w, tokenResponse, cookieName) + } + + render.JSON(w, http.StatusOK, tokenResponse) + } +} diff --git a/internal/api/handler/account/register_test.go b/internal/api/handler/account/register_test.go new file mode 100644 index 0000000000..677db4edba --- /dev/null +++ b/internal/api/handler/account/register_test.go @@ -0,0 +1,37 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
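The account handlers above all follow the same constructor pattern: a controller goes in, an http.HandlerFunc comes out. A sketch of mounting them on a plain ServeMux; the real routing lives in the router package, and the paths and cookie name here are placeholders:

package example

import (
	"net/http"

	"github.com/harness/gitness/internal/api/controller/system"
	"github.com/harness/gitness/internal/api/controller/user"
	"github.com/harness/gitness/internal/api/handler/account"
)

func mountAccountRoutes(mux *http.ServeMux, userCtrl *user.Controller, sysCtrl *system.Controller) {
	const cookieName = "token" // placeholder, normally taken from configuration

	mux.HandleFunc("/api/v1/login", account.HandleLogin(userCtrl, cookieName))
	mux.HandleFunc("/api/v1/logout", account.HandleLogout(userCtrl, cookieName))
	mux.HandleFunc("/api/v1/register", account.HandleRegister(userCtrl, sysCtrl, cookieName))
}

An empty cookie name makes HandleLogin skip setting the cookie entirely, which matches the check in the handler above.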
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package account
+
+import "testing"
+
+func TestRegister(t *testing.T) {
+	t.Skip()
+}
+
+func TestRegisterAdmin(t *testing.T) {
+	t.Skip()
+}
+
+func TestRegister_CreateError(t *testing.T) {
+	t.Skip()
+}
+
+func TestRegister_BcryptError(t *testing.T) {
+	t.Skip()
+}
+
+func TestRegister_TokenError(t *testing.T) {
+	t.Skip()
+}
diff --git a/internal/api/handler/check/check_list.go b/internal/api/handler/check/check_list.go
new file mode 100644
index 0000000000..5be6c75e52
--- /dev/null
+++ b/internal/api/handler/check/check_list.go
@@ -0,0 +1,54 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package pullreq
+
+import (
+	"net/http"
+
+	"github.com/harness/gitness/internal/api/controller/check"
+	"github.com/harness/gitness/internal/api/render"
+	"github.com/harness/gitness/internal/api/request"
+)
+
+// HandleCheckList is an HTTP handler for listing status check results for a repository.
+func HandleCheckList(checkCtrl *check.Controller) http.HandlerFunc {
+	return func(w http.ResponseWriter, r *http.Request) {
+		ctx := r.Context()
+		session, _ := request.AuthSessionFrom(ctx)
+
+		repoRef, err := request.GetRepoRefFromPath(r)
+		if err != nil {
+			render.TranslatedUserError(w, err)
+			return
+		}
+
+		commitSHA, err := request.GetCommitSHAFromPath(r)
+		if err != nil {
+			render.TranslatedUserError(w, err)
+			return
+		}
+
+		opts := request.ParseCheckListOptions(r)
+
+		checks, count, err := checkCtrl.ListChecks(ctx, session, repoRef, commitSHA, opts)
+		if err != nil {
+			render.TranslatedUserError(w, err)
+			return
+		}
+
+		render.Pagination(r, w, opts.Page, opts.Size, count)
+		render.JSON(w, http.StatusOK, checks)
+	}
+}
diff --git a/internal/api/handler/check/check_report.go b/internal/api/handler/check/check_report.go
new file mode 100644
index 0000000000..c26435ca47
--- /dev/null
+++ b/internal/api/handler/check/check_report.go
@@ -0,0 +1,60 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package pullreq
+
+import (
+	"encoding/json"
+	"net/http"
+
+	"github.com/harness/gitness/internal/api/controller/check"
+	"github.com/harness/gitness/internal/api/render"
+	"github.com/harness/gitness/internal/api/request"
+)
+
+// HandleCheckReport is an HTTP handler for reporting status check results.
+func HandleCheckReport(checkCtrl *check.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + commitSHA, err := request.GetCommitSHAFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + in := new(check.ReportInput) + err = json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid Request Body: %s.", err) + return + } + + statusCheck, err := checkCtrl.Report(ctx, session, + repoRef, commitSHA, in, map[string]string{}) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, statusCheck) + } +} diff --git a/internal/api/handler/connector/create.go b/internal/api/handler/connector/create.go new file mode 100644 index 0000000000..0b8314f191 --- /dev/null +++ b/internal/api/handler/connector/create.go @@ -0,0 +1,47 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package connector + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/controller/connector" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleCreate returns a http.HandlerFunc that creates a new connector. +func HandleCreate(connectorCtrl *connector.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + in := new(connector.CreateInput) + err := json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid Request Body: %s.", err) + return + } + + connector, err := connectorCtrl.Create(ctx, session, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusCreated, connector) + } +} diff --git a/internal/api/handler/connector/delete.go b/internal/api/handler/connector/delete.go new file mode 100644 index 0000000000..ce8940ee63 --- /dev/null +++ b/internal/api/handler/connector/delete.go @@ -0,0 +1,49 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package connector + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/connector" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" + "github.com/harness/gitness/internal/paths" +) + +func HandleDelete(connectorCtrl *connector.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + connectorRef, err := request.GetConnectorRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + spaceRef, connectorUID, err := paths.DisectLeaf(connectorRef) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + err = connectorCtrl.Delete(ctx, session, spaceRef, connectorUID) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.DeleteSuccessful(w) + } +} diff --git a/internal/api/handler/connector/find.go b/internal/api/handler/connector/find.go new file mode 100644 index 0000000000..02605888c2 --- /dev/null +++ b/internal/api/handler/connector/find.go @@ -0,0 +1,50 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package connector + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/connector" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" + "github.com/harness/gitness/internal/paths" +) + +// HandleFind finds a connector from the database. +func HandleFind(connectorCtrl *connector.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + connectorRef, err := request.GetConnectorRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + spaceRef, connectorUID, err := paths.DisectLeaf(connectorRef) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + connector, err := connectorCtrl.Find(ctx, session, spaceRef, connectorUID) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, connector) + } +} diff --git a/internal/api/handler/connector/update.go b/internal/api/handler/connector/update.go new file mode 100644 index 0000000000..d6c1cf345e --- /dev/null +++ b/internal/api/handler/connector/update.go @@ -0,0 +1,58 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
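The connector handlers above receive a single fully qualified reference and split it into the owning space and the connector identifier. A sketch of that split, assuming paths.DisectLeaf separates the last path segment from its parent (the reference value is a placeholder):

package example

import (
	"fmt"

	"github.com/harness/gitness/internal/paths"
)

func splitConnectorRef() error {
	spaceRef, connectorUID, err := paths.DisectLeaf("root/eng/my-connector")
	if err != nil {
		return err
	}
	// Assuming the usual parent/leaf split, this yields:
	//   spaceRef     == "root/eng"
	//   connectorUID == "my-connector"
	fmt.Println(spaceRef, connectorUID)
	return nil
}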
+ +package connector + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/controller/connector" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" + "github.com/harness/gitness/internal/paths" +) + +func HandleUpdate(connectorCtrl *connector.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + in := new(connector.UpdateInput) + err := json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid Request Body: %s.", err) + return + } + + connectorRef, err := request.GetConnectorRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + spaceRef, connectorUID, err := paths.DisectLeaf(connectorRef) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + connector, err := connectorCtrl.Update(ctx, session, spaceRef, connectorUID, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, connector) + } +} diff --git a/internal/api/handler/execution/cancel.go b/internal/api/handler/execution/cancel.go new file mode 100644 index 0000000000..95a949108c --- /dev/null +++ b/internal/api/handler/execution/cancel.go @@ -0,0 +1,52 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +package execution + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/execution" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +func HandleCancel(executionCtrl *execution.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + pipelineUID, err := request.GetPipelineUIDFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + n, err := request.GetExecutionNumberFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + execution, err := executionCtrl.Cancel(ctx, session, repoRef, pipelineUID, n) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, execution) + } +} diff --git a/internal/api/handler/execution/create.go b/internal/api/handler/execution/create.go new file mode 100644 index 0000000000..61ecb337fd --- /dev/null +++ b/internal/api/handler/execution/create.go @@ -0,0 +1,50 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package execution + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/execution" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +func HandleCreate(executionCtrl *execution.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + pipelineUID, err := request.GetPipelineUIDFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + branch := request.GetBranchFromQuery(r) + + execution, err := executionCtrl.Create(ctx, session, repoRef, pipelineUID, branch) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusCreated, execution) + } +} diff --git a/internal/api/handler/execution/delete.go b/internal/api/handler/execution/delete.go new file mode 100644 index 0000000000..cdcee6969d --- /dev/null +++ b/internal/api/handler/execution/delete.go @@ -0,0 +1,53 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package execution + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/execution" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +func HandleDelete(executionCtrl *execution.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + pipelineUID, err := request.GetPipelineUIDFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + n, err := request.GetExecutionNumberFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + err = executionCtrl.Delete(ctx, session, repoRef, pipelineUID, n) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.DeleteSuccessful(w) + } +} diff --git a/internal/api/handler/execution/find.go b/internal/api/handler/execution/find.go new file mode 100644 index 0000000000..d9c0aaf75c --- /dev/null +++ b/internal/api/handler/execution/find.go @@ -0,0 +1,53 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package execution + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/execution" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +func HandleFind(executionCtrl *execution.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + pipelineUID, err := request.GetPipelineUIDFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + n, err := request.GetExecutionNumberFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + execution, err := executionCtrl.Find(ctx, session, repoRef, pipelineUID, n) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, execution) + } +} diff --git a/internal/api/handler/execution/list.go b/internal/api/handler/execution/list.go new file mode 100644 index 0000000000..1ada0f1b53 --- /dev/null +++ b/internal/api/handler/execution/list.go @@ -0,0 +1,51 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package execution + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/execution" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +func HandleList(executionCtrl *execution.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + pipelineUID, err := request.GetPipelineUIDFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + pagination := request.ParsePaginationFromRequest(r) + + repos, totalCount, err := executionCtrl.List(ctx, session, repoRef, pipelineUID, pagination) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.Pagination(r, w, pagination.Page, pagination.Size, int(totalCount)) + render.JSON(w, http.StatusOK, repos) + } +} diff --git a/internal/api/handler/githook/post_receive.go b/internal/api/handler/githook/post_receive.go new file mode 100644 index 0000000000..0a5e5d8a71 --- /dev/null +++ b/internal/api/handler/githook/post_receive.go @@ -0,0 +1,60 @@ +// Copyright 2023 Harness, Inc. 
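Many of the handlers in this patch repeat the same shape: read the auth session, pull parameters, optionally decode a JSON body, call the controller, and render either a translated error or a JSON response. A sketch of how the body-decoding variant could be factored with a generic helper; this helper is illustrative only and is not part of the patch:

package example

import (
	"context"
	"encoding/json"
	"net/http"

	"github.com/harness/gitness/internal/api/render"
	"github.com/harness/gitness/internal/api/request"
	"github.com/harness/gitness/internal/auth"
)

// handleJSON wraps the decode-body / call-controller / render-JSON pattern.
func handleJSON[In any, Out any](
	status int,
	call func(ctx context.Context, session *auth.Session, in *In) (Out, error),
) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		ctx := r.Context()
		session, _ := request.AuthSessionFrom(ctx)

		in := new(In)
		if err := json.NewDecoder(r.Body).Decode(in); err != nil {
			render.BadRequestf(w, "Invalid Request Body: %s.", err)
			return
		}

		out, err := call(ctx, session, in)
		if err != nil {
			render.TranslatedUserError(w, err)
			return
		}

		render.JSON(w, status, out)
	}
}

The explicit handlers above are arguably easier to read and grep, which is likely why the patch keeps them spelled out; the helper only illustrates how much of the shape is shared.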
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package githook + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/githook" + controllergithook "github.com/harness/gitness/internal/api/controller/githook" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandlePostReceive returns a handler function that handles post-receive git hooks. +func HandlePostReceive(githookCtrl *controllergithook.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + repoID, err := request.GetRepoIDFromQuery(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + principalID, err := request.GetPrincipalIDFromQuery(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + in := new(githook.PostReceiveInput) + err = json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid Request Body: %s.", err) + return + } + + out, err := githookCtrl.PostReceive(ctx, session, repoID, principalID, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, out) + } +} diff --git a/internal/api/handler/githook/pre_receive.go b/internal/api/handler/githook/pre_receive.go new file mode 100644 index 0000000000..5026553b40 --- /dev/null +++ b/internal/api/handler/githook/pre_receive.go @@ -0,0 +1,60 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package githook + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/githook" + controllergithook "github.com/harness/gitness/internal/api/controller/githook" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandlePreReceive returns a handler function that handles pre-receive git hooks. 
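+// The hook input is decoded from the json-encoded request body and passed to the githook controller.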
+func HandlePreReceive(githookCtrl *controllergithook.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + repoID, err := request.GetRepoIDFromQuery(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + principalID, err := request.GetPrincipalIDFromQuery(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + in := new(githook.PreReceiveInput) + err = json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid Request Body: %s.", err) + return + } + + out, err := githookCtrl.PreReceive(ctx, session, repoID, principalID, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, out) + } +} diff --git a/internal/api/handler/githook/update.go b/internal/api/handler/githook/update.go new file mode 100644 index 0000000000..cda5ac057c --- /dev/null +++ b/internal/api/handler/githook/update.go @@ -0,0 +1,60 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package githook + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/githook" + githookcontroller "github.com/harness/gitness/internal/api/controller/githook" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleUpdate returns a handler function that handles update git hooks. +func HandleUpdate(githookCtrl *githookcontroller.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + repoID, err := request.GetRepoIDFromQuery(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + principalID, err := request.GetPrincipalIDFromQuery(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + in := new(githook.UpdateInput) + err = json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid Request Body: %s.", err) + return + } + + out, err := githookCtrl.Update(ctx, session, repoID, principalID, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, out) + } +} diff --git a/internal/api/handler/logs/find.go b/internal/api/handler/logs/find.go new file mode 100644 index 0000000000..6b0f6dd644 --- /dev/null +++ b/internal/api/handler/logs/find.go @@ -0,0 +1,64 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package logs + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/logs" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +func HandleFind(logCtrl *logs.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + pipelineUID, err := request.GetPipelineUIDFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + executionNum, err := request.GetExecutionNumberFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + stageNum, err := request.GetStageNumberFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + stepNum, err := request.GetStepNumberFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + lines, err := logCtrl.Find( + ctx, session, repoRef, pipelineUID, + executionNum, int(stageNum), int(stepNum)) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, lines) + } +} diff --git a/internal/api/handler/logs/tail.go b/internal/api/handler/logs/tail.go new file mode 100644 index 0000000000..29aadcc4bd --- /dev/null +++ b/internal/api/handler/logs/tail.go @@ -0,0 +1,136 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package logs + +import ( + "context" + "encoding/json" + "io" + "net/http" + "time" + + "github.com/harness/gitness/internal/api/controller/logs" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" + + "github.com/rs/zerolog/log" +) + +var ( + pingInterval = 30 * time.Second + tailMaxTime = 1 * time.Hour +) + +func HandleTail(logCtrl *logs.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + pipelineUID, err := request.GetPipelineUIDFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + executionNum, err := request.GetExecutionNumberFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + stageNum, err := request.GetStageNumberFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + stepNum, err := request.GetStepNumberFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + f, ok := w.(http.Flusher) + if !ok { + log.Error().Msg("http writer type assertion failed") + render.InternalError(w) + return + } + + h := w.Header() + h.Set("Content-Type", "text/event-stream") + h.Set("Cache-Control", "no-cache") + h.Set("Connection", "keep-alive") + h.Set("X-Accel-Buffering", "no") + h.Set("Access-Control-Allow-Origin", "*") + + io.WriteString(w, ": ping\n\n") + f.Flush() + + linec, errc, err := logCtrl.Tail( + ctx, session, repoRef, pipelineUID, + executionNum, int(stageNum), int(stepNum)) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + // could not get error channel + if errc == nil { + io.WriteString(w, "event: error\ndata: eof\n\n") + return + } + + ctx, cancel := context.WithTimeout(r.Context(), tailMaxTime) + defer cancel() + + enc := json.NewEncoder(w) + + pingTimer := time.NewTimer(pingInterval) + defer pingTimer.Stop() + L: + for { + // ensure timer is stopped before resetting (see documentation) + if !pingTimer.Stop() { + // in this specific case the timer's channel could be both, empty or full + select { + case <-pingTimer.C: + default: + } + } + pingTimer.Reset(pingInterval) + select { + case <-ctx.Done(): + break L + case err := <-errc: + log.Err(err).Msg("received error in the tail channel") + break L + case <-pingTimer.C: + // if time b/w messages takes longer, send a ping + io.WriteString(w, ": ping\n\n") + f.Flush() + case line := <-linec: + io.WriteString(w, "data: ") + enc.Encode(line) + io.WriteString(w, "\n\n") + f.Flush() + } + } + + io.WriteString(w, "event: error\ndata: eof\n\n") + f.Flush() + } +} diff --git a/internal/api/handler/pipeline/create.go b/internal/api/handler/pipeline/create.go new file mode 100644 index 0000000000..84e22985c2 --- /dev/null +++ b/internal/api/handler/pipeline/create.go @@ -0,0 +1,52 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package pipeline + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/controller/pipeline" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +func HandleCreate(pipelineCtrl *pipeline.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + in := new(pipeline.CreateInput) + err = json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid Request Body: %s.", err) + return + } + + pipeline, err := pipelineCtrl.Create(ctx, session, repoRef, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusCreated, pipeline) + } +} diff --git a/internal/api/handler/pipeline/delete.go b/internal/api/handler/pipeline/delete.go new file mode 100644 index 0000000000..237642cfdb --- /dev/null +++ b/internal/api/handler/pipeline/delete.go @@ -0,0 +1,48 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pipeline + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/pipeline" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +func HandleDelete(pipelineCtrl *pipeline.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + pipelineUID, err := request.GetPipelineUIDFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + err = pipelineCtrl.Delete(ctx, session, repoRef, pipelineUID) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.DeleteSuccessful(w) + } +} diff --git a/internal/api/handler/pipeline/find.go b/internal/api/handler/pipeline/find.go new file mode 100644 index 0000000000..64310f13bf --- /dev/null +++ b/internal/api/handler/pipeline/find.go @@ -0,0 +1,48 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package pipeline + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/pipeline" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +func HandleFind(pipelineCtrl *pipeline.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + pipelineUID, err := request.GetPipelineUIDFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + pipeline, err := pipelineCtrl.Find(ctx, session, repoRef, pipelineUID) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, pipeline) + } +} diff --git a/internal/api/handler/pipeline/update.go b/internal/api/handler/pipeline/update.go new file mode 100644 index 0000000000..79d106b3c7 --- /dev/null +++ b/internal/api/handler/pipeline/update.go @@ -0,0 +1,57 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pipeline + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/controller/pipeline" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +func HandleUpdate(pipelineCtrl *pipeline.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + in := new(pipeline.UpdateInput) + err := json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid Request Body: %s.", err) + return + } + + pipelineUID, err := request.GetPipelineUIDFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + pipeline, err := pipelineCtrl.Update(ctx, session, repoRef, pipelineUID, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, pipeline) + } +} diff --git a/internal/api/handler/plugin/list.go b/internal/api/handler/plugin/list.go new file mode 100644 index 0000000000..69adb3bd6d --- /dev/null +++ b/internal/api/handler/plugin/list.go @@ -0,0 +1,38 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package plugin + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/plugin" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +func HandleList(pluginCtrl *plugin.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + filter := request.ParseListQueryFilterFromRequest(r) + ret, totalCount, err := pluginCtrl.List(ctx, filter) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.Pagination(r, w, filter.Page, filter.Size, int(totalCount)) + render.JSON(w, http.StatusOK, ret) + } +} diff --git a/internal/api/handler/principal/search.go b/internal/api/handler/principal/search.go new file mode 100644 index 0000000000..b4604f6ea1 --- /dev/null +++ b/internal/api/handler/principal/search.go @@ -0,0 +1,38 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package serviceaccount + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/principal" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +func HandleList(principalCtrl principal.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + + principalFilter := request.ParsePrincipalFilter(r) + principalInfos, err := principalCtrl.List(ctx, principalFilter) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, principalInfos) + } +} diff --git a/internal/api/handler/pullreq/activity_list.go b/internal/api/handler/pullreq/activity_list.go new file mode 100644 index 0000000000..a34d0a5709 --- /dev/null +++ b/internal/api/handler/pullreq/activity_list.go @@ -0,0 +1,58 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pullreq + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/pullreq" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleListActivities returns a http.HandlerFunc that lists pull request activities for a pull request. 
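+// The activities are filtered using the activity filter parsed from the request.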
+func HandleListActivities(pullreqCtrl *pullreq.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + pullreqNumber, err := request.GetPullReqNumberFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + filter, err := request.ParsePullReqActivityFilter(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + list, total, err := pullreqCtrl.ActivityList(ctx, session, repoRef, pullreqNumber, filter) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.PaginationLimit(r, w, int(total)) + render.JSON(w, http.StatusOK, list) + } +} diff --git a/internal/api/handler/pullreq/comment_create.go b/internal/api/handler/pullreq/comment_create.go new file mode 100644 index 0000000000..7a8ff8f4fc --- /dev/null +++ b/internal/api/handler/pullreq/comment_create.go @@ -0,0 +1,59 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pullreq + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/controller/pullreq" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleCommentCreate is an HTTP handler for creating a new pull request comment or a reply to a comment. +func HandleCommentCreate(pullreqCtrl *pullreq.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + pullreqNumber, err := request.GetPullReqNumberFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + in := new(pullreq.CommentCreateInput) + err = json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid Request Body: %s.", err) + return + } + + comment, err := pullreqCtrl.CommentCreate(ctx, session, repoRef, pullreqNumber, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusCreated, comment) + } +} diff --git a/internal/api/handler/pullreq/comment_delete.go b/internal/api/handler/pullreq/comment_delete.go new file mode 100644 index 0000000000..9fde36c80a --- /dev/null +++ b/internal/api/handler/pullreq/comment_delete.go @@ -0,0 +1,57 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pullreq + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/pullreq" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleCommentDelete is an HTTP handler for deleting a pull request comment. +func HandleCommentDelete(pullreqCtrl *pullreq.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + pullreqNumber, err := request.GetPullReqNumberFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + commentID, err := request.GetPullReqCommentIDPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + err = pullreqCtrl.CommentDelete(ctx, session, repoRef, pullreqNumber, commentID) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.DeleteSuccessful(w) + } +} diff --git a/internal/api/handler/pullreq/comment_status.go b/internal/api/handler/pullreq/comment_status.go new file mode 100644 index 0000000000..8554d3e7d1 --- /dev/null +++ b/internal/api/handler/pullreq/comment_status.go @@ -0,0 +1,65 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pullreq + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/controller/pullreq" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleCommentStatus is an HTTP handler for updating a pull request comment status. 
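+// The desired status is decoded from the json-encoded request body.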
+func HandleCommentStatus(pullreqCtrl *pullreq.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + pullreqNumber, err := request.GetPullReqNumberFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + commentID, err := request.GetPullReqCommentIDPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + in := new(pullreq.CommentStatusInput) + err = json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid Request Body: %s.", err) + return + } + + comment, err := pullreqCtrl.CommentStatus(ctx, session, repoRef, pullreqNumber, commentID, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, comment) + } +} diff --git a/internal/api/handler/pullreq/comment_update.go b/internal/api/handler/pullreq/comment_update.go new file mode 100644 index 0000000000..511018be58 --- /dev/null +++ b/internal/api/handler/pullreq/comment_update.go @@ -0,0 +1,65 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pullreq + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/controller/pullreq" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleCommentUpdate is an HTTP handler for updating a pull request comment. +func HandleCommentUpdate(pullreqCtrl *pullreq.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + pullreqNumber, err := request.GetPullReqNumberFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + commentID, err := request.GetPullReqCommentIDPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + in := new(pullreq.CommentUpdateInput) + err = json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid Request Body: %s.", err) + return + } + + comment, err := pullreqCtrl.CommentUpdate(ctx, session, repoRef, pullreqNumber, commentID, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, comment) + } +} diff --git a/internal/api/handler/pullreq/file_view_add.go b/internal/api/handler/pullreq/file_view_add.go new file mode 100644 index 0000000000..153db44df0 --- /dev/null +++ b/internal/api/handler/pullreq/file_view_add.go @@ -0,0 +1,59 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pullreq + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/controller/pullreq" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleFileViewAdd handles API that marks a file in a PR as viewed. +func HandleFileViewAdd(pullreqCtrl *pullreq.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + pullreqNumber, err := request.GetPullReqNumberFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + in := new(pullreq.FileViewAddInput) + err = json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid Request Body: %s.", err) + return + } + + fileView, err := pullreqCtrl.FileViewAdd(ctx, session, repoRef, pullreqNumber, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, fileView) + } +} diff --git a/internal/api/handler/pullreq/file_view_delete.go b/internal/api/handler/pullreq/file_view_delete.go new file mode 100644 index 0000000000..20cf3b302c --- /dev/null +++ b/internal/api/handler/pullreq/file_view_delete.go @@ -0,0 +1,57 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pullreq + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/pullreq" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleFileViewDelete handles API that removes a file in a PR from being marked as viewed. 
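+// The file path is taken from the remainder of the request path.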
+func HandleFileViewDelete(pullreqCtrl *pullreq.Controller) http.HandlerFunc {
+	return func(w http.ResponseWriter, r *http.Request) {
+		ctx := r.Context()
+		session, _ := request.AuthSessionFrom(ctx)
+
+		repoRef, err := request.GetRepoRefFromPath(r)
+		if err != nil {
+			render.TranslatedUserError(w, err)
+			return
+		}
+
+		pullreqNumber, err := request.GetPullReqNumberFromPath(r)
+		if err != nil {
+			render.TranslatedUserError(w, err)
+			return
+		}
+
+		filePath, err := request.GetRemainderFromPath(r)
+		if err != nil {
+			render.TranslatedUserError(w, err)
+			return
+		}
+
+		err = pullreqCtrl.FileViewDelete(ctx, session, repoRef, pullreqNumber, filePath)
+		if err != nil {
+			render.TranslatedUserError(w, err)
+			return
+		}
+
+		render.DeleteSuccessful(w)
+	}
+}
diff --git a/internal/api/handler/pullreq/file_view_list.go b/internal/api/handler/pullreq/file_view_list.go
new file mode 100644
index 0000000000..2a0dd71e3d
--- /dev/null
+++ b/internal/api/handler/pullreq/file_view_list.go
@@ -0,0 +1,51 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package pullreq
+
+import (
+	"net/http"
+
+	"github.com/harness/gitness/internal/api/controller/pullreq"
+	"github.com/harness/gitness/internal/api/render"
+	"github.com/harness/gitness/internal/api/request"
+)
+
+// HandleFileViewList handles API that lists all files of the PR marked as viewed for the user.
+func HandleFileViewList(pullreqCtrl *pullreq.Controller) http.HandlerFunc {
+	return func(w http.ResponseWriter, r *http.Request) {
+		ctx := r.Context()
+		session, _ := request.AuthSessionFrom(ctx)
+
+		repoRef, err := request.GetRepoRefFromPath(r)
+		if err != nil {
+			render.TranslatedUserError(w, err)
+			return
+		}
+
+		pullreqNumber, err := request.GetPullReqNumberFromPath(r)
+		if err != nil {
+			render.TranslatedUserError(w, err)
+			return
+		}
+
+		fileViews, err := pullreqCtrl.FileViewList(ctx, session, repoRef, pullreqNumber)
+		if err != nil {
+			render.TranslatedUserError(w, err)
+			return
+		}
+
+		render.JSON(w, http.StatusOK, fileViews)
+	}
+}
diff --git a/internal/api/handler/pullreq/merge.go b/internal/api/handler/pullreq/merge.go
new file mode 100644
index 0000000000..dc5cb1bb77
--- /dev/null
+++ b/internal/api/handler/pullreq/merge.go
@@ -0,0 +1,61 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package pullreq
+
+import (
+	"encoding/json"
+	"errors"
+	"io"
+	"net/http"
+
+	"github.com/harness/gitness/internal/api/controller/pullreq"
+	"github.com/harness/gitness/internal/api/render"
+	"github.com/harness/gitness/internal/api/request"
+)
+
+// HandleMerge returns a http.HandlerFunc that merges a pull request.
+func HandleMerge(pullreqCtrl *pullreq.Controller) http.HandlerFunc {
+	return func(w http.ResponseWriter, r *http.Request) {
+		ctx := r.Context()
+		session, _ := request.AuthSessionFrom(ctx)
+
+		repoRef, err := request.GetRepoRefFromPath(r)
+		if err != nil {
+			render.TranslatedUserError(w, err)
+			return
+		}
+
+		in := new(pullreq.MergeInput)
+		err = json.NewDecoder(r.Body).Decode(in)
+		if err != nil && !errors.Is(err, io.EOF) { // allow empty body
+			render.BadRequestf(w, "Invalid Request Body: %s.", err)
+			return
+		}
+
+		pullreqNumber, err := request.GetPullReqNumberFromPath(r)
+		if err != nil {
+			render.TranslatedUserError(w, err)
+			return
+		}
+
+		pr, err := pullreqCtrl.Merge(ctx, session, repoRef, pullreqNumber, in)
+		if err != nil {
+			render.TranslatedUserError(w, err)
+			return
+		}
+
+		render.JSON(w, http.StatusOK, pr)
+	}
+}
diff --git a/internal/api/handler/pullreq/pr_commits.go b/internal/api/handler/pullreq/pr_commits.go
new file mode 100644
index 0000000000..22d9866070
--- /dev/null
+++ b/internal/api/handler/pullreq/pr_commits.go
@@ -0,0 +1,61 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package pullreq
+
+import (
+	"net/http"
+
+	"github.com/harness/gitness/internal/api/controller/pullreq"
+	"github.com/harness/gitness/internal/api/render"
+	"github.com/harness/gitness/internal/api/request"
+	"github.com/harness/gitness/types"
+)
+
+// HandleCommits returns a http.HandlerFunc that lists the commits of a pull request.
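+// The response is paginated based on the page and limit parameters parsed from the request.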
+func HandleCommits(pullreqCtrl *pullreq.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + pullreqNumber, err := request.GetPullReqNumberFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + filter := &types.PaginationFilter{ + Page: request.ParsePage(r), + Limit: request.ParseLimit(r), + } + + // gitref is Head branch in this case + commits, err := pullreqCtrl.Commits(ctx, session, repoRef, pullreqNumber, filter) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + // TODO: get last page indicator explicitly - current check is wrong in case len % limit == 0 + isLastPage := len(commits) < filter.Limit + render.PaginationNoTotal(r, w, filter.Page, filter.Limit, isLastPage) + render.JSON(w, http.StatusOK, commits) + } +} diff --git a/internal/api/handler/pullreq/pr_create.go b/internal/api/handler/pullreq/pr_create.go new file mode 100644 index 0000000000..37b21d6a19 --- /dev/null +++ b/internal/api/handler/pullreq/pr_create.go @@ -0,0 +1,53 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pullreq + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/controller/pullreq" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleCreate returns a http.HandlerFunc that creates a new pull request. +func HandleCreate(pullreqCtrl *pullreq.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + in := new(pullreq.CreateInput) + err = json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid Request Body: %s.", err) + return + } + + pr, err := pullreqCtrl.Create(ctx, session, repoRef, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusCreated, pr) + } +} diff --git a/internal/api/handler/pullreq/pr_find.go b/internal/api/handler/pullreq/pr_find.go new file mode 100644 index 0000000000..4e423a2d27 --- /dev/null +++ b/internal/api/handler/pullreq/pr_find.go @@ -0,0 +1,51 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package pullreq + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/pullreq" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleFind returns a http.HandlerFunc that finds a pull request. +func HandleFind(pullreqCtrl *pullreq.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + pullreqNumber, err := request.GetPullReqNumberFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + pr, err := pullreqCtrl.Find(ctx, session, repoRef, pullreqNumber) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, pr) + } +} diff --git a/internal/api/handler/pullreq/pr_list.go b/internal/api/handler/pullreq/pr_list.go new file mode 100644 index 0000000000..4d6176e0f1 --- /dev/null +++ b/internal/api/handler/pullreq/pr_list.go @@ -0,0 +1,57 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pullreq + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/pullreq" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" + "github.com/harness/gitness/types/enum" +) + +// HandleList returns a http.HandlerFunc that lists pull requests for a repository. +func HandleList(pullreqCtrl *pullreq.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + filter, err := request.ParsePullReqFilter(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + if filter.Order == enum.OrderDefault { + filter.Order = enum.OrderDesc + } + + list, total, err := pullreqCtrl.List(ctx, session, repoRef, filter) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.Pagination(r, w, filter.Page, filter.Size, int(total)) + render.JSON(w, http.StatusOK, list) + } +} diff --git a/internal/api/handler/pullreq/pr_metadata.go b/internal/api/handler/pullreq/pr_metadata.go new file mode 100644 index 0000000000..e36f1f9b41 --- /dev/null +++ b/internal/api/handler/pullreq/pr_metadata.go @@ -0,0 +1,51 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pullreq + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/pullreq" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleMetadata returns a http.HandlerFunc that returns PR metadata. +func HandleMetadata(pullreqCtrl *pullreq.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + pullreqNumber, err := request.GetPullReqNumberFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + pr, err := pullreqCtrl.Find(ctx, session, repoRef, pullreqNumber) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, pr.Stats) + } +} diff --git a/internal/api/handler/pullreq/pr_recheck.go b/internal/api/handler/pullreq/pr_recheck.go new file mode 100644 index 0000000000..842d71edac --- /dev/null +++ b/internal/api/handler/pullreq/pr_recheck.go @@ -0,0 +1,51 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pullreq + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/pullreq" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleRecheck handles API that re-checks all system PR checks (mergeability check, ...). +func HandleRecheck(pullreqCtrl *pullreq.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + pullreqNumber, err := request.GetPullReqNumberFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + err = pullreqCtrl.Recheck(ctx, session, repoRef, pullreqNumber) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + w.WriteHeader(http.StatusNoContent) + } +} diff --git a/internal/api/handler/pullreq/pr_state.go b/internal/api/handler/pullreq/pr_state.go new file mode 100644 index 0000000000..6ec96a567e --- /dev/null +++ b/internal/api/handler/pullreq/pr_state.go @@ -0,0 +1,59 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pullreq + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/controller/pullreq" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleState handles API call to update pull request state. +func HandleState(pullreqCtrl *pullreq.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + pullreqNumber, err := request.GetPullReqNumberFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + in := new(pullreq.StateInput) + err = json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid Request Body: %s.", err) + return + } + + pr, err := pullreqCtrl.State(ctx, session, repoRef, pullreqNumber, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, pr) + } +} diff --git a/internal/api/handler/pullreq/pr_update.go b/internal/api/handler/pullreq/pr_update.go new file mode 100644 index 0000000000..cb100cad85 --- /dev/null +++ b/internal/api/handler/pullreq/pr_update.go @@ -0,0 +1,59 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pullreq + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/controller/pullreq" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleUpdate handles update pull request API calls. 
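+// The update data is decoded from the json-encoded request body.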
+func HandleUpdate(pullreqCtrl *pullreq.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + pullreqNumber, err := request.GetPullReqNumberFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + in := new(pullreq.UpdateInput) + err = json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid Request Body: %s.", err) + return + } + + pr, err := pullreqCtrl.Update(ctx, session, repoRef, pullreqNumber, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, pr) + } +} diff --git a/internal/api/handler/pullreq/review_submit.go b/internal/api/handler/pullreq/review_submit.go new file mode 100644 index 0000000000..8df590fced --- /dev/null +++ b/internal/api/handler/pullreq/review_submit.go @@ -0,0 +1,59 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pullreq + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/controller/pullreq" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleReviewSubmit handles API that submits a new pull request review. +func HandleReviewSubmit(pullreqCtrl *pullreq.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + pullreqNumber, err := request.GetPullReqNumberFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + in := new(pullreq.ReviewSubmitInput) + err = json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid Request Body: %s.", err) + return + } + + _, err = pullreqCtrl.ReviewSubmit(ctx, session, repoRef, pullreqNumber, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + w.WriteHeader(http.StatusNoContent) + } +} diff --git a/internal/api/handler/pullreq/reviewer_add.go b/internal/api/handler/pullreq/reviewer_add.go new file mode 100644 index 0000000000..4fbf1b559f --- /dev/null +++ b/internal/api/handler/pullreq/reviewer_add.go @@ -0,0 +1,59 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package pullreq + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/controller/pullreq" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleReviewerAdd handles API that adds a new pull request reviewer. +func HandleReviewerAdd(pullreqCtrl *pullreq.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + pullreqNumber, err := request.GetPullReqNumberFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + in := new(pullreq.ReviewerAddInput) + err = json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid Request Body: %s.", err) + return + } + + reviewer, err := pullreqCtrl.ReviewerAdd(ctx, session, repoRef, pullreqNumber, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, reviewer) + } +} diff --git a/internal/api/handler/pullreq/reviewer_delete.go b/internal/api/handler/pullreq/reviewer_delete.go new file mode 100644 index 0000000000..59b574c23e --- /dev/null +++ b/internal/api/handler/pullreq/reviewer_delete.go @@ -0,0 +1,57 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pullreq + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/pullreq" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleReviewerDelete handles API that deletes the given reviewer from a particular Pull request. +func HandleReviewerDelete(pullreqCtrl *pullreq.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + prNum, err := request.GetPullReqNumberFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + reviewerID, err := request.GetReviewerIDFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + err = pullreqCtrl.ReviewerDelete(ctx, session, repoRef, prNum, reviewerID) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.DeleteSuccessful(w) + } +} diff --git a/internal/api/handler/pullreq/reviewer_list.go b/internal/api/handler/pullreq/reviewer_list.go new file mode 100644 index 0000000000..08327dd708 --- /dev/null +++ b/internal/api/handler/pullreq/reviewer_list.go @@ -0,0 +1,51 @@ +// Copyright 2023 Harness, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pullreq + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/pullreq" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleReviewerList handles API that returns list of pull request reviewers. +func HandleReviewerList(pullreqCtrl *pullreq.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + pullreqNumber, err := request.GetPullReqNumberFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + list, err := pullreqCtrl.ReviewerList(ctx, session, repoRef, pullreqNumber) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, list) + } +} diff --git a/internal/api/handler/repo/blame.go b/internal/api/handler/repo/blame.go new file mode 100644 index 0000000000..a1660e0cf8 --- /dev/null +++ b/internal/api/handler/repo/blame.go @@ -0,0 +1,64 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package repo + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/repo" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleBlame returns the git blame output for a file. 
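+// The file path is taken from the remainder of the request path and the ref from the
+// optional git ref query parameter; the optional line_from/line_to query parameters
+// bound the blamed line range, with 0 leaving the corresponding bound open. The
+// resulting blame parts are streamed to the response as a JSON array.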
+func HandleBlame(repoCtrl *repo.Controller) http.HandlerFunc {
+	return func(w http.ResponseWriter, r *http.Request) {
+		ctx := r.Context()
+
+		session, _ := request.AuthSessionFrom(ctx)
+
+		repoRef, err := request.GetRepoRefFromPath(r)
+		if err != nil {
+			render.TranslatedUserError(w, err)
+			return
+		}
+
+		path := request.GetOptionalRemainderFromPath(r)
+
+		// line_from is optional, skipped if set to 0
+		lineFrom, err := request.QueryParamAsPositiveInt64OrDefault(r, request.QueryParamLineFrom, 0)
+		if err != nil {
+			render.TranslatedUserError(w, err)
+			return
+		}
+
+		// line_to is optional, skipped if set to 0
+		lineTo, err := request.QueryParamAsPositiveInt64OrDefault(r, request.QueryParamLineTo, 0)
+		if err != nil {
+			render.TranslatedUserError(w, err)
+			return
+		}
+
+		gitRef := request.GetGitRefFromQueryOrDefault(r, "")
+
+		stream, err := repoCtrl.Blame(ctx, session, repoRef, gitRef, path, int(lineFrom), int(lineTo))
+		if err != nil {
+			render.TranslatedUserError(w, err)
+			return
+		}
+
+		render.JSONArrayDynamic(ctx, w, stream)
+	}
+}
diff --git a/internal/api/handler/repo/calculate_commit_divergence.go b/internal/api/handler/repo/calculate_commit_divergence.go
new file mode 100644
index 0000000000..a5c669e14c
--- /dev/null
+++ b/internal/api/handler/repo/calculate_commit_divergence.go
@@ -0,0 +1,54 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package repo
+
+import (
+	"encoding/json"
+	"net/http"
+
+	"github.com/harness/gitness/internal/api/controller/repo"
+	"github.com/harness/gitness/internal/api/render"
+	"github.com/harness/gitness/internal/api/request"
+)
+
+/*
+ * Writes json-encoded commit divergence information to the http response body.
+ */
+func HandleCalculateCommitDivergence(repoCtrl *repo.Controller) http.HandlerFunc {
+	return func(w http.ResponseWriter, r *http.Request) {
+		ctx := r.Context()
+		session, _ := request.AuthSessionFrom(ctx)
+		repoRef, err := request.GetRepoRefFromPath(r)
+		if err != nil {
+			render.TranslatedUserError(w, err)
+			return
+		}
+
+		in := new(repo.GetCommitDivergencesInput)
+		err = json.NewDecoder(r.Body).Decode(in)
+		if err != nil {
+			render.BadRequestf(w, "Invalid request body: %s.", err)
+			return
+		}
+
+		divergences, err := repoCtrl.GetCommitDivergences(ctx, session, repoRef, in)
+		if err != nil {
+			render.TranslatedUserError(w, err)
+			return
+		}
+
+		render.JSON(w, http.StatusOK, divergences)
+	}
+}
diff --git a/internal/api/handler/repo/commit.go b/internal/api/handler/repo/commit.go
new file mode 100644
index 0000000000..fe203e4314
--- /dev/null
+++ b/internal/api/handler/repo/commit.go
@@ -0,0 +1,51 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package repo
+
+import (
+	"encoding/json"
+	"net/http"
+
+	"github.com/harness/gitness/internal/api/controller/repo"
+	"github.com/harness/gitness/internal/api/render"
+	"github.com/harness/gitness/internal/api/request"
+)
+
+// HandleCommitFiles creates or modifies files in a repository.
+func HandleCommitFiles(repoCtrl *repo.Controller) http.HandlerFunc {
+	return func(w http.ResponseWriter, r *http.Request) {
+		ctx := r.Context()
+		session, _ := request.AuthSessionFrom(ctx)
+		repoRef, err := request.GetRepoRefFromPath(r)
+		if err != nil {
+			render.TranslatedUserError(w, err)
+			return
+		}
+
+		in := new(repo.CommitFilesOptions)
+		err = json.NewDecoder(r.Body).Decode(in)
+		if err != nil {
+			render.BadRequestf(w, "Invalid request body: %s.", err)
+			return
+		}
+		response, err := repoCtrl.CommitFiles(ctx, session, repoRef, in)
+		if err != nil {
+			render.TranslatedUserError(w, err)
+			return
+		}
+
+		render.JSON(w, http.StatusOK, response)
+	}
+}
diff --git a/internal/api/handler/repo/content_get.go b/internal/api/handler/repo/content_get.go
new file mode 100644
index 0000000000..beb99cf36d
--- /dev/null
+++ b/internal/api/handler/repo/content_get.go
@@ -0,0 +1,54 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package repo
+
+import (
+	"net/http"
+
+	"github.com/harness/gitness/internal/api/controller/repo"
+	"github.com/harness/gitness/internal/api/render"
+	"github.com/harness/gitness/internal/api/request"
+)
+
+// HandleGetContent handles the get content HTTP API.
+func HandleGetContent(repoCtrl *repo.Controller) http.HandlerFunc {
+	return func(w http.ResponseWriter, r *http.Request) {
+		ctx := r.Context()
+		session, _ := request.AuthSessionFrom(ctx)
+		repoRef, err := request.GetRepoRefFromPath(r)
+		if err != nil {
+			render.TranslatedUserError(w, err)
+			return
+		}
+
+		gitRef := request.GetGitRefFromQueryOrDefault(r, "")
+
+		includeCommit, err := request.GetIncludeCommitFromQueryOrDefault(r, false)
+		if err != nil {
+			render.TranslatedUserError(w, err)
+			return
+		}
+
+		repoPath := request.GetOptionalRemainderFromPath(r)
+
+		resp, err := repoCtrl.GetContent(ctx, session, repoRef, gitRef, repoPath, includeCommit)
+		if err != nil {
+			render.TranslatedUserError(w, err)
+			return
+		}
+
+		render.JSON(w, http.StatusOK, resp)
+	}
+}
diff --git a/internal/api/handler/repo/content_paths_details.go b/internal/api/handler/repo/content_paths_details.go
new file mode 100644
index 0000000000..1d72b9d45e
--- /dev/null
+++ b/internal/api/handler/repo/content_paths_details.go
@@ -0,0 +1,54 @@
+// Copyright 2023 Harness, Inc.
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package repo + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/controller/repo" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandlePathsDetails handles get file or directory details HTTP API. +func HandlePathsDetails(repoCtrl *repo.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + gitRef := request.GetGitRefFromQueryOrDefault(r, "") + + var in repo.PathsDetailsInput + err = json.NewDecoder(r.Body).Decode(&in) + if err != nil { + render.BadRequestf(w, "Invalid request body: %s.", err) + return + } + + resp, err := repoCtrl.PathsDetails(ctx, session, repoRef, gitRef, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, resp) + } +} diff --git a/internal/api/handler/repo/create.go b/internal/api/handler/repo/create.go new file mode 100644 index 0000000000..5675172e65 --- /dev/null +++ b/internal/api/handler/repo/create.go @@ -0,0 +1,47 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package repo + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/controller/repo" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleCreate returns a http.HandlerFunc that creates a new repository. +func HandleCreate(repoCtrl *repo.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + in := new(repo.CreateInput) + err := json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid Request Body: %s.", err) + return + } + + repo, err := repoCtrl.Create(ctx, session, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusCreated, repo) + } +} diff --git a/internal/api/handler/repo/create_branch.go b/internal/api/handler/repo/create_branch.go new file mode 100644 index 0000000000..414ccefa69 --- /dev/null +++ b/internal/api/handler/repo/create_branch.go @@ -0,0 +1,54 @@ +// Copyright 2023 Harness, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package repo + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/controller/repo" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +/* + * Writes json-encoded branch information to the http response body. + */ +func HandleCreateBranch(repoCtrl *repo.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + in := new(repo.CreateBranchInput) + err = json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid request body: %s.", err) + return + } + + branch, err := repoCtrl.CreateBranch(ctx, session, repoRef, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusCreated, branch) + } +} diff --git a/internal/api/handler/repo/create_commit_tag.go b/internal/api/handler/repo/create_commit_tag.go new file mode 100644 index 0000000000..048336ee25 --- /dev/null +++ b/internal/api/handler/repo/create_commit_tag.go @@ -0,0 +1,51 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package repo + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/controller/repo" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +func HandleCreateCommitTag(repoCtrl *repo.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + in := new(repo.CreateCommitTagInput) + err = json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid request body: %s.", err) + return + } + + tag, err := repoCtrl.CreateCommitTag(ctx, session, repoRef, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusCreated, tag) + } +} diff --git a/internal/api/handler/repo/delete.go b/internal/api/handler/repo/delete.go new file mode 100644 index 0000000000..8b58e974d3 --- /dev/null +++ b/internal/api/handler/repo/delete.go @@ -0,0 +1,46 @@ +// Copyright 2023 Harness, Inc. 
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package repo
+
+import (
+	"net/http"
+
+	"github.com/harness/gitness/internal/api/controller/repo"
+	"github.com/harness/gitness/internal/api/render"
+	"github.com/harness/gitness/internal/api/request"
+)
+
+/*
+ * Deletes a repository.
+ */
+func HandleDelete(repoCtrl *repo.Controller) http.HandlerFunc {
+	return func(w http.ResponseWriter, r *http.Request) {
+		ctx := r.Context()
+		session, _ := request.AuthSessionFrom(ctx)
+		repoRef, err := request.GetRepoRefFromPath(r)
+		if err != nil {
+			render.TranslatedUserError(w, err)
+			return
+		}
+
+		err = repoCtrl.Delete(ctx, session, repoRef)
+		if err != nil {
+			render.TranslatedUserError(w, err)
+			return
+		}
+
+		render.DeleteSuccessful(w)
+	}
+}
diff --git a/internal/api/handler/repo/delete_branch.go b/internal/api/handler/repo/delete_branch.go
new file mode 100644
index 0000000000..16dcc74fb1
--- /dev/null
+++ b/internal/api/handler/repo/delete_branch.go
@@ -0,0 +1,51 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package repo
+
+import (
+	"net/http"
+
+	"github.com/harness/gitness/internal/api/controller/repo"
+	"github.com/harness/gitness/internal/api/render"
+	"github.com/harness/gitness/internal/api/request"
+)
+
+/*
+ * Deletes a given branch.
+ */
+func HandleDeleteBranch(repoCtrl *repo.Controller) http.HandlerFunc {
+	return func(w http.ResponseWriter, r *http.Request) {
+		ctx := r.Context()
+		session, _ := request.AuthSessionFrom(ctx)
+		repoRef, err := request.GetRepoRefFromPath(r)
+		if err != nil {
+			render.TranslatedUserError(w, err)
+			return
+		}
+		branchName, err := request.GetRemainderFromPath(r)
+		if err != nil {
+			render.TranslatedUserError(w, err)
+			return
+		}
+
+		err = repoCtrl.DeleteBranch(ctx, session, repoRef, branchName)
+		if err != nil {
+			render.TranslatedUserError(w, err)
+			return
+		}
+
+		render.DeleteSuccessful(w)
+	}
+}
diff --git a/internal/api/handler/repo/delete_tag.go b/internal/api/handler/repo/delete_tag.go
new file mode 100644
index 0000000000..c09f8cbf99
--- /dev/null
+++ b/internal/api/handler/repo/delete_tag.go
@@ -0,0 +1,49 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package repo + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/repo" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +func HandleDeleteCommitTag(repoCtrl *repo.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + repoRef, err := request.GetRepoRefFromPath(r) + + if err != nil { + render.TranslatedUserError(w, err) + return + } + tagName, err := request.GetRemainderFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + err = repoCtrl.DeleteTag(ctx, session, repoRef, tagName) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.DeleteSuccessful(w) + } +} diff --git a/internal/api/handler/repo/diff.go b/internal/api/handler/repo/diff.go new file mode 100644 index 0000000000..af1c53f258 --- /dev/null +++ b/internal/api/handler/repo/diff.go @@ -0,0 +1,104 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package repo + +import ( + "net/http" + "strings" + + "github.com/harness/gitness/internal/api/controller/repo" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleDiff returns the diff between two commits, branches or tags. +func HandleDiff(repoCtrl *repo.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + path := request.GetOptionalRemainderFromPath(r) + + if strings.HasPrefix(r.Header.Get("Accept"), "text/plain") { + err := repoCtrl.RawDiff(ctx, session, repoRef, path, w) + if err != nil { + http.Error(w, err.Error(), http.StatusOK) + } + return + } + + _, includePatch := request.QueryParam(r, "include_patch") + stream, err := repoCtrl.Diff(ctx, session, repoRef, path, includePatch) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSONArrayDynamic(ctx, w, stream) + } +} + +// HandleCommitDiff returns the diff between two commits, branches or tags. 
+func HandleCommitDiff(repoCtrl *repo.Controller) http.HandlerFunc {
+	return func(w http.ResponseWriter, r *http.Request) {
+		ctx := r.Context()
+		session, _ := request.AuthSessionFrom(ctx)
+		repoRef, err := request.GetRepoRefFromPath(r)
+		if err != nil {
+			render.TranslatedUserError(w, err)
+			return
+		}
+
+		commitSHA, err := request.GetCommitSHAFromPath(r)
+		if err != nil {
+			render.TranslatedUserError(w, err)
+			return
+		}
+
+		err = repoCtrl.CommitDiff(ctx, session, repoRef, commitSHA, w)
+		if err != nil {
+			render.TranslatedUserError(w, err)
+			return
+		}
+	}
+}
+
+// HandleDiffStats shows diff statistics of two commits, branches or tags.
+func HandleDiffStats(repoCtrl *repo.Controller) http.HandlerFunc {
+	return func(w http.ResponseWriter, r *http.Request) {
+		ctx := r.Context()
+		session, _ := request.AuthSessionFrom(ctx)
+		repoRef, err := request.GetRepoRefFromPath(r)
+		if err != nil {
+			render.TranslatedUserError(w, err)
+			return
+		}
+
+		path := request.GetOptionalRemainderFromPath(r)
+
+		output, err := repoCtrl.DiffStats(ctx, session, repoRef, path)
+		if err != nil {
+			render.TranslatedUserError(w, err)
+			return
+		}
+
+		render.JSON(w, http.StatusOK, output)
+	}
+}
diff --git a/internal/api/handler/repo/find.go b/internal/api/handler/repo/find.go
new file mode 100644
index 0000000000..7c8289ea07
--- /dev/null
+++ b/internal/api/handler/repo/find.go
@@ -0,0 +1,44 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package repo
+
+import (
+	"net/http"
+
+	"github.com/harness/gitness/internal/api/controller/repo"
+	"github.com/harness/gitness/internal/api/render"
+	"github.com/harness/gitness/internal/api/request"
+)
+
+// HandleFind writes json-encoded repository information to the http response body.
+func HandleFind(repoCtrl *repo.Controller) http.HandlerFunc {
+	return func(w http.ResponseWriter, r *http.Request) {
+		ctx := r.Context()
+		session, _ := request.AuthSessionFrom(ctx)
+		repoRef, err := request.GetRepoRefFromPath(r)
+		if err != nil {
+			render.TranslatedUserError(w, err)
+			return
+		}
+
+		repo, err := repoCtrl.Find(ctx, session, repoRef)
+		if err != nil {
+			render.TranslatedUserError(w, err)
+			return
+		}
+
+		render.JSON(w, http.StatusOK, repo)
+	}
+}
diff --git a/internal/api/handler/repo/get_branch.go b/internal/api/handler/repo/get_branch.go
new file mode 100644
index 0000000000..5ecfccda5d
--- /dev/null
+++ b/internal/api/handler/repo/get_branch.go
@@ -0,0 +1,51 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and +// limitations under the License. + +package repo + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/repo" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +/* + * Gets a given branch. + */ +func HandleGetBranch(repoCtrl *repo.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + branchName, err := request.GetRemainderFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + branch, err := repoCtrl.GetBranch(ctx, session, repoRef, branchName) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, branch) + } +} diff --git a/internal/api/handler/repo/get_commit.go b/internal/api/handler/repo/get_commit.go new file mode 100644 index 0000000000..93d8f0a7f3 --- /dev/null +++ b/internal/api/handler/repo/get_commit.go @@ -0,0 +1,51 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package repo + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/repo" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +/* + * Gets a given commit. + */ +func HandleGetCommit(repoCtrl *repo.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + commitSHA, err := request.GetCommitSHAFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + commit, err := repoCtrl.GetCommit(ctx, session, repoRef, commitSHA) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, commit) + } +} diff --git a/internal/api/handler/repo/http_git.go b/internal/api/handler/repo/http_git.go new file mode 100644 index 0000000000..7e457a0fa4 --- /dev/null +++ b/internal/api/handler/repo/http_git.go @@ -0,0 +1,241 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
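+
+// This file implements the server side of the git smart HTTP protocol. GetInfoRefs
+// answers the initial ref advertisement (the requested service is read from the
+// "service" query parameter: git-upload-pack for fetch/clone, git-receive-pack for
+// push), GetUploadPack streams fetch/clone traffic, and PostReceivePack accepts
+// pushes. All three resolve the repository from the request path, check repo view
+// or push permission, and answer unauthenticated callers with a Basic auth
+// challenge. A typical smart HTTP exchange (illustrative, route prefix depends on
+// the router setup) is:
+//
+//	git clone  ->  GET  .../info/refs?service=git-upload-pack, then POST .../git-upload-pack
+//	git push   ->  GET  .../info/refs?service=git-receive-pack, then POST .../git-receive-pack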
+ +package repo + +import ( + "compress/gzip" + "errors" + "fmt" + "net/http" + "strings" + + "github.com/harness/gitness/gitrpc" + apiauth "github.com/harness/gitness/internal/api/auth" + repoctrl "github.com/harness/gitness/internal/api/controller/repo" + "github.com/harness/gitness/internal/api/request" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/internal/auth/authz" + "github.com/harness/gitness/internal/paths" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/internal/url" + "github.com/harness/gitness/types/enum" + + "github.com/rs/zerolog/hlog" + "github.com/rs/zerolog/log" +) + +type CtxRepoType string + +type GitAuthError struct { + AccountID string +} + +func (e GitAuthError) Error() string { + return fmt.Sprintf("Authentication failed for account %s", e.AccountID) +} + +func GetInfoRefs(client gitrpc.Interface, repoStore store.RepoStore, authorizer authz.Authorizer) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + http.Error(w, usererror.Translate(err).Error(), http.StatusInternalServerError) + return + } + + repo, err := repoStore.FindByRef(ctx, repoRef) + if err != nil { + http.Error(w, err.Error(), http.StatusNotFound) + return + } + + accountID, _, err := paths.DisectRoot(repo.Path) + if err != nil { + return + } + + if err = apiauth.CheckRepo(ctx, authorizer, session, repo, enum.PermissionRepoView, true); err != nil { + if errors.Is(err, apiauth.ErrNotAuthenticated) { + basicAuth(w, accountID) + return + } + if errors.Is(err, apiauth.ErrNotAuthorized) { + http.Error(w, err.Error(), http.StatusForbidden) + return + } + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + + // Clients MUST NOT reuse or revalidate a cached response. + // Servers MUST include sufficient Cache-Control headers to prevent caching of the response. 
+ // https://git-scm.com/docs/http-protocol + setHeaderNoCache(w) + + service := getServiceType(r) + log.Debug().Msgf("in GetInfoRefs: git service: %v", service) + w.Header().Set("Content-Type", fmt.Sprintf("application/x-git-%s-advertisement", service)) + + if err = client.GetInfoRefs(ctx, w, &gitrpc.InfoRefsParams{ + ReadParams: repoctrl.CreateRPCReadParams(repo), + Service: service, + Options: nil, + GitProtocol: r.Header.Get("Git-Protocol"), + }); err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + log.Err(err).Msgf("in GetInfoRefs: error occurred in service %v", service) + return + } + w.WriteHeader(http.StatusOK) + } +} + +func GetUploadPack(client gitrpc.Interface, urlProvider *url.Provider, + repoStore store.RepoStore, authorizer authz.Authorizer) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + const service = "upload-pack" + + if err := serviceRPC(w, r, client, urlProvider, repoStore, authorizer, service, false, + enum.PermissionRepoView, true); err != nil { + if errors.Is(err, apiauth.ErrNotAuthorized) { + http.Error(w, err.Error(), http.StatusForbidden) + return + } + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + } +} + +func PostReceivePack(client gitrpc.Interface, urlProvider *url.Provider, + repoStore store.RepoStore, authorizer authz.Authorizer) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + const service = "receive-pack" + if err := serviceRPC(w, r, client, urlProvider, repoStore, authorizer, service, true, + enum.PermissionRepoPush, false); err != nil { + var authError *GitAuthError + if errors.As(err, &authError) { + basicAuth(w, authError.AccountID) + return + } + if errors.Is(err, apiauth.ErrNotAuthorized) { + http.Error(w, err.Error(), http.StatusForbidden) + return + } + + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + } +} + +func serviceRPC( + w http.ResponseWriter, + r *http.Request, + client gitrpc.Interface, + urlProvider *url.Provider, + repoStore store.RepoStore, + authorizer authz.Authorizer, + service string, + isWriteOperation bool, + permission enum.Permission, + orPublic bool, +) error { + ctx := r.Context() + log := hlog.FromRequest(r) + defer func() { + if err := r.Body.Close(); err != nil { + log.Err(err).Msgf("serviceRPC: Close: %v", err) + } + }() + + session, _ := request.AuthSessionFrom(ctx) + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + return err + } + + repo, err := repoStore.FindByRef(ctx, repoRef) + if err != nil { + return err + } + + accountID, _, err := paths.DisectRoot(repo.Path) + if err != nil { + return err + } + + if err = apiauth.CheckRepo(ctx, authorizer, session, repo, permission, orPublic); err != nil { + if errors.Is(err, apiauth.ErrNotAuthenticated) { + return &GitAuthError{ + AccountID: accountID, + } + } + return err + } + + w.Header().Set("Content-Type", fmt.Sprintf("application/x-git-%s-result", service)) + + reqBody := r.Body + + // Handle GZIP. 
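+	// Git clients may gzip-compress the body of upload-pack/receive-pack POST requests
+	// (Content-Encoding: gzip), so the body is transparently wrapped in a gzip reader
+	// before being forwarded to the RPC service.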
+ if r.Header.Get("Content-Encoding") == "gzip" { + reqBody, err = gzip.NewReader(reqBody) + if err != nil { + return err + } + } + params := &gitrpc.ServicePackParams{ + Service: service, + Data: reqBody, + Options: nil, + GitProtocol: r.Header.Get("Git-Protocol"), + } + + // setup read/writeparams depending on whether it's a write operation + if isWriteOperation { + var writeParams gitrpc.WriteParams + writeParams, err = repoctrl.CreateRPCWriteParams(ctx, urlProvider, session, repo) + if err != nil { + return fmt.Errorf("failed to create RPC write params: %w", err) + } + params.WriteParams = &writeParams + } else { + readParams := repoctrl.CreateRPCReadParams(repo) + params.ReadParams = &readParams + } + + return client.ServicePack(ctx, w, params) +} + +func setHeaderNoCache(w http.ResponseWriter) { + w.Header().Set("Expires", "Fri, 01 Jan 1980 00:00:00 GMT") + w.Header().Set("Pragma", "no-cache") + w.Header().Set("Cache-Control", "no-cache, max-age=0, must-revalidate") +} + +func getServiceType(r *http.Request) string { + serviceType := r.URL.Query().Get("service") + if !strings.HasPrefix(serviceType, "git-") { + return "" + } + return strings.Replace(serviceType, "git-", "", 1) +} + +func basicAuth(w http.ResponseWriter, accountID string) { + w.Header().Add("WWW-Authenticate", fmt.Sprintf(`Basic realm="%s"`, accountID)) + w.WriteHeader(http.StatusUnauthorized) +} diff --git a/internal/api/handler/repo/import.go b/internal/api/handler/repo/import.go new file mode 100644 index 0000000000..91022f5bb3 --- /dev/null +++ b/internal/api/handler/repo/import.go @@ -0,0 +1,46 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package repo + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/controller/repo" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +func HandleImport(repoCtrl *repo.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + in := new(repo.ImportInput) + err := json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid Request Body: %s.", err) + return + } + + repo, err := repoCtrl.Import(ctx, session, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusCreated, repo) + } +} diff --git a/internal/api/handler/repo/import_cancel.go b/internal/api/handler/repo/import_cancel.go new file mode 100644 index 0000000000..dd87e8f560 --- /dev/null +++ b/internal/api/handler/repo/import_cancel.go @@ -0,0 +1,43 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package repo + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/repo" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +func HandleImportCancel(repoCtrl *repo.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + err = repoCtrl.ImportCancel(ctx, session, repoRef) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.DeleteSuccessful(w) + } +} diff --git a/internal/api/handler/repo/import_progress.go b/internal/api/handler/repo/import_progress.go new file mode 100644 index 0000000000..9005eabd11 --- /dev/null +++ b/internal/api/handler/repo/import_progress.go @@ -0,0 +1,43 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package repo + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/repo" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +func HandleImportProgress(repoCtrl *repo.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + progress, err := repoCtrl.ImportProgress(ctx, session, repoRef) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, progress) + } +} diff --git a/internal/api/handler/repo/list_branches.go b/internal/api/handler/repo/list_branches.go new file mode 100644 index 0000000000..c5852556f3 --- /dev/null +++ b/internal/api/handler/repo/list_branches.go @@ -0,0 +1,57 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
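+
+// Note on pagination: the handler below reports the last page by checking whether
+// fewer branches than the requested page size were returned; as the in-code TODO
+// points out, this misdetects the case where the final page is exactly full, so an
+// extra, empty page is advertised before the listing ends.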
+ +package repo + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/repo" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +/* + * Writes json-encoded branch information to the http response body. + */ +func HandleListBranches(repoCtrl *repo.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + includeCommit, err := request.GetIncludeCommitFromQueryOrDefault(r, false) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + filter := request.ParseBranchFilter(r) + + branches, err := repoCtrl.ListBranches(ctx, session, repoRef, includeCommit, filter) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + // TODO: get last page indicator explicitly - current check is wrong in case len % pageSize == 0 + isLastPage := len(branches) < filter.Size + render.PaginationNoTotal(r, w, filter.Page, filter.Size, isLastPage) + render.JSON(w, http.StatusOK, branches) + } +} diff --git a/internal/api/handler/repo/list_commit_tags.go b/internal/api/handler/repo/list_commit_tags.go new file mode 100644 index 0000000000..7c95cdbc83 --- /dev/null +++ b/internal/api/handler/repo/list_commit_tags.go @@ -0,0 +1,57 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package repo + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/repo" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +/* + * Writes json-encoded commit tag information to the http response body. + */ +func HandleListCommitTags(repoCtrl *repo.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + includeCommit, err := request.GetIncludeCommitFromQueryOrDefault(r, false) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + filter := request.ParseTagFilter(r) + + tags, err := repoCtrl.ListCommitTags(ctx, session, repoRef, includeCommit, filter) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + // TODO: get last page indicator explicitly - current check is wrong in case len % pageSize == 0 + isLastPage := len(tags) < filter.Size + render.PaginationNoTotal(r, w, filter.Page, filter.Size, isLastPage) + render.JSON(w, http.StatusOK, tags) + } +} diff --git a/internal/api/handler/repo/list_commits.go b/internal/api/handler/repo/list_commits.go new file mode 100644 index 0000000000..37736b8224 --- /dev/null +++ b/internal/api/handler/repo/list_commits.go @@ -0,0 +1,57 @@ +// Copyright 2023 Harness, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package repo + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/repo" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +/* + * Writes json-encoded commit information to the http response body. + */ +func HandleListCommits(repoCtrl *repo.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + gitRef := request.GetGitRefFromQueryOrDefault(r, "") + + filter, err := request.ParseCommitFilter(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + list, err := repoCtrl.ListCommits(ctx, session, repoRef, gitRef, filter) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + // TODO: get last page indicator explicitly - current check is wrong in case len % limit == 0 + isLastPage := len(list.Commits) < filter.Limit + render.PaginationNoTotal(r, w, filter.Page, filter.Limit, isLastPage) + render.JSON(w, http.StatusOK, list) + } +} diff --git a/internal/api/handler/repo/list_pipelines.go b/internal/api/handler/repo/list_pipelines.go new file mode 100644 index 0000000000..a418909480 --- /dev/null +++ b/internal/api/handler/repo/list_pipelines.go @@ -0,0 +1,46 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package repo + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/repo" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +func HandleListPipelines(repoCtrl *repo.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + filter := request.ParseListQueryFilterFromRequest(r) + latest := request.GetLatestFromPath(r) + repos, totalCount, err := repoCtrl.ListPipelines(ctx, session, repoRef, latest, filter) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.Pagination(r, w, filter.Page, filter.Size, int(totalCount)) + render.JSON(w, http.StatusOK, repos) + } +} diff --git a/internal/api/handler/repo/list_service_accounts.go b/internal/api/handler/repo/list_service_accounts.go new file mode 100644 index 0000000000..10919aa66f --- /dev/null +++ b/internal/api/handler/repo/list_service_accounts.go @@ -0,0 +1,47 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package repo + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/repo" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +/* + * Writes json-encoded service account information to the http response body. + */ +func HandleListServiceAccounts(repoCtrl *repo.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + sas, err := repoCtrl.ListServiceAccounts(ctx, session, repoRef) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + // TODO: implement pagination - or should we block that many service accounts in the first place. + render.JSON(w, http.StatusOK, sas) + } +} diff --git a/internal/api/handler/repo/merge_check.go b/internal/api/handler/repo/merge_check.go new file mode 100644 index 0000000000..e0c233c59f --- /dev/null +++ b/internal/api/handler/repo/merge_check.go @@ -0,0 +1,46 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
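+
+// HandleMergeCheck (below) reads the remainder of the request path as the ref range
+// to test and reports whether the referenced revisions can be merged cleanly; the
+// concrete format of the range segment is interpreted by the controller.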
+ +package repo + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/repo" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleMergeCheck checks if two branches are mergeable. +func HandleMergeCheck(repoCtrl *repo.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + path := request.GetOptionalRemainderFromPath(r) + + output, err := repoCtrl.MergeCheck(ctx, session, repoRef, path) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, output) + } +} diff --git a/internal/api/handler/repo/move.go b/internal/api/handler/repo/move.go new file mode 100644 index 0000000000..4769d86b62 --- /dev/null +++ b/internal/api/handler/repo/move.go @@ -0,0 +1,52 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package repo + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/controller/repo" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleMove moves an existing repo. +func HandleMove(repoCtrl *repo.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + in := new(repo.MoveInput) + err = json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid request body: %s.", err) + return + } + + repo, err := repoCtrl.Move(ctx, session, repoRef, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, repo) + } +} diff --git a/internal/api/handler/repo/pipeline_generate.go b/internal/api/handler/repo/pipeline_generate.go new file mode 100644 index 0000000000..53c309ef4b --- /dev/null +++ b/internal/api/handler/repo/pipeline_generate.go @@ -0,0 +1,45 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package repo + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/repo" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +func HandlePipelineGenerate(repoCtrl *repo.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + yaml, err := repoCtrl.PipelineGenerate(ctx, session, repoRef) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + w.Header().Set("Content-Type", "text/yaml; charset=utf-8") + w.WriteHeader(http.StatusOK) + _, _ = w.Write(yaml) + } +} diff --git a/internal/api/handler/repo/raw.go b/internal/api/handler/repo/raw.go new file mode 100644 index 0000000000..a84c4a8352 --- /dev/null +++ b/internal/api/handler/repo/raw.go @@ -0,0 +1,52 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package repo + +import ( + "fmt" + "net/http" + + "github.com/harness/gitness/internal/api/controller/repo" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleRaw returns the raw content of a file. +func HandleRaw(repoCtrl *repo.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + + session, _ := request.AuthSessionFrom(ctx) + + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + gitRef := request.GetGitRefFromQueryOrDefault(r, "") + path := request.GetOptionalRemainderFromPath(r) + + dataReader, dataLength, err := repoCtrl.Raw(ctx, session, repoRef, gitRef, path) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + w.Header().Add("Content-Length", fmt.Sprint(dataLength)) + + render.Reader(ctx, w, http.StatusOK, dataReader) + } +} diff --git a/internal/api/handler/repo/update.go b/internal/api/handler/repo/update.go new file mode 100644 index 0000000000..5c41abda64 --- /dev/null +++ b/internal/api/handler/repo/update.go @@ -0,0 +1,54 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package repo + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/controller/repo" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +/* + * Updates an existing repository. + */ +func HandleUpdate(repoCtrl *repo.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + in := new(repo.UpdateInput) + err = json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid request body: %s.", err) + return + } + + repo, err := repoCtrl.Update(ctx, session, repoRef, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, repo) + } +} diff --git a/internal/api/handler/repo/upload_file.go b/internal/api/handler/repo/upload_file.go new file mode 100644 index 0000000000..616124af20 --- /dev/null +++ b/internal/api/handler/repo/upload_file.go @@ -0,0 +1,15 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package repo diff --git a/internal/api/handler/resource/resource.go b/internal/api/handler/resource/resource.go new file mode 100644 index 0000000000..b605952ac7 --- /dev/null +++ b/internal/api/handler/resource/resource.go @@ -0,0 +1,46 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
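HandleMove and HandleUpdate above decode the request body with encoding/json's default behaviour, which silently ignores unknown fields. If stricter input validation were ever wanted, the standard library offers a decoder-level switch; the generic helper below is only a sketch of that alternative, not something this patch introduces.

    // Illustrative only: stricter body decoding than the handlers above perform.
    package strictdecode

    import (
        "encoding/json"
        "net/http"
    )

    // DecodeStrict mirrors the json.NewDecoder(r.Body).Decode(in) calls above,
    // but additionally rejects bodies that contain unexpected keys.
    func DecodeStrict[T any](r *http.Request) (*T, error) {
        in := new(T)
        dec := json.NewDecoder(r.Body)
        dec.DisallowUnknownFields()
        if err := dec.Decode(in); err != nil {
            return nil, err
        }
        return in, nil
    }
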
+ +package resource + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/resources" +) + +func HandleGitIgnore() http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + files, err := resources.GitIgnores() + if err != nil { + render.ErrorMessagef(w, http.StatusInternalServerError, "error loading gitignore files: %v", err) + return + } + render.JSON(w, http.StatusOK, files) + } +} + +func HandleLicence() http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + response, err := resources.Licenses() + if err != nil { + render.ErrorMessagef(w, http.StatusInternalServerError, "error loading licence file: %v", err) + return + } + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + _, _ = w.Write(response) + } +} diff --git a/internal/api/handler/secret/create.go b/internal/api/handler/secret/create.go new file mode 100644 index 0000000000..89269eb59f --- /dev/null +++ b/internal/api/handler/secret/create.go @@ -0,0 +1,47 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package secret + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/controller/secret" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleCreate returns an http.HandlerFunc that creates a new secret. +func HandleCreate(secretCtrl *secret.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + in := new(secret.CreateInput) + err := json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid Request Body: %s.", err) + return + } + + secret, err := secretCtrl.Create(ctx, session, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusCreated, secret.CopyWithoutData()) + } +} diff --git a/internal/api/handler/secret/delete.go b/internal/api/handler/secret/delete.go new file mode 100644 index 0000000000..5aaef411ef --- /dev/null +++ b/internal/api/handler/secret/delete.go @@ -0,0 +1,49 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
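HandleGitIgnore and HandleLicence above simply surface whatever resources.GitIgnores() and resources.Licenses() return. The resources package itself is not part of this hunk; one plausible shape for it, assuming the templates are embedded into the binary with go:embed, is sketched below. The directory layout and file names are assumptions for illustration only.

    // Hypothetical sketch of a resources package backed by go:embed;
    // the real gitness package may be organised differently.
    package resources

    import (
        "embed"
        "path/filepath"
    )

    //go:embed gitignore/*.gitignore
    var gitignores embed.FS

    //go:embed license/index.json
    var licenses embed.FS

    // GitIgnores returns the names of the embedded gitignore templates.
    func GitIgnores() ([]string, error) {
        entries, err := gitignores.ReadDir("gitignore")
        if err != nil {
            return nil, err
        }
        names := make([]string, 0, len(entries))
        for _, e := range entries {
            name := e.Name()
            names = append(names, name[:len(name)-len(filepath.Ext(name))])
        }
        return names, nil
    }

    // Licenses returns pre-marshalled JSON, which is why HandleLicence above
    // writes the bytes directly instead of re-encoding them.
    func Licenses() ([]byte, error) {
        return licenses.ReadFile("license/index.json")
    }
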
+ +package secret + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/secret" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" + "github.com/harness/gitness/internal/paths" +) + +func HandleDelete(secretCtrl *secret.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + secretRef, err := request.GetSecretRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + spaceRef, secretUID, err := paths.DisectLeaf(secretRef) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + err = secretCtrl.Delete(ctx, session, spaceRef, secretUID) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.DeleteSuccessful(w) + } +} diff --git a/internal/api/handler/secret/find.go b/internal/api/handler/secret/find.go new file mode 100644 index 0000000000..9ef5580cb1 --- /dev/null +++ b/internal/api/handler/secret/find.go @@ -0,0 +1,50 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package secret + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/secret" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" + "github.com/harness/gitness/internal/paths" +) + +// HandleFind finds a secret from the database. +func HandleFind(secretCtrl *secret.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + secretRef, err := request.GetSecretRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + spaceRef, secretUID, err := paths.DisectLeaf(secretRef) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + secret, err := secretCtrl.Find(ctx, session, spaceRef, secretUID) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, secret.CopyWithoutData()) + } +} diff --git a/internal/api/handler/secret/update.go b/internal/api/handler/secret/update.go new file mode 100644 index 0000000000..161d0765b5 --- /dev/null +++ b/internal/api/handler/secret/update.go @@ -0,0 +1,57 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
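The secret handlers above address a secret through a single ref and split it with paths.DisectLeaf into the parent space and the secret UID. That helper is defined elsewhere in the tree; judging purely from how it is called here, it behaves roughly like the stand-in below. This is an inference, not the actual gitness implementation.

    // Hypothetical stand-in for paths.DisectLeaf, inferred from its usage above.
    package paths

    import (
        "errors"
        "strings"
    )

    // DisectLeaf splits "parent/child/leaf" into ("parent/child", "leaf").
    func DisectLeaf(ref string) (string, string, error) {
        ref = strings.Trim(ref, "/")
        i := strings.LastIndex(ref, "/")
        if i < 0 {
            return "", "", errors.New("ref has no parent segment")
        }
        return ref[:i], ref[i+1:], nil
    }
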
+ +package secret + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/controller/secret" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" + "github.com/harness/gitness/internal/paths" +) + +func HandleUpdate(secretCtrl *secret.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + in := new(secret.UpdateInput) + err := json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid Request Body: %s.", err) + return + } + + secretRef, err := request.GetSecretRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + spaceRef, secretUID, err := paths.DisectLeaf(secretRef) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + secret, err := secretCtrl.Update(ctx, session, spaceRef, secretUID, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, secret.CopyWithoutData()) + } +} diff --git a/internal/api/handler/serviceaccount/create.go b/internal/api/handler/serviceaccount/create.go new file mode 100644 index 0000000000..f3f2fff636 --- /dev/null +++ b/internal/api/handler/serviceaccount/create.go @@ -0,0 +1,49 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package serviceaccount + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/controller/serviceaccount" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +/* + * Creates a new service account and writes json-encoded service account to the http response body. + */ +func HandleCreate(saCtrl *serviceaccount.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + in := new(serviceaccount.CreateInput) + err := json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid request body: %s.", err) + return + } + + sa, err := saCtrl.Create(ctx, session, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusCreated, sa) + } +} diff --git a/internal/api/handler/serviceaccount/create_token.go b/internal/api/handler/serviceaccount/create_token.go new file mode 100644 index 0000000000..9ec4910e2d --- /dev/null +++ b/internal/api/handler/serviceaccount/create_token.go @@ -0,0 +1,53 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package serviceaccount + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/controller/serviceaccount" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleCreateToken returns an http.HandlerFunc that creates a new SAT and +// writes a json-encoded TokenResponse to the http.Response body. +func HandleCreateToken(saCrl *serviceaccount.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + saUID, err := request.GetServiceAccountUIDFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + in := new(serviceaccount.CreateTokenInput) + err = json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid request body: %s.", err) + return + } + + tokenResponse, err := saCrl.CreateToken(ctx, session, saUID, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusCreated, tokenResponse) + } +} diff --git a/internal/api/handler/serviceaccount/delete.go b/internal/api/handler/serviceaccount/delete.go new file mode 100644 index 0000000000..1d1b9ab8e1 --- /dev/null +++ b/internal/api/handler/serviceaccount/delete.go @@ -0,0 +1,46 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package serviceaccount + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/serviceaccount" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +/* + * Deletes a service account. + */ +func HandleDelete(saCrl *serviceaccount.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + saUID, err := request.GetServiceAccountUIDFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + err = saCrl.Delete(ctx, session, saUID) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.DeleteSuccessful(w) + } +} diff --git a/internal/api/handler/serviceaccount/delete_token.go b/internal/api/handler/serviceaccount/delete_token.go new file mode 100644 index 0000000000..5c310142d3 --- /dev/null +++ b/internal/api/handler/serviceaccount/delete_token.go @@ -0,0 +1,50 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package serviceaccount + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/serviceaccount" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleDeleteToken returns an http.HandlerFunc that +// deletes a SAT token of a service account. +func HandleDeleteToken(saCrl *serviceaccount.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + saUID, err := request.GetServiceAccountUIDFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + tokenUID, err := request.GetTokenUIDFromPath(r) + if err != nil { + render.BadRequest(w) + return + } + + err = saCrl.DeleteToken(ctx, session, saUID, tokenUID) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.DeleteSuccessful(w) + } +} diff --git a/internal/api/handler/serviceaccount/find.go b/internal/api/handler/serviceaccount/find.go new file mode 100644 index 0000000000..0cc7976f74 --- /dev/null +++ b/internal/api/handler/serviceaccount/find.go @@ -0,0 +1,45 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package serviceaccount + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/serviceaccount" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleFind returns an http.HandlerFunc that writes json-encoded +// service account information to the http response body. +func HandleFind(saCrl *serviceaccount.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + saUID, err := request.GetServiceAccountUIDFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + sa, err := saCrl.Find(ctx, session, saUID) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, sa) + } +} diff --git a/internal/api/handler/serviceaccount/list_tokens.go b/internal/api/handler/serviceaccount/list_tokens.go new file mode 100644 index 0000000000..a99a7b1b89 --- /dev/null +++ b/internal/api/handler/serviceaccount/list_tokens.go @@ -0,0 +1,44 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package serviceaccount + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/serviceaccount" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleListTokens returns an http.HandlerFunc that +// writes a json-encoded list of Tokens to the http.Response body. +func HandleListTokens(saCrl *serviceaccount.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + saUID, err := request.GetServiceAccountUIDFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + res, err := saCrl.ListTokens(ctx, session, saUID) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, res) + } +} diff --git a/internal/api/handler/space/create.go b/internal/api/handler/space/create.go new file mode 100644 index 0000000000..bb0e3d28df --- /dev/null +++ b/internal/api/handler/space/create.go @@ -0,0 +1,47 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package space + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/controller/space" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleCreate returns an http.HandlerFunc that creates a new space. +func HandleCreate(spaceCtrl *space.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + in := new(space.CreateInput) + err := json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid request body: %s.", err) + return + } + + space, err := spaceCtrl.Create(ctx, session, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusCreated, space) + } +} diff --git a/internal/api/handler/space/delete.go b/internal/api/handler/space/delete.go new file mode 100644 index 0000000000..91e3b4721b --- /dev/null +++ b/internal/api/handler/space/delete.go @@ -0,0 +1,44 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
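Every handler in this patch begins by pulling the caller's session out of the request context with request.AuthSessionFrom(ctx), which implies an authentication middleware has already validated the request and stored the session there. That middleware is not shown here; the snippet below is a generic sketch of the context-key pattern behind such a lookup, with all type and function names hypothetical rather than the actual gitness ones.

    // Generic sketch of the store/lookup pattern implied by AuthSessionFrom.
    package authctx

    import (
        "context"
        "net/http"
    )

    type Session struct {
        PrincipalUID string
    }

    type ctxKey struct{}

    // WithAuthSession is what an auth middleware would call after validating credentials.
    func WithAuthSession(ctx context.Context, s *Session) context.Context {
        return context.WithValue(ctx, ctxKey{}, s)
    }

    // AuthSessionFrom mirrors the lookup the handlers above perform.
    func AuthSessionFrom(ctx context.Context) (*Session, bool) {
        s, ok := ctx.Value(ctxKey{}).(*Session)
        return s, ok
    }

    // Middleware attaches the session for any downstream handler.
    func Middleware(validate func(*http.Request) (*Session, bool)) func(http.Handler) http.Handler {
        return func(next http.Handler) http.Handler {
            return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
                if s, ok := validate(r); ok {
                    r = r.WithContext(WithAuthSession(r.Context(), s))
                }
                next.ServeHTTP(w, r)
            })
        }
    }
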
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package space + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/space" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleDelete handles the delete space HTTP API. +func HandleDelete(spaceCtrl *space.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + spaceRef, err := request.GetSpaceRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + err = spaceCtrl.Delete(ctx, session, spaceRef) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.DeleteSuccessful(w) + } +} diff --git a/internal/api/handler/space/events.go b/internal/api/handler/space/events.go new file mode 100644 index 0000000000..0903a4af01 --- /dev/null +++ b/internal/api/handler/space/events.go @@ -0,0 +1,63 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package space + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/space" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" + "github.com/harness/gitness/internal/writer" + + "github.com/rs/zerolog/log" +) + +// HandleEvents returns an http.HandlerFunc that watches for +// events on a space +func HandleEvents(spaceCtrl *space.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + spaceRef, err := request.GetSpaceRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + h := w.Header() + h.Set("Content-Type", "text/event-stream") + h.Set("Cache-Control", "no-cache") + h.Set("Connection", "keep-alive") + h.Set("X-Accel-Buffering", "no") + h.Set("Access-Control-Allow-Origin", "*") + + f, ok := w.(http.Flusher) + if !ok { + log.Error().Msg("http writer type assertion failed") + render.InternalError(w) + return + } + + writer := writer.NewWriterFlusher(w, f) + + err = spaceCtrl.Events(ctx, session, spaceRef, writer) + if err != nil { + render.TranslatedUserError(w, err) + return + } + } +} diff --git a/internal/api/handler/space/export.go b/internal/api/handler/space/export.go new file mode 100644 index 0000000000..a2e36dbd3d --- /dev/null +++ b/internal/api/handler/space/export.go @@ -0,0 +1,52 @@ +// Copyright 2023 Harness, Inc. 
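HandleEvents above turns the response into a server-sent-events stream: it sets text/event-stream headers, disables buffering, and hands the controller a flushable writer. A consumer reads that stream line by line; the sketch below shows a minimal reader. The URL is a placeholder and depends on how the route is mounted.

    // Illustrative SSE consumer for the space events stream; URL is an assumption.
    package main

    import (
        "bufio"
        "fmt"
        "net/http"
        "strings"
    )

    func main() {
        resp, err := http.Get("http://localhost:3000/api/v1/spaces/my-space/+/events")
        if err != nil {
            fmt.Println(err)
            return
        }
        defer resp.Body.Close()

        // Server-sent events arrive as blank-line separated blocks of "field: value" lines.
        scanner := bufio.NewScanner(resp.Body)
        for scanner.Scan() {
            line := scanner.Text()
            if strings.HasPrefix(line, "data:") {
                fmt.Println("event payload:", strings.TrimSpace(strings.TrimPrefix(line, "data:")))
            }
        }
        if err := scanner.Err(); err != nil {
            fmt.Println("stream closed:", err)
        }
    }
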
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package space + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/controller/space" + + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +func HandleExport(spaceCtrl *space.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + spaceRef, err := request.GetSpaceRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + in := new(space.ExportInput) + err = json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid Request Body: %s.", err) + return + } + + err = spaceCtrl.Export(ctx, session, spaceRef, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + w.WriteHeader(http.StatusAccepted) + } +} diff --git a/internal/api/handler/space/export_progress.go b/internal/api/handler/space/export_progress.go new file mode 100644 index 0000000000..12e22a3687 --- /dev/null +++ b/internal/api/handler/space/export_progress.go @@ -0,0 +1,44 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package space + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/space" + + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +func HandleExportProgress(spaceCtrl *space.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + spaceRef, err := request.GetSpaceRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + progress, err := spaceCtrl.ExportProgress(ctx, session, spaceRef) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, progress) + } +} diff --git a/internal/api/handler/space/find.go b/internal/api/handler/space/find.go new file mode 100644 index 0000000000..e5524904c4 --- /dev/null +++ b/internal/api/handler/space/find.go @@ -0,0 +1,46 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package space + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/space" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +/* + * Writes json-encoded space information to the http response body. + */ +func HandleFind(spaceCtrl *space.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + spaceRef, err := request.GetSpaceRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + space, err := spaceCtrl.Find(ctx, session, spaceRef) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, space) + } +} diff --git a/internal/api/handler/space/import.go b/internal/api/handler/space/import.go new file mode 100644 index 0000000000..2add339898 --- /dev/null +++ b/internal/api/handler/space/import.go @@ -0,0 +1,46 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package space + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/controller/space" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +func HandleImport(spaceCtrl *space.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + in := new(space.ImportInput) + err := json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid Request Body: %s.", err) + return + } + + space, err := spaceCtrl.Import(ctx, session, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusCreated, space) + } +} diff --git a/internal/api/handler/space/list.go b/internal/api/handler/space/list.go new file mode 100644 index 0000000000..5fb5a4af3f --- /dev/null +++ b/internal/api/handler/space/list.go @@ -0,0 +1,51 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package space + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/space" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" + "github.com/harness/gitness/types/enum" +) + +// HandleListSpaces writes json-encoded list of child spaces in the request body. +func HandleListSpaces(spaceCtrl *space.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + spaceRef, err := request.GetSpaceRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + spaceFilter := request.ParseSpaceFilter(r) + if spaceFilter.Order == enum.OrderDefault { + spaceFilter.Order = enum.OrderAsc + } + + spaces, totalCount, err := spaceCtrl.ListSpaces(ctx, session, spaceRef, spaceFilter) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.Pagination(r, w, spaceFilter.Page, spaceFilter.Size, int(totalCount)) + render.JSON(w, http.StatusOK, spaces) + } +} diff --git a/internal/api/handler/space/list_connectors.go b/internal/api/handler/space/list_connectors.go new file mode 100644 index 0000000000..2a84fc0995 --- /dev/null +++ b/internal/api/handler/space/list_connectors.go @@ -0,0 +1,45 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package space + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/space" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +func HandleListConnectors(spaceCtrl *space.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + spaceRef, err := request.GetSpaceRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + filter := request.ParseListQueryFilterFromRequest(r) + ret, totalCount, err := spaceCtrl.ListConnectors(ctx, session, spaceRef, filter) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.Pagination(r, w, filter.Page, filter.Size, int(totalCount)) + render.JSON(w, http.StatusOK, ret) + } +} diff --git a/internal/api/handler/space/list_repos.go b/internal/api/handler/space/list_repos.go new file mode 100644 index 0000000000..976295dddf --- /dev/null +++ b/internal/api/handler/space/list_repos.go @@ -0,0 +1,51 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
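The list handlers above (HandleListSpaces, HandleListConnectors, and the ones that follow) parse paging and ordering filters from the query string and echo pagination metadata back through render.Pagination. A client typically assembles that query with net/url, as in the sketch below. The parameter names ("query", "page", "limit", "order"), the response header name, and the URL are assumptions for illustration, not read from this patch.

    // Illustrative list request; parameter and header names are assumptions.
    package main

    import (
        "fmt"
        "net/http"
        "net/url"
    )

    func main() {
        q := url.Values{}
        q.Set("query", "backend") // optional search term
        q.Set("page", "1")
        q.Set("limit", "50")
        q.Set("order", "asc")

        // Hypothetical endpoint for listing repositories under a space.
        u := "http://localhost:3000/api/v1/spaces/my-space/+/repos?" + q.Encode()

        resp, err := http.Get(u)
        if err != nil {
            fmt.Println(err)
            return
        }
        defer resp.Body.Close()

        fmt.Println("status:", resp.Status)
        // Pagination totals are surfaced via response headers by render.Pagination.
        fmt.Println("total:", resp.Header.Get("X-Total"))
    }
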
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package space + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/space" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" + "github.com/harness/gitness/types/enum" +) + +// HandleListRepos writes json-encoded list of repos in the request body. +func HandleListRepos(spaceCtrl *space.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + spaceRef, err := request.GetSpaceRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + filter := request.ParseRepoFilter(r) + if filter.Order == enum.OrderDefault { + filter.Order = enum.OrderAsc + } + + repos, totalCount, err := spaceCtrl.ListRepositories(ctx, session, spaceRef, filter) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.Pagination(r, w, filter.Page, filter.Size, int(totalCount)) + render.JSON(w, http.StatusOK, repos) + } +} diff --git a/internal/api/handler/space/list_secrets.go b/internal/api/handler/space/list_secrets.go new file mode 100644 index 0000000000..969a9dd0b7 --- /dev/null +++ b/internal/api/handler/space/list_secrets.go @@ -0,0 +1,52 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package space + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/space" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" + "github.com/harness/gitness/types" +) + +func HandleListSecrets(spaceCtrl *space.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + spaceRef, err := request.GetSpaceRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + filter := request.ParseListQueryFilterFromRequest(r) + ret, totalCount, err := spaceCtrl.ListSecrets(ctx, session, spaceRef, filter) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + // Strip out data in the returned value + secrets := []types.Secret{} + for _, s := range ret { + secrets = append(secrets, *s.CopyWithoutData()) + } + + render.Pagination(r, w, filter.Page, filter.Size, int(totalCount)) + render.JSON(w, http.StatusOK, secrets) + } +} diff --git a/internal/api/handler/space/list_service_accounts.go b/internal/api/handler/space/list_service_accounts.go new file mode 100644 index 0000000000..981d306443 --- /dev/null +++ b/internal/api/handler/space/list_service_accounts.go @@ -0,0 +1,45 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package space + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/space" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleListServiceAccounts Writes json-encoded service account information to the http response body. +func HandleListServiceAccounts(spaceCtrl *space.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + spaceRef, err := request.GetSpaceRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + sas, err := spaceCtrl.ListServiceAccounts(ctx, session, spaceRef) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + // TODO: do we need pagination? we should block that many service accounts in the first place. + render.JSON(w, http.StatusOK, sas) + } +} diff --git a/internal/api/handler/space/list_templates.go b/internal/api/handler/space/list_templates.go new file mode 100644 index 0000000000..64d4baca72 --- /dev/null +++ b/internal/api/handler/space/list_templates.go @@ -0,0 +1,45 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
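HandleListSecrets above deliberately copies each secret through CopyWithoutData before rendering, so the secret material itself never appears in list responses. The gitness types.Secret definition is not part of this patch; the fragment below only illustrates the pattern with hypothetical fields.

    // Hypothetical illustration of the CopyWithoutData pattern used above;
    // the real types.Secret has different fields.
    package types

    type Secret struct {
        UID  string `json:"uid"`
        Data string `json:"data,omitempty"`
    }

    // CopyWithoutData returns a copy that is safe to render in API responses.
    func (s *Secret) CopyWithoutData() *Secret {
        c := *s
        c.Data = "" // strip the sensitive payload before rendering
        return &c
    }
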
+// See the License for the specific language governing permissions and +// limitations under the License. + +package space + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/space" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +func HandleListTemplates(spaceCtrl *space.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + spaceRef, err := request.GetSpaceRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + filter := request.ParseListQueryFilterFromRequest(r) + ret, totalCount, err := spaceCtrl.ListTemplates(ctx, session, spaceRef, filter) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.Pagination(r, w, filter.Page, filter.Size, int(totalCount)) + render.JSON(w, http.StatusOK, ret) + } +} diff --git a/internal/api/handler/space/membership_add.go b/internal/api/handler/space/membership_add.go new file mode 100644 index 0000000000..d3384d75f6 --- /dev/null +++ b/internal/api/handler/space/membership_add.go @@ -0,0 +1,53 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package space + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/controller/space" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleMembershipAdd handles API that adds a new membership to a space. +func HandleMembershipAdd(spaceCtrl *space.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + spaceRef, err := request.GetSpaceRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + in := new(space.MembershipAddInput) + err = json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid Request Body: %s.", err) + return + } + + memberInfo, err := spaceCtrl.MembershipAdd(ctx, session, spaceRef, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusCreated, memberInfo) + } +} diff --git a/internal/api/handler/space/membership_delete.go b/internal/api/handler/space/membership_delete.go new file mode 100644 index 0000000000..3e3919b79c --- /dev/null +++ b/internal/api/handler/space/membership_delete.go @@ -0,0 +1,51 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package space + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/space" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleMembershipDelete handles API that deletes an existing space membership. +func HandleMembershipDelete(spaceCtrl *space.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + spaceRef, err := request.GetSpaceRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + userUID, err := request.GetUserUIDFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + err = spaceCtrl.MembershipDelete(ctx, session, spaceRef, userUID) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.DeleteSuccessful(w) + } +} diff --git a/internal/api/handler/space/membership_list.go b/internal/api/handler/space/membership_list.go new file mode 100644 index 0000000000..589f2ba649 --- /dev/null +++ b/internal/api/handler/space/membership_list.go @@ -0,0 +1,48 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package space + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/space" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleMembershipList handles API that lists all memberships of a space. +func HandleMembershipList(spaceCtrl *space.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + spaceRef, err := request.GetSpaceRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + filter := request.ParseMembershipUserFilter(r) + + memberships, membershipsCount, err := spaceCtrl.MembershipList(ctx, session, spaceRef, filter) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.Pagination(r, w, filter.Page, filter.Size, int(membershipsCount)) + render.JSON(w, http.StatusOK, memberships) + } +} diff --git a/internal/api/handler/space/membership_update.go b/internal/api/handler/space/membership_update.go new file mode 100644 index 0000000000..beef0dd14a --- /dev/null +++ b/internal/api/handler/space/membership_update.go @@ -0,0 +1,59 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package space + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/controller/space" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleMembershipUpdate handles API that changes the role of an existing space membership. +func HandleMembershipUpdate(spaceCtrl *space.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + spaceRef, err := request.GetSpaceRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + userUID, err := request.GetUserUIDFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + in := new(space.MembershipUpdateInput) + err = json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid Request Body: %s.", err) + return + } + + memberInfo, err := spaceCtrl.MembershipUpdate(ctx, session, spaceRef, userUID, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, memberInfo) + } +} diff --git a/internal/api/handler/space/move.go b/internal/api/handler/space/move.go new file mode 100644 index 0000000000..d16c107062 --- /dev/null +++ b/internal/api/handler/space/move.go @@ -0,0 +1,52 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package space + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/controller/space" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleMove moves an existing space. +func HandleMove(spaceCtrl *space.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + spaceRef, err := request.GetSpaceRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + in := new(space.MoveInput) + err = json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid request body: %s.", err) + return + } + + res, err := spaceCtrl.Move(ctx, session, spaceRef, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, res) + } +} diff --git a/internal/api/handler/space/update.go b/internal/api/handler/space/update.go new file mode 100644 index 0000000000..4db4274f3e --- /dev/null +++ b/internal/api/handler/space/update.go @@ -0,0 +1,52 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package space + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/controller/space" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleUpdate updates an existing space. +func HandleUpdate(spaceCtrl *space.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + spaceRef, err := request.GetSpaceRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + in := new(space.UpdateInput) + err = json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid request body: %s.", err) + return + } + + space, err := spaceCtrl.Update(ctx, session, spaceRef, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, space) + } +} diff --git a/internal/api/handler/system/health.go b/internal/api/handler/system/health.go new file mode 100644 index 0000000000..b3734b76a8 --- /dev/null +++ b/internal/api/handler/system/health.go @@ -0,0 +1,23 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package system + +import "net/http" + +// HandleHealth writes a 200 OK status to the http.Response +// if the server is healthy. +func HandleHealth(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusOK) +} diff --git a/internal/api/handler/system/health_test.go b/internal/api/handler/system/health_test.go new file mode 100644 index 0000000000..f07e83d003 --- /dev/null +++ b/internal/api/handler/system/health_test.go @@ -0,0 +1,21 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package system + +import "testing" + +func TestHealth(t *testing.T) { + t.Skip() +} diff --git a/internal/api/handler/system/list_config.go b/internal/api/handler/system/list_config.go new file mode 100644 index 0000000000..378d46b51e --- /dev/null +++ b/internal/api/handler/system/list_config.go @@ -0,0 +1,43 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package system + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/system" + "github.com/harness/gitness/internal/api/render" +) + +type ConfigOutput struct { + UserSignupAllowed bool `json:"user_signup_allowed"` +} + +// HandleGetConfig returns an http.HandlerFunc that processes an http.Request +// and returns a struct containing all system configs exposed to the users. +func HandleGetConfig(sysCtrl *system.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + + userSignupAllowed, err := sysCtrl.IsUserSignupAllowed(ctx) + if err != nil { + render.TranslatedUserError(w, err) + return + } + render.JSON(w, http.StatusOK, ConfigOutput{ + UserSignupAllowed: userSignupAllowed, + }) + } +} diff --git a/internal/api/handler/system/version.go b/internal/api/handler/system/version.go new file mode 100644 index 0000000000..141c6eba93 --- /dev/null +++ b/internal/api/handler/system/version.go @@ -0,0 +1,28 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package system + +import ( + "fmt" + "net/http" + + "github.com/harness/gitness/version" +) + +// HandleVersion writes the server version number +// to the http.Response body in plain text. +func HandleVersion(w http.ResponseWriter, r *http.Request) { + fmt.Fprintf(w, "%s", version.Version) +} diff --git a/internal/api/handler/system/version_test.go b/internal/api/handler/system/version_test.go new file mode 100644 index 0000000000..837976832d --- /dev/null +++ b/internal/api/handler/system/version_test.go @@ -0,0 +1,21 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
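The bundled TestHealth above (and TestVersion just below) are skipped placeholders. Because HandleHealth and HandleVersion are plain http.HandlerFunc-shaped functions with no controller dependency, they can be exercised directly with net/http/httptest; the sketch below shows what such tests could look like. The request paths are arbitrary, and these tests are an illustration rather than part of the patch.

    // Illustrative, non-skipped tests for the system handlers above.
    package system

    import (
        "net/http"
        "net/http/httptest"
        "testing"
    )

    func TestHandleHealthOK(t *testing.T) {
        req := httptest.NewRequest(http.MethodGet, "/health", nil)
        rec := httptest.NewRecorder()

        HandleHealth(rec, req)

        if rec.Code != http.StatusOK {
            t.Fatalf("expected status %d, got %d", http.StatusOK, rec.Code)
        }
    }

    func TestHandleVersionWritesBody(t *testing.T) {
        req := httptest.NewRequest(http.MethodGet, "/version", nil)
        rec := httptest.NewRecorder()

        HandleVersion(rec, req)

        if rec.Code != http.StatusOK {
            t.Fatalf("expected status %d, got %d", http.StatusOK, rec.Code)
        }
        if rec.Body.Len() == 0 {
            t.Fatal("expected a version string in the response body")
        }
    }
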
+// See the License for the specific language governing permissions and +// limitations under the License. + +package system + +import "testing" + +func TestVersion(t *testing.T) { + t.Skip() +} diff --git a/internal/api/handler/template/create.go b/internal/api/handler/template/create.go new file mode 100644 index 0000000000..3e8530160d --- /dev/null +++ b/internal/api/handler/template/create.go @@ -0,0 +1,47 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package template + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/controller/template" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleCreate returns a http.HandlerFunc that creates a new template. +func HandleCreate(templateCtrl *template.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + in := new(template.CreateInput) + err := json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid Request Body: %s.", err) + return + } + + template, err := templateCtrl.Create(ctx, session, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusCreated, template) + } +} diff --git a/internal/api/handler/template/delete.go b/internal/api/handler/template/delete.go new file mode 100644 index 0000000000..8e68f31f0b --- /dev/null +++ b/internal/api/handler/template/delete.go @@ -0,0 +1,49 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
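Handlers such as HandleCreate above reject a malformed request body before the controller is ever consulted, which makes that path easy to exercise in isolation. A sketch, assuming render.BadRequestf writes a 400 status (illustrative only, not part of the patch):

package template

import (
	"net/http"
	"net/http/httptest"
	"strings"
	"testing"
)

// TestHandleCreateBadBodySketch passes a nil controller on purpose: the
// handler returns on the JSON decode error before ever touching it.
func TestHandleCreateBadBodySketch(t *testing.T) {
	handler := HandleCreate(nil)

	req := httptest.NewRequest(http.MethodPost, "/", strings.NewReader("{not json"))
	rec := httptest.NewRecorder()

	handler(rec, req)

	if rec.Code != http.StatusBadRequest {
		t.Fatalf("expected status %d, got %d", http.StatusBadRequest, rec.Code)
	}
}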
+ +package template + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/template" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" + "github.com/harness/gitness/internal/paths" +) + +func HandleDelete(templateCtrl *template.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + templateRef, err := request.GetTemplateRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + spaceRef, templateUID, err := paths.DisectLeaf(templateRef) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + err = templateCtrl.Delete(ctx, session, spaceRef, templateUID) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.DeleteSuccessful(w) + } +} diff --git a/internal/api/handler/template/find.go b/internal/api/handler/template/find.go new file mode 100644 index 0000000000..1a569784f8 --- /dev/null +++ b/internal/api/handler/template/find.go @@ -0,0 +1,50 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package template + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/template" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" + "github.com/harness/gitness/internal/paths" +) + +// HandleFind finds a template from the database. +func HandleFind(templateCtrl *template.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + templateRef, err := request.GetTemplateRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + spaceRef, templateUID, err := paths.DisectLeaf(templateRef) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + template, err := templateCtrl.Find(ctx, session, spaceRef, templateUID) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, template) + } +} diff --git a/internal/api/handler/template/update.go b/internal/api/handler/template/update.go new file mode 100644 index 0000000000..6f9f1bf67d --- /dev/null +++ b/internal/api/handler/template/update.go @@ -0,0 +1,58 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
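The delete and find handlers above both resolve a template by splitting its fully qualified reference into the parent space and the leaf UID via paths.DisectLeaf. The real semantics live in internal/paths; the stand-in below only sketches the idea, assuming '/'-separated references:

package pathsketch

import (
	"errors"
	"strings"
)

// disectLeaf is an illustrative stand-in for paths.DisectLeaf: it splits a
// reference such as "rootspace/subspace/mytemplate" into "rootspace/subspace"
// and "mytemplate".
func disectLeaf(ref string) (parent string, leaf string, err error) {
	ref = strings.Trim(ref, "/")
	if ref == "" {
		return "", "", errors.New("reference is empty")
	}
	i := strings.LastIndex(ref, "/")
	if i < 0 {
		// a top-level reference has no parent segment
		return "", ref, nil
	}
	return ref[:i], ref[i+1:], nil
}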
+ +package template + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/controller/template" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" + "github.com/harness/gitness/internal/paths" +) + +func HandleUpdate(templateCtrl *template.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + in := new(template.UpdateInput) + err := json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid Request Body: %s.", err) + return + } + + templateRef, err := request.GetTemplateRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + spaceRef, templateUID, err := paths.DisectLeaf(templateRef) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + template, err := templateCtrl.Update(ctx, session, spaceRef, templateUID, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, template) + } +} diff --git a/internal/api/handler/trigger/create.go b/internal/api/handler/trigger/create.go new file mode 100644 index 0000000000..38bbeb4c29 --- /dev/null +++ b/internal/api/handler/trigger/create.go @@ -0,0 +1,56 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package trigger + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/controller/trigger" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +func HandleCreate(triggerCtrl *trigger.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + pipelineUID, err := request.GetPipelineUIDFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + in := new(trigger.CreateInput) + err = json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid Request Body: %s.", err) + return + } + + trigger, err := triggerCtrl.Create(ctx, session, repoRef, pipelineUID, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusCreated, trigger) + } +} diff --git a/internal/api/handler/trigger/delete.go b/internal/api/handler/trigger/delete.go new file mode 100644 index 0000000000..63c4a79c90 --- /dev/null +++ b/internal/api/handler/trigger/delete.go @@ -0,0 +1,53 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package trigger + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/trigger" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +func HandleDelete(triggerCtrl *trigger.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + pipelineUID, err := request.GetPipelineUIDFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + triggerUID, err := request.GetTriggerUIDFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + err = triggerCtrl.Delete(ctx, session, repoRef, pipelineUID, triggerUID) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.DeleteSuccessful(w) + } +} diff --git a/internal/api/handler/trigger/find.go b/internal/api/handler/trigger/find.go new file mode 100644 index 0000000000..e369fce4cf --- /dev/null +++ b/internal/api/handler/trigger/find.go @@ -0,0 +1,53 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package trigger + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/trigger" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +func HandleFind(triggerCtrl *trigger.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + pipelineUID, err := request.GetPipelineUIDFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + triggerUID, err := request.GetTriggerUIDFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + trigger, err := triggerCtrl.Find(ctx, session, repoRef, pipelineUID, triggerUID) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, trigger) + } +} diff --git a/internal/api/handler/trigger/list.go b/internal/api/handler/trigger/list.go new file mode 100644 index 0000000000..f4c0aa1ca0 --- /dev/null +++ b/internal/api/handler/trigger/list.go @@ -0,0 +1,51 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package trigger + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/trigger" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +func HandleList(triggerCtrl *trigger.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + pipelineUID, err := request.GetPipelineUIDFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + filter := request.ParseListQueryFilterFromRequest(r) + + repos, totalCount, err := triggerCtrl.List(ctx, session, repoRef, pipelineUID, filter) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.Pagination(r, w, filter.Page, filter.Size, int(totalCount)) + render.JSON(w, http.StatusOK, repos) + } +} diff --git a/internal/api/handler/trigger/update.go b/internal/api/handler/trigger/update.go new file mode 100644 index 0000000000..0a05855df1 --- /dev/null +++ b/internal/api/handler/trigger/update.go @@ -0,0 +1,62 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
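The trigger handlers above and below are plain http.HandlerFunc constructors that close over a controller, so wiring them up is a matter of mounting them on a router. The sketch below assumes a chi-style router (github.com/go-chi/chi/v5); the route layout and parameter names are illustrative and not taken from this patch:

package routesketch

import (
	"github.com/go-chi/chi/v5"

	"github.com/harness/gitness/internal/api/controller/trigger"
	handlertrigger "github.com/harness/gitness/internal/api/handler/trigger"
)

// mountTriggerRoutes registers the trigger handlers beneath a repository-
// and pipeline-scoped path (paths and parameter names are assumptions).
func mountTriggerRoutes(r chi.Router, triggerCtrl *trigger.Controller) {
	r.Route("/repos/{repo_ref}/pipelines/{pipeline_uid}/triggers", func(r chi.Router) {
		r.Post("/", handlertrigger.HandleCreate(triggerCtrl))
		r.Get("/", handlertrigger.HandleList(triggerCtrl))
		r.Route("/{trigger_uid}", func(r chi.Router) {
			r.Get("/", handlertrigger.HandleFind(triggerCtrl))
			r.Patch("/", handlertrigger.HandleUpdate(triggerCtrl))
			r.Delete("/", handlertrigger.HandleDelete(triggerCtrl))
		})
	})
}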
+ +package trigger + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/controller/trigger" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +func HandleUpdate(triggerCtrl *trigger.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + in := new(trigger.UpdateInput) + err := json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid Request Body: %s.", err) + return + } + + pipelineUID, err := request.GetPipelineUIDFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + triggerUID, err := request.GetTriggerUIDFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + pipeline, err := triggerCtrl.Update(ctx, session, repoRef, pipelineUID, triggerUID, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, pipeline) + } +} diff --git a/internal/api/handler/user/create_access_token.go b/internal/api/handler/user/create_access_token.go new file mode 100644 index 0000000000..c12faf7c94 --- /dev/null +++ b/internal/api/handler/user/create_access_token.go @@ -0,0 +1,49 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package user + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/controller/user" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleCreateAccessToken returns an http.HandlerFunc that creates a new PAT and +// writes a json-encoded TokenResponse to the http.Response body. +func HandleCreateAccessToken(userCtrl *user.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + userUID := session.Principal.UID + + in := new(user.CreateTokenInput) + err := json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid request body: %s.", err) + return + } + + tokenResponse, err := userCtrl.CreateAccessToken(ctx, session, userUID, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusCreated, tokenResponse) + } +} diff --git a/internal/api/handler/user/delete_token.go b/internal/api/handler/user/delete_token.go new file mode 100644 index 0000000000..3fe1f7101f --- /dev/null +++ b/internal/api/handler/user/delete_token.go @@ -0,0 +1,48 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package user + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/user" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" + "github.com/harness/gitness/types/enum" +) + +// HandleDeleteToken returns an http.HandlerFunc that +// deletes a token of a user. +func HandleDeleteToken(userCtrl *user.Controller, tokenType enum.TokenType) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + userUID := session.Principal.UID + + tokenUID, err := request.GetTokenUIDFromPath(r) + if err != nil { + render.BadRequest(w) + return + } + + err = userCtrl.DeleteToken(ctx, session, userUID, tokenType, tokenUID) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.DeleteSuccessful(w) + } +} diff --git a/internal/api/handler/user/find.go b/internal/api/handler/user/find.go new file mode 100644 index 0000000000..7a7d4e6dc1 --- /dev/null +++ b/internal/api/handler/user/find.go @@ -0,0 +1,41 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package user + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/user" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleFind returns an http.HandlerFunc that writes json-encoded +// account information to the http response body. +func HandleFind(userCtrl *user.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + userUID := session.Principal.UID + + user, err := userCtrl.Find(ctx, session, userUID) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, user) + } +} diff --git a/internal/api/handler/user/list_tokens.go b/internal/api/handler/user/list_tokens.go new file mode 100644 index 0000000000..a9492144b3 --- /dev/null +++ b/internal/api/handler/user/list_tokens.go @@ -0,0 +1,42 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package user + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/user" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" + "github.com/harness/gitness/types/enum" +) + +// HandleListTokens returns an http.HandlerFunc that +// writes a json-encoded list of Tokens to the http.Response body. +func HandleListTokens(userCtrl *user.Controller, tokenType enum.TokenType) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + userUID := session.Principal.UID + + res, err := userCtrl.ListTokens(ctx, session, userUID, tokenType) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, res) + } +} diff --git a/internal/api/handler/user/membership_spaces.go b/internal/api/handler/user/membership_spaces.go new file mode 100644 index 0000000000..3df4fea066 --- /dev/null +++ b/internal/api/handler/user/membership_spaces.go @@ -0,0 +1,42 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package user + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/user" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +func HandleMembershipSpaces(userCtrl *user.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + userUID := session.Principal.UID + + filter := request.ParseMembershipSpaceFilter(r) + + membershipSpaces, membershipSpaceCount, err := userCtrl.MembershipSpaces(ctx, session, userUID, filter) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.Pagination(r, w, filter.Page, filter.Size, int(membershipSpaceCount)) + render.JSON(w, http.StatusOK, membershipSpaces) + } +} diff --git a/internal/api/handler/user/update.go b/internal/api/handler/user/update.go new file mode 100644 index 0000000000..a6e17bd391 --- /dev/null +++ b/internal/api/handler/user/update.go @@ -0,0 +1,49 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
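HandleListTokens and HandleDeleteToken above take the token type as a constructor argument, so a single implementation can back several token endpoints. The sketch below illustrates that composition; the enum constant names (enum.TokenTypePAT, enum.TokenTypeSession) are assumptions, not taken from this patch:

package usersketch

import (
	"net/http"

	"github.com/harness/gitness/internal/api/controller/user"
	userhandler "github.com/harness/gitness/internal/api/handler/user"
	"github.com/harness/gitness/types/enum"
)

// tokenListHandlers builds one handler per token type from the same code.
func tokenListHandlers(userCtrl *user.Controller) (listPATs, listSessions http.HandlerFunc) {
	return userhandler.HandleListTokens(userCtrl, enum.TokenTypePAT),
		userhandler.HandleListTokens(userCtrl, enum.TokenTypeSession)
}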
+ +package user + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/controller/user" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleUpdate returns an http.HandlerFunc that processes an http.Request +// to update the current user account. +func HandleUpdate(userCtrl *user.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + userUID := session.Principal.UID + + in := new(user.UpdateInput) + err := json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid request body: %s.", err) + return + } + + user, err := userCtrl.Update(ctx, session, userUID, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, user) + } +} diff --git a/internal/api/handler/user/update_admin.go b/internal/api/handler/user/update_admin.go new file mode 100644 index 0000000000..be9ab0c61d --- /dev/null +++ b/internal/api/handler/user/update_admin.go @@ -0,0 +1,54 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package user + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/controller/user" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleUpdateAdmin returns a http.HandlerFunc that processes an http.Request +// to update the current user admin status. +func HandleUpdateAdmin(userCtrl *user.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + userUID, err := request.GetUserUIDFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + in := new(user.UpdateAdminInput) + err = json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid request body: %s.", err) + return + } + + user, err := userCtrl.UpdateAdmin(ctx, session, userUID, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, user) + } +} diff --git a/internal/api/handler/users/create.go b/internal/api/handler/users/create.go new file mode 100644 index 0000000000..d4306f46f4 --- /dev/null +++ b/internal/api/handler/users/create.go @@ -0,0 +1,48 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package users + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/controller/user" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleCreate returns an http.HandlerFunc that processes an http.Request +// to create the named user account in the system. +func HandleCreate(userCtrl *user.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + in := new(user.CreateInput) + err := json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid request body: %s.", err) + return + } + + usr, err := userCtrl.Create(ctx, session, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusCreated, usr) + } +} diff --git a/internal/api/handler/users/create_test.go b/internal/api/handler/users/create_test.go new file mode 100644 index 0000000000..78d1498816 --- /dev/null +++ b/internal/api/handler/users/create_test.go @@ -0,0 +1,15 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package users diff --git a/internal/api/handler/users/delete.go b/internal/api/handler/users/delete.go new file mode 100644 index 0000000000..ab8d7c50d2 --- /dev/null +++ b/internal/api/handler/users/delete.go @@ -0,0 +1,45 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package users + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/user" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleDelete returns an http.HandlerFunc that processes an http.Request +// to delete the named user account from the system. 
+func HandleDelete(userCtrl *user.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + userUID, err := request.GetUserUIDFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + err = userCtrl.Delete(ctx, session, userUID) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.DeleteSuccessful(w) + } +} diff --git a/internal/api/handler/users/delete_test.go b/internal/api/handler/users/delete_test.go new file mode 100644 index 0000000000..78d1498816 --- /dev/null +++ b/internal/api/handler/users/delete_test.go @@ -0,0 +1,15 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package users diff --git a/internal/api/handler/users/find.go b/internal/api/handler/users/find.go new file mode 100644 index 0000000000..3bfa31a9a1 --- /dev/null +++ b/internal/api/handler/users/find.go @@ -0,0 +1,45 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package users + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/user" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleFind returns an http.HandlerFunc that writes json-encoded +// user account information to the the response body. +func HandleFind(userCtrl *user.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + userUID, err := request.GetUserUIDFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + usr, err := userCtrl.Find(ctx, session, userUID) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, usr) + } +} diff --git a/internal/api/handler/users/find_test.go b/internal/api/handler/users/find_test.go new file mode 100644 index 0000000000..78d1498816 --- /dev/null +++ b/internal/api/handler/users/find_test.go @@ -0,0 +1,15 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package users diff --git a/internal/api/handler/users/list.go b/internal/api/handler/users/list.go new file mode 100644 index 0000000000..4dcee53ee4 --- /dev/null +++ b/internal/api/handler/users/list.go @@ -0,0 +1,47 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package users + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/user" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" + "github.com/harness/gitness/types/enum" +) + +// HandleList returns an http.HandlerFunc that writes a json-encoded +// list of all registered system users to the response body. +func HandleList(userCtrl *user.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + filter := request.ParseUserFilter(r) + if filter.Order == enum.OrderDefault { + filter.Order = enum.OrderAsc + } + + list, totalCount, err := userCtrl.List(ctx, session, filter) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.Pagination(r, w, filter.Page, filter.Size, int(totalCount)) + render.JSON(w, http.StatusOK, list) + } +} diff --git a/internal/api/handler/users/list_test.go b/internal/api/handler/users/list_test.go new file mode 100644 index 0000000000..78d1498816 --- /dev/null +++ b/internal/api/handler/users/list_test.go @@ -0,0 +1,15 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package users diff --git a/internal/api/handler/users/update.go b/internal/api/handler/users/update.go new file mode 100644 index 0000000000..9e25674eba --- /dev/null +++ b/internal/api/handler/users/update.go @@ -0,0 +1,52 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package users + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/controller/user" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleUpdate returns a http.HandlerFunc that processes an http.Request +// to update a user account. +func HandleUpdate(userCtrl *user.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + userUID, err := request.GetUserUIDFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + in := new(user.UpdateInput) + if err = json.NewDecoder(r.Body).Decode(in); err != nil { + render.BadRequestf(w, "Invalid request body: %s.", err) + return + } + + usr, err := userCtrl.Update(ctx, session, userUID, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, usr) + } +} diff --git a/internal/api/handler/users/update_test.go b/internal/api/handler/users/update_test.go new file mode 100644 index 0000000000..78d1498816 --- /dev/null +++ b/internal/api/handler/users/update_test.go @@ -0,0 +1,15 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package users diff --git a/internal/api/handler/webhook/create.go b/internal/api/handler/webhook/create.go new file mode 100644 index 0000000000..d7e9ea41fd --- /dev/null +++ b/internal/api/handler/webhook/create.go @@ -0,0 +1,53 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package webhook + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/controller/webhook" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleCreate returns a http.HandlerFunc that creates a new webhook. 
+func HandleCreate(webhookCtrl *webhook.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + in := new(webhook.CreateInput) + err = json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid Request Body: %s.", err) + return + } + + hook, err := webhookCtrl.Create(ctx, session, repoRef, in, false) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusCreated, hook) + } +} diff --git a/internal/api/handler/webhook/delete.go b/internal/api/handler/webhook/delete.go new file mode 100644 index 0000000000..b37c1a6364 --- /dev/null +++ b/internal/api/handler/webhook/delete.go @@ -0,0 +1,51 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package webhook + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/webhook" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleDelete returns a http.HandlerFunc that deletes a webhook. +func HandleDelete(webhookCtrl *webhook.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + webhookID, err := request.GetWebhookIDFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + err = webhookCtrl.Delete(ctx, session, repoRef, webhookID) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.DeleteSuccessful(w) + } +} diff --git a/internal/api/handler/webhook/find.go b/internal/api/handler/webhook/find.go new file mode 100644 index 0000000000..ee80eb48de --- /dev/null +++ b/internal/api/handler/webhook/find.go @@ -0,0 +1,51 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package webhook + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/webhook" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleFind returns a http.HandlerFunc that finds a webhook. 
+func HandleFind(webhookCtrl *webhook.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + webhookID, err := request.GetWebhookIDFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + webhook, err := webhookCtrl.Find(ctx, session, repoRef, webhookID) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, webhook) + } +} diff --git a/internal/api/handler/webhook/find_execution.go b/internal/api/handler/webhook/find_execution.go new file mode 100644 index 0000000000..f4db2cd62b --- /dev/null +++ b/internal/api/handler/webhook/find_execution.go @@ -0,0 +1,57 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package webhook + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/webhook" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleFindExecution returns a http.HandlerFunc that finds a webhook execution. +func HandleFindExecution(webhookCtrl *webhook.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + webhookID, err := request.GetWebhookIDFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + webhookExecutionID, err := request.GetWebhookExecutionIDFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + execution, err := webhookCtrl.FindExecution(ctx, session, repoRef, webhookID, webhookExecutionID) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, execution) + } +} diff --git a/internal/api/handler/webhook/list.go b/internal/api/handler/webhook/list.go new file mode 100644 index 0000000000..5f59187b57 --- /dev/null +++ b/internal/api/handler/webhook/list.go @@ -0,0 +1,52 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package webhook + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/webhook" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" + "github.com/harness/gitness/types/enum" +) + +// HandleList returns a http.HandlerFunc that lists webhooks. +func HandleList(webhookCtrl *webhook.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + filter := request.ParseWebhookFilter(r) + if filter.Order == enum.OrderDefault { + filter.Order = enum.OrderAsc + } + + webhooks, totalCount, err := webhookCtrl.List(ctx, session, repoRef, filter) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.Pagination(r, w, filter.Page, filter.Size, int(totalCount)) + render.JSON(w, http.StatusOK, webhooks) + } +} diff --git a/internal/api/handler/webhook/list_executions.go b/internal/api/handler/webhook/list_executions.go new file mode 100644 index 0000000000..36e2cba770 --- /dev/null +++ b/internal/api/handler/webhook/list_executions.go @@ -0,0 +1,56 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package webhook + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/webhook" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleListExecutions returns a http.HandlerFunc that lists webhook executions. +func HandleListExecutions(webhookCtrl *webhook.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + webhookID, err := request.GetWebhookIDFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + filter := request.ParseWebhookExecutionFilter(r) + + executions, err := webhookCtrl.ListExecutions(ctx, session, repoRef, webhookID, filter) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + // TODO: get last page indicator explicitly - current check is wrong in case len % pageSize == 0 + isLastPage := len(executions) < filter.Size + render.PaginationNoTotal(r, w, filter.Page, filter.Size, isLastPage) + render.JSON(w, http.StatusOK, executions) + } +} diff --git a/internal/api/handler/webhook/retrigger_execution.go b/internal/api/handler/webhook/retrigger_execution.go new file mode 100644 index 0000000000..53a30ab897 --- /dev/null +++ b/internal/api/handler/webhook/retrigger_execution.go @@ -0,0 +1,57 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
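The TODO in the execution-list handler above points at a real edge case: when the final page is exactly full, len(executions) < filter.Size is false and the last page goes undetected. A common workaround is to ask the store for one extra row and use its presence as the indicator; a generic sketch, not part of the patch:

package paginate

import "context"

// listPage fetches up to limit+1 items through the supplied fetch function
// and reports whether this is the last page. fetch stands in for a store or
// controller call; everything here is illustrative.
func listPage[T any](
	ctx context.Context,
	limit int,
	fetch func(ctx context.Context, limit int) ([]T, error),
) ([]T, bool, error) {
	items, err := fetch(ctx, limit+1)
	if err != nil {
		return nil, false, err
	}
	if len(items) > limit {
		// the extra row proves another page exists; drop it before returning
		return items[:limit], false, nil
	}
	return items, true, nil
}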
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package webhook + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/webhook" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleRetriggerExecution returns a http.HandlerFunc that retriggers a webhook executions. +func HandleRetriggerExecution(webhookCtrl *webhook.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + webhookID, err := request.GetWebhookIDFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + webhookExecutionID, err := request.GetWebhookExecutionIDFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + execution, err := webhookCtrl.RetriggerExecution(ctx, session, repoRef, webhookID, webhookExecutionID) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, execution) + } +} diff --git a/internal/api/handler/webhook/update.go b/internal/api/handler/webhook/update.go new file mode 100644 index 0000000000..725dd45419 --- /dev/null +++ b/internal/api/handler/webhook/update.go @@ -0,0 +1,59 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package webhook + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/controller/webhook" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" +) + +// HandleUpdate returns a http.HandlerFunc that updates an existing webhook. 
+func HandleUpdate(webhookCtrl *webhook.Controller) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + session, _ := request.AuthSessionFrom(ctx) + + repoRef, err := request.GetRepoRefFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + webhookID, err := request.GetWebhookIDFromPath(r) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + in := new(webhook.UpdateInput) + err = json.NewDecoder(r.Body).Decode(in) + if err != nil { + render.BadRequestf(w, "Invalid Request Body: %s.", err) + return + } + + hook, err := webhookCtrl.Update(ctx, session, repoRef, webhookID, in) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + render.JSON(w, http.StatusOK, hook) + } +} diff --git a/internal/api/middleware/address/address.go b/internal/api/middleware/address/address.go new file mode 100644 index 0000000000..ccf16f88f1 --- /dev/null +++ b/internal/api/middleware/address/address.go @@ -0,0 +1,92 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package address + +import ( + "net/http" + "strings" +) + +// Handler returns an http.HandlerFunc middleware that sets +// the http.Request scheme and hostname. +func Handler(scheme, host string) func(http.Handler) http.Handler { + return func(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + // update the scheme and host for the inbound + // http.Request so they are available to subsequent + // handlers in the chain. + r.URL.Scheme = scheme + r.URL.Host = host + + // if the scheme is not configured, attempt to ascertain + // the scheme from the inbound http.Request. + if r.URL.Scheme == "" { + r.URL.Scheme = resolveScheme(r) + } + + // if the host is not configured, attempt to ascertain + // the host from the inbound http.Request. + if r.URL.Host == "" { + r.URL.Host = resolveHost(r) + } + + // invoke the next handler in the chain. + next.ServeHTTP(w, r) + }) + } +} + +// resolveScheme is a helper function that evaluates the http.Request +// and returns the scheme, HTTP or HTTPS. It is able to detect, +// using the X-Forwarded-Proto, if the original request was HTTPS +// and routed through a reverse proxy with SSL termination. +func resolveScheme(r *http.Request) string { + const https = "https" + switch { + case r.URL.Scheme == https: + return https + case r.TLS != nil: + return https + case strings.HasPrefix(r.Proto, "HTTPS"): + return https + case r.Header.Get("X-Forwarded-Proto") == https: + return https + default: + return "http" + } +} + +// resolveHost is a helper function that evaluates the http.Request +// and returns the hostname. It is able to detect, using the +// X-Forarded-For header, the original hostname when routed +// through a reverse proxy. 
+func resolveHost(r *http.Request) string { + switch { + case len(r.Host) != 0: + return r.Host + case len(r.URL.Host) != 0: + return r.URL.Host + case len(r.Header.Get("X-Forwarded-For")) != 0: + return r.Header.Get("X-Forwarded-For") + case len(r.Header.Get("X-Host")) != 0: + return r.Header.Get("X-Host") + case len(r.Header.Get("XFF")) != 0: + return r.Header.Get("XFF") + case len(r.Header.Get("X-Real-IP")) != 0: + return r.Header.Get("X-Real-IP") + default: + return "localhost:3000" + } +} diff --git a/internal/api/middleware/address/address_test.go b/internal/api/middleware/address/address_test.go new file mode 100644 index 0000000000..a0924e8f83 --- /dev/null +++ b/internal/api/middleware/address/address_test.go @@ -0,0 +1,15 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package address diff --git a/internal/api/middleware/authn/authn.go b/internal/api/middleware/authn/authn.go new file mode 100644 index 0000000000..b7d5cd75ba --- /dev/null +++ b/internal/api/middleware/authn/authn.go @@ -0,0 +1,91 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package authn + +import ( + "errors" + "net/http" + + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" + "github.com/harness/gitness/internal/auth/authn" + + "github.com/rs/zerolog" + "github.com/rs/zerolog/hlog" +) + +// Attempt returns an http.HandlerFunc middleware that authenticates +// the http.Request if authentication payload is available. +func Attempt(authenticator authn.Authenticator, sourceRouter authn.SourceRouter) func(http.Handler) http.Handler { + return performAuthentication(authenticator, false, sourceRouter) +} + +// Required returns an http.HandlerFunc middleware that authenticates +// the http.Request and fails the request if no auth data was available. +func Required(authenticator authn.Authenticator, sourceRouter authn.SourceRouter) func(http.Handler) http.Handler { + return performAuthentication(authenticator, true, sourceRouter) +} + +// performAuthentication returns an http.HandlerFunc middleware that authenticates +// the http.Request if authentication payload is available. +// Depending on whether it is required or not, the request will be failed. 
+func performAuthentication( + authenticator authn.Authenticator, + required bool, sourceRouter authn.SourceRouter, +) func(http.Handler) http.Handler { + return func(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + log := hlog.FromRequest(r) + + session, err := authenticator.Authenticate(r, sourceRouter) + if err != nil { + if !errors.Is(err, authn.ErrNoAuthData) { + // log error to help with investigating any auth-related errors + log.Warn().Err(err).Msg("authentication failed") + } + + if required { + render.Unauthorized(w) + return + } + + // if there was no (valid) auth data in the request, then continue without session + next.ServeHTTP(w, r) + return + } + + if session == nil { + // when err == nil session should never be nil! + log.Error().Msg("auth session is nil even though the authenticator didn't return any error!") + + render.InternalError(w) + return + } + + // Update the logging context and inject principal in context + log.UpdateContext(func(c zerolog.Context) zerolog.Context { + return c. + Str("principal_uid", session.Principal.UID). + Str("principal_type", string(session.Principal.Type)). + Bool("principal_admin", session.Principal.Admin) + }) + + next.ServeHTTP(w, r.WithContext( + request.WithAuthSession(ctx, session), + )) + }) + } +} diff --git a/internal/api/middleware/encode/encode.go b/internal/api/middleware/encode/encode.go new file mode 100644 index 0000000000..c62ab29ecc --- /dev/null +++ b/internal/api/middleware/encode/encode.go @@ -0,0 +1,114 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package encode + +import ( + "net/http" + "strings" + + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/request" + "github.com/harness/gitness/types" + + "github.com/rs/zerolog/hlog" +) + +const ( + EncodedPathSeparator = "%252F" +) + +// GitPathBefore wraps an http.HandlerFunc in a layer that encodes a path coming +// as part of the GIT api (e.g. "space1/repo.git") before executing the provided http.HandlerFunc. +func GitPathBefore(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + _, err := pathTerminatedWithMarker(r, "", ".git", false) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + next.ServeHTTP(w, r) + }) +} + +// TerminatedPathBefore wraps an http.HandlerFunc in a layer that encodes a terminated path (e.g. "/space1/space2/+") +// before executing the provided http.HandlerFunc. The first prefix that matches the URL.Path will +// be used during encoding (prefix is ignored during encoding).
+func TerminatedPathBefore(prefixes []string, next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + for _, p := range prefixes { + changed, err := pathTerminatedWithMarker(r, p, "/+", false) + if err != nil { + render.TranslatedUserError(w, err) + return + } + + // stop after the first prefix that leads to success + if changed { + break + } + } + + next.ServeHTTP(w, r) + }) +} + +// pathTerminatedWithMarker encodes a path followed by a custom marker and returns a request with an +// updated URL.Path. +// A non-empty prefix can be provided to encode only after the prefix. +// It allows our REST API to handle paths of the form "/spaces/space1/space2/+/authToken" +// +// Examples: +// Prefix: "" Path: "/space1/space2/+" => "/space1%2Fspace2" +// Prefix: "" Path: "/space1/space2.git" => "/space1%2Fspace2" +// Prefix: "/spaces" Path: "/spaces/space1/space2/+/authToken" => "/spaces/space1%2Fspace2/authToken". +func pathTerminatedWithMarker(r *http.Request, prefix string, marker string, keepMarker bool) (bool, error) { + // In case path doesn't start with prefix - nothing to encode + if len(r.URL.Path) < len(prefix) || r.URL.Path[0:len(prefix)] != prefix { + return false, nil + } + + originalSubPath := r.URL.Path[len(prefix):] + path, _, found := strings.Cut(originalSubPath, marker) + + // If we don't find a marker - nothing to encode + if !found { + return false, nil + } + + // if marker was found - convert to escaped version (skip first character in case path starts with '/'). + // Since replacePrefix unescapes the strings, we have to double escape. + escapedPath := path[0:1] + strings.ReplaceAll(path[1:], types.PathSeparator, EncodedPathSeparator) + if keepMarker { + escapedPath += marker + } + + prefixWithPath := prefix + path + marker + prefixWithEscapedPath := prefix + escapedPath + + hlog.FromRequest(r).Trace().Msgf( + "[Encode] prefix: '%s', marker: '%s', original: '%s', escaped: '%s'.\n", + prefix, + marker, + prefixWithPath, + prefixWithEscapedPath) + + err := request.ReplacePrefix(r, prefixWithPath, prefixWithEscapedPath) + if err != nil { + return false, err + } + + return true, nil +} diff --git a/internal/api/middleware/logging/logging.go b/internal/api/middleware/logging/logging.go new file mode 100644 index 0000000000..695257fc60 --- /dev/null +++ b/internal/api/middleware/logging/logging.go @@ -0,0 +1,79 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package logging + +import ( + "net/http" + "time" + + "github.com/harness/gitness/gitrpc" + "github.com/harness/gitness/internal/api/request" + + "github.com/rs/xid" + "github.com/rs/zerolog" + "github.com/rs/zerolog/hlog" +) + +const ( + requestIDHeader = "X-Request-Id" +) + +// HLogRequestIDHandler provides a middleware that injects request_id into the logging and execution context. +// It prefers the X-Request-Id header; if that doesn't exist, it creates a new request id similar to zerolog.
+func HLogRequestIDHandler() func(http.Handler) http.Handler { + return func(h http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + + // read requestID from header (or create new one if none exists) + var reqID string + if reqIDs, ok := r.Header[requestIDHeader]; ok && len(reqIDs) > 0 && len(reqIDs[0]) > 0 { + reqID = reqIDs[0] + } else { + // similar to zerolog requestID generation + reqID = xid.New().String() + } + + // add requestID to context for internal usage + gitrpc client! + ctx = request.WithRequestID(ctx, reqID) + ctx = gitrpc.WithRequestID(ctx, reqID) + + // update logging context with request ID + log := zerolog.Ctx(ctx) + log.UpdateContext(func(c zerolog.Context) zerolog.Context { + return c.Str("request_id", reqID) + }) + + // write request ID to response headers + w.Header().Set(requestIDHeader, reqID) + + // continue serving request + h.ServeHTTP(w, r.WithContext(ctx)) + }) + } +} + +// HLogAccessLogHandler provides an hlog based middleware that logs access logs. +func HLogAccessLogHandler() func(http.Handler) http.Handler { + return hlog.AccessHandler( + func(r *http.Request, status, size int, duration time.Duration) { + hlog.FromRequest(r).Info(). + Int("http.status_code", status). + Int("http.response_size_bytes", size). + Dur("http.elapsed_ms", duration). + Msg("http request completed.") + }, + ) +} diff --git a/internal/api/middleware/principal/principal.go b/internal/api/middleware/principal/principal.go new file mode 100644 index 0000000000..7341eb5f5f --- /dev/null +++ b/internal/api/middleware/principal/principal.go @@ -0,0 +1,71 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package principal + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/internal/api/request" + "github.com/harness/gitness/types/enum" + + "github.com/rs/zerolog/log" +) + +/* + * RestrictTo returns an http.HandlerFunc middleware that ensures the principal + * is of the provided type. In case there is no authenticated principal, + * or the principal type doesn't match, an error is rendered. + */ +func RestrictTo(pType enum.PrincipalType) func(http.Handler) http.Handler { + return func(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + + p, ok := request.PrincipalFrom(ctx) + if !ok || p.Type != pType { + log.Ctx(ctx).Debug().Msgf("Principal of type '%s' required.", pType) + + render.Forbidden(w) + return + } + + next.ServeHTTP(w, r) + }) + } +} + +/* + * RestrictToAdmin returns an http.HandlerFunc middleware that ensures the principal + * is an admin. In case there is no authenticated principal, + * or the principal isn't an admin, an error is rendered. 
+ */ +func RestrictToAdmin() func(http.Handler) http.Handler { + return func(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + + p, ok := request.PrincipalFrom(ctx) + if !ok || !p.Admin { + log.Ctx(ctx).Debug().Msg("No principal found or the principal is not an admin") + + render.Forbidden(w) + return + } + + next.ServeHTTP(w, r) + }) + } +} diff --git a/internal/api/openapi/account.go b/internal/api/openapi/account.go new file mode 100644 index 0000000000..a1eb4f2df3 --- /dev/null +++ b/internal/api/openapi/account.go @@ -0,0 +1,87 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package openapi + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/user" + "github.com/harness/gitness/internal/api/request" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/types" + + "github.com/gotidy/ptr" + "github.com/swaggest/openapi-go/openapi3" +) + +var queryParameterIncludeCookie = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: request.QueryParamIncludeCookie, + In: openapi3.ParameterInQuery, + Description: ptr.String("If set to true the token is also returned as a cookie."), + Required: ptr.Bool(false), + Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeBoolean), + Default: ptrptr(false), + }, + }, + }, +} + +// request to login to an account. +type loginRequest struct { + user.LoginInput +} + +// request to register an account. +type registerRequest struct { + user.RegisterInput +} + +// helper function that constructs the openapi specification +// for the account registration and login endpoints.
+func buildAccount(reflector *openapi3.Reflector) { + onLogin := openapi3.Operation{} + onLogin.WithTags("account") + onLogin.WithParameters(queryParameterIncludeCookie) + onLogin.WithMapOfAnything(map[string]interface{}{"operationId": "onLogin"}) + _ = reflector.SetRequest(&onLogin, new(loginRequest), http.MethodPost) + _ = reflector.SetJSONResponse(&onLogin, new(types.TokenResponse), http.StatusOK) + _ = reflector.SetJSONResponse(&onLogin, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&onLogin, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&onLogin, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodPost, "/login", onLogin) + + opLogout := openapi3.Operation{} + opLogout.WithTags("account") + opLogout.WithMapOfAnything(map[string]interface{}{"operationId": "opLogout"}) + _ = reflector.SetRequest(&opLogout, nil, http.MethodPost) + _ = reflector.SetJSONResponse(&opLogout, nil, http.StatusOK) + _ = reflector.SetJSONResponse(&opLogout, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&opLogout, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opLogout, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodPost, "/logout", opLogout) + + onRegister := openapi3.Operation{} + onRegister.WithTags("account") + onRegister.WithParameters(queryParameterIncludeCookie) + onRegister.WithMapOfAnything(map[string]interface{}{"operationId": "onRegister"}) + _ = reflector.SetRequest(&onRegister, new(registerRequest), http.MethodPost) + _ = reflector.SetJSONResponse(&onRegister, new(types.TokenResponse), http.StatusOK) + _ = reflector.SetJSONResponse(&onRegister, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&onRegister, new(usererror.Error), http.StatusBadRequest) + _ = reflector.Spec.AddOperation(http.MethodPost, "/register", onRegister) +} diff --git a/internal/api/openapi/check.go b/internal/api/openapi/check.go new file mode 100644 index 0000000000..bb44273688 --- /dev/null +++ b/internal/api/openapi/check.go @@ -0,0 +1,62 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
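Note on the middlewares added above: address, authn, logging, and principal all expose the func(http.Handler) http.Handler shape, so they compose by plain wrapping. The fragment below is a minimal illustrative sketch and not part of this change; it assumes an already-constructed authn.Authenticator and authn.SourceRouter, and it assumes that request.PrincipalFrom can resolve the principal from the auth session stored by the authn middleware.

package example

import (
	"net/http"

	"github.com/harness/gitness/internal/api/middleware/address"
	middlewareauthn "github.com/harness/gitness/internal/api/middleware/authn"
	"github.com/harness/gitness/internal/api/middleware/logging"
	"github.com/harness/gitness/internal/api/middleware/principal"
	"github.com/harness/gitness/internal/auth/authn"

	"github.com/rs/zerolog"
	"github.com/rs/zerolog/hlog"
)

// wrapAdminAPI is a hypothetical helper that chains the new middlewares around
// a handler. Order is outermost first: the logger must be in the request
// context before the request-ID and authentication middlewares use it, and a
// session must exist before RestrictToAdmin can check the principal.
func wrapAdminAPI(
	log zerolog.Logger,
	authenticator authn.Authenticator,
	sourceRouter authn.SourceRouter,
	next http.Handler,
) http.Handler {
	middlewares := []func(http.Handler) http.Handler{
		hlog.NewHandler(log),                                  // put the zerolog logger into the request context
		logging.HLogRequestIDHandler(),                        // inject or propagate X-Request-Id
		logging.HLogAccessLogHandler(),                        // emit an access log entry on completion
		address.Handler("", ""),                               // empty scheme/host: derive them from the inbound request
		middlewareauthn.Required(authenticator, sourceRouter), // reject unauthenticated requests
		principal.RestrictToAdmin(),                           // only admins may pass
	}

	// apply in reverse so the first entry ends up outermost.
	wrapped := next
	for i := len(middlewares) - 1; i >= 0; i-- {
		wrapped = middlewares[i](wrapped)
	}
	return wrapped
}

However the chain is assembled in the actual server, the same ordering concern applies: logger first, then request ID, then authentication, then any principal checks.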
+ +package openapi + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/check" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/types" + + "github.com/swaggest/openapi-go/openapi3" +) + +func checkOperations(reflector *openapi3.Reflector) { + const tag = "status_checks" + + reportStatusCheckResults := openapi3.Operation{} + reportStatusCheckResults.WithTags(tag) + reportStatusCheckResults.WithMapOfAnything(map[string]interface{}{"operationId": "reportStatusCheckResults"}) + _ = reflector.SetRequest(&reportStatusCheckResults, struct { + repoRequest + CommitSHA string `path:"commit_sha"` + check.ReportInput + }{}, http.MethodPut) + _ = reflector.SetJSONResponse(&reportStatusCheckResults, new(types.Check), http.StatusOK) + _ = reflector.SetJSONResponse(&reportStatusCheckResults, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&reportStatusCheckResults, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&reportStatusCheckResults, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&reportStatusCheckResults, new(usererror.Error), http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodPut, "/repos/{repo_ref}/checks/commits/{commit_sha}", + reportStatusCheckResults) + + listStatusCheckResults := openapi3.Operation{} + listStatusCheckResults.WithTags(tag) + listStatusCheckResults.WithParameters( + queryParameterPage, queryParameterLimit) + listStatusCheckResults.WithMapOfAnything(map[string]interface{}{"operationId": "listStatusCheckResults"}) + _ = reflector.SetRequest(&listStatusCheckResults, struct { + repoRequest + CommitSHA string `path:"commit_sha"` + }{}, http.MethodGet) + _ = reflector.SetJSONResponse(&listStatusCheckResults, new([]types.Check), http.StatusOK) + _ = reflector.SetJSONResponse(&listStatusCheckResults, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&listStatusCheckResults, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&listStatusCheckResults, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&listStatusCheckResults, new(usererror.Error), http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodGet, "/repos/{repo_ref}/checks/commits/{commit_sha}", + listStatusCheckResults) +} diff --git a/internal/api/openapi/common.go b/internal/api/openapi/common.go new file mode 100644 index 0000000000..90691f95d8 --- /dev/null +++ b/internal/api/openapi/common.go @@ -0,0 +1,98 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
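The account and status-check registrations above share one pattern: declare an openapi3.Operation, attach tags and an operationId, describe the request and the per-status responses through the reflector, then add the operation under its route. A self-contained sketch of that pattern follows; the /spaces/{space_ref}/ping endpoint and both types are made up for illustration and are not part of the change.

package openapi

import (
	"net/http"

	"github.com/swaggest/openapi-go/openapi3"
)

// pingRequest is a made-up request type: `path:` tags become path parameters
// and `query:` tags become query parameters in the generated spec.
type pingRequest struct {
	SpaceRef string `path:"space_ref"`
	Echo     string `query:"echo"`
}

// pingResponse is a made-up response payload.
type pingResponse struct {
	Message string `json:"message"`
}

// pingOperations registers the made-up endpoint the same way buildAccount and
// checkOperations register theirs.
func pingOperations(reflector *openapi3.Reflector) {
	opPing := openapi3.Operation{}
	opPing.WithTags("example")
	opPing.WithMapOfAnything(map[string]interface{}{"operationId": "ping"})
	_ = reflector.SetRequest(&opPing, new(pingRequest), http.MethodGet)
	_ = reflector.SetJSONResponse(&opPing, new(pingResponse), http.StatusOK)
	_ = reflector.Spec.AddOperation(http.MethodGet, "/spaces/{space_ref}/ping", opPing)
}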
+ +package openapi + +import ( + "github.com/harness/gitness/internal/api/request" + "github.com/harness/gitness/types/enum" + + "github.com/gotidy/ptr" + "github.com/swaggest/openapi-go/openapi3" +) + +func ptrSchemaType(t openapi3.SchemaType) *openapi3.SchemaType { + return &t +} + +func ptrptr(i interface{}) *interface{} { + return &i +} + +var queryParameterPage = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: request.QueryParamPage, + In: openapi3.ParameterInQuery, + Description: ptr.String("The page to return."), + Required: ptr.Bool(false), + Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeInteger), + Default: ptrptr(1), + Minimum: ptr.Float64(1), + }, + }, + }, +} + +var queryParameterOrder = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: request.QueryParamOrder, + In: openapi3.ParameterInQuery, + Description: ptr.String("The order of the output."), + Required: ptr.Bool(false), + Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeString), + Default: ptrptr(enum.OrderAsc.String()), + Enum: []interface{}{ + ptr.String(enum.OrderAsc.String()), + ptr.String(enum.OrderDesc.String()), + }, + }, + }, + }, +} + +var queryParameterLimit = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: request.QueryParamLimit, + In: openapi3.ParameterInQuery, + Description: ptr.String("The maximum number of results to return."), + Required: ptr.Bool(false), + Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeInteger), + Default: ptrptr(request.PerPageDefault), + Minimum: ptr.Float64(1.0), + Maximum: ptr.Float64(request.PerPageMax), + }, + }, + }, +} + +var queryParameterAfter = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: request.QueryParamAfter, + In: openapi3.ParameterInQuery, + Description: ptr.String("The result should contain only entries created at and after this timestamp (unix millis)."), + Required: ptr.Bool(false), + Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeInteger), + Minimum: ptr.Float64(0), + }, + }, + }, +} diff --git a/internal/api/openapi/connector.go b/internal/api/openapi/connector.go new file mode 100644 index 0000000000..f03d840d3e --- /dev/null +++ b/internal/api/openapi/connector.go @@ -0,0 +1,89 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
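The ParameterOrRef literals in common.go above repeat the same shape for page, order, limit, and after. Purely as a sketch of a possible simplification, not something this patch does, an in-package constructor could build the integer-typed ones; it assumes it lives alongside common.go so it can reuse the ptrSchemaType and ptrptr helpers defined there.

// intQueryParameter is a hypothetical helper shown only for illustration.
func intQueryParameter(name, description string, defaultValue int, minimum float64) openapi3.ParameterOrRef {
	return openapi3.ParameterOrRef{
		Parameter: &openapi3.Parameter{
			Name:        name,
			In:          openapi3.ParameterInQuery,
			Description: ptr.String(description),
			Required:    ptr.Bool(false),
			Schema: &openapi3.SchemaOrRef{
				Schema: &openapi3.Schema{
					Type:    ptrSchemaType(openapi3.SchemaTypeInteger),
					Default: ptrptr(defaultValue),
					Minimum: ptr.Float64(minimum),
				},
			},
		},
	}
}

// queryParameterPage could then read:
// var queryParameterPage = intQueryParameter(request.QueryParamPage, "The page to return.", 1, 1)

Parameters that also carry a maximum, such as the limit parameter, would need an extra argument or a separate helper.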
+ +package openapi + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/connector" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/types" + + "github.com/swaggest/openapi-go/openapi3" +) + +type createConnectorRequest struct { + connector.CreateInput +} + +type connectorRequest struct { + Ref string `path:"connector_ref"` +} + +type getConnectorRequest struct { + connectorRequest +} + +type updateConnectorRequest struct { + connectorRequest + connector.UpdateInput +} + +func connectorOperations(reflector *openapi3.Reflector) { + opCreate := openapi3.Operation{} + opCreate.WithTags("connector") + opCreate.WithMapOfAnything(map[string]interface{}{"operationId": "createConnector"}) + _ = reflector.SetRequest(&opCreate, new(createConnectorRequest), http.MethodPost) + _ = reflector.SetJSONResponse(&opCreate, new(types.Connector), http.StatusCreated) + _ = reflector.SetJSONResponse(&opCreate, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&opCreate, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opCreate, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opCreate, new(usererror.Error), http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodPost, "/connectors", opCreate) + + opFind := openapi3.Operation{} + opFind.WithTags("connector") + opFind.WithMapOfAnything(map[string]interface{}{"operationId": "findConnector"}) + _ = reflector.SetRequest(&opFind, new(getConnectorRequest), http.MethodGet) + _ = reflector.SetJSONResponse(&opFind, new(types.Connector), http.StatusOK) + _ = reflector.SetJSONResponse(&opFind, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opFind, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opFind, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opFind, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodGet, "/connectors/{connector_ref}", opFind) + + opDelete := openapi3.Operation{} + opDelete.WithTags("connector") + opDelete.WithMapOfAnything(map[string]interface{}{"operationId": "deleteConnector"}) + _ = reflector.SetRequest(&opDelete, new(getConnectorRequest), http.MethodDelete) + _ = reflector.SetJSONResponse(&opDelete, nil, http.StatusNoContent) + _ = reflector.SetJSONResponse(&opDelete, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opDelete, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opDelete, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opDelete, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodDelete, "/connectors/{connector_ref}", opDelete) + + opUpdate := openapi3.Operation{} + opUpdate.WithTags("connector") + opUpdate.WithMapOfAnything(map[string]interface{}{"operationId": "updateConnector"}) + _ = reflector.SetRequest(&opUpdate, new(updateConnectorRequest), http.MethodPatch) + _ = reflector.SetJSONResponse(&opUpdate, new(types.Connector), http.StatusOK) + _ = reflector.SetJSONResponse(&opUpdate, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&opUpdate, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opUpdate, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opUpdate, new(usererror.Error), http.StatusForbidden) + 
_ = reflector.SetJSONResponse(&opUpdate, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodPatch, "/connectors/{connector_ref}", opUpdate) +} diff --git a/internal/api/openapi/openapi.go b/internal/api/openapi/openapi.go new file mode 100644 index 0000000000..e5177d4f86 --- /dev/null +++ b/internal/api/openapi/openapi.go @@ -0,0 +1,90 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package openapi + +import ( + "github.com/harness/gitness/internal/config" + "github.com/harness/gitness/version" + + "github.com/swaggest/openapi-go/openapi3" +) + +type ( + paginationRequest struct { + Page int `query:"page" default:"1"` + Size int `query:"limit" default:"30"` + } +) + +// Generate is a helper function that constructs the +// openapi specification object, which can be marshaled +// to json or yaml, as needed. +func Generate() *openapi3.Spec { + reflector := openapi3.Reflector{} + reflector.Spec = &openapi3.Spec{Openapi: "3.0.0"} + reflector.Spec.Info. + WithTitle("API Specification"). + WithVersion(version.Version.String()) + reflector.Spec.Servers = []openapi3.Server{{ + URL: config.ApiURL, + }} + + // + // register endpoints + // + + buildSystem(&reflector) + buildAccount(&reflector) + buildUser(&reflector) + buildAdmin(&reflector) + buildPrincipals(&reflector) + spaceOperations(&reflector) + pluginOperations(&reflector) + repoOperations(&reflector) + pipelineOperations(&reflector) + connectorOperations(&reflector) + templateOperations(&reflector) + secretOperations(&reflector) + resourceOperations(&reflector) + pullReqOperations(&reflector) + webhookOperations(&reflector) + checkOperations(&reflector) + + // + // define security scheme + // + + scheme := openapi3.SecuritySchemeOrRef{ + SecurityScheme: &openapi3.SecurityScheme{ + HTTPSecurityScheme: &openapi3.HTTPSecurityScheme{ + Scheme: "bearerAuth", + Bearer: &openapi3.Bearer{}, + }, + }, + } + security := openapi3.ComponentsSecuritySchemes{} + security.WithMapOfSecuritySchemeOrRefValuesItem("bearerAuth", scheme) + reflector.Spec.Components.WithSecuritySchemes(security) + + // + // enforce security scheme globally + // + + reflector.Spec.WithSecurity(map[string][]string{ + "bearerAuth": {}, + }) + + return reflector.Spec +} diff --git a/internal/api/openapi/openapi_test.go b/internal/api/openapi/openapi_test.go new file mode 100644 index 0000000000..5dc7c11471 --- /dev/null +++ b/internal/api/openapi/openapi_test.go @@ -0,0 +1,15 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package openapi diff --git a/internal/api/openapi/pipeline.go b/internal/api/openapi/pipeline.go new file mode 100644 index 0000000000..3c0bca4732 --- /dev/null +++ b/internal/api/openapi/pipeline.go @@ -0,0 +1,311 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package openapi + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/pipeline" + "github.com/harness/gitness/internal/api/controller/trigger" + "github.com/harness/gitness/internal/api/request" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/livelog" + "github.com/harness/gitness/types" + + "github.com/gotidy/ptr" + "github.com/swaggest/openapi-go/openapi3" +) + +type pipelineRequest struct { + repoRequest + Ref string `path:"pipeline_uid"` +} + +type executionRequest struct { + pipelineRequest + Number string `path:"execution_number"` +} + +type triggerRequest struct { + pipelineRequest + UID string `path:"trigger_uid"` +} + +type logRequest struct { + executionRequest + StageNum string `path:"stage_number"` + StepNum string `path:"step_number"` +} + +type createExecutionRequest struct { + pipelineRequest +} + +type createTriggerRequest struct { + pipelineRequest + trigger.CreateInput +} + +type createPipelineRequest struct { + repoRequest + pipeline.CreateInput +} + +type getExecutionRequest struct { + executionRequest +} + +type getTriggerRequest struct { + triggerRequest +} + +type getPipelineRequest struct { + pipelineRequest +} + +type updateTriggerRequest struct { + triggerRequest + trigger.UpdateInput +} + +type updatePipelineRequest struct { + pipelineRequest + pipeline.UpdateInput +} + +var queryParameterLatest = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: request.QueryParamLatest, + In: openapi3.ParameterInQuery, + Description: ptr.String("Whether to fetch latest build information for each pipeline."), + Required: ptr.Bool(false), + Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeBoolean), + }, + }, + }, +} + +var queryParameterBranch = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: request.QueryParamBranch, + In: openapi3.ParameterInQuery, + Description: ptr.String("Branch to run the execution for."), + Required: ptr.Bool(false), + Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeString), + }, + }, + }, +} + +func pipelineOperations(reflector *openapi3.Reflector) { + opCreate := openapi3.Operation{} + opCreate.WithTags("pipeline") + opCreate.WithMapOfAnything(map[string]interface{}{"operationId": "createPipeline"}) + _ = reflector.SetRequest(&opCreate, new(createPipelineRequest), http.MethodPost) + _ = reflector.SetJSONResponse(&opCreate, new(types.Pipeline), http.StatusCreated) + _ = reflector.SetJSONResponse(&opCreate, new(usererror.Error), 
http.StatusBadRequest) + _ = reflector.SetJSONResponse(&opCreate, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opCreate, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opCreate, new(usererror.Error), http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodPost, "/repos/{repo_ref}/pipelines", opCreate) + + opPipelines := openapi3.Operation{} + opPipelines.WithTags("pipeline") + opPipelines.WithMapOfAnything(map[string]interface{}{"operationId": "listPipelines"}) + opPipelines.WithParameters(queryParameterQueryRepo, queryParameterPage, queryParameterLimit, queryParameterLatest) + _ = reflector.SetRequest(&opPipelines, new(repoRequest), http.MethodGet) + _ = reflector.SetJSONResponse(&opPipelines, []types.Pipeline{}, http.StatusOK) + _ = reflector.SetJSONResponse(&opPipelines, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opPipelines, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opPipelines, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opPipelines, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodGet, "/repos/{repo_ref}/pipelines", opPipelines) + + opFind := openapi3.Operation{} + opFind.WithTags("pipeline") + opFind.WithMapOfAnything(map[string]interface{}{"operationId": "findPipeline"}) + _ = reflector.SetRequest(&opFind, new(getPipelineRequest), http.MethodGet) + _ = reflector.SetJSONResponse(&opFind, new(types.Pipeline), http.StatusOK) + _ = reflector.SetJSONResponse(&opFind, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opFind, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opFind, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opFind, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodGet, "/repos/{repo_ref}/pipelines/{pipeline_uid}", opFind) + + opDelete := openapi3.Operation{} + opDelete.WithTags("pipeline") + opDelete.WithMapOfAnything(map[string]interface{}{"operationId": "deletePipeline"}) + _ = reflector.SetRequest(&opDelete, new(getPipelineRequest), http.MethodDelete) + _ = reflector.SetJSONResponse(&opDelete, nil, http.StatusNoContent) + _ = reflector.SetJSONResponse(&opDelete, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opDelete, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opDelete, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opDelete, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodDelete, "/repos/{repo_ref}/pipelines/{pipeline_uid}", opDelete) + + opUpdate := openapi3.Operation{} + opUpdate.WithTags("pipeline") + opUpdate.WithMapOfAnything(map[string]interface{}{"operationId": "updatePipeline"}) + _ = reflector.SetRequest(&opUpdate, new(updatePipelineRequest), http.MethodPatch) + _ = reflector.SetJSONResponse(&opUpdate, new(types.Pipeline), http.StatusOK) + _ = reflector.SetJSONResponse(&opUpdate, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&opUpdate, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opUpdate, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opUpdate, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opUpdate, 
new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodPatch, + "/repos/{repo_ref}/pipelines/{pipeline_uid}", opUpdate) + + executionCreate := openapi3.Operation{} + executionCreate.WithTags("pipeline") + executionCreate.WithParameters(queryParameterBranch) + executionCreate.WithMapOfAnything(map[string]interface{}{"operationId": "createExecution"}) + _ = reflector.SetRequest(&executionCreate, new(createExecutionRequest), http.MethodPost) + _ = reflector.SetJSONResponse(&executionCreate, new(types.Execution), http.StatusCreated) + _ = reflector.SetJSONResponse(&executionCreate, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&executionCreate, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&executionCreate, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&executionCreate, new(usererror.Error), http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodPost, + "/repos/{repo_ref}/pipelines/{pipeline_uid}/executions", executionCreate) + + executionFind := openapi3.Operation{} + executionFind.WithTags("pipeline") + executionFind.WithMapOfAnything(map[string]interface{}{"operationId": "findExecution"}) + _ = reflector.SetRequest(&executionFind, new(getExecutionRequest), http.MethodGet) + _ = reflector.SetJSONResponse(&executionFind, new(types.Execution), http.StatusOK) + _ = reflector.SetJSONResponse(&executionFind, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&executionFind, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&executionFind, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&executionFind, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodGet, + "/repos/{repo_ref}/pipelines/{pipeline_uid}/executions/{execution_number}", executionFind) + + executionCancel := openapi3.Operation{} + executionCancel.WithTags("pipeline") + executionCancel.WithMapOfAnything(map[string]interface{}{"operationId": "cancelExecution"}) + _ = reflector.SetRequest(&executionCancel, new(getExecutionRequest), http.MethodPost) + _ = reflector.SetJSONResponse(&executionCancel, new(types.Execution), http.StatusOK) + _ = reflector.SetJSONResponse(&executionCancel, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&executionCancel, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&executionCancel, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&executionCancel, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodPost, + "/repos/{repo_ref}/pipelines/{pipeline_uid}/executions/{execution_number}/cancel", executionCancel) + + executionDelete := openapi3.Operation{} + executionDelete.WithTags("pipeline") + executionDelete.WithMapOfAnything(map[string]interface{}{"operationId": "deleteExecution"}) + _ = reflector.SetRequest(&executionDelete, new(getExecutionRequest), http.MethodDelete) + _ = reflector.SetJSONResponse(&executionDelete, nil, http.StatusNoContent) + _ = reflector.SetJSONResponse(&executionDelete, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&executionDelete, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&executionDelete, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&executionDelete, new(usererror.Error), 
http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodDelete, + "/repos/{repo_ref}/pipelines/{pipeline_uid}/executions/{execution_number}", executionDelete) + + executionList := openapi3.Operation{} + executionList.WithTags("pipeline") + executionList.WithMapOfAnything(map[string]interface{}{"operationId": "listExecutions"}) + executionList.WithParameters(queryParameterPage, queryParameterLimit) + _ = reflector.SetRequest(&executionList, new(pipelineRequest), http.MethodGet) + _ = reflector.SetJSONResponse(&executionList, []types.Execution{}, http.StatusOK) + _ = reflector.SetJSONResponse(&executionList, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&executionList, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&executionList, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&executionList, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodGet, + "/repos/{repo_ref}/pipelines/{pipeline_uid}/executions", executionList) + + triggerCreate := openapi3.Operation{} + triggerCreate.WithTags("pipeline") + triggerCreate.WithMapOfAnything(map[string]interface{}{"operationId": "createTrigger"}) + _ = reflector.SetRequest(&triggerCreate, new(createTriggerRequest), http.MethodPost) + _ = reflector.SetJSONResponse(&triggerCreate, new(types.Trigger), http.StatusCreated) + _ = reflector.SetJSONResponse(&triggerCreate, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&triggerCreate, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&triggerCreate, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&triggerCreate, new(usererror.Error), http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodPost, + "/repos/{repo_ref}/pipelines/{pipeline_uid}/triggers", triggerCreate) + + triggerFind := openapi3.Operation{} + triggerFind.WithTags("pipeline") + triggerFind.WithMapOfAnything(map[string]interface{}{"operationId": "findTrigger"}) + _ = reflector.SetRequest(&triggerFind, new(getTriggerRequest), http.MethodGet) + _ = reflector.SetJSONResponse(&triggerFind, new(types.Trigger), http.StatusOK) + _ = reflector.SetJSONResponse(&triggerFind, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&triggerFind, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&triggerFind, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&triggerFind, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodGet, + "/repos/{repo_ref}/pipelines/{pipeline_uid}/triggers/{trigger_uid}", triggerFind) + + triggerDelete := openapi3.Operation{} + triggerDelete.WithTags("pipeline") + triggerDelete.WithMapOfAnything(map[string]interface{}{"operationId": "deleteTrigger"}) + _ = reflector.SetRequest(&triggerDelete, new(getTriggerRequest), http.MethodDelete) + _ = reflector.SetJSONResponse(&triggerDelete, nil, http.StatusNoContent) + _ = reflector.SetJSONResponse(&triggerDelete, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&triggerDelete, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&triggerDelete, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&triggerDelete, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodDelete, + 
"/repos/{repo_ref}/pipelines/{pipeline_uid}/triggers/{trigger_uid}", triggerDelete) + + triggerUpdate := openapi3.Operation{} + triggerUpdate.WithTags("pipeline") + triggerUpdate.WithMapOfAnything(map[string]interface{}{"operationId": "updateTrigger"}) + _ = reflector.SetRequest(&triggerUpdate, new(updateTriggerRequest), http.MethodPatch) + _ = reflector.SetJSONResponse(&triggerUpdate, new(types.Trigger), http.StatusOK) + _ = reflector.SetJSONResponse(&triggerUpdate, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&triggerUpdate, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&triggerUpdate, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&triggerUpdate, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&triggerUpdate, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodPatch, + "/repos/{repo_ref}/pipelines/{pipeline_uid}/triggers/{trigger_uid}", triggerUpdate) + + triggerList := openapi3.Operation{} + triggerList.WithTags("pipeline") + triggerList.WithMapOfAnything(map[string]interface{}{"operationId": "listTriggers"}) + triggerList.WithParameters(queryParameterQueryRepo, queryParameterPage, queryParameterLimit) + _ = reflector.SetRequest(&triggerList, new(pipelineRequest), http.MethodGet) + _ = reflector.SetJSONResponse(&triggerList, []types.Trigger{}, http.StatusOK) + _ = reflector.SetJSONResponse(&triggerList, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&triggerList, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&triggerList, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&triggerList, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodGet, + "/repos/{repo_ref}/pipelines/{pipeline_uid}/triggers", triggerList) + + logView := openapi3.Operation{} + logView.WithTags("pipeline") + logView.WithMapOfAnything(map[string]interface{}{"operationId": "viewLogs"}) + _ = reflector.SetRequest(&logView, new(logRequest), http.MethodGet) + _ = reflector.SetStringResponse(&logView, http.StatusOK, "application/json") + _ = reflector.SetJSONResponse(&logView, []*livelog.Line{}, http.StatusOK) + _ = reflector.SetJSONResponse(&logView, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&logView, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&logView, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&logView, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodGet, + "/repos/{repo_ref}/pipelines/{pipeline_uid}/executions/{execution_number}/logs/{stage_number}/{step_number}", logView) +} diff --git a/internal/api/openapi/plugin.go b/internal/api/openapi/plugin.go new file mode 100644 index 0000000000..98f8a258e1 --- /dev/null +++ b/internal/api/openapi/plugin.go @@ -0,0 +1,57 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package openapi + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/request" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/types" + + "github.com/gotidy/ptr" + "github.com/swaggest/openapi-go/openapi3" +) + +var queryParameterQueryPlugin = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: request.QueryParamQuery, + In: openapi3.ParameterInQuery, + Description: ptr.String("The substring which is used to filter the plugins by their name."), + Required: ptr.Bool(false), + Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeString), + }, + }, + }, +} + +type getPluginsRequest struct { +} + +func pluginOperations(reflector *openapi3.Reflector) { + opPlugins := openapi3.Operation{} + opPlugins.WithTags("plugins") + opPlugins.WithMapOfAnything(map[string]interface{}{"operationId": "listPlugins"}) + opPlugins.WithParameters(queryParameterPage, queryParameterLimit, queryParameterQueryPlugin) + _ = reflector.SetRequest(&opPlugins, new(getPluginsRequest), http.MethodGet) + _ = reflector.SetJSONResponse(&opPlugins, []types.Plugin{}, http.StatusOK) + _ = reflector.SetJSONResponse(&opPlugins, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opPlugins, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opPlugins, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opPlugins, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodGet, "/plugins", opPlugins) +} diff --git a/internal/api/openapi/principals.go b/internal/api/openapi/principals.go new file mode 100644 index 0000000000..5906f2760a --- /dev/null +++ b/internal/api/openapi/principals.go @@ -0,0 +1,96 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package openapi + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/request" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/gotidy/ptr" + "github.com/swaggest/openapi-go/openapi3" +) + +type principalRequest struct { +} + +var queryParameterQueryPrincipals = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: request.QueryParamQuery, + In: openapi3.ParameterInQuery, + Description: ptr.String("The substring by which the principals are filtered."), + Required: ptr.Bool(false), + Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeString), + }, + }, + }, +} + +// TODO: this should not be in standalone swagger. 
+// https://harness.atlassian.net/browse/CODE-521 +var queryParameterAccountID = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: "accountIdentifier", + In: openapi3.ParameterInQuery, + Description: ptr.String("The account ID the principals are retrieved for (Not required in standalone)."), + Required: ptr.Bool(false), + Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeString), + }, + }, + }, +} + +var queryParameterPrincipalTypes = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: request.QueryParamType, + In: openapi3.ParameterInQuery, + Description: ptr.String("The types of principals to include."), + Required: ptr.Bool(false), + Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeArray), + Items: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeString), + Enum: enum.PrincipalType("").Enum(), + }, + }, + }, + }, + }, +} + +// buildPrincipals function that constructs the openapi specification +// for principal resources. +func buildPrincipals(reflector *openapi3.Reflector) { + opList := openapi3.Operation{} + opList.WithTags("principals") + opList.WithMapOfAnything(map[string]interface{}{"operationId": "listPrincipals"}) + opList.WithParameters(queryParameterQueryPrincipals, queryParameterAccountID, queryParameterPage, + queryParameterLimit, queryParameterPrincipalTypes) + _ = reflector.SetRequest(&opList, new(principalRequest), http.MethodGet) + _ = reflector.SetJSONResponse(&opList, new([]types.PrincipalInfo), http.StatusOK) + _ = reflector.SetJSONResponse(&opList, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&opList, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opList, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodGet, "/principals", opList) +} diff --git a/internal/api/openapi/pullreq.go b/internal/api/openapi/pullreq.go new file mode 100644 index 0000000000..d65e01a093 --- /dev/null +++ b/internal/api/openapi/pullreq.go @@ -0,0 +1,543 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
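The Generate function in openapi.go above assembles all of these registrations into a single *openapi3.Spec. A minimal sketch for inspecting its output is shown below; it assumes the code runs inside this module (the package is internal) and that the spec marshals with encoding/json.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/harness/gitness/internal/api/openapi"
)

func main() {
	// Build the full spec registered by the build*/ *Operations functions.
	spec := openapi.Generate()

	// Pretty-print the generated OpenAPI 3.0 document as JSON.
	raw, err := json.MarshalIndent(spec, "", "  ")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(raw))
}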
+ +package openapi + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/pullreq" + "github.com/harness/gitness/internal/api/request" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/gotidy/ptr" + "github.com/swaggest/openapi-go/openapi3" +) + +type createPullReqRequest struct { + repoRequest + pullreq.CreateInput +} + +type listPullReqRequest struct { + repoRequest +} + +type pullReqRequest struct { + repoRequest + ID int64 `path:"pullreq_number"` +} + +type getPullReqRequest struct { + pullReqRequest +} + +type updatePullReqRequest struct { + pullReqRequest + pullreq.UpdateInput +} + +type statePullReqRequest struct { + pullReqRequest + pullreq.StateInput +} + +type listPullReqActivitiesRequest struct { + pullReqRequest +} + +type mergePullReq struct { + pullReqRequest + pullreq.MergeInput +} + +type commentCreatePullReqRequest struct { + pullReqRequest + pullreq.CommentCreateInput +} + +type pullReqCommentRequest struct { + pullReqRequest + ID int64 `path:"pullreq_comment_id"` +} + +type commentUpdatePullReqRequest struct { + pullReqCommentRequest + pullreq.CommentUpdateInput +} + +type commentDeletePullReqRequest struct { + pullReqCommentRequest +} + +type commentStatusPullReqRequest struct { + pullReqCommentRequest + pullreq.CommentStatusInput +} + +type reviewerListPullReqRequest struct { + pullReqRequest +} + +type reviewerDeletePullReqRequest struct { + pullReqRequest + PullReqReviewerID int64 `path:"pullreq_reviewer_id"` +} + +type reviewerAddPullReqRequest struct { + pullReqRequest + pullreq.ReviewerAddInput +} + +type reviewSubmitPullReqRequest struct { + pullreq.ReviewSubmitInput + pullReqRequest +} + +type fileViewAddPullReqRequest struct { + pullReqRequest + pullreq.FileViewAddInput +} + +type fileViewListPullReqRequest struct { + pullReqRequest +} + +type fileViewDeletePullReqRequest struct { + pullReqRequest + Path string `path:"file_path"` +} + +var queryParameterQueryPullRequest = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: request.QueryParamQuery, + In: openapi3.ParameterInQuery, + Description: ptr.String("The substring by which the pull requests are filtered."), + Required: ptr.Bool(false), + Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeString), + }, + }, + }, +} + +var queryParameterSourceRepoRefPullRequest = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: "source_repo_ref", + In: openapi3.ParameterInQuery, + Description: ptr.String("Source repository ref of the pull requests."), + Required: ptr.Bool(false), + Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeString), + }, + }, + }, +} + +var queryParameterSourceBranchPullRequest = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: "source_branch", + In: openapi3.ParameterInQuery, + Description: ptr.String("Source branch of the pull requests."), + Required: ptr.Bool(false), + Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeString), + }, + }, + }, +} + +var queryParameterTargetBranchPullRequest = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: "target_branch", + In: openapi3.ParameterInQuery, + Description: ptr.String("Target branch of the pull requests."), + Required: ptr.Bool(false), + Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: 
ptrSchemaType(openapi3.SchemaTypeString), + }, + }, + }, +} + +var queryParameterCreatedByPullRequest = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: request.QueryParamCreatedBy, + In: openapi3.ParameterInQuery, + Description: ptr.String("The principal ID who created pull requests."), + Required: ptr.Bool(false), + Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeInteger), + }, + }, + }, +} + +var queryParameterStatePullRequest = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: request.QueryParamState, + In: openapi3.ParameterInQuery, + Description: ptr.String("The state of the pull requests to include in the result."), + Required: ptr.Bool(false), + Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeArray), + Items: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeString), + Default: ptrptr(string(enum.PullReqStateOpen)), + Enum: enum.PullReqState("").Enum(), + }, + }, + }, + }, + }, +} + +var queryParameterSortPullRequest = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: request.QueryParamSort, + In: openapi3.ParameterInQuery, + Description: ptr.String("The data by which the pull requests are sorted."), + Required: ptr.Bool(false), + Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeString), + Default: ptrptr(enum.PullReqSortNumber), + Enum: enum.PullReqSort("").Enum(), + }, + }, + }, +} + +var queryParameterKindPullRequestActivity = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: request.QueryParamKind, + In: openapi3.ParameterInQuery, + Description: ptr.String("The kind of the pull request activity to include in the result."), + Required: ptr.Bool(false), + Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeArray), + Items: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeString), + Enum: enum.PullReqActivityKind("").Enum(), + }, + }, + }, + }, + }, +} + +var queryParameterTypePullRequestActivity = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: request.QueryParamType, + In: openapi3.ParameterInQuery, + Description: ptr.String("The type of the pull request activity to include in the result."), + Required: ptr.Bool(false), + Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeArray), + Items: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeString), + Enum: enum.PullReqActivityType("").Enum(), + }, + }, + }, + }, + }, +} + +var queryParameterBeforePullRequestActivity = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: request.QueryParamBefore, + In: openapi3.ParameterInQuery, + Description: ptr.String("The result should contain only entries created before this timestamp (unix millis)."), + Required: ptr.Bool(false), + Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeInteger), + Minimum: ptr.Float64(0), + }, + }, + }, +} + +//nolint:funlen +func pullReqOperations(reflector *openapi3.Reflector) { + createPullReq := openapi3.Operation{} + createPullReq.WithTags("pullreq") + createPullReq.WithMapOfAnything(map[string]interface{}{"operationId": "createPullReq"}) + _ = reflector.SetRequest(&createPullReq, new(createPullReqRequest), http.MethodPost) + _ = 
reflector.SetJSONResponse(&createPullReq, new(types.PullReq), http.StatusCreated) + _ = reflector.SetJSONResponse(&createPullReq, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&createPullReq, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&createPullReq, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&createPullReq, new(usererror.Error), http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodPost, "/repos/{repo_ref}/pullreq", createPullReq) + + listPullReq := openapi3.Operation{} + listPullReq.WithTags("pullreq") + listPullReq.WithMapOfAnything(map[string]interface{}{"operationId": "listPullReq"}) + listPullReq.WithParameters( + queryParameterStatePullRequest, queryParameterSourceRepoRefPullRequest, + queryParameterSourceBranchPullRequest, queryParameterTargetBranchPullRequest, + queryParameterQueryPullRequest, queryParameterCreatedByPullRequest, + queryParameterOrder, queryParameterSortPullRequest, + queryParameterPage, queryParameterLimit) + _ = reflector.SetRequest(&listPullReq, new(listPullReqRequest), http.MethodGet) + _ = reflector.SetJSONResponse(&listPullReq, new([]types.PullReq), http.StatusOK) + _ = reflector.SetJSONResponse(&listPullReq, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&listPullReq, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&listPullReq, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&listPullReq, new(usererror.Error), http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodGet, "/repos/{repo_ref}/pullreq", listPullReq) + + getPullReq := openapi3.Operation{} + getPullReq.WithTags("pullreq") + getPullReq.WithMapOfAnything(map[string]interface{}{"operationId": "getPullReq"}) + _ = reflector.SetRequest(&getPullReq, new(getPullReqRequest), http.MethodGet) + _ = reflector.SetJSONResponse(&getPullReq, new(types.PullReq), http.StatusOK) + _ = reflector.SetJSONResponse(&getPullReq, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&getPullReq, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&getPullReq, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&getPullReq, new(usererror.Error), http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodGet, "/repos/{repo_ref}/pullreq/{pullreq_number}", getPullReq) + + putPullReq := openapi3.Operation{} + putPullReq.WithTags("pullreq") + putPullReq.WithMapOfAnything(map[string]interface{}{"operationId": "updatePullReq"}) + _ = reflector.SetRequest(&putPullReq, new(updatePullReqRequest), http.MethodPatch) + _ = reflector.SetJSONResponse(&putPullReq, new(types.PullReq), http.StatusOK) + _ = reflector.SetJSONResponse(&putPullReq, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&putPullReq, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&putPullReq, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&putPullReq, new(usererror.Error), http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodPatch, "/repos/{repo_ref}/pullreq/{pullreq_number}", putPullReq) + + statePullReq := openapi3.Operation{} + statePullReq.WithTags("pullreq") + statePullReq.WithMapOfAnything(map[string]interface{}{"operationId": "statePullReq"}) + _ = reflector.SetRequest(&statePullReq, new(statePullReqRequest), http.MethodPatch) + _ = 
reflector.SetJSONResponse(&statePullReq, new(types.PullReq), http.StatusOK) + _ = reflector.SetJSONResponse(&statePullReq, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&statePullReq, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&statePullReq, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&statePullReq, new(usererror.Error), http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodPost, "/repos/{repo_ref}/pullreq/{pullreq_number}/state", statePullReq) + + listPullReqActivities := openapi3.Operation{} + listPullReqActivities.WithTags("pullreq") + listPullReqActivities.WithMapOfAnything(map[string]interface{}{"operationId": "listPullReqActivities"}) + listPullReqActivities.WithParameters( + queryParameterKindPullRequestActivity, queryParameterTypePullRequestActivity, + queryParameterAfter, queryParameterBeforePullRequestActivity, queryParameterLimit) + _ = reflector.SetRequest(&listPullReqActivities, new(listPullReqActivitiesRequest), http.MethodGet) + _ = reflector.SetJSONResponse(&listPullReqActivities, new([]types.PullReqActivity), http.StatusOK) + _ = reflector.SetJSONResponse(&listPullReqActivities, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&listPullReqActivities, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&listPullReqActivities, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&listPullReqActivities, new(usererror.Error), http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodGet, + "/repos/{repo_ref}/pullreq/{pullreq_number}/activities", listPullReqActivities) + + commentCreatePullReq := openapi3.Operation{} + commentCreatePullReq.WithTags("pullreq") + commentCreatePullReq.WithMapOfAnything(map[string]interface{}{"operationId": "commentCreatePullReq"}) + _ = reflector.SetRequest(&commentCreatePullReq, new(commentCreatePullReqRequest), http.MethodPost) + _ = reflector.SetJSONResponse(&commentCreatePullReq, new(types.PullReqActivity), http.StatusOK) + _ = reflector.SetJSONResponse(&commentCreatePullReq, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&commentCreatePullReq, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&commentCreatePullReq, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&commentCreatePullReq, new(usererror.Error), http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodPost, + "/repos/{repo_ref}/pullreq/{pullreq_number}/comments", commentCreatePullReq) + + commentUpdatePullReq := openapi3.Operation{} + commentUpdatePullReq.WithTags("pullreq") + commentUpdatePullReq.WithMapOfAnything(map[string]interface{}{"operationId": "commentUpdatePullReq"}) + _ = reflector.SetRequest(&commentUpdatePullReq, new(commentUpdatePullReqRequest), http.MethodPatch) + _ = reflector.SetJSONResponse(&commentUpdatePullReq, new(types.PullReqActivity), http.StatusOK) + _ = reflector.SetJSONResponse(&commentUpdatePullReq, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&commentUpdatePullReq, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&commentUpdatePullReq, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&commentUpdatePullReq, new(usererror.Error), http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodPatch, + 
"/repos/{repo_ref}/pullreq/{pullreq_number}/comments/{pullreq_comment_id}", commentUpdatePullReq) + + commentDeletePullReq := openapi3.Operation{} + commentDeletePullReq.WithTags("pullreq") + commentDeletePullReq.WithMapOfAnything(map[string]interface{}{"operationId": "commentDeletePullReq"}) + _ = reflector.SetRequest(&commentDeletePullReq, new(commentDeletePullReqRequest), http.MethodDelete) + _ = reflector.SetJSONResponse(&commentDeletePullReq, nil, http.StatusNoContent) + _ = reflector.SetJSONResponse(&commentDeletePullReq, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&commentDeletePullReq, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&commentDeletePullReq, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&commentDeletePullReq, new(usererror.Error), http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodDelete, + "/repos/{repo_ref}/pullreq/{pullreq_number}/comments/{pullreq_comment_id}", commentDeletePullReq) + + commentStatusPullReq := openapi3.Operation{} + commentStatusPullReq.WithTags("pullreq") + commentStatusPullReq.WithMapOfAnything(map[string]interface{}{"operationId": "commentStatusPullReq"}) + _ = reflector.SetRequest(&commentStatusPullReq, new(commentStatusPullReqRequest), http.MethodPut) + _ = reflector.SetJSONResponse(&commentStatusPullReq, new(types.PullReqActivity), http.StatusOK) + _ = reflector.SetJSONResponse(&commentStatusPullReq, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&commentStatusPullReq, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&commentStatusPullReq, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&commentStatusPullReq, new(usererror.Error), http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodPut, + "/repos/{repo_ref}/pullreq/{pullreq_number}/comments/{pullreq_comment_id}/status", commentStatusPullReq) + + reviewerAdd := openapi3.Operation{} + reviewerAdd.WithTags("pullreq") + reviewerAdd.WithMapOfAnything(map[string]interface{}{"operationId": "reviewerAddPullReq"}) + _ = reflector.SetRequest(&reviewerAdd, new(reviewerAddPullReqRequest), http.MethodPut) + _ = reflector.SetJSONResponse(&reviewerAdd, new(types.PullReqReviewer), http.StatusOK) + _ = reflector.SetJSONResponse(&reviewerAdd, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&reviewerAdd, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&reviewerAdd, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&reviewerAdd, new(usererror.Error), http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodPut, + "/repos/{repo_ref}/pullreq/{pullreq_number}/reviewers", reviewerAdd) + + reviewerList := openapi3.Operation{} + reviewerList.WithTags("pullreq") + reviewerList.WithMapOfAnything(map[string]interface{}{"operationId": "reviewerListPullReq"}) + _ = reflector.SetRequest(&reviewerList, new(reviewerListPullReqRequest), http.MethodGet) + _ = reflector.SetJSONResponse(&reviewerList, new([]*types.PullReqReviewer), http.StatusOK) + _ = reflector.SetJSONResponse(&reviewerList, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&reviewerList, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&reviewerList, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&reviewerList, new(usererror.Error), 
http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodGet, + "/repos/{repo_ref}/pullreq/{pullreq_number}/reviewers", reviewerList) + + reviewerDelete := openapi3.Operation{} + reviewerDelete.WithTags("pullreq") + reviewerDelete.WithMapOfAnything(map[string]interface{}{"operationId": "reviewerDeletePullReq"}) + _ = reflector.SetRequest(&reviewerDelete, new(reviewerDeletePullReqRequest), http.MethodDelete) + _ = reflector.SetJSONResponse(&reviewerDelete, nil, http.StatusNoContent) + _ = reflector.SetJSONResponse(&reviewerDelete, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&reviewerDelete, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&reviewerDelete, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&reviewerDelete, new(usererror.Error), http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodDelete, + "/repos/{repo_ref}/pullreq/{pullreq_number}/reviewers/{pullreq_reviewer_id}", reviewerDelete) + + reviewSubmit := openapi3.Operation{} + reviewSubmit.WithTags("pullreq") + reviewSubmit.WithMapOfAnything(map[string]interface{}{"operationId": "reviewSubmitPullReq"}) + _ = reflector.SetRequest(&reviewSubmit, new(reviewSubmitPullReqRequest), http.MethodPost) + _ = reflector.SetJSONResponse(&reviewSubmit, nil, http.StatusNoContent) + _ = reflector.SetJSONResponse(&reviewSubmit, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&reviewSubmit, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&reviewSubmit, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&reviewSubmit, new(usererror.Error), http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodPost, + "/repos/{repo_ref}/pullreq/{pullreq_number}/reviews", reviewSubmit) + mergePullReqOp := openapi3.Operation{} + mergePullReqOp.WithTags("pullreq") + mergePullReqOp.WithMapOfAnything(map[string]interface{}{"operationId": "mergePullReqOp"}) + _ = reflector.SetRequest(&mergePullReqOp, new(mergePullReq), http.MethodPost) + _ = reflector.SetJSONResponse(&mergePullReqOp, new(types.MergeResponse), http.StatusOK) + _ = reflector.SetJSONResponse(&mergePullReqOp, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&mergePullReqOp, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&mergePullReqOp, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&mergePullReqOp, new(usererror.Error), http.StatusNotFound) + _ = reflector.SetJSONResponse(&mergePullReqOp, new(usererror.Error), http.StatusMethodNotAllowed) + _ = reflector.SetJSONResponse(&mergePullReqOp, new(usererror.Error), http.StatusConflict) + _ = reflector.SetJSONResponse(&mergePullReqOp, new(usererror.Error), http.StatusUnprocessableEntity) + _ = reflector.Spec.AddOperation(http.MethodPost, + "/repos/{repo_ref}/pullreq/{pullreq_number}/merge", mergePullReqOp) + + opListCommits := openapi3.Operation{} + opListCommits.WithTags("pullreq") + opListCommits.WithMapOfAnything(map[string]interface{}{"operationId": "listPullReqCommits"}) + opListCommits.WithParameters(queryParameterPage, queryParameterLimit) + _ = reflector.SetRequest(&opListCommits, new(pullReqRequest), http.MethodGet) + _ = reflector.SetJSONResponse(&opListCommits, []types.Commit{}, http.StatusOK) + _ = reflector.SetJSONResponse(&opListCommits, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opListCommits, 
new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opListCommits, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opListCommits, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodGet, "/repos/{repo_ref}/pullreq/{pullreq_number}/commits", opListCommits) + + opMetaData := openapi3.Operation{} + opMetaData.WithTags("pullreq") + opMetaData.WithMapOfAnything(map[string]interface{}{"operationId": "pullReqMetaData"}) + _ = reflector.SetRequest(&opMetaData, new(pullReqRequest), http.MethodGet) + _ = reflector.SetJSONResponse(&opMetaData, new(types.PullReqStats), http.StatusOK) + _ = reflector.SetJSONResponse(&opMetaData, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opMetaData, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opMetaData, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opMetaData, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodGet, "/repos/{repo_ref}/pullreq/{pullreq_number}/metadata", opMetaData) + + recheckPullReq := openapi3.Operation{} + recheckPullReq.WithTags("pullreq") + recheckPullReq.WithMapOfAnything(map[string]interface{}{"operationId": "recheckPullReq"}) + _ = reflector.SetRequest(&recheckPullReq, nil, http.MethodPost) + _ = reflector.SetJSONResponse(&recheckPullReq, nil, http.StatusNoContent) + _ = reflector.SetJSONResponse(&recheckPullReq, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&recheckPullReq, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&recheckPullReq, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&recheckPullReq, new(usererror.Error), http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodPost, "/repos/{repo_ref}/pullreq/{pullreq_number}/recheck", recheckPullReq) + + fileViewAdd := openapi3.Operation{} + fileViewAdd.WithTags("pullreq") + fileViewAdd.WithMapOfAnything(map[string]interface{}{"operationId": "fileViewAddPullReq"}) + _ = reflector.SetRequest(&fileViewAdd, new(fileViewAddPullReqRequest), http.MethodPut) + _ = reflector.SetJSONResponse(&fileViewAdd, new(types.PullReqFileView), http.StatusOK) + _ = reflector.SetJSONResponse(&fileViewAdd, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&fileViewAdd, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&fileViewAdd, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&fileViewAdd, new(usererror.Error), http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodPut, + "/repos/{repo_ref}/pullreq/{pullreq_number}/file-views", fileViewAdd) + + fileViewList := openapi3.Operation{} + fileViewList.WithTags("pullreq") + fileViewList.WithMapOfAnything(map[string]interface{}{"operationId": "fileViewListPullReq"}) + _ = reflector.SetRequest(&fileViewList, new(fileViewListPullReqRequest), http.MethodGet) + _ = reflector.SetJSONResponse(&fileViewList, []types.PullReqFileView{}, http.StatusOK) + _ = reflector.SetJSONResponse(&fileViewList, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&fileViewList, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&fileViewList, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&fileViewList, new(usererror.Error), http.StatusForbidden) + _ = 
reflector.Spec.AddOperation(http.MethodGet, + "/repos/{repo_ref}/pullreq/{pullreq_number}/file-views", fileViewList) + + fileViewDelete := openapi3.Operation{} + fileViewDelete.WithTags("pullreq") + fileViewDelete.WithMapOfAnything(map[string]interface{}{"operationId": "fileViewDeletePullReq"}) + _ = reflector.SetRequest(&fileViewDelete, new(fileViewDeletePullReqRequest), http.MethodDelete) + _ = reflector.SetJSONResponse(&fileViewDelete, nil, http.StatusNoContent) + _ = reflector.SetJSONResponse(&fileViewDelete, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&fileViewDelete, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&fileViewDelete, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&fileViewDelete, new(usererror.Error), http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodDelete, + "/repos/{repo_ref}/pullreq/{pullreq_number}/file-views/{file_path}", fileViewDelete) +} diff --git a/internal/api/openapi/repo.go b/internal/api/openapi/repo.go new file mode 100644 index 0000000000..7a5d73ae55 --- /dev/null +++ b/internal/api/openapi/repo.go @@ -0,0 +1,679 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package openapi + +import ( + "net/http" + + "github.com/harness/gitness/gitrpc" + "github.com/harness/gitness/internal/api/controller/repo" + "github.com/harness/gitness/internal/api/request" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/gotidy/ptr" + "github.com/swaggest/openapi-go/openapi3" +) + +type createRepositoryRequest struct { + repo.CreateInput +} + +type gitignoreRequest struct { +} + +type licenseRequest struct { +} + +type repoRequest struct { + Ref string `path:"repo_ref"` +} + +type updateRepoRequest struct { + repoRequest + repo.UpdateInput +} + +type moveRepoRequest struct { + repoRequest + repo.MoveInput +} + +type getContentRequest struct { + repoRequest + Path string `path:"path"` +} + +type pathsDetailsRequest struct { + repoRequest + repo.PathsDetailsInput +} + +type getBlameRequest struct { + repoRequest + Path string `path:"path"` +} + +type commitFilesRequest struct { + repoRequest + repo.CommitFilesOptions +} + +// contentType is a plugin for repo.ContentType to allow using oneof. +type contentType string + +func (contentType) Enum() []interface{} { + return []interface{}{repo.ContentTypeFile, repo.ContentTypeDir, repo.ContentTypeSymlink, repo.ContentTypeSubmodule} +} + +// contentInfo is used to overshadow the contentype of repo.ContentInfo. +type contentInfo struct { + repo.ContentInfo + Type contentType `json:"type"` +} + +// dirContent is used to overshadow the Entries type of repo.DirContent. +type dirContent struct { + repo.DirContent + Entries []contentInfo `json:"entries"` +} + +// content is a plugin for repo.content to allow using oneof. 
+type content struct{} + +func (content) JSONSchemaOneOf() []interface{} { + return []interface{}{repo.FileContent{}, dirContent{}, repo.SymlinkContent{}, repo.SubmoduleContent{}} +} + +// getContentOutput is used to overshadow the content and contenttype of repo.GetContentOutput. +type getContentOutput struct { + repo.GetContentOutput + Type contentType `json:"type"` + Content content `json:"content"` +} + +type listCommitsRequest struct { + repoRequest +} + +type GetCommitRequest struct { + repoRequest + CommitSHA string `path:"commit_sha"` +} + +type calculateCommitDivergenceRequest struct { + repoRequest + repo.GetCommitDivergencesInput +} + +type listBranchesRequest struct { + repoRequest +} +type createBranchRequest struct { + repoRequest + repo.CreateBranchInput +} + +type getBranchRequest struct { + repoRequest + BranchName string `path:"branch_name"` +} + +type deleteBranchRequest struct { + repoRequest + BranchName string `path:"branch_name"` +} + +type createTagRequest struct { + repoRequest + repo.CreateCommitTagInput +} + +type listTagsRequest struct { + repoRequest +} + +type deleteTagRequest struct { + repoRequest + TagName string `path:"tag_name"` +} + +type getRawDiffRequest struct { + repoRequest + Range string `path:"range" example:"main..dev"` +} + +var queryParameterGitRef = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: request.QueryParamGitRef, + In: openapi3.ParameterInQuery, + Description: ptr.String("The git reference (branch / tag / commitID) that will be used to retrieve the data. " + + "If no value is provided the default branch of the repository is used."), + Required: ptr.Bool(false), + Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeString), + Default: ptrptr("{Repository Default Branch}"), + }, + }, + }, +} + +var queryParameterPath = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: request.QueryParamPath, + In: openapi3.ParameterInQuery, + Description: ptr.String("Path for which commit information should be retrieved"), + Required: ptr.Bool(false), + Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeString), + Default: ptrptr(""), + }, + }, + }, +} + +var queryParameterSince = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: request.QueryParamSince, + In: openapi3.ParameterInQuery, + Description: ptr.String("Epoch since when commit information should be retrieved."), + Required: ptr.Bool(false), + Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeInteger), + }, + }, + }, +} + +var queryParameterUntil = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: request.QueryParamUntil, + In: openapi3.ParameterInQuery, + Description: ptr.String("Epoch until when commit information should be retrieved."), + Required: ptr.Bool(false), + Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeInteger), + }, + }, + }, +} + +var queryParameterIncludeCommit = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: request.QueryParamIncludeCommit, + In: openapi3.ParameterInQuery, + Description: ptr.String("Indicates whether optional commit information should be included in the response."), + Required: ptr.Bool(false), + Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeBoolean), + Default: ptrptr(false), + }, + }, + }, +} + +var 
queryParameterLineFrom = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: request.QueryParamLineFrom, + In: openapi3.ParameterInQuery, + Description: ptr.String("Line number from which the file data is considered"), + Required: ptr.Bool(false), + Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeInteger), + Default: ptrptr(0), + }, + }, + }, +} + +var queryParameterLineTo = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: request.QueryParamLineTo, + In: openapi3.ParameterInQuery, + Description: ptr.String("Line number to which the file data is considered"), + Required: ptr.Bool(false), + Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeInteger), + Default: ptrptr(0), + }, + }, + }, +} + +// TODO: this is technically coming from harness package, but we can't reference that. +var queryParameterSpacePath = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: "space_path", + In: openapi3.ParameterInQuery, + Description: ptr.String("path of parent space (Not needed in standalone)."), + Required: ptr.Bool(false), + Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeString), + Default: ptrptr(false), + }, + }, + }, +} + +var queryParameterSortBranch = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: request.QueryParamSort, + In: openapi3.ParameterInQuery, + Description: ptr.String("The data by which the branches are sorted."), + Required: ptr.Bool(false), + Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeString), + Default: ptrptr(enum.BranchSortOptionName.String()), + Enum: []interface{}{ + ptr.String(enum.BranchSortOptionName.String()), + ptr.String(enum.BranchSortOptionDate.String()), + }, + }, + }, + }, +} + +var queryParameterQueryBranches = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: request.QueryParamQuery, + In: openapi3.ParameterInQuery, + Description: ptr.String("The substring by which the branches are filtered."), + Required: ptr.Bool(false), + Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeString), + }, + }, + }, +} + +var queryParameterSortTags = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: request.QueryParamSort, + In: openapi3.ParameterInQuery, + Description: ptr.String("The data by which the tags are sorted."), + Required: ptr.Bool(false), + Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeString), + Default: ptrptr(enum.TagSortOptionName.String()), + Enum: []interface{}{ + ptr.String(enum.TagSortOptionName.String()), + ptr.String(enum.TagSortOptionDate.String()), + }, + }, + }, + }, +} + +var queryParameterQueryTags = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: request.QueryParamQuery, + In: openapi3.ParameterInQuery, + Description: ptr.String("The substring by which the tags are filtered."), + Required: ptr.Bool(false), + Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeString), + }, + }, + }, +} + +var queryParameterAfterCommits = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: request.QueryParamAfter, + In: openapi3.ParameterInQuery, + Description: ptr.String("The result should only contain commits that occurred after the provided reference."), + Required: ptr.Bool(false), + 
Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeString), + }, + }, + }, +} + +var queryParameterCommitter = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: request.QueryParamCommitter, + In: openapi3.ParameterInQuery, + Description: ptr.String("Committer pattern for which commit information should be retrieved."), + Required: ptr.Bool(false), + Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeString), + }, + }, + }, +} + +//nolint:funlen +func repoOperations(reflector *openapi3.Reflector) { + createRepository := openapi3.Operation{} + createRepository.WithTags("repository") + createRepository.WithMapOfAnything(map[string]interface{}{"operationId": "createRepository"}) + createRepository.WithParameters(queryParameterSpacePath) + _ = reflector.SetRequest(&createRepository, new(createRepositoryRequest), http.MethodPost) + _ = reflector.SetJSONResponse(&createRepository, new(types.Repository), http.StatusCreated) + _ = reflector.SetJSONResponse(&createRepository, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&createRepository, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&createRepository, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&createRepository, new(usererror.Error), http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodPost, "/repos", createRepository) + + importRepository := openapi3.Operation{} + importRepository.WithTags("repository") + importRepository.WithMapOfAnything(map[string]interface{}{"operationId": "importRepository"}) + importRepository.WithParameters(queryParameterSpacePath) + _ = reflector.SetRequest(&importRepository, &struct{ repo.ImportInput }{}, http.MethodPost) + _ = reflector.SetJSONResponse(&importRepository, new(types.Repository), http.StatusCreated) + _ = reflector.SetJSONResponse(&importRepository, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&importRepository, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&importRepository, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&importRepository, new(usererror.Error), http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodPost, "/repos/import", importRepository) + + opFind := openapi3.Operation{} + opFind.WithTags("repository") + opFind.WithMapOfAnything(map[string]interface{}{"operationId": "findRepository"}) + _ = reflector.SetRequest(&opFind, new(repoRequest), http.MethodGet) + _ = reflector.SetJSONResponse(&opFind, new(types.Repository), http.StatusOK) + _ = reflector.SetJSONResponse(&opFind, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opFind, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opFind, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opFind, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodGet, "/repos/{repo_ref}", opFind) + + opUpdate := openapi3.Operation{} + opUpdate.WithTags("repository") + opUpdate.WithMapOfAnything(map[string]interface{}{"operationId": "updateRepository"}) + _ = reflector.SetRequest(&opUpdate, new(updateRepoRequest), http.MethodPatch) + _ = reflector.SetJSONResponse(&opUpdate, new(types.Repository), http.StatusOK) + _ = reflector.SetJSONResponse(&opUpdate, new(usererror.Error), 
http.StatusBadRequest) + _ = reflector.SetJSONResponse(&opUpdate, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opUpdate, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opUpdate, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opUpdate, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodPatch, "/repos/{repo_ref}", opUpdate) + + opDelete := openapi3.Operation{} + opDelete.WithTags("repository") + opDelete.WithMapOfAnything(map[string]interface{}{"operationId": "deleteRepository"}) + _ = reflector.SetRequest(&opDelete, new(repoRequest), http.MethodDelete) + _ = reflector.SetJSONResponse(&opDelete, nil, http.StatusNoContent) + _ = reflector.SetJSONResponse(&opDelete, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opDelete, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opDelete, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opDelete, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodDelete, "/repos/{repo_ref}", opDelete) + + opMove := openapi3.Operation{} + opMove.WithTags("repository") + opMove.WithMapOfAnything(map[string]interface{}{"operationId": "moveRepository"}) + _ = reflector.SetRequest(&opMove, new(moveRepoRequest), http.MethodPost) + _ = reflector.SetJSONResponse(&opMove, new(types.Repository), http.StatusOK) + _ = reflector.SetJSONResponse(&opMove, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&opMove, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opMove, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opMove, new(usererror.Error), http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodPost, "/repos/{repo_ref}/move", opMove) + + opServiceAccounts := openapi3.Operation{} + opServiceAccounts.WithTags("repository") + opServiceAccounts.WithMapOfAnything(map[string]interface{}{"operationId": "listRepositoryServiceAccounts"}) + _ = reflector.SetRequest(&opServiceAccounts, new(repoRequest), http.MethodGet) + _ = reflector.SetJSONResponse(&opServiceAccounts, []types.ServiceAccount{}, http.StatusOK) + _ = reflector.SetJSONResponse(&opServiceAccounts, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opServiceAccounts, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opServiceAccounts, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opServiceAccounts, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodGet, "/repos/{repo_ref}/service-accounts", opServiceAccounts) + + opGetContent := openapi3.Operation{} + opGetContent.WithTags("repository") + opGetContent.WithMapOfAnything(map[string]interface{}{"operationId": "getContent"}) + opGetContent.WithParameters(queryParameterGitRef, queryParameterIncludeCommit) + _ = reflector.SetRequest(&opGetContent, new(getContentRequest), http.MethodGet) + _ = reflector.SetJSONResponse(&opGetContent, new(getContentOutput), http.StatusOK) + _ = reflector.SetJSONResponse(&opGetContent, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opGetContent, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opGetContent, new(usererror.Error), http.StatusForbidden) + _ = 
reflector.SetJSONResponse(&opGetContent, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodGet, "/repos/{repo_ref}/content/{path}", opGetContent) + + opPathDetails := openapi3.Operation{} + opPathDetails.WithTags("repository") + opPathDetails.WithMapOfAnything(map[string]interface{}{"operationId": "pathDetails"}) + opPathDetails.WithParameters(queryParameterGitRef) + _ = reflector.SetRequest(&opPathDetails, new(pathsDetailsRequest), http.MethodPost) + _ = reflector.SetJSONResponse(&opPathDetails, new(repo.PathsDetailsOutput), http.StatusOK) + _ = reflector.SetJSONResponse(&opPathDetails, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opPathDetails, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opPathDetails, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opPathDetails, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodPost, "/repos/{repo_ref}/path-details", opPathDetails) + + opGetRaw := openapi3.Operation{} + opGetRaw.WithTags("repository") + opGetRaw.WithMapOfAnything(map[string]interface{}{"operationId": "getRaw"}) + opGetRaw.WithParameters(queryParameterGitRef) + _ = reflector.SetRequest(&opGetRaw, new(getContentRequest), http.MethodGet) + // TODO: Figure out how to provide proper list of all potential mime types + _ = reflector.SetStringResponse(&opGetRaw, http.StatusOK, "") + _ = reflector.SetJSONResponse(&opGetRaw, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opGetRaw, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opGetRaw, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opGetRaw, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodGet, "/repos/{repo_ref}/raw/{path}", opGetRaw) + + opGetBlame := openapi3.Operation{} + opGetBlame.WithTags("repository") + opGetBlame.WithMapOfAnything(map[string]interface{}{"operationId": "getBlame"}) + opGetBlame.WithParameters(queryParameterGitRef, + queryParameterLineFrom, queryParameterLineTo) + _ = reflector.SetRequest(&opGetBlame, new(getBlameRequest), http.MethodGet) + _ = reflector.SetJSONResponse(&opGetBlame, []gitrpc.BlamePart{}, http.StatusOK) + _ = reflector.SetJSONResponse(&opGetBlame, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opGetBlame, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opGetBlame, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opGetBlame, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodGet, "/repos/{repo_ref}/blame/{path}", opGetBlame) + + opListCommits := openapi3.Operation{} + opListCommits.WithTags("repository") + opListCommits.WithMapOfAnything(map[string]interface{}{"operationId": "listCommits"}) + opListCommits.WithParameters(queryParameterGitRef, queryParameterAfterCommits, queryParameterPath, + queryParameterSince, queryParameterUntil, queryParameterCommitter, queryParameterPage, queryParameterLimit) + _ = reflector.SetRequest(&opListCommits, new(listCommitsRequest), http.MethodGet) + _ = reflector.SetJSONResponse(&opListCommits, []types.ListCommitResponse{}, http.StatusOK) + _ = reflector.SetJSONResponse(&opListCommits, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opListCommits, new(usererror.Error), 
http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opListCommits, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opListCommits, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodGet, "/repos/{repo_ref}/commits", opListCommits) + + opGetCommit := openapi3.Operation{} + opGetCommit.WithTags("repository") + opGetCommit.WithMapOfAnything(map[string]interface{}{"operationId": "getCommit"}) + _ = reflector.SetRequest(&opGetCommit, new(GetCommitRequest), http.MethodGet) + _ = reflector.SetJSONResponse(&opGetCommit, types.Commit{}, http.StatusOK) + _ = reflector.SetJSONResponse(&opGetCommit, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opGetCommit, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opGetCommit, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opGetCommit, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodGet, "/repos/{repo_ref}/commits/{commit_sha}", opGetCommit) + + opCalulateCommitDivergence := openapi3.Operation{} + opCalulateCommitDivergence.WithTags("repository") + opCalulateCommitDivergence.WithMapOfAnything(map[string]interface{}{"operationId": "calculateCommitDivergence"}) + _ = reflector.SetRequest(&opCalulateCommitDivergence, new(calculateCommitDivergenceRequest), http.MethodPost) + _ = reflector.SetJSONResponse(&opCalulateCommitDivergence, []repo.CommitDivergence{}, http.StatusOK) + _ = reflector.SetJSONResponse(&opCalulateCommitDivergence, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opCalulateCommitDivergence, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opCalulateCommitDivergence, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opCalulateCommitDivergence, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodPost, "/repos/{repo_ref}/commits/calculate-divergence", + opCalulateCommitDivergence) + + opCreateBranch := openapi3.Operation{} + opCreateBranch.WithTags("repository") + opCreateBranch.WithMapOfAnything(map[string]interface{}{"operationId": "createBranch"}) + _ = reflector.SetRequest(&opCreateBranch, new(createBranchRequest), http.MethodPost) + _ = reflector.SetJSONResponse(&opCreateBranch, new(repo.Branch), http.StatusCreated) + _ = reflector.SetJSONResponse(&opCreateBranch, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&opCreateBranch, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opCreateBranch, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opCreateBranch, new(usererror.Error), http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodPost, "/repos/{repo_ref}/branches", opCreateBranch) + + opGetBranch := openapi3.Operation{} + opGetBranch.WithTags("repository") + opGetBranch.WithMapOfAnything(map[string]interface{}{"operationId": "getBranch"}) + _ = reflector.SetRequest(&opGetBranch, new(getBranchRequest), http.MethodGet) + _ = reflector.SetJSONResponse(&opGetBranch, new(repo.Branch), http.StatusOK) + _ = reflector.SetJSONResponse(&opGetBranch, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opGetBranch, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opGetBranch, new(usererror.Error), http.StatusForbidden) + _ = 
reflector.SetJSONResponse(&opGetBranch, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodGet, "/repos/{repo_ref}/branches/{branch_name}", opGetBranch) + + opDeleteBranch := openapi3.Operation{} + opDeleteBranch.WithTags("repository") + opDeleteBranch.WithMapOfAnything(map[string]interface{}{"operationId": "deleteBranch"}) + _ = reflector.SetRequest(&opDeleteBranch, new(deleteBranchRequest), http.MethodDelete) + _ = reflector.SetJSONResponse(&opDeleteBranch, nil, http.StatusNoContent) + _ = reflector.SetJSONResponse(&opDeleteBranch, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opDeleteBranch, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opDeleteBranch, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opDeleteBranch, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodDelete, "/repos/{repo_ref}/branches/{branch_name}", opDeleteBranch) + + opListBranches := openapi3.Operation{} + opListBranches.WithTags("repository") + opListBranches.WithMapOfAnything(map[string]interface{}{"operationId": "listBranches"}) + opListBranches.WithParameters(queryParameterIncludeCommit, + queryParameterQueryBranches, queryParameterOrder, queryParameterSortBranch, + queryParameterPage, queryParameterLimit) + _ = reflector.SetRequest(&opListBranches, new(listBranchesRequest), http.MethodGet) + _ = reflector.SetJSONResponse(&opListBranches, []repo.Branch{}, http.StatusOK) + _ = reflector.SetJSONResponse(&opListBranches, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opListBranches, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opListBranches, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opListBranches, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodGet, "/repos/{repo_ref}/branches", opListBranches) + + opListTags := openapi3.Operation{} + opListTags.WithTags("repository") + opListTags.WithMapOfAnything(map[string]interface{}{"operationId": "listTags"}) + opListTags.WithParameters(queryParameterIncludeCommit, + queryParameterQueryTags, queryParameterOrder, queryParameterSortTags, + queryParameterPage, queryParameterLimit) + _ = reflector.SetRequest(&opListTags, new(listTagsRequest), http.MethodGet) + _ = reflector.SetJSONResponse(&opListTags, []repo.CommitTag{}, http.StatusOK) + _ = reflector.SetJSONResponse(&opListTags, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opListTags, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opListTags, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opListTags, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodGet, "/repos/{repo_ref}/tags", opListTags) + + opCreateTag := openapi3.Operation{} + opCreateTag.WithTags("repository") + opCreateTag.WithMapOfAnything(map[string]interface{}{"operationId": "createTag"}) + _ = reflector.SetRequest(&opCreateTag, new(createTagRequest), http.MethodPost) + _ = reflector.SetJSONResponse(&opCreateTag, new(repo.CommitTag), http.StatusCreated) + _ = reflector.SetJSONResponse(&opCreateTag, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&opCreateTag, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opCreateTag, 
new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opCreateTag, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opCreateTag, new(usererror.Error), http.StatusConflict) + _ = reflector.Spec.AddOperation(http.MethodPost, "/repos/{repo_ref}/tags", opCreateTag) + + opDeleteTag := openapi3.Operation{} + opDeleteTag.WithTags("repository") + opDeleteTag.WithMapOfAnything(map[string]interface{}{"operationId": "deleteTag"}) + _ = reflector.SetRequest(&opDeleteTag, new(deleteTagRequest), http.MethodDelete) + _ = reflector.SetJSONResponse(&opDeleteTag, nil, http.StatusNoContent) + _ = reflector.SetJSONResponse(&opDeleteTag, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opDeleteTag, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opDeleteTag, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opDeleteTag, new(usererror.Error), http.StatusNotFound) + _ = reflector.SetJSONResponse(&opDeleteTag, new(usererror.Error), http.StatusConflict) + _ = reflector.Spec.AddOperation(http.MethodDelete, "/repos/{repo_ref}/tags/{tag_name}", opDeleteTag) + + opCommitFiles := openapi3.Operation{} + opCommitFiles.WithTags("repository") + opCommitFiles.WithMapOfAnything(map[string]interface{}{"operationId": "commitFiles"}) + _ = reflector.SetRequest(&opCommitFiles, new(commitFilesRequest), http.MethodPost) + _ = reflector.SetJSONResponse(&opCommitFiles, repo.CommitFilesResponse{}, http.StatusOK) + _ = reflector.SetJSONResponse(&opCommitFiles, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opCommitFiles, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&opCommitFiles, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opCommitFiles, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opCommitFiles, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodPost, "/repos/{repo_ref}/commits", opCommitFiles) + + opDiff := openapi3.Operation{} + opDiff.WithTags("repository") + opDiff.WithMapOfAnything(map[string]interface{}{"operationId": "rawDiff"}) + _ = reflector.SetRequest(&opDiff, new(getRawDiffRequest), http.MethodGet) + _ = reflector.SetStringResponse(&opDiff, http.StatusOK, "text/plain") + _ = reflector.SetJSONResponse(&opDiff, []gitrpc.FileDiff{}, http.StatusOK) + _ = reflector.SetJSONResponse(&opDiff, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opDiff, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opDiff, new(usererror.Error), http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodGet, "/repos/{repo_ref}/diff/{range}", opDiff) + + opCommitDiff := openapi3.Operation{} + opCommitDiff.WithTags("repository") + opCommitDiff.WithMapOfAnything(map[string]interface{}{"operationId": "getCommitDiff"}) + _ = reflector.SetRequest(&opCommitDiff, new(GetCommitRequest), http.MethodGet) + _ = reflector.SetStringResponse(&opCommitDiff, http.StatusOK, "text/plain") + _ = reflector.SetJSONResponse(&opCommitDiff, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opCommitDiff, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opCommitDiff, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opCommitDiff, new(usererror.Error), http.StatusNotFound) + _ = 
reflector.Spec.AddOperation(http.MethodGet, "/repos/{repo_ref}/commits/{commit_sha}/diff", opCommitDiff) + + opDiffStats := openapi3.Operation{} + opDiffStats.WithTags("repository") + opDiffStats.WithMapOfAnything(map[string]interface{}{"operationId": "diffStats"}) + _ = reflector.SetRequest(&opDiffStats, new(getRawDiffRequest), http.MethodGet) + _ = reflector.SetJSONResponse(&opDiffStats, new(types.DiffStats), http.StatusOK) + _ = reflector.SetJSONResponse(&opDiffStats, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opDiffStats, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opDiffStats, new(usererror.Error), http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodGet, "/repos/{repo_ref}/diff-stats/{range}", opDiffStats) + + opMergeCheck := openapi3.Operation{} + opMergeCheck.WithTags("repository") + opMergeCheck.WithMapOfAnything(map[string]interface{}{"operationId": "mergeCheck"}) + _ = reflector.SetRequest(&opMergeCheck, new(getRawDiffRequest), http.MethodPost) + _ = reflector.SetJSONResponse(&opMergeCheck, new(repo.MergeCheck), http.StatusOK) + _ = reflector.SetJSONResponse(&opMergeCheck, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opMergeCheck, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opMergeCheck, new(usererror.Error), http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodPost, "/repos/{repo_ref}/merge-check/{range}", opMergeCheck) +} diff --git a/internal/api/openapi/resource.go b/internal/api/openapi/resource.go new file mode 100644 index 0000000000..317f1725ae --- /dev/null +++ b/internal/api/openapi/resource.go @@ -0,0 +1,48 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
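Not part of this patch, but worth noting about repoOperations and the other builders above: every operation re-registers the same usererror.Error responses (400/401/403/404/500) line by line. A small helper along these lines could collapse that repetition; addCommonErrors is a hypothetical name used purely for illustration, and operations with extra statuses (such as the 409/422 cases on mergePullReqOp) would still declare those individually.

    // Sketch only: same package and imports as the files in this patch.
    package openapi

    import (
        "net/http"

        "github.com/harness/gitness/internal/api/usererror"

        "github.com/swaggest/openapi-go/openapi3"
    )

    // addCommonErrors registers the error responses shared by most operations,
    // leaving each builder to declare only its success payload and any
    // operation-specific statuses.
    func addCommonErrors(reflector *openapi3.Reflector, op *openapi3.Operation) {
        for _, status := range []int{
            http.StatusBadRequest,
            http.StatusUnauthorized,
            http.StatusForbidden,
            http.StatusNotFound,
            http.StatusInternalServerError,
        } {
            _ = reflector.SetJSONResponse(op, new(usererror.Error), status)
        }
    }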
+ +package openapi + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/usererror" + + "github.com/swaggest/openapi-go/openapi3" +) + +func resourceOperations(reflector *openapi3.Reflector) { + opListGitignore := openapi3.Operation{} + opListGitignore.WithTags("resource") + opListGitignore.WithMapOfAnything(map[string]interface{}{"operationId": "listGitignore"}) + _ = reflector.SetRequest(&opListGitignore, new(gitignoreRequest), http.MethodGet) + _ = reflector.SetJSONResponse(&opListGitignore, []string{}, http.StatusOK) + _ = reflector.SetJSONResponse(&opListGitignore, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opListGitignore, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opListGitignore, new(usererror.Error), http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodGet, "/resources/gitignore", opListGitignore) + + opListLicenses := openapi3.Operation{} + opListLicenses.WithTags("resource") + opListLicenses.WithMapOfAnything(map[string]interface{}{"operationId": "listLicenses"}) + _ = reflector.SetRequest(&opListLicenses, new(licenseRequest), http.MethodGet) + _ = reflector.SetJSONResponse(&opListLicenses, []struct { + Label string `json:"label"` + Value string `json:"value"` + }{}, http.StatusOK) + _ = reflector.SetJSONResponse(&opListLicenses, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opListLicenses, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opListLicenses, new(usererror.Error), http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodGet, "/resources/license", opListLicenses) +} diff --git a/internal/api/openapi/secret.go b/internal/api/openapi/secret.go new file mode 100644 index 0000000000..e3238db95b --- /dev/null +++ b/internal/api/openapi/secret.go @@ -0,0 +1,89 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
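Not part of this patch: resourceOperations above declares the /resources/license payload as an inline anonymous struct, which keeps the schema next to the route but leaves it unnamed in the generated document. If a reusable component were preferred, a named type reflecting to the same label/value shape could be used instead; licenseInfo below is a hypothetical name for illustration.

    // Sketch only.
    package openapi

    // licenseInfo mirrors the anonymous struct used for the /resources/license
    // response; reflecting []licenseInfo yields the same two-field schema,
    // typically emitted as a named, reusable component.
    type licenseInfo struct {
        Label string `json:"label"`
        Value string `json:"value"`
    }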
+ +package openapi + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/secret" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/types" + + "github.com/swaggest/openapi-go/openapi3" +) + +type createSecretRequest struct { + secret.CreateInput +} + +type secretRequest struct { + Ref string `path:"secret_ref"` +} + +type getSecretRequest struct { + secretRequest +} + +type updateSecretRequest struct { + secretRequest + secret.UpdateInput +} + +func secretOperations(reflector *openapi3.Reflector) { + opCreate := openapi3.Operation{} + opCreate.WithTags("secret") + opCreate.WithMapOfAnything(map[string]interface{}{"operationId": "createSecret"}) + _ = reflector.SetRequest(&opCreate, new(createSecretRequest), http.MethodPost) + _ = reflector.SetJSONResponse(&opCreate, new(types.Secret), http.StatusCreated) + _ = reflector.SetJSONResponse(&opCreate, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&opCreate, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opCreate, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opCreate, new(usererror.Error), http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodPost, "/secrets", opCreate) + + opFind := openapi3.Operation{} + opFind.WithTags("secret") + opFind.WithMapOfAnything(map[string]interface{}{"operationId": "findSecret"}) + _ = reflector.SetRequest(&opFind, new(getSecretRequest), http.MethodGet) + _ = reflector.SetJSONResponse(&opFind, new(types.Secret), http.StatusOK) + _ = reflector.SetJSONResponse(&opFind, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opFind, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opFind, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opFind, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodGet, "/secrets/{secret_ref}", opFind) + + opDelete := openapi3.Operation{} + opDelete.WithTags("secret") + opDelete.WithMapOfAnything(map[string]interface{}{"operationId": "deleteSecret"}) + _ = reflector.SetRequest(&opDelete, new(getSecretRequest), http.MethodDelete) + _ = reflector.SetJSONResponse(&opDelete, nil, http.StatusNoContent) + _ = reflector.SetJSONResponse(&opDelete, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opDelete, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opDelete, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opDelete, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodDelete, "/secrets/{secret_ref}", opDelete) + + opUpdate := openapi3.Operation{} + opUpdate.WithTags("secret") + opUpdate.WithMapOfAnything(map[string]interface{}{"operationId": "updateSecret"}) + _ = reflector.SetRequest(&opUpdate, new(updateSecretRequest), http.MethodPatch) + _ = reflector.SetJSONResponse(&opUpdate, new(types.Secret), http.StatusOK) + _ = reflector.SetJSONResponse(&opUpdate, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&opUpdate, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opUpdate, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opUpdate, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opUpdate, new(usererror.Error), http.StatusNotFound) + _ = 
reflector.Spec.AddOperation(http.MethodPatch, "/secrets/{secret_ref}", opUpdate) +} diff --git a/internal/api/openapi/space.go b/internal/api/openapi/space.go new file mode 100644 index 0000000000..e7ba02fe35 --- /dev/null +++ b/internal/api/openapi/space.go @@ -0,0 +1,375 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package openapi + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/space" + "github.com/harness/gitness/internal/api/request" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/gotidy/ptr" + "github.com/swaggest/openapi-go/openapi3" +) + +type createSpaceRequest struct { + space.CreateInput +} + +type spaceRequest struct { + Ref string `path:"space_ref"` +} + +type updateSpaceRequest struct { + spaceRequest + space.UpdateInput +} + +type moveSpaceRequest struct { + spaceRequest + space.MoveInput +} + +type exportSpaceRequest struct { + spaceRequest + space.ExportInput +} + +var queryParameterSortRepo = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: request.QueryParamSort, + In: openapi3.ParameterInQuery, + Description: ptr.String("The data by which the repositories are sorted."), + Required: ptr.Bool(false), + Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeString), + Default: ptrptr(enum.RepoAttrUID.String()), + Enum: []interface{}{ + ptr.String(enum.RepoAttrUID.String()), + ptr.String(enum.RepoAttrCreated.String()), + ptr.String(enum.RepoAttrUpdated.String()), + }, + }, + }, + }, +} + +var queryParameterQueryRepo = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: request.QueryParamQuery, + In: openapi3.ParameterInQuery, + Description: ptr.String("The substring which is used to filter the repositories by their path name."), + Required: ptr.Bool(false), + Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeString), + }, + }, + }, +} + +var queryParameterSortSpace = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: request.QueryParamSort, + In: openapi3.ParameterInQuery, + Description: ptr.String("The data by which the spaces are sorted."), + Required: ptr.Bool(false), + Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeString), + Default: ptrptr(enum.SpaceAttrUID.String()), + Enum: []interface{}{ + ptr.String(enum.SpaceAttrUID.String()), + ptr.String(enum.SpaceAttrCreated.String()), + ptr.String(enum.SpaceAttrUpdated.String()), + }, + }, + }, + }, +} + +var queryParameterQuerySpace = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: request.QueryParamQuery, + In: openapi3.ParameterInQuery, + Description: ptr.String("The substring which is used to filter the spaces by their path name."), + Required: ptr.Bool(false), + Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: 
ptrSchemaType(openapi3.SchemaTypeString), + }, + }, + }, +} + +var queryParameterMembershipUsers = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: request.QueryParamQuery, + In: openapi3.ParameterInQuery, + Description: ptr.String("The substring by which the space members are filtered."), + Required: ptr.Bool(false), + Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeString), + }, + }, + }, +} + +var queryParameterSortMembershipUsers = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: request.QueryParamSort, + In: openapi3.ParameterInQuery, + Description: ptr.String("The field by which the space members are sorted."), + Required: ptr.Bool(false), + Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeString), + Default: ptrptr(enum.MembershipUserSortName), + Enum: enum.MembershipUserSort("").Enum(), + }, + }, + }, +} + +//nolint:funlen // api spec generation no need for checking func complexity +func spaceOperations(reflector *openapi3.Reflector) { + opCreate := openapi3.Operation{} + opCreate.WithTags("space") + opCreate.WithMapOfAnything(map[string]interface{}{"operationId": "createSpace"}) + _ = reflector.SetRequest(&opCreate, new(createSpaceRequest), http.MethodPost) + _ = reflector.SetJSONResponse(&opCreate, new(types.Space), http.StatusCreated) + _ = reflector.SetJSONResponse(&opCreate, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&opCreate, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opCreate, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opCreate, new(usererror.Error), http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodPost, "/spaces", opCreate) + + opImport := openapi3.Operation{} + opImport.WithTags("space") + opImport.WithMapOfAnything(map[string]interface{}{"operationId": "importSpace"}) + _ = reflector.SetRequest(&opImport, &struct{ space.ImportInput }{}, http.MethodPost) + _ = reflector.SetJSONResponse(&opImport, new(types.Space), http.StatusCreated) + _ = reflector.SetJSONResponse(&opImport, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&opImport, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opImport, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opImport, new(usererror.Error), http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodPost, "/spaces/import", opImport) + + opExport := openapi3.Operation{} + opExport.WithTags("space") + opExport.WithMapOfAnything(map[string]interface{}{"operationId": "exportSpace"}) + _ = reflector.SetRequest(&opExport, new(exportSpaceRequest), http.MethodPost) + _ = reflector.SetJSONResponse(&opExport, nil, http.StatusAccepted) + _ = reflector.SetJSONResponse(&opExport, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&opExport, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opExport, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opExport, new(usererror.Error), http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodPost, "/spaces/{space_ref}/export", opExport) + + opExportProgress := openapi3.Operation{} + opExportProgress.WithTags("space") + opExportProgress.WithMapOfAnything(map[string]interface{}{"operationId": "exportProgressSpace"}) + _ = 
reflector.SetRequest(&opExportProgress, new(spaceRequest), http.MethodGet) + _ = reflector.SetJSONResponse(&opExportProgress, new(space.ExportProgressOutput), http.StatusOK) + _ = reflector.SetJSONResponse(&opExportProgress, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&opExportProgress, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opExportProgress, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opExportProgress, new(usererror.Error), http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodGet, "/spaces/{space_ref}/export-progress", opExportProgress) + + opGet := openapi3.Operation{} + opGet.WithTags("space") + opGet.WithMapOfAnything(map[string]interface{}{"operationId": "getSpace"}) + _ = reflector.SetRequest(&opGet, new(spaceRequest), http.MethodGet) + _ = reflector.SetJSONResponse(&opGet, new(types.Space), http.StatusOK) + _ = reflector.SetJSONResponse(&opGet, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opGet, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opGet, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opGet, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodGet, "/spaces/{space_ref}", opGet) + + opUpdate := openapi3.Operation{} + opUpdate.WithTags("space") + opUpdate.WithMapOfAnything(map[string]interface{}{"operationId": "updateSpace"}) + _ = reflector.SetRequest(&opUpdate, new(updateSpaceRequest), http.MethodPatch) + _ = reflector.SetJSONResponse(&opUpdate, new(types.Space), http.StatusOK) + _ = reflector.SetJSONResponse(&opUpdate, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&opUpdate, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opUpdate, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opUpdate, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opUpdate, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodPatch, "/spaces/{space_ref}", opUpdate) + + opDelete := openapi3.Operation{} + opDelete.WithTags("space") + opDelete.WithMapOfAnything(map[string]interface{}{"operationId": "deleteSpace"}) + _ = reflector.SetRequest(&opDelete, new(spaceRequest), http.MethodDelete) + _ = reflector.SetJSONResponse(&opDelete, nil, http.StatusNoContent) + _ = reflector.SetJSONResponse(&opDelete, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opDelete, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opDelete, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opDelete, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodDelete, "/spaces/{space_ref}", opDelete) + + opMove := openapi3.Operation{} + opMove.WithTags("space") + opMove.WithMapOfAnything(map[string]interface{}{"operationId": "moveSpace"}) + _ = reflector.SetRequest(&opMove, new(moveSpaceRequest), http.MethodPost) + _ = reflector.SetJSONResponse(&opMove, new(types.Space), http.StatusOK) + _ = reflector.SetJSONResponse(&opMove, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&opMove, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opMove, new(usererror.Error), http.StatusUnauthorized) + _ = 
reflector.SetJSONResponse(&opMove, new(usererror.Error), http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodPost, "/spaces/{space_ref}/move", opMove) + + opSpaces := openapi3.Operation{} + opSpaces.WithTags("space") + opSpaces.WithMapOfAnything(map[string]interface{}{"operationId": "listSpaces"}) + opSpaces.WithParameters(queryParameterQuerySpace, queryParameterSortSpace, queryParameterOrder, + queryParameterPage, queryParameterLimit) + _ = reflector.SetRequest(&opSpaces, new(spaceRequest), http.MethodGet) + _ = reflector.SetJSONResponse(&opSpaces, []types.Space{}, http.StatusOK) + _ = reflector.SetJSONResponse(&opSpaces, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opSpaces, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opSpaces, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opSpaces, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodGet, "/spaces/{space_ref}/spaces", opSpaces) + + opRepos := openapi3.Operation{} + opRepos.WithTags("space") + opRepos.WithMapOfAnything(map[string]interface{}{"operationId": "listRepos"}) + opRepos.WithParameters(queryParameterQueryRepo, queryParameterSortRepo, queryParameterOrder, + queryParameterPage, queryParameterLimit) + _ = reflector.SetRequest(&opRepos, new(spaceRequest), http.MethodGet) + _ = reflector.SetJSONResponse(&opRepos, []types.Repository{}, http.StatusOK) + _ = reflector.SetJSONResponse(&opRepos, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opRepos, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opRepos, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opRepos, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodGet, "/spaces/{space_ref}/repos", opRepos) + + opTemplates := openapi3.Operation{} + opTemplates.WithTags("space") + opTemplates.WithMapOfAnything(map[string]interface{}{"operationId": "listTemplates"}) + opTemplates.WithParameters(queryParameterQueryRepo, queryParameterPage, queryParameterLimit) + _ = reflector.SetRequest(&opTemplates, new(spaceRequest), http.MethodGet) + _ = reflector.SetJSONResponse(&opTemplates, []types.Template{}, http.StatusOK) + _ = reflector.SetJSONResponse(&opTemplates, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opTemplates, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opTemplates, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opTemplates, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodGet, "/spaces/{space_ref}/templates", opTemplates) + + opConnectors := openapi3.Operation{} + opConnectors.WithTags("space") + opConnectors.WithMapOfAnything(map[string]interface{}{"operationId": "listConnectors"}) + opConnectors.WithParameters(queryParameterQueryRepo, queryParameterPage, queryParameterLimit) + _ = reflector.SetRequest(&opConnectors, new(spaceRequest), http.MethodGet) + _ = reflector.SetJSONResponse(&opConnectors, []types.Connector{}, http.StatusOK) + _ = reflector.SetJSONResponse(&opConnectors, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opConnectors, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opConnectors,
new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opConnectors, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodGet, "/spaces/{space_ref}/connectors", opConnectors) + + opSecrets := openapi3.Operation{} + opSecrets.WithTags("space") + opSecrets.WithMapOfAnything(map[string]interface{}{"operationId": "listSecrets"}) + opSecrets.WithParameters(queryParameterQueryRepo, queryParameterPage, queryParameterLimit) + _ = reflector.SetRequest(&opSecrets, new(spaceRequest), http.MethodGet) + _ = reflector.SetJSONResponse(&opSecrets, []types.Secret{}, http.StatusOK) + _ = reflector.SetJSONResponse(&opSecrets, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opSecrets, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opSecrets, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opSecrets, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodGet, "/spaces/{space_ref}/secrets", opSecrets) + + opServiceAccounts := openapi3.Operation{} + opServiceAccounts.WithTags("space") + opServiceAccounts.WithMapOfAnything(map[string]interface{}{"operationId": "listServiceAccounts"}) + _ = reflector.SetRequest(&opServiceAccounts, new(spaceRequest), http.MethodGet) + _ = reflector.SetJSONResponse(&opServiceAccounts, []types.ServiceAccount{}, http.StatusOK) + _ = reflector.SetJSONResponse(&opServiceAccounts, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opServiceAccounts, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opServiceAccounts, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opServiceAccounts, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodGet, "/spaces/{space_ref}/service-accounts", opServiceAccounts) + + opMembershipAdd := openapi3.Operation{} + opMembershipAdd.WithTags("space") + opMembershipAdd.WithMapOfAnything(map[string]interface{}{"operationId": "membershipAdd"}) + _ = reflector.SetRequest(&opMembershipAdd, struct { + spaceRequest + space.MembershipAddInput + }{}, http.MethodPost) + _ = reflector.SetJSONResponse(&opMembershipAdd, &types.MembershipUser{}, http.StatusCreated) + _ = reflector.SetJSONResponse(&opMembershipAdd, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opMembershipAdd, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opMembershipAdd, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opMembershipAdd, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodPost, "/spaces/{space_ref}/members", opMembershipAdd) + + opMembershipDelete := openapi3.Operation{} + opMembershipDelete.WithTags("space") + opMembershipDelete.WithMapOfAnything(map[string]interface{}{"operationId": "membershipDelete"}) + _ = reflector.SetRequest(&opMembershipDelete, struct { + spaceRequest + UserUID string `path:"user_uid"` + }{}, http.MethodDelete) + _ = reflector.SetJSONResponse(&opMembershipDelete, nil, http.StatusNoContent) + _ = reflector.SetJSONResponse(&opMembershipDelete, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opMembershipDelete, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opMembershipDelete, new(usererror.Error), http.StatusForbidden) + _ = 
reflector.SetJSONResponse(&opMembershipDelete, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodDelete, "/spaces/{space_ref}/members/{user_uid}", opMembershipDelete) + + opMembershipUpdate := openapi3.Operation{} + opMembershipUpdate.WithTags("space") + opMembershipUpdate.WithMapOfAnything(map[string]interface{}{"operationId": "membershipUpdate"}) + _ = reflector.SetRequest(&opMembershipUpdate, &struct { + spaceRequest + UserUID string `path:"user_uid"` + space.MembershipUpdateInput + }{}, http.MethodPatch) + _ = reflector.SetJSONResponse(&opMembershipUpdate, &types.MembershipUser{}, http.StatusOK) + _ = reflector.SetJSONResponse(&opMembershipUpdate, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opMembershipUpdate, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opMembershipUpdate, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opMembershipUpdate, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodPatch, "/spaces/{space_ref}/members/{user_uid}", opMembershipUpdate) + + opMembershipList := openapi3.Operation{} + opMembershipList.WithTags("space") + opMembershipList.WithMapOfAnything(map[string]interface{}{"operationId": "membershipList"}) + opMembershipList.WithParameters( + queryParameterMembershipUsers, + queryParameterOrder, queryParameterSortMembershipUsers, + queryParameterPage, queryParameterLimit) + _ = reflector.SetRequest(&opMembershipList, &struct { + spaceRequest + }{}, http.MethodGet) + _ = reflector.SetJSONResponse(&opMembershipList, []types.MembershipUser{}, http.StatusOK) + _ = reflector.SetJSONResponse(&opMembershipList, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opMembershipList, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opMembershipList, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opMembershipList, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodGet, "/spaces/{space_ref}/members", opMembershipList) +} diff --git a/internal/api/openapi/system.go b/internal/api/openapi/system.go new file mode 100644 index 0000000000..77cf567619 --- /dev/null +++ b/internal/api/openapi/system.go @@ -0,0 +1,37 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package openapi + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/handler/system" + "github.com/harness/gitness/internal/api/usererror" + + "github.com/swaggest/openapi-go/openapi3" +) + +// helper function that constructs the openapi specification +// for the system registration config endpoints. 
+func buildSystem(reflector *openapi3.Reflector) { + opGetConfig := openapi3.Operation{} + opGetConfig.WithTags("system") + opGetConfig.WithMapOfAnything(map[string]interface{}{"operationId": "getSystemConfig"}) + _ = reflector.SetRequest(&opGetConfig, nil, http.MethodGet) + _ = reflector.SetJSONResponse(&opGetConfig, new(system.ConfigOutput), http.StatusOK) + _ = reflector.SetJSONResponse(&opGetConfig, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opGetConfig, new(usererror.Error), http.StatusBadRequest) + _ = reflector.Spec.AddOperation(http.MethodGet, "/system/config", opGetConfig) +} diff --git a/internal/api/openapi/template.go b/internal/api/openapi/template.go new file mode 100644 index 0000000000..64890a63ad --- /dev/null +++ b/internal/api/openapi/template.go @@ -0,0 +1,89 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package openapi + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/template" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/types" + + "github.com/swaggest/openapi-go/openapi3" +) + +type createTemplateRequest struct { + template.CreateInput +} + +type templateRequest struct { + Ref string `path:"template_ref"` +} + +type getTemplateRequest struct { + templateRequest +} + +type updateTemplateRequest struct { + templateRequest + template.UpdateInput +} + +func templateOperations(reflector *openapi3.Reflector) { + opCreate := openapi3.Operation{} + opCreate.WithTags("template") + opCreate.WithMapOfAnything(map[string]interface{}{"operationId": "createTemplate"}) + _ = reflector.SetRequest(&opCreate, new(createTemplateRequest), http.MethodPost) + _ = reflector.SetJSONResponse(&opCreate, new(types.Template), http.StatusCreated) + _ = reflector.SetJSONResponse(&opCreate, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&opCreate, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opCreate, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opCreate, new(usererror.Error), http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodPost, "/templates", opCreate) + + opFind := openapi3.Operation{} + opFind.WithTags("template") + opFind.WithMapOfAnything(map[string]interface{}{"operationId": "findTemplate"}) + _ = reflector.SetRequest(&opFind, new(getTemplateRequest), http.MethodGet) + _ = reflector.SetJSONResponse(&opFind, new(types.Template), http.StatusOK) + _ = reflector.SetJSONResponse(&opFind, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opFind, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opFind, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opFind, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodGet, "/templates/{template_ref}", opFind) + + opDelete := 
openapi3.Operation{} + opDelete.WithTags("template") + opDelete.WithMapOfAnything(map[string]interface{}{"operationId": "deleteTemplate"}) + _ = reflector.SetRequest(&opDelete, new(getTemplateRequest), http.MethodDelete) + _ = reflector.SetJSONResponse(&opDelete, nil, http.StatusNoContent) + _ = reflector.SetJSONResponse(&opDelete, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opDelete, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opDelete, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opDelete, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodDelete, "/templates/{template_ref}", opDelete) + + opUpdate := openapi3.Operation{} + opUpdate.WithTags("template") + opUpdate.WithMapOfAnything(map[string]interface{}{"operationId": "updateTemplate"}) + _ = reflector.SetRequest(&opUpdate, new(updateTemplateRequest), http.MethodPatch) + _ = reflector.SetJSONResponse(&opUpdate, new(types.Template), http.StatusOK) + _ = reflector.SetJSONResponse(&opUpdate, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&opUpdate, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opUpdate, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&opUpdate, new(usererror.Error), http.StatusForbidden) + _ = reflector.SetJSONResponse(&opUpdate, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodPatch, "/templates/{template_ref}", opUpdate) +} diff --git a/internal/api/openapi/user.go b/internal/api/openapi/user.go new file mode 100644 index 0000000000..7c9042ccda --- /dev/null +++ b/internal/api/openapi/user.go @@ -0,0 +1,102 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
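+// Note (editorial): across this package, request structs bind to the HTTP
+// request through struct tags: fields tagged `path:"..."` and `query:"..."`
+// become path and query parameters, while embedded controller input types
+// (for example user.CreateTokenInput below) supply the JSON request body schema.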
+ +package openapi + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/user" + "github.com/harness/gitness/internal/api/request" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/gotidy/ptr" + "github.com/swaggest/openapi-go/openapi3" +) + +type createTokenRequest struct { + user.CreateTokenInput +} + +var queryParameterMembershipSpaces = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: request.QueryParamQuery, + In: openapi3.ParameterInQuery, + Description: ptr.String("The substring by which the spaces the user is a member of are filtered."), + Required: ptr.Bool(false), + Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeString), + }, + }, + }, +} + +var queryParameterSortMembershipSpaces = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: request.QueryParamSort, + In: openapi3.ParameterInQuery, + Description: ptr.String("The field by which the spaces the user is a member of are sorted."), + Required: ptr.Bool(false), + Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeString), + Default: ptrptr(enum.MembershipSpaceSortUID), + Enum: enum.MembershipSpaceSort("").Enum(), + }, + }, + }, +} + +// helper function that constructs the openapi specification +// for user account resources. +func buildUser(reflector *openapi3.Reflector) { + opFind := openapi3.Operation{} + opFind.WithTags("user") + opFind.WithMapOfAnything(map[string]interface{}{"operationId": "getUser"}) + _ = reflector.SetRequest(&opFind, nil, http.MethodGet) + _ = reflector.SetJSONResponse(&opFind, new(types.User), http.StatusOK) + _ = reflector.SetJSONResponse(&opFind, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.Spec.AddOperation(http.MethodGet, "/user", opFind) + + opUpdate := openapi3.Operation{} + opUpdate.WithTags("user") + opUpdate.WithMapOfAnything(map[string]interface{}{"operationId": "updateUser"}) + _ = reflector.SetRequest(&opUpdate, new(user.UpdateInput), http.MethodPatch) + _ = reflector.SetJSONResponse(&opUpdate, new(types.User), http.StatusOK) + _ = reflector.SetJSONResponse(&opUpdate, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.Spec.AddOperation(http.MethodPatch, "/user", opUpdate) + + opToken := openapi3.Operation{} + opToken.WithTags("user") + opToken.WithMapOfAnything(map[string]interface{}{"operationId": "createToken"}) + _ = reflector.SetRequest(&opToken, new(createTokenRequest), http.MethodPost) + _ = reflector.SetJSONResponse(&opToken, new(types.TokenResponse), http.StatusCreated) + _ = reflector.SetJSONResponse(&opToken, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.Spec.AddOperation(http.MethodPost, "/user/token", opToken) + + opMemberSpaces := openapi3.Operation{} + opMemberSpaces.WithTags("user") + opMemberSpaces.WithMapOfAnything(map[string]interface{}{"operationId": "membershipSpaces"}) + opMemberSpaces.WithParameters( + queryParameterMembershipSpaces, + queryParameterOrder, queryParameterSortMembershipSpaces, + queryParameterPage, queryParameterLimit) + _ = reflector.SetRequest(&opMemberSpaces, struct{}{}, http.MethodGet) + _ = reflector.SetJSONResponse(&opMemberSpaces, new([]types.MembershipSpace), http.StatusOK) + _ = reflector.SetJSONResponse(&opMemberSpaces, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.Spec.AddOperation(http.MethodGet,
"/user/memberships", opMemberSpaces) +} diff --git a/internal/api/openapi/users.go b/internal/api/openapi/users.go new file mode 100644 index 0000000000..7dd5a1f4db --- /dev/null +++ b/internal/api/openapi/users.go @@ -0,0 +1,120 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package openapi + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/user" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/types" + + "github.com/swaggest/openapi-go/openapi3" +) + +type ( + // adminUsersCreateRequest is the request for the admin user create operation. + adminUsersCreateRequest struct { + user.CreateInput + } + + // adminUsersRequest is the request for user specific admin user operations. + adminUsersRequest struct { + UserUID string `path:"user_uid"` + } + + // adminUsersUpdateRequest is the request for the admin user update operation. + adminUsersUpdateRequest struct { + adminUsersRequest + user.UpdateInput + } + + // adminUserListRequest is the request for listing users. + adminUserListRequest struct { + Sort string `query:"sort" enum:"id,email,created,updated"` + Order string `query:"order" enum:"asc,desc"` + + // include pagination request + paginationRequest + } + + // updateAdminRequest is the request for updating the admin attribute for the user. + updateAdminRequest struct { + adminUsersRequest + user.UpdateAdminInput + } +) + +// helper function that constructs the openapi specification +// for admin resources. 
+func buildAdmin(reflector *openapi3.Reflector) { + opFind := openapi3.Operation{} + opFind.WithTags("admin") + opFind.WithMapOfAnything(map[string]interface{}{"operationId": "adminGetUser"}) + _ = reflector.SetRequest(&opFind, new(adminUsersRequest), http.MethodGet) + _ = reflector.SetJSONResponse(&opFind, new(types.User), http.StatusOK) + _ = reflector.SetJSONResponse(&opFind, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&opFind, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opFind, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodGet, "/admin/users/{user_uid}", opFind) + + opList := openapi3.Operation{} + opList.WithTags("admin") + opList.WithMapOfAnything(map[string]interface{}{"operationId": "adminListUsers"}) + _ = reflector.SetRequest(&opList, new(adminUserListRequest), http.MethodGet) + _ = reflector.SetJSONResponse(&opList, new([]*types.User), http.StatusOK) + _ = reflector.SetJSONResponse(&opList, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&opList, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opList, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodGet, "/admin/users", opList) + + opCreate := openapi3.Operation{} + opCreate.WithTags("admin") + opCreate.WithMapOfAnything(map[string]interface{}{"operationId": "adminCreateUser"}) + _ = reflector.SetRequest(&opCreate, new(adminUsersCreateRequest), http.MethodPost) + _ = reflector.SetJSONResponse(&opCreate, new(types.User), http.StatusCreated) + _ = reflector.SetJSONResponse(&opCreate, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&opCreate, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opCreate, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodPost, "/admin/users", opCreate) + + opUpdate := openapi3.Operation{} + opUpdate.WithTags("admin") + opUpdate.WithMapOfAnything(map[string]interface{}{"operationId": "adminUpdateUser"}) + _ = reflector.SetRequest(&opUpdate, new(adminUsersUpdateRequest), http.MethodPatch) + _ = reflector.SetJSONResponse(&opUpdate, new(types.User), http.StatusOK) + _ = reflector.SetJSONResponse(&opUpdate, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&opUpdate, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opUpdate, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodPatch, "/admin/users/{user_uid}", opUpdate) + + opUpdateAdmin := openapi3.Operation{} + opUpdateAdmin.WithTags("admin") + opUpdateAdmin.WithMapOfAnything(map[string]interface{}{"operationId": "updateUserAdmin"}) + _ = reflector.SetRequest(&opUpdateAdmin, new(updateAdminRequest), http.MethodPatch) + _ = reflector.SetJSONResponse(&opUpdateAdmin, new(types.User), http.StatusOK) + _ = reflector.SetJSONResponse(&opUpdateAdmin, new(usererror.Error), http.StatusNotFound) + _ = reflector.SetJSONResponse(&opUpdateAdmin, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.Spec.AddOperation(http.MethodPatch, "/admin/users/{user_uid}/admin", opUpdateAdmin) + + opDelete := openapi3.Operation{} + opDelete.WithTags("admin") + opDelete.WithMapOfAnything(map[string]interface{}{"operationId": "adminDeleteUser"}) + _ = reflector.SetRequest(&opDelete, new(adminUsersRequest), http.MethodDelete) + _ = 
reflector.SetJSONResponse(&opDelete, nil, http.StatusNoContent) + _ = reflector.SetJSONResponse(&opDelete, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&opDelete, new(usererror.Error), http.StatusNotFound) + _ = reflector.Spec.AddOperation(http.MethodDelete, "/admin/users/{user_uid}", opDelete) +} diff --git a/internal/api/openapi/webhook.go b/internal/api/openapi/webhook.go new file mode 100644 index 0000000000..28979b859d --- /dev/null +++ b/internal/api/openapi/webhook.go @@ -0,0 +1,181 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package openapi + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/controller/webhook" + "github.com/harness/gitness/internal/api/request" + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/gotidy/ptr" + "github.com/swaggest/openapi-go/openapi3" +) + +// webhookType is used to add has_secret field. +type webhookType struct { + types.Webhook + HasSecret bool `json:"has_secret"` +} + +type createWebhookRequest struct { + repoRequest + webhook.CreateInput +} + +type listWebhooksRequest struct { + repoRequest +} + +type webhookRequest struct { + repoRequest + ID int64 `path:"webhook_id"` +} + +type getWebhookRequest struct { + webhookRequest +} + +type deleteWebhookRequest struct { + webhookRequest +} + +type updateWebhookRequest struct { + webhookRequest + webhook.UpdateInput +} + +type listWebhookExecutionsRequest struct { + webhookRequest +} + +type webhookExecutionRequest struct { + webhookRequest + ID int64 `path:"webhook_execution_id"` +} + +type getWebhookExecutionRequest struct { + webhookExecutionRequest +} + +var queryParameterSortWebhook = openapi3.ParameterOrRef{ + Parameter: &openapi3.Parameter{ + Name: request.QueryParamSort, + In: openapi3.ParameterInQuery, + Description: ptr.String("The data by which the webhooks are sorted."), + Required: ptr.Bool(false), + Schema: &openapi3.SchemaOrRef{ + Schema: &openapi3.Schema{ + Type: ptrSchemaType(openapi3.SchemaTypeString), + Default: ptrptr(enum.WebhookAttrID.String()), + Enum: []interface{}{ + ptr.String(enum.WebhookAttrID.String()), + ptr.String(enum.WebhookAttrDisplayName.String()), + ptr.String(enum.WebhookAttrCreated.String()), + ptr.String(enum.WebhookAttrUpdated.String()), + }, + }, + }, + }, +} + +//nolint:funlen +func webhookOperations(reflector *openapi3.Reflector) { + createWebhook := openapi3.Operation{} + createWebhook.WithTags("webhook") + createWebhook.WithMapOfAnything(map[string]interface{}{"operationId": "createWebhook"}) + _ = reflector.SetRequest(&createWebhook, new(createWebhookRequest), http.MethodPost) + _ = reflector.SetJSONResponse(&createWebhook, new(webhookType), http.StatusCreated) + _ = reflector.SetJSONResponse(&createWebhook, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&createWebhook, new(usererror.Error), http.StatusInternalServerError) + 
_ = reflector.SetJSONResponse(&createWebhook, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&createWebhook, new(usererror.Error), http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodPost, "/repos/{repo_ref}/webhooks", createWebhook) + + listWebhooks := openapi3.Operation{} + listWebhooks.WithTags("webhook") + listWebhooks.WithMapOfAnything(map[string]interface{}{"operationId": "listWebhooks"}) + listWebhooks.WithParameters(queryParameterQuerySpace, queryParameterSortWebhook, queryParameterOrder, + queryParameterPage, queryParameterLimit) + _ = reflector.SetRequest(&listWebhooks, new(listWebhooksRequest), http.MethodGet) + _ = reflector.SetJSONResponse(&listWebhooks, new([]webhookType), http.StatusOK) + _ = reflector.SetJSONResponse(&listWebhooks, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&listWebhooks, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&listWebhooks, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&listWebhooks, new(usererror.Error), http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodGet, "/repos/{repo_ref}/webhooks", listWebhooks) + + getWebhook := openapi3.Operation{} + getWebhook.WithTags("webhook") + getWebhook.WithMapOfAnything(map[string]interface{}{"operationId": "getWebhook"}) + _ = reflector.SetRequest(&getWebhook, new(getWebhookRequest), http.MethodGet) + _ = reflector.SetJSONResponse(&getWebhook, new(webhookType), http.StatusOK) + _ = reflector.SetJSONResponse(&getWebhook, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&getWebhook, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&getWebhook, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&getWebhook, new(usererror.Error), http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodGet, "/repos/{repo_ref}/webhooks/{webhook_id}", getWebhook) + + updateWebhook := openapi3.Operation{} + updateWebhook.WithTags("webhook") + updateWebhook.WithMapOfAnything(map[string]interface{}{"operationId": "updateWebhook"}) + _ = reflector.SetRequest(&updateWebhook, new(updateWebhookRequest), http.MethodPatch) + _ = reflector.SetJSONResponse(&updateWebhook, new(webhookType), http.StatusOK) + _ = reflector.SetJSONResponse(&updateWebhook, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&updateWebhook, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&updateWebhook, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&updateWebhook, new(usererror.Error), http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodPatch, "/repos/{repo_ref}/webhooks/{webhook_id}", updateWebhook) + + deleteWebhook := openapi3.Operation{} + deleteWebhook.WithTags("webhook") + deleteWebhook.WithMapOfAnything(map[string]interface{}{"operationId": "deleteWebhook"}) + _ = reflector.SetRequest(&deleteWebhook, new(deleteWebhookRequest), http.MethodDelete) + _ = reflector.SetJSONResponse(&deleteWebhook, nil, http.StatusNoContent) + _ = reflector.SetJSONResponse(&deleteWebhook, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&deleteWebhook, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&deleteWebhook, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&deleteWebhook, new(usererror.Error), 
http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodDelete, "/repos/{repo_ref}/webhooks/{webhook_id}", deleteWebhook) + + listWebhookExecutions := openapi3.Operation{} + listWebhookExecutions.WithTags("webhook") + listWebhookExecutions.WithMapOfAnything(map[string]interface{}{"operationId": "listWebhookExecutions"}) + listWebhookExecutions.WithParameters(queryParameterPage, queryParameterLimit) + _ = reflector.SetRequest(&listWebhookExecutions, new(listWebhookExecutionsRequest), http.MethodGet) + _ = reflector.SetJSONResponse(&listWebhookExecutions, new([]types.WebhookExecution), http.StatusOK) + _ = reflector.SetJSONResponse(&listWebhookExecutions, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&listWebhookExecutions, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&listWebhookExecutions, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&listWebhookExecutions, new(usererror.Error), http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodGet, + "/repos/{repo_ref}/webhooks/{webhook_id}/executions", listWebhookExecutions) + + getWebhookExecution := openapi3.Operation{} + getWebhookExecution.WithTags("webhook") + getWebhookExecution.WithMapOfAnything(map[string]interface{}{"operationId": "getWebhookExecution"}) + getWebhookExecution.WithParameters(queryParameterPage, queryParameterLimit) + _ = reflector.SetRequest(&getWebhookExecution, new(getWebhookExecutionRequest), http.MethodGet) + _ = reflector.SetJSONResponse(&getWebhookExecution, new(types.WebhookExecution), http.StatusOK) + _ = reflector.SetJSONResponse(&getWebhookExecution, new(usererror.Error), http.StatusBadRequest) + _ = reflector.SetJSONResponse(&getWebhookExecution, new(usererror.Error), http.StatusInternalServerError) + _ = reflector.SetJSONResponse(&getWebhookExecution, new(usererror.Error), http.StatusUnauthorized) + _ = reflector.SetJSONResponse(&getWebhookExecution, new(usererror.Error), http.StatusForbidden) + _ = reflector.Spec.AddOperation(http.MethodGet, + "/repos/{repo_ref}/webhooks/{webhook_id}/executions/{webhook_execution_id}", getWebhookExecution) +} diff --git a/internal/api/render/header.go b/internal/api/render/header.go new file mode 100644 index 0000000000..edccdfc4f9 --- /dev/null +++ b/internal/api/render/header.go @@ -0,0 +1,103 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package render + +import ( + "fmt" + "net/http" + "net/url" + "strconv" +) + +// format string for the link header value. +var linkf = `<%s>; rel="%s"` + +// Pagination writes the pagination and link headers to the http.Response. 
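+// Besides the headers written by PaginationNoTotal (x-page, x-per-page and,
+// where applicable, x-next-page/x-prev-page plus the matching Link entries),
+// it sets x-total, x-total-pages and a Link header with rel="last".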
+func Pagination(r *http.Request, w http.ResponseWriter, page, size, total int) { + var ( + last = pagelen(size, total) + ) + + // Add information that doesn't require total + PaginationNoTotal(r, w, page, size, page >= last) + + // add information that requires total + uri := getPaginationBaseURL(r, page, size) + params := uri.Query() + + // update the page query parameter and re-encode + params.Set("page", strconv.Itoa(last)) + uri.RawQuery = params.Encode() + + // write the page total to the header. + w.Header().Set("x-total", strconv.Itoa(total)) + w.Header().Set("x-total-pages", strconv.Itoa(last)) + w.Header().Add("Link", fmt.Sprintf(linkf, uri.String(), "last")) +} + +// PaginationNoTotal writes the pagination and link headers to the http.Response when total is unknown. +func PaginationNoTotal(r *http.Request, w http.ResponseWriter, page int, size int, isLastPage bool) { + var ( + next = page + 1 + prev = max(page-1, 1) + ) + + // write basic headers + w.Header().Set("x-page", strconv.Itoa(page)) + w.Header().Set("x-per-page", strconv.Itoa(size)) + + // write headers based on relative location of current page + uri := getPaginationBaseURL(r, page, size) + params := uri.Query() + + if !isLastPage { + // update the page query parameter and re-encode + params.Set("page", strconv.Itoa(next)) + uri.RawQuery = params.Encode() + + // write the next page to the header. + w.Header().Set("x-next-page", strconv.Itoa(next)) + w.Header().Add("Link", fmt.Sprintf(linkf, uri.String(), "next")) + } + + if page > 1 { + // update the page query parameter and re-encode. + params.Set("page", strconv.Itoa(prev)) + uri.RawQuery = params.Encode() + + // write the previous page to the header. + w.Header().Set("x-prev-page", strconv.Itoa(prev)) + w.Header().Add("Link", fmt.Sprintf(linkf, uri.String(), "prev")) + } +} + +// PaginationLimit writes the x-total header. +func PaginationLimit(r *http.Request, w http.ResponseWriter, total int) { + w.Header().Set("x-total", strconv.Itoa(total)) +} + +func getPaginationBaseURL(r *http.Request, page int, size int) url.URL { + uri := *r.URL + + // parse the existing query parameters and + // sanitize parameter list. + params := uri.Query() + params.Del("access_token") + params.Del("token") + params.Set("page", strconv.Itoa(page)) + params.Set("limit", strconv.Itoa(size)) + uri.RawQuery = params.Encode() + + return uri +} diff --git a/internal/api/render/header_test.go b/internal/api/render/header_test.go new file mode 100644 index 0000000000..6cefa41759 --- /dev/null +++ b/internal/api/render/header_test.go @@ -0,0 +1,15 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package render diff --git a/internal/api/render/platform/render.go b/internal/api/render/platform/render.go new file mode 100644 index 0000000000..2c471878cd --- /dev/null +++ b/internal/api/render/platform/render.go @@ -0,0 +1,42 @@ +// Copyright 2023 Harness, Inc.
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package platform + +import ( + "encoding/json" + "net/http" + + "github.com/harness/gitness/internal/api/render" +) + +// RenderResource is a helper function that renders a single +// resource, wrapped in the harness payload envelope. +func RenderResource(w http.ResponseWriter, code int, v interface{}) { + payload := new(wrapper) + payload.Status = "SUCCESS" + payload.Data, _ = json.Marshal(v) + if code >= http.StatusBadRequest { + payload.Status = "ERROR" + } else if code >= http.StatusMultipleChoices { + payload.Status = "FAILURE" + } + render.JSON(w, code, payload) +} + +// wrapper defines the payload wrapper. +type wrapper struct { + Status string `json:"status"` + Data json.RawMessage `json:"data"` +} diff --git a/internal/api/render/render.go b/internal/api/render/render.go new file mode 100644 index 0000000000..04c1fa69d5 --- /dev/null +++ b/internal/api/render/render.go @@ -0,0 +1,178 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package render + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "io" + "net/http" + "os" + "strconv" + + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/types" + + "github.com/rs/zerolog/log" +) + +// indent the json-encoded API responses. +var indent bool + +func init() { + indent, _ = strconv.ParseBool( + os.Getenv("HTTP_JSON_INDENT"), + ) +} + +// TranslatedUserError writes the translated user error of the provided error. +func TranslatedUserError(w http.ResponseWriter, err error) { + log.Warn().Msgf("operation resulted in user facing error. Internal details: %s", err) + UserError(w, usererror.Translate(err)) +} + +// NotFound writes the json-encoded message for a not found error. +func NotFound(w http.ResponseWriter) { + UserError(w, usererror.ErrNotFound) +} + +// Unauthorized writes the json-encoded message for an unauthorized error. +func Unauthorized(w http.ResponseWriter) { + UserError(w, usererror.ErrUnauthorized) +} + +// Forbidden writes the json-encoded message for a forbidden error. +func Forbidden(w http.ResponseWriter) { + UserError(w, usererror.ErrForbidden) +} + +// BadRequest writes the json-encoded message for a bad request error. +func BadRequest(w http.ResponseWriter) { + UserError(w, usererror.ErrBadRequest) +} + +// BadRequestError writes the json-encoded error with a bad request status code. 
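+// It is a thin wrapper around UserError for callers that already hold a
+// *usererror.Error.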
+func BadRequestError(w http.ResponseWriter, err *usererror.Error) { + UserError(w, err) +} + +// BadRequestf writes the json-encoded message with a bad request status code. +func BadRequestf(w http.ResponseWriter, format string, args ...interface{}) { + ErrorMessagef(w, http.StatusBadRequest, format, args...) +} + +// InternalError writes the json-encoded message for an internal error. +func InternalError(w http.ResponseWriter) { + UserError(w, usererror.ErrInternal) +} + +// ErrorMessagef writes the json-encoded, formatted error message. +func ErrorMessagef(w http.ResponseWriter, code int, format string, args ...interface{}) { + JSON(w, code, &usererror.Error{Message: fmt.Sprintf(format, args...)}) +} + +// UserError writes the json-encoded user error. +func UserError(w http.ResponseWriter, err *usererror.Error) { + JSON(w, err.Status, err) +} + +// DeleteSuccessful writes the header for a successful delete. +func DeleteSuccessful(w http.ResponseWriter) { + w.WriteHeader(http.StatusNoContent) +} + +// JSON writes the json-encoded value to the response +// with the provided status. +func JSON(w http.ResponseWriter, code int, v interface{}) { + // set common headers + w.Header().Set("Content-Type", "application/json; charset=utf-8") + w.Header().Set("X-Content-Type-Options", "nosniff") + + // write the headers before the body - otherwise the status would default to 200 OK + w.WriteHeader(code) + + // write body + enc := json.NewEncoder(w) + if indent { // is this necessary? it will affect performance + enc.SetIndent("", " ") + } + if err := enc.Encode(v); err != nil { + log.Err(err).Msgf("Failed to write json encoding to response body.") + } +} + +// Reader reads the content from the provided reader and writes it as is to the response body. +// NOTE: If no content-type header is added beforehand, the content-type will be deduced +// automatically by `http.DetectContentType` (https://pkg.go.dev/net/http#DetectContentType). +func Reader(ctx context.Context, w http.ResponseWriter, code int, reader io.Reader) { + w.WriteHeader(code) + _, err := io.Copy(w, reader) + if err != nil { + log.Ctx(ctx).Err(err).Msg("failed to render data from reader") + } +} + +// JSONArrayDynamic outputs a JSON array whose elements are streamed from a channel. +// Due to the dynamic nature (unknown number of elements) the function will use +// chunked transfer encoding for large responses. +func JSONArrayDynamic[T comparable](ctx context.Context, w http.ResponseWriter, stream types.Stream[T]) { + count := 0 + enc := json.NewEncoder(w) + + for { + data, err := stream.Next() + if errors.Is(err, io.EOF) { + break + } + + if err != nil { + // User canceled the request - no need to do anything + if errors.Is(err, context.Canceled) { + return + } + + if count == 0 { + // Write the error only if no data has been streamed yet. + TranslatedUserError(w, err) + return + } + + // Array data has already been streamed, it's too late for the output - so just log and quit.
+ log.Ctx(ctx).Warn().Msgf("Failed to write JSON array response body: %v", err) + return + } + + if count == 0 { + w.Header().Set("Content-Type", "application/json; charset=utf-8") + w.Header().Set("X-Content-Type-Options", "nosniff") + w.WriteHeader(http.StatusOK) + _, _ = w.Write([]byte{'['}) + } else { + _, _ = w.Write([]byte{','}) + } + + count++ + + _ = enc.Encode(data) + } + + if count == 0 { + _, _ = w.Write([]byte{'['}) + } + + _, _ = w.Write([]byte{']'}) +} diff --git a/internal/api/render/render_test.go b/internal/api/render/render_test.go new file mode 100644 index 0000000000..2fa74b9b5e --- /dev/null +++ b/internal/api/render/render_test.go @@ -0,0 +1,162 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package render + +import ( + "encoding/json" + "net/http" + "net/http/httptest" + "testing" + + "github.com/harness/gitness/internal/api/usererror" +) + +func TestWriteErrorf(t *testing.T) { + w := httptest.NewRecorder() + + e := usererror.New(500, "abc") + UserError(w, e) + + if got, want := w.Code, 500; want != got { + t.Errorf("Want response code %d, got %d", want, got) + } + + errjson := &usererror.Error{} + if err := json.NewDecoder(w.Body).Decode(errjson); err != nil { + t.Error(err) + } + if got, want := errjson.Message, e.Message; got != want { + t.Errorf("Want error message %s, got %s", want, got) + } +} + +func TestWriteErrorCode(t *testing.T) { + w := httptest.NewRecorder() + + ErrorMessagef(w, 418, "pc load letter %d", 1) + + if got, want := w.Code, 418; want != got { + t.Errorf("Want response code %d, got %d", want, got) + } + + errjson := &usererror.Error{} + if err := json.NewDecoder(w.Body).Decode(errjson); err != nil { + t.Error(err) + } + if got, want := errjson.Message, "pc load letter 1"; got != want { + t.Errorf("Want error message %s, got %s", want, got) + } +} + +func TestWriteNotFound(t *testing.T) { + w := httptest.NewRecorder() + + NotFound(w) + + if got, want := w.Code, 404; want != got { + t.Errorf("Want response code %d, got %d", want, got) + } + + errjson := &usererror.Error{} + if err := json.NewDecoder(w.Body).Decode(errjson); err != nil { + t.Error(err) + } + if got, want := errjson.Message, usererror.ErrNotFound.Message; got != want { + t.Errorf("Want error message %s, got %s", want, got) + } +} + +func TestWriteUnauthorized(t *testing.T) { + w := httptest.NewRecorder() + + Unauthorized(w) + + if got, want := w.Code, 401; want != got { + t.Errorf("Want response code %d, got %d", want, got) + } + + errjson := &usererror.Error{} + if err := json.NewDecoder(w.Body).Decode(errjson); err != nil { + t.Error(err) + } + if got, want := errjson.Message, usererror.ErrUnauthorized.Message; got != want { + t.Errorf("Want error message %s, got %s", want, got) + } +} + +func TestWriteForbidden(t *testing.T) { + w := httptest.NewRecorder() + + Forbidden(w) + + if got, want := w.Code, 403; want != got { + t.Errorf("Want response code %d, got %d", want, got) + } + + errjson := &usererror.Error{} + if err := 
json.NewDecoder(w.Body).Decode(errjson); err != nil { + t.Error(err) + } + if got, want := errjson.Message, usererror.ErrForbidden.Message; got != want { + t.Errorf("Want error message %s, got %s", want, got) + } +} + +func TestWriteBadRequest(t *testing.T) { + w := httptest.NewRecorder() + + BadRequest(w) + + if got, want := w.Code, 400; want != got { + t.Errorf("Want response code %d, got %d", want, got) + } + + errjson := &usererror.Error{} + if err := json.NewDecoder(w.Body).Decode(errjson); err != nil { + t.Error(err) + } + if got, want := errjson.Message, usererror.ErrBadRequest.Message; got != want { + t.Errorf("Want error message %s, got %s", want, got) + } +} + +func TestWriteJSON(t *testing.T) { + // without indent + { + w := httptest.NewRecorder() + JSON(w, http.StatusTeapot, map[string]string{"hello": "world"}) + if got, want := w.Body.String(), "{\"hello\":\"world\"}\n"; got != want { + t.Errorf("Want JSON body %q, got %q", want, got) + } + if got, want := w.Header().Get("Content-Type"), "application/json; charset=utf-8"; got != want { + t.Errorf("Want Content-Type %q, got %q", want, got) + } + if got, want := w.Code, http.StatusTeapot; got != want { + t.Errorf("Want status code %d, got %d", want, got) + } + } + // with indent + { + indent = true + defer func() { + indent = false + }() + w := httptest.NewRecorder() + JSON(w, http.StatusTeapot, map[string]string{"hello": "world"}) + if got, want := w.Body.String(), "{\n \"hello\": \"world\"\n}\n"; got != want { + t.Errorf("Want JSON body %q, got %q", want, got) + } + } +} diff --git a/internal/api/render/util.go b/internal/api/render/util.go new file mode 100644 index 0000000000..df0e269d03 --- /dev/null +++ b/internal/api/render/util.go @@ -0,0 +1,37 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package render + +// pagelen calculates to total number of pages given the +// page size and total count of all paginated items. +func pagelen(size, total int) int { + quotient, remainder := total/size, total%size + switch { + case quotient == 0: + return 1 + case remainder == 0: + return quotient + default: + return quotient + 1 + } +} + +// max returns the largest of x or y. +func max(x, y int) int { + if x > y { + return x + } + return y +} diff --git a/internal/api/render/util_test.go b/internal/api/render/util_test.go new file mode 100644 index 0000000000..7646c58e6a --- /dev/null +++ b/internal/api/render/util_test.go @@ -0,0 +1,38 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package render + +import "testing" + +func Test_pagelen(t *testing.T) { + tests := []struct { + size, total, want int + }{ + {25, 1, 1}, + {25, 24, 1}, + {25, 25, 1}, + {25, 26, 2}, + {25, 49, 2}, + {25, 50, 2}, + {25, 51, 3}, + } + + for _, test := range tests { + got, want := pagelen(test.size, test.total), test.want + if got != want { + t.Errorf("got page length %d, want %d", got, want) + } + } +} diff --git a/internal/api/request/auth.go b/internal/api/request/auth.go new file mode 100644 index 0000000000..e387bd0eb3 --- /dev/null +++ b/internal/api/request/auth.go @@ -0,0 +1,36 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package request + +import ( + "net/http" +) + +const ( + QueryParamAccessToken = "access_token" + QueryParamIncludeCookie = "include_cookie" +) + +func GetAccessTokenFromQuery(r *http.Request) (string, bool) { + return QueryParam(r, QueryParamAccessToken) +} + +func GetIncludeCookieFromQueryOrDefault(r *http.Request, dflt bool) (bool, error) { + return QueryParamAsBoolOrDefault(r, QueryParamIncludeCookie, dflt) +} + +func GetTokenFromCookie(r *http.Request, cookieName string) (string, bool) { + return GetCookie(r, cookieName) +} diff --git a/internal/api/request/check.go b/internal/api/request/check.go new file mode 100644 index 0000000000..8875bb4cba --- /dev/null +++ b/internal/api/request/check.go @@ -0,0 +1,29 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package request + +import ( + "net/http" + + "github.com/harness/gitness/types" +) + +// ParseCheckListOptions extracts the status check list API options from the url. 
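+// Page and size are read via ParsePage and ParseLimit, so the shared paging defaults and maximum limit apply.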
+func ParseCheckListOptions(r *http.Request) types.CheckListOptions {
+	return types.CheckListOptions{
+		Page: ParsePage(r),
+		Size: ParseLimit(r),
+	}
+}
diff --git a/internal/api/request/connector.go b/internal/api/request/connector.go
new file mode 100644
index 0000000000..66a1a25cc0
--- /dev/null
+++ b/internal/api/request/connector.go
@@ -0,0 +1,20 @@
+package request
+
+import (
+	"net/http"
+	"net/url"
+)
+
+const (
+	PathParamConnectorRef = "connector_ref"
+)
+
+func GetConnectorRefFromPath(r *http.Request) (string, error) {
+	rawRef, err := PathParamOrError(r, PathParamConnectorRef)
+	if err != nil {
+		return "", err
+	}
+
+	// paths are unescaped
+	return url.PathUnescape(rawRef)
+}
diff --git a/internal/api/request/context.go b/internal/api/request/context.go
new file mode 100644
index 0000000000..a103966021
--- /dev/null
+++ b/internal/api/request/context.go
@@ -0,0 +1,121 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package request
+
+// This pattern was inspired by the kubernetes request context package.
+// https://github.com/kubernetes/apiserver/blob/master/pkg/endpoints/request/context.go
+
+import (
+	"context"
+
+	"github.com/harness/gitness/internal/auth"
+	"github.com/harness/gitness/types"
+)
+
+type key int
+
+const (
+	authSessionKey key = iota
+	serviceAccountKey
+	userKey
+	spaceKey
+	repoKey
+	requestIDKey
+)
+
+// WithAuthSession returns a copy of parent in which the principal
+// value is set.
+func WithAuthSession(parent context.Context, v *auth.Session) context.Context {
+	return context.WithValue(parent, authSessionKey, v)
+}
+
+// AuthSessionFrom returns the value of the principal key on the
+// context.
+func AuthSessionFrom(ctx context.Context) (*auth.Session, bool) {
+	v, ok := ctx.Value(authSessionKey).(*auth.Session)
+	return v, ok && v != nil
+}
+
+// PrincipalFrom returns the principal of the auth session.
+func PrincipalFrom(ctx context.Context) (*types.Principal, bool) {
+	v, ok := AuthSessionFrom(ctx)
+	if !ok {
+		return nil, false
+	}
+
+	return &v.Principal, true
+}
+
+// WithUser returns a copy of parent in which the user value is set.
+func WithUser(parent context.Context, v *types.User) context.Context {
+	return context.WithValue(parent, userKey, v)
+}
+
+// UserFrom returns the value of the user key on the
+// context - ok is true iff a non-nil value existed.
+func UserFrom(ctx context.Context) (*types.User, bool) {
+	v, ok := ctx.Value(userKey).(*types.User)
+	return v, ok && v != nil
+}
+
+// WithServiceAccount returns a copy of parent in which the service account value is set.
+func WithServiceAccount(parent context.Context, v *types.ServiceAccount) context.Context {
+	return context.WithValue(parent, serviceAccountKey, v)
+}
+
+// ServiceAccountFrom returns the value of the service account key on the
+// context - ok is true iff a non-nil value existed.
+func ServiceAccountFrom(ctx context.Context) (*types.ServiceAccount, bool) {
+	v, ok := ctx.Value(serviceAccountKey).(*types.ServiceAccount)
+	return v, ok && v != nil
+}
+
+// WithSpace returns a copy of parent in which the space value is set.
+func WithSpace(parent context.Context, v *types.Space) context.Context {
+	return context.WithValue(parent, spaceKey, v)
+}
+
+// SpaceFrom returns the value of the space key on the
+// context - ok is true iff a non-nil value existed.
+func SpaceFrom(ctx context.Context) (*types.Space, bool) {
+	v, ok := ctx.Value(spaceKey).(*types.Space)
+	return v, ok && v != nil
+}
+
+// WithRepo returns a copy of parent in which the repo value is set.
+func WithRepo(parent context.Context, v *types.Repository) context.Context {
+	return context.WithValue(parent, repoKey, v)
+}
+
+// RepoFrom returns the value of the repo key on the
+// context - ok is true iff a non-nil value existed.
+func RepoFrom(ctx context.Context) (*types.Repository, bool) {
+	v, ok := ctx.Value(repoKey).(*types.Repository)
+	return v, ok && v != nil
+}
+
+// WithRequestID returns a copy of parent in which the request id value is set.
+func WithRequestID(parent context.Context, v string) context.Context {
+	return context.WithValue(parent, requestIDKey, v)
+}
+
+// RequestIDFrom returns the value of the request ID key on the
+// context - ok is true iff a non-empty value existed.
+//
+//nolint:revive // need to emphasize that it's the request id we are retrieving.
+func RequestIDFrom(ctx context.Context) (string, bool) {
+	v, ok := ctx.Value(requestIDKey).(string)
+	return v, ok && v != ""
+}
diff --git a/internal/api/request/context_test.go b/internal/api/request/context_test.go
new file mode 100644
index 0000000000..eacc6a2788
--- /dev/null
+++ b/internal/api/request/context_test.go
@@ -0,0 +1,21 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package request
+
+import "testing"
+
+func TestContext(t *testing.T) {
+	t.Skip()
+}
diff --git a/internal/api/request/git.go b/internal/api/request/git.go
new file mode 100644
index 0000000000..e5204f1f20
--- /dev/null
+++ b/internal/api/request/git.go
@@ -0,0 +1,107 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+ +package request + +import ( + "net/http" + + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +const ( + QueryParamGitRef = "git_ref" + QueryParamIncludeCommit = "include_commit" + PathParamCommitSHA = "commit_sha" + QueryParamLineFrom = "line_from" + QueryParamLineTo = "line_to" + QueryParamPath = "path" + QueryParamSince = "since" + QueryParamUntil = "until" + QueryParamCommitter = "committer" +) + +func GetGitRefFromQueryOrDefault(r *http.Request, deflt string) string { + return QueryParamOrDefault(r, QueryParamGitRef, deflt) +} + +func GetIncludeCommitFromQueryOrDefault(r *http.Request, deflt bool) (bool, error) { + return QueryParamAsBoolOrDefault(r, QueryParamIncludeCommit, deflt) +} + +func GetCommitSHAFromPath(r *http.Request) (string, error) { + return PathParamOrError(r, PathParamCommitSHA) +} + +// ParseSortBranch extracts the branch sort parameter from the url. +func ParseSortBranch(r *http.Request) enum.BranchSortOption { + return enum.ParseBranchSortOption( + r.URL.Query().Get(QueryParamSort), + ) +} + +// ParseBranchFilter extracts the branch filter from the url. +func ParseBranchFilter(r *http.Request) *types.BranchFilter { + return &types.BranchFilter{ + Query: ParseQuery(r), + Sort: ParseSortBranch(r), + Order: ParseOrder(r), + Page: ParsePage(r), + Size: ParseLimit(r), + } +} + +// ParseSortTag extracts the tag sort parameter from the url. +func ParseSortTag(r *http.Request) enum.TagSortOption { + return enum.ParseTagSortOption( + r.URL.Query().Get(QueryParamSort), + ) +} + +// ParseTagFilter extracts the tag filter from the url. +func ParseTagFilter(r *http.Request) *types.TagFilter { + return &types.TagFilter{ + Query: ParseQuery(r), + Sort: ParseSortTag(r), + Order: ParseOrder(r), + Page: ParsePage(r), + Size: ParseLimit(r), + } +} + +// ParseCommitFilter extracts the commit filter from the url. +func ParseCommitFilter(r *http.Request) (*types.CommitFilter, error) { + // since is optional, skipped if set to 0 + since, err := QueryParamAsPositiveInt64OrDefault(r, QueryParamSince, 0) + if err != nil { + return nil, err + } + // until is optional, skipped if set to 0 + until, err := QueryParamAsPositiveInt64OrDefault(r, QueryParamUntil, 0) + if err != nil { + return nil, err + } + return &types.CommitFilter{ + After: QueryParamOrDefault(r, QueryParamAfter, ""), + PaginationFilter: types.PaginationFilter{ + Page: ParsePage(r), + Limit: ParseLimit(r), + }, + Path: QueryParamOrDefault(r, QueryParamPath, ""), + Since: since, + Until: until, + Committer: QueryParamOrDefault(r, QueryParamCommitter, ""), + }, nil +} diff --git a/internal/api/request/header.go b/internal/api/request/header.go new file mode 100644 index 0000000000..660d8c2b9e --- /dev/null +++ b/internal/api/request/header.go @@ -0,0 +1,22 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package request + +const ( + // TODO: have shared constants across all services? 
+ HeaderRequestID = "X-Request-Id" + HeaderUserAgent = "User-Agent" + HeaderAuthorization = "Authorization" +) diff --git a/internal/api/request/membership.go b/internal/api/request/membership.go new file mode 100644 index 0000000000..0aef3fa5da --- /dev/null +++ b/internal/api/request/membership.go @@ -0,0 +1,54 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package request + +import ( + "net/http" + + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +// ParseMembershipUserSort extracts the membership sort parameter from the url. +func ParseMembershipUserSort(r *http.Request) enum.MembershipUserSort { + return enum.ParseMembershipUserSort( + r.URL.Query().Get(QueryParamSort), + ) +} + +// ParseMembershipUserFilter extracts the membership filter from the url. +func ParseMembershipUserFilter(r *http.Request) types.MembershipUserFilter { + return types.MembershipUserFilter{ + ListQueryFilter: ParseListQueryFilterFromRequest(r), + Sort: ParseMembershipUserSort(r), + Order: ParseOrder(r), + } +} + +// ParseMembershipSpaceSort extracts the membership space sort parameter from the url. +func ParseMembershipSpaceSort(r *http.Request) enum.MembershipSpaceSort { + return enum.ParseMembershipSpaceSort( + r.URL.Query().Get(QueryParamSort), + ) +} + +// ParseMembershipSpaceFilter extracts the membership space filter from the url. +func ParseMembershipSpaceFilter(r *http.Request) types.MembershipSpaceFilter { + return types.MembershipSpaceFilter{ + ListQueryFilter: ParseListQueryFilterFromRequest(r), + Sort: ParseMembershipSpaceSort(r), + Order: ParseOrder(r), + } +} diff --git a/internal/api/request/pipeline.go b/internal/api/request/pipeline.go new file mode 100644 index 0000000000..3c23291ef2 --- /dev/null +++ b/internal/api/request/pipeline.go @@ -0,0 +1,74 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package request + +import ( + "net/http" + "net/url" +) + +const ( + PathParamPipelineRef = "pipeline_uid" + PathParamExecutionNumber = "execution_number" + PathParamStageNumber = "stage_number" + PathParamStepNumber = "step_number" + PathParamTriggerUID = "trigger_uid" + QueryParamLatest = "latest" + QueryParamBranch = "branch" +) + +func GetPipelineUIDFromPath(r *http.Request) (string, error) { + rawRef, err := PathParamOrError(r, PathParamPipelineRef) + if err != nil { + return "", err + } + + // paths are unescaped + return url.PathUnescape(rawRef) +} + +func GetBranchFromQuery(r *http.Request) string { + return QueryParamOrDefault(r, QueryParamBranch, "") +} + +func GetExecutionNumberFromPath(r *http.Request) (int64, error) { + return PathParamAsPositiveInt64(r, PathParamExecutionNumber) +} + +func GetStageNumberFromPath(r *http.Request) (int64, error) { + return PathParamAsPositiveInt64(r, PathParamStageNumber) +} + +func GetStepNumberFromPath(r *http.Request) (int64, error) { + return PathParamAsPositiveInt64(r, PathParamStepNumber) +} + +func GetLatestFromPath(r *http.Request) bool { + v, _ := QueryParam(r, QueryParamLatest) + if v == "true" { + return true + } + return false +} + +func GetTriggerUIDFromPath(r *http.Request) (string, error) { + rawRef, err := PathParamOrError(r, PathParamTriggerUID) + if err != nil { + return "", err + } + + // paths are unescaped + return url.PathUnescape(rawRef) +} diff --git a/internal/api/request/principal.go b/internal/api/request/principal.go new file mode 100644 index 0000000000..9c87646de6 --- /dev/null +++ b/internal/api/request/principal.go @@ -0,0 +1,98 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package request + +import ( + "net/http" + + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +const ( + PathParamPrincipalUID = "principal_uid" + PathParamUserUID = "user_uid" + PathParamUserID = "user_id" + PathParamServiceAccountUID = "sa_uid" + + QueryParamPrincipalID = "principal_id" +) + +// GetUserIDFromPath returns the user id from the request path. +func GetUserIDFromPath(r *http.Request) (int64, error) { + return PathParamAsPositiveInt64(r, PathParamUserID) +} + +func GetPrincipalUIDFromPath(r *http.Request) (string, error) { + return PathParamOrError(r, PathParamPrincipalUID) +} + +// GetPrincipalIDFromQuery returns the principal id from the request query. +func GetPrincipalIDFromQuery(r *http.Request) (int64, error) { + return QueryParamAsPositiveInt64(r, QueryParamPrincipalID) +} + +func GetUserUIDFromPath(r *http.Request) (string, error) { + return PathParamOrError(r, PathParamUserUID) +} + +func GetServiceAccountUIDFromPath(r *http.Request) (string, error) { + return PathParamOrError(r, PathParamServiceAccountUID) +} + +// ParseSortUser extracts the user sort parameter from the url. 
+func ParseSortUser(r *http.Request) enum.UserAttr { + return enum.ParseUserAttr( + r.URL.Query().Get(QueryParamSort), + ) +} + +// ParseUserFilter extracts the user filter from the url. +func ParseUserFilter(r *http.Request) *types.UserFilter { + return &types.UserFilter{ + Order: ParseOrder(r), + Page: ParsePage(r), + Sort: ParseSortUser(r), + Size: ParseLimit(r), + } +} + +// ParsePrincipalTypes extracts the principal types from the url. +func ParsePrincipalTypes(r *http.Request) []enum.PrincipalType { + pTypesRaw := r.URL.Query()[QueryParamType] + m := make(map[enum.PrincipalType]struct{}) // use map to eliminate duplicates + for _, pTypeRaw := range pTypesRaw { + if pType, ok := enum.PrincipalType(pTypeRaw).Sanitize(); ok { + m[pType] = struct{}{} + } + } + + res := make([]enum.PrincipalType, 0, len(m)) + for t := range m { + res = append(res, t) + } + + return res +} + +// ParsePrincipalFilter extracts the principal filter from the url. +func ParsePrincipalFilter(r *http.Request) *types.PrincipalFilter { + return &types.PrincipalFilter{ + Query: ParseQuery(r), + Page: ParsePage(r), + Size: ParseLimit(r), + Types: ParsePrincipalTypes(r), + } +} diff --git a/internal/api/request/pullreq.go b/internal/api/request/pullreq.go new file mode 100644 index 0000000000..0154386c8b --- /dev/null +++ b/internal/api/request/pullreq.go @@ -0,0 +1,155 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package request + +import ( + "net/http" + + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +const ( + PathParamPullReqNumber = "pullreq_number" + PathParamPullReqCommentID = "pullreq_comment_id" + PathParamReviewerID = "pullreq_reviewer_id" +) + +func GetPullReqNumberFromPath(r *http.Request) (int64, error) { + return PathParamAsPositiveInt64(r, PathParamPullReqNumber) +} + +func GetReviewerIDFromPath(r *http.Request) (int64, error) { + return PathParamAsPositiveInt64(r, PathParamReviewerID) +} + +func GetPullReqCommentIDPath(r *http.Request) (int64, error) { + return PathParamAsPositiveInt64(r, PathParamPullReqCommentID) +} + +// ParseSortPullReq extracts the pull request sort parameter from the url. +func ParseSortPullReq(r *http.Request) enum.PullReqSort { + result, _ := enum.PullReqSort(r.URL.Query().Get(QueryParamSort)).Sanitize() + return result +} + +// parsePullReqStates extracts the pull request states from the url. +func parsePullReqStates(r *http.Request) []enum.PullReqState { + strStates, _ := QueryParamList(r, QueryParamState) + m := make(map[enum.PullReqState]struct{}) // use map to eliminate duplicates + for _, s := range strStates { + if state, ok := enum.PullReqState(s).Sanitize(); ok { + m[state] = struct{}{} + } + } + + states := make([]enum.PullReqState, 0, len(m)) + for s := range m { + states = append(states, s) + } + + return states +} + +// ParsePullReqFilter extracts the pull request query parameter from the url. 
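+// A created_by value of 0 is treated as unset and disables that filter.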
+func ParsePullReqFilter(r *http.Request) (*types.PullReqFilter, error) { + // created_by is optional, skipped if set to 0 + createdBy, err := QueryParamAsPositiveInt64OrDefault(r, QueryParamCreatedBy, 0) + if err != nil { + return nil, err + } + return &types.PullReqFilter{ + Page: ParsePage(r), + Size: ParseLimit(r), + Query: ParseQuery(r), + CreatedBy: createdBy, + SourceRepoRef: r.URL.Query().Get("source_repo_ref"), + SourceBranch: r.URL.Query().Get("source_branch"), + TargetBranch: r.URL.Query().Get("target_branch"), + States: parsePullReqStates(r), + Sort: ParseSortPullReq(r), + Order: ParseOrder(r), + }, nil +} + +// ParsePullReqActivityFilter extracts the pull request activity query parameter from the url. +func ParsePullReqActivityFilter(r *http.Request) (*types.PullReqActivityFilter, error) { + // after is optional, skipped if set to 0 + after, err := QueryParamAsPositiveInt64OrDefault(r, QueryParamAfter, 0) + if err != nil { + return nil, err + } + // before is optional, skipped if set to 0 + before, err := QueryParamAsPositiveInt64OrDefault(r, QueryParamBefore, 0) + if err != nil { + return nil, err + } + // limit is optional, skipped if set to 0 + limit, err := QueryParamAsPositiveInt64OrDefault(r, QueryParamLimit, 0) + if err != nil { + return nil, err + } + return &types.PullReqActivityFilter{ + After: after, + Before: before, + Limit: int(limit), + Types: parsePullReqActivityTypes(r), + Kinds: parsePullReqActivityKinds(r), + }, nil +} + +// parsePullReqActivityKinds extracts the pull request activity kinds from the url. +func parsePullReqActivityKinds(r *http.Request) []enum.PullReqActivityKind { + strKinds := r.URL.Query()[QueryParamKind] + m := make(map[enum.PullReqActivityKind]struct{}) // use map to eliminate duplicates + for _, s := range strKinds { + if kind, ok := enum.PullReqActivityKind(s).Sanitize(); ok { + m[kind] = struct{}{} + } + } + + if len(m) == 0 { + return nil + } + + kinds := make([]enum.PullReqActivityKind, 0, len(m)) + for k := range m { + kinds = append(kinds, k) + } + + return kinds +} + +// parsePullReqActivityTypes extracts the pull request activity types from the url. +func parsePullReqActivityTypes(r *http.Request) []enum.PullReqActivityType { + strType := r.URL.Query()[QueryParamType] + m := make(map[enum.PullReqActivityType]struct{}) // use map to eliminate duplicates + for _, s := range strType { + if t, ok := enum.PullReqActivityType(s).Sanitize(); ok { + m[t] = struct{}{} + } + } + + if len(m) == 0 { + return nil + } + + activityTypes := make([]enum.PullReqActivityType, 0, len(m)) + for t := range m { + activityTypes = append(activityTypes, t) + } + + return activityTypes +} diff --git a/internal/api/request/repo.go b/internal/api/request/repo.go new file mode 100644 index 0000000000..243809e1cb --- /dev/null +++ b/internal/api/request/repo.go @@ -0,0 +1,61 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package request + +import ( + "net/http" + "net/url" + + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +const ( + PathParamRepoRef = "repo_ref" + QueryParamRepoID = "repo_id" +) + +func GetRepoRefFromPath(r *http.Request) (string, error) { + rawRef, err := PathParamOrError(r, PathParamRepoRef) + if err != nil { + return "", err + } + + // paths are unescaped + return url.PathUnescape(rawRef) +} + +// GetRepoIDFromQuery returns the repo id from the request query. +func GetRepoIDFromQuery(r *http.Request) (int64, error) { + return QueryParamAsPositiveInt64(r, QueryParamRepoID) +} + +// ParseSortRepo extracts the repo sort parameter from the url. +func ParseSortRepo(r *http.Request) enum.RepoAttr { + return enum.ParseRepoAtrr( + r.URL.Query().Get(QueryParamSort), + ) +} + +// ParseRepoFilter extracts the repository filter from the url. +func ParseRepoFilter(r *http.Request) *types.RepoFilter { + return &types.RepoFilter{ + Query: ParseQuery(r), + Order: ParseOrder(r), + Page: ParsePage(r), + Sort: ParseSortRepo(r), + Size: ParseLimit(r), + } +} diff --git a/internal/api/request/secret.go b/internal/api/request/secret.go new file mode 100644 index 0000000000..b104a64492 --- /dev/null +++ b/internal/api/request/secret.go @@ -0,0 +1,34 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package request + +import ( + "net/http" + "net/url" +) + +const ( + PathParamSecretRef = "secret_ref" +) + +func GetSecretRefFromPath(r *http.Request) (string, error) { + rawRef, err := PathParamOrError(r, PathParamSecretRef) + if err != nil { + return "", err + } + + // paths are unescaped + return url.PathUnescape(rawRef) +} diff --git a/internal/api/request/space.go b/internal/api/request/space.go new file mode 100644 index 0000000000..9665e47463 --- /dev/null +++ b/internal/api/request/space.go @@ -0,0 +1,55 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package request + +import ( + "net/http" + "net/url" + + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +const ( + PathParamSpaceRef = "space_ref" +) + +func GetSpaceRefFromPath(r *http.Request) (string, error) { + rawRef, err := PathParamOrError(r, PathParamSpaceRef) + if err != nil { + return "", err + } + + // paths are unescaped and lower + return url.PathUnescape(rawRef) +} + +// ParseSortSpace extracts the space sort parameter from the url. 
+func ParseSortSpace(r *http.Request) enum.SpaceAttr { + return enum.ParseSpaceAttr( + r.URL.Query().Get(QueryParamSort), + ) +} + +// ParseSpaceFilter extracts the space filter from the url. +func ParseSpaceFilter(r *http.Request) *types.SpaceFilter { + return &types.SpaceFilter{ + Query: ParseQuery(r), + Order: ParseOrder(r), + Page: ParsePage(r), + Sort: ParseSortSpace(r), + Size: ParseLimit(r), + } +} diff --git a/internal/api/request/template.go b/internal/api/request/template.go new file mode 100644 index 0000000000..57f8e966dc --- /dev/null +++ b/internal/api/request/template.go @@ -0,0 +1,20 @@ +package request + +import ( + "net/http" + "net/url" +) + +const ( + PathParamTemplateRef = "template_ref" +) + +func GetTemplateRefFromPath(r *http.Request) (string, error) { + rawRef, err := PathParamOrError(r, PathParamTemplateRef) + if err != nil { + return "", err + } + + // paths are unescaped + return url.PathUnescape(rawRef) +} diff --git a/internal/api/request/token.go b/internal/api/request/token.go new file mode 100644 index 0000000000..c19370796f --- /dev/null +++ b/internal/api/request/token.go @@ -0,0 +1,27 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package request + +import ( + "net/http" +) + +const ( + PathParamTokenUID = "token_uid" +) + +func GetTokenUIDFromPath(r *http.Request) (string, error) { + return PathParamOrError(r, PathParamTokenUID) +} diff --git a/internal/api/request/util.go b/internal/api/request/util.go new file mode 100644 index 0000000000..8bcfde278d --- /dev/null +++ b/internal/api/request/util.go @@ -0,0 +1,262 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package request + +import ( + "errors" + "fmt" + "net/http" + "strconv" + + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/go-chi/chi" +) + +const ( + PathParamRemainder = "*" + + QueryParamCreatedBy = "created_by" + QueryParamSort = "sort" + QueryParamOrder = "order" + QueryParamQuery = "query" + + QueryParamState = "state" + QueryParamKind = "kind" + QueryParamType = "type" + + QueryParamAfter = "after" + QueryParamBefore = "before" + + QueryParamPage = "page" + QueryParamLimit = "limit" + PerPageDefault = 30 + PerPageMax = 100 +) + +// GetCookie tries to retrive the cookie from the request or returns false if it doesn't exist. 
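+// An unexpected error from r.Cookie causes a panic, since the method is documented to only return http.ErrNoCookie.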
+func GetCookie(r *http.Request, cookieName string) (string, bool) {
+	cookie, err := r.Cookie(cookieName)
+	if errors.Is(err, http.ErrNoCookie) {
+		return "", false
+	} else if err != nil {
+		// this should never happen - documentation and code only return `nil` or `http.ErrNoCookie`
+		panic(fmt.Sprintf("unexpected error from request.Cookie(...) method: %s", err))
+	}
+
+	return cookie.Value, true
+}
+
+// PathParamOrError tries to retrieve the parameter from the request and
+// returns the parameter if it exists and is not empty, otherwise returns an error.
+func PathParamOrError(r *http.Request, paramName string) (string, error) {
+	val, ok := PathParam(r, paramName)
+	if !ok {
+		return "", usererror.BadRequestf("Parameter '%s' not found in request path.", paramName)
+	}
+
+	return val, nil
+}
+
+// PathParamOrEmpty retrieves the path parameter or returns an empty string otherwise.
+func PathParamOrEmpty(r *http.Request, paramName string) string {
+	val, ok := PathParam(r, paramName)
+	if !ok {
+		return ""
+	}
+
+	return val
+}
+
+// PathParam retrieves the path parameter, or returns false if it doesn't exist.
+func PathParam(r *http.Request, paramName string) (string, bool) {
+	val := chi.URLParam(r, paramName)
+	if val == "" {
+		return "", false
+	}
+
+	return val, true
+}
+
+// QueryParam returns the parameter if it exists.
+func QueryParam(r *http.Request, paramName string) (string, bool) {
+	query := r.URL.Query()
+	if !query.Has(paramName) {
+		return "", false
+	}
+
+	return query.Get(paramName), true
+}
+
+// QueryParamList returns the list of values for the parameter if it exists.
+func QueryParamList(r *http.Request, paramName string) ([]string, bool) {
+	query := r.URL.Query()
+	if !query.Has(paramName) {
+		return nil, false
+	}
+
+	return query[paramName], true
+}
+
+// QueryParamOrDefault retrieves the parameter from the query and
+// returns the parameter if it exists, otherwise returns the provided default value.
+func QueryParamOrDefault(r *http.Request, paramName string, deflt string) string {
+	val, ok := QueryParam(r, paramName)
+	if !ok {
+		return deflt
+	}
+
+	return val
+}
+
+// QueryParamOrError tries to retrieve the parameter from the query and
+// returns the parameter if it exists, otherwise returns an error.
+func QueryParamOrError(r *http.Request, paramName string) (string, error) {
+	val, ok := QueryParam(r, paramName)
+	if !ok {
+		return "", usererror.BadRequestf("Parameter '%s' not found in query.", paramName)
+	}
+
+	return val, nil
+}
+
+// QueryParamAsPositiveInt64OrDefault extracts a positive integer parameter from the request query.
+// If the parameter doesn't exist the provided default value is returned.
+func QueryParamAsPositiveInt64OrDefault(r *http.Request, paramName string, deflt int64) (int64, error) {
+	value, ok := QueryParam(r, paramName)
+	if !ok {
+		return deflt, nil
+	}
+
+	valueInt, err := strconv.ParseInt(value, 10, 64)
+	if err != nil || valueInt <= 0 {
+		return 0, usererror.BadRequestf("Parameter '%s' must be a positive integer.", paramName)
+	}
+
+	return valueInt, nil
+}
+
+// QueryParamAsPositiveInt64 extracts a positive integer parameter from the request query.
+// If the parameter doesn't exist an error is returned.
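+// A value that cannot be parsed as a positive integer results in a bad request error.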
+func QueryParamAsPositiveInt64(r *http.Request, paramName string) (int64, error) { + value, err := QueryParamOrError(r, paramName) + if err != nil { + return 0, err + } + + valueInt, err := strconv.ParseInt(value, 10, 64) + if err != nil || valueInt <= 0 { + return 0, usererror.BadRequestf("Parameter '%s' must be a positive integer.", paramName) + } + + return valueInt, nil +} + +// PathParamAsPositiveInt64 extracts an integer parameter from the request path. +func PathParamAsPositiveInt64(r *http.Request, paramName string) (int64, error) { + rawValue, err := PathParamOrError(r, paramName) + if err != nil { + return 0, err + } + + valueInt, err := strconv.ParseInt(rawValue, 10, 64) + if err != nil || valueInt <= 0 { + return 0, usererror.BadRequestf("Parameter '%s' must be a positive integer.", paramName) + } + + return valueInt, nil +} + +// QueryParamAsBoolOrDefault tries to retrieve the parameter from the query and parse it to bool. +func QueryParamAsBoolOrDefault(r *http.Request, paramName string, deflt bool) (bool, error) { + rawValue, ok := QueryParam(r, paramName) + if !ok || len(rawValue) == 0 { + return deflt, nil + } + + boolValue, err := strconv.ParseBool(rawValue) + if err != nil { + return false, usererror.BadRequestf("Parameter '%s' must be a boolean.", paramName) + } + + return boolValue, nil +} + +// GetOptionalRemainderFromPath returns the remainder ("*") from the path or an empty string if it doesn't exist. +func GetOptionalRemainderFromPath(r *http.Request) string { + return PathParamOrEmpty(r, PathParamRemainder) +} + +// GetRemainderFromPath returns the remainder ("*") from the path or an an error if it doesn't exist. +func GetRemainderFromPath(r *http.Request) (string, error) { + return PathParamOrError(r, PathParamRemainder) +} + +// ParseQuery extracts the query parameter from the url. +func ParseQuery(r *http.Request) string { + return r.URL.Query().Get(QueryParamQuery) +} + +// ParsePage extracts the page parameter from the url. +func ParsePage(r *http.Request) int { + s := r.URL.Query().Get(QueryParamPage) + i, _ := strconv.Atoi(s) + if i <= 0 { + i = 1 + } + return i +} + +// ParseLimit extracts the limit parameter from the url. +func ParseLimit(r *http.Request) int { + s := r.URL.Query().Get(QueryParamLimit) + i, _ := strconv.Atoi(s) + if i <= 0 { + i = PerPageDefault + } else if i > PerPageMax { + i = PerPageMax + } + return i +} + +// ParseOrder extracts the order parameter from the url. +func ParseOrder(r *http.Request) enum.Order { + return enum.ParseOrder( + r.URL.Query().Get(QueryParamOrder), + ) +} + +// ParseSort extracts the sort parameter from the url. +func ParseSort(r *http.Request) string { + return r.URL.Query().Get(QueryParamSort) +} + +// ParsePaginationFromRequest parses pagination related info from the url. +func ParsePaginationFromRequest(r *http.Request) types.Pagination { + return types.Pagination{ + Page: ParsePage(r), + Size: ParseLimit(r), + } +} + +// ParseListQueryFilterFromRequest parses pagination and query related info from the url. +func ParseListQueryFilterFromRequest(r *http.Request) types.ListQueryFilter { + return types.ListQueryFilter{ + Query: ParseQuery(r), + Pagination: ParsePaginationFromRequest(r), + } +} diff --git a/internal/api/request/util_test.go b/internal/api/request/util_test.go new file mode 100644 index 0000000000..4227f36538 --- /dev/null +++ b/internal/api/request/util_test.go @@ -0,0 +1,15 @@ +// Copyright 2023 Harness, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package request diff --git a/internal/api/request/webhook.go b/internal/api/request/webhook.go new file mode 100644 index 0000000000..eef3c95dcc --- /dev/null +++ b/internal/api/request/webhook.go @@ -0,0 +1,61 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package request + +import ( + "net/http" + + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +const ( + PathParamWebhookID = "webhook_id" + PathParamWebhookExecutionID = "webhook_execution_id" +) + +func GetWebhookIDFromPath(r *http.Request) (int64, error) { + return PathParamAsPositiveInt64(r, PathParamWebhookID) +} + +func GetWebhookExecutionIDFromPath(r *http.Request) (int64, error) { + return PathParamAsPositiveInt64(r, PathParamWebhookExecutionID) +} + +// ParseWebhookFilter extracts the Webhook query parameters for listing from the url. +func ParseWebhookFilter(r *http.Request) *types.WebhookFilter { + return &types.WebhookFilter{ + Query: ParseQuery(r), + Page: ParsePage(r), + Size: ParseLimit(r), + Sort: ParseSortWebhook(r), + Order: ParseOrder(r), + } +} + +// ParseWebhookExecutionFilter extracts the WebhookExecution query parameters for listing from the url. +func ParseWebhookExecutionFilter(r *http.Request) *types.WebhookExecutionFilter { + return &types.WebhookExecutionFilter{ + Page: ParsePage(r), + Size: ParseLimit(r), + } +} + +// ParseSortWebhook extracts the webhook sort parameter from the url. +func ParseSortWebhook(r *http.Request) enum.WebhookAttr { + return enum.ParseWebhookAttr( + r.URL.Query().Get(QueryParamSort), + ) +} diff --git a/internal/api/usererror/translate.go b/internal/api/usererror/translate.go new file mode 100644 index 0000000000..96b4437fcc --- /dev/null +++ b/internal/api/usererror/translate.go @@ -0,0 +1,141 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package usererror + +import ( + "errors" + "net/http" + + "github.com/harness/gitness/gitrpc" + apiauth "github.com/harness/gitness/internal/api/auth" + "github.com/harness/gitness/internal/services/webhook" + "github.com/harness/gitness/store" + "github.com/harness/gitness/types/check" + + "github.com/harness/go-rbac" + "github.com/rs/zerolog/log" +) + +func Translate(err error) *Error { + var ( + rError *Error + checkError *check.ValidationError + gitrpcError *gitrpc.Error + ) + + // TODO: Improve performance of checking multiple errors with errors.Is + + // check if err is RBAC error + if rbacErr := processRBACErrors(err); rbacErr != nil { + return rbacErr + } + + switch { + // api errors + case errors.As(err, &rError): + return rError + + // api auth errors + case errors.Is(err, apiauth.ErrNotAuthenticated): + return ErrUnauthorized + case errors.Is(err, apiauth.ErrNotAuthorized): + return ErrForbidden + + // validation errors + case errors.As(err, &checkError): + return New(http.StatusBadRequest, checkError.Error()) + + // store errors + case errors.Is(err, store.ErrResourceNotFound): + return ErrNotFound + case errors.Is(err, store.ErrDuplicate): + return ErrDuplicate + case errors.Is(err, store.ErrPrimaryPathCantBeDeleted): + return ErrPrimaryPathCantBeDeleted + case errors.Is(err, store.ErrPathTooLong): + return ErrPathTooLong + case errors.Is(err, store.ErrNoChangeInRequestedMove): + return ErrNoChange + case errors.Is(err, store.ErrIllegalMoveCyclicHierarchy): + return ErrCyclicHierarchy + case errors.Is(err, store.ErrSpaceWithChildsCantBeDeleted): + return ErrSpaceWithChildsCantBeDeleted + + // gitrpc errors + case errors.As(err, &gitrpcError): + return NewWithPayload(httpStatusCode( + gitrpcError.Status), + gitrpcError.Message, + gitrpcError.Details, + ) + + // webhook errors + case errors.Is(err, webhook.ErrWebhookNotRetriggerable): + return ErrWebhookNotRetriggerable + + // unknown error + default: + log.Warn().Msgf("Unable to translate error: %s", err) + return ErrInternal + } +} + +// lookup of gitrpc error codes to HTTP status codes. +var codes = map[gitrpc.Status]int{ + gitrpc.StatusConflict: http.StatusConflict, + gitrpc.StatusInvalidArgument: http.StatusBadRequest, + gitrpc.StatusNotFound: http.StatusNotFound, + gitrpc.StatusPathNotFound: http.StatusNotFound, + gitrpc.StatusNotImplemented: http.StatusNotImplemented, + gitrpc.StatusPreconditionFailed: http.StatusPreconditionFailed, + gitrpc.StatusUnauthorized: http.StatusUnauthorized, + gitrpc.StatusInternal: http.StatusInternalServerError, + gitrpc.StatusNotMergeable: http.StatusPreconditionFailed, +} + +// httpStatusCode returns the associated HTTP status code for a gitrpc error code. 
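+// Codes without an explicit mapping fall back to http.StatusInternalServerError.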
+func httpStatusCode(code gitrpc.Status) int { + if v, ok := codes[code]; ok { + return v + } + return http.StatusInternalServerError +} + +func processRBACErrors(err error) *Error { + msg := err.Error() + switch { + case + errors.Is(err, rbac.ErrBaseURLRequired), + errors.Is(err, rbac.ErrInvalidPrincipalType), + errors.Is(err, rbac.ErrAccountRequired), + errors.Is(err, rbac.ErrPrincipalIdentifierRequired), + errors.Is(err, rbac.ErrPermissionsRequired), + errors.Is(err, rbac.ErrResourceTypeRequired), + errors.Is(err, rbac.ErrResourceTypeKeyRequired), + errors.Is(err, rbac.ErrResourceTypeValueRequired), + errors.Is(err, rbac.ErrPermissionRequired), + errors.Is(err, rbac.ErrPermissionsSizeExceeded), + errors.Is(err, rbac.ErrInvalidCacheEntryType), + errors.Is(err, rbac.ErrNoHeader), + errors.Is(err, rbac.ErrAuthorizationTokenRequired), + errors.Is(err, rbac.ErrOddNumberOfArguments): + return New(http.StatusBadRequest, msg) + case errors.Is(err, rbac.ErrMapperFuncCannotBeNil), + errors.Is(err, rbac.ErrLoggerCannotBeNil): + return New(http.StatusInternalServerError, msg) + } + + return nil +} diff --git a/internal/api/usererror/usererror.go b/internal/api/usererror/usererror.go new file mode 100644 index 0000000000..60817f8f99 --- /dev/null +++ b/internal/api/usererror/usererror.go @@ -0,0 +1,142 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package usererror + +import ( + "fmt" + "net/http" +) + +var ( + // ErrInternal is returned when an internal error occurred. + ErrInternal = New(http.StatusInternalServerError, "Internal error occurred") + + // ErrInvalidToken is returned when the api request token is invalid. + ErrInvalidToken = New(http.StatusUnauthorized, "Invalid or missing token") + + // ErrBadRequest is returned when there was an issue with the input. + ErrBadRequest = New(http.StatusBadRequest, "Bad Request") + + // ErrUnauthorized is returned when the acting principal is not authenticated. + ErrUnauthorized = New(http.StatusUnauthorized, "Unauthorized") + + // ErrForbidden is returned when the acting principal is not authorized. + ErrForbidden = New(http.StatusForbidden, "Forbidden") + + // ErrNotFound is returned when a resource is not found. + ErrNotFound = New(http.StatusNotFound, "Not Found") + + // ErrPreconditionFailed is returned when a precondition failed. + ErrPreconditionFailed = New(http.StatusPreconditionFailed, "Precondition failed") + + // ErrNotMergeable is returned when a branch can't be merged. + ErrNotMergeable = New(http.StatusPreconditionFailed, "Branch can't be merged") + + // ErrNoChange is returned when no change was found based on the request. + ErrNoChange = New(http.StatusBadRequest, "No Change") + + // ErrDuplicate is returned when a resource already exits. + ErrDuplicate = New(http.StatusConflict, "Resource already exists") + + // ErrPrimaryPathCantBeDeleted is returned when trying to delete a primary path. 
+ ErrPrimaryPathCantBeDeleted = New(http.StatusBadRequest, "The primary path of an object can't be deleted") + + // ErrPathTooLong is returned when an action would lead to a path that is too long. + ErrPathTooLong = New(http.StatusBadRequest, "The resource path is too long") + + // ErrCyclicHierarchy is returned if the action would create a cyclic dependency between spaces. + ErrCyclicHierarchy = New(http.StatusBadRequest, "Unable to perform the action as it would lead to a cyclic dependency") + + // ErrSpaceWithChildsCantBeDeleted is returned if the principal is trying to delete a space that + // still has child resources. + ErrSpaceWithChildsCantBeDeleted = New(http.StatusBadRequest, + "Space can't be deleted as it still contains child resources") + + // ErrDefaultBranchCantBeDeleted is returned if the user tries to delete the default branch of a repository. + ErrDefaultBranchCantBeDeleted = New(http.StatusBadRequest, "The default branch of a repository can't be deleted") + + // ErrRequestTooLarge is returned if the request it too large. + ErrRequestTooLarge = New(http.StatusRequestEntityTooLarge, "The request is too large") + + // ErrWebhookNotRetriggerable is returned if the webhook can't be retriggered. + ErrWebhookNotRetriggerable = New(http.StatusMethodNotAllowed, + "The webhook execution is incomplete and can't be retriggered") +) + +// Error represents a json-encoded API error. +type Error struct { + Status int `json:"-"` + Message string `json:"message"` + Values map[string]any `json:"values,omitempty"` +} + +func (e *Error) Error() string { + return e.Message +} + +// New returns a new user facing error. +func New(status int, message string) *Error { + return &Error{Status: status, Message: message} +} + +// Newf returns a new user facing error. +func Newf(status int, format string, args ...any) *Error { + return &Error{Status: status, Message: fmt.Sprintf(format, args...)} +} + +// NewWithPayload returns a new user facing error with payload. +func NewWithPayload(status int, message string, valueMaps ...map[string]any) *Error { + var values map[string]any + for _, valueMap := range valueMaps { + if values == nil { + values = valueMap + continue + } + for k, v := range valueMap { + values[k] = v + } + } + return &Error{Status: status, Message: message, Values: values} +} + +// BadRequest returns a new user facing bad request error. +func BadRequest(message string) *Error { + return New(http.StatusBadRequest, message) +} + +// BadRequestf returns a new user facing bad request error. +func BadRequestf(format string, args ...any) *Error { + return Newf(http.StatusBadRequest, format, args...) +} + +// BadRequestWithPayload returns a new user facing bad request error with payload. +func BadRequestWithPayload(message string, values ...map[string]any) *Error { + return NewWithPayload(http.StatusBadRequest, message, values...) +} + +// Forbidden returns a new user facing forbidden error. +func Forbidden(message string) *Error { + return New(http.StatusForbidden, message) +} + +// NotFound returns a new user facing not found error. +func NotFound(message string) *Error { + return New(http.StatusNotFound, message) +} + +// ConflictWithPayload returns a new user facing conflict error with payload. +func ConflictWithPayload(message string, values ...map[string]any) *Error { + return NewWithPayload(http.StatusConflict, message, values...) 
+}
diff --git a/internal/api/usererror/usererror_test.go b/internal/api/usererror/usererror_test.go
new file mode 100644
index 0000000000..75b88874d0
--- /dev/null
+++ b/internal/api/usererror/usererror_test.go
@@ -0,0 +1,24 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package usererror
+
+import "testing"
+
+func TestError(t *testing.T) {
+	got, want := ErrNotFound.Error(), ErrNotFound.Message
+	if got != want {
+		t.Errorf("Want error string %q, got %q", want, got)
+	}
+}
diff --git a/internal/auth/authn/authenticator.go b/internal/auth/authn/authenticator.go
new file mode 100644
index 0000000000..5d994c8d41
--- /dev/null
+++ b/internal/auth/authn/authenticator.go
@@ -0,0 +1,50 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package authn
+
+import (
+	"errors"
+	"net/http"
+
+	"github.com/harness/gitness/internal/auth"
+)
+
+var (
+	// ErrNoAuthData is returned if the authenticator doesn't find any data in the request that can be used for auth.
+	ErrNoAuthData = errors.New("the request doesn't contain any auth data that can be used by the Authorizer")
+	// ErrNotAcceptedAuthMethod is returned if the request uses an auth method that is not accepted by the authenticator,
+	// e.g., don't accept jwt (without allowedResources field) for git clone/pull requests.
+	ErrNotAcceptedAuthMethod = errors.New("the request contains auth method that is not accepted by the Authorizer")
+)
+
+type SourceRouter string
+
+const (
+	SourceRouterAPI SourceRouter = "api"
+	SourceRouterGIT SourceRouter = "git"
+)
+
+// Authenticator is an abstraction of an entity that's responsible for authenticating principals
+// that are making calls via HTTP.
+type Authenticator interface {
+	/*
+	 * Tries to authenticate the acting principal if credentials are available.
+	 * Returns:
+	 * (session, nil) - request contains auth data and principal was verified
+	 * (nil, ErrNoAuthData) - request doesn't contain any auth data
+	 * (nil, err) - request contains auth data but verification failed
+	 */
+	Authenticate(r *http.Request, sourceRouter SourceRouter) (*auth.Session, error)
+}
diff --git a/internal/auth/authn/jwt.go b/internal/auth/authn/jwt.go
new file mode 100644
index 0000000000..ad4cc58e72
--- /dev/null
+++ b/internal/auth/authn/jwt.go
@@ -0,0 +1,168 @@
+// Copyright 2023 Harness, Inc.
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package authn + +import ( + "context" + "errors" + "fmt" + "net/http" + "strings" + + "github.com/harness/gitness/internal/api/request" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/internal/jwt" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/types" + + gojwt "github.com/dgrijalva/jwt-go" +) + +var _ Authenticator = (*JWTAuthenticator)(nil) + +// JWTAuthenticator uses the provided JWT to authenticate the caller. +type JWTAuthenticator struct { + cookieName string + principalStore store.PrincipalStore + tokenStore store.TokenStore +} + +func NewTokenAuthenticator( + principalStore store.PrincipalStore, + tokenStore store.TokenStore, + cookieName string, +) *JWTAuthenticator { + return &JWTAuthenticator{ + cookieName: cookieName, + principalStore: principalStore, + tokenStore: tokenStore, + } +} + +func (a *JWTAuthenticator) Authenticate(r *http.Request, sourceRouter SourceRouter) (*auth.Session, error) { + ctx := r.Context() + str := extractToken(r, a.cookieName) + + if len(str) == 0 { + return nil, ErrNoAuthData + } + + var principal *types.Principal + var err error + claims := &jwt.Claims{} + parsed, err := gojwt.ParseWithClaims(str, claims, func(token_ *gojwt.Token) (interface{}, error) { + principal, err = a.principalStore.Find(ctx, claims.PrincipalID) + if err != nil { + return nil, fmt.Errorf("failed to get principal for token: %w", err) + } + return []byte(principal.Salt), nil + }) + if err != nil { + return nil, fmt.Errorf("parsing of JWT claims failed: %w", err) + } + + if !parsed.Valid { + return nil, errors.New("parsed JWT token is invalid") + } + + if _, ok := parsed.Method.(*gojwt.SigningMethodHMAC); !ok { + return nil, errors.New("invalid HMAC signature for JWT") + } + + var metadata auth.Metadata + switch { + case claims.Token != nil: + metadata, err = a.metadataFromTokenClaims(ctx, principal, claims.Token) + if err != nil { + return nil, fmt.Errorf("failed to get metadata from token claims: %w", err) + } + case claims.Membership != nil: + metadata, err = a.metadataFromMembershipClaims(claims.Membership) + if err != nil { + return nil, fmt.Errorf("failed to get metadata from membership claims: %w", err) + } + default: + return nil, fmt.Errorf("jwt is missing sub-claims") + } + + return &auth.Session{ + Principal: *principal, + Metadata: metadata, + }, nil +} + +func (a *JWTAuthenticator) metadataFromTokenClaims( + ctx context.Context, + principal *types.Principal, + tknClaims *jwt.SubClaimsToken, +) (auth.Metadata, error) { + // ensure tkn exists + tkn, err := a.tokenStore.Find(ctx, tknClaims.ID) + if err != nil { + return nil, fmt.Errorf("failed to find token in db: %w", err) + } + + // protect against faked JWTs for other principals in case of single salt leak + if principal.ID != tkn.PrincipalID { + return nil, fmt.Errorf("JWT was for principal %d while db token was for principal %d", + principal.ID, tkn.PrincipalID) + } + + return &auth.TokenMetadata{ + 
TokenType: tkn.Type, + TokenID: tkn.ID, + }, nil +} + +func (a *JWTAuthenticator) metadataFromMembershipClaims( + mbsClaims *jwt.SubClaimsMembership, +) (auth.Metadata, error) { + // We could check if space exists - but also okay to fail later (saves db call) + return &auth.MembershipMetadata{ + SpaceID: mbsClaims.SpaceID, + Role: mbsClaims.Role, + }, nil +} + +func extractToken(r *http.Request, cookieName string) string { + // Check query param first (as that's most immediately visible to caller) + if queryToken, ok := request.GetAccessTokenFromQuery(r); ok { + return queryToken + } + + // check authorization header next + headerToken := r.Header.Get(request.HeaderAuthorization) + switch { + // in case of git push / pull it would be basic auth and token is in password + case strings.HasPrefix(headerToken, "Basic "): + // return pwd either way - if it's invalid pwd is empty string which we'd return anyway + _, pwd, _ := r.BasicAuth() + return pwd + // strip bearer prefix if present + case strings.HasPrefix(headerToken, "Bearer "): + return headerToken[7:] + // otherwise use value as is + case headerToken != "": + return headerToken + } + + // check cookies last (as that's least visible to caller) + if cookieToken, ok := request.GetTokenFromCookie(r, cookieName); ok { + return cookieToken + } + + // no token found + return "" +} diff --git a/internal/auth/authn/wire.go b/internal/auth/authn/wire.go new file mode 100644 index 0000000000..adc6af8e37 --- /dev/null +++ b/internal/auth/authn/wire.go @@ -0,0 +1,31 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package authn + +import ( + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/types" + + "github.com/google/wire" +) + +// WireSet provides a wire set for this package. +var WireSet = wire.NewSet( + ProvideAuthenticator, +) + +func ProvideAuthenticator(config *types.Config, principalStore store.PrincipalStore, tokenStore store.TokenStore) Authenticator { + return NewTokenAuthenticator(principalStore, tokenStore, config.Token.CookieName) +} diff --git a/internal/auth/authz/authz.go b/internal/auth/authz/authz.go new file mode 100644 index 0000000000..d479fa9b13 --- /dev/null +++ b/internal/auth/authz/authz.go @@ -0,0 +1,58 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
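The authenticator above distinguishes between a request that carries no credentials (ErrNoAuthData) and one whose credentials fail verification. A rough usage sketch, not part of the patch, of how a router middleware might consume that contract; the helper name and the decision to treat missing auth data as anonymous access are assumptions:

package example // illustrative only, not part of the patch

import (
	"errors"
	"net/http"

	"github.com/harness/gitness/internal/auth"
	"github.com/harness/gitness/internal/auth/authn"
)

// authenticateRequest returns the session (if any), whether the caller is
// authenticated, and a terminal error for invalid credentials.
func authenticateRequest(
	authenticator authn.Authenticator,
	r *http.Request,
) (*auth.Session, bool, error) {
	session, err := authenticator.Authenticate(r, authn.SourceRouterAPI)
	switch {
	case err == nil:
		return session, true, nil // principal verified
	case errors.Is(err, authn.ErrNoAuthData):
		return nil, false, nil // no credentials: treat as anonymous (assumption)
	default:
		return nil, false, err // credentials present but verification failed
	}
}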
+
+package authz
+
+import (
+	"context"
+	"errors"
+
+	"github.com/harness/gitness/internal/auth"
+	"github.com/harness/gitness/types"
+	"github.com/harness/gitness/types/enum"
+)
+
+var (
+	// ErrNoPermissionCheckProvided is returned if no permission checks are provided.
+	ErrNoPermissionCheckProvided = errors.New("no permission checks provided")
+)
+
+// Authorizer is an abstraction of an entity responsible for authorizing access to resources.
+type Authorizer interface {
+	/*
+	 * Checks whether the principal of the current session with the provided metadata
+	 * has the permission to execute the action on the resource within the scope.
+	 * Returns
+	 * (true, nil) - the action is permitted
+	 * (false, nil) - the action is not permitted
+	 * (false, err) - an error occurred while performing the permission check and the action should be denied
+	 */
+	Check(ctx context.Context,
+		session *auth.Session,
+		scope *types.Scope,
+		resource *types.Resource,
+		permission enum.Permission) (bool, error)
+
+	/*
+	 * Checks whether the principal of the current session with the provided metadata
+	 * has the permission to execute ALL the actions on the resources within their scopes.
+	 * Returns
+	 * (true, nil) - all requested actions are permitted
+	 * (false, nil) - at least one requested action is not permitted
+	 * (false, err) - an error occurred while performing the permission check and all actions should be denied
+	 */
+	CheckAll(ctx context.Context,
+		session *auth.Session,
+		permissionChecks ...types.PermissionCheck) (bool, error)
+}
diff --git a/internal/auth/authz/membership.go b/internal/auth/authz/membership.go
new file mode 100644
index 0000000000..7e83196a8e
--- /dev/null
+++ b/internal/auth/authz/membership.go
@@ -0,0 +1,173 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
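For orientation, a sketch of how a controller might gate an operation behind the Authorizer contract above. The helper name is hypothetical; the types.Scope/types.Resource fields, the enum constants, and usererror.ErrForbidden are taken from this patch:

package example // illustrative only, not part of the patch

import (
	"context"
	"fmt"

	"github.com/harness/gitness/internal/api/usererror"
	"github.com/harness/gitness/internal/auth"
	"github.com/harness/gitness/internal/auth/authz"
	"github.com/harness/gitness/types"
	"github.com/harness/gitness/types/enum"
)

// canViewUser checks whether the session is allowed to view the given user.
func canViewUser(
	ctx context.Context,
	authorizer authz.Authorizer,
	session *auth.Session,
	userUID string,
) error {
	allowed, err := authorizer.Check(
		ctx,
		session,
		&types.Scope{}, // user resources aren't nested in a space
		&types.Resource{Type: enum.ResourceTypeUser, Name: userUID},
		enum.PermissionUserView,
	)
	if err != nil {
		return fmt.Errorf("permission check failed: %w", err)
	}
	if !allowed {
		return usererror.ErrForbidden
	}
	return nil
}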
+ +package authz + +import ( + "context" + "fmt" + + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/internal/paths" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/rs/zerolog/log" +) + +var _ Authorizer = (*MembershipAuthorizer)(nil) + +type MembershipAuthorizer struct { + permissionCache PermissionCache + spaceStore store.SpaceStore +} + +func NewMembershipAuthorizer( + permissionCache PermissionCache, + spaceStore store.SpaceStore, +) *MembershipAuthorizer { + return &MembershipAuthorizer{ + permissionCache: permissionCache, + spaceStore: spaceStore, + } +} + +func (a *MembershipAuthorizer) Check( + ctx context.Context, + session *auth.Session, + scope *types.Scope, + resource *types.Resource, + permission enum.Permission, +) (bool, error) { + // public access - not expected to come here as of now (have to refactor that part) + if session == nil { + log.Ctx(ctx).Warn().Msgf( + "public access request for %s in scope %#v got to authorizer", + permission, + scope, + ) + return false, nil + } + + log.Ctx(ctx).Debug().Msgf( + "[MembershipAuthorizer] %s with id '%d' requests %s for %s '%s' in scope %#v with metadata %#v", + session.Principal.Type, + session.Principal.ID, + permission, + resource.Type, + resource.Name, + scope, + session.Metadata, + ) + + if session.Principal.Admin { + return true, nil // system admin can call any API + } + + var spacePath string + + switch resource.Type { + case enum.ResourceTypeSpace: + spacePath = paths.Concatinate(scope.SpacePath, resource.Name) + + case enum.ResourceTypeRepo: + spacePath = scope.SpacePath + + case enum.ResourceTypeServiceAccount: + spacePath = scope.SpacePath + + case enum.ResourceTypePipeline: + spacePath = scope.SpacePath + + case enum.ResourceTypeSecret: + spacePath = scope.SpacePath + + case enum.ResourceTypeUser: + // a user is allowed to view / edit themselves + if resource.Name == session.Principal.UID && + (permission == enum.PermissionUserView || permission == enum.PermissionUserEdit) { + return true, nil + } + + // everything else is reserved for admins only (like operations on users other than yourself, or setting admin) + return false, nil + + // Service operations aren't exposed to users + case enum.ResourceTypeService: + return false, nil + + default: + return false, nil + } + + // ephemeral membership overrides any other space memberships of the principal + if membershipMetadata, ok := session.Metadata.(*auth.MembershipMetadata); ok { + return a.checkWithMembershipMetadata(ctx, membershipMetadata, spacePath, permission) + } + + // ensure we aren't bypassing unknown metadata with impact on authorization + if session.Metadata != nil && session.Metadata.ImpactsAuthorization() { + return false, fmt.Errorf("session contains unknown metadata that impacts authorization: %T", session.Metadata) + } + + return a.permissionCache.Get(ctx, PermissionCacheKey{ + PrincipalID: session.Principal.ID, + SpaceRef: spacePath, + Permission: permission, + }) +} + +func (a *MembershipAuthorizer) CheckAll(ctx context.Context, session *auth.Session, + permissionChecks ...types.PermissionCheck) (bool, error) { + for _, p := range permissionChecks { + if _, err := a.Check(ctx, session, &p.Scope, &p.Resource, p.Permission); err != nil { + return false, err + } + } + + return true, nil +} + +// checkWithMembershipMetadata checks access using the ephemeral membership provided in the metadata. 
+func (a *MembershipAuthorizer) checkWithMembershipMetadata( + ctx context.Context, + membershipMetadata *auth.MembershipMetadata, + requestedSpacePath string, + requestedPermission enum.Permission, +) (bool, error) { + space, err := a.spaceStore.Find(ctx, membershipMetadata.SpaceID) + if err != nil { + return false, fmt.Errorf("failed to find space: %w", err) + } + + if !paths.IsAncesterOf(space.Path, requestedSpacePath) { + return false, fmt.Errorf( + "requested permission scope '%s' is outside of ephemeral membership scope '%s'", + requestedSpacePath, + space.Path, + ) + } + + if !roleHasPermission(membershipMetadata.Role, requestedPermission) { + return false, fmt.Errorf( + "requested permission '%s' is outside of ephemeral membership role '%s'", + requestedPermission, + membershipMetadata.Role, + ) + } + + // access is granted by ephemeral membership + return true, nil +} diff --git a/internal/auth/authz/membership_cache.go b/internal/auth/authz/membership_cache.go new file mode 100644 index 0000000000..e80237b12f --- /dev/null +++ b/internal/auth/authz/membership_cache.go @@ -0,0 +1,104 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package authz + +import ( + "context" + "errors" + "fmt" + "time" + + "github.com/harness/gitness/cache" + "github.com/harness/gitness/internal/paths" + "github.com/harness/gitness/internal/store" + gitness_store "github.com/harness/gitness/store" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "golang.org/x/exp/slices" +) + +type PermissionCacheKey struct { + PrincipalID int64 + SpaceRef string + Permission enum.Permission +} +type PermissionCache cache.Cache[PermissionCacheKey, bool] + +func NewPermissionCache( + spaceStore store.SpaceStore, + membershipStore store.MembershipStore, + cacheDuration time.Duration, +) PermissionCache { + return cache.New[PermissionCacheKey, bool](permissionCacheGetter{ + spaceStore: spaceStore, + membershipStore: membershipStore, + }, cacheDuration) +} + +type permissionCacheGetter struct { + spaceStore store.SpaceStore + membershipStore store.MembershipStore +} + +func (g permissionCacheGetter) Find(ctx context.Context, key PermissionCacheKey) (bool, error) { + spaceRef := key.SpaceRef + principalID := key.PrincipalID + + // Find the starting space. + space, err := g.spaceStore.FindByRef(ctx, spaceRef) + if err != nil { + return false, fmt.Errorf("failed to find space '%s': %w", spaceRef, err) + } + + // limit the depth to be safe (e.g. root/space1/space2 => maxDepth of 3) + maxDepth := len(paths.Segments(spaceRef)) + + for depth := 0; depth < maxDepth; depth++ { + // Find the membership in the current space. 
+		membership, err := g.membershipStore.Find(ctx, types.MembershipKey{
+			SpaceID:     space.ID,
+			PrincipalID: principalID,
+		})
+		if err != nil && !errors.Is(err, gitness_store.ErrResourceNotFound) {
+			return false, fmt.Errorf("failed to find membership: %w", err)
+		}
+
+		// If the membership is defined in the current space, check if the user has the required permission.
+		if membership != nil &&
+			roleHasPermission(membership.Role, key.Permission) {
+			return true, nil
+		}
+
+		// If membership with the requested permission has not been found in the current space,
+		// move to the parent space, if any.
+
+		if space.ParentID == 0 {
+			return false, nil
+		}
+
+		space, err = g.spaceStore.Find(ctx, space.ParentID)
+		if err != nil {
+			return false, fmt.Errorf("failed to find parent space with id %d: %w", space.ParentID, err)
+		}
+	}
+
+	return false, nil
+}
+
+func roleHasPermission(role enum.MembershipRole, permission enum.Permission) bool {
+	_, hasRole := slices.BinarySearch(role.Permissions(), permission)
+	return hasRole
+}
diff --git a/internal/auth/authz/unsafe.go b/internal/auth/authz/unsafe.go
new file mode 100644
index 0000000000..e8dc1df970
--- /dev/null
+++ b/internal/auth/authz/unsafe.go
@@ -0,0 +1,62 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package authz
+
+import (
+	"context"
+
+	"github.com/harness/gitness/internal/auth"
+	"github.com/harness/gitness/types"
+	"github.com/harness/gitness/types/enum"
+
+	"github.com/rs/zerolog/log"
+)
+
+var _ Authorizer = (*UnsafeAuthorizer)(nil)
+
+/*
+ * An unsafe authorizer that permits any action and simply logs the permission request.
+ */
+type UnsafeAuthorizer struct{}
+
+func NewUnsafeAuthorizer() *UnsafeAuthorizer {
+	return &UnsafeAuthorizer{}
+}
+
+func (a *UnsafeAuthorizer) Check(ctx context.Context, session *auth.Session,
+	scope *types.Scope, resource *types.Resource, permission enum.Permission) (bool, error) {
+	log.Ctx(ctx).Info().Msgf(
+		"[Authz] %s with id '%d' requests %s for %s '%s' in scope %#v with metadata %#v",
+		session.Principal.Type,
+		session.Principal.ID,
+		permission,
+		resource.Type,
+		resource.Name,
+		scope,
+		session.Metadata,
+	)
+
+	return true, nil
+}
+func (a *UnsafeAuthorizer) CheckAll(ctx context.Context, session *auth.Session,
+	permissionChecks ...types.PermissionCheck) (bool, error) {
+	for _, p := range permissionChecks {
+		if _, err := a.Check(ctx, session, &p.Scope, &p.Resource, p.Permission); err != nil {
+			return false, err
+		}
+	}
+
+	return true, nil
+}
diff --git a/internal/auth/authz/wire.go b/internal/auth/authz/wire.go
new file mode 100644
index 0000000000..1d017e1d52
--- /dev/null
+++ b/internal/auth/authz/wire.go
@@ -0,0 +1,41 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package authz + +import ( + "time" + + "github.com/harness/gitness/internal/store" + + "github.com/google/wire" +) + +// WireSet provides a wire set for this package. +var WireSet = wire.NewSet( + ProvideAuthorizer, + ProvidePermissionCache, +) + +func ProvideAuthorizer(pCache PermissionCache, spaceStore store.SpaceStore) Authorizer { + return NewMembershipAuthorizer(pCache, spaceStore) +} + +func ProvidePermissionCache( + spaceStore store.SpaceStore, + membershipStore store.MembershipStore, +) PermissionCache { + const permissionCacheTimeout = time.Second * 15 + return NewPermissionCache(spaceStore, membershipStore, permissionCacheTimeout) +} diff --git a/internal/auth/metadata.go b/internal/auth/metadata.go new file mode 100644 index 0000000000..d6d6f41625 --- /dev/null +++ b/internal/auth/metadata.go @@ -0,0 +1,48 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package auth + +import "github.com/harness/gitness/types/enum" + +type Metadata interface { + ImpactsAuthorization() bool +} + +// EmptyMetadata represents the state when the auth session doesn't have any extra metadata. +type EmptyMetadata struct{} + +func (m *EmptyMetadata) ImpactsAuthorization() bool { + return false +} + +// TokenMetadata contains information about the token that was used during auth. +type TokenMetadata struct { + TokenType enum.TokenType + TokenID int64 +} + +func (m *TokenMetadata) ImpactsAuthorization() bool { + return false +} + +// MembershipMetadata contains information about an ephemeral membership grant. +type MembershipMetadata struct { + SpaceID int64 + Role enum.MembershipRole +} + +func (m *MembershipMetadata) ImpactsAuthorization() bool { + return true +} diff --git a/internal/auth/session.go b/internal/auth/session.go new file mode 100644 index 0000000000..fca53a1b5b --- /dev/null +++ b/internal/auth/session.go @@ -0,0 +1,28 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
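The wire provider above backs the MembershipAuthorizer with a PermissionCache using a 15-second TTL, so repeated checks for the same principal, space, and permission skip the space and membership lookups within that window. A minimal sketch of resolving a single check through that cache (the helper name is hypothetical, and note it bypasses the admin and ephemeral-membership handling that the authorizer itself adds):

package example // illustrative only, not part of the patch

import (
	"context"

	"github.com/harness/gitness/internal/auth/authz"
	"github.com/harness/gitness/types/enum"
)

// hasPermission resolves whether the principal holds the permission anywhere
// on the path from the referenced space up to its root.
func hasPermission(
	ctx context.Context,
	cache authz.PermissionCache,
	principalID int64,
	spaceRef string,
	permission enum.Permission,
) (bool, error) {
	return cache.Get(ctx, authz.PermissionCacheKey{
		PrincipalID: principalID,
		SpaceRef:    spaceRef,
		Permission:  permission,
	})
}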
+ +package auth + +import ( + "github.com/harness/gitness/types" +) + +// Session contains information of the authenticated principal and auth related metadata. +type Session struct { + // Principal is the authenticated principal. + Principal types.Principal + + // Metadata contains auth related information (access grants, tokenId, sshKeyId, ...) + Metadata Metadata +} diff --git a/internal/bootstrap/bootstrap.go b/internal/bootstrap/bootstrap.go new file mode 100644 index 0000000000..0c511f085c --- /dev/null +++ b/internal/bootstrap/bootstrap.go @@ -0,0 +1,221 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package bootstrap + +import ( + "context" + "errors" + "fmt" + + "github.com/harness/gitness/internal/api/controller/service" + "github.com/harness/gitness/internal/api/controller/user" + "github.com/harness/gitness/internal/auth" + "github.com/harness/gitness/store" + "github.com/harness/gitness/types" + + "github.com/rs/zerolog/log" +) + +// systemServicePrincipal is the principal representing gitness. +// It is used for all operations executed by gitness itself. +var systemServicePrincipal *types.Principal + +func NewSystemServiceSession() *auth.Session { + return &auth.Session{ + Principal: *systemServicePrincipal, + Metadata: &auth.EmptyMetadata{}, + } +} + +// pipelineServicePrincipal is the principal that is used during +// pipeline executions for calling gitness APIs. +var pipelineServicePrincipal *types.Principal + +func NewPipelineServiceSession() *auth.Session { + return &auth.Session{ + Principal: *pipelineServicePrincipal, + Metadata: &auth.EmptyMetadata{}, + } +} + +// Bootstrap is an abstraction of a function that bootstraps a system. +type Bootstrap func(context.Context) error + +func System(config *types.Config, userCtrl *user.Controller, + serviceCtrl *service.Controller) func(context.Context) error { + return func(ctx context.Context) error { + if err := SystemService(ctx, config, serviceCtrl); err != nil { + return fmt.Errorf("failed to setup system service: %w", err) + } + + if err := PipelineService(ctx, config, serviceCtrl); err != nil { + return fmt.Errorf("failed to setup pipeline service: %w", err) + } + + if err := AdminUser(ctx, config, userCtrl); err != nil { + return fmt.Errorf("failed to setup admin user: %w", err) + } + + return nil + } +} + +// AdminUser sets up the admin user based on the config (if provided). 
+func AdminUser(ctx context.Context, config *types.Config, userCtrl *user.Controller) error { + if config.Principal.Admin.Password == "" { + return nil + } + + usr, err := userCtrl.FindNoAuth(ctx, config.Principal.Admin.UID) + if errors.Is(err, store.ErrResourceNotFound) { + usr, err = createAdminUser(ctx, config, userCtrl) + } + + if err != nil { + return fmt.Errorf("failed to setup admin user: %w", err) + } + if !usr.Admin { + return fmt.Errorf("user with uid '%s' exists but is no admin (ID: %d)", usr.UID, usr.ID) + } + + log.Ctx(ctx).Info().Msgf("Completed setup of admin user '%s' (id: %d).", usr.UID, usr.ID) + + return nil +} + +func createAdminUser( + ctx context.Context, + config *types.Config, + userCtrl *user.Controller, +) (*types.User, error) { + in := &user.CreateInput{ + UID: config.Principal.Admin.UID, + DisplayName: config.Principal.Admin.DisplayName, + Email: config.Principal.Admin.Email, + Password: config.Principal.Admin.Password, + } + + usr, createErr := userCtrl.CreateNoAuth(ctx, in, true) + if createErr == nil || !errors.Is(createErr, store.ErrDuplicate) { + return usr, createErr + } + + // user might've been created by another instance in which case we should find it now. + var findErr error + usr, findErr = userCtrl.FindNoAuth(ctx, config.Principal.Admin.UID) + if findErr != nil { + return nil, fmt.Errorf("failed to find user with uid '%s' (%s) after duplicate error: %w", + config.Principal.Admin.UID, findErr, createErr) + } + + return usr, nil +} + +// SystemService sets up the gitness service principal that is used for +// resources that are automatically created by the system. +func SystemService( + ctx context.Context, + config *types.Config, + serviceCtrl *service.Controller, +) error { + svc, err := serviceCtrl.FindNoAuth(ctx, config.Principal.System.UID) + if errors.Is(err, store.ErrResourceNotFound) { + svc, err = createServicePrincipal( + ctx, + serviceCtrl, + config.Principal.System.UID, + config.Principal.System.Email, + config.Principal.System.DisplayName, + true, + ) + } + + if err != nil { + return fmt.Errorf("failed to setup system service: %w", err) + } + if !svc.Admin { + return fmt.Errorf("service with uid '%s' exists but is no admin (ID: %d)", svc.UID, svc.ID) + } + + systemServicePrincipal = svc.ToPrincipal() + + log.Ctx(ctx).Info().Msgf("Completed setup of system service '%s' (id: %d).", svc.UID, svc.ID) + + return nil +} + +// PipelineService sets up the pipeline service principal that is used during +// pipeline executions for calling gitness APIs. 
+func PipelineService( + ctx context.Context, + config *types.Config, + serviceCtrl *service.Controller, +) error { + svc, err := serviceCtrl.FindNoAuth(ctx, config.Principal.Pipeline.UID) + if errors.Is(err, store.ErrResourceNotFound) { + svc, err = createServicePrincipal( + ctx, + serviceCtrl, + config.Principal.Pipeline.UID, + config.Principal.Pipeline.Email, + config.Principal.Pipeline.DisplayName, + false, + ) + } + + if err != nil { + return fmt.Errorf("failed to setup pipeline service: %w", err) + } + + pipelineServicePrincipal = svc.ToPrincipal() + + log.Ctx(ctx).Info().Msgf("Completed setup of pipeline service '%s' (id: %d).", svc.UID, svc.ID) + + return nil +} + +func createServicePrincipal( + ctx context.Context, + serviceCtrl *service.Controller, + uid string, + email string, + displayName string, + admin bool, +) (*types.Service, error) { + in := &service.CreateInput{ + UID: uid, + Email: email, + DisplayName: displayName, + } + + svc, createErr := serviceCtrl.CreateNoAuth(ctx, in, admin) + if createErr == nil || !errors.Is(createErr, store.ErrDuplicate) { + return svc, createErr + } + + // service might've been created by another instance in which case we should find it now. + var findErr error + svc, findErr = serviceCtrl.FindNoAuth(ctx, uid) + if findErr != nil { + return nil, fmt.Errorf( + "failed to find service with uid '%s' (%s) after duplicate error: %w", + uid, + findErr, + createErr, + ) + } + + return svc, nil +} diff --git a/internal/bootstrap/wire.go b/internal/bootstrap/wire.go new file mode 100644 index 0000000000..64f7221d30 --- /dev/null +++ b/internal/bootstrap/wire.go @@ -0,0 +1,31 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package bootstrap + +import ( + "github.com/harness/gitness/internal/api/controller/service" + "github.com/harness/gitness/internal/api/controller/user" + "github.com/harness/gitness/types" + + "github.com/google/wire" +) + +// WireSet provides a wire set for this package. +var WireSet = wire.NewSet(ProvideBootstrap) + +func ProvideBootstrap(config *types.Config, userCtrl *user.Controller, + serviceCtrl *service.Controller) Bootstrap { + return System(config, userCtrl, serviceCtrl) +} diff --git a/internal/config/url.go b/internal/config/url.go new file mode 100644 index 0000000000..b69973a9be --- /dev/null +++ b/internal/config/url.go @@ -0,0 +1,5 @@ +package config + +const ( + ApiURL = "/api/v1" +) diff --git a/internal/cron/nightly.go b/internal/cron/nightly.go new file mode 100644 index 0000000000..d9e768930e --- /dev/null +++ b/internal/cron/nightly.go @@ -0,0 +1,49 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package cron + +import ( + "context" + "time" + + "github.com/rs/zerolog/log" +) + +// Nightly is a sub-routine that periodically purges historical data. +type Nightly struct { + // Inject required stores here +} + +// NewNightly returns a new Nightly sub-routine. +func NewNightly() *Nightly { + return &Nightly{} +} + +// Run runs the purge sub-routine. +func (n *Nightly) Run(ctx context.Context) { + const hoursPerDay = 24 + ticker := time.NewTicker(hoursPerDay * time.Hour) + logger := log.Ctx(ctx) + for { + select { + case <-ctx.Done(): + return // break + case <-ticker.C: + // TODO replace this with your nightly + // cron tasks. + logger.Trace().Msg("cron job executed") + } + } +} diff --git a/internal/cron/nightly_test.go b/internal/cron/nightly_test.go new file mode 100644 index 0000000000..c0709df16d --- /dev/null +++ b/internal/cron/nightly_test.go @@ -0,0 +1,15 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package cron diff --git a/internal/cron/wire.go b/internal/cron/wire.go new file mode 100644 index 0000000000..3e1a298505 --- /dev/null +++ b/internal/cron/wire.go @@ -0,0 +1,20 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package cron + +import "github.com/google/wire" + +// WireSet provides a wire set for this package. +var WireSet = wire.NewSet(NewNightly) diff --git a/internal/events/git/branch.go b/internal/events/git/branch.go new file mode 100644 index 0000000000..a464d9a5a8 --- /dev/null +++ b/internal/events/git/branch.go @@ -0,0 +1,97 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package events + +import ( + "context" + + "github.com/harness/gitness/events" + + "github.com/rs/zerolog/log" +) + +const BranchCreatedEvent events.EventType = "branch-created" + +type BranchCreatedPayload struct { + RepoID int64 `json:"repo_id"` + PrincipalID int64 `json:"principal_id"` + Ref string `json:"ref"` + SHA string `json:"sha"` +} + +func (r *Reporter) BranchCreated(ctx context.Context, payload *BranchCreatedPayload) { + eventID, err := events.ReporterSendEvent(r.innerReporter, ctx, BranchCreatedEvent, payload) + if err != nil { + log.Ctx(ctx).Err(err).Msgf("failed to send branch created event") + return + } + + log.Ctx(ctx).Debug().Msgf("reported branch created event with id '%s'", eventID) +} + +func (r *Reader) RegisterBranchCreated(fn events.HandlerFunc[*BranchCreatedPayload], + opts ...events.HandlerOption) error { + return events.ReaderRegisterEvent(r.innerReader, BranchCreatedEvent, fn, opts...) +} + +const BranchUpdatedEvent events.EventType = "branch-updated" + +type BranchUpdatedPayload struct { + RepoID int64 `json:"repo_id"` + PrincipalID int64 `json:"principal_id"` + Ref string `json:"ref"` + OldSHA string `json:"old_sha"` + NewSHA string `json:"new_sha"` + Forced bool `json:"forced"` +} + +func (r *Reporter) BranchUpdated(ctx context.Context, payload *BranchUpdatedPayload) { + eventID, err := events.ReporterSendEvent(r.innerReporter, ctx, BranchUpdatedEvent, payload) + if err != nil { + log.Ctx(ctx).Err(err).Msgf("failed to send branch updated event") + return + } + + log.Ctx(ctx).Debug().Msgf("reported branch updated event with id '%s'", eventID) +} + +func (r *Reader) RegisterBranchUpdated(fn events.HandlerFunc[*BranchUpdatedPayload], + opts ...events.HandlerOption) error { + return events.ReaderRegisterEvent(r.innerReader, BranchUpdatedEvent, fn, opts...) +} + +const BranchDeletedEvent events.EventType = "branch-deleted" + +type BranchDeletedPayload struct { + RepoID int64 `json:"repo_id"` + PrincipalID int64 `json:"principal_id"` + Ref string `json:"ref"` + SHA string `json:"sha"` +} + +func (r *Reporter) BranchDeleted(ctx context.Context, payload *BranchDeletedPayload) { + eventID, err := events.ReporterSendEvent(r.innerReporter, ctx, BranchDeletedEvent, payload) + if err != nil { + log.Ctx(ctx).Err(err).Msgf("failed to send branch deleted event") + return + } + + log.Ctx(ctx).Debug().Msgf("reported branch deleted event with id '%s'", eventID) +} + +func (r *Reader) RegisterBranchDeleted(fn events.HandlerFunc[*BranchDeletedPayload], + opts ...events.HandlerOption) error { + return events.ReaderRegisterEvent(r.innerReader, BranchDeletedEvent, fn, opts...) +} diff --git a/internal/events/git/events.go b/internal/events/git/events.go new file mode 100644 index 0000000000..3ff3deb4c7 --- /dev/null +++ b/internal/events/git/events.go @@ -0,0 +1,20 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
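A sketch of the reporter side of the branch events above, roughly as a receive-pack/githook handler might use it after a successful push. The function name, the gitevents import alias, and the example ref format are illustrative; the payload fields and BranchCreated signature come from the patch:

package example // illustrative only, not part of the patch

import (
	"context"

	gitevents "github.com/harness/gitness/internal/events/git"
)

// reportBranchCreated publishes a branch-created event for the given repo.
func reportBranchCreated(
	ctx context.Context,
	reporter *gitevents.Reporter,
	repoID int64,
	principalID int64,
	ref string, // e.g. "refs/heads/feature-x"
	sha string,
) {
	reporter.BranchCreated(ctx, &gitevents.BranchCreatedPayload{
		RepoID:      repoID,
		PrincipalID: principalID,
		Ref:         ref,
		SHA:         sha,
	})
}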
+ +package events + +const ( + // category defines the event category used for this package. + category = "git" +) diff --git a/internal/events/git/reader.go b/internal/events/git/reader.go new file mode 100644 index 0000000000..a37c577894 --- /dev/null +++ b/internal/events/git/reader.go @@ -0,0 +1,40 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package events + +import ( + "github.com/harness/gitness/events" +) + +func NewReaderFactory(eventsSystem *events.System) (*events.ReaderFactory[*Reader], error) { + readerFactoryFunc := func(innerReader *events.GenericReader) (*Reader, error) { + return &Reader{ + innerReader: innerReader, + }, nil + } + + return events.NewReaderFactory(eventsSystem, category, readerFactoryFunc) +} + +// Reader is the event reader for this package. +// It exposes typesafe event registration methods for all events by this package. +// NOTE: Event registration methods are in the event's dedicated file. +type Reader struct { + innerReader *events.GenericReader +} + +func (r *Reader) Configure(opts ...events.ReaderOption) { + r.innerReader.Configure(opts...) +} diff --git a/internal/events/git/reporter.go b/internal/events/git/reporter.go new file mode 100644 index 0000000000..d09c5473d0 --- /dev/null +++ b/internal/events/git/reporter.go @@ -0,0 +1,39 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package events + +import ( + "errors" + + "github.com/harness/gitness/events" +) + +// Reporter is the event reporter for this package. +// It exposes typesafe send methods for all events of this package. +// NOTE: Event send methods are in the event's dedicated file. +type Reporter struct { + innerReporter *events.GenericReporter +} + +func NewReporter(eventsSystem *events.System) (*Reporter, error) { + innerReporter, err := events.NewReporter(eventsSystem, category) + if err != nil { + return nil, errors.New("failed to create new GenericReporter from event system") + } + + return &Reporter{ + innerReporter: innerReporter, + }, nil +} diff --git a/internal/events/git/tag.go b/internal/events/git/tag.go new file mode 100644 index 0000000000..424186ab50 --- /dev/null +++ b/internal/events/git/tag.go @@ -0,0 +1,97 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package events + +import ( + "context" + + "github.com/harness/gitness/events" + + "github.com/rs/zerolog/log" +) + +const TagCreatedEvent events.EventType = "tag-created" + +type TagCreatedPayload struct { + RepoID int64 `json:"repo_id"` + PrincipalID int64 `json:"principal_id"` + Ref string `json:"ref"` + SHA string `json:"sha"` +} + +func (r *Reporter) TagCreated(ctx context.Context, payload *TagCreatedPayload) { + eventID, err := events.ReporterSendEvent(r.innerReporter, ctx, TagCreatedEvent, payload) + if err != nil { + log.Ctx(ctx).Err(err).Msgf("failed to send tag created event") + return + } + + log.Ctx(ctx).Debug().Msgf("reported tag created event with id '%s'", eventID) +} + +func (r *Reader) RegisterTagCreated(fn events.HandlerFunc[*TagCreatedPayload], + opts ...events.HandlerOption) error { + return events.ReaderRegisterEvent(r.innerReader, TagCreatedEvent, fn, opts...) +} + +const TagUpdatedEvent events.EventType = "tag-updated" + +type TagUpdatedPayload struct { + RepoID int64 `json:"repo_id"` + PrincipalID int64 `json:"principal_id"` + Ref string `json:"ref"` + OldSHA string `json:"old_sha"` + NewSHA string `json:"new_sha"` + Forced bool `json:"forced"` +} + +func (r *Reporter) TagUpdated(ctx context.Context, payload *TagUpdatedPayload) { + eventID, err := events.ReporterSendEvent(r.innerReporter, ctx, TagUpdatedEvent, payload) + if err != nil { + log.Ctx(ctx).Err(err).Msgf("failed to send tag updated event") + return + } + + log.Ctx(ctx).Debug().Msgf("reported tag updated event with id '%s'", eventID) +} + +func (r *Reader) RegisterTagUpdated(fn events.HandlerFunc[*TagUpdatedPayload], + opts ...events.HandlerOption) error { + return events.ReaderRegisterEvent(r.innerReader, TagUpdatedEvent, fn, opts...) +} + +const TagDeletedEvent events.EventType = "tag-deleted" + +type TagDeletedPayload struct { + RepoID int64 `json:"repo_id"` + PrincipalID int64 `json:"principal_id"` + Ref string `json:"ref"` + SHA string `json:"sha"` +} + +func (r *Reporter) TagDeleted(ctx context.Context, payload *TagDeletedPayload) { + eventID, err := events.ReporterSendEvent(r.innerReporter, ctx, TagDeletedEvent, payload) + if err != nil { + log.Ctx(ctx).Err(err).Msgf("failed to send tag deleted event") + return + } + + log.Ctx(ctx).Debug().Msgf("reported tag deleted event with id '%s'", eventID) +} + +func (r *Reader) RegisterTagDeleted(fn events.HandlerFunc[*TagDeletedPayload], + opts ...events.HandlerOption) error { + return events.ReaderRegisterEvent(r.innerReader, TagDeletedEvent, fn, opts...) +} diff --git a/internal/events/git/wire.go b/internal/events/git/wire.go new file mode 100644 index 0000000000..4259a0971b --- /dev/null +++ b/internal/events/git/wire.go @@ -0,0 +1,35 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package events + +import ( + "github.com/harness/gitness/events" + + "github.com/google/wire" +) + +// WireSet provides a wire set for this package. +var WireSet = wire.NewSet( + ProvideReaderFactory, + ProvideReporter, +) + +func ProvideReaderFactory(eventsSystem *events.System) (*events.ReaderFactory[*Reader], error) { + return NewReaderFactory(eventsSystem) +} + +func ProvideReporter(eventsSystem *events.System) (*Reporter, error) { + return NewReporter(eventsSystem) +} diff --git a/internal/events/pullreq/category.go b/internal/events/pullreq/category.go new file mode 100644 index 0000000000..bf7386f6cf --- /dev/null +++ b/internal/events/pullreq/category.go @@ -0,0 +1,20 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package events + +const ( + // category defines the event category used for this package. + category = "pullreq" +) diff --git a/internal/events/pullreq/events.go b/internal/events/pullreq/events.go new file mode 100644 index 0000000000..06c0e9ce55 --- /dev/null +++ b/internal/events/pullreq/events.go @@ -0,0 +1,23 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package events + +type Base struct { + PullReqID int64 `json:"pullreq_id"` + SourceRepoID int64 `json:"source_repo_id"` + TargetRepoID int64 `json:"repo_id"` + PrincipalID int64 `json:"principal_id"` + Number int64 `json:"number"` +} diff --git a/internal/events/pullreq/events_branch.go b/internal/events/pullreq/events_branch.go new file mode 100644 index 0000000000..f336ebcfcf --- /dev/null +++ b/internal/events/pullreq/events_branch.go @@ -0,0 +1,53 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package events + +import ( + "context" + + "github.com/harness/gitness/events" + + "github.com/rs/zerolog/log" +) + +const BranchUpdatedEvent events.EventType = "branch-updated" + +type BranchUpdatedPayload struct { + Base + OldSHA string `json:"old_sha"` + NewSHA string `json:"new_sha"` + OldMergeBaseSHA string `json:"old_merge_base_sha"` + NewMergeBaseSHA string `json:"new_merge_base_sha"` + Forced bool `json:"forced"` +} + +func (r *Reporter) BranchUpdated(ctx context.Context, payload *BranchUpdatedPayload) { + if payload == nil { + return + } + + eventID, err := events.ReporterSendEvent(r.innerReporter, ctx, BranchUpdatedEvent, payload) + if err != nil { + log.Ctx(ctx).Err(err).Msgf("failed to send pull request branch updated event") + return + } + + log.Ctx(ctx).Debug().Msgf("reported pull request branch updated event with id '%s'", eventID) +} + +func (r *Reader) RegisterBranchUpdated(fn events.HandlerFunc[*BranchUpdatedPayload], + opts ...events.HandlerOption) error { + return events.ReaderRegisterEvent(r.innerReader, BranchUpdatedEvent, fn, opts...) +} diff --git a/internal/events/pullreq/events_state.go b/internal/events/pullreq/events_state.go new file mode 100644 index 0000000000..2ec3339a6a --- /dev/null +++ b/internal/events/pullreq/events_state.go @@ -0,0 +1,133 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package events + +import ( + "context" + + "github.com/harness/gitness/events" + "github.com/harness/gitness/types/enum" + + "github.com/rs/zerolog/log" +) + +const CreatedEvent events.EventType = "created" + +type CreatedPayload struct { + Base + SourceBranch string `json:"source_branch"` + TargetBranch string `json:"target_branch"` + SourceSHA string `json:"source_sha"` +} + +func (r *Reporter) Created(ctx context.Context, payload *CreatedPayload) { + if payload == nil { + return + } + + eventID, err := events.ReporterSendEvent(r.innerReporter, ctx, CreatedEvent, payload) + if err != nil { + log.Ctx(ctx).Err(err).Msgf("failed to send pull request created event") + return + } + + log.Ctx(ctx).Debug().Msgf("reported pull request created event with id '%s'", eventID) +} + +func (r *Reader) RegisterCreated(fn events.HandlerFunc[*CreatedPayload], + opts ...events.HandlerOption) error { + return events.ReaderRegisterEvent(r.innerReader, CreatedEvent, fn, opts...) 
+} + +const ClosedEvent events.EventType = "closed" + +type ClosedPayload struct { + Base +} + +func (r *Reporter) Closed(ctx context.Context, payload *ClosedPayload) { + if payload == nil { + return + } + + eventID, err := events.ReporterSendEvent(r.innerReporter, ctx, ClosedEvent, payload) + if err != nil { + log.Ctx(ctx).Err(err).Msgf("failed to send pull request closed event") + return + } + + log.Ctx(ctx).Debug().Msgf("reported pull request closed event with id '%s'", eventID) +} + +func (r *Reader) RegisterClosed(fn events.HandlerFunc[*ClosedPayload], + opts ...events.HandlerOption) error { + return events.ReaderRegisterEvent(r.innerReader, ClosedEvent, fn, opts...) +} + +const ReopenedEvent events.EventType = "reopened" + +type ReopenedPayload struct { + Base + SourceSHA string `json:"source_sha"` + MergeBaseSHA string `json:"merge_base_sha"` +} + +func (r *Reporter) Reopened(ctx context.Context, payload *ReopenedPayload) { + if payload == nil { + return + } + + eventID, err := events.ReporterSendEvent(r.innerReporter, ctx, ReopenedEvent, payload) + if err != nil { + log.Ctx(ctx).Err(err).Msgf("failed to send pull request reopened event") + return + } + + log.Ctx(ctx).Debug().Msgf("reported pull request reopened event with id '%s'", eventID) +} + +func (r *Reader) RegisterReopened(fn events.HandlerFunc[*ReopenedPayload], + opts ...events.HandlerOption) error { + return events.ReaderRegisterEvent(r.innerReader, ReopenedEvent, fn, opts...) +} + +const MergedEvent events.EventType = "merged" + +type MergedPayload struct { + Base + MergeMethod enum.MergeMethod `json:"merge_method"` + MergeSHA string `json:"merge_sha"` + TargetSHA string `json:"target_sha"` + SourceSHA string `json:"source_sha"` +} + +func (r *Reporter) Merged(ctx context.Context, payload *MergedPayload) { + if payload == nil { + return + } + + eventID, err := events.ReporterSendEvent(r.innerReporter, ctx, MergedEvent, payload) + if err != nil { + log.Ctx(ctx).Err(err).Msgf("failed to send pull request merged event") + return + } + + log.Ctx(ctx).Debug().Msgf("reported pull request merged event with id '%s'", eventID) +} + +func (r *Reader) RegisterMerged(fn events.HandlerFunc[*MergedPayload], + opts ...events.HandlerOption) error { + return events.ReaderRegisterEvent(r.innerReader, MergedEvent, fn, opts...) +} diff --git a/internal/events/pullreq/reader.go b/internal/events/pullreq/reader.go new file mode 100644 index 0000000000..46005ebc4f --- /dev/null +++ b/internal/events/pullreq/reader.go @@ -0,0 +1,38 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
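A sketch of reporting the pull request created event defined above. The payload shape is taken from the patch; the function name, the import alias, and all literal values are illustrative:

package example // illustrative only, not part of the patch

import (
	"context"

	pullreqevents "github.com/harness/gitness/internal/events/pullreq"
)

// reportPullReqCreated publishes a created event for a freshly opened pull request.
func reportPullReqCreated(ctx context.Context, reporter *pullreqevents.Reporter) {
	reporter.Created(ctx, &pullreqevents.CreatedPayload{
		Base: pullreqevents.Base{
			PullReqID:    1, // illustrative IDs and values below
			SourceRepoID: 2,
			TargetRepoID: 2,
			PrincipalID:  42,
			Number:       7,
		},
		SourceBranch: "feature-x",
		TargetBranch: "main",
		SourceSHA:    "0123456789abcdef0123456789abcdef01234567",
	})
}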
+ +package events + +import ( + "github.com/harness/gitness/events" +) + +func NewReaderFactory(eventsSystem *events.System) (*events.ReaderFactory[*Reader], error) { + readerFactoryFunc := func(innerReader *events.GenericReader) (*Reader, error) { + return &Reader{ + innerReader: innerReader, + }, nil + } + + return events.NewReaderFactory(eventsSystem, category, readerFactoryFunc) +} + +// Reader is the event reader for this package. +type Reader struct { + innerReader *events.GenericReader +} + +func (r *Reader) Configure(opts ...events.ReaderOption) { + r.innerReader.Configure(opts...) +} diff --git a/internal/events/pullreq/reporter.go b/internal/events/pullreq/reporter.go new file mode 100644 index 0000000000..e6dd923e9a --- /dev/null +++ b/internal/events/pullreq/reporter.go @@ -0,0 +1,37 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package events + +import ( + "errors" + + "github.com/harness/gitness/events" +) + +// Reporter is the event reporter for this package. +type Reporter struct { + innerReporter *events.GenericReporter +} + +func NewReporter(eventsSystem *events.System) (*Reporter, error) { + innerReporter, err := events.NewReporter(eventsSystem, category) + if err != nil { + return nil, errors.New("failed to create new GenericReporter from event system") + } + + return &Reporter{ + innerReporter: innerReporter, + }, nil +} diff --git a/internal/events/pullreq/wire.go b/internal/events/pullreq/wire.go new file mode 100644 index 0000000000..4259a0971b --- /dev/null +++ b/internal/events/pullreq/wire.go @@ -0,0 +1,35 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package events + +import ( + "github.com/harness/gitness/events" + + "github.com/google/wire" +) + +// WireSet provides a wire set for this package. +var WireSet = wire.NewSet( + ProvideReaderFactory, + ProvideReporter, +) + +func ProvideReaderFactory(eventsSystem *events.System) (*events.ReaderFactory[*Reader], error) { + return NewReaderFactory(eventsSystem) +} + +func ProvideReporter(eventsSystem *events.System) (*Reporter, error) { + return NewReporter(eventsSystem) +} diff --git a/internal/githook/githook.go b/internal/githook/githook.go new file mode 100644 index 0000000000..0301d45dbd --- /dev/null +++ b/internal/githook/githook.go @@ -0,0 +1,109 @@ +// Copyright 2023 Harness, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package githook + +import ( + "context" + "fmt" + "net/http" + "strings" + "time" + + "github.com/harness/gitness/githook" + "github.com/harness/gitness/internal/api/request" + "github.com/harness/gitness/types" + "github.com/harness/gitness/version" + + "github.com/rs/zerolog/log" +) + +var ( + // ExecutionTimeout is the timeout used for githook CLI runs. + ExecutionTimeout = 3 * time.Minute +) + +// GenerateEnvironmentVariables generates the required environment variables for a payload +// constructed from the provided parameters. +func GenerateEnvironmentVariables( + ctx context.Context, + apiBaseURL string, + repoID int64, + principalID int64, + disabled bool, +) (map[string]string, error) { + // best effort retrieving of requestID - log in case we can't find it but don't fail operation. + requestID, ok := request.RequestIDFrom(ctx) + if !ok { + log.Ctx(ctx).Warn().Msg("operation doesn't have a requestID in the context - generate githook payload without") + } + + // generate githook base url + baseURL := strings.TrimLeft(apiBaseURL, "/") + "/v1/internal/git-hooks" + + payload := &types.GithookPayload{ + BaseURL: baseURL, + RepoID: repoID, + PrincipalID: principalID, + RequestID: requestID, + Disabled: disabled, + } + + if err := payload.Validate(); err != nil { + return nil, fmt.Errorf("generated payload is invalid: %w", err) + } + + return githook.GenerateEnvironmentVariables(payload) +} + +// LoadFromEnvironment returns a new githook.CLICore created by loading the payload from the environment variable. +func LoadFromEnvironment() (*githook.CLICore, error) { + payload, err := githook.LoadPayloadFromEnvironment[*types.GithookPayload]() + if err != nil { + return nil, fmt.Errorf("failed to load payload from environment: %w", err) + } + + // ensure we return disabled error in case it's explicitly disabled (will result in no-op) + if payload.Disabled { + return nil, githook.ErrDisabled + } + + if err := payload.Validate(); err != nil { + return nil, fmt.Errorf("payload validation failed: %w", err) + } + + return githook.NewCLICore( + githook.NewClient( + http.DefaultClient, + payload.BaseURL, + func(r *http.Request) *http.Request { + // add query params + query := r.URL.Query() + query.Add(request.QueryParamRepoID, fmt.Sprint(payload.RepoID)) + query.Add(request.QueryParamPrincipalID, fmt.Sprint(payload.PrincipalID)) + + r.URL.RawQuery = query.Encode() + + // add headers + if len(payload.RequestID) > 0 { + r.Header.Add(request.HeaderRequestID, payload.RequestID) + } + r.Header.Add(request.HeaderUserAgent, fmt.Sprintf("Gitness/%s", version.Version)) + + return r + }, + ), + ExecutionTimeout, + ), nil +} diff --git a/internal/inernal_test.go b/internal/inernal_test.go new file mode 100644 index 0000000000..6471380eca --- /dev/null +++ b/internal/inernal_test.go @@ -0,0 +1,15 @@ +// Copyright 2023 Harness, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package internal diff --git a/internal/internal.go b/internal/internal.go new file mode 100644 index 0000000000..605b9f0def --- /dev/null +++ b/internal/internal.go @@ -0,0 +1,24 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//go:build tools +// +build tools + +// following https://github.com/golang/go/wiki/Modules#how-can-i-track-tool-dependencies-for-a-module + +package internal + +import ( + _ "github.com/google/wire/cmd/wire" +) diff --git a/internal/jwt/jwt.go b/internal/jwt/jwt.go new file mode 100644 index 0000000000..09757ba5e5 --- /dev/null +++ b/internal/jwt/jwt.go @@ -0,0 +1,107 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package jwt + +import ( + "time" + + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/golang-jwt/jwt" + "github.com/pkg/errors" +) + +const ( + issuer = "Gitness" +) + +// Claims defines gitness jwt claims. +type Claims struct { + jwt.StandardClaims + + PrincipalID int64 `json:"pid,omitempty"` + + Token *SubClaimsToken `json:"tkn,omitempty"` + Membership *SubClaimsMembership `json:"ms,omitempty"` +} + +// SubClaimsToken contains information about the token the JWT was created for. +type SubClaimsToken struct { + Type enum.TokenType `json:"typ,omitempty"` + ID int64 `json:"id,omitempty"` +} + +// SubClaimsMembership contains the ephemeral membership the JWT was created with. +type SubClaimsMembership struct { + Role enum.MembershipRole `json:"role,omitempty"` + SpaceID int64 `json:"sid,omitempty"` +} + +// GenerateForToken generates a jwt for a given token. 
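+// Note that token.IssuedAt and token.ExpiresAt are stored in milliseconds,
+// while the JWT standard claims expect seconds, hence the division by 1000 below.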
+func GenerateForToken(token *types.Token, secret string) (string, error) { + var expiresAt int64 + if token.ExpiresAt != nil { + expiresAt = *token.ExpiresAt + } + + jwtToken := jwt.NewWithClaims(jwt.SigningMethodHS256, Claims{ + StandardClaims: jwt.StandardClaims{ + Issuer: issuer, + // times required to be in sec not millisec + IssuedAt: token.IssuedAt / 1000, + ExpiresAt: expiresAt / 1000, + }, + PrincipalID: token.PrincipalID, + Token: &SubClaimsToken{ + Type: token.Type, + ID: token.ID, + }, + }) + + res, err := jwtToken.SignedString([]byte(secret)) + if err != nil { + return "", errors.Wrap(err, "Failed to sign token") + } + + return res, nil +} + +// GenerateWithMembership generates a jwt with the given ephemeral membership. +func GenerateWithMembership(principalID int64, spaceID int64, role enum.MembershipRole, lifetime time.Duration, secret string) (string, error) { + issuedAt := time.Now() + expiresAt := issuedAt.Add(lifetime) + + jwtToken := jwt.NewWithClaims(jwt.SigningMethodHS256, Claims{ + StandardClaims: jwt.StandardClaims{ + Issuer: issuer, + // times required to be in sec + IssuedAt: issuedAt.Unix(), + ExpiresAt: expiresAt.Unix(), + }, + PrincipalID: principalID, + Membership: &SubClaimsMembership{ + SpaceID: spaceID, + Role: role, + }, + }) + + res, err := jwtToken.SignedString([]byte(secret)) + if err != nil { + return "", errors.Wrap(err, "Failed to sign token") + } + + return res, nil +} diff --git a/internal/paths/paths.go b/internal/paths/paths.go new file mode 100644 index 0000000000..135af92db3 --- /dev/null +++ b/internal/paths/paths.go @@ -0,0 +1,99 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package paths + +import ( + "errors" + "strings" + + "github.com/harness/gitness/types" +) + +var ( + ErrPathEmpty = errors.New("path is empty") +) + +// DisectLeaf splits a path into its parent path and the leaf name +// e.g. space1/space2/space3 -> (space1/space2, space3, nil). +func DisectLeaf(path string) (string, string, error) { + path = strings.Trim(path, types.PathSeparator) + + if path == "" { + return "", "", ErrPathEmpty + } + + i := strings.LastIndex(path, types.PathSeparator) + if i == -1 { + return "", path, nil + } + + return path[:i], path[i+1:], nil +} + +// DisectRoot splits a path into its root space and sub-path +// e.g. space1/space2/space3 -> (space1, space2/space3, nil). +func DisectRoot(path string) (string, string, error) { + path = strings.Trim(path, types.PathSeparator) + + if path == "" { + return "", "", ErrPathEmpty + } + + i := strings.Index(path, types.PathSeparator) + if i == -1 { + return path, "", nil + } + + return path[:i], path[i+1:], nil +} + +/* + * Concatinate two paths together (takes care of leading / trailing '/') + * e.g. (space1/, /space2/) -> space1/space2 + * + * NOTE: "//" is not a valid path, so all '/' will be trimmed. 
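+ * e.g. ("", "space1") -> space1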
+ */ +func Concatinate(path1 string, path2 string) string { + path1 = strings.Trim(path1, types.PathSeparator) + path2 = strings.Trim(path2, types.PathSeparator) + + if path1 == "" { + return path2 + } else if path2 == "" { + return path1 + } + + return path1 + types.PathSeparator + path2 +} + +// Segments returns all segments of the path +// e.g. /space1/space2/space3 -> [space1, space2, space3]. +func Segments(path string) []string { + path = strings.Trim(path, types.PathSeparator) + return strings.Split(path, types.PathSeparator) +} + +// IsAncesterOf returns true iff 'path' is an ancestor of 'other' or they are the same. +// e.g. other = path(/.*) +func IsAncesterOf(path string, other string) bool { + path = strings.Trim(path, types.PathSeparator) + other = strings.Trim(other, types.PathSeparator) + + // add "/" to both to handle space1/inner and space1/in + return strings.Contains( + other+types.PathSeparator, + path+types.PathSeparator, + ) +} diff --git a/internal/pipeline/canceler/canceler.go b/internal/pipeline/canceler/canceler.go new file mode 100644 index 0000000000..59e1611715 --- /dev/null +++ b/internal/pipeline/canceler/canceler.go @@ -0,0 +1,143 @@ +package canceler + +import ( + "context" + "fmt" + "time" + + "github.com/harness/gitness/internal/pipeline/scheduler" + "github.com/harness/gitness/internal/sse" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/rs/zerolog/log" +) + +type service struct { + executionStore store.ExecutionStore + sseStreamer sse.Streamer + repoStore store.RepoStore + scheduler scheduler.Scheduler + stageStore store.StageStore + stepStore store.StepStore +} + +// Canceler cancels a build. +type Canceler interface { + // Cancel cancels the provided execution. + Cancel(ctx context.Context, repo *types.Repository, execution *types.Execution) error +} + +// New returns a cancellation service that encapsulates +// all cancellation operations. +func New( + executionStore store.ExecutionStore, + sseStreamer sse.Streamer, + repoStore store.RepoStore, + scheduler scheduler.Scheduler, + stageStore store.StageStore, + stepStore store.StepStore, +) Canceler { + return &service{ + executionStore: executionStore, + sseStreamer: sseStreamer, + repoStore: repoStore, + scheduler: scheduler, + stageStore: stageStore, + stepStore: stepStore, + } +} + +func (s *service) Cancel(ctx context.Context, repo *types.Repository, execution *types.Execution) error { + log := log.With(). + Int64("execution.id", execution.ID). + Str("execution.status", string(execution.Status)). + Str("execution.Ref", execution.Ref). + Logger() + + // do not cancel the build if the build status is + // complete. only cancel the build if the status is + // running or pending. + switch execution.Status { + case enum.CIStatusPending, enum.CIStatusRunning: + default: + return nil + } + + // update the build status to killed. if the update fails + // due to an optimistic lock error it means the build has + // already started, and should now be ignored. 
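+	// Finished is set to now; Started is backfilled for executions that never ran.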
+ now := time.Now().UnixMilli() + execution.Status = enum.CIStatusKilled + execution.Finished = now + if execution.Started == 0 { + execution.Started = now + } + + err := s.executionStore.Update(ctx, execution) + if err != nil { + return fmt.Errorf("could not update execution status to canceled: %w", err) + } + + stages, err := s.stageStore.ListWithSteps(ctx, execution.ID) + if err != nil { + return fmt.Errorf("could not list stages with steps: %w", err) + } + + // update the status of all steps to indicate they + // were killed or skipped. + for _, stage := range stages { + if stage.Status.IsDone() { + continue + } + if stage.Started != 0 { + stage.Status = enum.CIStatusKilled + } else { + stage.Status = enum.CIStatusSkipped + stage.Started = now + } + stage.Stopped = now + err := s.stageStore.Update(ctx, stage) + if err != nil { + log.Debug().Err(err). + Int64("stage.number", stage.Number). + Msg("canceler: cannot update stage status") + } + + // update the status of all steps to indicate they + // were killed or skipped. + for _, step := range stage.Steps { + if step.Status.IsDone() { + continue + } + if step.Started != 0 { + step.Status = enum.CIStatusKilled + } else { + step.Status = enum.CIStatusSkipped + step.Started = now + } + step.Stopped = now + step.ExitCode = 130 + err := s.stepStore.Update(ctx, step) + if err != nil { + log.Debug().Err(err). + Int64("stage.number", stage.Number). + Int64("step.number", step.Number). + Msg("canceler: cannot update step status") + } + } + } + + execution.Stages = stages + log.Info().Msg("canceler: successfully cancelled build") + + // trigger a SSE to notify subscribers that + // the execution was cancelled. + err = s.sseStreamer.Publish(ctx, repo.ParentID, enum.SSETypeExecutionCanceled, execution) + if err != nil { + log.Debug().Err(err).Msg("canceler: failed to publish server-sent event") + } + + return nil +} diff --git a/internal/pipeline/canceler/wire.go b/internal/pipeline/canceler/wire.go new file mode 100644 index 0000000000..d4a8f58e87 --- /dev/null +++ b/internal/pipeline/canceler/wire.go @@ -0,0 +1,39 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package canceler + +import ( + "github.com/harness/gitness/internal/pipeline/scheduler" + "github.com/harness/gitness/internal/sse" + "github.com/harness/gitness/internal/store" + + "github.com/google/wire" +) + +// WireSet provides a wire set for this package. +var WireSet = wire.NewSet( + ProvideCanceler, +) + +// ProvideExecutionManager provides an execution manager. 
+func ProvideCanceler( + executionStore store.ExecutionStore, + sseStreamer sse.Streamer, + repoStore store.RepoStore, + scheduler scheduler.Scheduler, + stageStore store.StageStore, + stepStore store.StepStore) Canceler { + return New(executionStore, sseStreamer, repoStore, scheduler, stageStore, stepStore) +} diff --git a/internal/pipeline/checks/write.go b/internal/pipeline/checks/write.go new file mode 100644 index 0000000000..bf47b68d5e --- /dev/null +++ b/internal/pipeline/checks/write.go @@ -0,0 +1,57 @@ +package checks + +import ( + "context" + "encoding/json" + "fmt" + "time" + + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +// Write is a util function which writes execution and pipeline state to the +// check store. +func Write( + ctx context.Context, + checkStore store.CheckStore, + execution *types.Execution, + pipeline *types.Pipeline, +) error { + payload := types.CheckPayloadInternal{ + Number: execution.Number, + RepoID: execution.RepoID, + PipelineID: execution.PipelineID, + } + data, err := json.Marshal(payload) + if err != nil { + return fmt.Errorf("could not marshal check payload: %w", err) + } + now := time.Now().UnixMilli() + summary := pipeline.Description + if summary == "" { + summary = pipeline.UID + } + check := &types.Check{ + RepoID: execution.RepoID, + UID: pipeline.UID, + Summary: summary, + Created: now, + Updated: now, + CreatedBy: execution.CreatedBy, + Status: execution.Status.ConvertToCheckStatus(), + CommitSHA: execution.After, + Metadata: []byte("{}"), + Payload: types.CheckPayload{ + Version: "1", + Kind: enum.CheckPayloadKindPipeline, + Data: data, + }, + } + err = checkStore.Upsert(ctx, check) + if err != nil { + return fmt.Errorf("could not upsert to check store: %w", err) + } + return nil +} diff --git a/internal/pipeline/commit/gitness.go b/internal/pipeline/commit/gitness.go new file mode 100644 index 0000000000..7e8b3eaa7f --- /dev/null +++ b/internal/pipeline/commit/gitness.go @@ -0,0 +1,75 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package commit + +import ( + "context" + + "github.com/harness/gitness/gitrpc" + "github.com/harness/gitness/internal/api/controller" + "github.com/harness/gitness/types" +) + +type service struct { + gitRPCClient gitrpc.Interface +} + +func new(gitRPCClient gitrpc.Interface) CommitService { + return &service{gitRPCClient: gitRPCClient} +} + +// FindRef finds information about a commit in gitness for the git ref. +// This is using the branch only as the ref at the moment, can be changed +// when needed to take any ref (like sha, tag). 
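+// e.g. FindRef(ctx, repo, "main") resolves the tip commit of the "main" branch.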
+func (f *service) FindRef( + ctx context.Context, + repo *types.Repository, + branch string, +) (*types.Commit, error) { + readParams := gitrpc.ReadParams{ + RepoUID: repo.GitUID, + } + branchOutput, err := f.gitRPCClient.GetBranch(ctx, &gitrpc.GetBranchParams{ + ReadParams: readParams, + BranchName: branch, + }) + if err != nil { + return nil, err + } + + // convert the RPC commit output to a types.Commit. + return controller.MapCommit(branchOutput.Branch.Commit) +} + +// FindCommit finds information about a commit in gitness for the git SHA +func (f *service) FindCommit( + ctx context.Context, + repo *types.Repository, + sha string, +) (*types.Commit, error) { + readParams := gitrpc.ReadParams{ + RepoUID: repo.GitUID, + } + commitOutput, err := f.gitRPCClient.GetCommit(ctx, &gitrpc.GetCommitParams{ + ReadParams: readParams, + SHA: sha, + }) + if err != nil { + return nil, err + } + + // convert the RPC commit output to a types.Commit. + return controller.MapCommit(&commitOutput.Commit) +} diff --git a/internal/pipeline/commit/service.go b/internal/pipeline/commit/service.go new file mode 100644 index 0000000000..b1809f98c8 --- /dev/null +++ b/internal/pipeline/commit/service.go @@ -0,0 +1,34 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package commit + +import ( + "context" + + "github.com/harness/gitness/types" +) + +type ( + // CommitService provides access to commit information via + // the SCM provider. Today, this is gitness but it can + // be extendible to any SCM provider. + CommitService interface { + // ref is the ref to fetch the commit from, eg refs/heads/master + FindRef(ctx context.Context, repo *types.Repository, ref string) (*types.Commit, error) + + // FindCommit returns information about a commit in a repo. + FindCommit(ctx context.Context, repo *types.Repository, sha string) (*types.Commit, error) + } +) diff --git a/internal/pipeline/commit/wire.go b/internal/pipeline/commit/wire.go new file mode 100644 index 0000000000..bc114ef7c9 --- /dev/null +++ b/internal/pipeline/commit/wire.go @@ -0,0 +1,32 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package commit + +import ( + "github.com/harness/gitness/gitrpc" + + "github.com/google/wire" +) + +// WireSet provides a wire set for this package. +var WireSet = wire.NewSet( + ProvideCommitService, +) + +// ProvideCommitService provides a service which can fetch commit +// information about a repository. 
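+// It delegates to the unexported constructor in gitness.go; the returned
+// service only wraps the gitrpc client and holds no per-request state.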
+func ProvideCommitService(gitRPCClient gitrpc.Interface) CommitService { + return new(gitRPCClient) +} diff --git a/internal/pipeline/file/gitness.go b/internal/pipeline/file/gitness.go new file mode 100644 index 0000000000..fd443dec98 --- /dev/null +++ b/internal/pipeline/file/gitness.go @@ -0,0 +1,74 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package file + +import ( + "context" + "fmt" + "io" + + "github.com/harness/gitness/gitrpc" + "github.com/harness/gitness/types" +) + +type service struct { + gitRPCClient gitrpc.Interface +} + +func new(gitRPCClient gitrpc.Interface) FileService { + return &service{gitRPCClient: gitRPCClient} +} + +func (f *service) Get( + ctx context.Context, + repo *types.Repository, + path string, + ref string, +) (*File, error) { + readParams := gitrpc.ReadParams{ + RepoUID: repo.GitUID, + } + treeNodeOutput, err := f.gitRPCClient.GetTreeNode(ctx, &gitrpc.GetTreeNodeParams{ + ReadParams: readParams, + GitREF: ref, + Path: path, + IncludeLatestCommit: false, + }) + if err != nil { + return nil, fmt.Errorf("failed to read tree node: %w", err) + } + // viewing Raw content is only supported for blob content + if treeNodeOutput.Node.Type != gitrpc.TreeNodeTypeBlob { + return nil, fmt.Errorf("path content is not of blob type: %s", treeNodeOutput.Node.Type) + } + + blobReader, err := f.gitRPCClient.GetBlob(ctx, &gitrpc.GetBlobParams{ + ReadParams: readParams, + SHA: treeNodeOutput.Node.SHA, + SizeLimit: 0, // no size limit, we stream whatever data there is + }) + if err != nil { + return nil, fmt.Errorf("failed to read blob from gitrpc: %w", err) + } + + buf, err := io.ReadAll(blobReader.Content) + if err != nil { + return nil, fmt.Errorf("could not read blob content from file: %w", err) + } + + return &File{ + Data: buf, + }, nil +} diff --git a/internal/pipeline/file/service.go b/internal/pipeline/file/service.go new file mode 100644 index 0000000000..c9a8ac3297 --- /dev/null +++ b/internal/pipeline/file/service.go @@ -0,0 +1,41 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package file + +import ( + "context" + + "github.com/harness/gitness/types" +) + +type ( + // File represents the raw file contents in the + // version control system. + File struct { + Data []byte + } + + // FileService provides access to contents of files in + // the SCM provider. Today, this is gitness but it should + // be extendible to any SCM provider. 
+ // The plan is for all remote repos to be pointers inside gitness + // so a repo entry would always exist. If this changes, the interface + // can be updated. + FileService interface { + // path is the path in the repo to read + // ref is the git ref for the repository e.g. refs/heads/master + Get(ctx context.Context, repo *types.Repository, path, ref string) (*File, error) + } +) diff --git a/internal/pipeline/file/wire.go b/internal/pipeline/file/wire.go new file mode 100644 index 0000000000..f6410b748c --- /dev/null +++ b/internal/pipeline/file/wire.go @@ -0,0 +1,32 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package file + +import ( + "github.com/harness/gitness/gitrpc" + + "github.com/google/wire" +) + +// WireSet provides a wire set for this package. +var WireSet = wire.NewSet( + ProvideFileService, +) + +// ProvideFileService provides a service which can read file contents +// from a repository. +func ProvideFileService(gitRPCClient gitrpc.Interface) FileService { + return new(gitRPCClient) +} diff --git a/internal/pipeline/manager/client.go b/internal/pipeline/manager/client.go new file mode 100644 index 0000000000..b8f2bd15bd --- /dev/null +++ b/internal/pipeline/manager/client.go @@ -0,0 +1,178 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package manager + +import ( + "bytes" + "context" + "encoding/json" + + "github.com/harness/gitness/livelog" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/drone/drone-go/drone" + "github.com/drone/runner-go/client" +) + +type embedded struct { + config *types.Config + manager ExecutionManager +} + +var _ client.Client = (*embedded)(nil) + +func NewEmbeddedClient(manager ExecutionManager, config *types.Config) *embedded { + return &embedded{ + config: config, + manager: manager, + } +} + +// Join notifies the server the runner is joining the cluster. +// Since the runner is embedded, this can just return nil. +func (e *embedded) Join(ctx context.Context, machine string) error { + return nil +} + +// Leave notifies the server the runner is leaving the cluster. +// Since the runner is embedded, this can just return nil. +func (e *embedded) Leave(ctx context.Context, machine string) error { + return nil +} + +// Ping sends a ping message to the server to test connectivity. +// Since the runner is embedded, this can just return nil. 
+func (e *embedded) Ping(ctx context.Context, machine string) error { + return nil +} + +// Request requests the next available build stage for execution. +func (e *embedded) Request(ctx context.Context, args *client.Filter) (*drone.Stage, error) { + request := &Request{ + Kind: args.Kind, + Type: args.Type, + OS: args.OS, + Arch: args.Arch, + Variant: args.Variant, + Kernel: args.Kernel, + Labels: args.Labels, + } + stage, err := e.manager.Request(ctx, request) + if err != nil { + return nil, err + } + return ConvertToDroneStage(stage), nil +} + +// Accept accepts the build stage for execution. +func (e *embedded) Accept(ctx context.Context, s *drone.Stage) error { + stage, err := e.manager.Accept(ctx, s.ID, s.Machine) + if err != nil { + return err + } + *s = *ConvertToDroneStage(stage) + return err +} + +// Detail gets the build stage details for execution. +func (e *embedded) Detail(ctx context.Context, stage *drone.Stage) (*client.Context, error) { + details, err := e.manager.Details(ctx, stage.ID) + if err != nil { + return nil, err + } + + return &client.Context{ + Build: ConvertToDroneBuild(details.Execution), + Repo: ConvertToDroneRepo(details.Repo), + Stage: ConvertToDroneStage(details.Stage), + Secrets: ConvertToDroneSecrets(details.Secrets), + Config: ConvertToDroneFile(details.Config), + Netrc: ConvertToDroneNetrc(details.Netrc), + System: &drone.System{ + Proto: e.config.Server.HTTP.Proto, + Host: "host.docker.internal", + }, + }, nil +} + +// Update updates the build stage. +func (e *embedded) Update(ctx context.Context, stage *drone.Stage) error { + var err error + convertedStage := ConvertFromDroneStage(stage) + status := enum.ParseCIStatus(stage.Status) + if status == enum.CIStatusPending || status == enum.CIStatusRunning { + err = e.manager.BeforeStage(ctx, convertedStage) + } else { + err = e.manager.AfterStage(ctx, convertedStage) + } + *stage = *ConvertToDroneStage(convertedStage) + return err +} + +// UpdateStep updates the build step. +func (e *embedded) UpdateStep(ctx context.Context, step *drone.Step) error { + var err error + convertedStep := ConvertFromDroneStep(step) + status := enum.ParseCIStatus(step.Status) + if status == enum.CIStatusPending || status == enum.CIStatusRunning { + err = e.manager.BeforeStep(ctx, convertedStep) + } else { + err = e.manager.AfterStep(ctx, convertedStep) + } + *step = *ConvertToDroneStep(convertedStep) + return err +} + +// Watch watches for build cancellation requests. +func (e *embedded) Watch(ctx context.Context, executionID int64) (bool, error) { + return e.manager.Watch(ctx, executionID) +} + +// Batch batch writes logs to the streaming logs. +func (e *embedded) Batch(ctx context.Context, step int64, lines []*drone.Line) error { + for _, l := range lines { + line := ConvertFromDroneLine(l) + err := e.manager.Write(ctx, step, line) + if err != nil { + return err + } + } + return nil +} + +// Upload uploads the full logs to the server. +func (e *embedded) Upload(ctx context.Context, step int64, l []*drone.Line) error { + var buffer bytes.Buffer + lines := []livelog.Line{} + for _, line := range l { + lines = append(lines, *ConvertFromDroneLine(line)) + } + out, err := json.Marshal(lines) + if err != nil { + return err + } + _, err = buffer.Write(out) + if err != nil { + return err + } + return e.manager.UploadLogs(ctx, step, &buffer) +} + +// UploadCard uploads a card to drone server. 
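+// The embedded runner does not handle cards yet, so this is currently a no-op.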
+func (e *embedded) UploadCard(ctx context.Context, step int64, card *drone.CardInput) error { + // Implement UploadCard logic here + return nil // Replace with appropriate error handling and logic +} diff --git a/internal/pipeline/manager/convert.go b/internal/pipeline/manager/convert.go new file mode 100644 index 0000000000..0ade78f39b --- /dev/null +++ b/internal/pipeline/manager/convert.go @@ -0,0 +1,261 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package manager + +import ( + "time" + + "github.com/harness/gitness/internal/pipeline/file" + "github.com/harness/gitness/livelog" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/drone/drone-go/drone" + "github.com/drone/runner-go/client" +) + +func ConvertToDroneStage(stage *types.Stage) *drone.Stage { + return &drone.Stage{ + ID: stage.ID, + BuildID: stage.ExecutionID, + Number: int(stage.Number), + Name: stage.Name, + Kind: stage.Kind, + Type: stage.Type, + Status: string(stage.Status), + Error: stage.Error, + ErrIgnore: stage.ErrIgnore, + ExitCode: stage.ExitCode, + Machine: stage.Machine, + OS: stage.OS, + Arch: stage.Arch, + Variant: stage.Variant, + Kernel: stage.Kernel, + Limit: stage.Limit, + LimitRepo: stage.LimitRepo, + Started: stage.Started / 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli() + Stopped: stage.Stopped / 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli() + Created: stage.Created / 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli() + Updated: stage.Updated / 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli() + Version: stage.Version, + OnSuccess: stage.OnSuccess, + OnFailure: stage.OnFailure, + DependsOn: stage.DependsOn, + Labels: stage.Labels, + Steps: ConvertToDroneSteps(stage.Steps), + } +} + +func ConvertToDroneSteps(steps []*types.Step) []*drone.Step { + droneSteps := make([]*drone.Step, len(steps)) + for i, step := range steps { + droneSteps[i] = ConvertToDroneStep(step) + } + return droneSteps +} + +func ConvertToDroneStep(step *types.Step) *drone.Step { + return &drone.Step{ + ID: step.ID, + StageID: step.StageID, + Number: int(step.Number), + Name: step.Name, + Status: string(step.Status), + Error: step.Error, + ErrIgnore: step.ErrIgnore, + ExitCode: step.ExitCode, + Started: step.Started / 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli() + Stopped: step.Stopped / 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli() + Version: step.Version, + DependsOn: step.DependsOn, + Image: step.Image, + Detached: step.Detached, + Schema: step.Schema, + } +} + +func ConvertFromDroneStep(step *drone.Step) *types.Step { + return &types.Step{ + ID: step.ID, + StageID: step.StageID, + Number: int64(step.Number), + Name: step.Name, + Status: enum.ParseCIStatus(step.Status), + Error: step.Error, + ErrIgnore: step.ErrIgnore, + ExitCode: step.ExitCode, + Started: step.Started * 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli() + Stopped: 
step.Stopped * 1e3, + Version: step.Version, + DependsOn: step.DependsOn, + Image: step.Image, + Detached: step.Detached, + Schema: step.Schema, + } +} + +func ConvertFromDroneSteps(steps []*drone.Step) []*types.Step { + typesSteps := make([]*types.Step, len(steps)) + for i, step := range steps { + typesSteps[i] = &types.Step{ + ID: step.ID, + StageID: step.StageID, + Number: int64(step.Number), + Name: step.Name, + Status: enum.ParseCIStatus(step.Status), + Error: step.Error, + ErrIgnore: step.ErrIgnore, + ExitCode: step.ExitCode, + Started: step.Started * 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli() + Stopped: step.Stopped * 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli() + Version: step.Version, + DependsOn: step.DependsOn, + Image: step.Image, + Detached: step.Detached, + Schema: step.Schema, + } + } + return typesSteps +} + +func ConvertFromDroneStage(stage *drone.Stage) *types.Stage { + return &types.Stage{ + ID: stage.ID, + ExecutionID: stage.BuildID, + Number: int64(stage.Number), + Name: stage.Name, + Kind: stage.Kind, + Type: stage.Type, + Status: enum.ParseCIStatus(stage.Status), + Error: stage.Error, + ErrIgnore: stage.ErrIgnore, + ExitCode: stage.ExitCode, + Machine: stage.Machine, + OS: stage.OS, + Arch: stage.Arch, + Variant: stage.Variant, + Kernel: stage.Kernel, + Limit: stage.Limit, + LimitRepo: stage.LimitRepo, + Started: stage.Started * 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli() + Stopped: stage.Stopped * 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli() + Version: stage.Version, + OnSuccess: stage.OnSuccess, + OnFailure: stage.OnFailure, + DependsOn: stage.DependsOn, + Labels: stage.Labels, + Steps: ConvertFromDroneSteps(stage.Steps), + } +} + +func ConvertFromDroneLine(l *drone.Line) *livelog.Line { + return &livelog.Line{ + Number: l.Number, + Message: l.Message, + Timestamp: l.Timestamp, + } +} + +func ConvertToDroneBuild(execution *types.Execution) *drone.Build { + return &drone.Build{ + ID: execution.ID, + RepoID: execution.RepoID, + Trigger: execution.Trigger, + Number: execution.Number, + Parent: execution.Parent, + Status: string(execution.Status), + Error: execution.Error, + Event: execution.Event, + Action: execution.Action, + Link: execution.Link, + Timestamp: execution.Timestamp, + Title: execution.Title, + Message: execution.Message, + Before: execution.Before, + After: execution.After, + Ref: execution.Ref, + Fork: execution.Fork, + Source: execution.Source, + Target: execution.Target, + Author: execution.Author, + AuthorName: execution.AuthorName, + AuthorEmail: execution.AuthorEmail, + AuthorAvatar: execution.AuthorAvatar, + Sender: execution.Sender, + Params: execution.Params, + Cron: execution.Cron, + Deploy: execution.Deploy, + DeployID: execution.DeployID, + Debug: execution.Debug, + Started: execution.Started / 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli() + Finished: execution.Finished / 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli() + Created: execution.Created / 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli() + Updated: execution.Updated / 1e3, // Drone uses Unix() timestamps whereas we use UnixMilli() + Version: execution.Version, + } +} + +func ConvertToDroneRepo(repo *types.Repository) *drone.Repo { + return &drone.Repo{ + ID: repo.ID, + Trusted: true, // as builds are running on user machines, the repo is marked trusted. 
+ UID: repo.UID, + UserID: repo.CreatedBy, + Name: repo.UID, + HTTPURL: repo.GitURL, + Link: repo.GitURL, + Private: !repo.IsPublic, + Created: repo.Created, + Updated: repo.Updated, + Version: repo.Version, + Branch: repo.DefaultBranch, + // TODO: We can get this from configuration once we start populating it. + // If this is not set drone runner cancels the build. + Timeout: int64(time.Duration(10 * time.Hour).Seconds()), + } +} + +func ConvertToDroneFile(file *file.File) *client.File { + return &client.File{ + Data: file.Data, + } +} + +func ConvertToDroneSecret(secret *types.Secret) *drone.Secret { + return &drone.Secret{ + Name: secret.UID, + Data: secret.Data, + } +} + +func ConvertToDroneSecrets(secrets []*types.Secret) []*drone.Secret { + ret := make([]*drone.Secret, len(secrets)) + for i, s := range secrets { + ret[i] = ConvertToDroneSecret(s) + } + return ret +} + +func ConvertToDroneNetrc(netrc *Netrc) *drone.Netrc { + if netrc == nil { + return nil + } + + return &drone.Netrc{ + Machine: netrc.Machine, + Login: netrc.Login, + Password: netrc.Password, + } +} diff --git a/internal/pipeline/manager/manager.go b/internal/pipeline/manager/manager.go new file mode 100644 index 0000000000..d93f313f30 --- /dev/null +++ b/internal/pipeline/manager/manager.go @@ -0,0 +1,485 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package manager + +import ( + "context" + "errors" + "fmt" + "io" + "net/url" + "time" + + "github.com/harness/gitness/internal/bootstrap" + "github.com/harness/gitness/internal/jwt" + "github.com/harness/gitness/internal/pipeline/file" + "github.com/harness/gitness/internal/pipeline/scheduler" + "github.com/harness/gitness/internal/sse" + "github.com/harness/gitness/internal/store" + urlprovider "github.com/harness/gitness/internal/url" + "github.com/harness/gitness/livelog" + gitness_store "github.com/harness/gitness/store" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/rs/zerolog/log" +) + +const ( + // pipelineJWTLifetime specifies the max lifetime of an ephemeral pipeline jwt token. + pipelineJWTLifetime = 72 * time.Hour + // pipelineJWTRole specifies the role of an ephemeral pipeline jwt token. + pipelineJWTRole = enum.MembershipRoleContributor +) + +var noContext = context.Background() + +var _ ExecutionManager = (*Manager)(nil) + +type ( + // Request provides filters when requesting a pending + // build from the queue. This allows an agent, for example, + // to request a build that matches its architecture and kernel. + Request struct { + Kind string `json:"kind"` + Type string `json:"type"` + OS string `json:"os"` + Arch string `json:"arch"` + Variant string `json:"variant"` + Kernel string `json:"kernel"` + Labels map[string]string `json:"labels,omitempty"` + } + + // Config represents a pipeline config file. 
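+	// Data holds the raw pipeline configuration contents (e.g. the YAML
+	// fetched via the FileService).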
+ Config struct { + Data string `json:"data"` + Kind string `json:"kind"` + } + + // Netrc contains login and initialization information used + // by an automated login process. + Netrc struct { + Machine string `json:"machine"` + Login string `json:"login"` + Password string `json:"password"` + } + + // ExecutionContext represents the minimum amount of information + // required by the runner to execute a build. + ExecutionContext struct { + Repo *types.Repository `json:"repository"` + Execution *types.Execution `json:"build"` + Stage *types.Stage `json:"stage"` + Secrets []*types.Secret `json:"secrets"` + Config *file.File `json:"config"` + Netrc *Netrc `json:"netrc"` + } + + // ExecutionManager encapsulates complex build operations and provides + // a simplified interface for build runners. + ExecutionManager interface { + // Request requests the next available build stage for execution. + Request(ctx context.Context, args *Request) (*types.Stage, error) + + // Watch watches for build cancellation requests. + Watch(ctx context.Context, executionID int64) (bool, error) + + // Accept accepts the build stage for execution. + Accept(ctx context.Context, stage int64, machine string) (*types.Stage, error) + + // Write writes a line to the build logs. + Write(ctx context.Context, step int64, line *livelog.Line) error + + // Details returns details about stage. + Details(ctx context.Context, stageID int64) (*ExecutionContext, error) + + // UploadLogs uploads the full logs. + UploadLogs(ctx context.Context, step int64, r io.Reader) error + + // BeforeStep signals the build step is about to start. + BeforeStep(ctx context.Context, step *types.Step) error + + // AfterStep signals the build step is complete. + AfterStep(ctx context.Context, step *types.Step) error + + // BeforeStage signals the build stage is about to start. + BeforeStage(ctx context.Context, stage *types.Stage) error + + // AfterStage signals the build stage is complete. + AfterStage(ctx context.Context, stage *types.Stage) error + } +) + +// Manager provides a simplified interface to the build runner so that it +// can more easily interact with the server. 
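+// The commented-out fields below mirror Drone's build manager and are not
+// wired up in gitness yet.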
+type Manager struct { + Executions store.ExecutionStore + Config *types.Config + FileService file.FileService + Pipelines store.PipelineStore + urlProvider *urlprovider.Provider + Checks store.CheckStore + // Converter store.ConvertService + SSEStreamer sse.Streamer + // Globals store.GlobalSecretStore + Logs store.LogStore + Logz livelog.LogStream + // Netrcs store.NetrcService + Repos store.RepoStore + Scheduler scheduler.Scheduler + Secrets store.SecretStore + // Status store.StatusService + Stages store.StageStore + Steps store.StepStore + // System *store.System + Users store.PrincipalStore + // Webhook store.WebhookSender +} + +func New( + config *types.Config, + executionStore store.ExecutionStore, + pipelineStore store.PipelineStore, + urlProvider *urlprovider.Provider, + sseStreamer sse.Streamer, + fileService file.FileService, + logStore store.LogStore, + logStream livelog.LogStream, + checkStore store.CheckStore, + repoStore store.RepoStore, + scheduler scheduler.Scheduler, + secretStore store.SecretStore, + stageStore store.StageStore, + stepStore store.StepStore, + userStore store.PrincipalStore, +) *Manager { + return &Manager{ + Config: config, + Executions: executionStore, + Pipelines: pipelineStore, + urlProvider: urlProvider, + SSEStreamer: sseStreamer, + FileService: fileService, + Logs: logStore, + Logz: logStream, + Checks: checkStore, + Repos: repoStore, + Scheduler: scheduler, + Secrets: secretStore, + Stages: stageStore, + Steps: stepStore, + Users: userStore, + } +} + +// Request requests the next available build stage for execution. +func (m *Manager) Request(ctx context.Context, args *Request) (*types.Stage, error) { + log := log.With(). + Str("kind", args.Kind). + Str("type", args.Type). + Str("os", args.OS). + Str("arch", args.Arch). + Str("kernel", args.Kernel). + Str("variant", args.Variant). + Logger() + log.Debug().Msg("manager: request queue item") + + stage, err := m.Scheduler.Request(ctx, scheduler.Filter{ + Kind: args.Kind, + Type: args.Type, + OS: args.OS, + Arch: args.Arch, + Kernel: args.Kernel, + Variant: args.Variant, + Labels: args.Labels, + }) + if err != nil && ctx.Err() != nil { + log.Debug().Err(err).Msg("manager: context canceled") + return nil, err + } + if err != nil { + log.Warn().Err(err).Msg("manager: request queue item error") + return nil, err + } + return stage, nil +} + +// Accept accepts the build stage for execution. It is possible for multiple +// agents to pull the same stage from the queue. +func (m *Manager) Accept(ctx context.Context, id int64, machine string) (*types.Stage, error) { + log := log.With(). + Int64("stage-id", id). + Str("machine", machine). + Logger() + log.Debug().Msg("manager: accept stage") + + stage, err := m.Stages.Find(noContext, id) + if err != nil { + log.Warn().Err(err).Msg("manager: cannot find stage") + return nil, err + } + if stage.Machine != "" { + log.Debug().Msg("manager: stage already assigned. abort.") + return nil, fmt.Errorf("stage already assigned, abort") + } + + stage.Machine = machine + stage.Status = enum.CIStatusPending + err = m.Stages.Update(noContext, stage) + if errors.Is(err, gitness_store.ErrVersionConflict) { + log.Debug().Err(err).Msg("manager: stage processed by another agent") + } else if err != nil { + log.Debug().Err(err).Msg("manager: cannot update stage") + } else { + log.Info().Msg("manager: stage accepted") + } + return stage, err +} + +// Write writes a line to the build logs. 
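+// Lines go to the live log stream (Logz); the complete log is persisted
+// separately through UploadLogs.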
+func (m *Manager) Write(ctx context.Context, step int64, line *livelog.Line) error { + err := m.Logz.Write(ctx, step, line) + if err != nil { + log.Warn().Int64("step-id", step).Err(err).Msg("manager: cannot write to log stream") + return err + } + return nil +} + +// UploadLogs uploads the full logs. +func (m *Manager) UploadLogs(ctx context.Context, step int64, r io.Reader) error { + err := m.Logs.Create(ctx, step, r) + if err != nil { + log.Error().Err(err).Int64("step-id", step).Msg("manager: cannot upload complete logs") + return err + } + return nil +} + +// Details provides details about the stage. +func (m *Manager) Details(ctx context.Context, stageID int64) (*ExecutionContext, error) { + log := log.With(). + Int64("stage-id", stageID). + Logger() + log.Debug().Msg("manager: fetching stage details") + + stage, err := m.Stages.Find(noContext, stageID) + if err != nil { + log.Warn().Err(err).Msg("manager: cannot find stage") + return nil, err + } + execution, err := m.Executions.Find(noContext, stage.ExecutionID) + if err != nil { + log.Warn().Err(err).Msg("manager: cannot find build") + return nil, err + } + pipeline, err := m.Pipelines.Find(noContext, execution.PipelineID) + if err != nil { + log.Warn().Err(err).Msg("manager: cannot find pipeline") + return nil, err + } + repo, err := m.Repos.Find(noContext, execution.RepoID) + if err != nil { + log.Warn().Err(err).Msg("manager: cannot find repo") + return nil, err + } + // Backfill clone URL + repo.GitURL = m.urlProvider.GenerateCICloneURL(repo.Path) + + stages, err := m.Stages.List(noContext, stage.ExecutionID) + if err != nil { + log.Warn().Err(err).Msg("manager: cannot list stages") + return nil, err + } + execution.Stages = stages + log = log.With(). + Int64("build", execution.Number). + Str("repo", repo.GetGitUID()). + Logger() + + // TODO: Currently we fetch all the secrets from the same space. + // This logic can be updated when needed. + secrets, err := m.Secrets.ListAll(noContext, repo.ParentID) + if err != nil { + log.Warn().Err(err).Msg("manager: cannot list secrets") + return nil, err + } + + // Fetch contents of YAML from the execution ref at the pipeline config path. + file, err := m.FileService.Get(noContext, repo, pipeline.ConfigPath, execution.After) + if err != nil { + log.Warn().Err(err).Msg("manager: cannot fetch file") + return nil, err + } + + netrc, err := m.createNetrc(repo) + if err != nil { + log.Warn().Err(err).Msg("manager: failed to create netrc") + return nil, err + } + + return &ExecutionContext{ + Repo: repo, + Execution: execution, + Stage: stage, + Secrets: secrets, + Config: file, + Netrc: netrc, + }, nil +} + +func (m *Manager) createNetrc(repo *types.Repository) (*Netrc, error) { + pipelinePrincipal := bootstrap.NewPipelineServiceSession().Principal + jwt, err := jwt.GenerateWithMembership( + pipelinePrincipal.ID, + repo.ParentID, + pipelineJWTRole, + pipelineJWTLifetime, + pipelinePrincipal.Salt, + ) + if err != nil { + return nil, fmt.Errorf("failed to create jwt: %w", err) + } + + cloneUrl, err := url.Parse(repo.GitURL) + if err != nil { + return nil, fmt.Errorf("failed to parse clone url '%s': %w", cloneUrl, err) + } + + return &Netrc{ + Machine: cloneUrl.Hostname(), + Login: pipelinePrincipal.UID, + Password: jwt, + }, nil +} + +// Before signals the build step is about to start. +func (m *Manager) BeforeStep(ctx context.Context, step *types.Step) error { + log := log.With(). + Str("step.status", string(step.Status)). + Str("step.name", step.Name). + Int64("step.id", step.ID). 
+ Logger() + + log.Debug().Msg("manager: updating step status") + + err := m.Logz.Create(noContext, step.ID) + if err != nil { + log.Warn().Err(err).Msg("manager: cannot create log stream") + return err + } + updater := &updater{ + Executions: m.Executions, + SSEStreamer: m.SSEStreamer, + Repos: m.Repos, + Steps: m.Steps, + Stages: m.Stages, + } + return updater.do(noContext, step) +} + +// After signals the build step is complete. +func (m *Manager) AfterStep(ctx context.Context, step *types.Step) error { + log := log.With(). + Str("step.status", string(step.Status)). + Str("step.name", step.Name). + Int64("step.id", step.ID). + Logger() + log.Debug().Msg("manager: updating step status") + + var retErr error + updater := &updater{ + Executions: m.Executions, + SSEStreamer: m.SSEStreamer, + Repos: m.Repos, + Steps: m.Steps, + Stages: m.Stages, + } + + if err := updater.do(noContext, step); err != nil { + retErr = err + log.Warn().Err(err).Msg("manager: cannot update step") + } + + if err := m.Logz.Delete(noContext, step.ID); err != nil { + log.Warn().Err(err).Msg("manager: cannot teardown log stream") + } + return retErr +} + +// BeforeAll signals the build stage is about to start. +func (m *Manager) BeforeStage(ctx context.Context, stage *types.Stage) error { + s := &setup{ + Executions: m.Executions, + Checks: m.Checks, + Pipelines: m.Pipelines, + SSEStreamer: m.SSEStreamer, + Repos: m.Repos, + Steps: m.Steps, + Stages: m.Stages, + Users: m.Users, + } + + return s.do(noContext, stage) +} + +// AfterAll signals the build stage is complete. +func (m *Manager) AfterStage(ctx context.Context, stage *types.Stage) error { + t := &teardown{ + Executions: m.Executions, + Pipelines: m.Pipelines, + Checks: m.Checks, + SSEStreamer: m.SSEStreamer, + Logs: m.Logz, + Repos: m.Repos, + Scheduler: m.Scheduler, + Steps: m.Steps, + Stages: m.Stages, + } + return t.do(noContext, stage) +} + +// Watch watches for build cancellation requests. +func (m *Manager) Watch(ctx context.Context, executionID int64) (bool, error) { + ok, err := m.Scheduler.Cancelled(ctx, executionID) + // we expect a context cancel error here which + // indicates a polling timeout. The subscribing + // client should look for the context cancel error + // and resume polling. + if err != nil { + return ok, err + } + + // // TODO: we should be able to return + // // immediately if Cancelled returns true. This requires + // // some more testing but would avoid the extra database + // // call. + // if ok { + // return ok, err + // } + + // if no error is returned we should check + // the database to see if the build is complete. If + // complete, return true. + execution, err := m.Executions.Find(ctx, executionID) + if err != nil { + log := log.With(). + Int64("execution.id", executionID). + Logger() + log.Warn().Msg("manager: cannot find build") + return ok, fmt.Errorf("could not find build for cancellation: %w", err) + } + return execution.Status.IsDone(), nil +} diff --git a/internal/pipeline/manager/setup.go b/internal/pipeline/manager/setup.go new file mode 100644 index 0000000000..0a4bac0737 --- /dev/null +++ b/internal/pipeline/manager/setup.go @@ -0,0 +1,136 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package manager + +import ( + "context" + "errors" + "time" + + "github.com/harness/gitness/internal/pipeline/checks" + "github.com/harness/gitness/internal/sse" + "github.com/harness/gitness/internal/store" + gitness_store "github.com/harness/gitness/store" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/rs/zerolog/log" +) + +type setup struct { + Executions store.ExecutionStore + Checks store.CheckStore + SSEStreamer sse.Streamer + Pipelines store.PipelineStore + Repos store.RepoStore + Steps store.StepStore + Stages store.StageStore + Users store.PrincipalStore +} + +func (s *setup) do(ctx context.Context, stage *types.Stage) error { + execution, err := s.Executions.Find(noContext, stage.ExecutionID) + if err != nil { + log.Error().Err(err).Msg("manager: cannot find the execution") + return err + } + + log := log.With(). + Int64("execution.number", execution.Number). + Int64("execution.id", execution.ID). + Int64("stage.id", stage.ID). + Int64("repo.id", execution.RepoID). + Logger() + + repo, err := s.Repos.Find(noContext, execution.RepoID) + if err != nil { + log.Error().Err(err).Msg("manager: cannot find the repository") + return err + } + + if len(stage.Error) > 500 { + stage.Error = stage.Error[:500] + } + err = s.Stages.Update(noContext, stage) + if err != nil { + log.Error().Err(err). + Str("stage.status", string(stage.Status)). + Msg("manager: cannot update the stage") + return err + } + + // TODO: create all the steps as part of a single transaction? + for _, step := range stage.Steps { + if len(step.Error) > 500 { + step.Error = step.Error[:500] + } + err := s.Steps.Create(noContext, step) + if err != nil { + log.Error().Err(err). + Str("stage.status", string(stage.Status)). + Str("step.name", step.Name). + Int64("step.id", step.ID). + Msg("manager: cannot persist the step") + return err + } + } + + _, err = s.updateExecution(noContext, execution) + if err != nil { + log.Error().Err(err).Msg("manager: cannot update the execution") + return err + } + pipeline, err := s.Pipelines.Find(ctx, execution.PipelineID) + if err != nil { + log.Error().Err(err).Msg("manager: cannot find pipeline") + return err + } + // try to write to the checks store - if not, log an error and continue + err = checks.Write(ctx, s.Checks, execution, pipeline) + if err != nil { + log.Error().Err(err).Msg("manager: could not write to checks store") + } + stages, err := s.Stages.ListWithSteps(noContext, execution.ID) + if err != nil { + log.Error().Err(err).Msg("manager: could not list stages with steps") + return err + } + execution.Stages = stages + err = s.SSEStreamer.Publish(noContext, repo.ParentID, enum.SSETypeExecutionRunning, execution) + if err != nil { + log.Warn().Err(err).Msg("manager: could not publish execution event") + } + + return nil +} + +// helper function that updates the execution status from pending to running. +// This accounts for the fact that another agent may have already updated +// the execution status, which may happen if two stages execute concurrently. 
+func (s *setup) updateExecution(ctx context.Context, execution *types.Execution) (bool, error) { + if execution.Status != enum.CIStatusPending { + return false, nil + } + execution.Started = time.Now().UnixMilli() + execution.Status = enum.CIStatusRunning + err := s.Executions.Update(noContext, execution) + if errors.Is(err, gitness_store.ErrVersionConflict) { + return false, nil + } + if err != nil { + return false, err + } + return true, nil +} diff --git a/internal/pipeline/manager/teardown.go b/internal/pipeline/manager/teardown.go new file mode 100644 index 0000000000..06e1ca0b09 --- /dev/null +++ b/internal/pipeline/manager/teardown.go @@ -0,0 +1,354 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package manager + +import ( + "context" + "strings" + "time" + + "github.com/harness/gitness/internal/pipeline/checks" + "github.com/harness/gitness/internal/pipeline/scheduler" + "github.com/harness/gitness/internal/sse" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/livelog" + gitness_store "github.com/harness/gitness/store" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/hashicorp/go-multierror" + "github.com/rs/zerolog/log" +) + +type teardown struct { + Executions store.ExecutionStore + Checks store.CheckStore + Pipelines store.PipelineStore + SSEStreamer sse.Streamer + Logs livelog.LogStream + Scheduler scheduler.Scheduler + Repos store.RepoStore + Steps store.StepStore + Stages store.StageStore +} + +func (t *teardown) do(ctx context.Context, stage *types.Stage) error { + log := log.With(). + Int64("stage.id", stage.ID). + Logger() + log.Debug().Msg("manager: stage is complete. teardown") + + execution, err := t.Executions.Find(noContext, stage.ExecutionID) + if err != nil { + log.Error().Err(err).Msg("manager: cannot find the execution") + return err + } + + log = log.With(). + Int64("execution.number", execution.Number). + Int64("execution.id", execution.ID). + Int64("repo.id", execution.RepoID). + Str("stage.status", string(stage.Status)). + Logger() + + repo, err := t.Repos.Find(noContext, execution.RepoID) + if err != nil { + log.Error().Err(err).Msg("manager: cannot find the repository") + return err + } + + for _, step := range stage.Steps { + if len(step.Error) > 500 { + step.Error = step.Error[:500] + } + err := t.Steps.Update(noContext, step) + if err != nil { + log = log.With(). + Str("step.name", step.Name). + Int64("step.id", step.ID). + Err(err). + Logger() + + log.Error().Msg("manager: cannot persist the step") + return err + } + } + + if len(stage.Error) > 500 { + stage.Error = stage.Error[:500] + } + + err = t.Stages.Update(noContext, stage) + if err != nil { + log.Error().Err(err). + Msg("manager: cannot update the stage") + return err + } + + for _, step := range stage.Steps { + t.Logs.Delete(noContext, step.ID) + } + + stages, err := t.Stages.ListWithSteps(noContext, execution.ID) + if err != nil { + log.Warn().Err(err). 
+ Msg("manager: cannot get stages") + return err + } + + err = t.cancelDownstream(ctx, stages) + if err != nil { + log.Error().Err(err). + Msg("manager: cannot cancel downstream builds") + return err + } + + err = t.scheduleDownstream(ctx, stage, stages) + if err != nil { + log.Error().Err(err). + Msg("manager: cannot schedule downstream builds") + return err + } + + if !isexecutionComplete(stages) { + log.Warn().Err(err). + Msg("manager: execution pending completion of additional stages") + return nil + } + + log.Info().Msg("manager: execution is finished, teardown") + + execution.Status = enum.CIStatusSuccess + execution.Finished = time.Now().UnixMilli() + for _, sibling := range stages { + if sibling.Status == enum.CIStatusKilled { + execution.Status = enum.CIStatusKilled + break + } + if sibling.Status == enum.CIStatusFailure { + execution.Status = enum.CIStatusFailure + break + } + if sibling.Status == enum.CIStatusError { + execution.Status = enum.CIStatusError + break + } + } + if execution.Started == 0 { + execution.Started = execution.Finished + } + + err = t.Executions.Update(noContext, execution) + if err == gitness_store.ErrVersionConflict { + log.Warn().Err(err). + Msg("manager: execution updated by another goroutine") + return nil + } + if err != nil { + log.Warn().Err(err). + Msg("manager: cannot update the execution") + return err + } + + execution.Stages = stages + err = t.SSEStreamer.Publish(noContext, repo.ParentID, enum.SSETypeExecutionCompleted, execution) + if err != nil { + log.Warn().Err(err). + Msg("manager: could not publish execution completed event") + } + + pipeline, err := t.Pipelines.Find(ctx, execution.PipelineID) + if err != nil { + log.Error().Err(err).Msg("manager: cannot find pipeline") + return err + } + // try to write to the checks store - if not, log an error and continue + err = checks.Write(ctx, t.Checks, execution, pipeline) + if err != nil { + log.Error().Err(err).Msg("manager: could not write to checks store") + } + + return nil +} + +// cancelDownstream is a helper function that tests for +// downstream stages and cancels them based on the overall +// pipeline state. +func (t *teardown) cancelDownstream( + ctx context.Context, + stages []*types.Stage, +) error { + failed := false + for _, s := range stages { + // check pipeline state + if s.Status.IsFailed() { + failed = true + } + } + + var errs error + for _, s := range stages { + if s.Status != enum.CIStatusWaitingOnDeps { + continue + } + + var skip bool + if failed && !s.OnFailure { + skip = true + } + if !failed && !s.OnSuccess { + skip = true + } + if !skip { + continue + } + + if !areDepsComplete(s, stages) { + continue + } + + log := log.With(). + Int64("stage.id", s.ID). + Bool("stage.on_success", s.OnSuccess). + Bool("stage.on_failure", s.OnFailure). + Bool("failed", failed). + Str("stage.depends_on", strings.Join(s.DependsOn, ",")). + Logger() + + log.Debug().Msg("manager: skipping step") + + s.Status = enum.CIStatusSkipped + s.Started = time.Now().UnixMilli() + s.Stopped = time.Now().UnixMilli() + err := t.Stages.Update(noContext, s) + if err == gitness_store.ErrVersionConflict { + t.resync(ctx, s) + continue + } + if err != nil { + log.Error().Err(err). 
+ Msg("manager: cannot update stage status") + errs = multierror.Append(errs, err) + } + } + return errs +} + +func isexecutionComplete(stages []*types.Stage) bool { + for _, stage := range stages { + switch stage.Status { + case enum.CIStatusPending, + enum.CIStatusRunning, + enum.CIStatusWaitingOnDeps, + enum.CIStatusDeclined, + enum.CIStatusBlocked: + return false + } + } + return true +} + +func areDepsComplete(stage *types.Stage, stages []*types.Stage) bool { + deps := map[string]struct{}{} + for _, dep := range stage.DependsOn { + deps[dep] = struct{}{} + } + for _, sibling := range stages { + if _, ok := deps[sibling.Name]; !ok { + continue + } + if !sibling.Status.IsDone() { + return false + } + } + return true +} + +// scheduleDownstream is a helper function that tests for +// downstream stages and schedules stages if all dependencies +// and execution requirements are met. +func (t *teardown) scheduleDownstream( + ctx context.Context, + stage *types.Stage, + stages []*types.Stage, +) error { + + var errs error + for _, sibling := range stages { + if sibling.Status == enum.CIStatusWaitingOnDeps { + if len(sibling.DependsOn) == 0 { + continue + } + + // PROBLEM: isDep only checks the direct parent + // i think .... + // if isDep(stage, sibling) == false { + // continue + // } + if !areDepsComplete(sibling, stages) { + continue + } + // if isLastDep(stage, sibling, stages) == false { + // continue + // } + + log := log.With(). + Int64("stage.id", sibling.ID). + Str("stage.name", sibling.Name). + Str("stage.depends_on", strings.Join(sibling.DependsOn, ",")). + Logger() + + log.Debug().Msg("manager: schedule next stage") + + sibling.Status = enum.CIStatusPending + err := t.Stages.Update(noContext, sibling) + if err == gitness_store.ErrVersionConflict { + t.resync(ctx, sibling) + continue + } + if err != nil { + log.Error().Err(err). + Msg("manager: cannot update stage status") + errs = multierror.Append(errs, err) + } + + err = t.Scheduler.Schedule(noContext, sibling) + if err != nil { + log.Error().Err(err). + Msg("manager: cannot schedule stage") + errs = multierror.Append(errs, err) + } + } + } + return errs +} + +// resync updates the stage from the database. Note that it does +// not update the Version field. This is by design. It prevents +// the current go routine from updating a stage that has been +// updated by another go routine. +func (t *teardown) resync(ctx context.Context, stage *types.Stage) error { + updated, err := t.Stages.Find(ctx, stage.ID) + if err != nil { + return err + } + stage.Status = updated.Status + stage.Error = updated.Error + stage.ExitCode = updated.ExitCode + stage.Machine = updated.Machine + stage.Started = updated.Started + stage.Stopped = updated.Stopped + return nil +} diff --git a/internal/pipeline/manager/updater.go b/internal/pipeline/manager/updater.go new file mode 100644 index 0000000000..38d24ce977 --- /dev/null +++ b/internal/pipeline/manager/updater.go @@ -0,0 +1,83 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package manager + +import ( + "context" + + "github.com/harness/gitness/internal/sse" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/rs/zerolog/log" +) + +type updater struct { + Executions store.ExecutionStore + Repos store.RepoStore + SSEStreamer sse.Streamer + Steps store.StepStore + Stages store.StageStore +} + +func (u *updater) do(ctx context.Context, step *types.Step) error { + log := log.With(). + Str("step.name", step.Name). + Str("step.status", string(step.Status)). + Int64("step.id", step.ID). + Logger() + + if len(step.Error) > 500 { + step.Error = step.Error[:500] + } + err := u.Steps.Update(noContext, step) + if err != nil { + log.Error().Err(err).Msg("manager: cannot update step") + return err + } + + stage, err := u.Stages.Find(noContext, step.StageID) + if err != nil { + log.Error().Err(err).Msg("manager: cannot find stage") + return nil + } + + execution, err := u.Executions.Find(noContext, stage.ExecutionID) + if err != nil { + log.Error().Err(err).Msg("manager: cannot find execution") + return nil + } + + repo, err := u.Repos.Find(noContext, execution.RepoID) + if err != nil { + log.Error().Err(err).Msg("manager: cannot find repo") + return nil + } + + stages, err := u.Stages.ListWithSteps(noContext, stage.ExecutionID) + if err != nil { + log.Error().Err(err).Msg("manager: cannot find stages") + return nil + } + + execution.Stages = stages + err = u.SSEStreamer.Publish(noContext, repo.ParentID, enum.SSETypeExecutionUpdated, execution) + if err != nil { + log.Warn().Err(err).Msg("manager: cannot publish execution updated event") + } + + return nil +} diff --git a/internal/pipeline/manager/wire.go b/internal/pipeline/manager/wire.go new file mode 100644 index 0000000000..d53bf1be77 --- /dev/null +++ b/internal/pipeline/manager/wire.go @@ -0,0 +1,61 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package manager + +import ( + "github.com/harness/gitness/internal/pipeline/file" + "github.com/harness/gitness/internal/pipeline/scheduler" + "github.com/harness/gitness/internal/sse" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/internal/url" + "github.com/harness/gitness/livelog" + "github.com/harness/gitness/types" + + "github.com/drone/runner-go/client" + "github.com/google/wire" +) + +// WireSet provides a wire set for this package. +var WireSet = wire.NewSet( + ProvideExecutionManager, + ProvideExecutionClient, +) + +// ProvideExecutionManager provides an execution manager. 
+func ProvideExecutionManager( + config *types.Config, + executionStore store.ExecutionStore, + pipelineStore store.PipelineStore, + urlProvider *url.Provider, + sseStreamer sse.Streamer, + fileService file.FileService, + logStore store.LogStore, + logStream livelog.LogStream, + checkStore store.CheckStore, + repoStore store.RepoStore, + scheduler scheduler.Scheduler, + secretStore store.SecretStore, + stageStore store.StageStore, + stepStore store.StepStore, + userStore store.PrincipalStore) ExecutionManager { + return New(config, executionStore, pipelineStore, urlProvider, sseStreamer, fileService, logStore, + logStream, checkStore, repoStore, scheduler, secretStore, stageStore, stepStore, userStore) +} + +// ProvideExecutionClient provides a client implementation to interact with the execution manager. +// We use an embedded client here +func ProvideExecutionClient(manager ExecutionManager, config *types.Config) client.Client { + return NewEmbeddedClient(manager, config) +} diff --git a/internal/pipeline/plugin/manager.go b/internal/pipeline/plugin/manager.go new file mode 100644 index 0000000000..ca9cc1130a --- /dev/null +++ b/internal/pipeline/plugin/manager.go @@ -0,0 +1,240 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package plugin + +import ( + "archive/zip" + "bytes" + "context" + "fmt" + "io" + "net/http" + "os" + "path/filepath" + + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/types" + + v1yaml "github.com/drone/spec/dist/go" + "github.com/drone/spec/dist/go/parse" + "github.com/rs/zerolog/log" +) + +// Lookup returns a resource by name, kind and type. +type LookupFunc func(name, kind, typ, version string) (*v1yaml.Config, error) + +type PluginManager struct { + config *types.Config + pluginStore store.PluginStore +} + +func NewPluginManager( + config *types.Config, + pluginStore store.PluginStore, +) *PluginManager { + return &PluginManager{ + config: config, + pluginStore: pluginStore, + } +} + +// GetLookupFn returns a lookup function for plugins which can be used in the resolver. +func (m *PluginManager) GetLookupFn() LookupFunc { + return func(name, kind, typ, version string) (*v1yaml.Config, error) { + if kind != "plugin" { + return nil, fmt.Errorf("only plugin kind supported") + } + if typ != "step" { + return nil, fmt.Errorf("only step plugins supported") + } + plugin, err := m.pluginStore.Find(context.Background(), name, version) + if err != nil { + return nil, fmt.Errorf("could not lookup plugin: %w", err) + } + // Convert plugin to v1yaml spec + config, err := parse.ParseString(plugin.Spec) + if err != nil { + return nil, fmt.Errorf("could not unmarshal plugin to v1yaml spec: %w", err) + } + + return config, nil + } +} + +// Populate fetches plugins information from an external source or a local zip +// and populates in the DB. 
+func (m *PluginManager) Populate(ctx context.Context) error { + path := m.config.CI.PluginsZipPath + if path == "" { + return fmt.Errorf("plugins path not provided to read schemas from") + } + + var zipFile *zip.ReadCloser + if _, err := os.Stat(path); err != nil { // local path doesn't exist - must be a remote link + // Download zip file locally + f, err := os.CreateTemp(os.TempDir(), "plugins.zip") + if err != nil { + return fmt.Errorf("could not create temp file: %w", err) + } + defer os.Remove(f.Name()) + err = downloadZip(path, f.Name()) + if err != nil { + return fmt.Errorf("could not download remote zip: %w", err) + } + path = f.Name() + } + // open up a zip reader for the file + zipFile, err := zip.OpenReader(path) + if err != nil { + return fmt.Errorf("could not open zip for reading: %w", err) + } + defer zipFile.Close() + + // upsert any new plugins. + err = m.traverseAndUpsertPlugins(ctx, zipFile) + if err != nil { + return fmt.Errorf("could not upsert plugins: %w", err) + } + + return nil +} + +// downloadZip is a helper function that downloads a zip from a URL and +// writes it to a path in the local filesystem. +func downloadZip(url, path string) error { + response, err := http.Get(url) + if err != nil { + return fmt.Errorf("could not get zip from url: %w", err) + } + defer response.Body.Close() + + // Create the file on the local FS. If it exists, it will be truncated. + output, err := os.Create(path) + if err != nil { + return fmt.Errorf("could not create output file: %w", err) + } + defer output.Close() + + // Copy the zip output to the file. + _, err = io.Copy(output, response.Body) + if err != nil { + return fmt.Errorf("could not copy response body output to file: %w", err) + } + + return nil +} + +// traverseAndUpsertPlugins traverses through the zip and upserts plugins into the database +// if they are not present. +func (m *PluginManager) traverseAndUpsertPlugins(ctx context.Context, rc *zip.ReadCloser) error { + plugins, err := m.pluginStore.ListAll(ctx) + if err != nil { + return fmt.Errorf("could not list plugins: %w", err) + } + // Put the plugins in a map so we don't have to perform frequent DB queries. + pluginMap := map[string]*types.Plugin{} + for _, p := range plugins { + pluginMap[p.UID] = p + } + cnt := 0 + for _, file := range rc.File { + matched, err := filepath.Match("**/plugins/*/*.yaml", file.Name) + if err != nil { // only returns BadPattern error which shouldn't happen + return fmt.Errorf("could not glob pattern: %w", err) + } + if !matched { + continue + } + fc, err := file.Open() + if err != nil { + log.Warn().Err(err).Str("name", file.Name).Msg("could not open file") + continue + } + defer fc.Close() + var buf bytes.Buffer + _, err = io.Copy(&buf, fc) + if err != nil { + log.Warn().Err(err).Str("name", file.Name).Msg("could not read file contents") + continue + } + // schema should be a valid config - if not log an error and continue. 
+ config, err := parse.ParseBytes(buf.Bytes()) + if err != nil { + log.Warn().Err(err).Str("name", file.Name).Msg("could not parse schema into valid config") + continue + } + + var desc string + switch vv := config.Spec.(type) { + case *v1yaml.PluginStep: + desc = vv.Description + case *v1yaml.PluginStage: + desc = vv.Description + default: + log.Warn().Str("name", file.Name).Msg("schema did not match a valid plugin schema") + continue + } + + plugin := &types.Plugin{ + Description: desc, + UID: config.Name, + Type: config.Type, + Spec: buf.String(), + } + + // Try to read the logo if it exists in the same directory + dir := filepath.Dir(file.Name) + logoFile := filepath.Join(dir, "logo.svg") + if lf, err := rc.Open(logoFile); err == nil { // if we can open the logo file + var lbuf bytes.Buffer + _, err = io.Copy(&lbuf, lf) + if err != nil { + log.Warn().Err(err).Str("name", file.Name).Msg("could not copy logo file") + } else { + plugin.Logo = lbuf.String() + } + } + + // If plugin already exists in the database, skip upsert + if p, ok := pluginMap[plugin.UID]; ok { + if p.Matches(plugin) { + continue + } + + } + + // If plugin name exists with a different spec, call update - otherwise call create. + // TODO: Once we start using versions, we can think of whether we want to + // keep different schemas for each version in the database. For now, we will + // simply overwrite the existing version with the new version. + if _, ok := pluginMap[plugin.UID]; ok { + err = m.pluginStore.Update(ctx, plugin) + if err != nil { + log.Warn().Str("name", file.Name).Err(err).Msg("could not update plugin") + continue + } + log.Info().Str("name", file.Name).Msg("detected changes: updated existing plugin entry") + } else { + err = m.pluginStore.Create(ctx, plugin) + if err != nil { + log.Warn().Str("name", file.Name).Err(err).Msg("could not create plugin in DB") + continue + } + cnt++ + } + } + log.Info().Msgf("added %d new entries to plugins", cnt) + return nil +} diff --git a/internal/pipeline/plugin/wire.go b/internal/pipeline/plugin/wire.go new file mode 100644 index 0000000000..9a6433f9ca --- /dev/null +++ b/internal/pipeline/plugin/wire.go @@ -0,0 +1,35 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package plugin + +import ( + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/types" + + "github.com/google/wire" +) + +// WireSet provides a wire set for this package. +var WireSet = wire.NewSet( + ProvidePluginManager, +) + +// ProvidePluginManager provides an execution runner. +func ProvidePluginManager( + config *types.Config, + pluginStore store.PluginStore, +) *PluginManager { + return NewPluginManager(config, pluginStore) +} diff --git a/internal/pipeline/runner/poller.go b/internal/pipeline/runner/poller.go new file mode 100644 index 0000000000..765de03702 --- /dev/null +++ b/internal/pipeline/runner/poller.go @@ -0,0 +1,55 @@ +// Copyright 2023 Harness, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package runner + +import ( + "context" + "fmt" + "runtime/debug" + + "github.com/harness/gitness/types" + + "github.com/drone-runners/drone-runner-docker/engine/resource" + runtime2 "github.com/drone-runners/drone-runner-docker/engine2/runtime" + "github.com/drone/drone-go/drone" + runnerclient "github.com/drone/runner-go/client" + "github.com/drone/runner-go/poller" +) + +func NewExecutionPoller( + runner *runtime2.Runner, + config *types.Config, + client runnerclient.Client, +) *poller.Poller { + // taking the cautious approach of recovering in case of panics + runWithRecovery := func(ctx context.Context, stage *drone.Stage) (err error) { + defer func() { + if r := recover(); r != nil { + err = fmt.Errorf("panic received while executing run: %s", debug.Stack()) + } + }() + return runner.Run(ctx, stage) + } + + return &poller.Poller{ + Client: client, + Dispatch: runWithRecovery, + Filter: &runnerclient.Filter{ + Kind: resource.Kind, + Type: resource.Type, + // TODO: Check if other parameters are needed. + }, + } +} diff --git a/internal/pipeline/runner/runner.go b/internal/pipeline/runner/runner.go new file mode 100644 index 0000000000..a1f43ce395 --- /dev/null +++ b/internal/pipeline/runner/runner.go @@ -0,0 +1,102 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
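Aside, for illustration only (not part of the patch): NewExecutionPoller above wraps the dispatch function so that a panic inside a stage run is converted into an ordinary error instead of crashing the poller goroutine. A minimal, self-contained sketch of the same recover-to-error pattern using only the standard library; the names safeRun and work are placeholders, not identifiers from this repository:

package main

import (
	"fmt"
	"runtime/debug"
)

// safeRun invokes fn and converts a panic into a returned error,
// keeping the stack trace for later inspection.
func safeRun(fn func() error) (err error) {
	defer func() {
		if r := recover(); r != nil {
			err = fmt.Errorf("panic recovered: %v\n%s", r, debug.Stack())
		}
	}()
	return fn()
}

func main() {
	work := func() error { panic("boom") }
	if err := safeRun(work); err != nil {
		fmt.Println("recovered, the caller keeps running:", err != nil)
	}
}

The named return value is what allows the deferred function to replace the result after the panic is recovered.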
+ +package runner + +import ( + "github.com/harness/gitness/internal/pipeline/manager" + "github.com/harness/gitness/internal/pipeline/plugin" + "github.com/harness/gitness/types" + + "github.com/drone-runners/drone-runner-docker/engine" + "github.com/drone-runners/drone-runner-docker/engine/compiler" + "github.com/drone-runners/drone-runner-docker/engine/linter" + "github.com/drone-runners/drone-runner-docker/engine/resource" + compiler2 "github.com/drone-runners/drone-runner-docker/engine2/compiler" + engine2 "github.com/drone-runners/drone-runner-docker/engine2/engine" + runtime2 "github.com/drone-runners/drone-runner-docker/engine2/runtime" + "github.com/drone/drone-go/drone" + runnerclient "github.com/drone/runner-go/client" + "github.com/drone/runner-go/environ/provider" + "github.com/drone/runner-go/pipeline/reporter/history" + "github.com/drone/runner-go/pipeline/reporter/remote" + "github.com/drone/runner-go/pipeline/runtime" + "github.com/drone/runner-go/pipeline/uploader" + "github.com/drone/runner-go/registry" + "github.com/drone/runner-go/secret" +) + +func NewExecutionRunner( + config *types.Config, + client runnerclient.Client, + pluginManager *plugin.PluginManager, + m manager.ExecutionManager, +) (*runtime2.Runner, error) { + // For linux, containers need to have extra hosts set in order to interact with + // the gitness container. + extraHosts := []string{"host.docker.internal:host-gateway"} + compiler := &compiler.Compiler{ + Environ: provider.Static(map[string]string{}), + Registry: registry.Static([]*drone.Registry{}), + Secret: secret.Encrypted(), + ExtraHosts: extraHosts, + } + + remote := remote.New(client) + upload := uploader.New(client) + tracer := history.New(remote) + engine, err := engine.NewEnv(engine.Opts{}) + if err != nil { + return nil, err + } + + exec := runtime.NewExecer(tracer, remote, upload, + engine, int64(config.CI.ParallelWorkers)) + + legacyRunner := &runtime.Runner{ + Machine: config.InstanceID, + Client: client, + Reporter: tracer, + Lookup: resource.Lookup, + Lint: linter.New().Lint, + Compiler: compiler, + Exec: exec.Exec, + } + + engine2, err := engine2.NewEnv(engine2.Opts{}) + if err != nil { + return nil, err + } + + exec2 := runtime2.NewExecer(tracer, remote, upload, engine2, int64(config.CI.ParallelWorkers)) + + compiler2 := &compiler2.CompilerImpl{ + Environ: provider.Static(map[string]string{}), + Registry: registry.Static([]*drone.Registry{}), + Secret: secret.Encrypted(), + ExtraHosts: extraHosts, + } + + runner := &runtime2.Runner{ + Machine: config.InstanceID, + Client: client, + Resolver: pluginManager.GetLookupFn(), + Reporter: tracer, + Compiler: compiler2, + Exec: exec2.Exec, + LegacyRunner: legacyRunner, + } + + return runner, nil +} diff --git a/internal/pipeline/runner/wire.go b/internal/pipeline/runner/wire.go new file mode 100644 index 0000000000..1e25c9b2ac --- /dev/null +++ b/internal/pipeline/runner/wire.go @@ -0,0 +1,52 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
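For readers unfamiliar with github.com/google/wire, which the wire.go files in this patch rely on: providers are plain constructors grouped into wire.NewSet, and an injector template guarded by the wireinject build tag is expanded by the wire code generator into real initialization code. The following is a simplified, hypothetical sketch of that pattern, mirroring the shape of the WireSet variables declared in these files; the Config, Server, and InitializeServer names are illustrative and not taken from gitness:

//go:build wireinject

package main

import "github.com/google/wire"

type Config struct{ Addr string }
type Server struct{ cfg *Config }

func NewConfig() *Config            { return &Config{Addr: ":3000"} }
func NewServer(cfg *Config) *Server { return &Server{cfg: cfg} }

// AppSet groups the providers, analogous to the WireSet variables in this patch.
var AppSet = wire.NewSet(NewConfig, NewServer)

// InitializeServer is only a template; running the wire tool generates the
// implementation that calls NewConfig and NewServer in dependency order.
func InitializeServer() *Server {
	wire.Build(AppSet)
	return nil
}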
+ +package runner + +import ( + "github.com/harness/gitness/internal/pipeline/manager" + "github.com/harness/gitness/internal/pipeline/plugin" + "github.com/harness/gitness/types" + + runtime2 "github.com/drone-runners/drone-runner-docker/engine2/runtime" + runnerclient "github.com/drone/runner-go/client" + "github.com/drone/runner-go/poller" + "github.com/google/wire" +) + +// WireSet provides a wire set for this package. +var WireSet = wire.NewSet( + ProvideExecutionRunner, + ProvideExecutionPoller, +) + +// ProvideExecutionRunner provides an execution runner. +func ProvideExecutionRunner( + config *types.Config, + client runnerclient.Client, + pluginManager *plugin.PluginManager, + manager manager.ExecutionManager, +) (*runtime2.Runner, error) { + return NewExecutionRunner(config, client, pluginManager, manager) +} + +// ProvideExecutionPoller provides a poller which can poll the manager +// for new builds and execute them. +func ProvideExecutionPoller( + runner *runtime2.Runner, + config *types.Config, + client runnerclient.Client, +) *poller.Poller { + return NewExecutionPoller(runner, config, client) +} diff --git a/internal/pipeline/scheduler/canceler.go b/internal/pipeline/scheduler/canceler.go new file mode 100644 index 0000000000..4ecc468822 --- /dev/null +++ b/internal/pipeline/scheduler/canceler.go @@ -0,0 +1,90 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package scheduler + +import ( + "context" + "sync" + "time" +) + +type canceler struct { + sync.Mutex + + subscribers map[chan struct{}]int64 + cancelled map[int64]time.Time +} + +func newCanceler() *canceler { + return &canceler{ + subscribers: make(map[chan struct{}]int64), + cancelled: make(map[int64]time.Time), + } +} + +func (c *canceler) Cancel(ctx context.Context, id int64) error { + c.Lock() + defer c.Unlock() + c.cancelled[id] = time.Now().Add(time.Minute * 5) + for subscriber, build := range c.subscribers { + if id == build { + close(subscriber) + } + } + c.collect() + return nil +} + +func (c *canceler) Cancelled(ctx context.Context, id int64) (bool, error) { + subscriber := make(chan struct{}) + c.Lock() + c.subscribers[subscriber] = id + c.Unlock() + + defer func() { + c.Lock() + delete(c.subscribers, subscriber) + c.Unlock() + }() + + for { + select { + case <-ctx.Done(): + return false, ctx.Err() + case <-time.After(time.Minute): + c.Lock() + _, ok := c.cancelled[id] + c.Unlock() + if ok { + return true, nil + } + case <-subscriber: + return true, nil + } + } +} + +func (c *canceler) collect() { + // the list of cancelled builds is stored with a ttl, and + // is not removed until the ttl is reached. This provides + // adequate window for clients with connectivity issues to + // reconnect and receive notification of cancel events. 
+ now := time.Now() + for build, timestamp := range c.cancelled { + if now.After(timestamp) { + delete(c.cancelled, build) + } + } +} diff --git a/internal/pipeline/scheduler/queue.go b/internal/pipeline/scheduler/queue.go new file mode 100644 index 0000000000..bf31ccc027 --- /dev/null +++ b/internal/pipeline/scheduler/queue.go @@ -0,0 +1,305 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package scheduler + +import ( + "context" + "sync" + "time" + + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/lock" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +type queue struct { + sync.Mutex + globMx lock.Mutex + + ready chan struct{} + paused bool + interval time.Duration + throttle int + store store.StageStore + workers map[*worker]struct{} + ctx context.Context +} + +// newQueue returns a new Queue backed by the build datastore. +func newQueue(store store.StageStore, lock lock.MutexManager) (*queue, error) { + const lockKey = "build_queue" + mx, err := lock.NewMutex(lockKey) + if err != nil { + return nil, err + } + q := &queue{ + store: store, + globMx: mx, + ready: make(chan struct{}, 1), + workers: map[*worker]struct{}{}, + interval: time.Minute, + ctx: context.Background(), + } + go q.start() + return q, nil +} + +func (q *queue) Schedule(ctx context.Context, stage *types.Stage) error { + select { + case q.ready <- struct{}{}: + default: + } + return nil +} + +func (q *queue) Pause(ctx context.Context) error { + q.Lock() + q.paused = true + q.Unlock() + return nil +} + +func (q *queue) Request(ctx context.Context, params Filter) (*types.Stage, error) { + w := &worker{ + kind: params.Kind, + typ: params.Type, + os: params.OS, + arch: params.Arch, + kernel: params.Kernel, + variant: params.Variant, + labels: params.Labels, + channel: make(chan *types.Stage), + } + q.Lock() + q.workers[w] = struct{}{} + q.Unlock() + + select { + case q.ready <- struct{}{}: + default: + } + + select { + case <-ctx.Done(): + q.Lock() + delete(q.workers, w) + q.Unlock() + return nil, ctx.Err() + case b := <-w.channel: + return b, nil + } +} + +func (q *queue) signal(ctx context.Context) error { + if err := q.globMx.Lock(ctx); err != nil { + return err + } + defer q.globMx.Unlock(ctx) + + q.Lock() + count := len(q.workers) + pause := q.paused + q.Unlock() + if pause { + return nil + } + if count == 0 { + return nil + } + items, err := q.store.ListIncomplete(ctx) + if err != nil { + return err + } + + q.Lock() + defer q.Unlock() + for _, item := range items { + if item.Status == enum.CIStatusRunning { + continue + } + if item.Machine != "" { + continue + } + + // if the stage defines concurrency limits we + // need to make sure those limits are not exceeded + // before proceeding. + if withinLimits(item, items) == false { + continue + } + + // if the system defines concurrency limits + // per repository we need to make sure those limits + // are not exceeded before proceeding. 
+ if shouldThrottle(item, items, item.LimitRepo) == true { + continue + } + + loop: + for w := range q.workers { + // the worker must match the resource kind and type + if !matchResource(w.kind, w.typ, item.Kind, item.Type) { + continue + } + + if w.os != "" || w.arch != "" || w.variant != "" || w.kernel != "" { + // the worker is platform-specific. check to ensure + // the queue item matches the worker platform. + if w.os != item.OS { + continue + } + if w.arch != item.Arch { + continue + } + // if the pipeline defines a variant it must match + // the worker variant (e.g. arm6, arm7, etc). + if item.Variant != "" && item.Variant != w.variant { + continue + } + // if the pipeline defines a kernel version it must match + // the worker kernel version (e.g. 1709, 1803). + if item.Kernel != "" && item.Kernel != w.kernel { + continue + } + } + + if len(item.Labels) > 0 || len(w.labels) > 0 { + if !checkLabels(item.Labels, w.labels) { + continue + } + } + + select { + case w.channel <- item: + delete(q.workers, w) + break loop + } + } + } + return nil +} + +func (q *queue) start() error { + for { + select { + case <-q.ctx.Done(): + return q.ctx.Err() + case <-q.ready: + q.signal(q.ctx) + case <-time.After(q.interval): + q.signal(q.ctx) + } + } +} + +type worker struct { + kind string + typ string + os string + arch string + kernel string + variant string + labels map[string]string + channel chan *types.Stage +} + +type counter struct { + counts map[string]int +} + +func checkLabels(a, b map[string]string) bool { + if len(a) != len(b) { + return false + } + for k, v := range a { + if w, ok := b[k]; !ok || v != w { + return false + } + } + return true +} + +func withinLimits(stage *types.Stage, siblings []*types.Stage) bool { + if stage.Limit == 0 { + return true + } + count := 0 + for _, sibling := range siblings { + if sibling.RepoID != stage.RepoID { + continue + } + if sibling.ID == stage.ID { + continue + } + if sibling.Name != stage.Name { + continue + } + if sibling.ID < stage.ID || + sibling.Status == enum.CIStatusRunning { + count++ + } + } + return count < stage.Limit +} + +func shouldThrottle(stage *types.Stage, siblings []*types.Stage, limit int) bool { + // if no throttle limit is defined (default) then + // return false to indicate no throttling is needed. + if limit == 0 { + return false + } + // if the repository is running it is too late + // to skip and we can exit + if stage.Status == enum.CIStatusRunning { + return false + } + + count := 0 + // loop through running stages to count number of + // running stages for the parent repository. + for _, sibling := range siblings { + // ignore stages from other repository. + if sibling.RepoID != stage.RepoID { + continue + } + // ignore this stage and stages that were + // scheduled after this stage. + if sibling.ID >= stage.ID { + continue + } + count++ + } + // if the count of running stages exceeds the + // throttle limit return true. 
+	return count >= limit
+}
+
+// matchResource is a helper function that returns true if the kind and type
+// of two resources match, treating an empty kind as "pipeline" and an empty
+// type as "docker".
+func matchResource(kinda, typea, kindb, typeb string) bool {
+	if kinda == "" {
+		kinda = "pipeline"
+	}
+	if kindb == "" {
+		kindb = "pipeline"
+	}
+	if typea == "" {
+		typea = "docker"
+	}
+	if typeb == "" {
+		typeb = "docker"
+	}
+	return kinda == kindb && typea == typeb
+}
diff --git a/internal/pipeline/scheduler/scheduler.go b/internal/pipeline/scheduler/scheduler.go
new file mode 100644
index 0000000000..fb8cc84781
--- /dev/null
+++ b/internal/pipeline/scheduler/scheduler.go
@@ -0,0 +1,69 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package scheduler
+
+import (
+	"context"
+
+	"github.com/harness/gitness/internal/store"
+	"github.com/harness/gitness/lock"
+	"github.com/harness/gitness/types"
+)
+
+// Filter provides filter criteria to limit stages requested
+// from the scheduler.
+type Filter struct {
+	Kind    string
+	Type    string
+	OS      string
+	Arch    string
+	Kernel  string
+	Variant string
+	Labels  map[string]string
+}
+
+// Scheduler schedules build stages for execution.
+type Scheduler interface {
+	// Schedule schedules the stage for execution.
+	Schedule(ctx context.Context, stage *types.Stage) error
+
+	// Request requests the next stage scheduled for execution.
+	Request(ctx context.Context, filter Filter) (*types.Stage, error)
+
+	// Cancel cancels scheduled or running jobs associated
+	// with the parent build ID.
+	Cancel(context.Context, int64) error
+
+	// Cancelled blocks and listens for a cancellation event and
+	// returns true if the build has been cancelled.
+	Cancelled(context.Context, int64) (bool, error)
+}
+
+type scheduler struct {
+	*queue
+	*canceler
+}
+
+// newScheduler provides an instance of a scheduler with cancel abilities.
+func newScheduler(stageStore store.StageStore, lock lock.MutexManager) (Scheduler, error) {
+	q, err := newQueue(stageStore, lock)
+	if err != nil {
+		return nil, err
+	}
+	return scheduler{
+		q,
+		newCanceler(),
+	}, nil
+}
diff --git a/internal/pipeline/scheduler/wire.go b/internal/pipeline/scheduler/wire.go
new file mode 100644
index 0000000000..bb9f03568c
--- /dev/null
+++ b/internal/pipeline/scheduler/wire.go
@@ -0,0 +1,35 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
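To make the defaulting in the matchResource helper (end of queue.go above) concrete, here is a small self-contained mirror of it together with two example calls. This is purely illustrative; matchKindType is a placeholder name and the real helper lives in queue.go:

package main

import "fmt"

// matchKindType mirrors matchResource in queue.go: an empty kind is treated
// as "pipeline" and an empty type as "docker" before comparing.
func matchKindType(kinda, typea, kindb, typeb string) bool {
	if kinda == "" {
		kinda = "pipeline"
	}
	if kindb == "" {
		kindb = "pipeline"
	}
	if typea == "" {
		typea = "docker"
	}
	if typeb == "" {
		typeb = "docker"
	}
	return kinda == kindb && typea == typeb
}

func main() {
	fmt.Println(matchKindType("", "", "pipeline", "docker"))             // true: defaults fill the blanks
	fmt.Println(matchKindType("pipeline", "docker", "pipeline", "exec")) // false: type differs
}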
+
+package scheduler
+
+import (
+	"github.com/harness/gitness/internal/store"
+	"github.com/harness/gitness/lock"
+
+	"github.com/google/wire"
+)
+
+// WireSet provides a wire set for this package.
+var WireSet = wire.NewSet(
+	ProvideScheduler,
+)
+
+// ProvideScheduler provides a scheduler which can be used to schedule and request builds.
+func ProvideScheduler(
+	stageStore store.StageStore,
+	lock lock.MutexManager,
+) (Scheduler, error) {
+	return newScheduler(stageStore, lock)
+}
diff --git a/internal/pipeline/triggerer/dag/dag.go b/internal/pipeline/triggerer/dag/dag.go
new file mode 100644
index 0000000000..68304f697f
--- /dev/null
+++ b/internal/pipeline/triggerer/dag/dag.go
@@ -0,0 +1,147 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package dag
+
+// Dag is a directed acyclic graph.
+type Dag struct {
+	graph map[string]*Vertex
+}
+
+// Vertex is a vertex in the graph.
+type Vertex struct {
+	Name  string
+	Skip  bool
+	graph []string
+}
+
+// New creates a new directed acyclic graph (dag) that can
+// determine if a stage has dependencies.
+func New() *Dag {
+	return &Dag{
+		graph: make(map[string]*Vertex),
+	}
+}
+
+// Add establishes a dependency between two vertices in the graph.
+func (d *Dag) Add(from string, to ...string) *Vertex {
+	vertex := new(Vertex)
+	vertex.Name = from
+	vertex.Skip = false
+	vertex.graph = to
+	d.graph[from] = vertex
+	return vertex
+}
+
+// Get returns the vertex from the graph.
+func (d *Dag) Get(name string) (*Vertex, bool) {
+	vertex, ok := d.graph[name]
+	return vertex, ok
+}
+
+// Dependencies returns the direct dependencies accounting for
+// skipped dependencies.
+func (d *Dag) Dependencies(name string) []string {
+	vertex := d.graph[name]
+	return d.dependencies(vertex)
+}
+
+// Ancestors returns the ancestors of the vertex.
+func (d *Dag) Ancestors(name string) []*Vertex {
+	vertex := d.graph[name]
+	return d.ancestors(vertex)
+}
+
+// DetectCycles returns true if cycles are detected in the graph.
+func (d *Dag) DetectCycles() bool {
+	visited := make(map[string]bool)
+	recStack := make(map[string]bool)
+
+	for vertex := range d.graph {
+		if !visited[vertex] {
+			if d.detectCycles(vertex, visited, recStack) {
+				return true
+			}
+		}
+	}
+	return false
+}
+
+// helper function returns the list of ancestors for the vertex.
+func (d *Dag) ancestors(parent *Vertex) []*Vertex {
+	if parent == nil {
+		return nil
+	}
+	var combined []*Vertex
+	for _, name := range parent.graph {
+		vertex, found := d.graph[name]
+		if !found {
+			continue
+		}
+		if !vertex.Skip {
+			combined = append(combined, vertex)
+		}
+		combined = append(combined, d.ancestors(vertex)...)
+	}
+	return combined
+}
+
+// helper function returns the list of dependencies for the
+// vertex, taking into account skipped dependencies.
+func (d *Dag) dependencies(parent *Vertex) []string { + if parent == nil { + return nil + } + var combined []string + for _, name := range parent.graph { + vertex, found := d.graph[name] + if !found { + continue + } + if vertex.Skip { + // if the vertex is skipped we should move up the + // graph and check direct ancestors. + combined = append(combined, d.dependencies(vertex)...) + } else { + combined = append(combined, vertex.Name) + } + } + return combined +} + +// helper function returns true if the vertex is cyclical. +func (d *Dag) detectCycles(name string, visited, recStack map[string]bool) bool { + visited[name] = true + recStack[name] = true + + vertex, ok := d.graph[name] + if !ok { + return false + } + for _, v := range vertex.graph { + // only check cycles on a vertex one time + if !visited[v] { + if d.detectCycles(v, visited, recStack) { + return true + } + // if we've visited this vertex in this recursion + // stack, then we have a cycle + } else if recStack[v] { + return true + } + + } + recStack[name] = false + return false +} diff --git a/internal/pipeline/triggerer/dag/dag_test.go b/internal/pipeline/triggerer/dag/dag_test.go new file mode 100644 index 0000000000..4c1c04c0d6 --- /dev/null +++ b/internal/pipeline/triggerer/dag/dag_test.go @@ -0,0 +1,219 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
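Before the tests, a quick usage sketch of the dag package defined above. The stage names are made up, and the expected outputs in the comments assume the Dependencies and DetectCycles behavior shown in dag.go:

package main

import (
	"fmt"

	"github.com/harness/gitness/internal/pipeline/triggerer/dag"
)

func main() {
	d := dag.New()
	d.Add("backend")
	d.Add("frontend")
	d.Add("publish", "backend", "frontend") // publish depends on both
	d.Add("deploy", "publish")

	fmt.Println(d.DetectCycles())          // false: no cycles in this graph
	fmt.Println(d.Dependencies("publish")) // [backend frontend]

	// A skipped vertex is transparent: its dependents inherit its parents.
	if v, ok := d.Get("publish"); ok {
		v.Skip = true
	}
	fmt.Println(d.Dependencies("deploy")) // [backend frontend]
}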
+ +package dag + +import ( + "reflect" + "testing" +) + +func TestDag(t *testing.T) { + dag := New() + dag.Add("backend") + dag.Add("frontend") + dag.Add("notify", "backend", "frontend") + if dag.DetectCycles() { + t.Errorf("cycles detected") + } + + dag = New() + dag.Add("notify", "backend", "frontend") + if dag.DetectCycles() { + t.Errorf("cycles detected") + } + + dag = New() + dag.Add("backend", "frontend") + dag.Add("frontend", "backend") + dag.Add("notify", "backend", "frontend") + if dag.DetectCycles() == false { + t.Errorf("Expect cycles detected") + } + + dag = New() + dag.Add("backend", "backend") + dag.Add("frontend", "backend") + dag.Add("notify", "backend", "frontend") + if dag.DetectCycles() == false { + t.Errorf("Expect cycles detected") + } + + dag = New() + dag.Add("backend") + dag.Add("frontend") + dag.Add("notify", "backend", "frontend", "notify") + if dag.DetectCycles() == false { + t.Errorf("Expect cycles detected") + } +} + +func TestAncestors(t *testing.T) { + dag := New() + v := dag.Add("backend") + dag.Add("frontend", "backend") + dag.Add("notify", "frontend") + + ancestors := dag.Ancestors("frontend") + if got, want := len(ancestors), 1; got != want { + t.Errorf("Want %d ancestors, got %d", want, got) + } + if ancestors[0] != v { + t.Errorf("Unexpected ancestor") + } + + if v := dag.Ancestors("backend"); len(v) != 0 { + t.Errorf("Expect vertexes with no dependencies has zero ancestors") + } +} + +func TestAncestors_Skipped(t *testing.T) { + dag := New() + dag.Add("backend").Skip = true + dag.Add("frontend", "backend").Skip = true + dag.Add("notify", "frontend") + + if v := dag.Ancestors("frontend"); len(v) != 0 { + t.Errorf("Expect skipped vertexes excluded") + } + if v := dag.Ancestors("notify"); len(v) != 0 { + t.Errorf("Expect skipped vertexes excluded") + } +} + +func TestAncestors_NotFound(t *testing.T) { + dag := New() + dag.Add("backend") + dag.Add("frontend", "backend") + dag.Add("notify", "frontend") + if dag.DetectCycles() { + t.Errorf("cycles detected") + } + if v := dag.Ancestors("does-not-exist"); len(v) != 0 { + t.Errorf("Expect vertex not found does not panic") + } +} + +func TestAncestors_Malformed(t *testing.T) { + dag := New() + dag.Add("backend") + dag.Add("frontend", "does-not-exist") + dag.Add("notify", "frontend") + if dag.DetectCycles() { + t.Errorf("cycles detected") + } + if v := dag.Ancestors("frontend"); len(v) != 0 { + t.Errorf("Expect invalid dependency does not panic") + } +} + +func TestAncestors_Complex(t *testing.T) { + dag := New() + dag.Add("backend") + dag.Add("frontend") + dag.Add("publish", "backend", "frontend") + dag.Add("deploy", "publish") + last := dag.Add("notify", "deploy") + if dag.DetectCycles() { + t.Errorf("cycles detected") + } + + ancestors := dag.Ancestors("notify") + if got, want := len(ancestors), 4; got != want { + t.Errorf("Want %d ancestors, got %d", want, got) + return + } + for _, ancestor := range ancestors { + if ancestor == last { + t.Errorf("Unexpected ancestor") + } + } + + v, _ := dag.Get("publish") + v.Skip = true + ancestors = dag.Ancestors("notify") + if got, want := len(ancestors), 3; got != want { + t.Errorf("Want %d ancestors, got %d", want, got) + return + } +} + +func TestDependencies(t *testing.T) { + dag := New() + dag.Add("backend") + dag.Add("frontend") + dag.Add("publish", "backend", "frontend") + + if deps := dag.Dependencies("backend"); len(deps) != 0 { + t.Errorf("Expect zero dependencies") + } + if deps := dag.Dependencies("frontend"); len(deps) != 0 { + t.Errorf("Expect zero 
dependencies") + } + + got, want := dag.Dependencies("publish"), []string{"backend", "frontend"} + if !reflect.DeepEqual(got, want) { + t.Errorf("Unexpected dependencies, got %v", got) + } +} + +func TestDependencies_Skipped(t *testing.T) { + dag := New() + dag.Add("backend") + dag.Add("frontend").Skip = true + dag.Add("publish", "backend", "frontend") + + if deps := dag.Dependencies("backend"); len(deps) != 0 { + t.Errorf("Expect zero dependencies") + } + if deps := dag.Dependencies("frontend"); len(deps) != 0 { + t.Errorf("Expect zero dependencies") + } + + got, want := dag.Dependencies("publish"), []string{"backend"} + if !reflect.DeepEqual(got, want) { + t.Errorf("Unexpected dependencies, got %v", got) + } +} + +func TestDependencies_Complex(t *testing.T) { + dag := New() + dag.Add("clone") + dag.Add("backend") + dag.Add("frontend", "backend").Skip = true + dag.Add("publish", "frontend", "clone") + dag.Add("notify", "publish") + + if deps := dag.Dependencies("clone"); len(deps) != 0 { + t.Errorf("Expect zero dependencies for clone") + } + if deps := dag.Dependencies("backend"); len(deps) != 0 { + t.Errorf("Expect zero dependencies for backend") + } + + got, want := dag.Dependencies("frontend"), []string{"backend"} + if !reflect.DeepEqual(got, want) { + t.Errorf("Unexpected dependencies for frontend, got %v", got) + } + + got, want = dag.Dependencies("publish"), []string{"backend", "clone"} + if !reflect.DeepEqual(got, want) { + t.Errorf("Unexpected dependencies for publish, got %v", got) + } + + got, want = dag.Dependencies("notify"), []string{"publish"} + if !reflect.DeepEqual(got, want) { + t.Errorf("Unexpected dependencies for notify, got %v", got) + } +} diff --git a/internal/pipeline/triggerer/skip.go b/internal/pipeline/triggerer/skip.go new file mode 100644 index 0000000000..895e4f0ac1 --- /dev/null +++ b/internal/pipeline/triggerer/skip.go @@ -0,0 +1,65 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package triggerer + +import ( + "strings" + + "github.com/drone/drone-yaml/yaml" +) + +func skipBranch(document *yaml.Pipeline, branch string) bool { + return !document.Trigger.Branch.Match(branch) +} + +func skipRef(document *yaml.Pipeline, ref string) bool { + return !document.Trigger.Ref.Match(ref) +} + +func skipEvent(document *yaml.Pipeline, event string) bool { + return !document.Trigger.Event.Match(event) +} + +func skipAction(document *yaml.Pipeline, action string) bool { + return !document.Trigger.Action.Match(action) +} + +func skipInstance(document *yaml.Pipeline, instance string) bool { + return !document.Trigger.Instance.Match(instance) +} + +func skipTarget(document *yaml.Pipeline, env string) bool { + return !document.Trigger.Target.Match(env) +} + +func skipRepo(document *yaml.Pipeline, repo string) bool { + return !document.Trigger.Repo.Match(repo) +} + +func skipCron(document *yaml.Pipeline, cron string) bool { + return !document.Trigger.Cron.Match(cron) +} + +func skipMessageEval(str string) bool { + lower := strings.ToLower(str) + switch { + case strings.Contains(lower, "[ci skip]"), + strings.Contains(lower, "[skip ci]"), + strings.Contains(lower, "***no_ci***"): + return true + default: + return false + } +} diff --git a/internal/pipeline/triggerer/trigger.go b/internal/pipeline/triggerer/trigger.go new file mode 100644 index 0000000000..d605300115 --- /dev/null +++ b/internal/pipeline/triggerer/trigger.go @@ -0,0 +1,530 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package triggerer + +import ( + "context" + "fmt" + "regexp" + "runtime/debug" + "time" + + "github.com/harness/gitness/internal/pipeline/checks" + "github.com/harness/gitness/internal/pipeline/file" + "github.com/harness/gitness/internal/pipeline/manager" + "github.com/harness/gitness/internal/pipeline/scheduler" + "github.com/harness/gitness/internal/pipeline/triggerer/dag" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/store/database/dbtx" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/drone-runners/drone-runner-docker/engine2/inputs" + "github.com/drone-runners/drone-runner-docker/engine2/script" + "github.com/drone/drone-yaml/yaml" + "github.com/drone/drone-yaml/yaml/linter" + v1yaml "github.com/drone/spec/dist/go" + "github.com/drone/spec/dist/go/parse/normalize" + "github.com/jmoiron/sqlx" + "github.com/rs/zerolog/log" +) + +var _ Triggerer = (*triggerer)(nil) + +// Hook represents the payload of a post-commit hook. 
+type Hook struct { + Parent int64 `json:"parent"` + Trigger string `json:"trigger"` + TriggeredBy int64 `json:"triggered_by"` + Action enum.TriggerAction `json:"action"` + Link string `json:"link"` + Timestamp int64 `json:"timestamp"` + Title string `json:"title"` + Message string `json:"message"` + Before string `json:"before"` + After string `json:"after"` + Ref string `json:"ref"` + Fork string `json:"fork"` + Source string `json:"source"` + Target string `json:"target"` + AuthorLogin string `json:"author_login"` + AuthorName string `json:"author_name"` + AuthorEmail string `json:"author_email"` + AuthorAvatar string `json:"author_avatar"` + Debug bool `json:"debug"` + Cron string `json:"cron"` + Sender string `json:"sender"` + Params map[string]string `json:"params"` +} + +// Triggerer is responsible for triggering a Execution from an +// incoming hook (could be manual or webhook). If an execution is skipped a nil value is +// returned. +type Triggerer interface { + Trigger(ctx context.Context, pipeline *types.Pipeline, hook *Hook) (*types.Execution, error) +} + +type triggerer struct { + executionStore store.ExecutionStore + checkStore store.CheckStore + stageStore store.StageStore + db *sqlx.DB + pipelineStore store.PipelineStore + fileService file.FileService + scheduler scheduler.Scheduler + repoStore store.RepoStore +} + +func New( + executionStore store.ExecutionStore, + checkStore store.CheckStore, + stageStore store.StageStore, + pipelineStore store.PipelineStore, + db *sqlx.DB, + repoStore store.RepoStore, + scheduler scheduler.Scheduler, + fileService file.FileService, +) *triggerer { + return &triggerer{ + executionStore: executionStore, + checkStore: checkStore, + stageStore: stageStore, + scheduler: scheduler, + db: db, + pipelineStore: pipelineStore, + fileService: fileService, + repoStore: repoStore, + } +} + +func (t *triggerer) Trigger( + ctx context.Context, + pipeline *types.Pipeline, + base *Hook, +) (*types.Execution, error) { + log := log.With(). + Int64("pipeline.id", pipeline.ID). + Str("trigger.ref", base.Ref). + Str("trigger.commit", base.After). + Logger() + + log.Debug().Msg("trigger: received") + defer func() { + // taking the paranoid approach to recover from + // a panic that should absolutely never happen. 
+ if r := recover(); r != nil { + log.Error().Msgf("runner: unexpected panic: %s", r) + debug.PrintStack() + } + }() + + event := string(base.Action.GetTriggerEvent()) + + repo, err := t.repoStore.Find(ctx, pipeline.RepoID) + if err != nil { + log.Error().Err(err).Msg("could not find repo") + return nil, err + } + + file, err := t.fileService.Get(ctx, repo, pipeline.ConfigPath, base.After) + if err != nil { + log.Error().Err(err).Msg("trigger: could not find yaml") + return nil, err + } + + now := time.Now().UnixMilli() + execution := &types.Execution{ + RepoID: repo.ID, + PipelineID: pipeline.ID, + Trigger: base.Trigger, + CreatedBy: base.TriggeredBy, + Parent: base.Parent, + Status: enum.CIStatusPending, + Event: event, + Action: string(base.Action), + Link: base.Link, + // Timestamp: base.Timestamp, + Title: trunc(base.Title, 2000), + Message: trunc(base.Message, 2000), + Before: base.Before, + After: base.After, + Ref: base.Ref, + Fork: base.Fork, + Source: base.Source, + Target: base.Target, + Author: base.AuthorLogin, + AuthorName: base.AuthorName, + AuthorEmail: base.AuthorEmail, + AuthorAvatar: base.AuthorAvatar, + Params: base.Params, + Debug: base.Debug, + Sender: base.Sender, + Cron: base.Cron, + Created: now, + Updated: now, + } + + // For drone, follow the existing path of calculating dependencies, creating a DAG, + // and creating stages accordingly. For V1 YAML - for now we can just parse the stages + // and create them sequentially. + stages := []*types.Stage{} + if !isV1Yaml(file.Data) { + manifest, err := yaml.ParseString(string(file.Data)) + if err != nil { + log.Warn().Err(err).Msg("trigger: cannot parse yaml") + return t.createExecutionWithError(ctx, pipeline, base, err.Error()) + } + + err = linter.Manifest(manifest, true) + if err != nil { + log.Warn().Err(err).Msg("trigger: yaml linting error") + return t.createExecutionWithError(ctx, pipeline, base, err.Error()) + } + + var matched []*yaml.Pipeline + var dag = dag.New() + for _, document := range manifest.Resources { + pipeline, ok := document.(*yaml.Pipeline) + if !ok { + continue + } + // TODO add repo + // TODO add instance + // TODO add target + // TODO add ref + name := pipeline.Name + if name == "" { + name = "default" + } + node := dag.Add(pipeline.Name, pipeline.DependsOn...) 
+ node.Skip = true + + if skipBranch(pipeline, base.Target) { + log.Info().Str("pipeline", pipeline.Name).Msg("trigger: skipping pipeline, does not match branch") + } else if skipEvent(pipeline, event) { + log.Info().Str("pipeline", pipeline.Name).Msg("trigger: skipping pipeline, does not match event") + } else if skipAction(pipeline, string(base.Action)) { + log.Info().Str("pipeline", pipeline.Name).Msg("trigger: skipping pipeline, does not match action") + } else if skipRef(pipeline, base.Ref) { + log.Info().Str("pipeline", pipeline.Name).Msg("trigger: skipping pipeline, does not match ref") + } else if skipRepo(pipeline, repo.Path) { + log.Info().Str("pipeline", pipeline.Name).Msg("trigger: skipping pipeline, does not match repo") + } else if skipCron(pipeline, base.Cron) { + log.Info().Str("pipeline", pipeline.Name).Msg("trigger: skipping pipeline, does not match cron job") + } else { + matched = append(matched, pipeline) + node.Skip = false + } + } + + if dag.DetectCycles() { + return t.createExecutionWithError(ctx, pipeline, base, "Error: Dependency cycle detected in Pipeline") + } + + if len(matched) == 0 { + log.Info().Msg("trigger: skipping execution, no matching pipelines") + return nil, nil + } + + for i, match := range matched { + onSuccess := match.Trigger.Status.Match(string(enum.CIStatusSuccess)) + onFailure := match.Trigger.Status.Match(string(enum.CIStatusFailure)) + if len(match.Trigger.Status.Include)+len(match.Trigger.Status.Exclude) == 0 { + onFailure = false + } + + now := time.Now().UnixMilli() + + stage := &types.Stage{ + RepoID: repo.ID, + Number: int64(i + 1), + Name: match.Name, + Kind: match.Kind, + Type: match.Type, + OS: match.Platform.OS, + Arch: match.Platform.Arch, + Variant: match.Platform.Variant, + Kernel: match.Platform.Version, + Limit: match.Concurrency.Limit, + Status: enum.CIStatusWaitingOnDeps, + DependsOn: match.DependsOn, + OnSuccess: onSuccess, + OnFailure: onFailure, + Labels: match.Node, + Created: now, + Updated: now, + } + if stage.Kind == "pipeline" && stage.Type == "" { + stage.Type = "docker" + } + if stage.OS == "" { + stage.OS = "linux" + } + if stage.Arch == "" { + stage.Arch = "amd64" + } + + if stage.Name == "" { + stage.Name = "default" + } + if len(stage.DependsOn) == 0 { + stage.Status = enum.CIStatusPending + } + stages = append(stages, stage) + } + + for _, stage := range stages { + // here we re-work the dependencies for the stage to + // account for the fact that some steps may be skipped + // and may otherwise break the dependency chain. + stage.DependsOn = dag.Dependencies(stage.Name) + + // if the stage is pending dependencies, but those + // dependencies are skipped, the stage can be executed + // immediately. + if stage.Status == enum.CIStatusWaitingOnDeps && + len(stage.DependsOn) == 0 { + stage.Status = enum.CIStatusPending + } + } + } else { + stages, err = parseV1Stages(file.Data, repo, execution) + if err != nil { + return nil, fmt.Errorf("could not parse v1 YAML into stages: %w", err) + } + } + + // Increment pipeline number using optimistic locking. + pipeline, err = t.pipelineStore.IncrementSeqNum(ctx, pipeline) + if err != nil { + log.Error().Err(err).Msg("trigger: cannot increment execution sequence number") + return nil, err + } + // TODO: this can be made better. We are setting this later since otherwise any parsing failure + // would lead to an incremented pipeline sequence number. 
+ execution.Number = pipeline.Seq + + err = t.createExecutionWithStages(ctx, execution, stages) + if err != nil { + log.Error().Err(err).Msg("trigger: cannot create execution") + return nil, err + } + + // try to write to check store. log on failure but don't error out the execution + err = checks.Write(ctx, t.checkStore, execution, pipeline) + if err != nil { + log.Error().Err(err).Msg("trigger: could not write to check store") + } + + for _, stage := range stages { + if stage.Status != enum.CIStatusPending { + continue + } + err = t.scheduler.Schedule(ctx, stage) + if err != nil { + log.Error().Err(err).Msg("trigger: cannot enqueue execution") + return nil, err + } + } + + return execution, nil +} + +func trunc(s string, i int) string { + runes := []rune(s) + if len(runes) > i { + return string(runes[:i]) + } + return s +} + +// parseV1Stages tries to parse the yaml into a list of stages and returns an error +// if we are unable to do so or the yaml contains something unexpected. +// Currently, all the stages will be executed one after the other on completion. +// Once we have depends on in v1, this will be changed to use the DAG. +func parseV1Stages(data []byte, repo *types.Repository, execution *types.Execution) ([]*types.Stage, error) { + stages := []*types.Stage{} + // For V1 YAML, just go through the YAML and create stages serially for now + config, err := v1yaml.ParseBytes(data) + if err != nil { + return nil, fmt.Errorf("could not parse v1 yaml: %w", err) + } + + // Normalize the config to make sure stage names and step names are unique + err = normalize.Normalize(config) + if err != nil { + return nil, fmt.Errorf("could not normalize v1 yaml: %w", err) + } + + if config.Kind != "pipeline" { + return nil, fmt.Errorf("cannot support non-pipeline kinds in v1 at the moment: %w", err) + } + + inputParams := map[string]interface{}{} + inputParams["repo"] = inputs.Repo(manager.ConvertToDroneRepo(repo)) + inputParams["build"] = inputs.Build(manager.ConvertToDroneBuild(execution)) + + var prevStage string + + switch v := config.Spec.(type) { + case *v1yaml.Pipeline: + // Expand expressions in strings and matrices + script.ExpandConfig(config, inputParams) + + for idx, stage := range v.Stages { + // Only parse CI stages for now + switch stage.Spec.(type) { + case *v1yaml.StageCI: + now := time.Now().UnixMilli() + var onSuccess, onFailure bool + onSuccess = true + if stage.When != nil { + if when := stage.When.Eval; when != "" { + // TODO: pass in params for resolution + onSuccess, onFailure, err = script.EvalWhen(when, inputParams) + if err != nil { + return nil, fmt.Errorf("could not resolve when condition for stage: %w", err) + } + } + } + + dependsOn := []string{} + if prevStage != "" { + dependsOn = append(dependsOn, prevStage) + } + status := enum.CIStatusWaitingOnDeps + // If the stage has no dependencies, it can be picked up for execution. 
+ if len(dependsOn) == 0 { + status = enum.CIStatusPending + } + temp := &types.Stage{ + RepoID: repo.ID, + Number: int64(idx + 1), + Name: stage.Id, // for v1, ID is the unique identifier per stage + Created: now, + Updated: now, + Status: status, + OnSuccess: onSuccess, + OnFailure: onFailure, + DependsOn: dependsOn, + } + prevStage = temp.Name + stages = append(stages, temp) + default: + return nil, fmt.Errorf("only CI stage supported in v1 at the moment") + } + + } + default: + return nil, fmt.Errorf("unknown yaml: %w", err) + } + return stages, nil +} + +// Checks whether YAML is V1 Yaml or drone Yaml +func isV1Yaml(data []byte) bool { + // if we are dealing with the legacy drone yaml, use + // the legacy drone engine. + return regexp.MustCompilePOSIX(`^spec:`).Match(data) +} + +// createExecutionWithStages writes an execution along with its stages in a single transaction. +func (t *triggerer) createExecutionWithStages( + ctx context.Context, + execution *types.Execution, + stages []*types.Stage, +) error { + return dbtx.New(t.db).WithTx(ctx, func(ctx context.Context) error { + err := t.executionStore.Create(ctx, execution) + if err != nil { + return err + } + + for _, stage := range stages { + stage.ExecutionID = execution.ID + err := t.stageStore.Create(ctx, stage) + if err != nil { + return err + } + } + return nil + }) +} + +// createExecutionWithError creates an execution with an error message. +func (t *triggerer) createExecutionWithError( + ctx context.Context, + pipeline *types.Pipeline, + base *Hook, + message string, +) (*types.Execution, error) { + log := log.With(). + Int64("pipeline.id", pipeline.ID). + Str("trigger.ref", base.Ref). + Str("trigger.commit", base.After). + Logger() + + pipeline, err := t.pipelineStore.IncrementSeqNum(ctx, pipeline) + if err != nil { + return nil, err + } + + now := time.Now().UnixMilli() + + execution := &types.Execution{ + RepoID: pipeline.RepoID, + PipelineID: pipeline.ID, + Number: pipeline.Seq, + Parent: base.Parent, + Status: enum.CIStatusError, + Error: message, + Event: string(base.Action.GetTriggerEvent()), + Action: string(base.Action), + Link: base.Link, + Title: base.Title, + Message: base.Message, + CreatedBy: base.TriggeredBy, + Before: base.Before, + After: base.After, + Ref: base.Ref, + Fork: base.Fork, + Source: base.Source, + Target: base.Target, + Author: base.AuthorLogin, + AuthorName: base.AuthorName, + AuthorEmail: base.AuthorEmail, + AuthorAvatar: base.AuthorAvatar, + Debug: base.Debug, + Sender: base.Sender, + Created: now, + Updated: now, + Started: now, + Finished: now, + } + + err = t.executionStore.Create(ctx, execution) + if err != nil { + log.Error().Err(err).Msg("trigger: cannot create execution error") + return nil, err + } + + // try to write to check store, log on failure + err = checks.Write(ctx, t.checkStore, execution, pipeline) + if err != nil { + log.Error().Err(err).Msg("trigger: failed to update check") + } + + return execution, nil +} diff --git a/internal/pipeline/triggerer/wire.go b/internal/pipeline/triggerer/wire.go new file mode 100644 index 0000000000..d05ad1ea5e --- /dev/null +++ b/internal/pipeline/triggerer/wire.go @@ -0,0 +1,44 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
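As an aside on isV1Yaml above: it decides between the drone path and the v1 path with a POSIX "^spec:" regexp, and since no multi-line flag is in play, only content whose very first bytes are "spec:" appears to be treated as v1. A small illustration (the YAML fragments are invented):

package main

import (
	"fmt"
	"regexp"
)

// mirrors the check in trigger.go: a POSIX regexp anchored at the start of the input.
var v1Marker = regexp.MustCompilePOSIX(`^spec:`)

func main() {
	fmt.Println(v1Marker.Match([]byte("spec:\n  stages: []\n")))         // true  -> handled by parseV1Stages
	fmt.Println(v1Marker.Match([]byte("kind: pipeline\nname: default"))) // false -> handled by the drone path
	fmt.Println(v1Marker.Match([]byte("version: 1\nspec:\n")))           // false -> the anchor is not multi-line here
}
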
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package triggerer
+
+import (
+	"github.com/harness/gitness/internal/pipeline/file"
+	"github.com/harness/gitness/internal/pipeline/scheduler"
+	"github.com/harness/gitness/internal/store"
+
+	"github.com/google/wire"
+	"github.com/jmoiron/sqlx"
+)
+
+// WireSet provides a wire set for this package.
+var WireSet = wire.NewSet(
+	ProvideTriggerer,
+)
+
+// ProvideTriggerer provides a triggerer which can execute builds.
+func ProvideTriggerer(
+	executionStore store.ExecutionStore,
+	checkStore store.CheckStore,
+	stageStore store.StageStore,
+	db *sqlx.DB,
+	pipelineStore store.PipelineStore,
+	fileService file.FileService,
+	scheduler scheduler.Scheduler,
+	repoStore store.RepoStore,
+) Triggerer {
+	return New(executionStore, checkStore, stageStore, pipelineStore,
+		db, repoStore, scheduler, fileService)
+}
diff --git a/internal/request/request.go b/internal/request/request.go
new file mode 100644
index 0000000000..d3551bc397
--- /dev/null
+++ b/internal/request/request.go
@@ -0,0 +1,73 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package request provides helpers for inspecting and manipulating
+// incoming http requests.
+package request
+
+import (
+	"fmt"
+	"net/http"
+	"net/url"
+)
+
+// ReplacePrefix replaces the prefix of the request's URL path.
+// IMPORTANT:
+//   - both prefixes are unescaped for Path, and used as-is for RawPath!
+//   - this must only be called by the top-level handler!
+func ReplacePrefix(r *http.Request, oldPrefix string, newPrefix string) error {
+	/*
+	 * According to official documentation, we can change anything in the request but the body:
+	 * https://pkg.go.dev/net/http#Handler
+	 *
+	 * ASSUMPTION:
+	 *   This is called by a top level handler (no router or middleware above it)
+	 *   Therefore, we don't have to worry about getting any routing metadata out of sync.
+ * + * This is different to returning a shallow clone with updated URL, which is what + * http.StripPrefix or earlier versions of request.WithContext are doing: + * https://cs.opensource.google/go/go/+/refs/tags/go1.19:src/net/http/server.go;l=2138 + * https://cs.opensource.google/go/go/+/refs/tags/go1.18:src/net/http/request.go;l=355 + * + * http.StripPrefix initially changed the path only, but that was updated because of official recommendations: + * https://github.com/golang/go/issues/18952 + */ + unOldPrefix, err := url.PathUnescape(oldPrefix) + if err != nil { + return fmt.Errorf("failed to unescape old prefix '%s'", oldPrefix) + } + unNewPrefix, err := url.PathUnescape(newPrefix) + if err != nil { + return fmt.Errorf("failed to unescape new prefix '%s'", newPrefix) + } + + unl := len(unOldPrefix) + if len(r.URL.Path) < unl || r.URL.Path[0:unl] != unOldPrefix { + return fmt.Errorf("path '%s' doesn't contain prefix '%s'", r.URL.Path, unOldPrefix) + } + + // only change RawPath if it exists + if r.URL.RawPath != "" { + l := len(oldPrefix) + if len(r.URL.RawPath) < l || r.URL.RawPath[0:l] != oldPrefix { + return fmt.Errorf("raw path '%s' doesn't contain prefix '%s'", r.URL.RawPath, oldPrefix) + } + + r.URL.RawPath = newPrefix + r.URL.RawPath[l:] + } + + r.URL.Path = unNewPrefix + r.URL.Path[unl:] + + return nil +} diff --git a/internal/router/api.go b/internal/router/api.go new file mode 100644 index 0000000000..500324cf0b --- /dev/null +++ b/internal/router/api.go @@ -0,0 +1,618 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
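A usage sketch for request.ReplacePrefix (not part of the patch; the paths are invented), showing how both Path and RawPath are rewritten when the URL contains escaped characters:

package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"

	"github.com/harness/gitness/internal/request"
)

func main() {
	// the escaped %2F keeps RawPath populated alongside Path.
	req := httptest.NewRequest(http.MethodGet, "/api/v1/repos/space%2Frepo", nil)

	if err := request.ReplacePrefix(req, "/api", ""); err != nil {
		panic(err)
	}

	fmt.Println(req.URL.Path)    // /v1/repos/space/repo
	fmt.Println(req.URL.RawPath) // /v1/repos/space%2Frepo
}
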
+ +package router + +import ( + "fmt" + "net/http" + + "github.com/harness/gitness/githook" + "github.com/harness/gitness/internal/api/controller/check" + "github.com/harness/gitness/internal/api/controller/connector" + "github.com/harness/gitness/internal/api/controller/execution" + controllergithook "github.com/harness/gitness/internal/api/controller/githook" + "github.com/harness/gitness/internal/api/controller/logs" + "github.com/harness/gitness/internal/api/controller/pipeline" + "github.com/harness/gitness/internal/api/controller/plugin" + "github.com/harness/gitness/internal/api/controller/principal" + "github.com/harness/gitness/internal/api/controller/pullreq" + "github.com/harness/gitness/internal/api/controller/repo" + "github.com/harness/gitness/internal/api/controller/secret" + "github.com/harness/gitness/internal/api/controller/serviceaccount" + "github.com/harness/gitness/internal/api/controller/space" + "github.com/harness/gitness/internal/api/controller/system" + "github.com/harness/gitness/internal/api/controller/template" + "github.com/harness/gitness/internal/api/controller/trigger" + "github.com/harness/gitness/internal/api/controller/user" + "github.com/harness/gitness/internal/api/controller/webhook" + "github.com/harness/gitness/internal/api/handler/account" + handlercheck "github.com/harness/gitness/internal/api/handler/check" + handlerconnector "github.com/harness/gitness/internal/api/handler/connector" + handlerexecution "github.com/harness/gitness/internal/api/handler/execution" + handlergithook "github.com/harness/gitness/internal/api/handler/githook" + handlerlogs "github.com/harness/gitness/internal/api/handler/logs" + handlerpipeline "github.com/harness/gitness/internal/api/handler/pipeline" + handlerplugin "github.com/harness/gitness/internal/api/handler/plugin" + handlerprincipal "github.com/harness/gitness/internal/api/handler/principal" + handlerpullreq "github.com/harness/gitness/internal/api/handler/pullreq" + handlerrepo "github.com/harness/gitness/internal/api/handler/repo" + "github.com/harness/gitness/internal/api/handler/resource" + handlersecret "github.com/harness/gitness/internal/api/handler/secret" + handlerserviceaccount "github.com/harness/gitness/internal/api/handler/serviceaccount" + handlerspace "github.com/harness/gitness/internal/api/handler/space" + handlersystem "github.com/harness/gitness/internal/api/handler/system" + handlertemplate "github.com/harness/gitness/internal/api/handler/template" + handlertrigger "github.com/harness/gitness/internal/api/handler/trigger" + handleruser "github.com/harness/gitness/internal/api/handler/user" + "github.com/harness/gitness/internal/api/handler/users" + handlerwebhook "github.com/harness/gitness/internal/api/handler/webhook" + "github.com/harness/gitness/internal/api/middleware/address" + middlewareauthn "github.com/harness/gitness/internal/api/middleware/authn" + "github.com/harness/gitness/internal/api/middleware/encode" + "github.com/harness/gitness/internal/api/middleware/logging" + middlewareprincipal "github.com/harness/gitness/internal/api/middleware/principal" + "github.com/harness/gitness/internal/api/request" + "github.com/harness/gitness/internal/auth/authn" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/go-chi/chi" + "github.com/go-chi/chi/middleware" + "github.com/go-chi/cors" + "github.com/rs/zerolog/hlog" +) + +// APIHandler is an abstraction of a http handler that handles API calls. 
+type APIHandler interface { + http.Handler +} + +var ( + // terminatedPathPrefixesAPI is the list of prefixes that will require resolving terminated paths. + terminatedPathPrefixesAPI = []string{"/v1/spaces/", "/v1/repos/", + "/v1/secrets/", "/v1/connectors", "/v1/templates"} +) + +// NewAPIHandler returns a new APIHandler. +func NewAPIHandler( + config *types.Config, + authenticator authn.Authenticator, + repoCtrl *repo.Controller, + executionCtrl *execution.Controller, + logCtrl *logs.Controller, + spaceCtrl *space.Controller, + pipelineCtrl *pipeline.Controller, + secretCtrl *secret.Controller, + triggerCtrl *trigger.Controller, + connectorCtrl *connector.Controller, + templateCtrl *template.Controller, + pluginCtrl *plugin.Controller, + pullreqCtrl *pullreq.Controller, + webhookCtrl *webhook.Controller, + githookCtrl *controllergithook.Controller, + saCtrl *serviceaccount.Controller, + userCtrl *user.Controller, + principalCtrl principal.Controller, + checkCtrl *check.Controller, + sysCtrl *system.Controller, +) APIHandler { + // Use go-chi router for inner routing. + r := chi.NewRouter() + + // Apply common api middleware. + r.Use(middleware.NoCache) + r.Use(middleware.Recoverer) + + // configure logging middleware. + r.Use(hlog.URLHandler("http.url")) + r.Use(hlog.MethodHandler("http.method")) + r.Use(logging.HLogRequestIDHandler()) + r.Use(logging.HLogAccessLogHandler()) + r.Use(address.Handler("", "")) + + // configure cors middleware + r.Use(corsHandler(config)) + + // for now always attempt auth - enforced per operation. + r.Use(middlewareauthn.Attempt(authenticator, authn.SourceRouterAPI)) + + r.Route("/v1", func(r chi.Router) { + setupRoutesV1(r, config, repoCtrl, executionCtrl, triggerCtrl, logCtrl, pipelineCtrl, + connectorCtrl, templateCtrl, pluginCtrl, secretCtrl, spaceCtrl, pullreqCtrl, + webhookCtrl, githookCtrl, saCtrl, userCtrl, principalCtrl, checkCtrl, sysCtrl) + }) + + // wrap router in terminatedPath encoder. 
+ return encode.TerminatedPathBefore(terminatedPathPrefixesAPI, r) +} + +func corsHandler(config *types.Config) func(http.Handler) http.Handler { + return cors.New( + cors.Options{ + AllowedOrigins: config.Cors.AllowedOrigins, + AllowedMethods: config.Cors.AllowedMethods, + AllowedHeaders: config.Cors.AllowedHeaders, + ExposedHeaders: config.Cors.ExposedHeaders, + AllowCredentials: config.Cors.AllowCredentials, + MaxAge: config.Cors.MaxAge, + }, + ).Handler +} + +func setupRoutesV1(r chi.Router, + config *types.Config, + repoCtrl *repo.Controller, + executionCtrl *execution.Controller, + triggerCtrl *trigger.Controller, + logCtrl *logs.Controller, + pipelineCtrl *pipeline.Controller, + connectorCtrl *connector.Controller, + templateCtrl *template.Controller, + pluginCtrl *plugin.Controller, + secretCtrl *secret.Controller, + spaceCtrl *space.Controller, + pullreqCtrl *pullreq.Controller, + webhookCtrl *webhook.Controller, + githookCtrl *controllergithook.Controller, + saCtrl *serviceaccount.Controller, + userCtrl *user.Controller, + principalCtrl principal.Controller, + checkCtrl *check.Controller, + sysCtrl *system.Controller, +) { + setupSpaces(r, spaceCtrl) + setupRepos(r, repoCtrl, pipelineCtrl, executionCtrl, triggerCtrl, logCtrl, pullreqCtrl, webhookCtrl, checkCtrl) + setupConnectors(r, connectorCtrl) + setupTemplates(r, templateCtrl) + setupSecrets(r, secretCtrl) + setupUser(r, userCtrl) + setupServiceAccounts(r, saCtrl) + setupPrincipals(r, principalCtrl) + setupInternal(r, githookCtrl) + setupAdmin(r, userCtrl) + setupAccount(r, userCtrl, sysCtrl, config) + setupSystem(r, sysCtrl) + setupResources(r) + setupPlugins(r, pluginCtrl) +} + +func setupSpaces(r chi.Router, spaceCtrl *space.Controller) { + r.Route("/spaces", func(r chi.Router) { + // Create takes path and parentId via body, not uri + r.Post("/", handlerspace.HandleCreate(spaceCtrl)) + r.Post("/import", handlerspace.HandleImport(spaceCtrl)) + + r.Route(fmt.Sprintf("/{%s}", request.PathParamSpaceRef), func(r chi.Router) { + // space operations + r.Get("/", handlerspace.HandleFind(spaceCtrl)) + r.Patch("/", handlerspace.HandleUpdate(spaceCtrl)) + r.Delete("/", handlerspace.HandleDelete(spaceCtrl)) + + r.Get("/events", handlerspace.HandleEvents(spaceCtrl)) + + r.Post("/move", handlerspace.HandleMove(spaceCtrl)) + r.Get("/spaces", handlerspace.HandleListSpaces(spaceCtrl)) + r.Get("/repos", handlerspace.HandleListRepos(spaceCtrl)) + r.Get("/service-accounts", handlerspace.HandleListServiceAccounts(spaceCtrl)) + r.Get("/secrets", handlerspace.HandleListSecrets(spaceCtrl)) + r.Get("/connectors", handlerspace.HandleListConnectors(spaceCtrl)) + r.Get("/templates", handlerspace.HandleListTemplates(spaceCtrl)) + r.Post("/export", handlerspace.HandleExport(spaceCtrl)) + r.Get("/export-progress", handlerspace.HandleExportProgress(spaceCtrl)) + + r.Route("/members", func(r chi.Router) { + r.Get("/", handlerspace.HandleMembershipList(spaceCtrl)) + r.Post("/", handlerspace.HandleMembershipAdd(spaceCtrl)) + r.Route(fmt.Sprintf("/{%s}", request.PathParamUserUID), func(r chi.Router) { + r.Delete("/", handlerspace.HandleMembershipDelete(spaceCtrl)) + r.Patch("/", handlerspace.HandleMembershipUpdate(spaceCtrl)) + }) + }) + }) + }) +} + +func setupRepos(r chi.Router, + repoCtrl *repo.Controller, + pipelineCtrl *pipeline.Controller, + executionCtrl *execution.Controller, + triggerCtrl *trigger.Controller, + logCtrl *logs.Controller, + pullreqCtrl *pullreq.Controller, + webhookCtrl *webhook.Controller, + checkCtrl *check.Controller, +) { + 
r.Route("/repos", func(r chi.Router) { + // Create takes path and parentId via body, not uri + r.Post("/", handlerrepo.HandleCreate(repoCtrl)) + r.Post("/import", handlerrepo.HandleImport(repoCtrl)) + r.Route(fmt.Sprintf("/{%s}", request.PathParamRepoRef), func(r chi.Router) { + // repo level operations + r.Get("/", handlerrepo.HandleFind(repoCtrl)) + r.Patch("/", handlerrepo.HandleUpdate(repoCtrl)) + r.Delete("/", handlerrepo.HandleDelete(repoCtrl)) + + r.Post("/move", handlerrepo.HandleMove(repoCtrl)) + r.Get("/service-accounts", handlerrepo.HandleListServiceAccounts(repoCtrl)) + + r.Get("/import-progress", handlerrepo.HandleImportProgress(repoCtrl)) + + // content operations + // NOTE: this allows /content and /content/ to both be valid (without any other tricks.) + // We don't expect there to be any other operations in that route (as that could overlap with file names) + r.Route("/content", func(r chi.Router) { + r.Get("/*", handlerrepo.HandleGetContent(repoCtrl)) + }) + + r.Post("/path-details", handlerrepo.HandlePathsDetails(repoCtrl)) + + r.Route("/blame", func(r chi.Router) { + r.Get("/*", handlerrepo.HandleBlame(repoCtrl)) + }) + + r.Route("/raw", func(r chi.Router) { + r.Get("/*", handlerrepo.HandleRaw(repoCtrl)) + }) + + // commit operations + r.Route("/commits", func(r chi.Router) { + r.Get("/", handlerrepo.HandleListCommits(repoCtrl)) + + r.Post("/calculate-divergence", handlerrepo.HandleCalculateCommitDivergence(repoCtrl)) + r.Post("/", handlerrepo.HandleCommitFiles(repoCtrl)) + + // per commit operations + r.Route(fmt.Sprintf("/{%s}", request.PathParamCommitSHA), func(r chi.Router) { + r.Get("/", handlerrepo.HandleGetCommit(repoCtrl)) + r.Get("/diff", handlerrepo.HandleCommitDiff(repoCtrl)) + }) + }) + + // branch operations + r.Route("/branches", func(r chi.Router) { + r.Get("/", handlerrepo.HandleListBranches(repoCtrl)) + r.Post("/", handlerrepo.HandleCreateBranch(repoCtrl)) + + // per branch operations (can't be grouped in single route) + r.Get("/*", handlerrepo.HandleGetBranch(repoCtrl)) + r.Delete("/*", handlerrepo.HandleDeleteBranch(repoCtrl)) + }) + + // tags operations + r.Route("/tags", func(r chi.Router) { + r.Get("/", handlerrepo.HandleListCommitTags(repoCtrl)) + r.Post("/", handlerrepo.HandleCreateCommitTag(repoCtrl)) + r.Delete("/*", handlerrepo.HandleDeleteCommitTag(repoCtrl)) + }) + + // diffs + r.Route("/diff", func(r chi.Router) { + r.Get("/*", handlerrepo.HandleDiff(repoCtrl)) + }) + r.Route("/diff-stats", func(r chi.Router) { + r.Get("/*", handlerrepo.HandleDiffStats(repoCtrl)) + }) + r.Route("/merge-check", func(r chi.Router) { + r.Post("/*", handlerrepo.HandleMergeCheck(repoCtrl)) + }) + + SetupPullReq(r, pullreqCtrl) + + setupWebhook(r, webhookCtrl) + + setupPipelines(r, repoCtrl, pipelineCtrl, executionCtrl, triggerCtrl, logCtrl) + + SetupChecks(r, checkCtrl) + }) + }) +} + +func setupPipelines( + r chi.Router, + repoCtrl *repo.Controller, + pipelineCtrl *pipeline.Controller, + executionCtrl *execution.Controller, + triggerCtrl *trigger.Controller, + logCtrl *logs.Controller) { + r.Route("/pipelines", func(r chi.Router) { + r.Get("/", handlerrepo.HandleListPipelines(repoCtrl)) + // Create takes path and parentId via body, not uri + r.Post("/", handlerpipeline.HandleCreate(pipelineCtrl)) + r.Get("/generate", handlerrepo.HandlePipelineGenerate(repoCtrl)) + r.Route(fmt.Sprintf("/{%s}", request.PathParamPipelineRef), func(r chi.Router) { + r.Get("/", handlerpipeline.HandleFind(pipelineCtrl)) + r.Patch("/", handlerpipeline.HandleUpdate(pipelineCtrl)) + 
r.Delete("/", handlerpipeline.HandleDelete(pipelineCtrl)) + setupExecutions(r, executionCtrl, logCtrl) + setupTriggers(r, triggerCtrl) + }) + }) +} + +func setupConnectors( + r chi.Router, + connectorCtrl *connector.Controller, +) { + r.Route("/connectors", func(r chi.Router) { + // Create takes path and parentId via body, not uri + r.Post("/", handlerconnector.HandleCreate(connectorCtrl)) + r.Route(fmt.Sprintf("/{%s}", request.PathParamConnectorRef), func(r chi.Router) { + r.Get("/", handlerconnector.HandleFind(connectorCtrl)) + r.Patch("/", handlerconnector.HandleUpdate(connectorCtrl)) + r.Delete("/", handlerconnector.HandleDelete(connectorCtrl)) + }) + }) +} + +func setupTemplates( + r chi.Router, + templateCtrl *template.Controller, +) { + r.Route("/templates", func(r chi.Router) { + // Create takes path and parentId via body, not uri + r.Post("/", handlertemplate.HandleCreate(templateCtrl)) + r.Route(fmt.Sprintf("/{%s}", request.PathParamTemplateRef), func(r chi.Router) { + r.Get("/", handlertemplate.HandleFind(templateCtrl)) + r.Patch("/", handlertemplate.HandleUpdate(templateCtrl)) + r.Delete("/", handlertemplate.HandleDelete(templateCtrl)) + }) + }) +} + +func setupSecrets(r chi.Router, secretCtrl *secret.Controller) { + r.Route("/secrets", func(r chi.Router) { + // Create takes path and parentId via body, not uri + r.Post("/", handlersecret.HandleCreate(secretCtrl)) + r.Route(fmt.Sprintf("/{%s}", request.PathParamSecretRef), func(r chi.Router) { + r.Get("/", handlersecret.HandleFind(secretCtrl)) + r.Patch("/", handlersecret.HandleUpdate(secretCtrl)) + r.Delete("/", handlersecret.HandleDelete(secretCtrl)) + }) + }) +} + +func setupPlugins(r chi.Router, pluginCtrl *plugin.Controller) { + r.Route("/plugins", func(r chi.Router) { + r.Get("/", handlerplugin.HandleList(pluginCtrl)) + }) +} + +func setupExecutions( + r chi.Router, + executionCtrl *execution.Controller, + logCtrl *logs.Controller, +) { + r.Route("/executions", func(r chi.Router) { + r.Get("/", handlerexecution.HandleList(executionCtrl)) + r.Post("/", handlerexecution.HandleCreate(executionCtrl)) + r.Route(fmt.Sprintf("/{%s}", request.PathParamExecutionNumber), func(r chi.Router) { + r.Get("/", handlerexecution.HandleFind(executionCtrl)) + r.Post("/cancel", handlerexecution.HandleCancel(executionCtrl)) + r.Delete("/", handlerexecution.HandleDelete(executionCtrl)) + r.Get( + fmt.Sprintf("/logs/{%s}/{%s}", + request.PathParamStageNumber, + request.PathParamStepNumber, + ), handlerlogs.HandleFind(logCtrl)) + // TODO: Decide whether API should be /stream/logs/{}/{} or /logs/{}/{}/stream + r.Get( + fmt.Sprintf("/logs/{%s}/{%s}/stream", + request.PathParamStageNumber, + request.PathParamStepNumber, + ), handlerlogs.HandleTail(logCtrl)) + }) + }) +} + +func setupTriggers( + r chi.Router, + triggerCtrl *trigger.Controller, +) { + r.Route("/triggers", func(r chi.Router) { + r.Get("/", handlertrigger.HandleList(triggerCtrl)) + r.Post("/", handlertrigger.HandleCreate(triggerCtrl)) + r.Route(fmt.Sprintf("/{%s}", request.PathParamTriggerUID), func(r chi.Router) { + r.Get("/", handlertrigger.HandleFind(triggerCtrl)) + r.Patch("/", handlertrigger.HandleUpdate(triggerCtrl)) + r.Delete("/", handlertrigger.HandleDelete(triggerCtrl)) + }) + }) +} + +func setupInternal(r chi.Router, githookCtrl *controllergithook.Controller) { + r.Route("/internal", func(r chi.Router) { + SetupGitHooks(r, githookCtrl) + }) +} + +func SetupGitHooks(r chi.Router, githookCtrl *controllergithook.Controller) { + r.Route("/git-hooks", func(r chi.Router) { + 
r.Post("/"+githook.HTTPRequestPathPreReceive, handlergithook.HandlePreReceive(githookCtrl)) + r.Post("/"+githook.HTTPRequestPathUpdate, handlergithook.HandleUpdate(githookCtrl)) + r.Post("/"+githook.HTTPRequestPathPostReceive, handlergithook.HandlePostReceive(githookCtrl)) + }) +} + +func SetupPullReq(r chi.Router, pullreqCtrl *pullreq.Controller) { + r.Route("/pullreq", func(r chi.Router) { + r.Post("/", handlerpullreq.HandleCreate(pullreqCtrl)) + r.Get("/", handlerpullreq.HandleList(pullreqCtrl)) + + r.Route(fmt.Sprintf("/{%s}", request.PathParamPullReqNumber), func(r chi.Router) { + r.Get("/", handlerpullreq.HandleFind(pullreqCtrl)) + r.Patch("/", handlerpullreq.HandleUpdate(pullreqCtrl)) + r.Post("/state", handlerpullreq.HandleState(pullreqCtrl)) + r.Post("/recheck", handlerpullreq.HandleRecheck(pullreqCtrl)) + r.Get("/activities", handlerpullreq.HandleListActivities(pullreqCtrl)) + r.Route("/comments", func(r chi.Router) { + r.Post("/", handlerpullreq.HandleCommentCreate(pullreqCtrl)) + r.Route(fmt.Sprintf("/{%s}", request.PathParamPullReqCommentID), func(r chi.Router) { + r.Patch("/", handlerpullreq.HandleCommentUpdate(pullreqCtrl)) + r.Delete("/", handlerpullreq.HandleCommentDelete(pullreqCtrl)) + r.Put("/status", handlerpullreq.HandleCommentStatus(pullreqCtrl)) + }) + }) + r.Route("/reviewers", func(r chi.Router) { + r.Get("/", handlerpullreq.HandleReviewerList(pullreqCtrl)) + r.Put("/", handlerpullreq.HandleReviewerAdd(pullreqCtrl)) + r.Route(fmt.Sprintf("/{%s}", request.PathParamReviewerID), func(r chi.Router) { + r.Delete("/", handlerpullreq.HandleReviewerDelete(pullreqCtrl)) + }) + }) + r.Route("/reviews", func(r chi.Router) { + r.Post("/", handlerpullreq.HandleReviewSubmit(pullreqCtrl)) + }) + r.Post("/merge", handlerpullreq.HandleMerge(pullreqCtrl)) + r.Get("/commits", handlerpullreq.HandleCommits(pullreqCtrl)) + r.Get("/metadata", handlerpullreq.HandleMetadata(pullreqCtrl)) + + r.Route("/file-views", func(r chi.Router) { + r.Put("/", handlerpullreq.HandleFileViewAdd(pullreqCtrl)) + r.Get("/", handlerpullreq.HandleFileViewList(pullreqCtrl)) + r.Delete("/*", handlerpullreq.HandleFileViewDelete(pullreqCtrl)) + }) + }) + }) +} + +func setupWebhook(r chi.Router, webhookCtrl *webhook.Controller) { + r.Route("/webhooks", func(r chi.Router) { + r.Post("/", handlerwebhook.HandleCreate(webhookCtrl)) + r.Get("/", handlerwebhook.HandleList(webhookCtrl)) + + r.Route(fmt.Sprintf("/{%s}", request.PathParamWebhookID), func(r chi.Router) { + r.Get("/", handlerwebhook.HandleFind(webhookCtrl)) + r.Patch("/", handlerwebhook.HandleUpdate(webhookCtrl)) + r.Delete("/", handlerwebhook.HandleDelete(webhookCtrl)) + + r.Route("/executions", func(r chi.Router) { + r.Get("/", handlerwebhook.HandleListExecutions(webhookCtrl)) + + r.Route(fmt.Sprintf("/{%s}", request.PathParamWebhookExecutionID), func(r chi.Router) { + r.Get("/", handlerwebhook.HandleFindExecution(webhookCtrl)) + r.Post("/retrigger", handlerwebhook.HandleRetriggerExecution(webhookCtrl)) + }) + }) + }) + }) +} + +func SetupChecks(r chi.Router, checkCtrl *check.Controller) { + r.Route("/checks", func(r chi.Router) { + r.Route(fmt.Sprintf("/commits/{%s}", request.PathParamCommitSHA), func(r chi.Router) { + r.Put("/", handlercheck.HandleCheckReport(checkCtrl)) + r.Get("/", handlercheck.HandleCheckList(checkCtrl)) + }) + }) +} + +func setupUser(r chi.Router, userCtrl *user.Controller) { + r.Route("/user", func(r chi.Router) { + // enforce principal authenticated and it's a user + r.Use(middlewareprincipal.RestrictTo(enum.PrincipalTypeUser)) + 
r.Get("/", handleruser.HandleFind(userCtrl)) + r.Patch("/", handleruser.HandleUpdate(userCtrl)) + r.Get("/memberships", handleruser.HandleMembershipSpaces(userCtrl)) + + // PAT + r.Route("/tokens", func(r chi.Router) { + r.Get("/", handleruser.HandleListTokens(userCtrl, enum.TokenTypePAT)) + r.Post("/", handleruser.HandleCreateAccessToken(userCtrl)) + + // per token operations + r.Route(fmt.Sprintf("/{%s}", request.PathParamTokenUID), func(r chi.Router) { + r.Delete("/", handleruser.HandleDeleteToken(userCtrl, enum.TokenTypePAT)) + }) + }) + + // SESSION TOKENS + r.Route("/sessions", func(r chi.Router) { + r.Get("/", handleruser.HandleListTokens(userCtrl, enum.TokenTypeSession)) + + // per token operations + r.Route(fmt.Sprintf("/{%s}", request.PathParamTokenUID), func(r chi.Router) { + r.Delete("/", handleruser.HandleDeleteToken(userCtrl, enum.TokenTypeSession)) + }) + }) + }) +} + +func setupServiceAccounts(r chi.Router, saCtrl *serviceaccount.Controller) { + r.Route("/service-accounts", func(r chi.Router) { + // create takes parent information via body + r.Post("/", handlerserviceaccount.HandleCreate(saCtrl)) + + r.Route(fmt.Sprintf("/{%s}", request.PathParamServiceAccountUID), func(r chi.Router) { + r.Get("/", handlerserviceaccount.HandleFind(saCtrl)) + r.Delete("/", handlerserviceaccount.HandleDelete(saCtrl)) + + // SAT + r.Route("/tokens", func(r chi.Router) { + r.Get("/", handlerserviceaccount.HandleListTokens(saCtrl)) + r.Post("/", handlerserviceaccount.HandleCreateToken(saCtrl)) + + // per token operations + r.Route(fmt.Sprintf("/{%s}", request.PathParamTokenUID), func(r chi.Router) { + r.Delete("/", handlerserviceaccount.HandleDeleteToken(saCtrl)) + }) + }) + }) + }) +} + +func setupSystem(r chi.Router, sysCtrl *system.Controller) { + r.Route("/system", func(r chi.Router) { + r.Get("/health", handlersystem.HandleHealth) + r.Get("/version", handlersystem.HandleVersion) + r.Get("/config", handlersystem.HandleGetConfig(sysCtrl)) + }) +} + +func setupResources(r chi.Router) { + r.Route("/resources", func(r chi.Router) { + r.Get("/gitignore", resource.HandleGitIgnore()) + r.Get("/license", resource.HandleLicence()) + }) +} + +func setupPrincipals(r chi.Router, principalCtrl principal.Controller) { + r.Route("/principals", func(r chi.Router) { + r.Get("/", handlerprincipal.HandleList(principalCtrl)) + }) +} + +func setupAdmin(r chi.Router, userCtrl *user.Controller) { + r.Route("/admin", func(r chi.Router) { + r.Use(middlewareprincipal.RestrictToAdmin()) + r.Route("/users", func(r chi.Router) { + r.Get("/", users.HandleList(userCtrl)) + r.Post("/", users.HandleCreate(userCtrl)) + + r.Route(fmt.Sprintf("/{%s}", request.PathParamUserUID), func(r chi.Router) { + r.Get("/", users.HandleFind(userCtrl)) + r.Patch("/", users.HandleUpdate(userCtrl)) + r.Delete("/", users.HandleDelete(userCtrl)) + r.Patch("/admin", handleruser.HandleUpdateAdmin(userCtrl)) + }) + }) + }) +} + +func setupAccount(r chi.Router, userCtrl *user.Controller, sysCtrl *system.Controller, config *types.Config) { + cookieName := config.Token.CookieName + r.Post("/login", account.HandleLogin(userCtrl, cookieName)) + r.Post("/register", account.HandleRegister(userCtrl, sysCtrl, cookieName)) + r.Post("/logout", account.HandleLogout(userCtrl, cookieName)) +} diff --git a/internal/router/git.go b/internal/router/git.go new file mode 100644 index 0000000000..b92acd190b --- /dev/null +++ b/internal/router/git.go @@ -0,0 +1,93 @@ +// Copyright 2023 Harness, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package router + +import ( + "fmt" + "net/http" + + "github.com/harness/gitness/gitrpc" + handlerrepo "github.com/harness/gitness/internal/api/handler/repo" + middlewareauthn "github.com/harness/gitness/internal/api/middleware/authn" + "github.com/harness/gitness/internal/api/middleware/encode" + "github.com/harness/gitness/internal/api/middleware/logging" + "github.com/harness/gitness/internal/api/request" + "github.com/harness/gitness/internal/auth/authn" + "github.com/harness/gitness/internal/auth/authz" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/internal/url" + "github.com/harness/gitness/types" + + "github.com/go-chi/chi" + "github.com/go-chi/chi/middleware" + "github.com/rs/zerolog/hlog" +) + +// GitHandler is an abstraction of an http handler that handles git calls. +type GitHandler interface { + http.Handler +} + +// NewGitHandler returns a new GitHandler. +func NewGitHandler( + config *types.Config, + urlProvider *url.Provider, + repoStore store.RepoStore, + authenticator authn.Authenticator, + authorizer authz.Authorizer, + client gitrpc.Interface, +) GitHandler { + // Use go-chi router for inner routing. + r := chi.NewRouter() + + // Apply common api middleware. + r.Use(middleware.NoCache) + r.Use(middleware.Recoverer) + + // configure logging middleware. + r.Use(hlog.URLHandler("http.url")) + r.Use(hlog.MethodHandler("http.method")) + r.Use(logging.HLogRequestIDHandler()) + r.Use(logging.HLogAccessLogHandler()) + + r.Route(fmt.Sprintf("/{%s}", request.PathParamRepoRef), func(r chi.Router) { + r.Use(middlewareauthn.Attempt(authenticator, authn.SourceRouterGIT)) + + // smart protocol + r.Handle("/git-upload-pack", handlerrepo.GetUploadPack(client, urlProvider, repoStore, authorizer)) + r.Post("/git-receive-pack", handlerrepo.PostReceivePack(client, urlProvider, repoStore, authorizer)) + r.Get("/info/refs", handlerrepo.GetInfoRefs(client, repoStore, authorizer)) + + // dumb protocol + r.Get("/HEAD", stubGitHandler(repoStore)) + r.Get("/objects/info/alternates", stubGitHandler(repoStore)) + r.Get("/objects/info/http-alternates", stubGitHandler(repoStore)) + r.Get("/objects/info/packs", stubGitHandler(repoStore)) + r.Get("/objects/info/{file:[^/]*}", stubGitHandler(repoStore)) + r.Get("/objects/{head:[0-9a-f]{2}}/{hash:[0-9a-f]{38}}", stubGitHandler(repoStore)) + r.Get("/objects/pack/pack-{file:[0-9a-f]{40}}.pack", stubGitHandler(repoStore)) + r.Get("/objects/pack/pack-{file:[0-9a-f]{40}}.idx", stubGitHandler(repoStore)) + }) + + // wrap router in git path encoder. 
+
+	return encode.GitPathBefore(r)
+}
+
+// stubGitHandler is a placeholder for the dumb git http protocol endpoints, which are not supported.
+func stubGitHandler(repoStore store.RepoStore) http.HandlerFunc {
+	return func(w http.ResponseWriter, r *http.Request) {
+		w.WriteHeader(http.StatusBadGateway)
+		_, _ = w.Write([]byte("Seems like an asteroid destroyed the ancient git protocol"))
+	}
+}
diff --git a/internal/router/router.go b/internal/router/router.go
new file mode 100644
index 0000000000..5ee0714b82
--- /dev/null
+++ b/internal/router/router.go
@@ -0,0 +1,168 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package router provides http handlers for serving the
+// web applications and API endpoints.
+package router
+
+import (
+	"net/http"
+	"strings"
+
+	"github.com/harness/gitness/internal/api/render"
+	"github.com/harness/gitness/internal/request"
+
+	"github.com/go-logr/logr"
+	"github.com/go-logr/zerologr"
+	"github.com/rs/zerolog"
+	"github.com/rs/zerolog/hlog"
+	"github.com/rs/zerolog/log"
+)
+
+const (
+	APIMount = "/api"
+	GitMount = "/git"
+)
+
+type Router struct {
+	api APIHandler
+	git GitHandler
+	web WebHandler
+
+	// gitHost describes the optional host via which git traffic is identified.
+	// Note: always stored as lowercase.
+	gitHost string
+}
+
+// NewRouter returns a new http.Handler that routes traffic
+// to the appropriate handlers.
+func NewRouter(
+	api APIHandler,
+	git GitHandler,
+	web WebHandler,
+	gitHost string,
+) *Router {
+	return &Router{
+		api: api,
+		git: git,
+		web: web,
+
+		gitHost: strings.ToLower(gitHost),
+	}
+}
+
+func (r *Router) ServeHTTP(w http.ResponseWriter, req *http.Request) {
+	var err error
+	// setup logger for request
+	log := log.Logger.With().Logger()
+	ctx := log.WithContext(req.Context())
+	// add logger to logr interface for usage in 3rd party libs
+	ctx = logr.NewContext(ctx, zerologr.New(&log))
+	req = req.WithContext(ctx)
+	log.UpdateContext(func(c zerolog.Context) zerolog.Context {
+		return c.
+			Str("http.original_url", req.URL.String())
+	})
+
+	/*
+	 * 1. GIT
+	 *
+	 * All Git originating traffic starts with "/space1/space2/repo.git".
+	 */
+	if r.isGitTraffic(req) {
+		log.UpdateContext(func(c zerolog.Context) zerolog.Context {
+			return c.Str("http.handler", "git")
+		})
+
+		// remove matched prefix to simplify API handlers (only if it's there)
+		if err = stripPrefix(GitMount, req); err != nil {
+			hlog.FromRequest(req).Err(err).Msgf("Failed stripping of prefix for git request.")
+			render.InternalError(w)
+			return
+		}
+
+		r.git.ServeHTTP(w, req)
+		return
+	}
+
+	/*
+	 * 2. REST API
+	 *
+	 * All Rest API calls start with "/api/", and thus can be uniquely identified.
+	 */
+	if r.isAPITraffic(req) {
+		log.UpdateContext(func(c zerolog.Context) zerolog.Context {
+			return c.Str("http.handler", "api")
+		})
+
+		// remove matched prefix to simplify API handlers
+		if err = stripPrefix(APIMount, req); err != nil {
+			hlog.FromRequest(req).Err(err).Msgf("Failed stripping of prefix for api request.")
+			render.InternalError(w)
+			return
+		}
+
+		r.api.ServeHTTP(w, req)
+		return
+	}
+
+	/*
+	 * 3. 
WEB
+	 *
+	 * Everything else will be routed to web (or return 404)
+	 */
+	log.UpdateContext(func(c zerolog.Context) zerolog.Context {
+		return c.Str("http.handler", "web")
+	})
+
+	r.web.ServeHTTP(w, req)
+}
+
+// stripPrefix removes the prefix from the request path (or noop if it's not there).
+func stripPrefix(prefix string, req *http.Request) error {
+	p := req.URL.Path
+	if !strings.HasPrefix(p, prefix) {
+		return nil
+	}
+	return request.ReplacePrefix(req, req.URL.Path[:len(prefix)], "")
}

// isGitTraffic returns true iff the request is identified as part of the git http protocol.
+func (r *Router) isGitTraffic(req *http.Request) bool {
+	// git traffic is always reachable via the git mounting path.
+	p := req.URL.Path
+	if strings.HasPrefix(p, GitMount) {
+		return true
+	}
+
+	// otherwise check if the request came in via the configured git host (if enabled)
+	if len(r.gitHost) > 0 {
+		// cut (optional) port off the host
+		h, _, _ := strings.Cut(req.Host, ":")
+
+		// check if request host matches the configured git host (case insensitive)
+		if r.gitHost == strings.ToLower(h) {
+			return true
+		}
+	}
+
+	// otherwise we don't treat it as git traffic
+	return false
+}
+
+// isAPITraffic returns true iff the request is identified as part of our rest API.
+func (r *Router) isAPITraffic(req *http.Request) bool {
+	p := req.URL.Path
+	return strings.HasPrefix(p, APIMount)
+}
diff --git a/internal/router/router_test.go b/internal/router/router_test.go
new file mode 100644
index 0000000000..8d49cc632a
--- /dev/null
+++ b/internal/router/router_test.go
@@ -0,0 +1,38 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package router
+
+import "testing"
+
+// this unit test ensures routes that require authorization
+// return a 401 unauthorized if no token, or an invalid token
+// is provided.
+func TestTokenGate(t *testing.T) {
+	t.Skip()
+}
+
+// this unit test ensures routes that require pipeline access
+// return a 403 forbidden if the user does not have access
+// to the pipeline.
+func TestPipelineGate(t *testing.T) {
+	t.Skip()
+}
+
+// this unit test ensures routes that require system access
+// return a 403 forbidden if the user does not have access
+// to the system resource.
+func TestSystemGate(t *testing.T) {
+	t.Skip()
+}
diff --git a/internal/router/web.go b/internal/router/web.go
new file mode 100644
index 0000000000..750e1a4c4c
--- /dev/null
+++ b/internal/router/web.go
@@ -0,0 +1,86 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
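The skipped gate tests above could eventually be joined by a traffic-classification test along these lines; this is only a sketch (host names and paths are invented) and it relies on the unexported isGitTraffic/isAPITraffic, so it would have to live in package router:

package router

import (
	"net/http"
	"net/http/httptest"
	"testing"
)

func TestTrafficClassificationSketch(t *testing.T) {
	r := NewRouter(nil, nil, nil, "git.example.com")

	byPath := httptest.NewRequest(http.MethodGet, "/git/space/repo.git/info/refs", nil)
	if !r.isGitTraffic(byPath) {
		t.Error("expected /git prefix to be classified as git traffic")
	}

	byHost := httptest.NewRequest(http.MethodGet, "/space/repo.git/info/refs", nil)
	byHost.Host = "GIT.example.com:443" // host match is case insensitive and ignores the port
	if !r.isGitTraffic(byHost) {
		t.Error("expected configured git host to be classified as git traffic")
	}

	api := httptest.NewRequest(http.MethodGet, "/api/v1/user", nil)
	if !r.isAPITraffic(api) || r.isGitTraffic(api) {
		t.Error("expected /api prefix to be classified as api traffic only")
	}
}
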
+// See the License for the specific language governing permissions and +// limitations under the License. + +package router + +import ( + "net/http" + + "github.com/harness/gitness/internal/api/openapi" + "github.com/harness/gitness/internal/api/render" + "github.com/harness/gitness/types" + "github.com/harness/gitness/web" + + "github.com/go-chi/chi" + "github.com/swaggest/swgui/v3emb" + "github.com/unrolled/secure" +) + +// WebHandler is an abstraction of an http handler that handles web calls. +type WebHandler interface { + http.Handler +} + +// NewWebHandler returns a new WebHandler. +func NewWebHandler(config *types.Config) WebHandler { + // Use go-chi router for inner routing + r := chi.NewRouter() + // create middleware to enforce security best practices for + // the user interface. note that theis middleware is only used + // when serving the user interface (not found handler, below). + sec := secure.New( + secure.Options{ + AllowedHosts: config.Secure.AllowedHosts, + HostsProxyHeaders: config.Secure.HostsProxyHeaders, + SSLRedirect: config.Secure.SSLRedirect, + SSLTemporaryRedirect: config.Secure.SSLTemporaryRedirect, + SSLHost: config.Secure.SSLHost, + SSLProxyHeaders: config.Secure.SSLProxyHeaders, + STSSeconds: config.Secure.STSSeconds, + STSIncludeSubdomains: config.Secure.STSIncludeSubdomains, + STSPreload: config.Secure.STSPreload, + ForceSTSHeader: config.Secure.ForceSTSHeader, + FrameDeny: config.Secure.FrameDeny, + ContentTypeNosniff: config.Secure.ContentTypeNosniff, + BrowserXssFilter: config.Secure.BrowserXSSFilter, + ContentSecurityPolicy: config.Secure.ContentSecurityPolicy, + ReferrerPolicy: config.Secure.ReferrerPolicy, + }, + ) + + // openapi playground endpoints + // TODO: this should not be generated and marshaled on the fly every time? + r.HandleFunc("/openapi.yaml", func(w http.ResponseWriter, r *http.Request) { + spec := openapi.Generate() + data, err := spec.MarshalYAML() + if err != nil { + render.ErrorMessagef(w, http.StatusInternalServerError, "error serializing openapi.yaml: %v", err) + return + } + w.Header().Set("Content-Type", "application/yaml") + w.WriteHeader(http.StatusOK) + _, _ = w.Write(data) + }) + swagger := v3emb.NewHandler("API Definition", "/openapi.yaml", "/swagger") + r.With(sec.Handler).Handle("/swagger", swagger) + r.With(sec.Handler).Handle("/swagger/*", swagger) + + // serve all other routes from the embedded filesystem, + // which in turn serves the user interface. + r.With(sec.Handler).NotFound( + web.Handler(), + ) + + return r +} diff --git a/internal/router/wire.go b/internal/router/wire.go new file mode 100644 index 0000000000..950d5dbbd4 --- /dev/null +++ b/internal/router/wire.go @@ -0,0 +1,104 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
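A rough test sketch for the /openapi.yaml endpoint served by NewWebHandler, assuming a zero-value types.Config is sufficient for the secure-middleware options in a test setting:

package router

import (
	"net/http"
	"net/http/httptest"
	"testing"

	"github.com/harness/gitness/types"
)

func TestOpenAPISpecSketch(t *testing.T) {
	h := NewWebHandler(&types.Config{}) // assumption: a zero-value config is acceptable here

	rec := httptest.NewRecorder()
	h.ServeHTTP(rec, httptest.NewRequest(http.MethodGet, "/openapi.yaml", nil))

	if rec.Code != http.StatusOK {
		t.Fatalf("expected 200, got %d", rec.Code)
	}
	if got := rec.Header().Get("Content-Type"); got != "application/yaml" {
		t.Errorf("unexpected content type %q", got)
	}
}
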
+ +package router + +import ( + "github.com/harness/gitness/gitrpc" + "github.com/harness/gitness/internal/api/controller/check" + "github.com/harness/gitness/internal/api/controller/connector" + "github.com/harness/gitness/internal/api/controller/execution" + "github.com/harness/gitness/internal/api/controller/githook" + "github.com/harness/gitness/internal/api/controller/logs" + "github.com/harness/gitness/internal/api/controller/pipeline" + "github.com/harness/gitness/internal/api/controller/plugin" + "github.com/harness/gitness/internal/api/controller/principal" + "github.com/harness/gitness/internal/api/controller/pullreq" + "github.com/harness/gitness/internal/api/controller/repo" + "github.com/harness/gitness/internal/api/controller/secret" + "github.com/harness/gitness/internal/api/controller/serviceaccount" + "github.com/harness/gitness/internal/api/controller/space" + "github.com/harness/gitness/internal/api/controller/system" + "github.com/harness/gitness/internal/api/controller/template" + "github.com/harness/gitness/internal/api/controller/trigger" + "github.com/harness/gitness/internal/api/controller/user" + "github.com/harness/gitness/internal/api/controller/webhook" + "github.com/harness/gitness/internal/auth/authn" + "github.com/harness/gitness/internal/auth/authz" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/internal/url" + "github.com/harness/gitness/types" + + "github.com/google/wire" +) + +// WireSet provides a wire set for this package. +var WireSet = wire.NewSet( + ProvideRouter, + ProvideGitHandler, + ProvideAPIHandler, + ProvideWebHandler, +) + +func ProvideRouter( + config *types.Config, + api APIHandler, + git GitHandler, + web WebHandler, +) *Router { + return NewRouter(api, git, web, + config.Server.HTTP.GitHost) +} + +func ProvideGitHandler( + config *types.Config, + urlProvider *url.Provider, + repoStore store.RepoStore, + authenticator authn.Authenticator, + authorizer authz.Authorizer, + client gitrpc.Interface, +) GitHandler { + return NewGitHandler(config, urlProvider, repoStore, authenticator, authorizer, client) +} + +func ProvideAPIHandler( + config *types.Config, + authenticator authn.Authenticator, + repoCtrl *repo.Controller, + executionCtrl *execution.Controller, + logCtrl *logs.Controller, + spaceCtrl *space.Controller, + pipelineCtrl *pipeline.Controller, + secretCtrl *secret.Controller, + triggerCtrl *trigger.Controller, + connectorCtrl *connector.Controller, + templateCtrl *template.Controller, + pluginCtrl *plugin.Controller, + pullreqCtrl *pullreq.Controller, + webhookCtrl *webhook.Controller, + githookCtrl *githook.Controller, + saCtrl *serviceaccount.Controller, + userCtrl *user.Controller, + principalCtrl principal.Controller, + checkCtrl *check.Controller, + sysCtrl *system.Controller, +) APIHandler { + return NewAPIHandler(config, authenticator, repoCtrl, executionCtrl, logCtrl, spaceCtrl, pipelineCtrl, + secretCtrl, triggerCtrl, connectorCtrl, templateCtrl, pluginCtrl, pullreqCtrl, webhookCtrl, + githookCtrl, saCtrl, userCtrl, principalCtrl, checkCtrl, sysCtrl) +} + +func ProvideWebHandler(config *types.Config) WebHandler { + return NewWebHandler(config) +} diff --git a/internal/server/server.go b/internal/server/server.go new file mode 100644 index 0000000000..9bcc693172 --- /dev/null +++ b/internal/server/server.go @@ -0,0 +1,25 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package server implements an http server. +package server + +import ( + "github.com/harness/gitness/http" +) + +// Server is the http server for gitness. +type Server struct { + *http.Server +} diff --git a/internal/server/server_test.go b/internal/server/server_test.go new file mode 100644 index 0000000000..4e18ede600 --- /dev/null +++ b/internal/server/server_test.go @@ -0,0 +1,15 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package server diff --git a/internal/server/wire.go b/internal/server/wire.go new file mode 100644 index 0000000000..28035eff60 --- /dev/null +++ b/internal/server/wire.go @@ -0,0 +1,40 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package server + +import ( + "github.com/harness/gitness/http" + "github.com/harness/gitness/internal/router" + "github.com/harness/gitness/types" + + "github.com/google/wire" +) + +// WireSet provides a wire set for this package. +var WireSet = wire.NewSet(ProvideServer) + +// ProvideServer provides a server instance. +func ProvideServer(config *types.Config, router *router.Router) *Server { + return &Server{ + http.NewServer( + http.Config{ + Acme: config.Server.Acme.Enabled, + Addr: config.Server.HTTP.Bind, + Host: config.Server.HTTP.Host, + }, + router, + ), + } +} diff --git a/internal/services/codecomments/migrator.go b/internal/services/codecomments/migrator.go new file mode 100644 index 0000000000..aac38c332b --- /dev/null +++ b/internal/services/codecomments/migrator.go @@ -0,0 +1,245 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
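For orientation, the WireSet values declared in these wire.go files are meant to be composed by a Wire injector at the command level. A hypothetical, heavily trimmed injector is sketched below; the initServer name and the omitted provider sets are assumptions, and Wire would still require providers for every controller and store before it could generate code.

//go:build wireinject

package main

import (
	"github.com/google/wire"

	"github.com/harness/gitness/internal/router"
	"github.com/harness/gitness/internal/server"
	"github.com/harness/gitness/types"
)

// initServer sketches how the router and server provider sets compose into a
// *server.Server. Additional provider sets (stores, controllers, gitrpc, ...)
// would have to be listed for wire to resolve every dependency.
func initServer(config *types.Config) *server.Server {
	wire.Build(
		router.WireSet,
		server.WireSet,
		// ... remaining provider sets omitted ...
	)
	return nil
}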
+// See the License for the specific language governing permissions and +// limitations under the License. + +package codecomments + +import ( + "context" + + "github.com/harness/gitness/gitrpc" + gitrpcenum "github.com/harness/gitness/gitrpc/enum" + "github.com/harness/gitness/types" + + "github.com/rs/zerolog/log" +) + +// Migrator is a utility used to migrate code comments after an update of the pull request's source branch. +type Migrator struct { + gitRPCClient gitrpc.Interface +} + +// MigrateNew updates the "+" (the added lines) part of code comments +// after a new commit on the pull request's source branch. +// The parameter newSHA should contain the latest commit SHA of the pull request's source branch. +func (migrator *Migrator) MigrateNew( + ctx context.Context, + repoGitUID string, + newSHA string, + comments []*types.CodeComment, +) { + migrator.migrate( + ctx, + repoGitUID, + newSHA, + comments, + func(codeComment *types.CodeComment) string { + return codeComment.SourceSHA + }, + func(codeComment *types.CodeComment, sha string) { + codeComment.SourceSHA = sha + }, + func(codeComment *types.CodeComment) (int, int) { + return codeComment.LineNew, codeComment.LineNew + codeComment.SpanNew - 1 + }, + func(codeComment *types.CodeComment, line int) { + codeComment.LineNew += line + }, + ) +} + +// MigrateOld updates the "-" (the removed lines) part of code comments +// after a change of the pull request's merge base commit. +func (migrator *Migrator) MigrateOld( + ctx context.Context, + repoGitUID string, + newSHA string, + comments []*types.CodeComment, +) { + migrator.migrate( + ctx, + repoGitUID, + newSHA, + comments, + func(codeComment *types.CodeComment) string { + return codeComment.MergeBaseSHA + }, + func(codeComment *types.CodeComment, sha string) { + codeComment.MergeBaseSHA = sha + }, + func(codeComment *types.CodeComment) (int, int) { + return codeComment.LineOld, codeComment.LineOld + codeComment.SpanOld - 1 + }, + func(codeComment *types.CodeComment, line int) { + codeComment.LineOld += line + }, + ) +} + +//nolint:gocognit,funlen // refactor if needed +func (migrator *Migrator) migrate( + ctx context.Context, + repoGitUID string, + newSHA string, + comments []*types.CodeComment, + getSHA func(codeComment *types.CodeComment) string, + setSHA func(codeComment *types.CodeComment, sha string), + getCommentStartEnd func(codeComment *types.CodeComment) (int, int), + updateCommentLine func(codeComment *types.CodeComment, line int), +) { + if len(comments) == 0 { + return + } + + commitMap, initialValuesMap := mapCodeComments(comments, getSHA) + + for commentSHA, fileMap := range commitMap { + // get all hunk headers for the diff between the SHA that's stored in the comment and the new SHA. + diffSummary, errDiff := migrator.gitRPCClient.GetDiffHunkHeaders(ctx, gitrpc.GetDiffHunkHeadersParams{ + ReadParams: gitrpc.ReadParams{ + RepoUID: repoGitUID, + }, + SourceCommitSHA: commentSHA, + TargetCommitSHA: newSHA, + }) + if gitrpc.ErrorStatus(errDiff) == gitrpc.StatusNotFound { + // Handle the commit SHA not found error and mark all code comments as outdated. + for _, codeComments := range fileMap { + for _, codeComment := range codeComments { + codeComment.Outdated = true + } + } + continue + } + if errDiff != nil { + log.Ctx(ctx).Err(errDiff).
+ Msgf("failed to get git diff between comment's sha %s and the latest %s", commentSHA, newSHA) + continue + } + + // Traverse all the changed files + for _, file := range diffSummary.Files { + var codeComments []*types.CodeComment + + codeComments = fileMap[file.FileHeader.OldName] + + // Handle file renames + if file.FileHeader.OldName != file.FileHeader.NewName { + if len(codeComments) == 0 { + // If the code comments are not found using the old name of the file, try with the new name. + codeComments = fileMap[file.FileHeader.NewName] + } else { + // Update the code comment's path to the new file name + for _, cc := range codeComments { + cc.Path = file.FileHeader.NewName + } + } + } + + // Handle file delete + if _, isDeleted := file.FileHeader.Extensions[gitrpcenum.DiffExtHeaderDeletedFileMode]; isDeleted { + for _, codeComment := range codeComments { + codeComment.Outdated = true + } + continue + } + + // Handle new files - shouldn't happen because no code comments should exist for a non-existing file. + if _, isAdded := file.FileHeader.Extensions[gitrpcenum.DiffExtHeaderNewFileMode]; isAdded { + for _, codeComment := range codeComments { + codeComment.Outdated = true + } + continue + } + + for _, hunk := range file.HunkHeaders { + for _, cc := range codeComments { + if cc.Outdated { + continue + } + + ccStart, ccEnd := getCommentStartEnd(cc) + outdated, moveDelta := processCodeComment(ccStart, ccEnd, hunk) + if outdated { + cc.CodeCommentFields = initialValuesMap[cc.ID] // revert the CC to the original values + cc.Outdated = true + continue + } + + updateCommentLine(cc, moveDelta) + } + } + } + + for _, codeComments := range fileMap { + for _, codeComment := range codeComments { + if codeComment.Outdated { + continue + } + setSHA(codeComment, newSHA) + } + } + } +} + +// mapCodeComments groups code comments to maps, first by commit SHA and then by file name. +// It assumes the incoming list is already sorted. +func mapCodeComments( + comments []*types.CodeComment, + extractSHA func(*types.CodeComment) string, +) (map[string]map[string][]*types.CodeComment, map[int64]types.CodeCommentFields) { + commitMap := map[string]map[string][]*types.CodeComment{} + originalComments := make(map[int64]types.CodeCommentFields, len(comments)) + + for _, comment := range comments { + commitSHA := extractSHA(comment) + + fileMap := commitMap[commitSHA] + if fileMap == nil { + fileMap = map[string][]*types.CodeComment{} + } + + fileComments := fileMap[comment.Path] + fileComments = append(fileComments, comment) + fileMap[comment.Path] = fileComments + + commitMap[commitSHA] = fileMap + + originalComments[comment.ID] = comment.CodeCommentFields + } + + return commitMap, originalComments +} + +func processCodeComment(ccStart, ccEnd int, h gitrpc.HunkHeader) (outdated bool, moveDelta int) { + // A code comment is marked as outdated if: + // * The code lines covered by the code comment are changed + // (the range given by the OldLine/OldSpan overlaps the code comment's code range) + // * There are new lines inside the line range covered by the code comment, don't care about how many + // (the NewLine is between the CC start and CC end; the value of the NewSpan is unimportant). 
+ outdated = + (h.OldSpan > 0 && ccEnd >= h.OldLine && ccStart <= h.OldLine+h.OldSpan-1) || // code comment's code is changed + (h.NewSpan > 0 && h.NewLine > ccStart && h.NewLine <= ccEnd) // lines are added inside the code comment + + if outdated { + return // outdated comments aren't moved + } + + if ccEnd <= h.OldLine { + return // the change described by the hunk header is below the code comment, so it doesn't affect it + } + + moveDelta = h.NewSpan - h.OldSpan + + return +} diff --git a/internal/services/codecomments/migrator_test.go b/internal/services/codecomments/migrator_test.go new file mode 100644 index 0000000000..bb21ee266e --- /dev/null +++ b/internal/services/codecomments/migrator_test.go @@ -0,0 +1,173 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package codecomments + +import ( + "testing" + + "github.com/harness/gitness/gitrpc" +) + +func TestProcessCodeComment(t *testing.T) { + // the code comment tested in this unit test spans five lines, from line 20 to line 24 + const ccStart = 20 + const ccEnd = 24 + tests := []struct { + name string + hunk gitrpc.HunkHeader + expOutdated bool + expMoveDelta int + }{ + // only added lines + { + name: "three-lines-added-before-far", + hunk: gitrpc.HunkHeader{OldLine: 10, OldSpan: 0, NewLine: 11, NewSpan: 3}, + expOutdated: false, expMoveDelta: 3, + }, + { + name: "three-lines-added-before-but-touching", + hunk: gitrpc.HunkHeader{OldLine: 19, OldSpan: 0, NewLine: 20, NewSpan: 3}, + expOutdated: false, expMoveDelta: 3, + }, + { + name: "three-lines-added-overlap-at-start", + hunk: gitrpc.HunkHeader{OldLine: 20, OldSpan: 0, NewLine: 21, NewSpan: 3}, + expOutdated: true, expMoveDelta: 0, + }, + { + name: "three-lines-added-inside", + hunk: gitrpc.HunkHeader{OldLine: 21, OldSpan: 0, NewLine: 22, NewSpan: 3}, + expOutdated: true, expMoveDelta: 0, + }, + { + name: "three-lines-added-overlap-at-end", + hunk: gitrpc.HunkHeader{OldLine: 23, OldSpan: 0, NewLine: 24, NewSpan: 3}, + expOutdated: true, expMoveDelta: 0, + }, + { + name: "three-lines-added-after-but-touching", + hunk: gitrpc.HunkHeader{OldLine: 24, OldSpan: 0, NewLine: 25, NewSpan: 3}, + expOutdated: false, expMoveDelta: 0, + }, + { + name: "three-lines-added-after-far", + hunk: gitrpc.HunkHeader{OldLine: 30, OldSpan: 0, NewLine: 31, NewSpan: 3}, + expOutdated: false, expMoveDelta: 0, + }, + // only removed lines + { + name: "three-lines-removed-before-far", + hunk: gitrpc.HunkHeader{OldLine: 10, OldSpan: 3, NewLine: 9, NewSpan: 0}, + expOutdated: false, expMoveDelta: -3, + }, + { + name: "three-lines-removed-before-but-touching", + hunk: gitrpc.HunkHeader{OldLine: 17, OldSpan: 3, NewLine: 16, NewSpan: 0}, + expOutdated: false, expMoveDelta: -3, + }, + { + name: "three-lines-removed-overlap-at-start", + hunk: gitrpc.HunkHeader{OldLine: 18, OldSpan: 3, NewLine: 17, NewSpan: 0}, + expOutdated: true, expMoveDelta: 0, + }, + { + name: "three-lines-removed-inside", + hunk: gitrpc.HunkHeader{OldLine: 21, OldSpan: 3, NewLine: 20, NewSpan: 
0}, + expOutdated: true, expMoveDelta: 0, + }, + { + name: "three-lines-removed-overlap-at-end", + hunk: gitrpc.HunkHeader{OldLine: 24, OldSpan: 3, NewLine: 23, NewSpan: 0}, + expOutdated: true, expMoveDelta: 0, + }, + { + name: "three-lines-removed-after-but-touching", + hunk: gitrpc.HunkHeader{OldLine: 25, OldSpan: 3, NewLine: 24, NewSpan: 0}, + expOutdated: false, expMoveDelta: 0, + }, + { + name: "three-lines-removed-after-far", + hunk: gitrpc.HunkHeader{OldLine: 30, OldSpan: 3, NewLine: 29, NewSpan: 0}, + expOutdated: false, expMoveDelta: 0, + }, + // only changed lines + { + name: "three-lines-changed-before-far", + hunk: gitrpc.HunkHeader{OldLine: 10, OldSpan: 3, NewLine: 10, NewSpan: 3}, + expOutdated: false, expMoveDelta: 0, + }, + { + name: "three-lines-changed-before-but-touching", + hunk: gitrpc.HunkHeader{OldLine: 17, OldSpan: 3, NewLine: 17, NewSpan: 3}, + expOutdated: false, expMoveDelta: 0, + }, + { + name: "three-lines-changed-overlap-at-start", + hunk: gitrpc.HunkHeader{OldLine: 18, OldSpan: 3, NewLine: 18, NewSpan: 3}, + expOutdated: true, expMoveDelta: 0, + }, + { + name: "three-lines-changed-inside", + hunk: gitrpc.HunkHeader{OldLine: 21, OldSpan: 3, NewLine: 21, NewSpan: 3}, + expOutdated: true, expMoveDelta: 0, + }, + { + name: "three-lines-changed-overlap-at-end", + hunk: gitrpc.HunkHeader{OldLine: 24, OldSpan: 3, NewLine: 24, NewSpan: 3}, + expOutdated: true, expMoveDelta: 0, + }, + { + name: "three-lines-changed-after-but-touching", + hunk: gitrpc.HunkHeader{OldLine: 25, OldSpan: 3, NewLine: 25, NewSpan: 3}, + expOutdated: false, expMoveDelta: 0, + }, + { + name: "three-lines-changed-after-far", + hunk: gitrpc.HunkHeader{OldLine: 30, OldSpan: 3, NewLine: 30, NewSpan: 3}, + expOutdated: false, expMoveDelta: 0, + }, + // mixed tests + { + name: "two-lines-added-one-changed-just-before", + hunk: gitrpc.HunkHeader{OldLine: 19, OldSpan: 1, NewLine: 19, NewSpan: 3}, + expOutdated: false, expMoveDelta: 2, + }, + { + name: "two-lines-removed-one-added-just-after", + hunk: gitrpc.HunkHeader{OldLine: 25, OldSpan: 2, NewLine: 25, NewSpan: 1}, + expOutdated: false, expMoveDelta: 0, + }, + { + name: "twenty-lines-added-at-line-15", + hunk: gitrpc.HunkHeader{OldLine: 14, OldSpan: 0, NewLine: 15, NewSpan: 20}, + expOutdated: false, expMoveDelta: 20, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + outdated, moveDelta := processCodeComment(ccStart, ccEnd, test.hunk) + + if want, got := test.expOutdated, outdated; want != got { + t.Errorf("outdated mismatch; want=%t got=%t", want, got) + return + } + + if want, got := test.expMoveDelta, moveDelta; want != got { + t.Errorf("moveDelta mismatch; want=%d got=%d", want, got) + } + }) + } +} diff --git a/internal/services/codecomments/wire.go b/internal/services/codecomments/wire.go new file mode 100644 index 0000000000..8284914450 --- /dev/null +++ b/internal/services/codecomments/wire.go @@ -0,0 +1,33 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
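The table above pins down when a comment survives a change; what it leaves implicit is how the migrator applies the result, namely that updateCommentLine shifts the comment's first line by moveDelta. A small self-contained check of that arithmetic (a hypothetical test in the same package, values matching the "three-lines-added-before-far" case):

package codecomments

import (
	"testing"

	"github.com/harness/gitness/gitrpc"
)

func TestProcessCodeCommentShift(t *testing.T) {
	line, span := 20, 5 // the comment covers lines 20..24
	// three lines are added well above the comment
	hunk := gitrpc.HunkHeader{OldLine: 10, OldSpan: 0, NewLine: 11, NewSpan: 3}

	outdated, moveDelta := processCodeComment(line, line+span-1, hunk)
	if outdated {
		t.Fatal("expected the comment to stay valid")
	}
	if got, want := line+moveDelta, 23; got != want {
		t.Errorf("expected the comment to move to line %d, got %d", want, got)
	}
}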
+ +package codecomments + +import ( + "github.com/harness/gitness/gitrpc" + + "github.com/google/wire" +) + +var WireSet = wire.NewSet( + ProvideMigrator, +) + +func ProvideMigrator( + gitRPCClient gitrpc.Interface, +) *Migrator { + return &Migrator{ + gitRPCClient: gitRPCClient, + } +} diff --git a/internal/services/exporter/harness_code_client.go b/internal/services/exporter/harness_code_client.go new file mode 100644 index 0000000000..64435bb9e0 --- /dev/null +++ b/internal/services/exporter/harness_code_client.go @@ -0,0 +1,228 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// go:build harness + +package exporter + +import ( + "bytes" + "context" + "crypto/tls" + "encoding/json" + "fmt" + "io" + "net/http" + "strings" + + "github.com/harness/gitness/internal/api/controller/repo" + "github.com/harness/gitness/types" +) + +const ( + pathCreateRepo = "/v1/accounts/%s/orgs/%s/projects/%s/repos" + pathDeleteRepo = "/v1/accounts/%s/orgs/%s/projects/%s/repos/%s" + headerApiKey = "X-Api-Key" + routingId = "routingId" +) + +var ( + errHTTPNotFound = fmt.Errorf("not found") + errHTTPBadRequest = fmt.Errorf("bad request") + errHTTPInternal = fmt.Errorf("internal error") + errHTTPDuplicate = fmt.Errorf("resource already exists") +) + +type harnessCodeClient struct { + client *client +} + +type client struct { + baseURL string + httpClient http.Client + + accountId string + orgId string + projectId string + + token string +} + +// newClient creates a new harness Client for interacting with the platforms APIs. 
+func newClient(baseURL string, accountID string, orgId string, projectId string, token string) (*client, error) { + if baseURL == "" { + return nil, fmt.Errorf("baseUrl required") + } + if accountID == "" { + return nil, fmt.Errorf("accountID required") + } + if orgId == "" { + return nil, fmt.Errorf("orgId required") + } + if projectId == "" { + return nil, fmt.Errorf("projectId required") + } + if token == "" { + return nil, fmt.Errorf("token required") + } + + return &client{ + baseURL: baseURL, + accountId: accountID, + orgId: orgId, + projectId: projectId, + token: token, + httpClient: http.Client{ + Transport: &http.Transport{ + TLSClientConfig: &tls.Config{ + InsecureSkipVerify: false, + }, + }, + }, + }, nil +} + +func newHarnessCodeClient(baseUrl string, accountID string, orgId string, projectId string, token string) (*harnessCodeClient, error) { + client, err := newClient(baseUrl, accountID, orgId, projectId, token) + if err != nil { + return nil, err + } + return &harnessCodeClient{ + client: client, + }, nil +} + +func (c *harnessCodeClient) CreateRepo(ctx context.Context, input repo.CreateInput) (*types.Repository, error) { + path := fmt.Sprintf(pathCreateRepo, c.client.accountId, c.client.orgId, c.client.projectId) + bodyBytes, err := json.Marshal(input) + if err != nil { + return nil, fmt.Errorf("failed to serialize body: %w", err) + } + + req, err := http.NewRequestWithContext(ctx, http.MethodPost, appendPath(c.client.baseURL, path), bytes.NewBuffer(bodyBytes)) + if err != nil { + return nil, fmt.Errorf("unable to create new http request : %w", err) + } + + q := map[string]string{routingId: c.client.accountId} + addQueryParams(req, q) + req.Header.Add("Content-Type", "application/json") + req.ContentLength = int64(len(bodyBytes)) + + resp, err := c.client.Do(req) + if err != nil { + return nil, fmt.Errorf("request execution failed: %w", err) + } + + if resp != nil && resp.Body != nil { + defer func() { _ = resp.Body.Close() }() + } + + repository := new(types.Repository) + err = mapStatusCodeToError(resp.StatusCode) + if err != nil { + return nil, err + } + + err = unmarshalResponse(resp, repository) + if err != nil { + return nil, err + } + return repository, err +} + +func addQueryParams(req *http.Request, params map[string]string) { + if len(params) > 0 { + q := req.URL.Query() + for key, value := range params { + q.Add(key, value) + } + req.URL.RawQuery = q.Encode() + } +} + +func (c *harnessCodeClient) DeleteRepo(ctx context.Context, repoUid string) error { + path := fmt.Sprintf(pathDeleteRepo, c.client.accountId, c.client.orgId, c.client.projectId, repoUid) + req, err := http.NewRequestWithContext(ctx, http.MethodDelete, appendPath(c.client.baseURL, path), nil) + if err != nil { + return fmt.Errorf("unable to create new http request : %w", err) + } + + q := map[string]string{routingId: c.client.accountId} + addQueryParams(req, q) + resp, err := c.client.Do(req) + if err != nil { + return fmt.Errorf("request execution failed: %w", err) + } + + if resp != nil && resp.Body != nil { + defer func() { _ = resp.Body.Close() }() + } + return mapStatusCodeToError(resp.StatusCode) +} + +func appendPath(uri string, path string) string { + if path == "" { + return uri + } + + return strings.TrimRight(uri, "/") + "/" + strings.TrimLeft(path, "/") +} + +func (c *client) Do(r *http.Request) (*http.Response, error) { + addAuthHeader(r, c.token) + return c.httpClient.Do(r) +} + +// addAuthHeader adds the token as the X-Api-Key header to the request.
+func addAuthHeader(req *http.Request, token string) { + req.Header.Add(headerApiKey, token) +} + +func unmarshalResponse(resp *http.Response, data interface{}) error { + if resp == nil { + return fmt.Errorf("http response is empty") + } + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("error reading response body : %w", err) + } + err = json.Unmarshal(body, data) + if err != nil { + return fmt.Errorf("error deserializing response body : %w", err) + } + return nil +} + +func mapStatusCodeToError(statusCode int) error { + switch { + case statusCode == 500: + return errHTTPInternal + case statusCode >= 500: + return fmt.Errorf("received server side error status code %d", statusCode) + case statusCode == 404: + return errHTTPNotFound + case statusCode == 400: + return errHTTPBadRequest + case statusCode == 409: + return errHTTPDuplicate + case statusCode >= 400: + return fmt.Errorf("received client side error status code %d", statusCode) + case statusCode >= 300: + return fmt.Errorf("received further action required status code %d", statusCode) + default: + // TODO: definitely more things to consider here ... + return nil + } +} diff --git a/internal/services/exporter/repository.go b/internal/services/exporter/repository.go new file mode 100644 index 0000000000..a7834ed93e --- /dev/null +++ b/internal/services/exporter/repository.go @@ -0,0 +1,268 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package exporter + +import ( + "context" + "encoding/base64" + "encoding/json" + "errors" + "fmt" + "net/url" + "strings" + "time" + + "github.com/harness/gitness/encrypt" + "github.com/harness/gitness/internal/api/controller/repo" + "github.com/harness/gitness/internal/sse" + "github.com/harness/gitness/types/enum" + "github.com/rs/zerolog/log" + + "github.com/harness/gitness/gitrpc" + "github.com/harness/gitness/internal/services/job" + "github.com/harness/gitness/internal/store" + gitnessurl "github.com/harness/gitness/internal/url" + "github.com/harness/gitness/types" +) + +var ( + // ErrNotFound is returned if no export data was found. 
+ ErrNotFound = errors.New("export not found") +) + +type Repository struct { + urlProvider *gitnessurl.Provider + git gitrpc.Interface + repoStore store.RepoStore + scheduler *job.Scheduler + encrypter encrypt.Encrypter + sseStreamer sse.Streamer +} + +type Input struct { + UID string `json:"uid"` + ID int64 `json:"id"` + Description string `json:"description"` + IsPublic bool `json:"is_public"` + HarnessCodeInfo HarnessCodeInfo `json:"harness_code_info"` +} + +type HarnessCodeInfo struct { + AccountId string `json:"account_id"` + ProjectIdentifier string `json:"project_identifier"` + OrgIdentifier string `json:"org_identifier"` + Token string `json:"token"` +} + +var _ job.Handler = (*Repository)(nil) + +const ( + exportJobMaxRetries = 1 + exportJobMaxDuration = 45 * time.Minute + exportRepoJobUid = "export_repo_%d" + exportSpaceJobUid = "export_space_%d" + jobType = "repository_export" +) + +var ErrJobRunning = errors.New("an export job is already running") + +func (r *Repository) Register(executor *job.Executor) error { + return executor.Register(jobType, r) +} + +func (r *Repository) RunManyForSpace( + ctx context.Context, + spaceId int64, + repos []*types.Repository, + harnessCodeInfo *HarnessCodeInfo, +) error { + jobGroupId := getJobGroupId(spaceId) + + jobs, err := r.scheduler.GetJobProgressForGroup(ctx, jobGroupId) + if err != nil { + return fmt.Errorf("cannot get job progress before starting: %w", err) + } + + if len(jobs) > 0 { + err = checkJobAlreadyRunning(jobs) + if err != nil { + return err + } + + n, err := r.scheduler.PurgeJobsByGroupId(ctx, jobGroupId) + if err != nil { + return err + } + log.Ctx(ctx).Info().Msgf("deleted %d old jobs", n) + } + + jobDefinitions := make([]job.Definition, len(repos)) + for i, repository := range repos { + repoJobData := Input{ + UID: repository.UID, + ID: repository.ID, + Description: repository.Description, + IsPublic: repository.IsPublic, + HarnessCodeInfo: *harnessCodeInfo, + } + + data, err := json.Marshal(repoJobData) + if err != nil { + return fmt.Errorf("failed to marshal job input json: %w", err) + } + strData := strings.TrimSpace(string(data)) + encryptedData, err := r.encrypter.Encrypt(strData) + if err != nil { + return fmt.Errorf("failed to encrypt job input: %w", err) + } + + jobUID := fmt.Sprintf(exportRepoJobUid, repository.ID) + + jobDefinitions[i] = job.Definition{ + UID: jobUID, + Type: jobType, + MaxRetries: exportJobMaxRetries, + Timeout: exportJobMaxDuration, + Data: base64.StdEncoding.EncodeToString(encryptedData), + } + } + + return r.scheduler.RunJobs(ctx, jobGroupId, jobDefinitions) +} + +func checkJobAlreadyRunning(jobs []types.JobProgress) error { + if jobs == nil { + return nil + } + for _, j := range jobs { + if !j.State.IsCompleted() { + return ErrJobRunning + } + } + return nil +} + +func getJobGroupId(spaceId int64) string { + return fmt.Sprintf(exportSpaceJobUid, spaceId) +} + +// Handle is the repository export background job handler.
+func (r *Repository) Handle(ctx context.Context, data string, _ job.ProgressReporter) (string, error) { + input, err := r.getJobInput(data) + if err != nil { + return "", err + } + harnessCodeInfo := input.HarnessCodeInfo + client, err := newHarnessCodeClient(r.urlProvider.GetHarnessCodeInternalUrl(), harnessCodeInfo.AccountId, harnessCodeInfo.OrgIdentifier, harnessCodeInfo.ProjectIdentifier, harnessCodeInfo.Token) + if err != nil { + return "", err + } + + repository, err := r.repoStore.Find(ctx, input.ID) + if err != nil { + return "", err + } + remoteRepo, err := client.CreateRepo(ctx, repo.CreateInput{ + UID: repository.UID, + DefaultBranch: repository.DefaultBranch, + Description: repository.Description, + IsPublic: repository.IsPublic, + Readme: false, + License: "", + GitIgnore: "", + }) + if err != nil { + r.publishSSE(ctx, repository) + return "", err + } + + urlWithToken, err := modifyUrl(remoteRepo.GitURL, harnessCodeInfo.Token) + if err != nil { + return "", err + } + + err = r.git.PushRemote(ctx, &gitrpc.PushRemoteParams{ + ReadParams: gitrpc.ReadParams{RepoUID: repository.GitUID}, + RemoteUrl: urlWithToken, + }) + if err != nil && !strings.Contains(err.Error(), "empty") { + errDelete := client.DeleteRepo(ctx, remoteRepo.UID) + if errDelete != nil { + log.Ctx(ctx).Err(errDelete).Msgf("failed to delete repo '%s' on harness", remoteRepo.UID) + } + r.publishSSE(ctx, repository) + return "", err + } + + log.Ctx(ctx).Info().Msgf("completed exporting repository '%s' to harness", repository.UID) + + r.publishSSE(ctx, repository) + + return "", nil +} + +func (r *Repository) publishSSE(ctx context.Context, repository *types.Repository) { + err := r.sseStreamer.Publish(ctx, repository.ParentID, enum.SSETypeRepositoryExportCompleted, repository) + if err != nil { + log.Ctx(ctx).Warn().Err(err).Msg("failed to publish export completion SSE") + } +} + +func (r *Repository) getJobInput(data string) (Input, error) { + encrypted, err := base64.StdEncoding.DecodeString(data) + if err != nil { + return Input{}, fmt.Errorf("failed to base64 decode job input: %w", err) + } + + decrypted, err := r.encrypter.Decrypt(encrypted) + if err != nil { + return Input{}, fmt.Errorf("failed to decrypt job input: %w", err) + } + + var input Input + + err = json.NewDecoder(strings.NewReader(decrypted)).Decode(&input) + if err != nil { + return Input{}, fmt.Errorf("failed to unmarshal job input json: %w", err) + } + + return input, nil +} + +func (r *Repository) GetProgressForSpace(ctx context.Context, spaceID int64) ([]types.JobProgress, error) { + groupID := getJobGroupId(spaceID) + progress, err := r.scheduler.GetJobProgressForGroup(ctx, groupID) + if err != nil { + return nil, fmt.Errorf("failed to get job progress for group: %w", err) + } + + if len(progress) == 0 { + return nil, ErrNotFound + } + + return progress, nil +} + +func modifyUrl(u string, token string) (string, error) { + parsedUrl, err := url.Parse(u) + if err != nil { + return "", fmt.Errorf("failed to parse repository git URL: %w", err) + } + + // Set the username and password in the URL + parsedUrl.User = url.UserPassword("token", token) + return parsedUrl.String(), nil +} diff --git a/internal/services/exporter/wire.go b/internal/services/exporter/wire.go new file mode 100644 index 0000000000..ee5825b4a5 --- /dev/null +++ b/internal/services/exporter/wire.go @@ -0,0 +1,41 @@ +package exporter + +import ( + "github.com/google/wire" + "github.com/harness/gitness/encrypt" + "github.com/harness/gitness/gitrpc" + 
"github.com/harness/gitness/internal/services/job" + "github.com/harness/gitness/internal/sse" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/internal/url" +) + +var WireSet = wire.NewSet( + ProvideSpaceExporter, +) + +func ProvideSpaceExporter( + urlProvider *url.Provider, + git gitrpc.Interface, + repoStore store.RepoStore, + scheduler *job.Scheduler, + executor *job.Executor, + encrypter encrypt.Encrypter, + sseStreamer sse.Streamer, +) (*Repository, error) { + exporter := &Repository{ + urlProvider: urlProvider, + git: git, + repoStore: repoStore, + scheduler: scheduler, + encrypter: encrypter, + sseStreamer: sseStreamer, + } + + err := executor.Register(jobType, exporter) + if err != nil { + return nil, err + } + + return exporter, nil +} diff --git a/internal/services/importer/id.go b/internal/services/importer/id.go new file mode 100644 index 0000000000..9847da4693 --- /dev/null +++ b/internal/services/importer/id.go @@ -0,0 +1,34 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package importer + +import ( + "strconv" + "strings" +) + +const jobIDPrefix = "import-repo-" + +func JobIDFromRepoID(repoID int64) string { + return jobIDPrefix + strconv.FormatInt(repoID, 10) +} + +func RepoIDFromJobID(jobID string) int64 { + if !strings.HasPrefix(jobID, jobIDPrefix) { + return 0 + } + repoID, _ := strconv.ParseInt(jobID[len(jobIDPrefix):], 10, 64) + return repoID +} diff --git a/internal/services/importer/pipelines.go b/internal/services/importer/pipelines.go new file mode 100644 index 0000000000..d104510c61 --- /dev/null +++ b/internal/services/importer/pipelines.go @@ -0,0 +1,246 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package importer + +import ( + "context" + "fmt" + "path" + "strings" + "time" + + "github.com/harness/gitness/gitrpc" + "github.com/harness/gitness/store/database/dbtx" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/drone/go-convert/convert/bitbucket" + "github.com/drone/go-convert/convert/circle" + "github.com/drone/go-convert/convert/drone" + "github.com/drone/go-convert/convert/github" + "github.com/drone/go-convert/convert/gitlab" + "github.com/rs/zerolog/log" +) + +type pipelineFile struct { + Name string + OriginalPath string + ConvertedPath string + Content []byte +} + +func (r *Repository) processPipelines(ctx context.Context, + principal *types.Principal, + repo *types.Repository, + commitMessage string, +) error { + writeParams, err := r.createRPCWriteParams(ctx, principal, repo) + if err != nil { + return err + } + + pipelineFiles := r.convertPipelines(ctx, principal, repo) + if len(pipelineFiles) == 0 { + return nil + } + + actions := make([]gitrpc.CommitFileAction, len(pipelineFiles)) + for i, file := range pipelineFiles { + actions[i] = gitrpc.CommitFileAction{ + Action: gitrpc.CreateAction, + Path: file.ConvertedPath, + Payload: file.Content, + SHA: "", + } + } + + now := time.Now() + identity := &gitrpc.Identity{ + Name: principal.DisplayName, + Email: principal.Email, + } + + _, err = r.git.CommitFiles(ctx, &gitrpc.CommitFilesParams{ + WriteParams: writeParams, + Title: commitMessage, + Message: "", + Branch: repo.DefaultBranch, + NewBranch: repo.DefaultBranch, + Actions: actions, + Committer: identity, + CommitterDate: &now, + Author: identity, + AuthorDate: &now, + }) + if err != nil { + return fmt.Errorf("failed to commit converted pipeline files: %w", err) + } + + nowMilli := now.UnixMilli() + + err = dbtx.New(r.db).WithTx(ctx, func(ctx context.Context) error { + for _, p := range pipelineFiles { + pipeline := &types.Pipeline{ + Description: "", + RepoID: repo.ID, + UID: p.Name, + CreatedBy: principal.ID, + Seq: 0, + DefaultBranch: repo.DefaultBranch, + ConfigPath: p.ConvertedPath, + Created: nowMilli, + Updated: nowMilli, + Version: 0, + } + + err = r.pipelineStore.Create(ctx, pipeline) + if err != nil { + return fmt.Errorf("pipeline creation failed: %w", err) + } + + // Create a default trigger for each imported pipeline. + // The default trigger fires on pull request created, reopened or branch updated events. + trigger := &types.Trigger{ + Description: "auto-created trigger on pipeline conversion", + Created: nowMilli, + Updated: nowMilli, + PipelineID: pipeline.ID, + RepoID: pipeline.RepoID, + CreatedBy: principal.ID, + UID: "default", + Actions: []enum.TriggerAction{enum.TriggerActionPullReqCreated, + enum.TriggerActionPullReqReopened, enum.TriggerActionPullReqBranchUpdated}, + Disabled: false, + Version: 0, + } + err = r.triggerStore.Create(ctx, trigger) + if err != nil { + return fmt.Errorf("failed to create auto trigger on pipeline creation: %w", err) + } + } + + return nil + }, dbtx.TxDefault) + if err != nil { + return fmt.Errorf("failed to insert pipelines and triggers: %w", err) + } + + return nil +} + +// convertPipelines converts pipelines found in the repository. +// Note: For GitHub Actions, there can be multiple pipeline files.
+func (r *Repository) convertPipelines(ctx context.Context, + principal *types.Principal, + repo *types.Repository, +) []pipelineFile { + const maxSize = 65536 + + match := func(dirPath, regExpDef string) []pipelineFile { + files, err := r.matchFiles(ctx, repo, repo.DefaultBranch, dirPath, regExpDef, maxSize) + if err != nil { + log.Ctx(ctx).Warn().Err(err).Msgf("failed to find pipeline file(s) '%s' in '%s'", + regExpDef, dirPath) + return nil + } + return files + } + + if files := match("", ".drone.yml"); len(files) > 0 { + converted := convertPipelineFiles(ctx, files, func() pipelineConverter { return drone.New() }) + if len(converted) > 0 { + return converted + } + } + + if files := match("", "bitbucket-pipelines.yml"); len(files) > 0 { + converted := convertPipelineFiles(ctx, files, func() pipelineConverter { return bitbucket.New() }) + if len(converted) > 0 { + return converted + } + } + + if files := match("", ".gitlab-ci.yml"); len(files) > 0 { + converted := convertPipelineFiles(ctx, files, func() pipelineConverter { return gitlab.New() }) + if len(converted) > 0 { + return converted + } + } + + if files := match(".circleci", "config.yml"); len(files) > 0 { + converted := convertPipelineFiles(ctx, files, func() pipelineConverter { return circle.New() }) + if len(converted) > 0 { + return converted + } + } + + filesYML := match(".github/workflows", "*.yml") + filesYAML := match(".github/workflows", "*.yaml") + files := append(filesYML, filesYAML...) + converted := convertPipelineFiles(ctx, files, func() pipelineConverter { return github.New() }) + if len(converted) > 0 { + return converted + } + + return nil +} + +type pipelineConverter interface { + ConvertBytes([]byte) ([]byte, error) +} + +func convertPipelineFiles(ctx context.Context, + files []pipelineFile, + gen func() pipelineConverter, +) []pipelineFile { + const ( + harnessPipelineName = "pipeline" + harnessPipelineNameOnly = "default-" + harnessPipelineName + harnessPipelineDir = ".harness" + harnessPipelineFileOnly = harnessPipelineDir + "/pipeline.yaml" + ) + + result := make([]pipelineFile, 0, len(files)) + for _, file := range files { + data, err := gen().ConvertBytes(file.Content) + if err != nil { + log.Ctx(ctx).Warn().Err(err).Msgf("failed to convert pipeline file %s", file.OriginalPath) + continue + } + + var pipelineName string + var pipelinePath string + + if len(files) == 1 { + pipelineName = harnessPipelineNameOnly + pipelinePath = harnessPipelineFileOnly + } else { + base := path.Base(file.OriginalPath) + base = strings.TrimSuffix(base, path.Ext(base)) + pipelineName = harnessPipelineName + "-" + base + pipelinePath = harnessPipelineDir + "/" + base + ".yaml" + } + + result = append(result, pipelineFile{ + Name: pipelineName, + OriginalPath: file.OriginalPath, + ConvertedPath: pipelinePath, + Content: data, + }) + } + + return result +} diff --git a/internal/services/importer/provider.go b/internal/services/importer/provider.go new file mode 100644 index 0000000000..d09726bf1d --- /dev/null +++ b/internal/services/importer/provider.go @@ -0,0 +1,246 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License.
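convertPipelineFiles above drives every converter through the same ConvertBytes method. As a rough standalone illustration of a single conversion, the sketch below feeds a small .drone.yml to the drone converter; the sample YAML and the printing are made up, only the drone.New().ConvertBytes call is taken from the code above.

package main

import (
	"fmt"

	"github.com/drone/go-convert/convert/drone"
)

func main() {
	source := []byte(`kind: pipeline
type: docker
name: default
steps:
- name: test
  image: golang
  commands:
  - go test ./...
`)

	// convert the Drone pipeline into Harness pipeline YAML
	converted, err := drone.New().ConvertBytes(source)
	if err != nil {
		fmt.Println("conversion failed:", err)
		return
	}
	fmt.Println(string(converted)) // the importer would commit this under .harness/
}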
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package importer + +import ( + "context" + "crypto/sha512" + "encoding/base32" + "fmt" + "net/http" + "time" + + "github.com/harness/gitness/internal/api/usererror" + "github.com/harness/gitness/types" + + "github.com/drone/go-scm/scm" + "github.com/drone/go-scm/scm/driver/github" + "github.com/drone/go-scm/scm/driver/gitlab" + "github.com/drone/go-scm/scm/transport/oauth2" +) + +type ProviderType string + +const ( + ProviderTypeGitHub ProviderType = "github" + ProviderTypeGitLab ProviderType = "gitlab" +) + +func (p ProviderType) Enum() []any { + return []any{ + ProviderTypeGitHub, + ProviderTypeGitLab, + } +} + +type Provider struct { + Type ProviderType `json:"type"` + Host string `json:"host"` + Username string `json:"username"` + Password string `json:"password"` +} + +type RepositoryInfo struct { + Space string + UID string + CloneURL string + IsPublic bool + DefaultBranch string +} + +// ToRepo converts the RepositoryInfo into the types.Repository object marked as being imported. +func (r *RepositoryInfo) ToRepo( + spaceID int64, + uid string, + description string, + principal *types.Principal, +) *types.Repository { + now := time.Now().UnixMilli() + gitTempUID := fmt.Sprintf("importing-%s-%d", hash(fmt.Sprintf("%d:%s", spaceID, uid)), now) + return &types.Repository{ + Version: 0, + ParentID: spaceID, + UID: uid, + GitUID: gitTempUID, // the correct git UID will be set by the job handler + Description: description, + IsPublic: r.IsPublic, + CreatedBy: principal.ID, + Created: now, + Updated: now, + ForkID: 0, + DefaultBranch: r.DefaultBranch, + Importing: true, + } +} + +func hash(s string) string { + h := sha512.New() + _, _ = h.Write([]byte(s)) + return base32.StdEncoding.EncodeToString(h.Sum(nil)[:10]) +} + +func getClient(provider Provider, authReq bool) (*scm.Client, error) { + if authReq && (provider.Username == "" || provider.Password == "") { + return nil, usererror.BadRequest("scm provider authentication credentials missing") + } + + var c *scm.Client + var err error + + switch provider.Type { + case "": + return nil, usererror.BadRequest("scm provider cannot be empty") + + case ProviderTypeGitHub: + if provider.Host != "" { + c, err = github.New(provider.Host) + if err != nil { + return nil, usererror.BadRequestf("scm provider Host invalid: %s", err.Error()) + } + } else { + c = github.NewDefault() + } + + case ProviderTypeGitLab: + if provider.Host != "" { + c, err = gitlab.New(provider.Host) + if err != nil { + return nil, usererror.BadRequestf("scm provider Host invalid: %s", err.Error()) + } + } else { + c = gitlab.NewDefault() + } + + default: + return nil, usererror.BadRequestf("unsupported scm provider: %s", provider.Type) + } + + if provider.Password != "" { + c.Client = &http.Client{ + Transport: &oauth2.Transport{ + Source: oauth2.StaticTokenSource(&scm.Token{Token: provider.Password}), + }, + } + } + + return c, nil +} +func LoadRepositoryFromProvider(ctx context.Context, provider Provider, repoSlug string) (RepositoryInfo, error) { + scmClient, err := getClient(provider, false) + if err != nil { + return RepositoryInfo{}, err + } + + if
repoSlug == "" { + return RepositoryInfo{}, usererror.BadRequest("provider repository identifier is missing") + } + + scmRepo, scmResp, err := scmClient.Repositories.Find(ctx, repoSlug) + if err = convertSCMError(provider, repoSlug, scmResp, err); err != nil { + return RepositoryInfo{}, err + } + + return RepositoryInfo{ + Space: scmRepo.Namespace, + UID: scmRepo.Name, + CloneURL: scmRepo.Clone, + IsPublic: !scmRepo.Private, + DefaultBranch: scmRepo.Branch, + }, nil +} + +func LoadRepositoriesFromProviderSpace(ctx context.Context, provider Provider, spaceSlug string) ([]RepositoryInfo, error) { + scmClient, err := getClient(provider, true) + if err != nil { + return nil, err + } + + if spaceSlug == "" { + return nil, usererror.BadRequest("provider space identifier is missing") + } + + const pageSize = 100 + opts := scm.RepoListOptions{ + ListOptions: scm.ListOptions{ + Page: 0, + Size: pageSize, + }, + RepoSearchTerm: scm.RepoSearchTerm{ + User: spaceSlug, + }, + } + + repos := make([]RepositoryInfo, 0) + for { + opts.Page++ + + scmRepos, scmResp, err := scmClient.Repositories.ListV2(ctx, opts) + if err = convertSCMError(provider, spaceSlug, scmResp, err); err != nil { + return nil, err + } + + if len(scmRepos) == 0 { + break + } + + for _, scmRepo := range scmRepos { + // in some cases the namespace filter isn't working (e.g. Gitlab) + if scmRepo.Namespace != spaceSlug { + continue + } + + repos = append(repos, RepositoryInfo{ + Space: scmRepo.Namespace, + UID: scmRepo.Name, + CloneURL: scmRepo.Clone, + IsPublic: !scmRepo.Private, + DefaultBranch: scmRepo.Branch, + }) + } + } + + return repos, nil +} + +func convertSCMError(provider Provider, slug string, r *scm.Response, err error) error { + if err == nil { + return nil + } + + if r == nil { + if provider.Host != "" { + return usererror.BadRequestf("failed to make HTTP request to %s (host=%s): %s", + provider.Type, provider.Host, err) + } else { + return usererror.BadRequestf("failed to make HTTP request to %s: %s", + provider.Type, err) + } + } + + switch r.Status { + case http.StatusNotFound: + return usererror.BadRequestf("couldn't find %s at %s: %s", + slug, provider.Type, err.Error()) + case http.StatusUnauthorized: + return usererror.BadRequestf("bad credentials provided for %s at %s: %s", + slug, provider.Type, err.Error()) + case http.StatusForbidden: + return usererror.BadRequestf("access denied to %s at %s: %s", + slug, provider.Type, err.Error()) + default: + return usererror.BadRequestf("failed to fetch %s from %s (HTTP status %d): %s", + slug, provider.Type, r.Status, err.Error()) + } +} diff --git a/internal/services/importer/repository.go b/internal/services/importer/repository.go new file mode 100644 index 0000000000..48be610cd9 --- /dev/null +++ b/internal/services/importer/repository.go @@ -0,0 +1,460 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package importer + +import ( + "context" + "encoding/base64" + "encoding/json" + "errors" + "fmt" + "net/url" + "strings" + "time" + + "github.com/harness/gitness/encrypt" + "github.com/harness/gitness/gitrpc" + "github.com/harness/gitness/internal/bootstrap" + "github.com/harness/gitness/internal/githook" + "github.com/harness/gitness/internal/services/job" + "github.com/harness/gitness/internal/sse" + "github.com/harness/gitness/internal/store" + gitnessurl "github.com/harness/gitness/internal/url" + gitness_store "github.com/harness/gitness/store" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/jmoiron/sqlx" + "github.com/rs/zerolog/log" +) + +const ( + importJobMaxRetries = 0 + importJobMaxDuration = 45 * time.Minute +) + +var ( + // ErrNotFound is returned if no import data was found. + ErrNotFound = errors.New("import not found") +) + +type Repository struct { + defaultBranch string + urlProvider *gitnessurl.Provider + git gitrpc.Interface + db *sqlx.DB + repoStore store.RepoStore + pipelineStore store.PipelineStore + triggerStore store.TriggerStore + encrypter encrypt.Encrypter + scheduler *job.Scheduler + sseStreamer sse.Streamer +} + +var _ job.Handler = (*Repository)(nil) + +type Input struct { + RepoID int64 `json:"repo_id"` + GitUser string `json:"git_user"` + GitPass string `json:"git_pass"` + CloneURL string `json:"clone_url"` +} + +const jobType = "repository_import" + +func (r *Repository) Register(executor *job.Executor) error { + return executor.Register(jobType, r) +} + +// Run starts a background job that imports the provided repository from the provided clone URL. +func (r *Repository) Run(ctx context.Context, provider Provider, repo *types.Repository, cloneURL string) error { + jobDef, err := r.getJobDef(JobIDFromRepoID(repo.ID), Input{ + RepoID: repo.ID, + GitUser: provider.Username, + GitPass: provider.Password, + CloneURL: cloneURL, + }) + if err != nil { + return err + } + + return r.scheduler.RunJob(ctx, jobDef) +} + +// RunMany starts background jobs that import the provided repositories from the provided clone URLs. 
+func (r *Repository) RunMany(ctx context.Context, + groupID string, + provider Provider, + repoIDs []int64, + cloneURLs []string, +) error { + if len(repoIDs) != len(cloneURLs) { + return fmt.Errorf("slice length mismatch: have %d repositories and %d clone URLs", + len(repoIDs), len(cloneURLs)) + } + + n := len(repoIDs) + defs := make([]job.Definition, n) + + for k := 0; k < n; k++ { + repoID := repoIDs[k] + cloneURL := cloneURLs[k] + + jobDef, err := r.getJobDef(JobIDFromRepoID(repoID), Input{ + RepoID: repoID, + GitUser: provider.Username, + GitPass: provider.Password, + CloneURL: cloneURL, + }) + if err != nil { + return err + } + + defs[k] = jobDef + } + + err := r.scheduler.RunJobs(ctx, groupID, defs) + if err != nil { + return fmt.Errorf("failed to run jobs: %w", err) + } + + return nil +} + +func (r *Repository) getJobDef(jobUID string, input Input) (job.Definition, error) { + data, err := json.Marshal(input) + if err != nil { + return job.Definition{}, fmt.Errorf("failed to marshal job input json: %w", err) + } + + strData := strings.TrimSpace(string(data)) + + encryptedData, err := r.encrypter.Encrypt(strData) + if err != nil { + return job.Definition{}, fmt.Errorf("failed to encrypt job input: %w", err) + } + + return job.Definition{ + UID: jobUID, + Type: jobType, + MaxRetries: importJobMaxRetries, + Timeout: importJobMaxDuration, + Data: base64.StdEncoding.EncodeToString(encryptedData), + }, nil +} + +func (r *Repository) getJobInput(data string) (Input, error) { + encrypted, err := base64.StdEncoding.DecodeString(data) + if err != nil { + return Input{}, fmt.Errorf("failed to base64 decode job input: %w", err) + } + + decrypted, err := r.encrypter.Decrypt(encrypted) + if err != nil { + return Input{}, fmt.Errorf("failed to decrypt job input: %w", err) + } + + var input Input + + err = json.NewDecoder(strings.NewReader(decrypted)).Decode(&input) + if err != nil { + return Input{}, fmt.Errorf("failed to unmarshal job input json: %w", err) + } + + return input, nil +} + +// Handle is repository import background job handler. +func (r *Repository) Handle(ctx context.Context, data string, _ job.ProgressReporter) (string, error) { + systemPrincipal := bootstrap.NewSystemServiceSession().Principal + + input, err := r.getJobInput(data) + if err != nil { + return "", err + } + + if input.CloneURL == "" { + return "", errors.New("missing git repository clone URL") + } + + repoURL, err := url.Parse(input.CloneURL) + if err != nil { + return "", fmt.Errorf("failed to parse git clone URL: %w", err) + } + + repoURL.User = url.UserPassword(input.GitUser, input.GitPass) + cloneURLWithAuth := repoURL.String() + + repo, err := r.repoStore.Find(ctx, input.RepoID) + if err != nil { + return "", fmt.Errorf("failed to find repo by id: %w", err) + } + + if !repo.Importing { + return "", fmt.Errorf("repository %s is not being imported", repo.UID) + } + + log := log.Ctx(ctx).With(). + Int64("repo.id", repo.ID). + Str("repo.path", repo.Path). 
+ Logger() + + log.Info().Msg("create git repository") + + gitUID, err := r.createGitRepository(ctx, &systemPrincipal, repo.ID) + if err != nil { + return "", fmt.Errorf("failed to create empty git repository: %w", err) + } + + log.Info().Msgf("successfully created git repository with git_uid '%s'", gitUID) + + err = func() error { + repo.GitUID = gitUID + + log.Info().Msg("sync repository") + + defaultBranch, err := r.syncGitRepository(ctx, &systemPrincipal, repo, cloneURLWithAuth) + if err != nil { + return fmt.Errorf("failed to sync git repository from '%s': %w", input.CloneURL, err) + } + + log.Info().Msgf("successfully synced repository (returned default branch: '%s')", defaultBranch) + + if defaultBranch == "" { + defaultBranch = r.defaultBranch + } + + log.Info().Msg("update repo in DB") + + repo, err = r.repoStore.UpdateOptLock(ctx, repo, func(repo *types.Repository) error { + if !repo.Importing { + return errors.New("repository has already finished importing") + } + + repo.GitUID = gitUID + repo.DefaultBranch = defaultBranch + repo.Importing = false + + return nil + }) + if err != nil { + return fmt.Errorf("failed to update repository after import: %w", err) + } + + const convertPipelinesCommitMessage = "autoconvert pipeline" + err = r.processPipelines(ctx, &systemPrincipal, repo, convertPipelinesCommitMessage) + if err != nil { + log.Warn().Err(err).Msg("failed to convert pipelines") + } + + return nil + }() + if err != nil { + log.Error().Err(err).Msg("failed repository import - cleanup git repository") + + if errDel := r.deleteGitRepository(ctx, &systemPrincipal, repo); errDel != nil { + log.Warn().Err(errDel). + Msg("failed to delete git repository after failed import") + } + + return "", fmt.Errorf("failed to import repository: %w", err) + } + + err = r.sseStreamer.Publish(ctx, repo.ParentID, enum.SSETypeRepositoryImportCompleted, repo) + if err != nil { + log.Warn().Err(err).Msg("failed to publish import completion SSE") + } + + log.Info().Msg("completed repository import") + + return "", nil +} + +func (r *Repository) GetProgress(ctx context.Context, repo *types.Repository) (types.JobProgress, error) { + progress, err := r.scheduler.GetJobProgress(ctx, JobIDFromRepoID(repo.ID)) + if errors.Is(err, gitness_store.ErrResourceNotFound) { + if repo.Importing { + // if the job is not found but repo is marked as importing, return state=failed + return job.FailProgress(), nil + } + + // otherwise there either was no import, or it completed a long time ago (job cleaned up by now) + return types.JobProgress{}, ErrNotFound + } + if err != nil { + return types.JobProgress{}, fmt.Errorf("failed to get job progress: %w", err) + } + + return progress, nil +} + +func (r *Repository) Cancel(ctx context.Context, repo *types.Repository) error { + if !repo.Importing { + return nil + } + + err := r.scheduler.CancelJob(ctx, JobIDFromRepoID(repo.ID)) + if err != nil { + return fmt.Errorf("failed to cancel job: %w", err) + } + + return nil +} + +func (r *Repository) createGitRepository(ctx context.Context, + principal *types.Principal, + repoID int64, +) (string, error) { + now := time.Now() + + envVars, err := r.createEnvVars(ctx, principal, repoID) + if err != nil { + return "", err + } + + resp, err := r.git.CreateRepository(ctx, &gitrpc.CreateRepositoryParams{ + Actor: gitrpc.Identity{ + Name: principal.DisplayName, + Email: principal.Email, + }, + EnvVars: envVars, + DefaultBranch: r.defaultBranch, + Files: nil, + Author: &gitrpc.Identity{ + Name: principal.DisplayName, + Email: 
principal.Email, + }, + AuthorDate: &now, + Committer: &gitrpc.Identity{ + Name: principal.DisplayName, + Email: principal.Email, + }, + CommitterDate: &now, + }) + if err != nil { + return "", fmt.Errorf("failed to create empty git repository: %w", err) + } + + return resp.UID, nil +} + +func (r *Repository) syncGitRepository(ctx context.Context, + principal *types.Principal, + repo *types.Repository, + sourceCloneURL string, +) (string, error) { + writeParams, err := r.createRPCWriteParams(ctx, principal, repo) + if err != nil { + return "", err + } + + syncOut, err := r.git.SyncRepository(ctx, &gitrpc.SyncRepositoryParams{ + WriteParams: writeParams, + Source: sourceCloneURL, + CreateIfNotExists: false, + }) + if err != nil { + return "", fmt.Errorf("failed to sync repository: %w", err) + } + + return syncOut.DefaultBranch, nil +} + +func (r *Repository) deleteGitRepository(ctx context.Context, + principal *types.Principal, + repo *types.Repository, +) error { + writeParams, err := r.createRPCWriteParams(ctx, principal, repo) + if err != nil { + return err + } + + err = r.git.DeleteRepository(ctx, &gitrpc.DeleteRepositoryParams{ + WriteParams: writeParams, + }) + if err != nil { + return fmt.Errorf("failed to delete git repository: %w", err) + } + + return nil +} + +func (r *Repository) matchFiles(ctx context.Context, + repo *types.Repository, + ref string, + dirPath string, + pattern string, + maxSize int, +) ([]pipelineFile, error) { + resp, err := r.git.MatchFiles(ctx, &gitrpc.MatchFilesParams{ + ReadParams: gitrpc.ReadParams{RepoUID: repo.GitUID}, + Ref: ref, + DirPath: dirPath, + Pattern: pattern, + MaxSize: maxSize, + }) + if err != nil { + return nil, fmt.Errorf("failed to convert pipelines: %w", err) + } + + pipelines := make([]pipelineFile, len(resp.Files)) + for i, pipeline := range resp.Files { + pipelines[i] = pipelineFile{ + Name: "", + OriginalPath: pipeline.Path, + ConvertedPath: "", + Content: pipeline.Content, + } + } + + return pipelines, nil +} + +func (r *Repository) createRPCWriteParams(ctx context.Context, + principal *types.Principal, + repo *types.Repository, +) (gitrpc.WriteParams, error) { + envVars, err := r.createEnvVars(ctx, principal, repo.ID) + if err != nil { + return gitrpc.WriteParams{}, err + } + + return gitrpc.WriteParams{ + Actor: gitrpc.Identity{ + Name: principal.DisplayName, + Email: principal.Email, + }, + RepoUID: repo.GitUID, + EnvVars: envVars, + }, nil +} + +func (r *Repository) createEnvVars(ctx context.Context, + principal *types.Principal, + repoID int64, +) (map[string]string, error) { + envVars, err := githook.GenerateEnvironmentVariables( + ctx, + r.urlProvider.GetAPIBaseURLInternal(), + repoID, + principal.ID, + false, + ) + if err != nil { + return nil, fmt.Errorf("failed to generate git hook environment variables: %w", err) + } + + return envVars, nil +} diff --git a/internal/services/importer/wire.go b/internal/services/importer/wire.go new file mode 100644 index 0000000000..dc2ef98d46 --- /dev/null +++ b/internal/services/importer/wire.go @@ -0,0 +1,66 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package importer + +import ( + "github.com/harness/gitness/encrypt" + "github.com/harness/gitness/gitrpc" + "github.com/harness/gitness/internal/services/job" + "github.com/harness/gitness/internal/sse" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/internal/url" + "github.com/harness/gitness/types" + "github.com/jmoiron/sqlx" + + "github.com/google/wire" +) + +var WireSet = wire.NewSet( + ProvideRepoImporter, +) + +func ProvideRepoImporter( + config *types.Config, + urlProvider *url.Provider, + git gitrpc.Interface, + db *sqlx.DB, + repoStore store.RepoStore, + pipelineStore store.PipelineStore, + triggerStore store.TriggerStore, + encrypter encrypt.Encrypter, + scheduler *job.Scheduler, + executor *job.Executor, + sseStreamer sse.Streamer, +) (*Repository, error) { + importer := &Repository{ + defaultBranch: config.Git.DefaultBranch, + urlProvider: urlProvider, + git: git, + db: db, + repoStore: repoStore, + pipelineStore: pipelineStore, + triggerStore: triggerStore, + encrypter: encrypter, + scheduler: scheduler, + sseStreamer: sseStreamer, + } + + err := executor.Register(jobType, importer) + if err != nil { + return nil, err + } + + return importer, nil +} diff --git a/internal/services/job/definition.go b/internal/services/job/definition.go new file mode 100644 index 0000000000..4407a1b6e2 --- /dev/null +++ b/internal/services/job/definition.go @@ -0,0 +1,77 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
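+// Example (illustrative sketch): a caller schedules a background job by filling in a
+// Definition and passing it to Scheduler.RunJob. The values below mirror the repository
+// importer above; ctx, scheduler and encodedInput are assumed to exist in the caller's scope.
+//
+//	def := Definition{
+//		UID:        "import-repo-42",  // hypothetical unique job identifier
+//		Type:       "repository_import",
+//		MaxRetries: 0,                 // no retries, matching importJobMaxRetries
+//		Timeout:    45 * time.Minute,
+//		Data:       encodedInput,      // opaque payload that the job Handler knows how to decode
+//	}
+//	if err := scheduler.RunJob(ctx, def); err != nil {
+//		// handle the scheduling error
+//	}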
+ +package job + +import ( + "errors" + "time" + + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +type Definition struct { + UID string + Type string + MaxRetries int + Timeout time.Duration + Data string +} + +func (def *Definition) Validate() error { + if def.Type == "" { + return errors.New("job Type must not be empty") + } + + if def.UID == "" { + return errors.New("job must have unique identifier") + } + + if def.MaxRetries < 0 { + return errors.New("job MaxRetries must be positive") + } + + if def.Timeout < time.Second { + return errors.New("job Timeout too short") + } + + return nil +} + +func (def *Definition) toNewJob() *types.Job { + nowMilli := time.Now().UnixMilli() + return &types.Job{ + UID: def.UID, + Created: nowMilli, + Updated: nowMilli, + Type: def.Type, + Priority: enum.JobPriorityNormal, + Data: def.Data, + Result: "", + MaxDurationSeconds: int(def.Timeout / time.Second), + MaxRetries: def.MaxRetries, + State: enum.JobStateScheduled, + Scheduled: nowMilli, + TotalExecutions: 0, + RunBy: "", + RunDeadline: nowMilli, + RunProgress: ProgressMin, + LastExecuted: 0, // never executed + IsRecurring: false, + RecurringCron: "", + ConsecutiveFailures: 0, + LastFailureError: "", + } +} diff --git a/internal/services/job/executor.go b/internal/services/job/executor.go new file mode 100644 index 0000000000..67008639fb --- /dev/null +++ b/internal/services/job/executor.go @@ -0,0 +1,161 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package job + +import ( + "context" + "errors" + "fmt" + "runtime/debug" + "time" + + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/pubsub" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/rs/zerolog/log" +) + +// Executor holds map of Handler objects per each job type registered. +// The Scheduler uses the Executor to start execution of jobs. +type Executor struct { + handlerMap map[string]Handler + handlerComplete bool + store store.JobStore + publisher pubsub.Publisher +} + +const ( + ProgressMin = 0 + ProgressMax = 100 +) + +// ProgressReporter can be used by a job Handler to report back the execution progress. +type ProgressReporter func(progress int, result string) error + +// Handler is a job executor for a specific job type. +// An implementation should try to honor the context and +// try to abort the execution as soon as the context is done. +type Handler interface { + Handle(ctx context.Context, input string, fn ProgressReporter) (result string, err error) +} + +var noHandlerDefinedError = errors.New("no handler registered for the job type") + +// NewExecutor creates new Executor. +func NewExecutor(jobStore store.JobStore, publisher pubsub.Publisher) *Executor { + return &Executor{ + handlerMap: make(map[string]Handler), + handlerComplete: false, + store: jobStore, + publisher: publisher, + } +} + +// Register registers a job Handler for the provided job type. 
+// This function is not thread safe. All calls are expected to be made +// in a single thread during the application boot time. +func (e *Executor) Register(jobType string, exec Handler) error { + if jobType == "" { + return errors.New("jobType must not be empty") + } + + if e.handlerComplete { + return errors.New("job handler registration is complete") + } + + if exec == nil { + return errors.New("provided Handler is nil") + } + + if _, ok := e.handlerMap[jobType]; ok { + return fmt.Errorf("a Handler is already defined to run the '%s' job types", jobType) + } + + e.handlerMap[jobType] = exec + + return nil +} + +// finishRegistration forbids further registration of job types. +// It is called by the Scheduler when it starts. +func (e *Executor) finishRegistration() { + e.handlerComplete = true +} + +// exec runs a single job. This function is synchronous, +// so the caller is responsible to run it in a separate go-routine. +func (e *Executor) exec( + ctx context.Context, + jobUID, jobType string, + input string, +) (result string, err error) { + defer func() { + if r := recover(); r != nil { + err = fmt.Errorf( + "panic while processing job=%s type=%s: %v\n%s", + jobUID, jobType, r, debug.Stack()) + } + }() + + exec, ok := e.handlerMap[jobType] + if !ok { + return "", noHandlerDefinedError + } + + // progressReporter is the function with which the job can update its progress. + // This function will be executed in the job executor's Go-routine. + // It uses the job's context. + progressReporter := func(progress int, result string) error { + if progress < ProgressMin || progress > ProgressMax { + return errors.New("progress must be between 0 and 100") + } + + jobDummy := &types.Job{ + UID: jobUID, + Updated: time.Now().UnixMilli(), + Result: result, + State: enum.JobStateRunning, + RunProgress: progress, + } + + // This doesn't need to be behind the global lock because it only updates the single row. + // While a job is running no other process should touch it. + // Even this call will fail if the context deadline has been exceeded. + // The job parameter is a dummy types.Job object that just holds fields that should be updated. + if err := e.store.UpdateProgress(ctx, jobDummy); err != nil { + return err + } + + // tell everybody that a job progress has been updated + if err := publishStateChange(ctx, e.publisher, jobDummy); err != nil { + log.Err(err).Msg("failed to publish job state change") + } + + return nil + } + + return exec.Handle(ctx, input, progressReporter) // runs the job +} + +func FailProgress() types.JobProgress { + return types.JobProgress{ + State: enum.JobStateFailed, + Progress: ProgressMax, + Result: "", + Failure: "", + } +} diff --git a/internal/services/job/job_overdue.go b/internal/services/job/job_overdue.go new file mode 100644 index 0000000000..5ee890e55b --- /dev/null +++ b/internal/services/job/job_overdue.go @@ -0,0 +1,99 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
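+// Example (illustrative sketch): jobOverdue below is one Handler implementation that the
+// scheduler wires in for itself. A minimal external Handler and its registration could look
+// roughly like this; greetingJob and the executor variable are assumed, while Handler,
+// ProgressReporter and Executor.Register are defined in executor.go.
+//
+//	type greetingJob struct{}
+//
+//	func (greetingJob) Handle(ctx context.Context, input string, report ProgressReporter) (string, error) {
+//		_ = report(50, "half way done") // optional progress update, persisted on the job row
+//		return "hello " + input, nil    // the returned string is stored as the job result
+//	}
+//
+//	// registration must happen during boot, before the Scheduler starts (see finishRegistration):
+//	if err := executor.Register("greeting", greetingJob{}); err != nil {
+//		// handle the registration error
+//	}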
+ +package job + +import ( + "context" + "fmt" + "time" + + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/lock" + "github.com/harness/gitness/types/enum" + + "github.com/rs/zerolog/log" +) + +const ( + jobUIDOverdue = "gitness:jobs:overdue" + jobTypeOverdue = "gitness:jobs:overdue" + jobCronOverdue = "*/20 * * * *" // every 20 min +) + +type jobOverdue struct { + store store.JobStore + mxManager lock.MutexManager + scheduler *Scheduler +} + +func newJobOverdue(jobStore store.JobStore, mxManager lock.MutexManager, scheduler *Scheduler) *jobOverdue { + return &jobOverdue{ + store: jobStore, + mxManager: mxManager, + scheduler: scheduler, + } +} + +// Handle reclaims overdue jobs. Normally this shouldn't happen. +// But, it can occur if DB update after a job execution fails, +// or the server suddenly terminates while the job is still running. +func (j *jobOverdue) Handle(ctx context.Context, _ string, _ ProgressReporter) (string, error) { + mx, err := globalLock(ctx, j.mxManager) + if err != nil { + return "", fmt.Errorf("failed to obtain the lock to reclaim overdue jobs") + } + + defer func() { + if err := mx.Unlock(ctx); err != nil { + log.Err(err).Msg("failed to release global lock after reclaiming overdue jobs") + } + }() + + overdueJobs, err := j.store.ListDeadlineExceeded(ctx, time.Now()) + if err != nil { + return "", fmt.Errorf("failed to list overdue jobs") + } + + if len(overdueJobs) == 0 { + return "", nil + } + + var minScheduled time.Time + + for _, job := range overdueJobs { + const errorMessage = "deadline exceeded" + postExec(job, "", errorMessage) + + err = j.store.UpdateExecution(ctx, job) + if err != nil { + return "", fmt.Errorf("failed update overdue job") + } + + if job.State == enum.JobStateScheduled { + scheduled := time.UnixMilli(job.Scheduled) + if minScheduled.IsZero() || minScheduled.After(scheduled) { + minScheduled = scheduled + } + } + } + + if !minScheduled.IsZero() { + j.scheduler.scheduleProcessing(minScheduled) + } + + result := fmt.Sprintf("found %d overdue jobs", len(overdueJobs)) + + return result, nil +} diff --git a/internal/services/job/job_purge.go b/internal/services/job/job_purge.go new file mode 100644 index 0000000000..28e76f91b6 --- /dev/null +++ b/internal/services/job/job_purge.go @@ -0,0 +1,77 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
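+// Note (worked example with an assumed value): the purge handler below runs on the
+// jobCronPurge schedule and removes finished jobs older than the configured minimum age
+// (wired from config.BackgroundJobs.PurgeFinishedOlderThan via ProvideScheduler). With a
+// hypothetical minimum age of 120h, a run at 2023-09-21 04:15 UTC would call DeleteOld
+// with a cut-off of 2023-09-16 04:15 UTC.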
+ +package job + +import ( + "context" + "fmt" + "time" + + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/lock" + + "github.com/rs/zerolog/log" +) + +const ( + jobUIDPurge = "gitness:jobs:purge" + jobTypePurge = "gitness:jobs:purge" + jobCronPurge = "15 */4 * * *" // every 4 hours at 15 minutes +) + +type jobPurge struct { + store store.JobStore + mxManager lock.MutexManager + minOldAge time.Duration +} + +func newJobPurge(jobStore store.JobStore, mxManager lock.MutexManager, minOldAge time.Duration) *jobPurge { + if minOldAge < 0 { + minOldAge = 0 + } + + return &jobPurge{ + store: jobStore, + mxManager: mxManager, + minOldAge: minOldAge, + } +} + +func (j *jobPurge) Handle(ctx context.Context, _ string, _ ProgressReporter) (string, error) { + mx, err := globalLock(ctx, j.mxManager) + if err != nil { + return "", fmt.Errorf("failed to obtain the lock to clean up old jobs") + } + + defer func() { + if err := mx.Unlock(ctx); err != nil { + log.Err(err).Msg("failed to release global lock after cleaning up old jobs") + } + }() + + olderThan := time.Now().Add(-j.minOldAge) + + n, err := j.store.DeleteOld(ctx, olderThan) + if err != nil { + return "", fmt.Errorf("failed to purge old jobs") + } + + result := "no old jobs found" + if n > 0 { + result = fmt.Sprintf("deleted %d old jobs", n) + } + + return result, nil +} diff --git a/internal/services/job/lock.go b/internal/services/job/lock.go new file mode 100644 index 0000000000..dbe45d410f --- /dev/null +++ b/internal/services/job/lock.go @@ -0,0 +1,33 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package job + +import ( + "context" + + "github.com/harness/gitness/lock" +) + +func globalLock(ctx context.Context, manager lock.MutexManager) (lock.Mutex, error) { + const lockKey = "jobs" + mx, err := manager.NewMutex(lockKey) + if err != nil { + return nil, err + } + + err = mx.Lock(ctx) + + return mx, err +} diff --git a/internal/services/job/pubsub.go b/internal/services/job/pubsub.go new file mode 100644 index 0000000000..15dafef2b2 --- /dev/null +++ b/internal/services/job/pubsub.go @@ -0,0 +1,70 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
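+// Example (illustrative sketch): a consumer of job state changes can subscribe to the
+// PubSubTopicStateChange topic declared below and decode payloads with DecodeStateChange.
+// pubsubService is an assumed pubsub.PubSub instance; the call shape mirrors the
+// scheduler's own Subscribe usage.
+//
+//	_ = pubsubService.Subscribe(ctx, PubSubTopicStateChange, func(payload []byte) error {
+//		stateChange, err := DecodeStateChange(payload)
+//		if err != nil {
+//			return err
+//		}
+//		// react to the state change, e.g. forward stateChange.UID / State / Progress to an SSE stream
+//		return nil
+//	})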
+ +package job + +import ( + "bytes" + "context" + "encoding/gob" + "fmt" + + "github.com/harness/gitness/pubsub" + "github.com/harness/gitness/types" +) + +const ( + PubSubTopicCancelJob = "gitness:job:cancel_job" + PubSubTopicStateChange = "gitness:job:state_change" +) + +func encodeStateChange(job *types.Job) ([]byte, error) { + stateChange := &types.JobStateChange{ + UID: job.UID, + State: job.State, + Progress: job.RunProgress, + Result: job.Result, + Failure: job.LastFailureError, + } + + buffer := bytes.NewBuffer(nil) + if err := gob.NewEncoder(buffer).Encode(stateChange); err != nil { + return nil, err + } + + return buffer.Bytes(), nil +} + +func DecodeStateChange(payload []byte) (*types.JobStateChange, error) { + stateChange := &types.JobStateChange{} + if err := gob.NewDecoder(bytes.NewReader(payload)).Decode(stateChange); err != nil { + return nil, err + } + + return stateChange, nil +} + +func publishStateChange(ctx context.Context, publisher pubsub.Publisher, job *types.Job) error { + payload, err := encodeStateChange(job) + if err != nil { + return fmt.Errorf("failed to gob encode JobStateChange: %w", err) + } + + err = publisher.Publish(ctx, PubSubTopicStateChange, payload) + if err != nil { + return fmt.Errorf("failed to publish JobStateChange: %w", err) + } + + return nil +} diff --git a/internal/services/job/scheduler.go b/internal/services/job/scheduler.go new file mode 100644 index 0000000000..05ca00bd96 --- /dev/null +++ b/internal/services/job/scheduler.go @@ -0,0 +1,755 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package job + +import ( + "context" + "errors" + "fmt" + "runtime/debug" + "sync" + "time" + + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/lock" + "github.com/harness/gitness/pubsub" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/gorhill/cronexpr" + "github.com/rs/zerolog/log" +) + +// Scheduler controls execution of background jobs. +type Scheduler struct { + // dependencies + store store.JobStore + executor *Executor + mxManager lock.MutexManager + pubsubService pubsub.PubSub + + // configuration fields + instanceID string + maxRunning int + purgeMinOldAge time.Duration + + // synchronization stuff + signal chan time.Time + done chan struct{} + wgRunning sync.WaitGroup + cancelJobMx sync.Mutex + cancelJobMap map[string]context.CancelFunc +} + +func NewScheduler( + jobStore store.JobStore, + executor *Executor, + mxManager lock.MutexManager, + pubsubService pubsub.PubSub, + instanceID string, + maxRunning int, + purgeMinOldAge time.Duration, +) (*Scheduler, error) { + if maxRunning < 1 { + maxRunning = 1 + } + return &Scheduler{ + store: jobStore, + executor: executor, + mxManager: mxManager, + pubsubService: pubsubService, + + instanceID: instanceID, + maxRunning: maxRunning, + purgeMinOldAge: purgeMinOldAge, + + cancelJobMap: map[string]context.CancelFunc{}, + }, nil +} + +// Run runs the background job scheduler. 
+// It's a blocking call. It blocks until the provided context is done. +func (s *Scheduler) Run(ctx context.Context) error { + if s.done != nil { + return errors.New("already started") + } + + consumer := s.pubsubService.Subscribe(ctx, PubSubTopicCancelJob, s.handleCancelJob) + defer func() { + err := consumer.Close() + if err != nil { + log.Ctx(ctx).Err(err). + Msg("job scheduler: failed to close pubsub cancel job consumer") + } + }() + + if err := s.createNecessaryJobs(ctx); err != nil { + return fmt.Errorf("failed to create necessary jobs: %w", err) + } + + if err := s.registerNecessaryJobs(); err != nil { + return fmt.Errorf("failed to register scheduler's internal jobs: %w", err) + } + + s.executor.finishRegistration() + + log.Ctx(ctx).Debug().Msg("job scheduler: starting") + + s.done = make(chan struct{}) + defer close(s.done) + + s.signal = make(chan time.Time, 1) + + timer := newSchedulerTimer() + defer timer.Stop() + + for { + err := func() error { + defer func() { + if r := recover(); r != nil { + stack := string(debug.Stack()) + log.Ctx(ctx).Error(). + Str("panic", fmt.Sprintf("[%T] job scheduler panic: %v", r, r)). + Msg(stack) + } + }() + + select { + case <-ctx.Done(): + return ctx.Err() + + case newTime := <-s.signal: + dur := timer.RescheduleEarlier(newTime) + if dur > 0 { + log.Ctx(ctx).Trace(). + Msgf("job scheduler: update of scheduled job processing time... runs in %s", dur) + } + return nil + + case now := <-timer.Ch(): + count, nextExec, gotAllJobs, err := s.processReadyJobs(ctx, now) + + // If the next processing time isn't known use the default. + if nextExec.IsZero() { + const period = time.Minute + nextExec = now.Add(period) + } + + // Reset the timer. Make the timer edgy if there are more jobs available. + dur := timer.ResetAt(nextExec, !gotAllJobs) + + if err != nil { + log.Ctx(ctx).Err(err). + Msgf("job scheduler: failed to process jobs; next iteration in %s", dur) + } else { + log.Ctx(ctx).Trace(). + Msgf("job scheduler: started %d jobs; next iteration in %s", count, dur) + } + + return nil + } + }() + if err != nil { + return err + } + } +} + +// WaitJobsDone waits until execution of all jobs has finished. +// It is intended to be used for graceful shutdown, after the Run method has finished. +func (s *Scheduler) WaitJobsDone(ctx context.Context) { + log.Ctx(ctx).Debug().Msg("job scheduler: stopping... waiting for the currently running jobs to finish") + + ch := make(chan struct{}) + go func() { + s.wgRunning.Wait() + close(ch) + }() + + select { + case <-ctx.Done(): + log.Ctx(ctx).Warn().Msg("job scheduler: stop interrupted") + case <-ch: + log.Ctx(ctx).Info().Msg("job scheduler: gracefully stopped") + } +} + +// CancelJob cancels a currently running or scheduled job. +func (s *Scheduler) CancelJob(ctx context.Context, jobUID string) error { + mx, err := globalLock(ctx, s.mxManager) + if err != nil { + return fmt.Errorf("failed to obtain global lock to cancel a job: %w", err) + } + + defer func() { + if err := mx.Unlock(ctx); err != nil { + log.Ctx(ctx).Err(err).Msg("failed to release global lock after canceling a job") + } + }() + + job, err := s.store.Find(ctx, jobUID) + if err != nil { + return fmt.Errorf("failed to find job to cancel: %w", err) + } + + if job.IsRecurring { + return errors.New("can't cancel recurring jobs") + } + + if job.State != enum.JobStateScheduled && job.State != enum.JobStateRunning { + return nil // return no error if the job is already canceled or has finished or failed. + } + + // first we update the job in the database... 
+ + job.Updated = time.Now().UnixMilli() + job.State = enum.JobStateCanceled + + err = s.store.UpdateExecution(ctx, job) + if err != nil { + return fmt.Errorf("failed to update job to cancel it: %w", err) + } + + // ... and then we cancel its context. + + s.cancelJobMx.Lock() + cancelFn, ok := s.cancelJobMap[jobUID] + s.cancelJobMx.Unlock() + + if ok { + cancelFn() + return nil + } + + return s.pubsubService.Publish(ctx, PubSubTopicCancelJob, []byte(jobUID)) +} + +func (s *Scheduler) handleCancelJob(payload []byte) error { + jobUID := string(payload) + if jobUID == "" { + return nil + } + + s.cancelJobMx.Lock() + cancelFn, ok := s.cancelJobMap[jobUID] + s.cancelJobMx.Unlock() + + if ok { + cancelFn() + } + + return nil +} + +// scheduleProcessing triggers processing of ready jobs. +// This should be run after adding new jobs to the database. +func (s *Scheduler) scheduleProcessing(scheduled time.Time) { + go func() { + select { + case <-s.done: + case s.signal <- scheduled: + } + }() +} + +// scheduleIfHaveMoreJobs triggers processing of ready jobs if the timer is edgy. +// The timer would be edgy if the previous iteration found more jobs than it could start (full capacity). +// This should be run after a non-recurring job has finished. +func (s *Scheduler) scheduleIfHaveMoreJobs() { + s.scheduleProcessing(time.Time{}) // zero time will trigger the timer if it's edgy +} + +// RunJob runs a single job of the type Definition.Type. +// All parameters a job Handler receives must be inside the Definition.Data string +// (as JSON or whatever the job Handler can interpret). +func (s *Scheduler) RunJob(ctx context.Context, def Definition) error { + if err := def.Validate(); err != nil { + return err + } + + job := def.toNewJob() + + if err := s.store.Create(ctx, job); err != nil { + return fmt.Errorf("failed to add new job to the database: %w", err) + } + + s.scheduleProcessing(time.UnixMilli(job.Scheduled)) + + return nil +} + +// RunJobs runs several jobs. It is more efficient than calling RunJob once per job +// because it triggers processing of the newly created jobs only once, after all of them have been stored. +func (s *Scheduler) RunJobs(ctx context.Context, groupID string, defs []Definition) error { + if len(defs) == 0 { + return nil + } + + jobs := make([]*types.Job, len(defs)) + for i, def := range defs { + if err := def.Validate(); err != nil { + return err + } + jobs[i] = def.toNewJob() + jobs[i].GroupID = groupID + } + + for _, job := range jobs { + if err := s.store.Create(ctx, job); err != nil { + return fmt.Errorf("failed to add new job to the database: %w", err) + } + } + + s.scheduleProcessing(time.Now()) + + return nil +} + +// processReadyJobs executes jobs that are ready to run. This function is periodically run by the Scheduler. +// The function returns the number of jobs it has started, the next scheduled execution time (of this function) +// and a bool value indicating whether all currently available ready jobs were started. +// Internally the Scheduler uses an "edgy" timer to reschedule calls of this function. +// The edgy option of the timer will be on if this function hasn't been able to start all jobs that are ready to run. +// If the timer has the edgy option turned on it will trigger the timer (and thus this function will be called) +// when any currently running job finishes successfully or fails.
+func (s *Scheduler) processReadyJobs(ctx context.Context, now time.Time) (int, time.Time, bool, error) { + mx, err := globalLock(ctx, s.mxManager) + if err != nil { + return 0, time.Time{}, false, + fmt.Errorf("failed to obtain global lock to periodically process ready jobs: %w", err) + } + + defer func() { + if err := mx.Unlock(ctx); err != nil { + log.Ctx(ctx).Err(err). + Msg("failed to release global lock after periodic processing of ready jobs") + } + }() + + availableCount, err := s.availableSlots(ctx) + if err != nil { + return 0, time.Time{}, false, + fmt.Errorf("failed to count available slots for job execution: %w", err) + } + + // get one over the limit to check if all ready jobs are fetched + jobs, err := s.store.ListReady(ctx, now, availableCount+1) + if err != nil { + return 0, time.Time{}, false, + fmt.Errorf("failed to load scheduled jobs: %w", err) + } + + var ( + countExecuted int + knownNextExecTime time.Time + gotAllJobs bool + ) + + if len(jobs) > availableCount { + // More jobs are ready than we are able to run. + jobs = jobs[:availableCount] + } else { + gotAllJobs = true + knownNextExecTime, err = s.store.NextScheduledTime(ctx, now) + if err != nil { + return 0, time.Time{}, false, + fmt.Errorf("failed to read next scheduled time: %w", err) + } + } + + for _, job := range jobs { + jobCtx := log.Ctx(ctx).With(). + Str("job.UID", job.UID). + Str("job.Type", job.Type). + Logger().WithContext(ctx) + + // Update the job fields for the new execution + s.preExec(job) + + if err := s.store.UpdateExecution(ctx, job); err != nil { + knownNextExecTime = time.Time{} + gotAllJobs = false + log.Ctx(jobCtx).Err(err).Msg("failed to update job to mark it as running") + continue + } + + // tell everybody that a job has started + if err := publishStateChange(ctx, s.pubsubService, job); err != nil { + log.Ctx(jobCtx).Err(err).Msg("failed to publish job state change") + } + + s.runJob(jobCtx, job) + + countExecuted++ + } + + return countExecuted, knownNextExecTime, gotAllJobs, nil +} + +func (s *Scheduler) availableSlots(ctx context.Context) (int, error) { + countRunning, err := s.store.CountRunning(ctx) + if err != nil { + return 0, err + } + + availableCount := s.maxRunning - countRunning + if availableCount < 0 { + return 0, nil + } + + return availableCount, nil +} + +// runJob updates the job in the database and starts it in a separate goroutine. +// The function will also log the execution. +func (s *Scheduler) runJob(ctx context.Context, j *types.Job) { + s.wgRunning.Add(1) + go func(ctx context.Context, + jobUID, jobType, jobData string, + jobRunDeadline int64, + ) { + defer s.wgRunning.Done() + + log.Ctx(ctx).Debug().Msg("started job") + + timeStart := time.Now() + + // Run the job + execResult, execFailure := s.doExec(ctx, jobUID, jobType, jobData, jobRunDeadline) + + // Use the context.Background() because we want to update the job even if the job's context is done. + // The context can be done because the job exceeded its deadline or the server is shutting down. + backgroundCtx := context.Background() + + if mx, err := globalLock(backgroundCtx, s.mxManager); err != nil { + // If locking failed, just log the error and proceed to update the DB anyway. 
+ log.Ctx(ctx).Err(err).Msg("failed to obtain global lock to update job after execution") + } else { + defer func() { + if err := mx.Unlock(backgroundCtx); err != nil { + log.Ctx(ctx).Err(err).Msg("failed to release global lock to update job after execution") + } + }() + } + + job, err := s.store.Find(backgroundCtx, jobUID) + if err != nil { + log.Ctx(ctx).Err(err).Msg("failed to find job after execution") + return + } + + // Update the job fields, reschedule if necessary. + postExec(job, execResult, execFailure) + + err = s.store.UpdateExecution(backgroundCtx, job) + if err != nil { + log.Ctx(ctx).Err(err).Msg("failed to update job after execution") + return + } + + logInfo := log.Ctx(ctx).Info().Str("duration", time.Since(timeStart).String()) + + if job.IsRecurring { + logInfo = logInfo.Bool("job.IsRecurring", true) + } + if job.Result != "" { + logInfo = logInfo.Str("job.Result", job.Result) + } + if job.LastFailureError != "" { + logInfo = logInfo.Str("job.Failure", job.LastFailureError) + } + + switch job.State { + case enum.JobStateFinished: + logInfo.Msg("job successfully finished") + s.scheduleIfHaveMoreJobs() + + case enum.JobStateFailed: + logInfo.Msg("job failed") + s.scheduleIfHaveMoreJobs() + + case enum.JobStateCanceled: + log.Ctx(ctx).Error().Msg("job canceled") + s.scheduleIfHaveMoreJobs() + + case enum.JobStateScheduled: + scheduledTime := time.UnixMilli(job.Scheduled) + logInfo. + Str("job.Scheduled", scheduledTime.Format(time.RFC3339Nano)). + Msg("job finished and rescheduled") + + s.scheduleProcessing(scheduledTime) + + case enum.JobStateRunning: + log.Ctx(ctx).Error().Msg("should not happen; job still has state=running after finishing") + } + + // tell everybody that a job has finished execution + if err := publishStateChange(backgroundCtx, s.pubsubService, job); err != nil { + log.Ctx(ctx).Err(err).Msg("failed to publish job state change") + } + }(ctx, j.UID, j.Type, j.Data, j.RunDeadline) +} + +// preExec updates the provided types.Job before execution. +func (s *Scheduler) preExec(job *types.Job) { + if job.MaxDurationSeconds < 1 { + job.MaxDurationSeconds = 1 + } + + now := time.Now() + nowMilli := now.UnixMilli() + + execDuration := time.Duration(job.MaxDurationSeconds) * time.Second + execDeadline := now.Add(execDuration) + + job.Updated = nowMilli + job.LastExecuted = nowMilli + job.State = enum.JobStateRunning + job.RunDeadline = execDeadline.UnixMilli() + job.RunBy = s.instanceID + job.RunProgress = ProgressMin + job.TotalExecutions++ + job.Result = "" + job.LastFailureError = "" +} + +// doExec executes the provided types.Job. +func (s *Scheduler) doExec(ctx context.Context, + jobUID, jobType, jobData string, + jobRunDeadline int64, +) (execResult, execError string) { + execDeadline := time.UnixMilli(jobRunDeadline) + + jobCtx, done := context.WithDeadline(ctx, execDeadline) + defer done() + + s.cancelJobMx.Lock() + if _, ok := s.cancelJobMap[jobUID]; ok { + // should not happen: jobs have unique UIDs! + s.cancelJobMx.Unlock() + return "", "failed to start: already running" + } + s.cancelJobMap[jobUID] = done + s.cancelJobMx.Unlock() + + defer func() { + s.cancelJobMx.Lock() + delete(s.cancelJobMap, jobUID) + s.cancelJobMx.Unlock() + }() + + execResult, err := s.executor.exec(jobCtx, jobUID, jobType, jobData) + if err != nil { + execError = err.Error() + } + + return +} + +// postExec updates the provided types.Job after execution and reschedules it if necessary. 
+func postExec(job *types.Job, resultData, resultErr string) { + // Proceed with the update of the job if it's in the running state or + // if it's marked as canceled but has succeeded nonetheless. + // Other states should not happen, but if they do, just leave the job as it is. + if job.State != enum.JobStateRunning && (job.State != enum.JobStateCanceled || resultErr != "") { + return + } + + now := time.Now() + nowMilli := now.UnixMilli() + + job.Updated = nowMilli + job.Result = resultData + job.RunBy = "" + + if resultErr != "" { + job.ConsecutiveFailures++ + job.State = enum.JobStateFailed + job.LastFailureError = resultErr + } else { + job.State = enum.JobStateFinished + job.RunProgress = ProgressMax + } + + // Reschedule recurring jobs + if job.IsRecurring { + if resultErr == "" { + job.ConsecutiveFailures = 0 + } + + exp, err := cronexpr.Parse(job.RecurringCron) + if err != nil { + job.State = enum.JobStateFailed + + messages := fmt.Sprintf("failed to parse cron string: %s", err.Error()) + if job.LastFailureError != "" { + messages = messages + "; " + job.LastFailureError + } + + job.LastFailureError = messages + } else { + job.State = enum.JobStateScheduled + job.Scheduled = exp.Next(now).UnixMilli() + } + + return + } + + // Reschedule the failed job if retrying is allowed + if job.State == enum.JobStateFailed && job.ConsecutiveFailures <= job.MaxRetries { + const retryDelay = 15 * time.Second + job.State = enum.JobStateScheduled + job.Scheduled = now.Add(retryDelay).UnixMilli() + job.RunProgress = ProgressMin + } +} + +func (s *Scheduler) GetJobProgress(ctx context.Context, jobUID string) (types.JobProgress, error) { + job, err := s.store.Find(ctx, jobUID) + if err != nil { + return types.JobProgress{}, err + } + + return mapToProgress(job), nil +} + +func (s *Scheduler) GetJobProgressForGroup(ctx context.Context, jobGroupUID string) ([]types.JobProgress, error) { + job, err := s.store.ListByGroupID(ctx, jobGroupUID) + if err != nil { + return nil, err + } + return mapToProgressMany(job), nil +} + +func (s *Scheduler) PurgeJobsByGroupId(ctx context.Context, jobGroupID string) (int64, error) { + n, err := s.store.DeleteByGroupID(ctx, jobGroupID) + if err != nil { + return 0, fmt.Errorf("failed to delete jobs by group id=%s: %w", jobGroupID, err) + } + return n, nil +} + +func mapToProgressMany(jobs []*types.Job) []types.JobProgress { + if jobs == nil { + return nil + } + j := make([]types.JobProgress, len(jobs)) + for i, job := range jobs { + j[i] = mapToProgress(job) + } + return j +} + +func mapToProgress(job *types.Job) types.JobProgress { + return types.JobProgress{ + State: job.State, + Progress: job.RunProgress, + Result: job.Result, + Failure: job.LastFailureError, + } +} + +func (s *Scheduler) AddRecurring( + ctx context.Context, + jobUID, + jobType, + cronDef string, + maxDur time.Duration, +) error { + cronExp, err := cronexpr.Parse(cronDef) + if err != nil { + return fmt.Errorf("invalid cron definition string for job type=%s: %w", jobType, err) + } + + now := time.Now() + nowMilli := now.UnixMilli() + + nextExec := cronExp.Next(now) + + job := &types.Job{ + UID: jobUID, + Created: nowMilli, + Updated: nowMilli, + Type: jobType, + Priority: enum.JobPriorityElevated, + Data: "", + Result: "", + MaxDurationSeconds: int(maxDur / time.Second), + MaxRetries: 0, + State: enum.JobStateScheduled, + Scheduled: nextExec.UnixMilli(), + TotalExecutions: 0, + RunBy: "", + RunDeadline: 0, + RunProgress: 0, + LastExecuted: 0, + IsRecurring: true, + RecurringCron: cronDef, + 
ConsecutiveFailures: 0, + LastFailureError: "", + } + + err = s.store.Upsert(ctx, job) + if err != nil { + return fmt.Errorf("failed to upsert job id=%s type=%s: %w", jobUID, jobType, err) + } + + return nil +} + +func (s *Scheduler) createNecessaryJobs(ctx context.Context) error { + mx, err := globalLock(ctx, s.mxManager) + if err != nil { + return fmt.Errorf("failed to obtain global lock to create necessary jobs: %w", err) + } + + defer func() { + if err := mx.Unlock(ctx); err != nil { + log.Ctx(ctx).Err(err). + Msg("failed to release global lock after creating necessary jobs") + } + }() + + err = s.AddRecurring(ctx, jobUIDPurge, jobTypePurge, jobCronPurge, 5*time.Second) + if err != nil { + return err + } + + err = s.AddRecurring(ctx, jobUIDOverdue, jobTypeOverdue, jobCronOverdue, 5*time.Second) + if err != nil { + return err + } + + return nil +} + +// registerNecessaryJobs registers two jobs: overdue job recovery and purging of old finished jobs. +// These two job types are an integral part of the job scheduler. +func (s *Scheduler) registerNecessaryJobs() error { + handlerOverdue := newJobOverdue(s.store, s.mxManager, s) + err := s.executor.Register(jobTypeOverdue, handlerOverdue) + if err != nil { + return err + } + + handlerPurge := newJobPurge(s.store, s.mxManager, s.purgeMinOldAge) + err = s.executor.Register(jobTypePurge, handlerPurge) + if err != nil { + return err + } + + return nil +} diff --git a/internal/services/job/timer.go b/internal/services/job/timer.go new file mode 100644 index 0000000000..23f221578c --- /dev/null +++ b/internal/services/job/timer.go @@ -0,0 +1,121 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package job + +import ( + "time" +) + +const timerMaxDur = 30 * time.Minute +const timerMinDur = time.Nanosecond + +type schedulerTimer struct { + timerAt time.Time + timer *time.Timer + edgy bool // if true, the next RescheduleEarlier call will trigger the timer immediately. +} + +// newSchedulerTimer creates a new timer for the Scheduler. It is created to fire immediately. +func newSchedulerTimer() *schedulerTimer { + return &schedulerTimer{ + timerAt: time.Now().Add(timerMinDur), + timer: time.NewTimer(timerMinDur), + } +} + +// ResetAt resets the internal timer to trigger at the provided time. +// If the provided time is zero or in the past, the timer fires almost immediately; +// if it is further away than the max duration, the delay is capped at the max duration. +func (t *schedulerTimer) ResetAt(next time.Time, edgy bool) time.Duration { + return t.resetAt(time.Now(), next, edgy) +} + +func (t *schedulerTimer) resetAt(now, next time.Time, edgy bool) time.Duration { + var dur time.Duration + + dur = next.Sub(now) + if dur < timerMinDur { + dur = timerMinDur + next = now.Add(dur) + } else if dur > timerMaxDur { + dur = timerMaxDur + next = now.Add(dur) + } + + t.Stop() + t.edgy = edgy + t.timerAt = next + t.timer.Reset(dur) + + return dur +} + +// RescheduleEarlier will reset the timer if the new time is earlier than the previous time. +// Otherwise, the function does nothing and returns 0.
+// Providing zero time triggers the timer if it's edgy, otherwise does nothing. +func (t *schedulerTimer) RescheduleEarlier(next time.Time) time.Duration { + return t.rescheduleEarlier(time.Now(), next) +} + +func (t *schedulerTimer) rescheduleEarlier(now, next time.Time) time.Duration { + var dur time.Duration + + switch { + case t.edgy: + // if the timer is edgy trigger it immediately + dur = timerMinDur + + case next.IsZero(): + // if the provided time is zero: trigger the timer if it's edgy otherwise do nothing + if !t.edgy { + return 0 + } + dur = timerMinDur + + case !next.Before(t.timerAt): + // do nothing if the timer is already scheduled to run sooner than the provided time + return 0 + + default: + dur = next.Sub(now) + if dur < timerMinDur { + dur = timerMinDur + } + } + + next = now.Add(dur) + + t.Stop() + t.timerAt = next + t.timer.Reset(dur) + + return dur +} + +func (t *schedulerTimer) Ch() <-chan time.Time { + return t.timer.C +} + +func (t *schedulerTimer) Stop() { + // stop the timer + t.timer.Stop() + + // consume the timer's tick if any + select { + case <-t.timer.C: + default: + } + + t.timerAt = time.Time{} +} diff --git a/internal/services/job/timer_test.go b/internal/services/job/timer_test.go new file mode 100644 index 0000000000..9a894324e8 --- /dev/null +++ b/internal/services/job/timer_test.go @@ -0,0 +1,115 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
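+// Note (illustration of the expectations exercised below): after resetAt(now, now+60s, edgy),
+// the timer is armed for 60s. With edgy=true, any later rescheduleEarlier call, even with a
+// zero or later time, collapses the wait to timerMinDur; with edgy=false, a zero time returns 0
+// and leaves the 60s deadline untouched, while an earlier non-zero time shortens the wait.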
+ +package job + +import ( + "testing" + "time" +) + +func TestSchedulerTimer_ResetAt(t *testing.T) { + now := time.Now() + tests := []struct { + name string + at time.Time + exp time.Duration + }{ + { + name: "zero", + at: time.Time{}, + exp: timerMinDur, + }, + { + name: "immediate", + at: now, + exp: timerMinDur, + }, + { + name: "30s", + at: now.Add(30 * time.Second), + exp: 30 * time.Second, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + timer := newSchedulerTimer() + dur := timer.resetAt(now, test.at, false) + if want, got := test.exp, dur; want != dur { + t.Errorf("want: %s, got: %s", want.String(), got.String()) + } + }) + } +} + +func TestSchedulerTimer_TryResetAt(t *testing.T) { + now := time.Now() + tests := []struct { + name string + at time.Time + edgy bool + exp time.Duration + }{ + { + name: "past", + at: now.Add(-time.Second), + exp: timerMinDur, + }, + { + name: "30s", + at: now.Add(30 * time.Second), + exp: 30 * time.Second, + }, + { + name: "90s", + at: now.Add(90 * time.Second), + exp: 0, + }, + { + name: "30s-edgy", + at: now.Add(30 * time.Second), + edgy: true, + exp: timerMinDur, + }, + { + name: "90s-edgy", + at: now.Add(90 * time.Second), + edgy: true, + exp: timerMinDur, + }, + { + name: "zero", + at: time.Time{}, + exp: 0, + }, + { + name: "zero-edgy", + at: time.Time{}, + edgy: true, + exp: timerMinDur, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + timer := newSchedulerTimer() + timer.resetAt(now, now.Add(time.Minute), test.edgy) + dur := timer.rescheduleEarlier(now, test.at) + if want, got := test.exp, dur; want != dur { + t.Errorf("want: %s, got: %s", want.String(), got.String()) + } + }) + } +} diff --git a/internal/services/job/uid.go b/internal/services/job/uid.go new file mode 100644 index 0000000000..25390c4571 --- /dev/null +++ b/internal/services/job/uid.go @@ -0,0 +1,35 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package job + +import ( + "crypto/rand" + "encoding/base32" +) + +// UID returns unique random string with length equal to 16. +func UID() (string, error) { + const uidSizeBytes = 10 // must be divisible by 5, the resulting string length will be uidSizeBytes/5*8 + + var buf [uidSizeBytes]byte + _, err := rand.Read(buf[:]) + if err != nil { + return "", err + } + + uid := base32.StdEncoding.EncodeToString(buf[:]) + + return uid, nil +} diff --git a/internal/services/job/wire.go b/internal/services/job/wire.go new file mode 100644 index 0000000000..5d6a116c6d --- /dev/null +++ b/internal/services/job/wire.go @@ -0,0 +1,57 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package job + +import ( + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/lock" + "github.com/harness/gitness/pubsub" + "github.com/harness/gitness/types" + + "github.com/google/wire" +) + +var WireSet = wire.NewSet( + ProvideExecutor, + ProvideScheduler, +) + +func ProvideExecutor( + jobStore store.JobStore, + pubsubService pubsub.PubSub, +) *Executor { + return NewExecutor( + jobStore, + pubsubService, + ) +} + +func ProvideScheduler( + jobStore store.JobStore, + executor *Executor, + mutexManager lock.MutexManager, + pubsubService pubsub.PubSub, + config *types.Config, +) (*Scheduler, error) { + return NewScheduler( + jobStore, + executor, + mutexManager, + pubsubService, + config.InstanceID, + config.BackgroundJobs.MaxRunning, + config.BackgroundJobs.PurgeFinishedOlderThan, + ) +} diff --git a/internal/services/metric/metrics.go b/internal/services/metric/metrics.go new file mode 100644 index 0000000000..e5c5378cc2 --- /dev/null +++ b/internal/services/metric/metrics.go @@ -0,0 +1,136 @@ +package metric + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "net/http" + "time" + + "github.com/harness/gitness/internal/services/job" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/types" + "github.com/harness/gitness/version" +) + +const jobType = "metric-collector" + +type metricData struct { + IP string `json:"ip"` + Hostname string `json:"hostname"` + Installer string `json:"installed_by"` + Installed string `json:"installed_at"` + Version string `json:"version"` + Users int64 `json:"user_count"` + Repos int64 `json:"repo_count"` + Pipelines int64 `json:"pipeline_count"` + Executions int64 `json:"execution_count"` +} + +type Collector struct { + hostname string + enabled bool + endpoint string + token string + userStore store.PrincipalStore + repoStore store.RepoStore + pipelineStore store.PipelineStore + executionStore store.ExecutionStore + scheduler *job.Scheduler +} + +func (c *Collector) Register(ctx context.Context) { + if !c.enabled { + return + } + c.scheduler.AddRecurring(ctx, jobType, jobType, "0 0 * * *", time.Minute) +} + +func (c *Collector) Handle(ctx context.Context, _ string, _ job.ProgressReporter) (string, error) { + + if !c.enabled { + return "", nil + } + + // get first available user + users, err := c.userStore.ListUsers(ctx, &types.UserFilter{ + Page: 1, + Size: 1, + }) + if err != nil { + return "", err + } + if len(users) == 0 { + return "", nil + } + + // total users in the system + totalUsers, err := c.userStore.CountUsers(ctx, &types.UserFilter{}) + if err != nil { + return "", fmt.Errorf("failed to get users total count: %w", err) + } + + // total repos in the system + totalRepos, err := c.repoStore.Count(ctx, 0, &types.RepoFilter{}) + if err != nil { + return "", fmt.Errorf("failed to get repositories total count: %w", err) + } + + // total pipelines in the system + totalPipelines, err := c.pipelineStore.Count(ctx, 0, types.ListQueryFilter{}) + if err != nil { + return "", fmt.Errorf("failed to get pipelines total count: %w", err) + } + + // total executions in the system + 
totalExecutions, err := c.executionStore.Count(ctx, 0) + if err != nil { + return "", fmt.Errorf("failed to get executions total count: %w", err) + } + + data := metricData{ + Hostname: c.hostname, + Installer: users[0].Email, + Installed: time.UnixMilli(users[0].Created).Format("2006-01-02 15:04:05"), + Version: version.Version.String(), + Users: totalUsers, + Repos: totalRepos, + Pipelines: totalPipelines, + Executions: totalExecutions, + } + + buf := new(bytes.Buffer) + err = json.NewEncoder(buf).Encode(data) + if err != nil { + return "", fmt.Errorf("failed to encode metric data: %w", err) + } + + endpoint := fmt.Sprintf("%s?api_key=%s", c.endpoint, c.token) + req, err := http.NewRequest("POST", endpoint, buf) + if err != nil { + return "", fmt.Errorf("failed to create a request for metric data to endpoint %s: %w", endpoint, err) + } + req.Header.Add("Accept", "application/json") + req.Header.Add("Content-Type", "application/json") + + res, err := httpClient.Do(req) + if err != nil { + return "", fmt.Errorf("failed to send metric data to endpoint %s: %w", endpoint, err) + } + + res.Body.Close() + + return res.Status, nil +} + +// httpClient should be used for HTTP requests. It +// is configured with a timeout for reliability. +var httpClient = &http.Client{ + Transport: &http.Transport{ + Proxy: http.ProxyFromEnvironment, + TLSHandshakeTimeout: 30 * time.Second, + DisableKeepAlives: true, + }, + Timeout: 1 * time.Minute, +} diff --git a/internal/services/metric/wire.go b/internal/services/metric/wire.go new file mode 100644 index 0000000000..de72db5534 --- /dev/null +++ b/internal/services/metric/wire.go @@ -0,0 +1,41 @@ +package metric + +import ( + "github.com/google/wire" + "github.com/harness/gitness/internal/services/job" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/types" +) + +var WireSet = wire.NewSet( + ProvideCollector, +) + +func ProvideCollector( + config *types.Config, + userStore store.PrincipalStore, + repoStore store.RepoStore, + pipelineStore store.PipelineStore, + executionStore store.ExecutionStore, + scheduler *job.Scheduler, + executor *job.Executor, +) (*Collector, error) { + job := &Collector{ + hostname: config.InstanceID, + enabled: config.Metric.Enabled, + endpoint: config.Metric.Endpoint, + token: config.Metric.Token, + userStore: userStore, + repoStore: repoStore, + pipelineStore: pipelineStore, + executionStore: executionStore, + scheduler: scheduler, + } + + err := executor.Register(jobType, job) + if err != nil { + return nil, err + } + + return job, nil +} diff --git a/internal/services/pullreq/handlers_branch.go b/internal/services/pullreq/handlers_branch.go new file mode 100644 index 0000000000..a6362aab9b --- /dev/null +++ b/internal/services/pullreq/handlers_branch.go @@ -0,0 +1,231 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+
+package pullreq
+
+import (
+	"context"
+	"fmt"
+	"strings"
+
+	"github.com/harness/gitness/events"
+	"github.com/harness/gitness/gitrpc"
+	gitevents "github.com/harness/gitness/internal/events/git"
+	pullreqevents "github.com/harness/gitness/internal/events/pullreq"
+	"github.com/harness/gitness/types"
+	"github.com/harness/gitness/types/enum"
+
+	"github.com/rs/zerolog/log"
+)
+
+// triggerPREventOnBranchUpdate handles branch update events. For every open pull request
+// it writes an activity entry and triggers the pull request Branch Updated event.
+func (s *Service) triggerPREventOnBranchUpdate(ctx context.Context,
+	event *events.Event[*gitevents.BranchUpdatedPayload],
+) error {
+	// We should always reset the PR mergeable status check when the target branch is updated.
+	// - main
+	//  |- develop
+	//    |- feature1
+	//    |- feature2
+	// When feature2 merges its changes into develop, feature1 is no longer consistent with develop and its
+	// mergeable check must run again even though nothing changed on feature1 itself. The same applies to main:
+	// if someone pushes a new commit to main, the merge status of develop should be reset to unchecked.
+	if branch, err := getBranchFromRef(event.Payload.Ref); err == nil {
+		err = s.pullreqStore.UpdateMergeCheckStatus(ctx, event.Payload.RepoID, branch, enum.MergeCheckStatusUnchecked)
+		if err != nil {
+			return err
+		}
+	}
+
+	// TODO: This function is currently executed directly on branch update event.
+	// TODO: But it should be executed after the PR's head ref has been updated.
+	// TODO: This is to make sure the commit exists on the target repository for forked repositories.
+	s.forEveryOpenPR(ctx, event.Payload.RepoID, event.Payload.Ref, func(pr *types.PullReq) error {
+		// First check if the merge base has changed
+
+		targetRepo, err := s.repoGitInfoCache.Get(ctx, pr.TargetRepoID)
+		if err != nil {
+			return fmt.Errorf("failed to get repo git info: %w", err)
+		}
+
+		mergeBaseInfo, err := s.gitRPCClient.MergeBase(ctx, gitrpc.MergeBaseParams{
+			ReadParams: gitrpc.ReadParams{RepoUID: targetRepo.GitUID},
+			Ref1:       event.Payload.NewSHA,
+			Ref2:       pr.TargetBranch,
+		})
+		if err != nil {
+			return fmt.Errorf("failed to get merge base after branch update to=%s for PR=%d: %w",
+				event.Payload.NewSHA, pr.Number, err)
+		}
+
+		oldMergeBase := pr.MergeBaseSHA
+		newMergeBase := mergeBaseInfo.MergeBaseSHA
+
+		// Update the database with the latest source commit SHA and the merge base SHA.
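+		// Note: UpdateOptLock applies the mutation via optimistic locking, so the closure below may be
+		// re-evaluated if the row changed concurrently. The SourceSHA/OldSHA comparison inside it also
+		// guards against processing a stale or out-of-order branch update event.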
+ + pr, err = s.pullreqStore.UpdateOptLock(ctx, pr, func(pr *types.PullReq) error { + pr.ActivitySeq++ + if pr.SourceSHA != event.Payload.OldSHA { + return fmt.Errorf( + "failed to set SourceSHA for PR %d to value '%s', expected SHA '%s' but current pr has '%s'", + pr.Number, event.Payload.NewSHA, event.Payload.OldSHA, pr.SourceSHA) + } + + pr.SourceSHA = event.Payload.NewSHA + pr.MergeBaseSHA = newMergeBase + + // reset merge-check fields for new run + pr.MergeCheckStatus = enum.MergeCheckStatusUnchecked + pr.MergeSHA = nil + pr.MergeConflicts = nil + return nil + }) + if err != nil { + return err + } + + payload := &types.PullRequestActivityPayloadBranchUpdate{ + Old: event.Payload.OldSHA, + New: event.Payload.NewSHA, + } + + _, err = s.activityStore.CreateWithPayload(ctx, pr, event.Payload.PrincipalID, payload) + if err != nil { + // non-critical error + log.Ctx(ctx).Err(err).Msgf("failed to write pull request activity after branch update") + } + + s.pullreqEvReporter.BranchUpdated(ctx, &pullreqevents.BranchUpdatedPayload{ + Base: pullreqevents.Base{ + PullReqID: pr.ID, + SourceRepoID: pr.SourceRepoID, + TargetRepoID: pr.TargetRepoID, + PrincipalID: event.Payload.PrincipalID, + Number: pr.Number, + }, + OldSHA: event.Payload.OldSHA, + NewSHA: event.Payload.NewSHA, + OldMergeBaseSHA: oldMergeBase, + NewMergeBaseSHA: newMergeBase, + Forced: event.Payload.Forced, + }) + + if err = s.sseStreamer.Publish(ctx, targetRepo.ParentID, enum.SSETypePullrequesUpdated, pr); err != nil { + log.Ctx(ctx).Warn().Msg("failed to publish PR changed event") + } + + return nil + }) + return nil +} + +// closePullReqOnBranchDelete handles branch delete events. +// It closes every open pull request for the branch and triggers the pull request BranchDeleted event. +func (s *Service) closePullReqOnBranchDelete(ctx context.Context, + event *events.Event[*gitevents.BranchDeletedPayload], +) error { + s.forEveryOpenPR(ctx, event.Payload.RepoID, event.Payload.Ref, func(pr *types.PullReq) error { + targetRepo, err := s.repoGitInfoCache.Get(ctx, pr.TargetRepoID) + if err != nil { + return fmt.Errorf("failed to get repo info: %w", err) + } + + pr, err = s.pullreqStore.UpdateOptLock(ctx, pr, func(pr *types.PullReq) error { + pr.ActivitySeq++ // because we need to write the activity + + pr.State = enum.PullReqStateClosed + pr.MergeCheckStatus = enum.MergeCheckStatusUnchecked + pr.MergeSHA = nil + pr.MergeConflicts = nil + + return nil + }) + if err != nil { + return fmt.Errorf("failed to close pull request after branch delete: %w", err) + } + + _, errAct := s.activityStore.CreateWithPayload(ctx, pr, event.Payload.PrincipalID, + &types.PullRequestActivityPayloadBranchDelete{SHA: event.Payload.SHA}) + if errAct != nil { + // non-critical error + log.Ctx(ctx).Err(errAct).Msgf("failed to write pull request activity after branch delete") + } + + s.pullreqEvReporter.Closed(ctx, &pullreqevents.ClosedPayload{ + Base: pullreqevents.Base{ + PullReqID: pr.ID, + SourceRepoID: pr.SourceRepoID, + TargetRepoID: pr.TargetRepoID, + PrincipalID: event.Payload.PrincipalID, + Number: pr.Number, + }, + }) + + if err = s.sseStreamer.Publish(ctx, targetRepo.ParentID, enum.SSETypePullrequesUpdated, pr); err != nil { + log.Ctx(ctx).Warn().Msg("failed to publish PR changed event") + } + + return nil + }) + return nil +} + +// forEveryOpenPR is utility function that executes the provided function +// for every open pull request created with the source branch given as a git ref. 
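+// For example, an update of "refs/heads/feature1" in repository 42 invokes fn once for every open
+// pull request in that repository whose source branch is "feature1", in ascending PR number order.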
+func (s *Service) forEveryOpenPR(ctx context.Context, + repoID int64, ref string, + fn func(pr *types.PullReq) error, +) { + const largeLimit = 1000000 + + branch, err := getBranchFromRef(ref) + if len(branch) == 0 { + log.Ctx(ctx).Err(err).Send() + return + } + + pullreqList, err := s.pullreqStore.List(ctx, &types.PullReqFilter{ + Page: 0, + Size: largeLimit, + SourceRepoID: repoID, + SourceBranch: branch, + States: []enum.PullReqState{enum.PullReqStateOpen}, + Sort: enum.PullReqSortNumber, + Order: enum.OrderAsc, + }) + if err != nil { + log.Ctx(ctx).Err(err).Msg("failed to get list of open pull requests") + return + } + + for _, pr := range pullreqList { + if err = fn(pr); err != nil { + log.Ctx(ctx).Err(err).Msg("failed to process pull req") + } + } +} + +func getBranchFromRef(ref string) (string, error) { + const refPrefix = "refs/heads/" + if !strings.HasPrefix(ref, refPrefix) { + return "", fmt.Errorf("failed to get branch name from branch ref %s", ref) + } + + branch := ref[len(refPrefix):] + if len(branch) == 0 { + return "", fmt.Errorf("got an empty branch name from branch ref %s", ref) + } + return branch, nil +} diff --git a/internal/services/pullreq/handlers_code_comments.go b/internal/services/pullreq/handlers_code_comments.go new file mode 100644 index 0000000000..f7521f6296 --- /dev/null +++ b/internal/services/pullreq/handlers_code_comments.go @@ -0,0 +1,78 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package pullreq + +import ( + "context" + "fmt" + + "github.com/harness/gitness/events" + pullreqevents "github.com/harness/gitness/internal/events/pullreq" + "github.com/harness/gitness/types" +) + +func (s *Service) updateCodeCommentsOnBranchUpdate(ctx context.Context, + event *events.Event[*pullreqevents.BranchUpdatedPayload], +) error { + return s.updateCodeComments(ctx, + event.Payload.TargetRepoID, event.Payload.PullReqID, + event.Payload.NewSHA, event.Payload.NewMergeBaseSHA) +} + +func (s *Service) updateCodeCommentsOnReopen(ctx context.Context, + event *events.Event[*pullreqevents.ReopenedPayload], +) error { + return s.updateCodeComments(ctx, + event.Payload.TargetRepoID, event.Payload.PullReqID, + event.Payload.SourceSHA, event.Payload.MergeBaseSHA) +} + +func (s *Service) updateCodeComments(ctx context.Context, + targetRepoID, pullreqID int64, + newSourceSHA, newMergeBaseSHA string, +) error { + repoGit, err := s.repoGitInfoCache.Get(ctx, targetRepoID) + if err != nil { + return fmt.Errorf("failed to get repo git info: %w", err) + } + + var codeComments []*types.CodeComment + + codeComments, err = s.codeCommentView.ListNotAtMergeBaseSHA(ctx, pullreqID, newMergeBaseSHA) + if err != nil { + return fmt.Errorf("failed to get list of code comments for update after merge base update: %w", err) + } + + s.codeCommentMigrator.MigrateOld(ctx, repoGit.GitUID, newMergeBaseSHA, codeComments) + + err = s.codeCommentView.UpdateAll(ctx, codeComments) + if err != nil { + return fmt.Errorf("failed to update code comments after merge base update: %w", err) + } + + codeComments, err = s.codeCommentView.ListNotAtSourceSHA(ctx, pullreqID, newSourceSHA) + if err != nil { + return fmt.Errorf("failed to get list of code comments for update after source branch update: %w", err) + } + + s.codeCommentMigrator.MigrateNew(ctx, repoGit.GitUID, newSourceSHA, codeComments) + + err = s.codeCommentView.UpdateAll(ctx, codeComments) + if err != nil { + return fmt.Errorf("failed to update code comments after source branch update: %w", err) + } + + return nil +} diff --git a/internal/services/pullreq/handlers_counters.go b/internal/services/pullreq/handlers_counters.go new file mode 100644 index 0000000000..620b8e1b18 --- /dev/null +++ b/internal/services/pullreq/handlers_counters.go @@ -0,0 +1,90 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pullreq + +import ( + "context" + "fmt" + + "github.com/harness/gitness/events" + pullreqevents "github.com/harness/gitness/internal/events/pullreq" + "github.com/harness/gitness/types" +) + +// updatePRCountersOnCreated increments number of PRs and open PRs. 
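+// All counter handlers below delegate to updatePRNumbers, whose four deltas are applied to the
+// repository's (total, open, closed, merged) pull request counts; creation, for example, is (1, 1, 0, 0).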
+func (s *Service) updatePRCountersOnCreated(ctx context.Context, + event *events.Event[*pullreqevents.CreatedPayload], +) error { + err := s.updatePRNumbers(ctx, event.Payload.TargetRepoID, 1, 1, 0, 0) + if err != nil { + return fmt.Errorf("failed to update repository pull request numbers after PR creation: %w", err) + } + + return nil +} + +// updatePRCountersOnReopened increments number of open PRs and decrements number of closed. +func (s *Service) updatePRCountersOnReopened(ctx context.Context, + event *events.Event[*pullreqevents.ReopenedPayload], +) error { + err := s.updatePRNumbers(ctx, event.Payload.TargetRepoID, 0, 1, -1, 0) + if err != nil { + return fmt.Errorf("failed to update repository pull request numbers after PR reopen: %w", err) + } + + return nil +} + +// updatePRCountersOnClosed increments number of closed PRs and decrements number of open. +func (s *Service) updatePRCountersOnClosed(ctx context.Context, + event *events.Event[*pullreqevents.ClosedPayload], +) error { + err := s.updatePRNumbers(ctx, event.Payload.TargetRepoID, 0, -1, 1, 0) + if err != nil { + return fmt.Errorf("failed to update repository pull request numbers after PR close: %w", err) + } + + return nil +} + +// updatePRCountersOnMerged increments number of merged PRs and decrements number of open. +func (s *Service) updatePRCountersOnMerged(ctx context.Context, + event *events.Event[*pullreqevents.MergedPayload], +) error { + err := s.updatePRNumbers(ctx, event.Payload.TargetRepoID, 0, -1, 0, 1) + if err != nil { + return fmt.Errorf("failed to update repository pull request numbers after PR merge: %w", err) + } + + return nil +} + +func (s *Service) updatePRNumbers(ctx context.Context, repoID int64, + deltaNew, deltaOpen, deltaClosed, deltaMerged int, +) error { + repo, err := s.repoStore.Find(ctx, repoID) + if err != nil { + return fmt.Errorf("failed to get repository to update PR numbers: %w", err) + } + + _, err = s.repoStore.UpdateOptLock(ctx, repo, func(repo *types.Repository) error { + repo.NumPulls += deltaNew + repo.NumOpenPulls += deltaOpen + repo.NumClosedPulls += deltaClosed + repo.NumMergedPulls += deltaMerged + return nil + }) + return err +} diff --git a/internal/services/pullreq/handlers_file_viewed.go b/internal/services/pullreq/handlers_file_viewed.go new file mode 100644 index 0000000000..44a60829ce --- /dev/null +++ b/internal/services/pullreq/handlers_file_viewed.go @@ -0,0 +1,96 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pullreq + +import ( + "context" + "errors" + "fmt" + "io" + + "github.com/harness/gitness/events" + "github.com/harness/gitness/gitrpc" + pullreqevents "github.com/harness/gitness/internal/events/pullreq" +) + +// handleFileViewedOnBranchUpdate handles pull request Branch Updated events. +// It marks existing file reviews as obsolete for the PR depending on the change to the file. 
+// +// The major reason of this handler is to allow detect changes that occured to a file since last reviewed, +// even if the file content is the same - e.g. file got deleted and readded with the same content. +func (s *Service) handleFileViewedOnBranchUpdate(ctx context.Context, + event *events.Event[*pullreqevents.BranchUpdatedPayload], +) error { + repoGit, err := s.repoGitInfoCache.Get(ctx, event.Payload.TargetRepoID) + if err != nil { + return fmt.Errorf("failed to get repo git info: %w", err) + } + reader := gitrpc.NewStreamReader(s.gitRPCClient.Diff(ctx, &gitrpc.DiffParams{ + ReadParams: gitrpc.ReadParams{ + RepoUID: repoGit.GitUID, + }, + BaseRef: event.Payload.OldSHA, + HeadRef: event.Payload.NewSHA, + MergeBase: false, // we want the direct changes + IncludePatch: false, // we don't care about the actual file changes + })) + + obsoletePaths := []string{} + for { + fileDiff, err := reader.Next() + if errors.Is(err, io.EOF) { + break + } + if err != nil { + return fmt.Errorf("failed to read next file diff: %w", err) + } + + // DELETED: mark as obsolete - handles open pr file deletions + // CREATED: mark as obsolete - handles cases in which file deleted while PR was closed + // RENAMED: mark old + new path as obsolete - similar to deleting old file and creating new one + // UPDATED: mark as obsolete - in case pr is closed file SHA is handling it + // This strategy leads to a behavior very similar to what github is doing + switch fileDiff.Status { + case gitrpc.FileDiffStatusAdded: + obsoletePaths = append(obsoletePaths, fileDiff.Path) + case gitrpc.FileDiffStatusDeleted: + obsoletePaths = append(obsoletePaths, fileDiff.OldPath) + case gitrpc.FileDiffStatusRenamed: + obsoletePaths = append(obsoletePaths, fileDiff.OldPath, fileDiff.Path) + case gitrpc.FileDiffStatusModified: + obsoletePaths = append(obsoletePaths, fileDiff.Path) + default: + // other cases we don't care + } + } + + if len(obsoletePaths) == 0 { + return nil + } + + err = s.fileViewStore.MarkObsolete( + ctx, + event.Payload.PullReqID, + obsoletePaths) + if err != nil { + return fmt.Errorf( + "failed to mark files obsolete for repo %d and pr %d: %w", + repoGit.ID, + event.Payload.PullReqID, + err) + } + + return nil +} diff --git a/internal/services/pullreq/handlers_head_ref.go b/internal/services/pullreq/handlers_head_ref.go new file mode 100644 index 0000000000..425dfa4de6 --- /dev/null +++ b/internal/services/pullreq/handlers_head_ref.go @@ -0,0 +1,119 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pullreq + +import ( + "context" + "fmt" + "strconv" + + "github.com/harness/gitness/events" + "github.com/harness/gitness/gitrpc" + gitrpcenum "github.com/harness/gitness/gitrpc/enum" + pullreqevents "github.com/harness/gitness/internal/events/pullreq" +) + +// createHeadRefOnCreated handles pull request Created events. +// It creates the PR head git ref. 
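+// The ref is named after the PR number with type RefTypePullReqHead; its new value is the PR source SHA
+// and the old value is left empty because the ref is not expected to exist yet.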
+func (s *Service) createHeadRefOnCreated(ctx context.Context, + event *events.Event[*pullreqevents.CreatedPayload], +) error { + repoGit, err := s.repoGitInfoCache.Get(ctx, event.Payload.TargetRepoID) + if err != nil { + return fmt.Errorf("failed to get repo git info: %w", err) + } + + writeParams, err := createSystemRPCWriteParams(ctx, s.urlProvider, repoGit.ID, repoGit.GitUID) + if err != nil { + return fmt.Errorf("failed to generate rpc write params: %w", err) + } + + // TODO: This doesn't work for forked repos (only works when sourceRepo==targetRepo). + // This is because commits from the source repository must be first pulled into the target repository. + err = s.gitRPCClient.UpdateRef(ctx, gitrpc.UpdateRefParams{ + WriteParams: writeParams, + Name: strconv.Itoa(int(event.Payload.Number)), + Type: gitrpcenum.RefTypePullReqHead, + NewValue: event.Payload.SourceSHA, + OldValue: "", // this is a new pull request, so we expect that the ref doesn't exist + }) + if err != nil { + return fmt.Errorf("failed to update PR head ref: %w", err) + } + + return nil +} + +// updateHeadRefOnBranchUpdate handles pull request Branch Updated events. +// It updates the PR head git ref to point to the latest commit. +func (s *Service) updateHeadRefOnBranchUpdate(ctx context.Context, + event *events.Event[*pullreqevents.BranchUpdatedPayload], +) error { + repoGit, err := s.repoGitInfoCache.Get(ctx, event.Payload.TargetRepoID) + if err != nil { + return fmt.Errorf("failed to get repo git info: %w", err) + } + + writeParams, err := createSystemRPCWriteParams(ctx, s.urlProvider, repoGit.ID, repoGit.GitUID) + if err != nil { + return fmt.Errorf("failed to generate rpc write params: %w", err) + } + + // TODO: This doesn't work for forked repos (only works when sourceRepo==targetRepo) + // This is because commits from the source repository must be first pulled into the target repository. + err = s.gitRPCClient.UpdateRef(ctx, gitrpc.UpdateRefParams{ + WriteParams: writeParams, + Name: strconv.Itoa(int(event.Payload.Number)), + Type: gitrpcenum.RefTypePullReqHead, + NewValue: event.Payload.NewSHA, + OldValue: event.Payload.OldSHA, + }) + if err != nil { + return fmt.Errorf("failed to update PR head ref after new commit: %w", err) + } + + return nil +} + +// updateHeadRefOnReopen handles pull request StateChanged events. +// It updates the PR head git ref to point to the source branch commit SHA. +func (s *Service) updateHeadRefOnReopen(ctx context.Context, + event *events.Event[*pullreqevents.ReopenedPayload], +) error { + repoGit, err := s.repoGitInfoCache.Get(ctx, event.Payload.TargetRepoID) + if err != nil { + return fmt.Errorf("failed to get repo git info: %w", err) + } + + writeParams, err := createSystemRPCWriteParams(ctx, s.urlProvider, repoGit.ID, repoGit.GitUID) + if err != nil { + return fmt.Errorf("failed to generate rpc write params: %w", err) + } + + // TODO: This doesn't work for forked repos (only works when sourceRepo==targetRepo) + // This is because commits from the source repository must be first pulled into the target repository. 
+	err = s.gitRPCClient.UpdateRef(ctx, gitrpc.UpdateRefParams{
+		WriteParams: writeParams,
+		Name:        strconv.Itoa(int(event.Payload.Number)),
+		Type:        gitrpcenum.RefTypePullReqHead,
+		NewValue:    event.Payload.SourceSHA,
+		OldValue:    "", // the request is re-opened, so anything can be the old value
+	})
+	if err != nil {
+		return fmt.Errorf("failed to update PR head ref after pull request reopen: %w", err)
+	}
+
+	return nil
+}
diff --git a/internal/services/pullreq/handlers_mergeable.go b/internal/services/pullreq/handlers_mergeable.go
new file mode 100644
index 0000000000..f3346c0ec8
--- /dev/null
+++ b/internal/services/pullreq/handlers_mergeable.go
@@ -0,0 +1,279 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package pullreq
+
+import (
+	"context"
+	"fmt"
+	"strconv"
+	"time"
+
+	"github.com/harness/gitness/events"
+	"github.com/harness/gitness/gitrpc"
+	gitrpcenum "github.com/harness/gitness/gitrpc/enum"
+	pullreqevents "github.com/harness/gitness/internal/events/pullreq"
+	"github.com/harness/gitness/pubsub"
+	"github.com/harness/gitness/types"
+	"github.com/harness/gitness/types/enum"
+
+	"github.com/rs/zerolog/log"
+)
+
+const (
+	cancelMergeCheckKey = "cancel_merge_check_for_sha"
+	nilSHA              = "0000000000000000000000000000000000000000"
+)
+
+// mergeCheckOnCreated handles pull request Created events.
+// It runs the initial mergeability check for the new pull request.
+func (s *Service) mergeCheckOnCreated(ctx context.Context,
+	event *events.Event[*pullreqevents.CreatedPayload],
+) error {
+	return s.updateMergeData(
+		ctx,
+		event.Payload.TargetRepoID,
+		event.Payload.Number,
+		nilSHA,
+		event.Payload.SourceSHA,
+	)
+}
+
+// mergeCheckOnBranchUpdate handles pull request Branch Updated events.
+// It re-runs the mergeability check against the new source branch commit.
+func (s *Service) mergeCheckOnBranchUpdate(ctx context.Context,
+	event *events.Event[*pullreqevents.BranchUpdatedPayload],
+) error {
+	return s.updateMergeData(
+		ctx,
+		event.Payload.TargetRepoID,
+		event.Payload.Number,
+		event.Payload.OldSHA,
+		event.Payload.NewSHA,
+	)
+}
+
+// mergeCheckOnReopen handles pull request Reopened events.
+// It re-runs the mergeability check for the reopened pull request.
+func (s *Service) mergeCheckOnReopen(ctx context.Context,
+	event *events.Event[*pullreqevents.ReopenedPayload],
+) error {
+	return s.updateMergeData(
+		ctx,
+		event.Payload.TargetRepoID,
+		event.Payload.Number,
+		"",
+		event.Payload.SourceSHA,
+	)
+}
+
+// mergeCheckOnClosed deletes the merge ref.
+func (s *Service) mergeCheckOnClosed(ctx context.Context,
+	event *events.Event[*pullreqevents.ClosedPayload],
+) error {
+	return s.deleteMergeRef(ctx, event.Payload.SourceRepoID, event.Payload.Number)
+}
+
+// mergeCheckOnMerged deletes the merge ref.
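+// Like mergeCheckOnClosed, it delegates to deleteMergeRef, which removes the PR merge ref by
+// calling UpdateRef with an empty NewValue (gitrpc deletes a ref whose new value is empty).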
+func (s *Service) mergeCheckOnMerged(ctx context.Context,
+	event *events.Event[*pullreqevents.MergedPayload],
+) error {
+	return s.deleteMergeRef(ctx, event.Payload.SourceRepoID, event.Payload.Number)
+}
+
+func (s *Service) deleteMergeRef(ctx context.Context, repoID int64, prNum int64) error {
+	repo, err := s.repoGitInfoCache.Get(ctx, repoID)
+	if err != nil {
+		return fmt.Errorf("failed to get repo with ID %d: %w", repoID, err)
+	}
+
+	writeParams, err := createSystemRPCWriteParams(ctx, s.urlProvider, repo.ID, repo.GitUID)
+	if err != nil {
+		return fmt.Errorf("failed to generate rpc write params: %w", err)
+	}
+
+	// TODO: This doesn't work for forked repos
+	err = s.gitRPCClient.UpdateRef(ctx, gitrpc.UpdateRefParams{
+		WriteParams: writeParams,
+		Name:        strconv.Itoa(int(prNum)),
+		Type:        gitrpcenum.RefTypePullReqMerge,
+		NewValue:    "", // when NewValue is empty gitrpc will delete the ref.
+		OldValue:    "", // we don't care about the old value
+	})
+	if err != nil {
+		return fmt.Errorf("failed to remove PR merge ref: %w", err)
+	}
+
+	return nil
+}
+
+// UpdateMergeDataIfRequired rechecks the merge data of a PR.
+// TODO: This is a temporary solution - doesn't fix changed merge-base or other things.
+func (s *Service) UpdateMergeDataIfRequired(
+	ctx context.Context,
+	repoID int64,
+	prNum int64,
+) error {
+	pr, err := s.pullreqStore.FindByNumber(ctx, repoID, prNum)
+	if err != nil {
+		return fmt.Errorf("failed to get pull request number %d: %w", prNum, err)
+	}
+
+	// nothing to do if the check was already performed
+	if pr.MergeCheckStatus != enum.MergeCheckStatusUnchecked {
+		return nil
+	}
+
+	// WARNING: This CAN lead to two (or more) merge-checks on the same SHA
+	// running on different machines at the same time.
+	return s.updateMergeDataInner(ctx, pr, "", pr.SourceSHA)
+}
+
+//nolint:funlen // refactor if required.
+func (s *Service) updateMergeData(
+	ctx context.Context,
+	repoID int64,
+	prNum int64,
+	oldSHA string,
+	newSHA string,
+) error {
+	pr, err := s.pullreqStore.FindByNumber(ctx, repoID, prNum)
+	if err != nil {
+		return fmt.Errorf("failed to get pull request number %d: %w", prNum, err)
+	}
+
+	return s.updateMergeDataInner(ctx, pr, oldSHA, newSHA)
+}
+
+//nolint:funlen // refactor if required.
+func (s *Service) updateMergeDataInner(
+	ctx context.Context,
+	pr *types.PullReq,
+	oldSHA string,
+	newSHA string,
+) error {
+	// TODO: Merge check should not update the merge base.
+	// TODO: Instead it should accept it as an argument and fail if it doesn't match.
+	// Then it would no longer be necessary to cancel already running mergeability checks.
+
+	if pr.State != enum.PullReqStateOpen {
+		return fmt.Errorf("cannot do mergeability check on closed PR %d", pr.Number)
+	}
+
+	// cancel all previous mergeability work for this PR based on oldSHA
+	if err := s.pubsub.Publish(ctx, cancelMergeCheckKey, []byte(oldSHA),
+		pubsub.WithPublishNamespace("pullreq")); err != nil {
+		return err
+	}
+
+	var cancel context.CancelFunc
+	ctx, cancel = context.WithCancel(ctx)
+
+	s.cancelMutex.Lock()
+	// NOTE: Temporary workaround to avoid overwriting an existing cancel function on the same machine.
+ // This doesn't avoid same SHA running on multiple machines + if _, ok := s.cancelMergeability[newSHA]; ok { + s.cancelMutex.Unlock() + cancel() + return nil + } + s.cancelMergeability[newSHA] = cancel + s.cancelMutex.Unlock() + + defer func() { + cancel() + s.cancelMutex.Lock() + delete(s.cancelMergeability, newSHA) + s.cancelMutex.Unlock() + }() + + // load repository objects + targetRepo, err := s.repoGitInfoCache.Get(ctx, pr.TargetRepoID) + if err != nil { + return err + } + + sourceRepo := targetRepo + if pr.TargetRepoID != pr.SourceRepoID { + sourceRepo, err = s.repoGitInfoCache.Get(ctx, pr.SourceRepoID) + if err != nil { + return err + } + } + + writeParams, err := createSystemRPCWriteParams(ctx, s.urlProvider, targetRepo.ID, targetRepo.GitUID) + if err != nil { + return fmt.Errorf("failed to generate rpc write params: %w", err) + } + + // call merge and store output in pr merge reference. + now := time.Now() + var output gitrpc.MergeOutput + output, err = s.gitRPCClient.Merge(ctx, &gitrpc.MergeParams{ + WriteParams: writeParams, + BaseBranch: pr.TargetBranch, + HeadRepoUID: sourceRepo.GitUID, + HeadBranch: pr.SourceBranch, + RefType: gitrpcenum.RefTypePullReqMerge, + RefName: strconv.Itoa(int(pr.Number)), + HeadExpectedSHA: newSHA, + Force: true, + + // set committer date to ensure repeatability of merge commit across replicas + CommitterDate: &now, + }) + if gitrpc.ErrorStatus(err) == gitrpc.StatusPreconditionFailed { + return events.NewDiscardEventErrorf("Source branch '%s' is not on SHA '%s' anymore.", + pr.SourceBranch, newSHA) + } + + isNotMergeableError := gitrpc.ErrorStatus(err) == gitrpc.StatusNotMergeable + if err != nil && !isNotMergeableError { + return fmt.Errorf("merge check failed for %d:%s and %d:%s with err: %w", + targetRepo.ID, pr.TargetBranch, + sourceRepo.ID, pr.SourceBranch, + err) + } + + // Update DB in both cases (failure or success) + _, err = s.pullreqStore.UpdateOptLock(ctx, pr, func(pr *types.PullReq) error { + if pr.SourceSHA != newSHA { + return events.NewDiscardEventErrorf("PR SHA %s is newer than %s", pr.SourceSHA, newSHA) + } + + if isNotMergeableError { + // TODO: gitrpc should return sha's either way, and also conflicting files! + pr.MergeCheckStatus = enum.MergeCheckStatusConflict + pr.MergeTargetSHA = &output.BaseSHA + pr.MergeSHA = nil + pr.MergeConflicts = nil + } else { + pr.MergeCheckStatus = enum.MergeCheckStatusMergeable + pr.MergeTargetSHA = &output.BaseSHA + pr.MergeBaseSHA = output.MergeBaseSHA // TODO: Merge check should not update the merge base. + pr.MergeSHA = &output.MergeSHA + pr.MergeConflicts = nil + } + return nil + }) + if err != nil { + return fmt.Errorf("failed to update PR merge ref in db with error: %w", err) + } + + if err = s.sseStreamer.Publish(ctx, targetRepo.ParentID, enum.SSETypePullrequesUpdated, pr); err != nil { + log.Ctx(ctx).Warn().Msg("failed to publish PR changed event") + } + + return nil +} diff --git a/internal/services/pullreq/service.go b/internal/services/pullreq/service.go new file mode 100644 index 0000000000..4cc6e97544 --- /dev/null +++ b/internal/services/pullreq/service.go @@ -0,0 +1,284 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pullreq + +import ( + "context" + "fmt" + "sync" + "time" + + "github.com/harness/gitness/events" + "github.com/harness/gitness/gitrpc" + "github.com/harness/gitness/internal/bootstrap" + gitevents "github.com/harness/gitness/internal/events/git" + pullreqevents "github.com/harness/gitness/internal/events/pullreq" + "github.com/harness/gitness/internal/githook" + "github.com/harness/gitness/internal/services/codecomments" + "github.com/harness/gitness/internal/sse" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/internal/url" + "github.com/harness/gitness/pubsub" + "github.com/harness/gitness/stream" + "github.com/harness/gitness/types" + + "github.com/jmoiron/sqlx" +) + +type Service struct { + pullreqEvReporter *pullreqevents.Reporter + gitRPCClient gitrpc.Interface + db *sqlx.DB + repoGitInfoCache store.RepoGitInfoCache + repoStore store.RepoStore + pullreqStore store.PullReqStore + activityStore store.PullReqActivityStore + codeCommentView store.CodeCommentView + codeCommentMigrator *codecomments.Migrator + fileViewStore store.PullReqFileViewStore + sseStreamer sse.Streamer + urlProvider *url.Provider + + cancelMutex sync.Mutex + cancelMergeability map[string]context.CancelFunc + + pubsub pubsub.PubSub +} + +//nolint:funlen // needs refactoring +func New(ctx context.Context, + config *types.Config, + gitReaderFactory *events.ReaderFactory[*gitevents.Reader], + pullreqEvReaderFactory *events.ReaderFactory[*pullreqevents.Reader], + pullreqEvReporter *pullreqevents.Reporter, + gitRPCClient gitrpc.Interface, + db *sqlx.DB, + repoGitInfoCache store.RepoGitInfoCache, + repoStore store.RepoStore, + pullreqStore store.PullReqStore, + activityStore store.PullReqActivityStore, + codeCommentView store.CodeCommentView, + codeCommentMigrator *codecomments.Migrator, + fileViewStore store.PullReqFileViewStore, + bus pubsub.PubSub, + urlProvider *url.Provider, + sseStreamer sse.Streamer, +) (*Service, error) { + service := &Service{ + pullreqEvReporter: pullreqEvReporter, + gitRPCClient: gitRPCClient, + db: db, + repoGitInfoCache: repoGitInfoCache, + repoStore: repoStore, + pullreqStore: pullreqStore, + activityStore: activityStore, + codeCommentView: codeCommentView, + urlProvider: urlProvider, + codeCommentMigrator: codeCommentMigrator, + fileViewStore: fileViewStore, + cancelMergeability: make(map[string]context.CancelFunc), + pubsub: bus, + sseStreamer: sseStreamer, + } + + var err error + + // handle git branch events to trigger specific pull request events + + const groupGit = "gitness:pullreq:git" + _, err = gitReaderFactory.Launch(ctx, groupGit, config.InstanceID, + func(r *gitevents.Reader) error { + const idleTimeout = 15 * time.Second + r.Configure( + stream.WithConcurrency(1), + stream.WithHandlerOptions( + stream.WithIdleTimeout(idleTimeout), + stream.WithMaxRetries(3), + )) + + _ = r.RegisterBranchUpdated(service.triggerPREventOnBranchUpdate) + _ = r.RegisterBranchDeleted(service.closePullReqOnBranchDelete) + + return nil + }) + if err != nil { + return nil, err + } + + // pull request ref maintenance + + const groupPullReqHeadRef = 
"gitness:pullreq:headref" + _, err = pullreqEvReaderFactory.Launch(ctx, groupPullReqHeadRef, config.InstanceID, + func(r *pullreqevents.Reader) error { + const idleTimeout = 10 * time.Second + r.Configure( + stream.WithConcurrency(1), + stream.WithHandlerOptions( + stream.WithIdleTimeout(idleTimeout), + stream.WithMaxRetries(3), + )) + + _ = r.RegisterCreated(service.createHeadRefOnCreated) + _ = r.RegisterBranchUpdated(service.updateHeadRefOnBranchUpdate) + _ = r.RegisterReopened(service.updateHeadRefOnReopen) + + return nil + }) + if err != nil { + return nil, err + } + + // pull request file viewed maintenance + + const groupPullReqFileViewed = "gitness:pullreq:fileviewed" + _, err = pullreqEvReaderFactory.Launch(ctx, groupPullReqFileViewed, config.InstanceID, + func(r *pullreqevents.Reader) error { + const idleTimeout = 30 * time.Second + r.Configure( + stream.WithConcurrency(3), + stream.WithHandlerOptions( + stream.WithIdleTimeout(idleTimeout), + stream.WithMaxRetries(1), + )) + + _ = r.RegisterBranchUpdated(service.handleFileViewedOnBranchUpdate) + + return nil + }) + if err != nil { + return nil, err + } + + const groupPullReqCounters = "gitness:pullreq:counters" + _, err = pullreqEvReaderFactory.Launch(ctx, groupPullReqCounters, config.InstanceID, + func(r *pullreqevents.Reader) error { + const idleTimeout = 10 * time.Second + r.Configure( + stream.WithConcurrency(1), + stream.WithHandlerOptions( + stream.WithIdleTimeout(idleTimeout), + stream.WithMaxRetries(2), + )) + + _ = r.RegisterCreated(service.updatePRCountersOnCreated) + _ = r.RegisterReopened(service.updatePRCountersOnReopened) + _ = r.RegisterClosed(service.updatePRCountersOnClosed) + _ = r.RegisterMerged(service.updatePRCountersOnMerged) + + return nil + }) + if err != nil { + return nil, err + } + + // mergeability check + const groupPullReqMergeable = "gitness:pullreq:mergeable" + _, err = pullreqEvReaderFactory.Launch(ctx, groupPullReqMergeable, config.InstanceID, + func(r *pullreqevents.Reader) error { + const idleTimeout = 30 * time.Second + r.Configure( + stream.WithConcurrency(3), + stream.WithHandlerOptions( + stream.WithIdleTimeout(idleTimeout), + stream.WithMaxRetries(2), + )) + + _ = r.RegisterCreated(service.mergeCheckOnCreated) + _ = r.RegisterBranchUpdated(service.mergeCheckOnBranchUpdate) + _ = r.RegisterReopened(service.mergeCheckOnReopen) + _ = r.RegisterClosed(service.mergeCheckOnClosed) + _ = r.RegisterMerged(service.mergeCheckOnMerged) + + return nil + }) + if err != nil { + return nil, err + } + + // cancel any previous pr mergeability check + // payload is oldsha. 
+ _ = bus.Subscribe(ctx, cancelMergeCheckKey, func(payload []byte) error { + oldSHA := string(payload) + if oldSHA == "" { + return nil + } + + service.cancelMutex.Lock() + defer service.cancelMutex.Unlock() + + cancel := service.cancelMergeability[oldSHA] + if cancel != nil { + cancel() + } + + delete(service.cancelMergeability, oldSHA) + + return nil + }, pubsub.WithChannelNamespace("pullreq")) + + // mergeability check + const groupPullReqCodeComments = "gitness:pullreq:codecomments" + _, err = pullreqEvReaderFactory.Launch(ctx, groupPullReqCodeComments, config.InstanceID, + func(r *pullreqevents.Reader) error { + const idleTimeout = 10 * time.Second + r.Configure( + stream.WithConcurrency(3), + stream.WithHandlerOptions( + stream.WithIdleTimeout(idleTimeout), + stream.WithMaxRetries(2), + )) + + _ = r.RegisterBranchUpdated(service.updateCodeCommentsOnBranchUpdate) + _ = r.RegisterReopened(service.updateCodeCommentsOnReopen) + + return nil + }) + if err != nil { + return nil, err + } + + return service, nil +} + +// createSystemRPCWriteParams creates base write parameters for gitrpc write operations. +func createSystemRPCWriteParams( + ctx context.Context, + urlProvider *url.Provider, + repoID int64, + repoGITUID string, +) (gitrpc.WriteParams, error) { + principal := bootstrap.NewSystemServiceSession().Principal + + // generate envars (add everything githook CLI needs for execution) + envVars, err := githook.GenerateEnvironmentVariables( + ctx, + urlProvider.GetAPIBaseURLInternal(), + repoID, + principal.ID, + false, + ) + if err != nil { + return gitrpc.WriteParams{}, fmt.Errorf("failed to generate git hook environment variables: %w", err) + } + + return gitrpc.WriteParams{ + Actor: gitrpc.Identity{ + Name: principal.DisplayName, + Email: principal.Email, + }, + RepoUID: repoGITUID, + EnvVars: envVars, + }, nil +} diff --git a/internal/services/pullreq/wire.go b/internal/services/pullreq/wire.go new file mode 100644 index 0000000000..9e3e34fbd9 --- /dev/null +++ b/internal/services/pullreq/wire.go @@ -0,0 +1,60 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package pullreq + +import ( + "context" + + "github.com/harness/gitness/events" + "github.com/harness/gitness/gitrpc" + gitevents "github.com/harness/gitness/internal/events/git" + pullreqevents "github.com/harness/gitness/internal/events/pullreq" + "github.com/harness/gitness/internal/services/codecomments" + "github.com/harness/gitness/internal/sse" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/internal/url" + "github.com/harness/gitness/pubsub" + "github.com/harness/gitness/types" + + "github.com/google/wire" + "github.com/jmoiron/sqlx" +) + +var WireSet = wire.NewSet( + ProvideService, +) + +func ProvideService(ctx context.Context, + config *types.Config, + gitReaderFactory *events.ReaderFactory[*gitevents.Reader], + pullReqEvFactory *events.ReaderFactory[*pullreqevents.Reader], + pullReqEvReporter *pullreqevents.Reporter, + gitRPCClient gitrpc.Interface, + db *sqlx.DB, + repoGitInfoCache store.RepoGitInfoCache, + repoStore store.RepoStore, + pullreqStore store.PullReqStore, + activityStore store.PullReqActivityStore, + codeCommentView store.CodeCommentView, + codeCommentMigrator *codecomments.Migrator, + fileViewStore store.PullReqFileViewStore, + pubsub pubsub.PubSub, + urlProvider *url.Provider, + sseStreamer sse.Streamer, +) (*Service, error) { + return New(ctx, config, gitReaderFactory, pullReqEvFactory, pullReqEvReporter, gitRPCClient, + db, repoGitInfoCache, repoStore, pullreqStore, activityStore, + codeCommentView, codeCommentMigrator, fileViewStore, pubsub, urlProvider, sseStreamer) +} diff --git a/internal/services/trigger/handler_branch.go b/internal/services/trigger/handler_branch.go new file mode 100644 index 0000000000..b5e6f8d974 --- /dev/null +++ b/internal/services/trigger/handler_branch.go @@ -0,0 +1,94 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package trigger + +import ( + "context" + "fmt" + "strings" + + "github.com/harness/gitness/events" + "github.com/harness/gitness/internal/bootstrap" + gitevents "github.com/harness/gitness/internal/events/git" + "github.com/harness/gitness/internal/pipeline/triggerer" + "github.com/harness/gitness/types/enum" +) + +// TODO: This can be moved to SCM library +func ExtractBranch(ref string) string { + return strings.TrimPrefix(ref, "refs/heads/") +} + +func (s *Service) handleEventBranchCreated(ctx context.Context, + event *events.Event[*gitevents.BranchCreatedPayload]) error { + hook := &triggerer.Hook{ + Trigger: enum.TriggerHook, + Action: enum.TriggerActionBranchCreated, + Ref: event.Payload.Ref, + Source: ExtractBranch(event.Payload.Ref), + TriggeredBy: bootstrap.NewSystemServiceSession().Principal.ID, + Target: ExtractBranch(event.Payload.Ref), + After: event.Payload.SHA, + } + err := s.augmentCommitInfo(ctx, hook, event.Payload.RepoID, event.Payload.SHA) + if err != nil { + return fmt.Errorf("could not augment commit info: %w", err) + } + return s.trigger(ctx, event.Payload.RepoID, enum.TriggerActionBranchCreated, hook) +} + +func (s *Service) handleEventBranchUpdated(ctx context.Context, + event *events.Event[*gitevents.BranchUpdatedPayload]) error { + hook := &triggerer.Hook{ + Trigger: enum.TriggerHook, + Action: enum.TriggerActionBranchUpdated, + Ref: event.Payload.Ref, + Before: event.Payload.OldSHA, + After: event.Payload.NewSHA, + TriggeredBy: bootstrap.NewSystemServiceSession().Principal.ID, + Source: ExtractBranch(event.Payload.Ref), + Target: ExtractBranch(event.Payload.Ref), + } + err := s.augmentCommitInfo(ctx, hook, event.Payload.RepoID, event.Payload.NewSHA) + if err != nil { + return fmt.Errorf("could not augment commit info: %w", err) + } + return s.trigger(ctx, event.Payload.RepoID, enum.TriggerActionBranchUpdated, hook) +} + +// augmentCommitInfo adds information about the commit to the hook by interacting with +// the commit service. +func (s *Service) augmentCommitInfo( + ctx context.Context, + hook *triggerer.Hook, + repoID int64, + sha string, +) error { + repo, err := s.repoStore.Find(ctx, repoID) + if err != nil { + return fmt.Errorf("could not find repo: %w", err) + } + commit, err := s.commitSvc.FindCommit(ctx, repo, sha) + if err != nil { + return fmt.Errorf("could not find commit info") + } + hook.AuthorName = commit.Author.Identity.Name + hook.Title = commit.Title + hook.Timestamp = commit.Committer.When.UnixMilli() + hook.AuthorLogin = commit.Author.Identity.Name + hook.AuthorEmail = commit.Author.Identity.Email + hook.Message = commit.Message + return nil +} diff --git a/internal/services/trigger/handler_pullreq.go b/internal/services/trigger/handler_pullreq.go new file mode 100644 index 0000000000..900832ac51 --- /dev/null +++ b/internal/services/trigger/handler_pullreq.go @@ -0,0 +1,98 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
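+
+// The pull request trigger handlers below build a triggerer.Hook from the stored pull request and fire
+// the matching pipeline triggers; the source branch is expanded to a full git ref via scm.ExpandRef
+// (e.g. "feature" becomes "refs/heads/feature").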
+ +package trigger + +import ( + "context" + "fmt" + + "github.com/harness/gitness/events" + "github.com/harness/gitness/internal/bootstrap" + pullreqevents "github.com/harness/gitness/internal/events/pullreq" + "github.com/harness/gitness/internal/pipeline/triggerer" + "github.com/harness/gitness/types/enum" + + "github.com/drone/go-scm/scm" +) + +func (s *Service) handleEventPullReqCreated(ctx context.Context, + event *events.Event[*pullreqevents.CreatedPayload]) error { + hook := &triggerer.Hook{ + Trigger: enum.TriggerHook, + Action: enum.TriggerActionPullReqCreated, + TriggeredBy: bootstrap.NewSystemServiceSession().Principal.ID, + After: event.Payload.SourceSHA, + } + err := s.augmentPullReqInfo(ctx, hook, event.Payload.PullReqID) + if err != nil { + return fmt.Errorf("could not augment pull request info: %w", err) + } + return s.trigger(ctx, event.Payload.SourceRepoID, enum.TriggerActionPullReqCreated, hook) +} + +func (s *Service) handleEventPullReqReopened(ctx context.Context, + event *events.Event[*pullreqevents.ReopenedPayload]) error { + hook := &triggerer.Hook{ + Trigger: enum.TriggerHook, + Action: enum.TriggerActionPullReqReopened, + TriggeredBy: bootstrap.NewSystemServiceSession().Principal.ID, + After: event.Payload.SourceSHA, + } + err := s.augmentPullReqInfo(ctx, hook, event.Payload.PullReqID) + if err != nil { + return fmt.Errorf("could not augment pull request info: %w", err) + } + return s.trigger(ctx, event.Payload.SourceRepoID, enum.TriggerActionPullReqReopened, hook) +} + +func (s *Service) handleEventPullReqBranchUpdated(ctx context.Context, + event *events.Event[*pullreqevents.BranchUpdatedPayload]) error { + hook := &triggerer.Hook{ + Trigger: enum.TriggerHook, + Action: enum.TriggerActionPullReqBranchUpdated, + TriggeredBy: bootstrap.NewSystemServiceSession().Principal.ID, + After: event.Payload.NewSHA, + } + err := s.augmentPullReqInfo(ctx, hook, event.Payload.PullReqID) + if err != nil { + return fmt.Errorf("could not augment pull request info: %w", err) + } + return s.trigger(ctx, event.Payload.SourceRepoID, enum.TriggerActionPullReqBranchUpdated, hook) +} + +// augmentPullReqInfo adds in information into the hook pertaining to the pull request +// by querying the database. +func (s *Service) augmentPullReqInfo( + ctx context.Context, + hook *triggerer.Hook, + pullReqID int64, +) error { + pullreq, err := s.pullReqStore.Find(ctx, pullReqID) + if err != nil { + return fmt.Errorf("could not find pull request: %w", err) + } + hook.Title = pullreq.Title + hook.Timestamp = pullreq.Created + hook.AuthorLogin = pullreq.Author.UID + hook.AuthorName = pullreq.Author.DisplayName + hook.AuthorEmail = pullreq.Author.Email + hook.Message = pullreq.Description + hook.Before = pullreq.MergeBaseSHA + hook.Target = pullreq.TargetBranch + hook.Source = pullreq.SourceBranch + // expand the branch to a git reference. + hook.Ref = scm.ExpandRef(pullreq.SourceBranch, "refs/heads") + return nil +} diff --git a/internal/services/trigger/handler_tag.go b/internal/services/trigger/handler_tag.go new file mode 100644 index 0000000000..45d487ae79 --- /dev/null +++ b/internal/services/trigger/handler_tag.go @@ -0,0 +1,64 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package trigger + +import ( + "context" + "fmt" + + "github.com/harness/gitness/events" + "github.com/harness/gitness/internal/bootstrap" + gitevents "github.com/harness/gitness/internal/events/git" + "github.com/harness/gitness/internal/pipeline/triggerer" + "github.com/harness/gitness/types/enum" +) + +func (s *Service) handleEventTagCreated(ctx context.Context, + event *events.Event[*gitevents.TagCreatedPayload]) error { + hook := &triggerer.Hook{ + Trigger: enum.TriggerHook, + Action: enum.TriggerActionTagCreated, + TriggeredBy: bootstrap.NewSystemServiceSession().Principal.ID, + Ref: event.Payload.Ref, + Before: event.Payload.SHA, + After: event.Payload.SHA, + Source: event.Payload.Ref, + Target: event.Payload.Ref, + } + err := s.augmentCommitInfo(ctx, hook, event.Payload.RepoID, event.Payload.SHA) + if err != nil { + return fmt.Errorf("could not augment commit info: %w", err) + } + return s.trigger(ctx, event.Payload.RepoID, enum.TriggerActionTagCreated, hook) +} + +func (s *Service) handleEventTagUpdated(ctx context.Context, + event *events.Event[*gitevents.TagUpdatedPayload]) error { + hook := &triggerer.Hook{ + Trigger: enum.TriggerHook, + Action: enum.TriggerActionTagUpdated, + TriggeredBy: bootstrap.NewSystemServiceSession().Principal.ID, + Ref: event.Payload.Ref, + Before: event.Payload.OldSHA, + After: event.Payload.NewSHA, + Source: event.Payload.Ref, + Target: event.Payload.Ref, + } + err := s.augmentCommitInfo(ctx, hook, event.Payload.RepoID, event.Payload.NewSHA) + if err != nil { + return fmt.Errorf("could not augment commit info: %w", err) + } + return s.trigger(ctx, event.Payload.RepoID, enum.TriggerActionTagUpdated, hook) +} diff --git a/internal/services/trigger/service.go b/internal/services/trigger/service.go new file mode 100644 index 0000000000..cb92dfd5bf --- /dev/null +++ b/internal/services/trigger/service.go @@ -0,0 +1,186 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package trigger + +import ( + "context" + "errors" + "fmt" + "time" + + "github.com/harness/gitness/events" + gitevents "github.com/harness/gitness/internal/events/git" + pullreqevents "github.com/harness/gitness/internal/events/pullreq" + "github.com/harness/gitness/internal/pipeline/commit" + "github.com/harness/gitness/internal/pipeline/triggerer" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/stream" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/hashicorp/go-multierror" +) + +const ( + eventsReaderGroupName = "gitness:trigger" +) + +type Config struct { + EventReaderName string + Concurrency int + MaxRetries int +} + +func (c *Config) Prepare() error { + if c == nil { + return errors.New("config is required") + } + if c.EventReaderName == "" { + return errors.New("config.EventReaderName is required") + } + if c.Concurrency < 1 { + return errors.New("config.Concurrency has to be a positive number") + } + if c.MaxRetries < 0 { + return errors.New("config.MaxRetries can't be negative") + } + + return nil +} + +type Service struct { + triggerStore store.TriggerStore + pullReqStore store.PullReqStore + repoStore store.RepoStore + pipelineStore store.PipelineStore + triggerSvc triggerer.Triggerer + commitSvc commit.CommitService +} + +func New( + ctx context.Context, + config Config, + triggerStore store.TriggerStore, + pullReqStore store.PullReqStore, + repoStore store.RepoStore, + pipelineStore store.PipelineStore, + triggerSvc triggerer.Triggerer, + commitSvc commit.CommitService, + gitReaderFactory *events.ReaderFactory[*gitevents.Reader], + pullreqEvReaderFactory *events.ReaderFactory[*pullreqevents.Reader], +) (*Service, error) { + if err := config.Prepare(); err != nil { + return nil, fmt.Errorf("provided trigger service config is invalid: %w", err) + } + + service := &Service{ + triggerStore: triggerStore, + pullReqStore: pullReqStore, + repoStore: repoStore, + commitSvc: commitSvc, + pipelineStore: pipelineStore, + triggerSvc: triggerSvc, + } + + _, err := gitReaderFactory.Launch(ctx, eventsReaderGroupName, config.EventReaderName, + func(r *gitevents.Reader) error { + const idleTimeout = 1 * time.Minute + r.Configure( + stream.WithConcurrency(config.Concurrency), + stream.WithHandlerOptions( + stream.WithIdleTimeout(idleTimeout), + stream.WithMaxRetries(config.MaxRetries), + )) + + _ = r.RegisterBranchCreated(service.handleEventBranchCreated) + _ = r.RegisterBranchUpdated(service.handleEventBranchUpdated) + + _ = r.RegisterTagCreated(service.handleEventTagCreated) + _ = r.RegisterTagUpdated(service.handleEventTagUpdated) + + return nil + }) + if err != nil { + return nil, fmt.Errorf("failed to launch git events reader: %w", err) + } + + _, err = pullreqEvReaderFactory.Launch(ctx, eventsReaderGroupName, config.EventReaderName, + func(r *pullreqevents.Reader) error { + const idleTimeout = 1 * time.Minute + r.Configure( + stream.WithConcurrency(config.Concurrency), + stream.WithHandlerOptions( + stream.WithIdleTimeout(idleTimeout), + // retries not needed for builds which failed to trigger, can be adjusted when needed + stream.WithMaxRetries(0), + )) + + _ = r.RegisterCreated(service.handleEventPullReqCreated) + _ = r.RegisterBranchUpdated(service.handleEventPullReqBranchUpdated) + _ = r.RegisterReopened(service.handleEventPullReqReopened) + + return nil + }) + if err != nil { + return nil, fmt.Errorf("failed to launch pr events reader: %w", err) + } + + return service, nil +} + +// trigger a build given 
an action on a repo and a hook. +// It tries to find all enabled triggers, see if the action is the same +// as the trigger action - and if so, find the pipeline for the trigger +// and fire an execution. +func (s *Service) trigger(ctx context.Context, repoID int64, + action enum.TriggerAction, hook *triggerer.Hook) error { + // Get all enabled triggers for a repo. + ret, err := s.triggerStore.ListAllEnabled(ctx, repoID) + if err != nil { + return fmt.Errorf("failed to list all enabled triggers: %w", err) + } + validTriggers := []*types.Trigger{} + // Check which triggers are eligible to be fired + for _, t := range ret { + for _, a := range t.Actions { + if a == action { + validTriggers = append(validTriggers, t) + break + } + } + } + + var errs error + for _, t := range validTriggers { + // TODO: We can make a minor optimization here to not fetch a pipeline each time + // since there could be multiple triggers for a pipeline. + pipeline, err := s.pipelineStore.Find(ctx, t.PipelineID) + if err != nil { + errs = multierror.Append(errs, err) + continue + } + + // Don't fire triggers for disabled pipelines + if pipeline.Disabled { + continue + } + + _, err = s.triggerSvc.Trigger(ctx, pipeline, hook) + if err != nil { + errs = multierror.Append(errs, err) + } + } + return errs +} diff --git a/internal/services/trigger/wire.go b/internal/services/trigger/wire.go new file mode 100644 index 0000000000..1c758840dc --- /dev/null +++ b/internal/services/trigger/wire.go @@ -0,0 +1,48 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package trigger + +import ( + "context" + + "github.com/harness/gitness/events" + gitevents "github.com/harness/gitness/internal/events/git" + pullreqevents "github.com/harness/gitness/internal/events/pullreq" + "github.com/harness/gitness/internal/pipeline/commit" + "github.com/harness/gitness/internal/pipeline/triggerer" + "github.com/harness/gitness/internal/store" + + "github.com/google/wire" +) + +var WireSet = wire.NewSet( + ProvideService, +) + +func ProvideService( + ctx context.Context, + config Config, + triggerStore store.TriggerStore, + commitSvc commit.CommitService, + pullReqStore store.PullReqStore, + repoStore store.RepoStore, + pipelineStore store.PipelineStore, + triggerSvc triggerer.Triggerer, + gitReaderFactory *events.ReaderFactory[*gitevents.Reader], + pullReqEvFactory *events.ReaderFactory[*pullreqevents.Reader], +) (*Service, error) { + return New(ctx, config, triggerStore, pullReqStore, repoStore, pipelineStore, triggerSvc, + commitSvc, gitReaderFactory, pullReqEvFactory) +} diff --git a/internal/services/webhook/events.go b/internal/services/webhook/events.go new file mode 100644 index 0000000000..4655aa30c1 --- /dev/null +++ b/internal/services/webhook/events.go @@ -0,0 +1,195 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package webhook + +import ( + "context" + "errors" + "fmt" + + "github.com/harness/gitness/events" + "github.com/harness/gitness/store" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/rs/zerolog/log" + "go.uber.org/multierr" +) + +func generateTriggerIDFromEventID(eventID string) string { + return fmt.Sprintf("event-%s", eventID) +} + +// triggerForEventWithRepo triggers all webhooks for the given repo and triggerType +// using the eventID to generate a deterministic triggerID and using the output of bodyFn as payload. +// The method tries to find the repository and principal and provides both to the bodyFn to generate the body. +// NOTE: technically we could avoid this call if we send the data via the event (though then events will get big). +func (s *Service) triggerForEventWithRepo(ctx context.Context, + triggerType enum.WebhookTrigger, eventID string, principalID int64, repoID int64, + createBodyFn func(*types.Principal, *types.Repository) (any, error)) error { + principal, err := s.findPrincipalForEvent(ctx, principalID) + if err != nil { + return err + } + + repo, err := s.findRepositoryForEvent(ctx, repoID) + if err != nil { + return err + } + + // create body + body, err := createBodyFn(principal, repo) + if err != nil { + return fmt.Errorf("body creation function failed: %w", err) + } + + return s.triggerForEvent(ctx, eventID, enum.WebhookParentRepo, repo.ID, triggerType, body) +} + +// triggerForEventWithPullReq triggers all webhooks for the given repo and triggerType +// using the eventID to generate a deterministic triggerID and using the output of bodyFn as payload. +// The method tries to find the pullreq, principal, target repo, and source repo +// and provides all to the bodyFn to generate the body. +// NOTE: technically we could avoid this call if we send the data via the event (though then events will get big). +func (s *Service) triggerForEventWithPullReq(ctx context.Context, + triggerType enum.WebhookTrigger, eventID string, principalID int64, prID int64, + createBodyFn func(principal *types.Principal, pr *types.PullReq, + targetRepo *types.Repository, sourceRepo *types.Repository) (any, error)) error { + principal, err := s.findPrincipalForEvent(ctx, principalID) + if err != nil { + return err + } + + pr, err := s.findPullReqForEvent(ctx, prID) + if err != nil { + return err + } + + targetRepo, err := s.findRepositoryForEvent(ctx, pr.TargetRepoID) + if err != nil { + return fmt.Errorf("failed to get pr target repo: %w", err) + } + + sourceRepo := targetRepo + if pr.SourceRepoID != pr.TargetRepoID { + sourceRepo, err = s.findRepositoryForEvent(ctx, pr.SourceRepoID) + if err != nil { + return fmt.Errorf("failed to get pr source repo: %w", err) + } + } + + // create body + body, err := createBodyFn(principal, pr, targetRepo, sourceRepo) + if err != nil { + return fmt.Errorf("body creation function failed: %w", err) + } + + return s.triggerForEvent(ctx, eventID, enum.WebhookParentRepo, targetRepo.ID, triggerType, body) +} + +// findRepositoryForEvent finds the repository for the provided repoID. 
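+// A not-found result is translated into a discard-event error so the stream framework drops
+// the event instead of retrying it; all other errors are returned as-is to force reprocessing.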
+func (s *Service) findRepositoryForEvent(ctx context.Context, repoID int64) (*types.Repository, error) {
+	repo, err := s.repoStore.Find(ctx, repoID)
+
+	if err != nil && errors.Is(err, store.ErrResourceNotFound) {
+		// a not-found error is unrecoverable - most likely a race condition with the repo being deleted by now
+		return nil, events.NewDiscardEventErrorf("repo with id '%d' doesn't exist anymore", repoID)
+	}
+	if err != nil {
+		// all other errors we return and force the event to be reprocessed
+		return nil, fmt.Errorf("failed to get repo for id '%d': %w", repoID, err)
+	}
+
+	return repo, nil
+}
+
+// findPullReqForEvent finds the pull request for the provided prID.
+func (s *Service) findPullReqForEvent(ctx context.Context, prID int64) (*types.PullReq, error) {
+	pr, err := s.pullreqStore.Find(ctx, prID)
+
+	if err != nil && errors.Is(err, store.ErrResourceNotFound) {
+		// a not-found error is unrecoverable - most likely a race condition with the PR being deleted by now
+		return nil, events.NewDiscardEventErrorf("PR with id '%d' doesn't exist anymore", prID)
+	}
+	if err != nil {
+		// all other errors we return and force the event to be reprocessed
+		return nil, fmt.Errorf("failed to get PR for id '%d': %w", prID, err)
+	}
+
+	return pr, nil
+}
+
+// findPrincipalForEvent finds the principal for the provided principalID.
+func (s *Service) findPrincipalForEvent(ctx context.Context, principalID int64) (*types.Principal, error) {
+	principal, err := s.principalStore.Find(ctx, principalID)
+
+	if err != nil && errors.Is(err, store.ErrResourceNotFound) {
+		// this should never happen (as we won't delete principals) - discard event
+		return nil, events.NewDiscardEventErrorf("principal with id '%d' doesn't exist anymore", principalID)
+	}
+	if err != nil {
+		// all other errors we return and force the event to be reprocessed
+		return nil, fmt.Errorf("failed to get principal for id '%d': %w", principalID, err)
+	}
+
+	return principal, nil
+}
+
+// triggerForEvent triggers all webhooks for the given parentType/ID and triggerType
+// using the eventID to generate a deterministic triggerID and sending the provided body as payload.
+func (s *Service) triggerForEvent(ctx context.Context, eventID string,
+	parentType enum.WebhookParent, parentID int64, triggerType enum.WebhookTrigger, body any) error {
+	triggerID := generateTriggerIDFromEventID(eventID)
+
+	results, err := s.triggerWebhooksFor(ctx, parentType, parentID, triggerID, triggerType, body)
+
+	// return all errors and force the event to be reprocessed (it's not webhook execution specific!)
+	if err != nil {
+		return fmt.Errorf("failed to trigger %s (id: '%s') for webhooks of %s %d: %w",
+			triggerType, triggerID, parentType, parentID, err)
+	}
+
+	// go through all execution results and figure out if we need to retry the event.
+ // Combine all errors into a single error to log (to reduce number of logs) + retryRequired := false + var errs error + for _, result := range results { + if result.Skipped() { + continue + } + + // combine errors of non-successful executions + if result.Execution.Result != enum.WebhookExecutionResultSuccess { + errs = multierr.Append(errs, fmt.Errorf("execution %d of webhook %d resulted in %s: %w", + result.Execution.ID, result.Webhook.ID, result.Execution.Result, result.Err)) + } + + if result.Execution.Result == enum.WebhookExecutionResultRetriableError { + retryRequired = true + } + } + + // in case there was at least one error, log error details in single log to reduce log flooding + if errs != nil { + log.Ctx(ctx).Warn().Err(errs).Msgf("webhook execution for %s %d had errors", parentType, parentID) + } + + // in case at least one webhook has to be retried, return an error to the event framework to have it reprocessed + if retryRequired { + return fmt.Errorf("at least one webhook execution resulted in a retry for %s %d", parentType, parentID) + } + + return nil +} diff --git a/internal/services/webhook/handler_branch.go b/internal/services/webhook/handler_branch.go new file mode 100644 index 0000000000..a5ac3e71fd --- /dev/null +++ b/internal/services/webhook/handler_branch.go @@ -0,0 +1,163 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package webhook + +import ( + "context" + "fmt" + + "github.com/harness/gitness/events" + "github.com/harness/gitness/gitrpc" + gitevents "github.com/harness/gitness/internal/events/git" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +// ReferencePayload describes the payload of Reference related webhook triggers. +// Note: Use same payload for all reference operations to make it easier for consumers. +type ReferencePayload struct { + BaseSegment + ReferenceSegment + ReferenceDetailsSegment + ReferenceUpdateSegment +} + +// handleEventBranchCreated handles branch created events +// and triggers branch created webhooks for the source repo. 
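+// For illustration, the resulting ReferencePayload roughly serializes to the following JSON
+// (field names come from the segment types in types.go; concrete values and the exact trigger
+// string are examples only):
+//
+//	{
+//	  "trigger": "branch_created",
+//	  "repo": {"id": 1, "path": "space/repo", "uid": "repo", "default_branch": "main", "git_url": "..."},
+//	  "principal": {"id": 2, "uid": "admin", "display_name": "Admin", ...},
+//	  "ref": {"name": "refs/heads/feature", "repo": {...}},
+//	  "sha": "<commit sha>",
+//	  "commit": {"sha": "<commit sha>", "message": "...", "author": {...}, "committer": {...}},
+//	  "old_sha": "<types.NilSHA>",
+//	  "forced": false
+//	}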
+func (s *Service) handleEventBranchCreated(ctx context.Context, + event *events.Event[*gitevents.BranchCreatedPayload]) error { + return s.triggerForEventWithRepo(ctx, enum.WebhookTriggerBranchCreated, + event.ID, event.Payload.PrincipalID, event.Payload.RepoID, + func(principal *types.Principal, repo *types.Repository) (any, error) { + commitInfo, err := s.fetchCommitInfoForEvent(ctx, repo.GitUID, event.Payload.SHA) + if err != nil { + return nil, err + } + repoInfo := repositoryInfoFrom(repo, s.urlProvider) + + return &ReferencePayload{ + BaseSegment: BaseSegment{ + Trigger: enum.WebhookTriggerBranchCreated, + Repo: repoInfo, + Principal: principalInfoFrom(principal), + }, + ReferenceSegment: ReferenceSegment{ + Ref: ReferenceInfo{ + Name: event.Payload.Ref, + Repo: repoInfo, + }, + }, + ReferenceDetailsSegment: ReferenceDetailsSegment{ + SHA: event.Payload.SHA, + Commit: &commitInfo, + }, + ReferenceUpdateSegment: ReferenceUpdateSegment{ + OldSHA: types.NilSHA, + Forced: false, + }, + }, nil + }) +} + +// handleEventBranchUpdated handles branch updated events +// and triggers branch updated webhooks for the source repo. +func (s *Service) handleEventBranchUpdated(ctx context.Context, + event *events.Event[*gitevents.BranchUpdatedPayload]) error { + return s.triggerForEventWithRepo(ctx, enum.WebhookTriggerBranchUpdated, + event.ID, event.Payload.PrincipalID, event.Payload.RepoID, + func(principal *types.Principal, repo *types.Repository) (any, error) { + commitInfo, err := s.fetchCommitInfoForEvent(ctx, repo.GitUID, event.Payload.NewSHA) + if err != nil { + return nil, err + } + repoInfo := repositoryInfoFrom(repo, s.urlProvider) + + return &ReferencePayload{ + BaseSegment: BaseSegment{ + Trigger: enum.WebhookTriggerBranchUpdated, + Repo: repoInfo, + Principal: principalInfoFrom(principal), + }, + ReferenceSegment: ReferenceSegment{ + Ref: ReferenceInfo{ + Name: event.Payload.Ref, + Repo: repoInfo, + }, + }, + ReferenceDetailsSegment: ReferenceDetailsSegment{ + SHA: event.Payload.NewSHA, + Commit: &commitInfo, + }, + ReferenceUpdateSegment: ReferenceUpdateSegment{ + OldSHA: event.Payload.OldSHA, + Forced: event.Payload.Forced, + }, + }, nil + }) +} + +// handleEventBranchDeleted handles branch deleted events +// and triggers branch deleted webhooks for the source repo. 
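+// Since the branch no longer exists, no commit info is fetched: SHA is set to types.NilSHA,
+// Commit is nil, and OldSHA carries the SHA the branch pointed to before deletion.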
+func (s *Service) handleEventBranchDeleted(ctx context.Context, + event *events.Event[*gitevents.BranchDeletedPayload]) error { + return s.triggerForEventWithRepo(ctx, enum.WebhookTriggerBranchDeleted, + event.ID, event.Payload.PrincipalID, event.Payload.RepoID, + func(principal *types.Principal, repo *types.Repository) (any, error) { + repoInfo := repositoryInfoFrom(repo, s.urlProvider) + + return &ReferencePayload{ + BaseSegment: BaseSegment{ + Trigger: enum.WebhookTriggerBranchDeleted, + Repo: repoInfo, + Principal: principalInfoFrom(principal), + }, + ReferenceSegment: ReferenceSegment{ + Ref: ReferenceInfo{ + Name: event.Payload.Ref, + Repo: repoInfo, + }, + }, + ReferenceDetailsSegment: ReferenceDetailsSegment{ + SHA: types.NilSHA, + Commit: nil, + }, + ReferenceUpdateSegment: ReferenceUpdateSegment{ + OldSHA: event.Payload.SHA, + Forced: false, + }, + }, nil + }) +} + +func (s *Service) fetchCommitInfoForEvent(ctx context.Context, repoUID string, sha string) (CommitInfo, error) { + out, err := s.gitRPCClient.GetCommit(ctx, &gitrpc.GetCommitParams{ + ReadParams: gitrpc.ReadParams{ + RepoUID: repoUID, + }, + SHA: sha, + }) + + if gitrpc.ErrorStatus(err) == gitrpc.StatusNotFound { + // this could happen if the commit has been deleted and garbage collected by now + // or if the sha doesn't point to an event - either way discard the event. + return CommitInfo{}, events.NewDiscardEventErrorf("commit with sha '%s' doesn't exist", sha) + } + + if err != nil { + return CommitInfo{}, fmt.Errorf("failed to get commit with sha '%s': %w", sha, err) + } + + return commitInfoFrom(out.Commit), nil +} diff --git a/internal/services/webhook/handler_pullreq.go b/internal/services/webhook/handler_pullreq.go new file mode 100644 index 0000000000..eecaa42807 --- /dev/null +++ b/internal/services/webhook/handler_pullreq.go @@ -0,0 +1,187 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package webhook + +import ( + "context" + + "github.com/harness/gitness/events" + pullreqevents "github.com/harness/gitness/internal/events/pullreq" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +const ( + // gitReferenceNamePrefixBranch is the prefix of references of type branch. + gitReferenceNamePrefixBranch = "refs/heads/" +) + +// PullReqCreatedPayload describes the body of the pullreq created trigger. +// TODO: move in separate package for small import? +type PullReqCreatedPayload struct { + BaseSegment + PullReqSegment + PullReqTargetReferenceSegment + ReferenceSegment + ReferenceDetailsSegment +} + +// handleEventPullReqCreated handles created events for pull requests +// and triggers pullreq created webhooks for the source repo. 
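+// Commit info is fetched from the PR's source repository using the event's SourceSHA; the
+// reference segments describe the PR's source branch and target branch respectively.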
+func (s *Service) handleEventPullReqCreated(ctx context.Context, + event *events.Event[*pullreqevents.CreatedPayload]) error { + return s.triggerForEventWithPullReq(ctx, enum.WebhookTriggerPullReqCreated, + event.ID, event.Payload.PrincipalID, event.Payload.PullReqID, + func(principal *types.Principal, pr *types.PullReq, targetRepo, sourceRepo *types.Repository) (any, error) { + commitInfo, err := s.fetchCommitInfoForEvent(ctx, sourceRepo.GitUID, event.Payload.SourceSHA) + if err != nil { + return nil, err + } + targetRepoInfo := repositoryInfoFrom(targetRepo, s.urlProvider) + sourceRepoInfo := repositoryInfoFrom(sourceRepo, s.urlProvider) + + return &PullReqCreatedPayload{ + BaseSegment: BaseSegment{ + Trigger: enum.WebhookTriggerPullReqCreated, + Repo: targetRepoInfo, + Principal: principalInfoFrom(principal), + }, + PullReqSegment: PullReqSegment{ + PullReq: pullReqInfoFrom(pr), + }, + PullReqTargetReferenceSegment: PullReqTargetReferenceSegment{ + TargetRef: ReferenceInfo{ + Name: gitReferenceNamePrefixBranch + pr.TargetBranch, + Repo: targetRepoInfo, + }, + }, + ReferenceSegment: ReferenceSegment{ + Ref: ReferenceInfo{ + Name: gitReferenceNamePrefixBranch + pr.SourceBranch, + Repo: sourceRepoInfo, + }, + }, + ReferenceDetailsSegment: ReferenceDetailsSegment{ + SHA: event.Payload.SourceSHA, + Commit: &commitInfo, + }, + }, nil + }) +} + +// PullReqReopenedPayload describes the body of the pullreq reopened trigger. +// Note: same as payload for created. +type PullReqReopenedPayload PullReqCreatedPayload + +// handleEventPullReqReopened handles reopened events for pull requests +// and triggers pullreq reopened webhooks for the source repo. +func (s *Service) handleEventPullReqReopened(ctx context.Context, + event *events.Event[*pullreqevents.ReopenedPayload]) error { + return s.triggerForEventWithPullReq(ctx, enum.WebhookTriggerPullReqReopened, + event.ID, event.Payload.PrincipalID, event.Payload.PullReqID, + func(principal *types.Principal, pr *types.PullReq, targetRepo, sourceRepo *types.Repository) (any, error) { + commitInfo, err := s.fetchCommitInfoForEvent(ctx, sourceRepo.GitUID, event.Payload.SourceSHA) + if err != nil { + return nil, err + } + targetRepoInfo := repositoryInfoFrom(targetRepo, s.urlProvider) + sourceRepoInfo := repositoryInfoFrom(sourceRepo, s.urlProvider) + + return &PullReqReopenedPayload{ + BaseSegment: BaseSegment{ + Trigger: enum.WebhookTriggerPullReqReopened, + Repo: targetRepoInfo, + Principal: principalInfoFrom(principal), + }, + PullReqSegment: PullReqSegment{ + PullReq: pullReqInfoFrom(pr), + }, + PullReqTargetReferenceSegment: PullReqTargetReferenceSegment{ + TargetRef: ReferenceInfo{ + Name: gitReferenceNamePrefixBranch + pr.TargetBranch, + Repo: targetRepoInfo, + }, + }, + ReferenceSegment: ReferenceSegment{ + Ref: ReferenceInfo{ + Name: gitReferenceNamePrefixBranch + pr.SourceBranch, + Repo: sourceRepoInfo, + }, + }, + ReferenceDetailsSegment: ReferenceDetailsSegment{ + SHA: event.Payload.SourceSHA, + Commit: &commitInfo, + }, + }, nil + }) +} + +// PullReqBranchUpdatedPayload describes the body of the pullreq branch updated trigger. +// TODO: move in separate package for small import? +type PullReqBranchUpdatedPayload struct { + BaseSegment + PullReqSegment + PullReqTargetReferenceSegment + ReferenceSegment + ReferenceDetailsSegment + ReferenceUpdateSegment +} + +// handleEventPullReqBranchUpdated handles branch updated events for pull requests +// and triggers pullreq branch updated webhooks for the source repo. 
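+// Compared to the created/reopened payload, it additionally carries a ReferenceUpdateSegment
+// with the OldSHA, NewSHA (as SHA), and Forced flag taken from the event payload.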
+func (s *Service) handleEventPullReqBranchUpdated(ctx context.Context, + event *events.Event[*pullreqevents.BranchUpdatedPayload]) error { + return s.triggerForEventWithPullReq(ctx, enum.WebhookTriggerPullReqBranchUpdated, + event.ID, event.Payload.PrincipalID, event.Payload.PullReqID, + func(principal *types.Principal, pr *types.PullReq, targetRepo, sourceRepo *types.Repository) (any, error) { + commitInfo, err := s.fetchCommitInfoForEvent(ctx, sourceRepo.GitUID, event.Payload.NewSHA) + if err != nil { + return nil, err + } + targetRepoInfo := repositoryInfoFrom(targetRepo, s.urlProvider) + sourceRepoInfo := repositoryInfoFrom(sourceRepo, s.urlProvider) + + return &PullReqBranchUpdatedPayload{ + BaseSegment: BaseSegment{ + Trigger: enum.WebhookTriggerPullReqBranchUpdated, + Repo: targetRepoInfo, + Principal: principalInfoFrom(principal), + }, + PullReqSegment: PullReqSegment{ + PullReq: pullReqInfoFrom(pr), + }, + PullReqTargetReferenceSegment: PullReqTargetReferenceSegment{ + TargetRef: ReferenceInfo{ + Name: gitReferenceNamePrefixBranch + pr.TargetBranch, + Repo: targetRepoInfo, + }, + }, + ReferenceSegment: ReferenceSegment{ + Ref: ReferenceInfo{ + Name: gitReferenceNamePrefixBranch + pr.SourceBranch, + Repo: sourceRepoInfo, + }, + }, + ReferenceDetailsSegment: ReferenceDetailsSegment{ + SHA: event.Payload.NewSHA, + Commit: &commitInfo, + }, + ReferenceUpdateSegment: ReferenceUpdateSegment{ + OldSHA: event.Payload.OldSHA, + Forced: event.Payload.Forced, + }, + }, nil + }) +} diff --git a/internal/services/webhook/handler_tag.go b/internal/services/webhook/handler_tag.go new file mode 100644 index 0000000000..b231a29f43 --- /dev/null +++ b/internal/services/webhook/handler_tag.go @@ -0,0 +1,131 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package webhook + +import ( + "context" + + "github.com/harness/gitness/events" + gitevents "github.com/harness/gitness/internal/events/git" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +// handleEventTagCreated handles tag created events +// and triggers tag created webhooks for the source repo. 
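+// The payload mirrors the branch created payload (same ReferencePayload type), with the tag
+// reference taken from the event and OldSHA set to types.NilSHA.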
+func (s *Service) handleEventTagCreated(ctx context.Context, + event *events.Event[*gitevents.TagCreatedPayload]) error { + return s.triggerForEventWithRepo(ctx, enum.WebhookTriggerTagCreated, + event.ID, event.Payload.PrincipalID, event.Payload.RepoID, + func(principal *types.Principal, repo *types.Repository) (any, error) { + commitInfo, err := s.fetchCommitInfoForEvent(ctx, repo.GitUID, event.Payload.SHA) + if err != nil { + return nil, err + } + repoInfo := repositoryInfoFrom(repo, s.urlProvider) + + return &ReferencePayload{ + BaseSegment: BaseSegment{ + Trigger: enum.WebhookTriggerTagCreated, + Repo: repoInfo, + Principal: principalInfoFrom(principal), + }, + ReferenceSegment: ReferenceSegment{ + Ref: ReferenceInfo{ + Name: event.Payload.Ref, + Repo: repoInfo, + }, + }, + ReferenceDetailsSegment: ReferenceDetailsSegment{ + SHA: event.Payload.SHA, + Commit: &commitInfo, + }, + ReferenceUpdateSegment: ReferenceUpdateSegment{ + OldSHA: types.NilSHA, + Forced: false, + }, + }, nil + }) +} + +// handleEventTagUpdated handles tag updated events +// and triggers tag updated webhooks for the source repo. +func (s *Service) handleEventTagUpdated(ctx context.Context, + event *events.Event[*gitevents.TagUpdatedPayload]) error { + return s.triggerForEventWithRepo(ctx, enum.WebhookTriggerTagUpdated, + event.ID, event.Payload.PrincipalID, event.Payload.RepoID, + func(principal *types.Principal, repo *types.Repository) (any, error) { + commitInfo, err := s.fetchCommitInfoForEvent(ctx, repo.GitUID, event.Payload.NewSHA) + if err != nil { + return nil, err + } + repoInfo := repositoryInfoFrom(repo, s.urlProvider) + + return &ReferencePayload{ + BaseSegment: BaseSegment{ + Trigger: enum.WebhookTriggerTagUpdated, + Repo: repoInfo, + Principal: principalInfoFrom(principal), + }, + ReferenceSegment: ReferenceSegment{ + Ref: ReferenceInfo{ + Name: event.Payload.Ref, + Repo: repoInfo, + }, + }, + ReferenceDetailsSegment: ReferenceDetailsSegment{ + SHA: event.Payload.NewSHA, + Commit: &commitInfo, + }, + ReferenceUpdateSegment: ReferenceUpdateSegment{ + OldSHA: event.Payload.OldSHA, + Forced: event.Payload.Forced, + }, + }, nil + }) +} + +// handleEventTagDeleted handles tag deleted events +// and triggers tag deleted webhooks for the source repo. +func (s *Service) handleEventTagDeleted(ctx context.Context, + event *events.Event[*gitevents.TagDeletedPayload]) error { + return s.triggerForEventWithRepo(ctx, enum.WebhookTriggerTagDeleted, + event.ID, event.Payload.PrincipalID, event.Payload.RepoID, + func(principal *types.Principal, repo *types.Repository) (any, error) { + repoInfo := repositoryInfoFrom(repo, s.urlProvider) + + return &ReferencePayload{ + BaseSegment: BaseSegment{ + Trigger: enum.WebhookTriggerTagDeleted, + Repo: repoInfo, + Principal: principalInfoFrom(principal), + }, + ReferenceSegment: ReferenceSegment{ + Ref: ReferenceInfo{ + Name: event.Payload.Ref, + Repo: repoInfo, + }, + }, + ReferenceDetailsSegment: ReferenceDetailsSegment{ + SHA: types.NilSHA, + Commit: nil, + }, + ReferenceUpdateSegment: ReferenceUpdateSegment{ + OldSHA: event.Payload.SHA, + Forced: false, + }, + }, nil + }) +} diff --git a/internal/services/webhook/http_client.go b/internal/services/webhook/http_client.go new file mode 100644 index 0000000000..61e05127ae --- /dev/null +++ b/internal/services/webhook/http_client.go @@ -0,0 +1,101 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package webhook + +import ( + "context" + "errors" + "fmt" + "net" + "net/http" + "time" + + "github.com/rs/zerolog/log" +) + +var ( + errLoopbackNotAllowed = errors.New("loopback not allowed") + errPrivateNetworkNotAllowed = errors.New("private network not allowed") +) + +func newHTTPClient(allowLoopback bool, allowPrivateNetwork bool, disableSSLVerification bool) *http.Client { + // no customizations? use default client + if allowLoopback && allowPrivateNetwork && !disableSSLVerification { + return http.DefaultClient + } + + // Clone http.DefaultTransport (used by http.DefaultClient) + tr := http.DefaultTransport.(*http.Transport).Clone() + + tr.TLSClientConfig.InsecureSkipVerify = disableSSLVerification + + // create basic net.Dialer (Similar to what is used by http.DefaultTransport) + dialer := &net.Dialer{ + Timeout: 30 * time.Second, + KeepAlive: 30 * time.Second, + } + + // overwrite DialContext method to block sending data to localhost + // NOTE: this doesn't block establishing the connection, but closes it before data is send. + // WARNING: this allows scanning of IP addresses based on error types. + tr.DialContext = func(ctx context.Context, network, addr string) (net.Conn, error) { + // dial connection using + con, err := dialer.DialContext(ctx, network, addr) + if err != nil { + return nil, err + } + + // by default close connection unless explicitly marked to keep it + keepConnection := false + defer func() { + // if we decided to keep the connection, nothing to do + if keepConnection { + return + } + + // otherwise best effort close connection + cErr := con.Close() + if cErr != nil { + log.Ctx(ctx).Warn().Err(err). + Msgf("failed to close potentially malicious connection to '%s' (resolved: '%s')", + addr, con.RemoteAddr()) + } + }() + + // ensure a tcp address got established and close if it's localhost or private + tcpAddr, ok := con.RemoteAddr().(*net.TCPAddr) + if !ok { + // not expected to happen, but to be sure + return nil, fmt.Errorf("address resolved to a non-TCP address (original: '%s', resolved: '%s')", + addr, con.RemoteAddr()) + } + + if !allowLoopback && tcpAddr.IP.IsLoopback() { + return nil, errLoopbackNotAllowed + } + + if !allowPrivateNetwork && tcpAddr.IP.IsPrivate() { + return nil, errPrivateNetworkNotAllowed + } + + // otherwise keep connection + keepConnection = true + + return con, nil + } + + // httpClient is similar to http.DefaultClient, just with custom http.Transport + return &http.Client{Transport: tr} +} diff --git a/internal/services/webhook/service.go b/internal/services/webhook/service.go new file mode 100644 index 0000000000..312d61318d --- /dev/null +++ b/internal/services/webhook/service.go @@ -0,0 +1,173 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package webhook + +import ( + "context" + "errors" + "fmt" + "net/http" + "time" + + "github.com/harness/gitness/encrypt" + "github.com/harness/gitness/events" + "github.com/harness/gitness/gitrpc" + gitevents "github.com/harness/gitness/internal/events/git" + pullreqevents "github.com/harness/gitness/internal/events/pullreq" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/internal/url" + "github.com/harness/gitness/stream" +) + +const ( + eventsReaderGroupName = "gitness:webhook" +) + +type Config struct { + // UserAgentIdentity specifies the identity used for the user agent header + // IMPORTANT: do not include version. + UserAgentIdentity string + // HeaderIdentity specifies the identity used for headers in webhook calls (e.g. X-Gitness-Trigger, ...). + // NOTE: If no value is provided, the UserAgentIdentity will be used. + HeaderIdentity string + EventReaderName string + Concurrency int + MaxRetries int + AllowPrivateNetwork bool + AllowLoopback bool +} + +func (c *Config) Prepare() error { + if c == nil { + return errors.New("config is required") + } + if c.EventReaderName == "" { + return errors.New("config.EventReaderName is required") + } + if c.UserAgentIdentity == "" { + return errors.New("config.UserAgentIdentity is required") + } + if c.Concurrency < 1 { + return errors.New("config.Concurrency has to be a positive number") + } + if c.MaxRetries < 0 { + return errors.New("config.MaxRetries can't be negative") + } + + // Backfill data + if c.HeaderIdentity == "" { + c.HeaderIdentity = c.UserAgentIdentity + } + + return nil +} + +// Service is responsible for processing webhook events. 
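+// It holds four preconfigured HTTP clients covering the combinations of SSL verification
+// (secure/insecure) and internal webhooks, which are always allowed to target private networks.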
+type Service struct { + webhookStore store.WebhookStore + webhookExecutionStore store.WebhookExecutionStore + urlProvider *url.Provider + repoStore store.RepoStore + pullreqStore store.PullReqStore + principalStore store.PrincipalStore + gitRPCClient gitrpc.Interface + encrypter encrypt.Encrypter + + secureHTTPClient *http.Client + insecureHTTPClient *http.Client + + secureHTTPClientInternal *http.Client + insecureHTTPClientInternal *http.Client + + config Config +} + +func NewService(ctx context.Context, config Config, + gitReaderFactory *events.ReaderFactory[*gitevents.Reader], + prReaderFactory *events.ReaderFactory[*pullreqevents.Reader], + webhookStore store.WebhookStore, webhookExecutionStore store.WebhookExecutionStore, + repoStore store.RepoStore, pullreqStore store.PullReqStore, urlProvider *url.Provider, + principalStore store.PrincipalStore, gitRPCClient gitrpc.Interface, encrypter encrypt.Encrypter, +) (*Service, error) { + if err := config.Prepare(); err != nil { + return nil, fmt.Errorf("provided webhook service config is invalid: %w", err) + } + service := &Service{ + webhookStore: webhookStore, + webhookExecutionStore: webhookExecutionStore, + repoStore: repoStore, + pullreqStore: pullreqStore, + urlProvider: urlProvider, + principalStore: principalStore, + gitRPCClient: gitRPCClient, + encrypter: encrypter, + + secureHTTPClient: newHTTPClient(config.AllowLoopback, config.AllowPrivateNetwork, false), + insecureHTTPClient: newHTTPClient(config.AllowLoopback, config.AllowPrivateNetwork, true), + + secureHTTPClientInternal: newHTTPClient(config.AllowLoopback, true, false), + insecureHTTPClientInternal: newHTTPClient(config.AllowLoopback, true, true), + + config: config, + } + + _, err := gitReaderFactory.Launch(ctx, eventsReaderGroupName, config.EventReaderName, + func(r *gitevents.Reader) error { + const idleTimeout = 1 * time.Minute + r.Configure( + stream.WithConcurrency(config.Concurrency), + stream.WithHandlerOptions( + stream.WithIdleTimeout(idleTimeout), + stream.WithMaxRetries(config.MaxRetries), + )) + + // register events + _ = r.RegisterBranchCreated(service.handleEventBranchCreated) + _ = r.RegisterBranchUpdated(service.handleEventBranchUpdated) + _ = r.RegisterBranchDeleted(service.handleEventBranchDeleted) + + _ = r.RegisterTagCreated(service.handleEventTagCreated) + _ = r.RegisterTagUpdated(service.handleEventTagUpdated) + _ = r.RegisterTagDeleted(service.handleEventTagDeleted) + + return nil + }) + if err != nil { + return nil, fmt.Errorf("failed to launch git event reader for webhooks: %w", err) + } + + _, err = prReaderFactory.Launch(ctx, eventsReaderGroupName, config.EventReaderName, + func(r *pullreqevents.Reader) error { + const idleTimeout = 1 * time.Minute + r.Configure( + stream.WithConcurrency(config.Concurrency), + stream.WithHandlerOptions( + stream.WithIdleTimeout(idleTimeout), + stream.WithMaxRetries(config.MaxRetries), + )) + + // register events + _ = r.RegisterCreated(service.handleEventPullReqCreated) + _ = r.RegisterReopened(service.handleEventPullReqReopened) + _ = r.RegisterBranchUpdated(service.handleEventPullReqBranchUpdated) + + return nil + }) + if err != nil { + return nil, fmt.Errorf("failed to launch pr event reader for webhooks: %w", err) + } + + return service, nil +} diff --git a/internal/services/webhook/trigger.go b/internal/services/webhook/trigger.go new file mode 100644 index 0000000000..7fd88762d1 --- /dev/null +++ b/internal/services/webhook/trigger.go @@ -0,0 +1,487 @@ +// Copyright 2023 Harness, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package webhook + +import ( + "bytes" + "context" + "crypto/hmac" + "crypto/sha256" + "encoding/hex" + "encoding/json" + "errors" + "fmt" + "io" + "net" + "net/http" + "time" + + "github.com/harness/gitness/store" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + "github.com/harness/gitness/version" + + "github.com/rs/zerolog/log" +) + +const ( + // webhookTimeLimit defines the time limit of a single webhook execution. + // This is similar to other SCM providers. + webhookTimeLimit = 10 * time.Second + + // responseHeadersBytesLimit defines the maximum number of bytes processed from the webhook response headers. + responseHeadersBytesLimit = 1024 + + // responseBodyBytesLimit defines the maximum number of bytes processed from the webhook response body. + responseBodyBytesLimit = 1024 +) + +var ( + // ErrWebhookNotRetriggerable is returned in case the webhook can't be retriggered due to an incomplete execution. + // This should only occur if we failed to generate the request body (most likely out of memory). + ErrWebhookNotRetriggerable = errors.New("webhook execution is incomplete and can't be retriggered") +) + +type TriggerResult struct { + TriggerID string + TriggerType enum.WebhookTrigger + Webhook *types.Webhook + Execution *types.WebhookExecution + Err error +} + +func (r *TriggerResult) Skipped() bool { + return r.Execution == nil +} + +func (s *Service) triggerWebhooksFor(ctx context.Context, parentType enum.WebhookParent, parentID int64, + triggerID string, triggerType enum.WebhookTrigger, body any) ([]TriggerResult, error) { + // get all webhooks for the given parent + // NOTE: there never should be even close to 1000 webhooks for a repo (that should be blocked in the future). 
+ // We just use 1000 as a safe number to get all hooks + webhooks, err := s.webhookStore.List(ctx, parentType, parentID, &types.WebhookFilter{Size: 1000, Order: enum.OrderAsc}) + if err != nil { + return nil, fmt.Errorf("failed to list webhooks for %s %d: %w", parentType, parentID, err) + } + + return s.triggerWebhooks(ctx, webhooks, triggerID, triggerType, body) +} + +//nolint:gocognit // refactor if needed +func (s *Service) triggerWebhooks(ctx context.Context, webhooks []*types.Webhook, + triggerID string, triggerType enum.WebhookTrigger, body any) ([]TriggerResult, error) { + // return immediately if webhooks are empty + if len(webhooks) == 0 { + return []TriggerResult{}, nil + } + + // get all previous execution for the same trigger + executions, err := s.webhookExecutionStore.ListForTrigger(ctx, triggerID) + if err != nil && !errors.Is(err, store.ErrResourceNotFound) { + return nil, fmt.Errorf("failed to get executions for trigger '%s'", triggerID) + } + + // precalculate whether a webhook should be executed + skipExecution := make(map[int64]bool) + for _, execution := range executions { + // skip execution in case of success or unrecoverable error + if execution.Result == enum.WebhookExecutionResultSuccess || + execution.Result == enum.WebhookExecutionResultFatalError { + skipExecution[execution.WebhookID] = true + } + } + + results := make([]TriggerResult, len(webhooks)) + for i, webhook := range webhooks { + results[i] = TriggerResult{ + TriggerID: triggerID, + TriggerType: triggerType, + Webhook: webhook, + } + + // check if webhook is disabled + if !webhook.Enabled { + continue + } + + // check if webhook already got executed (success or fatal error) + if skipExecution[webhook.ID] { + continue + } + + // check if webhook is registered for trigger (empty list => all triggers are registered) + triggerRegistered := len(webhook.Triggers) == 0 + for _, trigger := range webhook.Triggers { + if trigger == triggerType { + triggerRegistered = true + break + } + } + if !triggerRegistered { + continue + } + + // execute trigger and store output in result + results[i].Execution, results[i].Err = s.executeWebhook(ctx, webhook, triggerID, triggerType, body, nil) + } + + return results, nil +} + +func (s *Service) RetriggerWebhookExecution(ctx context.Context, webhookExecutionID int64) (*TriggerResult, error) { + // find execution + webhookExecution, err := s.webhookExecutionStore.Find(ctx, webhookExecutionID) + if err != nil { + return nil, fmt.Errorf("failed to find webhook execution with id %d: %w", webhookExecutionID, err) + } + + // ensure webhook can be retriggered + if !webhookExecution.Retriggerable { + return nil, ErrWebhookNotRetriggerable + } + + // find webhook + webhook, err := s.webhookStore.Find(ctx, webhookExecution.WebhookID) + if err != nil { + return nil, fmt.Errorf("failed to find webhook with id %d: %w", webhookExecution.WebhookID, err) + } + + // reuse same trigger id as original execution + triggerID := webhookExecution.TriggerID + triggerType := webhookExecution.TriggerType + + // pass body explicitly + body := &bytes.Buffer{} + // NOTE: bBuff.Write(v) will always return (len(v), nil) - no need to error handle + body.WriteString(webhookExecution.Request.Body) + + newExecution, err := s.executeWebhook(ctx, webhook, triggerID, triggerType, body, &webhookExecution.ID) + return &TriggerResult{ + TriggerID: triggerID, + TriggerType: triggerType, + Webhook: webhook, + Execution: newExecution, + Err: err, + }, nil +} + +//nolint:gocognit // refactor into smaller chunks if 
necessary. +func (s *Service) executeWebhook(ctx context.Context, webhook *types.Webhook, triggerID string, + triggerType enum.WebhookTrigger, body any, rerunOfID *int64) (*types.WebhookExecution, error) { + // build execution entry on the fly (save no matter what) + execution := types.WebhookExecution{ + RetriggerOf: rerunOfID, + WebhookID: webhook.ID, + TriggerID: triggerID, + TriggerType: triggerType, + // for unexpected errors we don't retry - protect the system. User can retrigger manually (if body was set) + Result: enum.WebhookExecutionResultFatalError, + Error: "An unknown error occurred", + } + defer func(oCtx context.Context, start time.Time) { + // set total execution time + execution.Duration = int64(time.Since(start)) + execution.Created = time.Now().UnixMilli() + + // TODO: what if saving execution failed? For now we will rerun it in case of error or not show it in history + err := s.webhookExecutionStore.Create(oCtx, &execution) + if err != nil { + log.Ctx(ctx).Warn().Err(err).Msgf( + "failed to store webhook execution that ended with Result: %s, Response.Status: '%s', Error: '%s'", + execution.Result, execution.Response.Status, execution.Error) + } + + // update latest execution result of webhook IFF it's different from before (best effort) + if webhook.LatestExecutionResult == nil || *webhook.LatestExecutionResult != execution.Result { + _, err = s.webhookStore.UpdateOptLock(oCtx, webhook, func(hook *types.Webhook) error { + hook.LatestExecutionResult = &execution.Result + return nil + }) + if err != nil { + log.Ctx(ctx).Warn().Err(err).Msgf( + "failed to update latest execution result to %s for webhook %d", + execution.Result, webhook.ID) + } + } + }(ctx, time.Now()) + + // derive context with time limit + ctx, cancel := context.WithTimeout(ctx, webhookTimeLimit) + defer cancel() + + // create request from webhook and body + req, err := s.prepareHTTPRequest(ctx, &execution, triggerType, webhook, body) + if err != nil { + return &execution, err + } + + // Execute HTTP Request (insecure if requested) + var resp *http.Response + switch { + case webhook.Internal && webhook.Insecure: + resp, err = s.insecureHTTPClientInternal.Do(req) + case webhook.Internal: + resp, err = s.secureHTTPClientInternal.Do(req) + case webhook.Insecure: + resp, err = s.insecureHTTPClient.Do(req) + default: + resp, err = s.secureHTTPClient.Do(req) + } + + // always close the body! + if resp != nil && resp.Body != nil { + defer func() { + err = resp.Body.Close() + if err != nil { + log.Ctx(ctx).Warn().Err(err).Msgf("failed to close body after webhook execution %d", execution.ID) + } + }() + } + + // handle certain errors explicitly to give more to-the-point error messages + var dnsError *net.DNSError + switch { + case errors.Is(err, context.DeadlineExceeded): + // we assume timeout without any response is not worth retrying - protect the system + tErr := fmt.Errorf("request exceeded time limit of %s", webhookTimeLimit) + execution.Error = tErr.Error() + execution.Result = enum.WebhookExecutionResultFatalError + return &execution, tErr + + case errors.As(err, &dnsError) && dnsError.IsNotFound: + // this error is assumed unrecoverable - mark status accordingly and fail execution + execution.Error = fmt.Sprintf("host '%s' was not found", dnsError.Name) + execution.Result = enum.WebhookExecutionResultFatalError + return &execution, fmt.Errorf("failed to resolve host name '%s': %w", dnsError.Name, err) + + case err != nil: + // for all other errors we don't retry - protect the system. 
User can retrigger manually (if body was set) + tErr := fmt.Errorf("an error occurred while sending the request: %w", err) + execution.Error = tErr.Error() + execution.Result = enum.WebhookExecutionResultFatalError + return &execution, tErr + } + + // handle response + err = handleWebhookResponse(&execution, resp) + + return &execution, err +} + +// prepareHTTPRequest prepares a new http.Request object for the webhook using the provided body as request body. +// All execution.Request.XXX values are set accordingly. +// NOTE: if the body is an io.Reader, the value is used as response body as is, otherwise it'll be JSON serialized. +func (s *Service) prepareHTTPRequest(ctx context.Context, execution *types.WebhookExecution, + triggerType enum.WebhookTrigger, webhook *types.Webhook, body any) (*http.Request, error) { + // set URL as is (already has been validated, any other error will be caught in request creation) + execution.Request.URL = webhook.URL + + // Serialize body before anything else. + // This allows the user to retrigger the execution even in case of bad URL. + bBuff := &bytes.Buffer{} + switch v := body.(type) { + case io.Reader: + // if it's already an io.Reader - use value as is and don't serialize (allows to provide custom body) + // NOTE: reader can be read only once - read and store it in buffer to allow storing it in execution object + // and generate hmac. + bBytes, err := io.ReadAll(v) + if err != nil { + // ASSUMPTION: there was an issue with the static user input, not retriable + tErr := fmt.Errorf("failed to generate request body: %w", err) + execution.Error = tErr.Error() + execution.Result = enum.WebhookExecutionResultFatalError + return nil, tErr + } + + // NOTE: bBuff.Write(v) will always return (len(v), nil) - no need to error handle + bBuff.Write(bBytes) + + default: + // all other types we json serialize + err := json.NewEncoder(bBuff).Encode(body) + if err != nil { + // this is an internal issue, nothing the user can do - don't expose error details + execution.Error = "an error occurred preparing the request body" + execution.Result = enum.WebhookExecutionResultFatalError + return nil, fmt.Errorf("failed to serialize body to json: %w", err) + } + } + // set executioon body and mark it as retriggerable + execution.Request.Body = bBuff.String() + execution.Retriggerable = true + + // create request (url + body) + req, err := http.NewRequestWithContext(ctx, http.MethodPost, webhook.URL, bBuff) + if err != nil { + // ASSUMPTION: there was an issue with the static user input, not retriable + tErr := fmt.Errorf("failed to create request: %w", err) + execution.Error = tErr.Error() + execution.Result = enum.WebhookExecutionResultFatalError + return nil, tErr + } + + // setup headers + req.Header.Add("User-Agent", fmt.Sprintf("%s/%s", s.config.UserAgentIdentity, version.Version)) + req.Header.Add("Content-Type", "application/json") + req.Header.Add(s.toXHeader("Trigger"), string(triggerType)) + req.Header.Add(s.toXHeader("Webhook-Id"), fmt.Sprint(webhook.ID)) + req.Header.Add(s.toXHeader("Webhook-Parent-Type"), string(webhook.ParentType)) + req.Header.Add(s.toXHeader("Webhook-Parent-Id"), fmt.Sprint(webhook.ParentID)) + + // add HMAC only if a secret was provided + if webhook.Secret != "" { + decryptedSecret, err := s.encrypter.Decrypt([]byte(webhook.Secret)) + if err != nil { + return nil, fmt.Errorf("failed to decrypt webhook secret: %w", err) + } + var hmac string + hmac, err = generateHMACSHA256(bBuff.Bytes(), []byte(decryptedSecret)) + if err != nil { + return 
nil, fmt.Errorf("failed to generate SHA256 based HMAC: %w", err) + } + req.Header.Add(s.toXHeader("Signature"), hmac) + } + + hBuffer := &bytes.Buffer{} + err = req.Header.Write(hBuffer) + if err != nil { + tErr := fmt.Errorf("failed to write request headers: %w", err) + execution.Error = tErr.Error() + execution.Result = enum.WebhookExecutionResultRetriableError + return nil, tErr + } + execution.Request.Headers = hBuffer.String() + + return req, nil +} + +func (s *Service) toXHeader(name string) string { + return fmt.Sprintf("X-%s-%s", s.config.HeaderIdentity, name) +} + +//nolint:funlen // refactor if needed +func handleWebhookResponse(execution *types.WebhookExecution, resp *http.Response) error { + // store status (handle status later - want to first read body) + execution.Response.StatusCode = resp.StatusCode + execution.Response.Status = resp.Status + + // store response headers + hBuff := &bytes.Buffer{} + err := resp.Header.Write(hBuff) + if err != nil { + tErr := fmt.Errorf("failed to read response headers: %w", err) + execution.Error = tErr.Error() + execution.Result = enum.WebhookExecutionResultRetriableError + return tErr + } + // limit the total number of bytes we store in headers + headerLength := hBuff.Len() + if headerLength > responseHeadersBytesLimit { + headerLength = responseHeadersBytesLimit + } + execution.Response.Headers = string(hBuff.Bytes()[0:headerLength]) + + // handle body (if exists) + if resp.Body != nil { + // read and store response body + var bodyRaw []byte + bodyRaw, err = io.ReadAll(io.LimitReader(resp.Body, responseBodyBytesLimit)) + if err != nil { + tErr := fmt.Errorf("an error occurred while reading the response body: %w", err) + execution.Error = tErr.Error() + execution.Result = enum.WebhookExecutionResultRetriableError + return tErr + } + execution.Response.Body = string(bodyRaw) + } + + // Analyze status code + // IMPORTANT: cases are EVALUATED IN ORDER + switch code := resp.StatusCode; { + case code < 200: + // 1XX - server is continuing the processing (call was successful, but not completed yet) + execution.Error = "1xx response codes are not supported" + execution.Result = enum.WebhookExecutionResultFatalError + return fmt.Errorf("received response with unsupported status code %d", code) + + case code < 300: + // 2XX - call was successful + execution.Error = "" + execution.Result = enum.WebhookExecutionResultSuccess + return nil + + case code < 400: + // 3XX - Redirection (further action is required by the client) + // NOTE: technically we could follow the redirect, but not supported as of now + execution.Error = "3xx response codes are not supported" + execution.Result = enum.WebhookExecutionResultFatalError + return fmt.Errorf("received response with unsupported status code %d", code) + + case code == 408: + // 408 - Request Timeout + tErr := errors.New("request timed out") + execution.Error = tErr.Error() + execution.Result = enum.WebhookExecutionResultRetriableError + return tErr + + case code == 429: + // 429 - Too Many Requests + tErr := errors.New("request got throttled") + execution.Error = tErr.Error() + execution.Result = enum.WebhookExecutionResultRetriableError + return tErr + + case code < 500: + // 4xx - Issue with request (bad request, url too large, ...) 
+ execution.Error = "4xx response codes are not supported (apart from 408 and 429)" + execution.Result = enum.WebhookExecutionResultFatalError + return fmt.Errorf("received response with unrecoverable status code %d", code) + + case code == 501: + // 501 - Not Implemented + execution.Error = "remote server does not implement requested action" + execution.Result = enum.WebhookExecutionResultFatalError + return fmt.Errorf("received response with unrecoverable status code %d", code) + + case code < 600: + // 5xx - Server Errors + execution.Error = "remote server encountered an error" + execution.Result = enum.WebhookExecutionResultRetriableError + return fmt.Errorf("remote server encountered an error: %d", code) + + default: + // >= 600 - No commonly used response status code + execution.Error = "response code not supported" + execution.Result = enum.WebhookExecutionResultFatalError + return fmt.Errorf("received response with unsupported status code %d", code) + } +} + +// generateHMACSHA256 generates a new HMAC using SHA256 as hash function. +func generateHMACSHA256(data []byte, key []byte) (string, error) { + h := hmac.New(sha256.New, key) + + // write all data into hash + _, err := h.Write(data) + if err != nil { + return "", fmt.Errorf("failed to write data into hash: %w", err) + } + + // sum hash to final value + macBytes := h.Sum(nil) + + // encode MAC as hexadecimal + return hex.EncodeToString(macBytes), nil +} diff --git a/internal/services/webhook/types.go b/internal/services/webhook/types.go new file mode 100644 index 0000000000..45e2064fb1 --- /dev/null +++ b/internal/services/webhook/types.go @@ -0,0 +1,195 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package webhook + +import ( + "time" + + "github.com/harness/gitness/gitrpc" + "github.com/harness/gitness/internal/url" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +/* + * The idea of segments is to expose similar fields using the same structure. + * This makes consumption on webhook payloads easier as we ensure related webhooks have similar payload formats. + * Segments are meant to be embedded, while Infos are meant to be used as fields. + */ + +// BaseSegment contains base info of all payloads for webhooks. +type BaseSegment struct { + Trigger enum.WebhookTrigger `json:"trigger"` + Repo RepositoryInfo `json:"repo"` + Principal PrincipalInfo `json:"principal"` +} + +// ReferenceSegment contains the reference info for webhooks. +type ReferenceSegment struct { + Ref ReferenceInfo `json:"ref"` +} + +// ReferenceDetailsSegment contains extra defails for reference related payloads for webhooks. +type ReferenceDetailsSegment struct { + SHA string `json:"sha"` + Commit *CommitInfo `json:"commit,omitempty"` +} + +// ReferenceUpdateSegment contains extra details for reference update related payloads for webhooks. 
+type ReferenceUpdateSegment struct { + OldSHA string `json:"old_sha"` + Forced bool `json:"forced"` +} + +// PullReqTargetReferenceSegment contains details for the pull req target reference for webhooks. +type PullReqTargetReferenceSegment struct { + TargetRef ReferenceInfo `json:"target_ref"` +} + +// PullReqSegment contains details for all pull req related payloads for webhooks. +type PullReqSegment struct { + PullReq PullReqInfo `json:"pull_req"` +} + +// RepositoryInfo describes the repo related info for a webhook payload. +// NOTE: don't use types package as we want webhook payload to be independent from API calls. +type RepositoryInfo struct { + ID int64 `json:"id"` + Path string `json:"path"` + UID string `json:"uid"` + DefaultBranch string `json:"default_branch"` + GitURL string `json:"git_url"` +} + +// repositoryInfoFrom gets the RespositoryInfo from a types.Repository. +func repositoryInfoFrom(repo *types.Repository, urlProvider *url.Provider) RepositoryInfo { + return RepositoryInfo{ + ID: repo.ID, + Path: repo.Path, + UID: repo.UID, + DefaultBranch: repo.DefaultBranch, + GitURL: urlProvider.GenerateRepoCloneURL(repo.Path), + } +} + +// PullReqInfo describes the pullreq related info for a webhook payload. +// NOTE: don't use types package as we want pullreq payload to be independent from API calls. +type PullReqInfo struct { + Number int64 `json:"number"` + State enum.PullReqState `json:"state"` + IsDraft bool `json:"is_draft"` + Title string `json:"title"` + SourceRepoID int64 `json:"source_repo_id"` + SourceBranch string `json:"source_branch"` + TargetRepoID int64 `json:"target_repo_id"` + TargetBranch string `json:"target_branch"` + MergeStrategy *enum.MergeMethod `json:"merge_strategy"` +} + +// pullReqInfoFrom gets the PullReqInfo from a types.PullReq. +func pullReqInfoFrom(pr *types.PullReq) PullReqInfo { + return PullReqInfo{ + Number: pr.Number, + State: pr.State, + IsDraft: pr.IsDraft, + Title: pr.Title, + SourceRepoID: pr.SourceRepoID, + SourceBranch: pr.SourceBranch, + TargetRepoID: pr.TargetRepoID, + TargetBranch: pr.TargetBranch, + MergeStrategy: pr.MergeMethod, + } +} + +// PrincipalInfo describes the principal related info for a webhook payload. +// NOTE: don't use types package as we want webhook payload to be independent from API calls. +type PrincipalInfo struct { + ID int64 `json:"id"` + UID string `json:"uid"` + DisplayName string `json:"display_name"` + Email string `json:"email"` + Type enum.PrincipalType `json:"type"` + Created int64 `json:"created"` + Updated int64 `json:"updated"` +} + +// principalInfoFrom gets the PrincipalInfo from a types.Principal. +func principalInfoFrom(principal *types.Principal) PrincipalInfo { + return PrincipalInfo{ + ID: principal.ID, + UID: principal.UID, + DisplayName: principal.DisplayName, + Email: principal.Email, + Type: principal.Type, + Created: principal.Created, + Updated: principal.Updated, + } +} + +// CommitInfo describes the commit related info for a webhook payload. +// NOTE: don't use types package as we want webhook payload to be independent from API calls. +type CommitInfo struct { + SHA string `json:"sha"` + Message string `json:"message"` + Author SignatureInfo `json:"author"` + Committer SignatureInfo `json:"committer"` +} + +// commitInfoFrom gets the CommitInfo from a gitrpc.Commit. 
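+// The conversion is a plain field copy; author and committer are mapped via signatureInfoFrom.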
+func commitInfoFrom(commit gitrpc.Commit) CommitInfo { + return CommitInfo{ + SHA: commit.SHA, + Message: commit.Message, + Author: signatureInfoFrom(commit.Author), + Committer: signatureInfoFrom(commit.Committer), + } +} + +// SignatureInfo describes the commit signature related info for a webhook payload. +// NOTE: don't use types package as we want webhook payload to be independent from API calls. +type SignatureInfo struct { + Identity IdentityInfo `json:"identity"` + When time.Time `json:"when"` +} + +// signatureInfoFrom gets the SignatureInfo from a gitrpc.Signature. +func signatureInfoFrom(signature gitrpc.Signature) SignatureInfo { + return SignatureInfo{ + Identity: identityInfoFrom(signature.Identity), + When: signature.When, + } +} + +// IdentityInfo describes the signature identity related info for a webhook payload. +// NOTE: don't use types package as we want webhook payload to be independent from API calls. +type IdentityInfo struct { + Name string `json:"name"` + Email string `json:"email"` +} + +// identityInfoFrom gets the IdentityInfo from a gitrpc.Identity. +func identityInfoFrom(identity gitrpc.Identity) IdentityInfo { + return IdentityInfo{ + Name: identity.Name, + Email: identity.Email, + } +} + +// ReferenceInfo describes a unique reference in gitness. +// It contains both the reference name as well as the repo the reference belongs to. +type ReferenceInfo struct { + Name string `json:"name"` + Repo RepositoryInfo `json:"repo"` +} diff --git a/internal/services/webhook/wire.go b/internal/services/webhook/wire.go new file mode 100644 index 0000000000..59e9125f58 --- /dev/null +++ b/internal/services/webhook/wire.go @@ -0,0 +1,45 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package webhook + +import ( + "context" + + "github.com/harness/gitness/encrypt" + "github.com/harness/gitness/events" + "github.com/harness/gitness/gitrpc" + gitevents "github.com/harness/gitness/internal/events/git" + pullreqevents "github.com/harness/gitness/internal/events/pullreq" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/internal/url" + + "github.com/google/wire" +) + +// WireSet provides a wire set for this package. 
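+// A rough usage sketch in a wire injector (surrounding providers are assumptions, not part of this file):
+//
+//	wire.Build(
+//		webhook.WireSet,
+//		// ... providers for config, stores, url.Provider, gitrpc, encrypt, and the event reader factories
+//	)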
+var WireSet = wire.NewSet( + ProvideService, +) + +func ProvideService(ctx context.Context, config Config, + gitReaderFactory *events.ReaderFactory[*gitevents.Reader], + prReaderFactory *events.ReaderFactory[*pullreqevents.Reader], + webhookStore store.WebhookStore, webhookExecutionStore store.WebhookExecutionStore, + repoStore store.RepoStore, pullreqStore store.PullReqStore, urlProvider *url.Provider, + principalStore store.PrincipalStore, gitRPCClient gitrpc.Interface, encrypter encrypt.Encrypter) (*Service, error) { + return NewService(ctx, config, gitReaderFactory, prReaderFactory, + webhookStore, webhookExecutionStore, repoStore, pullreqStore, + urlProvider, principalStore, gitRPCClient, encrypter) +} diff --git a/internal/services/wire.go b/internal/services/wire.go new file mode 100644 index 0000000000..5b418b2762 --- /dev/null +++ b/internal/services/wire.go @@ -0,0 +1,53 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package services + +import ( + "github.com/harness/gitness/internal/services/job" + "github.com/harness/gitness/internal/services/metric" + "github.com/harness/gitness/internal/services/pullreq" + "github.com/harness/gitness/internal/services/trigger" + "github.com/harness/gitness/internal/services/webhook" + + "github.com/google/wire" +) + +var WireSet = wire.NewSet( + ProvideServices, +) + +type Services struct { + Webhook *webhook.Service + PullReq *pullreq.Service + Trigger *trigger.Service + JobScheduler *job.Scheduler + MetricCollector *metric.Collector +} + +func ProvideServices( + webhooksSvc *webhook.Service, + pullReqSvc *pullreq.Service, + triggerSvc *trigger.Service, + jobScheduler *job.Scheduler, + metricCollector *metric.Collector, +) Services { + return Services{ + Webhook: webhooksSvc, + PullReq: pullReqSvc, + Trigger: triggerSvc, + JobScheduler: jobScheduler, + MetricCollector: metricCollector, + } +} diff --git a/internal/sse/sse.go b/internal/sse/sse.go new file mode 100644 index 0000000000..9cf93d1925 --- /dev/null +++ b/internal/sse/sse.go @@ -0,0 +1,106 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package sse + +import ( + "context" + "encoding/json" + "fmt" + "strconv" + + "github.com/harness/gitness/pubsub" + "github.com/harness/gitness/types/enum" +) + +// Event is a server sent event. 
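ProvideServices only bundles already-built services into the Services struct; the construction order itself is resolved by Google Wire. A minimal, hypothetical injector shows how a WireSet like this is typically consumed (the real injector lives under cmd/ and pulls in many more provider sets; the function below is a sketch, not part of the patch):

//go:build wireinject
// +build wireinject

// injector_sketch.go (illustrative only; parameters stand in for upstream providers).
package bootstrap

import (
	"github.com/google/wire"
	"github.com/harness/gitness/internal/services"
	"github.com/harness/gitness/internal/services/job"
	"github.com/harness/gitness/internal/services/metric"
	"github.com/harness/gitness/internal/services/pullreq"
	"github.com/harness/gitness/internal/services/trigger"
	"github.com/harness/gitness/internal/services/webhook"
)

// initServices asks wire to assemble Services from pre-built dependencies;
// wire generates the call to ProvideServices at build time.
func initServices(
	webhookSvc *webhook.Service,
	pullreqSvc *pullreq.Service,
	triggerSvc *trigger.Service,
	scheduler *job.Scheduler,
	collector *metric.Collector,
) services.Services {
	wire.Build(services.WireSet)
	return services.Services{}
}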
+type Event struct { + Type enum.SSEType `json:"type"` + Data json.RawMessage `json:"data"` +} + +type Streamer interface { + // Publish publishes an event to a given space ID. + Publish(ctx context.Context, spaceID int64, eventType enum.SSEType, data any) error + + // Streams streams the events on a space ID. + Stream(ctx context.Context, spaceID int64) (<-chan *Event, <-chan error, func(context.Context) error) +} + +type pubsubStreamer struct { + pubsub pubsub.PubSub + namespace string +} + +func NewStreamer(pubsub pubsub.PubSub, namespace string) Streamer { + return &pubsubStreamer{ + pubsub: pubsub, + namespace: namespace, + } +} + +func (e *pubsubStreamer) Publish(ctx context.Context, spaceID int64, eventType enum.SSEType, data any) error { + dataSerialized, err := json.Marshal(data) + if err != nil { + return fmt.Errorf("failed to serialize data: %w", err) + } + event := Event{ + Type: eventType, + Data: dataSerialized, + } + serializedEvent, err := json.Marshal(event) + if err != nil { + return fmt.Errorf("failed to serialize event: %w", err) + } + namespaceOption := pubsub.WithPublishNamespace(e.namespace) + topic := getSpaceTopic(spaceID) + err = e.pubsub.Publish(ctx, topic, serializedEvent, namespaceOption) + if err != nil { + return fmt.Errorf("failed to publish event on pubsub: %w", err) + } + + return nil +} + +func (e *pubsubStreamer) Stream(ctx context.Context, spaceID int64) (<-chan *Event, <-chan error, func(context.Context) error) { + chEvent := make(chan *Event, 100) // TODO: check best size here + chErr := make(chan error) + g := func(payload []byte) error { + event := &Event{} + err := json.Unmarshal(payload, event) + if err != nil { + // This should never happen + return err + } + select { + case chEvent <- event: + default: + } + + return nil + } + namespaceOption := pubsub.WithChannelNamespace(e.namespace) + topic := getSpaceTopic(spaceID) + consumer := e.pubsub.Subscribe(ctx, topic, g, namespaceOption) + unsubscribeFN := func(ctx context.Context) error { + return consumer.Unsubscribe(ctx, topic) + } + + return chEvent, chErr, unsubscribeFN +} + +// getSpaceTopic creates the namespace name which will be `spaces:` +func getSpaceTopic(spaceID int64) string { + return "spaces:" + strconv.Itoa(int(spaceID)) +} diff --git a/internal/sse/wire.go b/internal/sse/wire.go new file mode 100644 index 0000000000..8f4d5af6ab --- /dev/null +++ b/internal/sse/wire.go @@ -0,0 +1,31 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package sse + +import ( + "github.com/harness/gitness/pubsub" + + "github.com/google/wire" +) + +// WireSet provides a wire set for this package. 
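The Streamer above only shuttles serialized events through pub/sub; producing an actual text/event-stream HTTP response is left to a handler. A rough, hypothetical handler sketch (names and routing are assumptions, and the error channel is currently never written to by the implementation above) could look like this:

// sse_handler_sketch.go (illustrative only; assumes the Streamer interface above).
package handler

import (
	"encoding/json"
	"fmt"
	"net/http"

	"github.com/harness/gitness/internal/sse"
)

// streamSpaceEvents writes every published space event as a text/event-stream frame.
func streamSpaceEvents(streamer sse.Streamer, spaceID int64) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		flusher, ok := w.(http.Flusher)
		if !ok {
			http.Error(w, "streaming unsupported", http.StatusInternalServerError)
			return
		}

		w.Header().Set("Content-Type", "text/event-stream")
		w.Header().Set("Cache-Control", "no-cache")

		ctx := r.Context()
		events, errs, unsubscribe := streamer.Stream(ctx, spaceID)
		defer func() { _ = unsubscribe(ctx) }() // best-effort cleanup in a sketch

		for {
			select {
			case <-ctx.Done():
				return
			case <-errs:
				return // unused by the current implementation, handled for completeness
			case ev := <-events:
				data, _ := json.Marshal(ev)
				fmt.Fprintf(w, "event: %v\ndata: %s\n\n", ev.Type, data)
				flusher.Flush()
			}
		}
	}
}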
+var WireSet = wire.NewSet( + ProvideEventsStreaming, +) + +func ProvideEventsStreaming(pubsub pubsub.PubSub) Streamer { + const namespace = "sse" + return NewStreamer(pubsub, namespace) +} diff --git a/internal/store/cache.go b/internal/store/cache.go new file mode 100644 index 0000000000..ba8eae63f7 --- /dev/null +++ b/internal/store/cache.go @@ -0,0 +1,31 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package store + +import ( + "github.com/harness/gitness/cache" + "github.com/harness/gitness/types" +) + +type ( + // PrincipalInfoCache caches principal IDs to principal info. + PrincipalInfoCache cache.ExtendedCache[int64, *types.PrincipalInfo] + + // SpacePathCache caches a raw path to a space path. + SpacePathCache cache.Cache[string, *types.SpacePath] + + // RepoGitInfoCache caches repository IDs to values GitUID. + RepoGitInfoCache cache.Cache[int64, *types.RepositoryGitInfo] +) diff --git a/internal/store/cache/path.go b/internal/store/cache/path.go new file mode 100644 index 0000000000..6d30acb982 --- /dev/null +++ b/internal/store/cache/path.go @@ -0,0 +1,60 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package cache + +import ( + "context" + + "github.com/harness/gitness/cache" + "github.com/harness/gitness/internal/paths" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/types" +) + +// pathCacheGetter is used to hook a SpacePathStore as source of a PathCache. +// IMPORTANT: It assumes that the pathCache already transformed the key. +type pathCacheGetter struct { + spacePathStore store.SpacePathStore +} + +func (g *pathCacheGetter) Find(ctx context.Context, key string) (*types.SpacePath, error) { + path, err := g.spacePathStore.FindByPath(ctx, key) + if err != nil { + return nil, err + } + + return path, nil +} + +// pathCache is a decorator of a Cache required to handle path transformations. 
+type pathCache struct { + inner cache.Cache[string, *types.SpacePath] + spacePathTransformation store.SpacePathTransformation +} + +func (c *pathCache) Get(ctx context.Context, key string) (*types.SpacePath, error) { + // build unique key from provided value + segments := paths.Segments(key) + uniqueKey := "" + for i, segment := range segments { + uniqueKey = paths.Concatinate(uniqueKey, c.spacePathTransformation(segment, i == 0)) + } + + return c.inner.Get(ctx, uniqueKey) +} + +func (c *pathCache) Stats() (int64, int64) { + return c.inner.Stats() +} diff --git a/internal/store/cache/wire.go b/internal/store/cache/wire.go new file mode 100644 index 0000000000..94ffe13a23 --- /dev/null +++ b/internal/store/cache/wire.go @@ -0,0 +1,57 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package cache + +import ( + "time" + + "github.com/harness/gitness/cache" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/types" + + "github.com/google/wire" +) + +// WireSet provides a wire set for this package. +var WireSet = wire.NewSet( + ProvidePrincipalInfoCache, + ProvidePathCache, + ProvideRepoGitInfoCache, +) + +// ProvidePrincipalInfoCache provides a cache for storing types.PrincipalInfo objects. +func ProvidePrincipalInfoCache(getter store.PrincipalInfoView) store.PrincipalInfoCache { + return cache.NewExtended[int64, *types.PrincipalInfo](getter, 30*time.Second) +} + +// ProvidePathCache provides a cache for storing routing paths and their types.SpacePath objects. +func ProvidePathCache( + pathStore store.SpacePathStore, + spacePathTransformation store.SpacePathTransformation, +) store.SpacePathCache { + return &pathCache{ + inner: cache.New[string, *types.SpacePath]( + &pathCacheGetter{ + spacePathStore: pathStore, + }, + 1*time.Minute), + spacePathTransformation: spacePathTransformation, + } +} + +// ProvideRepoGitInfoCache provides a cache for storing types.RepositoryGitInfo objects. +func ProvideRepoGitInfoCache(getter store.RepoGitInfoView) store.RepoGitInfoCache { + return cache.New[int64, *types.RepositoryGitInfo](getter, 15*time.Minute) +} diff --git a/internal/store/database.go b/internal/store/database.go new file mode 100644 index 0000000000..88f9543aab --- /dev/null +++ b/internal/store/database.go @@ -0,0 +1,726 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package store defines the data storage interfaces. 
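pathCache.Get builds a normalized lookup key before consulting the inner cache, so differently-cased raw paths resolve to the same entry when the injected transformation is case-insensitive. Below is a standalone toy that mimics the key-building loop; the lowercase transformation and the (segment, isRoot) signature are assumptions inferred from the code above, and strings.Split stands in for the internal paths helpers:

// pathkey_sketch.go (standalone toy, not part of the patch).
package main

import (
	"fmt"
	"strings"
)

// spacePathTransformation mirrors the assumed shape of store.SpacePathTransformation:
// the bool reports whether the segment is the top-level space.
type spacePathTransformation func(original string, isRoot bool) string

func lowercaseTransformation(original string, _ bool) string {
	return strings.ToLower(original)
}

// uniqueKeyFor rebuilds the key similarly to pathCache.Get: transform each segment,
// then join the segments back into a single path used as the cache key.
func uniqueKeyFor(rawPath string, transform spacePathTransformation) string {
	key := ""
	for i, segment := range strings.Split(rawPath, "/") {
		if key != "" {
			key += "/"
		}
		key += transform(segment, i == 0)
	}
	return key
}

func main() {
	fmt.Println(uniqueKeyFor("Root/Gitness", lowercaseTransformation)) // prints "root/gitness"
}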
+package store + +import ( + "context" + "time" + + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +type ( + // PrincipalStore defines the principal data storage. + PrincipalStore interface { + /* + * PRINCIPAL RELATED OPERATIONS. + */ + // Find finds the principal by id. + Find(ctx context.Context, id int64) (*types.Principal, error) + + // FindByUID finds the principal by uid. + FindByUID(ctx context.Context, uid string) (*types.Principal, error) + + // FindManyByUID returns all principals found for the provided UIDs. + // If a UID isn't found, it's not returned in the list. + FindManyByUID(ctx context.Context, uids []string) ([]*types.Principal, error) + + // FindByEmail finds the principal by email. + FindByEmail(ctx context.Context, email string) (*types.Principal, error) + + /* + * USER RELATED OPERATIONS. + */ + + // FindUser finds the user by id. + FindUser(ctx context.Context, id int64) (*types.User, error) + + // List lists the principals matching the provided filter. + List(ctx context.Context, fetchQuery *types.PrincipalFilter) ([]*types.Principal, error) + + // FindUserByUID finds the user by uid. + FindUserByUID(ctx context.Context, uid string) (*types.User, error) + + // FindUserByEmail finds the user by email. + FindUserByEmail(ctx context.Context, email string) (*types.User, error) + + // CreateUser saves the user details. + CreateUser(ctx context.Context, user *types.User) error + + // UpdateUser updates an existing user. + UpdateUser(ctx context.Context, user *types.User) error + + // DeleteUser deletes the user. + DeleteUser(ctx context.Context, id int64) error + + // ListUsers returns a list of users. + ListUsers(ctx context.Context, params *types.UserFilter) ([]*types.User, error) + + // CountUsers returns a count of users which match the given filter. + CountUsers(ctx context.Context, opts *types.UserFilter) (int64, error) + + /* + * SERVICE ACCOUNT RELATED OPERATIONS. + */ + + // FindServiceAccount finds the service account by id. + FindServiceAccount(ctx context.Context, id int64) (*types.ServiceAccount, error) + + // FindServiceAccountByUID finds the service account by uid. + FindServiceAccountByUID(ctx context.Context, uid string) (*types.ServiceAccount, error) + + // CreateServiceAccount saves the service account. + CreateServiceAccount(ctx context.Context, sa *types.ServiceAccount) error + + // UpdateServiceAccount updates the service account details. + UpdateServiceAccount(ctx context.Context, sa *types.ServiceAccount) error + + // DeleteServiceAccount deletes the service account. + DeleteServiceAccount(ctx context.Context, id int64) error + + // ListServiceAccounts returns a list of service accounts for a specific parent. + ListServiceAccounts(ctx context.Context, + parentType enum.ParentResourceType, parentID int64) ([]*types.ServiceAccount, error) + + // CountServiceAccounts returns a count of service accounts for a specific parent. + CountServiceAccounts(ctx context.Context, + parentType enum.ParentResourceType, parentID int64) (int64, error) + + /* + * SERVICE RELATED OPERATIONS. + */ + + // FindService finds the service by id. + FindService(ctx context.Context, id int64) (*types.Service, error) + + // FindServiceByUID finds the service by uid. + FindServiceByUID(ctx context.Context, uid string) (*types.Service, error) + + // CreateService saves the service. + CreateService(ctx context.Context, sa *types.Service) error + + // UpdateService updates the service. 
+ UpdateService(ctx context.Context, sa *types.Service) error + + // DeleteService deletes the service. + DeleteService(ctx context.Context, id int64) error + + // ListServices returns a list of service for a specific parent. + ListServices(ctx context.Context) ([]*types.Service, error) + + // CountServices returns a count of service for a specific parent. + CountServices(ctx context.Context) (int64, error) + } + + // PrincipalInfoView defines helper utility for fetching types.PrincipalInfo objects. + // It uses the same underlying data storage as PrincipalStore. + PrincipalInfoView interface { + Find(ctx context.Context, id int64) (*types.PrincipalInfo, error) + FindMany(ctx context.Context, ids []int64) ([]*types.PrincipalInfo, error) + } + + // SpacePathStore defines the path data storage for spaces. + SpacePathStore interface { + // InsertSegment inserts a space path segment to the table. + InsertSegment(ctx context.Context, segment *types.SpacePathSegment) error + + // FindPrimaryBySpaceID finds the primary path of a space given its ID. + FindPrimaryBySpaceID(ctx context.Context, spaceID int64) (*types.SpacePath, error) + + // FindByPath returns the space path for a given raw path. + FindByPath(ctx context.Context, path string) (*types.SpacePath, error) + + // DeletePrimarySegment deletes the primary segment of a space. + DeletePrimarySegment(ctx context.Context, spaceID int64) error + } + + // SpaceStore defines the space data storage. + SpaceStore interface { + // Find the space by id. + Find(ctx context.Context, id int64) (*types.Space, error) + + // FindByRef finds the space using the spaceRef as either the id or the space path. + FindByRef(ctx context.Context, spaceRef string) (*types.Space, error) + + // Create creates a new space + Create(ctx context.Context, space *types.Space) error + + // Update updates the space details. + Update(ctx context.Context, space *types.Space) error + + // UpdateOptLock updates the space using the optimistic locking mechanism. + UpdateOptLock(ctx context.Context, space *types.Space, + mutateFn func(space *types.Space) error) (*types.Space, error) + + // Delete deletes the space. + Delete(ctx context.Context, id int64) error + + // Count the child spaces of a space. + Count(ctx context.Context, id int64, opts *types.SpaceFilter) (int64, error) + + // List returns a list of child spaces in a space. + List(ctx context.Context, id int64, opts *types.SpaceFilter) ([]*types.Space, error) + } + + // RepoStore defines the repository data storage. + RepoStore interface { + // Find the repo by id. + Find(ctx context.Context, id int64) (*types.Repository, error) + + // FindByRef finds the repo using the repoRef as either the id or the repo path. + FindByRef(ctx context.Context, repoRef string) (*types.Repository, error) + + // Create a new repo. + Create(ctx context.Context, repo *types.Repository) error + + // Update the repo details. + Update(ctx context.Context, repo *types.Repository) error + + // UpdateOptLock the repo details using the optimistic locking mechanism. + UpdateOptLock(ctx context.Context, repo *types.Repository, + mutateFn func(repository *types.Repository) error) (*types.Repository, error) + + // Delete the repo. + Delete(ctx context.Context, id int64) error + + // Count of repos in a space. + Count(ctx context.Context, parentID int64, opts *types.RepoFilter) (int64, error) + + // List returns a list of repos in a space. 
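Several stores above expose the same UpdateOptLock(ctx, entity, mutateFn) shape: callers express the change as a closure over a freshly loaded copy, which the store can re-apply if the optimistic version check fails. A hedged caller sketch (the function name is an example and the retry behaviour described in the comment is an expectation, not a guarantee of this patch):

// repo_update_sketch.go (illustrative caller; names are examples, not part of the patch).
package example

import (
	"context"
	"fmt"

	"github.com/harness/gitness/internal/store"
	"github.com/harness/gitness/types"
)

// setDefaultBranch changes a single field without clobbering concurrent writers.
// The mutation closure is applied to the latest copy of the row, so it should stay
// idempotent and side-effect free: it may be invoked more than once on retries.
func setDefaultBranch(
	ctx context.Context,
	repoStore store.RepoStore,
	repoID int64,
	branch string,
) (*types.Repository, error) {
	repo, err := repoStore.Find(ctx, repoID)
	if err != nil {
		return nil, fmt.Errorf("failed to find repo: %w", err)
	}

	return repoStore.UpdateOptLock(ctx, repo, func(r *types.Repository) error {
		r.DefaultBranch = branch
		return nil
	})
}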
+ List(ctx context.Context, parentID int64, opts *types.RepoFilter) ([]*types.Repository, error) + } + + // RepoGitInfoView defines the repository GitUID view. + RepoGitInfoView interface { + Find(ctx context.Context, id int64) (*types.RepositoryGitInfo, error) + } + + // MembershipStore defines the membership data storage. + MembershipStore interface { + Find(ctx context.Context, key types.MembershipKey) (*types.Membership, error) + FindUser(ctx context.Context, key types.MembershipKey) (*types.MembershipUser, error) + Create(ctx context.Context, membership *types.Membership) error + Update(ctx context.Context, membership *types.Membership) error + Delete(ctx context.Context, key types.MembershipKey) error + CountUsers(ctx context.Context, spaceID int64, filter types.MembershipUserFilter) (int64, error) + ListUsers(ctx context.Context, spaceID int64, filter types.MembershipUserFilter) ([]types.MembershipUser, error) + CountSpaces(ctx context.Context, userID int64, filter types.MembershipSpaceFilter) (int64, error) + ListSpaces(ctx context.Context, userID int64, filter types.MembershipSpaceFilter) ([]types.MembershipSpace, error) + } + + // TokenStore defines the token data storage. + TokenStore interface { + // Find finds the token by id + Find(ctx context.Context, id int64) (*types.Token, error) + + // FindByUID finds the token by principalId and tokenUID + FindByUID(ctx context.Context, principalID int64, tokenUID string) (*types.Token, error) + + // Create saves the token details. + Create(ctx context.Context, token *types.Token) error + + // Delete deletes the token with the given id. + Delete(ctx context.Context, id int64) error + + // DeleteForPrincipal deletes all tokens for a specific principal + DeleteForPrincipal(ctx context.Context, principalID int64) error + + // List returns a list of tokens of a specific type for a specific principal. + List(ctx context.Context, principalID int64, tokenType enum.TokenType) ([]*types.Token, error) + + // Count returns a count of tokens of a specifc type for a specific principal. + Count(ctx context.Context, principalID int64, tokenType enum.TokenType) (int64, error) + } + + // PullReqStore defines the pull request data storage. + PullReqStore interface { + // Find the pull request by id. + Find(ctx context.Context, id int64) (*types.PullReq, error) + + // FindByNumberWithLock finds the pull request by repo ID and the pull request number + // and acquires an exclusive lock of the pull request database row for the duration of the transaction. + FindByNumberWithLock(ctx context.Context, repoID, number int64) (*types.PullReq, error) + + // FindByNumber finds the pull request by repo ID and the pull request number. + FindByNumber(ctx context.Context, repoID, number int64) (*types.PullReq, error) + + // Create a new pull request. + Create(ctx context.Context, pullreq *types.PullReq) error + + // Update the pull request. It will set new values to the Version and Updated fields. + Update(ctx context.Context, pr *types.PullReq) error + + // UpdateOptLock the pull request details using the optimistic locking mechanism. + UpdateOptLock(ctx context.Context, pr *types.PullReq, + mutateFn func(pr *types.PullReq) error) (*types.PullReq, error) + + // UpdateActivitySeq the pull request's activity sequence number. + // It will set new values to the ActivitySeq, Version and Updated fields. 
+ UpdateActivitySeq(ctx context.Context, pr *types.PullReq) (*types.PullReq, error) + + // Update all PR where target branch points to new SHA + UpdateMergeCheckStatus(ctx context.Context, targetRepo int64, targetBranch string, status enum.MergeCheckStatus) error + + // Delete the pull request. + Delete(ctx context.Context, id int64) error + + // Count of pull requests in a space. + Count(ctx context.Context, opts *types.PullReqFilter) (int64, error) + + // List returns a list of pull requests in a space. + List(ctx context.Context, opts *types.PullReqFilter) ([]*types.PullReq, error) + } + + PullReqActivityStore interface { + // Find the pull request activity by id. + Find(ctx context.Context, id int64) (*types.PullReqActivity, error) + + // Create a new pull request activity. Value of the Order field should be fetched with UpdateActivitySeq. + // Value of the SubOrder field (for replies) should be the incremented ReplySeq field (non-replies have 0). + Create(ctx context.Context, act *types.PullReqActivity) error + + // CreateWithPayload create a new system activity from the provided payload. + CreateWithPayload(ctx context.Context, + pr *types.PullReq, principalID int64, payload types.PullReqActivityPayload) (*types.PullReqActivity, error) + + // Update the pull request activity. It will set new values to the Version and Updated fields. + Update(ctx context.Context, act *types.PullReqActivity) error + + // UpdateOptLock updates the pull request activity using the optimistic locking mechanism. + UpdateOptLock(ctx context.Context, + act *types.PullReqActivity, + mutateFn func(act *types.PullReqActivity) error, + ) (*types.PullReqActivity, error) + + // Count returns number of pull request activities in a pull request. + Count(ctx context.Context, prID int64, opts *types.PullReqActivityFilter) (int64, error) + + // CountUnresolved returns number of unresolved comments. + CountUnresolved(ctx context.Context, prID int64) (int, error) + + // List returns a list of pull request activities in a pull request (a timeline). + List(ctx context.Context, prID int64, opts *types.PullReqActivityFilter) ([]*types.PullReqActivity, error) + } + + // CodeCommentView is to manipulate only code-comment subset of PullReqActivity. + // It's used by internal service that migrates code comment line numbers after new commits. + CodeCommentView interface { + // ListNotAtSourceSHA loads code comments that need to be updated after a new commit. + // Resulting list is ordered by the file name and the relevant line number. + ListNotAtSourceSHA(ctx context.Context, prID int64, sourceSHA string) ([]*types.CodeComment, error) + + // ListNotAtMergeBaseSHA loads code comments that need to be updated after merge base update. + // Resulting list is ordered by the file name and the relevant line number. + ListNotAtMergeBaseSHA(ctx context.Context, prID int64, targetSHA string) ([]*types.CodeComment, error) + + // UpdateAll updates code comments (pull request activity of types code-comment). + // entities coming from the input channel. + UpdateAll(ctx context.Context, codeComments []*types.CodeComment) error + } + + // PullReqReviewStore defines the pull request review storage. + PullReqReviewStore interface { + // Find returns the pull request review entity or an error if it doesn't exist. + Find(ctx context.Context, id int64) (*types.PullReqReview, error) + + // Create creates a new pull request review. 
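The comments above describe a small handshake between PullReqStore and PullReqActivityStore: UpdateActivitySeq reserves the next timeline position, and the returned ActivitySeq becomes the Order of the newly created activity. A sketch of that flow (the PullReqActivity field names used here follow the comments but are assumptions):

// pr_comment_sketch.go (illustrative only; PullReqActivity field names are assumptions).
package example

import (
	"context"
	"fmt"

	"github.com/harness/gitness/internal/store"
	"github.com/harness/gitness/types"
)

// addTopLevelComment reserves the next timeline position via UpdateActivitySeq and
// uses it as the Order of the new activity, as the interface comments describe.
func addTopLevelComment(
	ctx context.Context,
	prStore store.PullReqStore,
	actStore store.PullReqActivityStore,
	pr *types.PullReq,
	act *types.PullReqActivity,
) error {
	pr, err := prStore.UpdateActivitySeq(ctx, pr)
	if err != nil {
		return fmt.Errorf("failed to increment activity sequence: %w", err)
	}

	act.PullReqID = pr.ID      // assumed field names on types.PullReqActivity
	act.Order = pr.ActivitySeq // top-level entries take the freshly reserved sequence number
	act.SubOrder = 0           // replies would use an incremented ReplySeq instead

	if err := actStore.Create(ctx, act); err != nil {
		return fmt.Errorf("failed to create pull request activity: %w", err)
	}

	return nil
}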
+ Create(ctx context.Context, v *types.PullReqReview) error + } + + // PullReqReviewerStore defines the pull request reviewer storage. + PullReqReviewerStore interface { + // Find returns the pull request reviewer or an error if it doesn't exist. + Find(ctx context.Context, prID, principalID int64) (*types.PullReqReviewer, error) + + // Create creates the new pull request reviewer. + Create(ctx context.Context, v *types.PullReqReviewer) error + + // Update updates the pull request reviewer. + Update(ctx context.Context, v *types.PullReqReviewer) error + + // Delete the Pull request reviewer + Delete(ctx context.Context, prID, principalID int64) error + + // List returns all pull request reviewers for the pull request. + List(ctx context.Context, prID int64) ([]*types.PullReqReviewer, error) + } + + // PullReqFileViewStore stores information about what file a user viewed. + PullReqFileViewStore interface { + // Upsert inserts or updates the latest viewed sha for a file in a PR. + Upsert(ctx context.Context, fileView *types.PullReqFileView) error + + // DeleteByFileForPrincipal deletes the entry for the specified PR, principal, and file. + DeleteByFileForPrincipal(ctx context.Context, prID int64, principalID int64, filePath string) error + + // MarkObsolete updates all entries of the files as obsolete for the PR. + MarkObsolete(ctx context.Context, prID int64, filePaths []string) error + + // List lists all files marked as viewed by the user for the specified PR. + List(ctx context.Context, prID int64, principalID int64) ([]*types.PullReqFileView, error) + } + + // WebhookStore defines the webhook data storage. + WebhookStore interface { + // Find finds the webhook by id. + Find(ctx context.Context, id int64) (*types.Webhook, error) + + // Create creates a new webhook. + Create(ctx context.Context, hook *types.Webhook) error + + // Update updates an existing webhook. + Update(ctx context.Context, hook *types.Webhook) error + + // UpdateOptLock updates the webhook using the optimistic locking mechanism. + UpdateOptLock(ctx context.Context, hook *types.Webhook, + mutateFn func(hook *types.Webhook) error) (*types.Webhook, error) + + // Delete deletes the webhook for the given id. + Delete(ctx context.Context, id int64) error + + // Count counts the webhooks for a given parent type and id. + Count(ctx context.Context, parentType enum.WebhookParent, parentID int64, + opts *types.WebhookFilter) (int64, error) + + // List lists the webhooks for a given parent type and id. + List(ctx context.Context, parentType enum.WebhookParent, parentID int64, + opts *types.WebhookFilter) ([]*types.Webhook, error) + } + + // WebhookExecutionStore defines the webhook execution data storage. + WebhookExecutionStore interface { + // Find finds the webhook execution by id. + Find(ctx context.Context, id int64) (*types.WebhookExecution, error) + + // Create creates a new webhook execution entry. + Create(ctx context.Context, hook *types.WebhookExecution) error + + // ListForWebhook lists the webhook executions for a given webhook id. + ListForWebhook(ctx context.Context, webhookID int64, + opts *types.WebhookExecutionFilter) ([]*types.WebhookExecution, error) + + // ListForTrigger lists the webhook executions for a given trigger id. + ListForTrigger(ctx context.Context, triggerID string) ([]*types.WebhookExecution, error) + } + + CheckStore interface { + // Upsert creates new or updates an existing status check result. 
+ Upsert(ctx context.Context, check *types.Check) error + + // Count counts status check results for a specific commit in a repo. + Count(ctx context.Context, repoID int64, commitSHA string, opts types.CheckListOptions) (int, error) + + // List returns a list of status check results for a specific commit in a repo. + List(ctx context.Context, repoID int64, commitSHA string, opts types.CheckListOptions) ([]types.Check, error) + + // ListRecent returns a list of recently executed status checks in a repository. + ListRecent(ctx context.Context, repoID int64, since time.Time) ([]string, error) + } + + ReqCheckStore interface { + // Create creates new required status check. + Create(ctx context.Context, reqCheck *types.ReqCheck) error + + // List returns a list of required status checks for a repo. + List(ctx context.Context, repoID int64) ([]*types.ReqCheck, error) + + // Delete removes a required status checks for a repo. + Delete(ctx context.Context, repoID, reqCheckID int64) error + } + + JobStore interface { + // Find fetches a job by its unique identifier. + Find(ctx context.Context, uid string) (*types.Job, error) + + // ListByGroupID fetches all jobs for a group id + ListByGroupID(ctx context.Context, groupId string) ([]*types.Job, error) + + // DeleteByGroupID deletes all jobs for a group id + DeleteByGroupID(ctx context.Context, groupId string) (int64, error) + + // Create is used to create a new job. + Create(ctx context.Context, job *types.Job) error + + // Upsert will insert the job in the database if the job didn't already exist, + // or it will update the existing one but only if its definition has changed. + Upsert(ctx context.Context, job *types.Job) error + + // UpdateDefinition is used to update a job definition. + UpdateDefinition(ctx context.Context, job *types.Job) error + + // UpdateExecution is used to update a job before and after execution. + UpdateExecution(ctx context.Context, job *types.Job) error + + // UpdateProgress is used to update a job progress data. + UpdateProgress(ctx context.Context, job *types.Job) error + + // CountRunning returns number of jobs that are currently being run. + CountRunning(ctx context.Context) (int, error) + + // ListReady returns a list of jobs that are ready for execution. + ListReady(ctx context.Context, now time.Time, limit int) ([]*types.Job, error) + + // ListDeadlineExceeded returns a list of jobs that have exceeded their execution deadline. + ListDeadlineExceeded(ctx context.Context, now time.Time) ([]*types.Job, error) + + // NextScheduledTime returns a scheduled time of the next ready job. + NextScheduledTime(ctx context.Context, now time.Time) (time.Time, error) + + // DeleteOld removes non-recurring jobs that have finished execution or have failed. + DeleteOld(ctx context.Context, olderThan time.Time) (int64, error) + } + + PipelineStore interface { + // Find returns a pipeline given a pipeline ID from the datastore. + Find(ctx context.Context, id int64) (*types.Pipeline, error) + + // FindByUID returns a pipeline with a given UID in a space + FindByUID(ctx context.Context, id int64, uid string) (*types.Pipeline, error) + + // Create creates a new pipeline in the datastore. + Create(ctx context.Context, pipeline *types.Pipeline) error + + // Update tries to update a pipeline in the datastore + Update(ctx context.Context, pipeline *types.Pipeline) error + + // List lists the pipelines present in a repository in the datastore. 
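JobStore above is shaped for a polling scheduler: ListReady hands out due jobs, CountRunning enforces concurrency limits, and NextScheduledTime tells the loop how long it may sleep. A deliberately simplified sketch of one polling iteration follows; the real scheduler in internal/services/job is more involved, so everything below is illustrative only:

// scheduler_loop_sketch.go (deliberately simplified; the real scheduler also handles
// concurrency limits, deadlines and crash recovery).
package example

import (
	"context"
	"time"

	"github.com/harness/gitness/internal/store"
	"github.com/harness/gitness/types"
)

// runReadyJobs executes one polling iteration: fetch due jobs and hand them to an executor.
func runReadyJobs(
	ctx context.Context,
	jobStore store.JobStore,
	limit int,
	exec func(context.Context, *types.Job) error,
) error {
	jobs, err := jobStore.ListReady(ctx, time.Now(), limit)
	if err != nil {
		return err
	}

	for _, j := range jobs {
		if err := exec(ctx, j); err != nil {
			// a real scheduler would persist the failure via UpdateExecution.
			continue
		}
	}

	return nil
}

// idleDuration asks the store how long the loop may sleep before the next job is due.
func idleDuration(ctx context.Context, jobStore store.JobStore) (time.Duration, error) {
	now := time.Now()
	next, err := jobStore.NextScheduledTime(ctx, now)
	if err != nil {
		return 0, err
	}
	if d := next.Sub(now); d > 0 {
		return d, nil
	}
	return 0, nil
}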
+ List(ctx context.Context, repoID int64, pagination types.ListQueryFilter) ([]*types.Pipeline, error) + + // ListLatest lists the pipelines present in a repository in the datastore. + // It also returns latest build information for all the returned entries. + ListLatest(ctx context.Context, repoID int64, pagination types.ListQueryFilter) ([]*types.Pipeline, error) + + // UpdateOptLock updates the pipeline using the optimistic locking mechanism. + UpdateOptLock(ctx context.Context, pipeline *types.Pipeline, + mutateFn func(pipeline *types.Pipeline) error) (*types.Pipeline, error) + + // Delete deletes a pipeline ID from the datastore. + Delete(ctx context.Context, id int64) error + + // Count the number of pipelines in a repository matching the given filter. + Count(ctx context.Context, repoID int64, filter types.ListQueryFilter) (int64, error) + + // DeleteByUID deletes a pipeline with a given UID under a repo. + DeleteByUID(ctx context.Context, repoID int64, uid string) error + + // IncrementSeqNum increments the sequence number of the pipeline + IncrementSeqNum(ctx context.Context, pipeline *types.Pipeline) (*types.Pipeline, error) + } + + SecretStore interface { + // Find returns a secret given an ID + Find(ctx context.Context, id int64) (*types.Secret, error) + + // FindByUID returns a secret given a space ID and a UID + FindByUID(ctx context.Context, spaceID int64, uid string) (*types.Secret, error) + + // Create creates a new secret + Create(ctx context.Context, secret *types.Secret) error + + // Count the number of secrets in a space matching the given filter. + Count(ctx context.Context, spaceID int64, pagination types.ListQueryFilter) (int64, error) + + // UpdateOptLock updates the secret using the optimistic locking mechanism. + UpdateOptLock(ctx context.Context, secret *types.Secret, + mutateFn func(secret *types.Secret) error) (*types.Secret, error) + + // Update tries to update a secret. + Update(ctx context.Context, secret *types.Secret) error + + // Delete deletes a secret given an ID. + Delete(ctx context.Context, id int64) error + + // DeleteByUID deletes a secret given a space ID and a uid. + DeleteByUID(ctx context.Context, spaceID int64, uid string) error + + // List lists the secrets in a given space. + List(ctx context.Context, spaceID int64, filter types.ListQueryFilter) ([]*types.Secret, error) + + // ListAll lists all the secrets in a given space. + ListAll(ctx context.Context, parentID int64) ([]*types.Secret, error) + } + + ExecutionStore interface { + // Find returns a execution given an execution ID. + Find(ctx context.Context, id int64) (*types.Execution, error) + + // FindByNumber returns a execution given a pipeline and an execution number + FindByNumber(ctx context.Context, pipelineID int64, num int64) (*types.Execution, error) + + // Create creates a new execution in the datastore. + Create(ctx context.Context, execution *types.Execution) error + + // Update tries to update an execution. 
+ Update(ctx context.Context, execution *types.Execution) error + + // List lists the executions for a given pipeline ID + List(ctx context.Context, pipelineID int64, pagination types.Pagination) ([]*types.Execution, error) + + // Delete deletes an execution given a pipeline ID and an execution number + Delete(ctx context.Context, pipelineID int64, num int64) error + + // Count the number of executions in a space + Count(ctx context.Context, parentID int64) (int64, error) + } + + StageStore interface { + // List returns a build stage list from the datastore + // where the stage is incomplete (pending or running). + ListIncomplete(ctx context.Context) ([]*types.Stage, error) + + // List returns a list of stages corresponding to an execution ID. + List(ctx context.Context, executionID int64) ([]*types.Stage, error) + + // ListWithSteps returns a stage list from the datastore corresponding to an execution, + // with the individual steps included. + ListWithSteps(ctx context.Context, executionID int64) ([]*types.Stage, error) + + // Find returns a build stage from the datastore by ID. + Find(ctx context.Context, stageID int64) (*types.Stage, error) + + // FindByNumber returns a stage from the datastore by number. + FindByNumber(ctx context.Context, executionID int64, stageNum int) (*types.Stage, error) + + // Update tries to update a stage and returns an optimistic locking error if it was + // unable to do so. + Update(ctx context.Context, stage *types.Stage) error + + // Create creates a new stage. + Create(ctx context.Context, stage *types.Stage) error + } + + StepStore interface { + // FindByNumber returns a step from the datastore by number. + FindByNumber(ctx context.Context, stageID int64, stepNum int) (*types.Step, error) + + // Create creates a new step. + Create(ctx context.Context, step *types.Step) error + + // Update tries to update a step and returns an optimistic locking error if it was + // unable to do so. + Update(ctx context.Context, e *types.Step) error + } + + ConnectorStore interface { + // Find returns a connector given an ID. + Find(ctx context.Context, id int64) (*types.Connector, error) + + // FindByUID returns a connector given a space ID and a UID. + FindByUID(ctx context.Context, spaceID int64, uid string) (*types.Connector, error) + + // Create creates a new connector. + Create(ctx context.Context, connector *types.Connector) error + + // Count the number of connectors in a space matching the given filter. + Count(ctx context.Context, spaceID int64, pagination types.ListQueryFilter) (int64, error) + + // UpdateOptLock updates the connector using the optimistic locking mechanism. + UpdateOptLock(ctx context.Context, connector *types.Connector, + mutateFn func(connector *types.Connector) error) (*types.Connector, error) + + // Update tries to update a connector. + Update(ctx context.Context, connector *types.Connector) error + + // Delete deletes a connector given an ID. + Delete(ctx context.Context, id int64) error + + // DeleteByUID deletes a connector given a space ID and a uid. + DeleteByUID(ctx context.Context, spaceID int64, uid string) error + + // List lists the connectors in a given space. + List(ctx context.Context, spaceID int64, filter types.ListQueryFilter) ([]*types.Connector, error) + } + + TemplateStore interface { + // Find returns a template given an ID. + Find(ctx context.Context, id int64) (*types.Template, error) + + // FindByUID returns a template given a space ID and a UID. 
+ FindByUID(ctx context.Context, spaceID int64, uid string) (*types.Template, error) + + // Create creates a new template. + Create(ctx context.Context, template *types.Template) error + + // Count the number of templates in a space matching the given filter. + Count(ctx context.Context, spaceID int64, pagination types.ListQueryFilter) (int64, error) + + // UpdateOptLock updates the template using the optimistic locking mechanism. + UpdateOptLock(ctx context.Context, template *types.Template, + mutateFn func(template *types.Template) error) (*types.Template, error) + + // Update tries to update a template. + Update(ctx context.Context, template *types.Template) error + + // Delete deletes a template given an ID. + Delete(ctx context.Context, id int64) error + + // DeleteByUID deletes a template given a space ID and a uid. + DeleteByUID(ctx context.Context, spaceID int64, uid string) error + + // List lists the templates in a given space. + List(ctx context.Context, spaceID int64, filter types.ListQueryFilter) ([]*types.Template, error) + } + + TriggerStore interface { + // FindByUID returns a trigger given a pipeline and a trigger UID. + FindByUID(ctx context.Context, pipelineID int64, uid string) (*types.Trigger, error) + + // Create creates a new trigger in the datastore. + Create(ctx context.Context, trigger *types.Trigger) error + + // Update tries to update an trigger. + Update(ctx context.Context, trigger *types.Trigger) error + + // UpdateOptLock updates the trigger using the optimistic locking mechanism. + UpdateOptLock(ctx context.Context, trigger *types.Trigger, + mutateFn func(trigger *types.Trigger) error) (*types.Trigger, error) + + // List lists the triggers for a given pipeline ID. + List(ctx context.Context, pipelineID int64, filter types.ListQueryFilter) ([]*types.Trigger, error) + + // Delete deletes an trigger given a pipeline ID and a trigger UID. + DeleteByUID(ctx context.Context, pipelineID int64, uid string) error + + // Count the number of triggers in a pipeline. + Count(ctx context.Context, pipelineID int64, filter types.ListQueryFilter) (int64, error) + + // ListAllEnabled lists all enabled triggers for a given repo without pagination. + // It's used only internally to trigger builds. + ListAllEnabled(ctx context.Context, repoID int64) ([]*types.Trigger, error) + } + + PluginStore interface { + // List returns back the list of plugins matching the given filter + // along with their associated schemas. + List(ctx context.Context, filter types.ListQueryFilter) ([]*types.Plugin, error) + + // ListAll returns back the full list of plugins. + ListAll(ctx context.Context) ([]*types.Plugin, error) + + // Create creates a new entry in the plugin datastore. + Create(ctx context.Context, plugin *types.Plugin) error + + // Update tries to update an trigger. + Update(ctx context.Context, plugin *types.Plugin) error + + // Count counts the number of plugins matching the given filter. + Count(ctx context.Context, filter types.ListQueryFilter) (int64, error) + + // Find returns a plugin given a name and a version. + Find(ctx context.Context, name, version string) (*types.Plugin, error) + } +) diff --git a/internal/store/database/check.go b/internal/store/database/check.go new file mode 100644 index 0000000000..5d334328d2 --- /dev/null +++ b/internal/store/database/check.go @@ -0,0 +1,300 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package database + +import ( + "context" + "encoding/json" + "fmt" + "time" + + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/store/database" + "github.com/harness/gitness/store/database/dbtx" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/jmoiron/sqlx" + "github.com/pkg/errors" +) + +var _ store.CheckStore = (*CheckStore)(nil) + +// NewCheckStore returns a new CheckStore. +func NewCheckStore( + db *sqlx.DB, + pCache store.PrincipalInfoCache, +) *CheckStore { + return &CheckStore{ + db: db, + pCache: pCache, + } +} + +// CheckStore implements store.CheckStore backed by a relational database. +type CheckStore struct { + db *sqlx.DB + pCache store.PrincipalInfoCache +} + +const ( + checkColumns = ` + check_id + ,check_created_by + ,check_created + ,check_updated + ,check_repo_id + ,check_commit_sha + ,check_uid + ,check_status + ,check_summary + ,check_link + ,check_payload + ,check_metadata + ,check_payload_kind + ,check_payload_version` +) + +type check struct { + ID int64 `db:"check_id"` + CreatedBy int64 `db:"check_created_by"` + Created int64 `db:"check_created"` + Updated int64 `db:"check_updated"` + RepoID int64 `db:"check_repo_id"` + CommitSHA string `db:"check_commit_sha"` + UID string `db:"check_uid"` + Status enum.CheckStatus `db:"check_status"` + Summary string `db:"check_summary"` + Link string `db:"check_link"` + Payload json.RawMessage `db:"check_payload"` + Metadata json.RawMessage `db:"check_metadata"` + PayloadKind enum.CheckPayloadKind `db:"check_payload_kind"` + PayloadVersion string `db:"check_payload_version"` +} + +// Upsert creates new or updates an existing status check result. 
+func (s *CheckStore) Upsert(ctx context.Context, check *types.Check) error { + const sqlQuery = ` + INSERT INTO checks ( + check_created_by + ,check_created + ,check_updated + ,check_repo_id + ,check_commit_sha + ,check_uid + ,check_status + ,check_summary + ,check_link + ,check_payload + ,check_metadata + ,check_payload_kind + ,check_payload_version + ) VALUES ( + :check_created_by + ,:check_created + ,:check_updated + ,:check_repo_id + ,:check_commit_sha + ,:check_uid + ,:check_status + ,:check_summary + ,:check_link + ,:check_payload + ,:check_metadata + ,:check_payload_kind + ,:check_payload_version + ) + ON CONFLICT (check_repo_id, check_commit_sha, check_uid) DO + UPDATE SET + check_updated = :check_updated + ,check_status = :check_status + ,check_summary = :check_summary + ,check_link = :check_link + ,check_payload = :check_payload + ,check_metadata = :check_metadata + ,check_payload_kind = :check_payload_kind + ,check_payload_version = :check_payload_version + RETURNING check_id, check_created_by, check_created` + + db := dbtx.GetAccessor(ctx, s.db) + + query, arg, err := db.BindNamed(sqlQuery, mapInternalCheck(check)) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind status check object") + } + + if err = db.QueryRowContext(ctx, query, arg...).Scan(&check.ID, &check.CreatedBy, &check.Created); err != nil { + return database.ProcessSQLErrorf(err, "Upsert query failed") + } + + return nil +} + +// Count counts status check results for a specific commit in a repo. +func (s *CheckStore) Count(ctx context.Context, + repoID int64, + commitSHA string, + _ types.CheckListOptions, +) (int, error) { + stmt := database.Builder. + Select("count(*)"). + From("checks"). + Where("check_repo_id = ?", repoID). + Where("check_commit_sha = ?", commitSHA) + + sql, args, err := stmt.ToSql() + if err != nil { + return 0, errors.Wrap(err, "Failed to convert query to sql") + } + + db := dbtx.GetAccessor(ctx, s.db) + + var count int + err = db.QueryRowContext(ctx, sql, args...).Scan(&count) + if err != nil { + return 0, database.ProcessSQLErrorf(err, "Failed to execute count status checks query") + } + + return count, nil +} + +// List returns a list of status check results for a specific commit in a repo. +func (s *CheckStore) List(ctx context.Context, + repoID int64, + commitSHA string, + opts types.CheckListOptions, +) ([]types.Check, error) { + stmt := database.Builder. + Select(checkColumns). + From("checks"). + Where("check_repo_id = ?", repoID). + Where("check_commit_sha = ?", commitSHA). + Limit(database.Limit(opts.Size)). + Offset(database.Offset(opts.Page, opts.Size)). + OrderBy("check_updated desc") + + sql, args, err := stmt.ToSql() + if err != nil { + return nil, errors.Wrap(err, "Failed to convert query to sql") + } + + dst := make([]*check, 0) + + db := dbtx.GetAccessor(ctx, s.db) + + if err = db.SelectContext(ctx, &dst, sql, args...); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed to execute list status checks query") + } + + result, err := s.mapSliceCheck(ctx, dst) + if err != nil { + return nil, err + } + + return result, nil +} + +// ListRecent returns a list of recently executed status checks in a repository. +func (s *CheckStore) ListRecent(ctx context.Context, repoID int64, since time.Time) ([]string, error) { + stmt := database.Builder. + Select("distinct check_uid"). + From("checks"). + Where("check_repo_id = ?", repoID). + Where("check_created > ?", since.UnixMilli()). 
+ OrderBy("check_uid") + + sql, args, err := stmt.ToSql() + if err != nil { + return nil, errors.Wrap(err, "Failed to convert query to sql") + } + + dst := make([]string, 0) + + db := dbtx.GetAccessor(ctx, s.db) + + if err = db.SelectContext(ctx, &dst, sql, args...); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed to execute list recent status checks query") + } + + return dst, nil +} + +func mapInternalCheck(c *types.Check) *check { + m := &check{ + ID: c.ID, + CreatedBy: c.CreatedBy, + Created: c.Created, + Updated: c.Updated, + RepoID: c.RepoID, + CommitSHA: c.CommitSHA, + UID: c.UID, + Status: c.Status, + Summary: c.Summary, + Link: c.Link, + Payload: c.Payload.Data, + Metadata: c.Metadata, + PayloadKind: c.Payload.Kind, + PayloadVersion: c.Payload.Version, + } + + return m +} + +func mapCheck(c *check) types.Check { + return types.Check{ + ID: c.ID, + CreatedBy: c.CreatedBy, + Created: c.Created, + Updated: c.Updated, + RepoID: c.RepoID, + CommitSHA: c.CommitSHA, + UID: c.UID, + Status: c.Status, + Summary: c.Summary, + Link: c.Link, + Metadata: c.Metadata, + Payload: types.CheckPayload{ + Version: c.PayloadVersion, + Kind: c.PayloadKind, + Data: c.Payload, + }, + ReportedBy: types.PrincipalInfo{}, + } +} + +func (s *CheckStore) mapSliceCheck(ctx context.Context, checks []*check) ([]types.Check, error) { + // collect all principal IDs + ids := make([]int64, len(checks)) + for i, req := range checks { + ids[i] = req.CreatedBy + } + + // pull principal infos from cache + infoMap, err := s.pCache.Map(ctx, ids) + if err != nil { + return nil, fmt.Errorf("failed to load status check principal reporters: %w", err) + } + + // attach the principal infos back to the slice items + m := make([]types.Check, len(checks)) + for i, c := range checks { + m[i] = mapCheck(c) + if reportedBy, ok := infoMap[c.CreatedBy]; ok { + m[i].ReportedBy = *reportedBy + } + } + + return m, nil +} diff --git a/internal/store/database/check_req.go b/internal/store/database/check_req.go new file mode 100644 index 0000000000..ee2580cb35 --- /dev/null +++ b/internal/store/database/check_req.go @@ -0,0 +1,201 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package database + +import ( + "context" + "fmt" + + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/store/database" + "github.com/harness/gitness/store/database/dbtx" + "github.com/harness/gitness/types" + + "github.com/jmoiron/sqlx" + "github.com/pkg/errors" +) + +var _ store.ReqCheckStore = (*ReqCheckStore)(nil) + +// NewReqCheckStore returns a new CheckStore. +func NewReqCheckStore( + db *sqlx.DB, + pCache store.PrincipalInfoCache, +) *ReqCheckStore { + return &ReqCheckStore{ + db: db, + pCache: pCache, + } +} + +// ReqCheckStore implements store.CheckStore backed by a relational database. 
+type ReqCheckStore struct { + db *sqlx.DB + pCache store.PrincipalInfoCache +} + +const ( + reqCheckColumns = ` + reqcheck_id + ,reqcheck_created_by + ,reqcheck_created + ,reqcheck_repo_id + ,reqcheck_branch_pattern + ,reqcheck_check_uid` +) + +// reqCheck is used to fetch required status check data from the database. +// The object should be later re-packed into a different struct to return it as an API response. +type reqCheck struct { + ID int64 `db:"reqcheck_id"` + CreatedBy int64 `db:"reqcheck_created_by"` + Created int64 `db:"reqcheck_created"` + RepoID int64 `db:"reqcheck_repo_id"` + BranchPattern string `db:"reqcheck_branch_pattern"` + CheckUID string `db:"reqcheck_check_uid"` +} + +// Create creates new required status check. +func (s *ReqCheckStore) Create(ctx context.Context, reqCheck *types.ReqCheck) error { + const sqlQuery = ` + INSERT INTO reqchecks ( + reqcheck_created_by + ,reqcheck_created + ,reqcheck_repo_id + ,reqcheck_branch_pattern + ,reqcheck_check_uid + ) VALUES ( + :reqcheck_created_by + ,:reqcheck_created + ,:reqcheck_repo_id + ,:reqcheck_branch_pattern + ,:reqcheck_check_uid + ) + RETURNING reqcheck_id` + + db := dbtx.GetAccessor(ctx, s.db) + + query, arg, err := db.BindNamed(sqlQuery, mapInternalReqCheck(reqCheck)) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind required status check object") + } + + if err = db.QueryRowContext(ctx, query, arg...).Scan(&reqCheck.ID); err != nil { + return database.ProcessSQLErrorf(err, "Insert query failed") + } + + return nil +} + +// List returns a list of required status checks for a repo. +func (s *ReqCheckStore) List(ctx context.Context, repoID int64) ([]*types.ReqCheck, error) { + stmt := database.Builder. + Select(reqCheckColumns). + From("reqchecks"). + Where("reqcheck_repo_id = ?", repoID). + OrderBy("reqcheck_check_uid") + + sql, args, err := stmt.ToSql() + if err != nil { + return nil, errors.Wrap(err, "Failed to convert query to sql") + } + + dst := make([]*reqCheck, 0) + + db := dbtx.GetAccessor(ctx, s.db) + + if err = db.SelectContext(ctx, &dst, sql, args...); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed to execute list required status checks query") + } + + result, err := s.mapSliceReqCheck(ctx, dst) + if err != nil { + return nil, err + } + + return result, nil +} + +// Delete removes a required status checks for a repo. +func (s *ReqCheckStore) Delete(ctx context.Context, repoID, reqCheckID int64) error { + stmt := database.Builder. + Delete("reqchecks"). + Where("reqcheck_repo_id = ?", repoID). + Where("reqcheck_id = ?", reqCheckID) + + sql, args, err := stmt.ToSql() + if err != nil { + return errors.Wrap(err, "Failed to convert query to sql") + } + + db := dbtx.GetAccessor(ctx, s.db) + + _, err = db.ExecContext(ctx, sql, args...) 
+ if err != nil { + return database.ProcessSQLErrorf(err, "Failed to execute delete required status check query") + } + + return nil +} + +func mapReqCheck(req *reqCheck) *types.ReqCheck { + return &types.ReqCheck{ + ID: req.ID, + CreatedBy: req.CreatedBy, + Created: req.Created, + RepoID: req.RepoID, + BranchPattern: req.BranchPattern, + CheckUID: req.CheckUID, + AddedBy: types.PrincipalInfo{}, + } +} + +func mapInternalReqCheck(req *types.ReqCheck) *reqCheck { + m := &reqCheck{ + ID: req.ID, + CreatedBy: req.CreatedBy, + Created: req.Created, + RepoID: req.RepoID, + BranchPattern: req.BranchPattern, + CheckUID: req.CheckUID, + } + + return m +} + +func (s *ReqCheckStore) mapSliceReqCheck(ctx context.Context, reqChecks []*reqCheck) ([]*types.ReqCheck, error) { + // collect all principal IDs + ids := make([]int64, len(reqChecks)) + for i, req := range reqChecks { + ids[i] = req.CreatedBy + } + + // pull principal infos from cache + infoMap, err := s.pCache.Map(ctx, ids) + if err != nil { + return nil, fmt.Errorf("failed to load required status check principal infos: %w", err) + } + + // attach the principal infos back to the slice items + m := make([]*types.ReqCheck, len(reqChecks)) + for i, req := range reqChecks { + m[i] = mapReqCheck(req) + if author, ok := infoMap[req.CreatedBy]; ok { + m[i].AddedBy = *author + } + } + + return m, nil +} diff --git a/internal/store/database/code_comment.go b/internal/store/database/code_comment.go new file mode 100644 index 0000000000..f0c13738ef --- /dev/null +++ b/internal/store/database/code_comment.go @@ -0,0 +1,166 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package database + +import ( + "context" + "time" + + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/store/database" + "github.com/harness/gitness/store/database/dbtx" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/jmoiron/sqlx" + "github.com/pkg/errors" + "github.com/rs/zerolog/log" +) + +var _ store.CodeCommentView = (*CodeCommentView)(nil) + +// NewCodeCommentView returns a new CodeCommentView. +func NewCodeCommentView(db *sqlx.DB) *CodeCommentView { + return &CodeCommentView{ + db: db, + } +} + +// CodeCommentView implements store.CodeCommentView backed by a relational database. +type CodeCommentView struct { + db *sqlx.DB +} + +// ListNotAtSourceSHA lists all code comments not already at the provided source SHA. +func (s *CodeCommentView) ListNotAtSourceSHA(ctx context.Context, + prID int64, sourceSHA string, +) ([]*types.CodeComment, error) { + return s.list(ctx, prID, "", sourceSHA) +} + +// ListNotAtMergeBaseSHA lists all code comments not already at the provided merge base SHA. 
+func (s *CodeCommentView) ListNotAtMergeBaseSHA(ctx context.Context, + prID int64, mergeBaseSHA string, +) ([]*types.CodeComment, error) { + return s.list(ctx, prID, mergeBaseSHA, "") +} + +// list is used by internal service that updates line numbers of code comments after +// branch updates and requires either mergeBaseSHA or sourceSHA but not both. +// Resulting list is ordered by the file name and the relevant line number. +func (s *CodeCommentView) list(ctx context.Context, + prID int64, mergeBaseSHA, sourceSHA string, +) ([]*types.CodeComment, error) { + const codeCommentColumns = ` + pullreq_activity_id + ,pullreq_activity_version + ,pullreq_activity_updated + ,coalesce(pullreq_activity_outdated, false) as "pullreq_activity_outdated" + ,coalesce(pullreq_activity_code_comment_merge_base_sha, '') as "pullreq_activity_code_comment_merge_base_sha" + ,coalesce(pullreq_activity_code_comment_source_sha, '') as "pullreq_activity_code_comment_source_sha" + ,coalesce(pullreq_activity_code_comment_path, '') as "pullreq_activity_code_comment_path" + ,coalesce(pullreq_activity_code_comment_line_new, 1) as "pullreq_activity_code_comment_line_new" + ,coalesce(pullreq_activity_code_comment_span_new, 0) as "pullreq_activity_code_comment_span_new" + ,coalesce(pullreq_activity_code_comment_line_old, 1) as "pullreq_activity_code_comment_line_old" + ,coalesce(pullreq_activity_code_comment_span_old, 0) as "pullreq_activity_code_comment_span_old"` + + stmt := database.Builder. + Select(codeCommentColumns). + From("pullreq_activities"). + Where("pullreq_activity_pullreq_id = ?", prID). + Where("not pullreq_activity_outdated"). + Where("pullreq_activity_type = ?", enum.PullReqActivityTypeCodeComment). + Where("pullreq_activity_kind = ?", enum.PullReqActivityKindChangeComment). + Where("pullreq_activity_deleted is null and pullreq_activity_parent_id is null") + + if mergeBaseSHA != "" { + stmt = stmt. + Where("pullreq_activity_code_comment_merge_base_sha <> ?", mergeBaseSHA) + } else { + stmt = stmt. + Where("pullreq_activity_code_comment_source_sha <> ?", sourceSHA) + } + + stmt = stmt.OrderBy("pullreq_activity_code_comment_path asc", + "pullreq_activity_code_comment_line_new asc") + + sql, args, err := stmt.ToSql() + if err != nil { + return nil, errors.Wrap(err, "Failed to convert pull request activity query to sql") + } + + result := make([]*types.CodeComment, 0) + + db := dbtx.GetAccessor(ctx, s.db) + + if err = db.SelectContext(ctx, &result, sql, args...); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed executing code comment list query") + } + + return result, nil +} + +// UpdateAll updates all code comments provided in the slice. 
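
UpdateAll, implemented next, is the matching write path. It bumps Version and Updated on each comment itself and relies on the pullreq_activity_version guard in the UPDATE statement, so rows that lost a concurrent race are skipped with a warning rather than failing the batch. A hedged usage sketch; the codeCommentWriter interface and helper name are assumptions:

package example

import (
	"context"
	"fmt"

	"github.com/harness/gitness/types"
)

// codeCommentWriter matches the UpdateAll method implemented directly below.
type codeCommentWriter interface {
	UpdateAll(ctx context.Context, codeComments []*types.CodeComment) error
}

// writeBackCodeComments is a hypothetical caller. It assumes the slice has
// already been re-anchored (new line numbers and SHAs set by the service) and
// only demonstrates the write-back contract.
func writeBackCodeComments(ctx context.Context, view codeCommentWriter, comments []*types.CodeComment) error {
	if len(comments) == 0 {
		return nil // nothing to write back
	}
	if err := view.UpdateAll(ctx, comments); err != nil {
		return fmt.Errorf("failed to write back %d code comments: %w", len(comments), err)
	}
	return nil
}
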
+func (s *CodeCommentView) UpdateAll(ctx context.Context, codeComments []*types.CodeComment) error { + if len(codeComments) == 0 { + return nil + } + + const sqlQuery = ` + UPDATE pullreq_activities + SET + pullreq_activity_version = :pullreq_activity_version + ,pullreq_activity_updated = :pullreq_activity_updated + ,pullreq_activity_outdated = :pullreq_activity_outdated + ,pullreq_activity_code_comment_merge_base_sha = :pullreq_activity_code_comment_merge_base_sha + ,pullreq_activity_code_comment_source_sha = :pullreq_activity_code_comment_source_sha + ,pullreq_activity_code_comment_path = :pullreq_activity_code_comment_path + ,pullreq_activity_code_comment_line_new = :pullreq_activity_code_comment_line_new + ,pullreq_activity_code_comment_span_new = :pullreq_activity_code_comment_span_new + ,pullreq_activity_code_comment_line_old = :pullreq_activity_code_comment_line_old + ,pullreq_activity_code_comment_span_old = :pullreq_activity_code_comment_span_old + WHERE pullreq_activity_id = :pullreq_activity_id AND pullreq_activity_version = :pullreq_activity_version - 1` + + db := dbtx.GetAccessor(ctx, s.db) + + stmt, err := db.PrepareNamedContext(ctx, sqlQuery) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to prepare update statement for update code comments") + } + + updatedAt := time.Now() + + for _, codeComment := range codeComments { + codeComment.Version++ + codeComment.Updated = updatedAt.UnixMilli() + + result, err := stmt.ExecContext(ctx, codeComment) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to update code comment=%d", codeComment.ID) + } + + count, err := result.RowsAffected() + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to get number of updated rows for code comment=%d", codeComment.ID) + } + + if count == 0 { + log.Ctx(ctx).Warn().Msgf("Version conflict when trying to update code comment=%d", codeComment.ID) + continue + } + } + + return nil +} diff --git a/internal/store/database/connector.go b/internal/store/database/connector.go new file mode 100644 index 0000000000..7f3d3a5de3 --- /dev/null +++ b/internal/store/database/connector.go @@ -0,0 +1,278 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package database + +import ( + "context" + "fmt" + "strings" + "time" + + "github.com/harness/gitness/internal/store" + gitness_store "github.com/harness/gitness/store" + "github.com/harness/gitness/store/database" + "github.com/harness/gitness/store/database/dbtx" + "github.com/harness/gitness/types" + + "github.com/jmoiron/sqlx" + "github.com/pkg/errors" +) + +var _ store.ConnectorStore = (*connectorStore)(nil) + +const ( + connectorQueryBase = ` + SELECT` + connectorColumns + ` + FROM connectors` + + connectorColumns = ` + connector_id, + connector_description, + connector_space_id, + connector_uid, + connector_data, + connector_created, + connector_updated, + connector_version + ` +) + +// NewConnectorStore returns a new ConnectorStore. 
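
The constructor below takes an already opened *sqlx.DB. For illustration, a minimal wiring sketch that would live inside this module; the driver name and file path are placeholders, and the real server builds the handle in its dependency-injection setup rather than ad hoc:

package main

import (
	"log"

	"github.com/harness/gitness/internal/store/database"

	"github.com/jmoiron/sqlx"
	_ "github.com/mattn/go-sqlite3" // driver choice is an assumption for this sketch
)

func main() {
	// Placeholder DSN; in the real server the *sqlx.DB comes from the wiring layer.
	db, err := sqlx.Open("sqlite3", "database.sqlite3")
	if err != nil {
		log.Fatalf("failed to open database: %v", err)
	}
	defer db.Close()

	connectors := database.NewConnectorStore(db)
	_ = connectors // handed to the API layer in the real wiring
}
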
+func NewConnectorStore(db *sqlx.DB) *connectorStore { + return &connectorStore{ + db: db, + } +} + +type connectorStore struct { + db *sqlx.DB +} + +// Find returns a connector given a connector ID. +func (s *connectorStore) Find(ctx context.Context, id int64) (*types.Connector, error) { + const findQueryStmt = connectorQueryBase + ` + WHERE connector_id = $1` + db := dbtx.GetAccessor(ctx, s.db) + + dst := new(types.Connector) + if err := db.GetContext(ctx, dst, findQueryStmt, id); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed to find connector") + } + return dst, nil +} + +// FindByUID returns a connector in a given space with a given UID. +func (s *connectorStore) FindByUID(ctx context.Context, spaceID int64, uid string) (*types.Connector, error) { + const findQueryStmt = connectorQueryBase + ` + WHERE connector_space_id = $1 AND connector_uid = $2` + db := dbtx.GetAccessor(ctx, s.db) + + dst := new(types.Connector) + if err := db.GetContext(ctx, dst, findQueryStmt, spaceID, uid); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed to find connector") + } + return dst, nil +} + +// Create creates a connector. +func (s *connectorStore) Create(ctx context.Context, connector *types.Connector) error { + const connectorInsertStmt = ` + INSERT INTO connectors ( + connector_description + ,connector_type + ,connector_space_id + ,connector_uid + ,connector_data + ,connector_created + ,connector_updated + ,connector_version + ) VALUES ( + :connector_description + ,:connector_type + ,:connector_space_id + ,:connector_uid + ,:connector_data + ,:connector_created + ,:connector_updated + ,:connector_version + ) RETURNING connector_id` + db := dbtx.GetAccessor(ctx, s.db) + + query, arg, err := db.BindNamed(connectorInsertStmt, connector) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind connector object") + } + + if err = db.QueryRowContext(ctx, query, arg...).Scan(&connector.ID); err != nil { + return database.ProcessSQLErrorf(err, "connector query failed") + } + + return nil +} + +func (s *connectorStore) Update(ctx context.Context, p *types.Connector) error { + const connectorUpdateStmt = ` + UPDATE connectors + SET + connector_description = :connector_description + ,connector_uid = :connector_uid + ,connector_data = :connector_data + ,connector_type = :connector_type + ,connector_updated = :connector_updated + ,connector_version = :connector_version + WHERE connector_id = :connector_id AND connector_version = :connector_version - 1` + connector := *p + + connector.Version++ + connector.Updated = time.Now().UnixMilli() + + db := dbtx.GetAccessor(ctx, s.db) + + query, arg, err := db.BindNamed(connectorUpdateStmt, connector) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind connector object") + } + + result, err := db.ExecContext(ctx, query, arg...) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to update connector") + } + + count, err := result.RowsAffected() + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to get number of updated rows") + } + + if count == 0 { + return gitness_store.ErrVersionConflict + } + + p.Version = connector.Version + p.Updated = connector.Updated + return nil +} + +// UpdateOptLock updates the connector using the optimistic locking mechanism. 
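
UpdateOptLock, implemented next, wraps Update in a retry loop keyed on gitness_store.ErrVersionConflict. Callers express their change as a mutate function so the store can re-read the latest row and re-apply it. A sketch of that call pattern; the connectorOptLocker interface and the Description field name are assumptions for the example:

package example

import (
	"context"

	"github.com/harness/gitness/types"
)

// connectorOptLocker matches the UpdateOptLock method implemented below.
type connectorOptLocker interface {
	UpdateOptLock(ctx context.Context, connector *types.Connector,
		mutateFn func(connector *types.Connector) error) (*types.Connector, error)
}

// updateConnectorDescription shows the intended call pattern: the change is
// written as a mutate function so the store can retry transparently whenever
// the version check in Update fails.
func updateConnectorDescription(ctx context.Context, connectors connectorOptLocker,
	connector *types.Connector, description string,
) (*types.Connector, error) {
	return connectors.UpdateOptLock(ctx, connector, func(c *types.Connector) error {
		c.Description = description // field name assumed from the db column above
		return nil
	})
}
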
+func (s *connectorStore) UpdateOptLock(ctx context.Context, + connector *types.Connector, + mutateFn func(connector *types.Connector) error, +) (*types.Connector, error) { + for { + dup := *connector + + err := mutateFn(&dup) + if err != nil { + return nil, err + } + + err = s.Update(ctx, &dup) + if err == nil { + return &dup, nil + } + if !errors.Is(err, gitness_store.ErrVersionConflict) { + return nil, err + } + + connector, err = s.Find(ctx, connector.ID) + if err != nil { + return nil, err + } + } +} + +// List lists all the connectors present in a space. +func (s *connectorStore) List(ctx context.Context, parentID int64, filter types.ListQueryFilter) ([]*types.Connector, error) { + stmt := database.Builder. + Select(connectorColumns). + From("connectors"). + Where("connector_space_id = ?", fmt.Sprint(parentID)) + + if filter.Query != "" { + stmt = stmt.Where("LOWER(connector_uid) LIKE ?", fmt.Sprintf("%%%s%%", strings.ToLower(filter.Query))) + } + + stmt = stmt.Limit(database.Limit(filter.Size)) + stmt = stmt.Offset(database.Offset(filter.Page, filter.Size)) + + sql, args, err := stmt.ToSql() + if err != nil { + return nil, errors.Wrap(err, "Failed to convert query to sql") + } + + db := dbtx.GetAccessor(ctx, s.db) + + dst := []*types.Connector{} + if err = db.SelectContext(ctx, &dst, sql, args...); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed executing custom list query") + } + + return dst, nil +} + +// Delete deletes a connector given a connector ID. +func (s *connectorStore) Delete(ctx context.Context, id int64) error { + const connectorDeleteStmt = ` + DELETE FROM connectors + WHERE connector_id = $1` + + db := dbtx.GetAccessor(ctx, s.db) + + if _, err := db.ExecContext(ctx, connectorDeleteStmt, id); err != nil { + return database.ProcessSQLErrorf(err, "Could not delete connector") + } + + return nil +} + +// DeleteByUID deletes a connector with a given UID in a space. +func (s *connectorStore) DeleteByUID(ctx context.Context, spaceID int64, uid string) error { + const connectorDeleteStmt = ` + DELETE FROM connectors + WHERE connector_space_id = $1 AND connector_uid = $2` + + db := dbtx.GetAccessor(ctx, s.db) + + if _, err := db.ExecContext(ctx, connectorDeleteStmt, spaceID, uid); err != nil { + return database.ProcessSQLErrorf(err, "Could not delete connector") + } + + return nil +} + +// Count of connectors in a space. +func (s *connectorStore) Count(ctx context.Context, parentID int64, filter types.ListQueryFilter) (int64, error) { + stmt := database.Builder. + Select("count(*)"). + From("connectors"). + Where("connector_space_id = ?", parentID) + + if filter.Query != "" { + stmt = stmt.Where("LOWER(connector_uid) LIKE ?", fmt.Sprintf("%%%s%%", filter.Query)) + } + + sql, args, err := stmt.ToSql() + if err != nil { + return 0, errors.Wrap(err, "Failed to convert query to sql") + } + + db := dbtx.GetAccessor(ctx, s.db) + + var count int64 + err = db.QueryRowContext(ctx, sql, args...).Scan(&count) + if err != nil { + return 0, database.ProcessSQLErrorf(err, "Failed executing count query") + } + return count, nil +} diff --git a/internal/store/database/encode.go b/internal/store/database/encode.go new file mode 100644 index 0000000000..c497664f2e --- /dev/null +++ b/internal/store/database/encode.go @@ -0,0 +1,31 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package database
+
+import (
+	"encoding/json"
+
+	sqlx "github.com/jmoiron/sqlx/types"
+)
+
+// EncodeToSQLXJSON accepts a generic parameter and returns
+// a sqlx.JSONText object which is used to store arbitrary
+// data in the DB. We absorb the error here as the value
+// gets absorbed in sqlx.JSONText in case of UnsupportedValueError
+// or UnsupportedTypeError.
+func EncodeToSQLXJSON(v any) sqlx.JSONText {
+	raw, _ := json.Marshal(v)
+	return sqlx.JSONText(raw)
+}
diff --git a/internal/store/database/execution.go b/internal/store/database/execution.go
new file mode 100644
index 0000000000..be596e2193
--- /dev/null
+++ b/internal/store/database/execution.go
@@ -0,0 +1,370 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package database
+
+import (
+	"context"
+	"fmt"
+	"time"
+
+	"github.com/harness/gitness/internal/store"
+	gitness_store "github.com/harness/gitness/store"
+	"github.com/harness/gitness/store/database"
+	"github.com/harness/gitness/store/database/dbtx"
+	"github.com/harness/gitness/types"
+	"github.com/harness/gitness/types/enum"
+
+	"github.com/jmoiron/sqlx"
+	sqlxtypes "github.com/jmoiron/sqlx/types"
+	"github.com/pkg/errors"
+)
+
+var _ store.ExecutionStore = (*executionStore)(nil)
+
+// NewExecutionStore returns a new ExecutionStore.
+func NewExecutionStore(db *sqlx.DB) *executionStore {
+	return &executionStore{
+		db: db,
+	}
+}
+
+type executionStore struct {
+	db *sqlx.DB
+}
+
+// execution represents an execution object stored in the database.
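
The execution row type defined next keeps Params as JSON text produced by EncodeToSQLXJSON from encode.go above; mapInternalToExecution later in this patch unmarshals it back into a map[string]string. A small round-trip sketch with made-up parameter values:

package example

import (
	"fmt"

	"github.com/harness/gitness/internal/store/database"
)

// roundTripParams shows how execution parameters are persisted and read back:
// the write path encodes the map into a sqlx JSONText column value, and the
// read path unmarshals it again.
func roundTripParams() error {
	params := map[string]string{"branch": "main", "debug": "false"} // made-up values

	encoded := database.EncodeToSQLXJSON(params)

	decoded := map[string]string{}
	if err := encoded.Unmarshal(&decoded); err != nil {
		return fmt.Errorf("failed to decode execution params: %w", err)
	}

	fmt.Println(decoded["branch"]) // prints "main"
	return nil
}
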
+type execution struct { + ID int64 `db:"execution_id"` + PipelineID int64 `db:"execution_pipeline_id"` + CreatedBy int64 `db:"execution_created_by"` + RepoID int64 `db:"execution_repo_id"` + Trigger string `db:"execution_trigger"` + Number int64 `db:"execution_number"` + Parent int64 `db:"execution_parent"` + Status enum.CIStatus `db:"execution_status"` + Error string `db:"execution_error"` + Event string `db:"execution_event"` + Action string `db:"execution_action"` + Link string `db:"execution_link"` + Timestamp int64 `db:"execution_timestamp"` + Title string `db:"execution_title"` + Message string `db:"execution_message"` + Before string `db:"execution_before"` + After string `db:"execution_after"` + Ref string `db:"execution_ref"` + Fork string `db:"execution_source_repo"` + Source string `db:"execution_source"` + Target string `db:"execution_target"` + Author string `db:"execution_author"` + AuthorName string `db:"execution_author_name"` + AuthorEmail string `db:"execution_author_email"` + AuthorAvatar string `db:"execution_author_avatar"` + Sender string `db:"execution_sender"` + Params sqlxtypes.JSONText `db:"execution_params"` + Cron string `db:"execution_cron"` + Deploy string `db:"execution_deploy"` + DeployID int64 `db:"execution_deploy_id"` + Debug bool `db:"execution_debug"` + Started int64 `db:"execution_started"` + Finished int64 `db:"execution_finished"` + Created int64 `db:"execution_created"` + Updated int64 `db:"execution_updated"` + Version int64 `db:"execution_version"` +} + +const ( + executionColumns = ` + execution_id + ,execution_pipeline_id + ,execution_created_by + ,execution_repo_id + ,execution_trigger + ,execution_number + ,execution_parent + ,execution_status + ,execution_error + ,execution_event + ,execution_action + ,execution_link + ,execution_timestamp + ,execution_title + ,execution_message + ,execution_before + ,execution_after + ,execution_ref + ,execution_source_repo + ,execution_source + ,execution_target + ,execution_author + ,execution_author_name + ,execution_author_email + ,execution_author_avatar + ,execution_sender + ,execution_params + ,execution_cron + ,execution_deploy + ,execution_deploy_id + ,execution_debug + ,execution_started + ,execution_finished + ,execution_created + ,execution_updated + ,execution_version + ` +) + +// Find returns an execution given an execution ID. +func (s *executionStore) Find(ctx context.Context, id int64) (*types.Execution, error) { + const findQueryStmt = ` + SELECT` + executionColumns + ` + FROM executions + WHERE execution_id = $1` + db := dbtx.GetAccessor(ctx, s.db) + + dst := new(execution) + if err := db.GetContext(ctx, dst, findQueryStmt, id); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed to find execution") + } + return mapInternalToExecution(dst) +} + +// FindByNumber returns an execution given a pipeline ID and an execution number. +func (s *executionStore) FindByNumber(ctx context.Context, pipelineID int64, executionNum int64) (*types.Execution, error) { + const findQueryStmt = ` + SELECT` + executionColumns + ` + FROM executions + WHERE execution_pipeline_id = $1 AND execution_number = $2` + db := dbtx.GetAccessor(ctx, s.db) + + dst := new(execution) + if err := db.GetContext(ctx, dst, findQueryStmt, pipelineID, executionNum); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed to find execution") + } + return mapInternalToExecution(dst) +} + +// Create creates a new execution in the datastore. 
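
Create, implemented next, converts the API-level types.Execution through mapExecutionToInternal and fills in the generated ID from the RETURNING clause. A hedged sketch of a caller; the executionCreator interface, the trigger and event literals, and the helper name are illustrative only:

package example

import (
	"context"
	"time"

	"github.com/harness/gitness/types"
)

// executionCreator matches the Create method implemented below.
type executionCreator interface {
	Create(ctx context.Context, execution *types.Execution) error
}

// recordManualRun is a hypothetical caller; timestamps follow the UnixMilli
// convention used throughout this store.
func recordManualRun(ctx context.Context, executions executionCreator,
	pipelineID, repoID, principalID, number int64,
) (*types.Execution, error) {
	now := time.Now().UnixMilli()
	execution := &types.Execution{
		PipelineID: pipelineID,
		RepoID:     repoID,
		CreatedBy:  principalID,
		Trigger:    "manual", // illustrative value
		Event:      "manual", // illustrative value
		Number:     number,
		Params:     map[string]string{"source": "api"},
		Created:    now,
		Updated:    now,
	}
	// Create fills execution.ID from the RETURNING clause on success.
	if err := executions.Create(ctx, execution); err != nil {
		return nil, err
	}
	return execution, nil
}
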
+func (s *executionStore) Create(ctx context.Context, execution *types.Execution) error { + const executionInsertStmt = ` + INSERT INTO executions ( + execution_pipeline_id + ,execution_repo_id + ,execution_created_by + ,execution_trigger + ,execution_number + ,execution_parent + ,execution_status + ,execution_error + ,execution_event + ,execution_action + ,execution_link + ,execution_timestamp + ,execution_title + ,execution_message + ,execution_before + ,execution_after + ,execution_ref + ,execution_source_repo + ,execution_source + ,execution_target + ,execution_author + ,execution_author_name + ,execution_author_email + ,execution_author_avatar + ,execution_sender + ,execution_params + ,execution_cron + ,execution_deploy + ,execution_deploy_id + ,execution_debug + ,execution_started + ,execution_finished + ,execution_created + ,execution_updated + ,execution_version + ) VALUES ( + :execution_pipeline_id + ,:execution_repo_id + ,:execution_created_by + ,:execution_trigger + ,:execution_number + ,:execution_parent + ,:execution_status + ,:execution_error + ,:execution_event + ,:execution_action + ,:execution_link + ,:execution_timestamp + ,:execution_title + ,:execution_message + ,:execution_before + ,:execution_after + ,:execution_ref + ,:execution_source_repo + ,:execution_source + ,:execution_target + ,:execution_author + ,:execution_author_name + ,:execution_author_email + ,:execution_author_avatar + ,:execution_sender + ,:execution_params + ,:execution_cron + ,:execution_deploy + ,:execution_deploy_id + ,:execution_debug + ,:execution_started + ,:execution_finished + ,:execution_created + ,:execution_updated + ,:execution_version + ) RETURNING execution_id` + db := dbtx.GetAccessor(ctx, s.db) + + query, arg, err := db.BindNamed(executionInsertStmt, mapExecutionToInternal(execution)) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind execution object") + } + + if err = db.QueryRowContext(ctx, query, arg...).Scan(&execution.ID); err != nil { + return database.ProcessSQLErrorf(err, "Execution query failed") + } + + return nil +} + +// Update tries to update an execution in the datastore with optimistic locking. +func (s *executionStore) Update(ctx context.Context, e *types.Execution) error { + const executionUpdateStmt = ` + UPDATE executions + SET + execution_status = :execution_status + ,execution_error = :execution_error + ,execution_event = :execution_event + ,execution_started = :execution_started + ,execution_finished = :execution_finished + ,execution_updated = :execution_updated + ,execution_version = :execution_version + WHERE execution_id = :execution_id AND execution_version = :execution_version - 1` + updatedAt := time.Now() + stages := e.Stages + + execution := mapExecutionToInternal(e) + + execution.Version++ + execution.Updated = updatedAt.UnixMilli() + + db := dbtx.GetAccessor(ctx, s.db) + + query, arg, err := db.BindNamed(executionUpdateStmt, execution) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind execution object") + } + + result, err := db.ExecContext(ctx, query, arg...) 
+ if err != nil { + return database.ProcessSQLErrorf(err, "Failed to update execution") + } + + count, err := result.RowsAffected() + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to get number of updated rows") + } + + if count == 0 { + return gitness_store.ErrVersionConflict + } + + m, err := mapInternalToExecution(execution) + if err != nil { + return fmt.Errorf("Could not map execution object: %w", err) + } + *e = *m + e.Version = execution.Version + e.Updated = execution.Updated + e.Stages = stages // stages are not mapped in database. + return nil +} + +// List lists the executions for a given pipeline ID. +// It orders them in descending order of execution number. +func (s *executionStore) List( + ctx context.Context, + pipelineID int64, + pagination types.Pagination, +) ([]*types.Execution, error) { + stmt := database.Builder. + Select(executionColumns). + From("executions"). + Where("execution_pipeline_id = ?", fmt.Sprint(pipelineID)). + OrderBy("execution_number " + enum.OrderDesc.String()) + + stmt = stmt.Limit(database.Limit(pagination.Size)) + stmt = stmt.Offset(database.Offset(pagination.Page, pagination.Size)) + + sql, args, err := stmt.ToSql() + if err != nil { + return nil, errors.Wrap(err, "Failed to convert query to sql") + } + + db := dbtx.GetAccessor(ctx, s.db) + + dst := []*execution{} + if err = db.SelectContext(ctx, &dst, sql, args...); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed executing custom list query") + } + + return mapInternalToExecutionList(dst) +} + +// Count of executions in a pipeline, if pipelineID is 0 then return total number of executions. +func (s *executionStore) Count(ctx context.Context, pipelineID int64) (int64, error) { + stmt := database.Builder. + Select("count(*)"). + From("executions") + + if pipelineID > 0 { + stmt = stmt.Where("execution_pipeline_id = ?", pipelineID) + } + + sql, args, err := stmt.ToSql() + if err != nil { + return 0, errors.Wrap(err, "Failed to convert query to sql") + } + + db := dbtx.GetAccessor(ctx, s.db) + + var count int64 + err = db.QueryRowContext(ctx, sql, args...).Scan(&count) + if err != nil { + return 0, database.ProcessSQLErrorf(err, "Failed executing count query") + } + return count, nil +} + +// Delete deletes an execution given a pipeline ID and an execution number. 
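
Before the Delete helper that follows, note how List and Count above are typically combined by a handler to produce one page of results plus the total used for pagination metadata. A sketch under that assumption; the executionLister interface and helper are not part of this patch:

package example

import (
	"context"

	"github.com/harness/gitness/types"
)

// executionLister matches the List and Count methods implemented above.
type executionLister interface {
	List(ctx context.Context, pipelineID int64, pagination types.Pagination) ([]*types.Execution, error)
	Count(ctx context.Context, pipelineID int64) (int64, error)
}

// pagedExecutions is a hypothetical handler helper: it returns one page of
// executions plus the total count for pagination headers.
func pagedExecutions(ctx context.Context, executions executionLister,
	pipelineID int64, pagination types.Pagination,
) ([]*types.Execution, int64, error) {
	page, err := executions.List(ctx, pipelineID, pagination)
	if err != nil {
		return nil, 0, err
	}
	total, err := executions.Count(ctx, pipelineID)
	if err != nil {
		return nil, 0, err
	}
	return page, total, nil
}
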
+func (s *executionStore) Delete(ctx context.Context, pipelineID int64, executionNum int64) error { + const executionDeleteStmt = ` + DELETE FROM executions + WHERE execution_pipeline_id = $1 AND execution_number = $2` + + db := dbtx.GetAccessor(ctx, s.db) + + if _, err := db.ExecContext(ctx, executionDeleteStmt, pipelineID, executionNum); err != nil { + return database.ProcessSQLErrorf(err, "Could not delete execution") + } + + return nil +} diff --git a/internal/store/database/execution_map.go b/internal/store/database/execution_map.go new file mode 100644 index 0000000000..51104e4589 --- /dev/null +++ b/internal/store/database/execution_map.go @@ -0,0 +1,104 @@ +package database + +import ( + "github.com/harness/gitness/types" +) + +func mapInternalToExecution(in *execution) (*types.Execution, error) { + var params map[string]string + err := in.Params.Unmarshal(¶ms) + if err != nil { + return nil, err + } + return &types.Execution{ + ID: in.ID, + PipelineID: in.PipelineID, + CreatedBy: in.CreatedBy, + RepoID: in.RepoID, + Trigger: in.Trigger, + Number: in.Number, + Parent: in.Parent, + Status: in.Status, + Error: in.Error, + Event: in.Event, + Action: in.Action, + Link: in.Link, + Timestamp: in.Timestamp, + Title: in.Title, + Message: in.Message, + Before: in.Before, + After: in.After, + Ref: in.Ref, + Fork: in.Fork, + Source: in.Source, + Target: in.Target, + Author: in.Author, + AuthorName: in.AuthorName, + AuthorEmail: in.AuthorEmail, + AuthorAvatar: in.AuthorAvatar, + Sender: in.Sender, + Params: params, + Cron: in.Cron, + Deploy: in.Deploy, + DeployID: in.DeployID, + Debug: in.Debug, + Started: in.Started, + Finished: in.Finished, + Created: in.Created, + Updated: in.Updated, + Version: in.Version, + }, nil +} + +func mapExecutionToInternal(in *types.Execution) *execution { + return &execution{ + ID: in.ID, + PipelineID: in.PipelineID, + CreatedBy: in.CreatedBy, + RepoID: in.RepoID, + Trigger: in.Trigger, + Number: in.Number, + Parent: in.Parent, + Status: in.Status, + Error: in.Error, + Event: in.Event, + Action: in.Action, + Link: in.Link, + Timestamp: in.Timestamp, + Title: in.Title, + Message: in.Message, + Before: in.Before, + After: in.After, + Ref: in.Ref, + Fork: in.Fork, + Source: in.Source, + Target: in.Target, + Author: in.Author, + AuthorName: in.AuthorName, + AuthorEmail: in.AuthorEmail, + AuthorAvatar: in.AuthorAvatar, + Sender: in.Sender, + Params: EncodeToSQLXJSON(in.Params), + Cron: in.Cron, + Deploy: in.Deploy, + DeployID: in.DeployID, + Debug: in.Debug, + Started: in.Started, + Finished: in.Finished, + Created: in.Created, + Updated: in.Updated, + Version: in.Version, + } +} + +func mapInternalToExecutionList(in []*execution) ([]*types.Execution, error) { + executions := make([]*types.Execution, len(in)) + for i, k := range in { + e, err := mapInternalToExecution(k) + if err != nil { + return nil, err + } + executions[i] = e + } + return executions, nil +} diff --git a/internal/store/database/job.go b/internal/store/database/job.go new file mode 100644 index 0000000000..bf9f5e6366 --- /dev/null +++ b/internal/store/database/job.go @@ -0,0 +1,486 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package database + +import ( + "context" + "database/sql" + "errors" + "fmt" + "time" + + "github.com/harness/gitness/internal/store" + gitness_store "github.com/harness/gitness/store" + "github.com/harness/gitness/store/database" + "github.com/harness/gitness/store/database/dbtx" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/jmoiron/sqlx" +) + +var _ store.JobStore = (*JobStore)(nil) + +func NewJobStore(db *sqlx.DB) *JobStore { + return &JobStore{ + db: db, + } +} + +type JobStore struct { + db *sqlx.DB +} + +const ( + jobColumns = ` + job_uid + ,job_created + ,job_updated + ,job_type + ,job_priority + ,job_data + ,job_result + ,job_max_duration_seconds + ,job_max_retries + ,job_state + ,job_scheduled + ,job_total_executions + ,job_run_by + ,job_run_deadline + ,job_run_progress + ,job_last_executed + ,job_is_recurring + ,job_recurring_cron + ,job_consecutive_failures + ,job_last_failure_error + ,job_group_id` + + jobSelectBase = ` + SELECT` + jobColumns + ` + FROM jobs` +) + +// Find fetches a job by its unique identifier. +func (s *JobStore) Find(ctx context.Context, uid string) (*types.Job, error) { + const sqlQuery = jobSelectBase + ` + WHERE job_uid = $1` + + db := dbtx.GetAccessor(ctx, s.db) + + result := &types.Job{} + if err := db.GetContext(ctx, result, sqlQuery, uid); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed to find job by uid") + } + + return result, nil +} + +// DeleteByGroupID deletes all jobs for a group id +func (s *JobStore) DeleteByGroupID(ctx context.Context, groupId string) (int64, error) { + stmt := database.Builder. + Delete("jobs"). + Where("(job_group_id = ?)", groupId) + + sql, args, err := stmt.ToSql() + if err != nil { + return 0, fmt.Errorf("failed to convert delete by group id jobs query to sql: %w", err) + } + + db := dbtx.GetAccessor(ctx, s.db) + + result, err := db.ExecContext(ctx, sql, args...) + if err != nil { + return 0, database.ProcessSQLErrorf(err, "failed to execute delete jobs by group id query") + } + + n, err := result.RowsAffected() + if err != nil { + return 0, database.ProcessSQLErrorf(err, "failed to get number of deleted jobs in group") + } + + return n, nil +} + +// ListByGroupID fetches all jobs for a group id +func (s *JobStore) ListByGroupID(ctx context.Context, groupId string) ([]*types.Job, error) { + const sqlQuery = jobSelectBase + ` + WHERE job_group_id = $1` + + db := dbtx.GetAccessor(ctx, s.db) + + dst := make([]*types.Job, 0) + if err := db.SelectContext(ctx, &dst, sqlQuery, groupId); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed to find job by group id") + } + + return dst, nil +} + +// Create creates a new job. 
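
Create and Upsert below only persist job rows; the scheduler picks them up again through ListReady and NextScheduledTime near the end of this file. A sketch of how those two reads might be combined by a polling loop; the jobQueue interface and claimBatch helper are assumptions, and the state transition to running is omitted:

package example

import (
	"context"
	"time"

	"github.com/harness/gitness/types"
)

// jobQueue matches the ListReady and NextScheduledTime methods defined
// further down in this file.
type jobQueue interface {
	ListReady(ctx context.Context, now time.Time, limit int) ([]*types.Job, error)
	NextScheduledTime(ctx context.Context, now time.Time) (time.Time, error)
}

// claimBatch is a hypothetical scheduler step: it grabs up to limit runnable
// jobs and reports when the scheduler should wake up next. The real executor
// also flips job_state and sets the run deadline, which is omitted here.
func claimBatch(ctx context.Context, jobs jobQueue, limit int) ([]*types.Job, time.Time, error) {
	now := time.Now()
	ready, err := jobs.ListReady(ctx, now, limit)
	if err != nil {
		return nil, time.Time{}, err
	}
	next, err := jobs.NextScheduledTime(ctx, now)
	if err != nil {
		return nil, time.Time{}, err
	}
	return ready, next, nil
}
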
+func (s *JobStore) Create(ctx context.Context, job *types.Job) error { + const sqlQuery = ` + INSERT INTO jobs (` + jobColumns + ` + ) VALUES ( + :job_uid + ,:job_created + ,:job_updated + ,:job_type + ,:job_priority + ,:job_data + ,:job_result + ,:job_max_duration_seconds + ,:job_max_retries + ,:job_state + ,:job_scheduled + ,:job_total_executions + ,:job_run_by + ,:job_run_deadline + ,:job_run_progress + ,:job_last_executed + ,:job_is_recurring + ,:job_recurring_cron + ,:job_consecutive_failures + ,:job_last_failure_error + ,:job_group_id + )` + + db := dbtx.GetAccessor(ctx, s.db) + + query, arg, err := db.BindNamed(sqlQuery, job) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind job object") + } + + if _, err := db.ExecContext(ctx, query, arg...); err != nil { + return database.ProcessSQLErrorf(err, "Insert query failed") + } + + return nil +} + +// Upsert creates or updates a job. If the job didn't exist it will insert it in the database, +// otherwise it will update it but only if its definition has changed. +func (s *JobStore) Upsert(ctx context.Context, job *types.Job) error { + const sqlQuery = ` + INSERT INTO jobs (` + jobColumns + ` + ) VALUES ( + :job_uid + ,:job_created + ,:job_updated + ,:job_type + ,:job_priority + ,:job_data + ,:job_result + ,:job_max_duration_seconds + ,:job_max_retries + ,:job_state + ,:job_scheduled + ,:job_total_executions + ,:job_run_by + ,:job_run_deadline + ,:job_run_progress + ,:job_last_executed + ,:job_is_recurring + ,:job_recurring_cron + ,:job_consecutive_failures + ,:job_last_failure_error + ,:job_group_id + ) + ON CONFLICT (job_uid) DO + UPDATE SET + job_updated = :job_updated + ,job_type = :job_type + ,job_priority = :job_priority + ,job_data = :job_data + ,job_result = :job_result + ,job_max_duration_seconds = :job_max_duration_seconds + ,job_max_retries = :job_max_retries + ,job_state = :job_state + ,job_scheduled = :job_scheduled + ,job_is_recurring = :job_is_recurring + ,job_recurring_cron = :job_recurring_cron + WHERE + jobs.job_type <> :job_type OR + jobs.job_priority <> :job_priority OR + jobs.job_data <> :job_data OR + jobs.job_max_duration_seconds <> :job_max_duration_seconds OR + jobs.job_max_retries <> :job_max_retries OR + jobs.job_is_recurring <> :job_is_recurring OR + jobs.job_recurring_cron <> :job_recurring_cron` + + db := dbtx.GetAccessor(ctx, s.db) + + query, arg, err := db.BindNamed(sqlQuery, job) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind job object") + } + + if _, err := db.ExecContext(ctx, query, arg...); err != nil { + return database.ProcessSQLErrorf(err, "Upsert query failed") + } + + return nil +} + +// UpdateDefinition is used to update a job definition. +func (s *JobStore) UpdateDefinition(ctx context.Context, job *types.Job) error { + const sqlQuery = ` + UPDATE jobs + SET + job_updated = :job_updated + ,job_type = :job_type + ,job_priority = :job_priority + ,job_data = :job_data + ,job_result = :job_result + ,job_max_duration_seconds = :job_max_duration_seconds + ,job_max_retries = :job_max_retries + ,job_state = :job_state + ,job_scheduled = :job_scheduled + ,job_is_recurring = :job_is_recurring + ,job_recurring_cron = :job_recurring_cron + ,job_group_id = :job_group_id + WHERE job_uid = :job_uid` + + db := dbtx.GetAccessor(ctx, s.db) + + query, arg, err := db.BindNamed(sqlQuery, job) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind job object for update") + } + + result, err := db.ExecContext(ctx, query, arg...) 
+ if err != nil { + return database.ProcessSQLErrorf(err, "Failed to update job definition") + } + + count, err := result.RowsAffected() + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to get number of updated rows") + } + + if count == 0 { + return gitness_store.ErrResourceNotFound + } + + return nil +} + +// UpdateExecution is used to update a job before and after execution. +func (s *JobStore) UpdateExecution(ctx context.Context, job *types.Job) error { + const sqlQuery = ` + UPDATE jobs + SET + job_updated = :job_updated + ,job_result = :job_result + ,job_state = :job_state + ,job_scheduled = :job_scheduled + ,job_total_executions = :job_total_executions + ,job_run_by = :job_run_by + ,job_run_deadline = :job_run_deadline + ,job_last_executed = :job_last_executed + ,job_consecutive_failures = :job_consecutive_failures + ,job_last_failure_error = :job_last_failure_error + WHERE job_uid = :job_uid` + + db := dbtx.GetAccessor(ctx, s.db) + + query, arg, err := db.BindNamed(sqlQuery, job) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind job object for update") + } + + result, err := db.ExecContext(ctx, query, arg...) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to update job execution") + } + + count, err := result.RowsAffected() + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to get number of updated rows") + } + + if count == 0 { + return gitness_store.ErrResourceNotFound + } + + return nil +} + +func (s *JobStore) UpdateProgress(ctx context.Context, job *types.Job) error { + const sqlQuery = ` + UPDATE jobs + SET + job_updated = :job_updated + ,job_result = :job_result + ,job_run_progress = :job_run_progress + WHERE job_uid = :job_uid AND job_state = 'running'` + + db := dbtx.GetAccessor(ctx, s.db) + + query, arg, err := db.BindNamed(sqlQuery, job) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind job object for update") + } + + result, err := db.ExecContext(ctx, query, arg...) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to update job progress") + } + + count, err := result.RowsAffected() + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to get number of updated rows") + } + + if count == 0 { + return gitness_store.ErrResourceNotFound + } + + return nil +} + +// CountRunning returns number of jobs that are currently being run. +func (s *JobStore) CountRunning(ctx context.Context) (int, error) { + stmt := database.Builder. + Select("count(*)"). + From("jobs"). + Where("job_state = ?", enum.JobStateRunning) + + sql, args, err := stmt.ToSql() + if err != nil { + return 0, fmt.Errorf("failed to convert count running jobs query to sql: %w", err) + } + + db := dbtx.GetAccessor(ctx, s.db) + + var count int64 + err = db.QueryRowContext(ctx, sql, args...).Scan(&count) + if err != nil { + return 0, database.ProcessSQLErrorf(err, "failed executing count running jobs query") + } + + return int(count), nil +} + +// ListReady returns a list of jobs that are ready for execution: +// The jobs with state="scheduled" and scheduled time in the past. +func (s *JobStore) ListReady(ctx context.Context, now time.Time, limit int) ([]*types.Job, error) { + stmt := database.Builder. + Select(jobColumns). + From("jobs"). + Where("job_state = ?", enum.JobStateScheduled). + Where("job_scheduled <= ?", now.UnixMilli()). + OrderBy("job_priority desc, job_scheduled asc, job_uid asc"). 
+ Limit(uint64(limit)) + + sql, args, err := stmt.ToSql() + if err != nil { + return nil, fmt.Errorf("failed to convert list scheduled jobs query to sql: %w", err) + } + + result := make([]*types.Job, 0) + + db := dbtx.GetAccessor(ctx, s.db) + + if err = db.SelectContext(ctx, &result, sql, args...); err != nil { + return nil, database.ProcessSQLErrorf(err, "failed to execute list scheduled jobs query") + } + + return result, nil +} + +// ListDeadlineExceeded returns a list of jobs that have exceeded their execution deadline. +func (s *JobStore) ListDeadlineExceeded(ctx context.Context, now time.Time) ([]*types.Job, error) { + stmt := database.Builder. + Select(jobColumns). + From("jobs"). + Where("job_state = ?", enum.JobStateRunning). + Where("job_run_deadline < ?", now.UnixMilli()). + OrderBy("job_run_deadline asc") + + sql, args, err := stmt.ToSql() + if err != nil { + return nil, fmt.Errorf("failed to convert list overdue jobs query to sql: %w", err) + } + + result := make([]*types.Job, 0) + + db := dbtx.GetAccessor(ctx, s.db) + + if err = db.SelectContext(ctx, &result, sql, args...); err != nil { + return nil, database.ProcessSQLErrorf(err, "failed to execute list overdue jobs query") + } + + return result, nil +} + +// NextScheduledTime returns a scheduled time of the next ready job or zero time if no such job exists. +func (s *JobStore) NextScheduledTime(ctx context.Context, now time.Time) (time.Time, error) { + stmt := database.Builder. + Select("job_scheduled"). + From("jobs"). + Where("job_state = ?", enum.JobStateScheduled). + Where("job_scheduled > ?", now.UnixMilli()). + OrderBy("job_scheduled asc"). + Limit(1) + + query, args, err := stmt.ToSql() + if err != nil { + return time.Time{}, fmt.Errorf("failed to convert next scheduled time query to sql: %w", err) + } + + db := dbtx.GetAccessor(ctx, s.db) + + var result int64 + + err = db.QueryRowContext(ctx, query, args...).Scan(&result) + if errors.Is(err, sql.ErrNoRows) { + return time.Time{}, nil + } + if err != nil { + return time.Time{}, database.ProcessSQLErrorf(err, "failed to execute next scheduled time query") + } + + return time.UnixMilli(result), nil +} + +// DeleteOld removes non-recurring jobs that have finished execution or have failed. +func (s *JobStore) DeleteOld(ctx context.Context, olderThan time.Time) (int64, error) { + stmt := database.Builder. + Delete("jobs"). + Where("(job_state = ? OR job_state = ? OR job_state = ?)", + enum.JobStateFinished, enum.JobStateFailed, enum.JobStateCanceled). + Where("job_is_recurring = false"). + Where("job_last_executed < ?", olderThan.UnixMilli()) + + sql, args, err := stmt.ToSql() + if err != nil { + return 0, fmt.Errorf("failed to convert delete done jobs query to sql: %w", err) + } + + db := dbtx.GetAccessor(ctx, s.db) + + result, err := db.ExecContext(ctx, sql, args...) + if err != nil { + return 0, database.ProcessSQLErrorf(err, "failed to execute delete done jobs query") + } + + n, err := result.RowsAffected() + if err != nil { + return 0, database.ProcessSQLErrorf(err, "failed to get number of deleted jobs") + } + + return n, nil +} diff --git a/internal/store/database/membership.go b/internal/store/database/membership.go new file mode 100644 index 0000000000..2b34f91e71 --- /dev/null +++ b/internal/store/database/membership.go @@ -0,0 +1,483 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package database + +import ( + "context" + "fmt" + "strings" + "time" + + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/store/database" + "github.com/harness/gitness/store/database/dbtx" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/Masterminds/squirrel" + "github.com/jmoiron/sqlx" +) + +var _ store.MembershipStore = (*MembershipStore)(nil) + +// NewMembershipStore returns a new MembershipStore. +func NewMembershipStore( + db *sqlx.DB, + pCache store.PrincipalInfoCache, + spacePathStore store.SpacePathStore, +) *MembershipStore { + return &MembershipStore{ + db: db, + pCache: pCache, + spacePathStore: spacePathStore, + } +} + +// MembershipStore implements store.MembershipStore backed by a relational database. +type MembershipStore struct { + db *sqlx.DB + pCache store.PrincipalInfoCache + spacePathStore store.SpacePathStore +} + +type membership struct { + SpaceID int64 `db:"membership_space_id"` + PrincipalID int64 `db:"membership_principal_id"` + + CreatedBy int64 `db:"membership_created_by"` + Created int64 `db:"membership_created"` + Updated int64 `db:"membership_updated"` + + Role enum.MembershipRole `db:"membership_role"` +} + +type membershipPrincipal struct { + membership + principalInfo +} + +type membershipSpace struct { + membership + space +} + +const ( + membershipColumns = ` + membership_space_id + ,membership_principal_id + ,membership_created_by + ,membership_created + ,membership_updated + ,membership_role` + + membershipSelectBase = ` + SELECT` + membershipColumns + ` + FROM memberships` +) + +// Find finds the membership by space id and principal id. +func (s *MembershipStore) Find(ctx context.Context, key types.MembershipKey) (*types.Membership, error) { + const sqlQuery = membershipSelectBase + ` + WHERE membership_space_id = $1 AND membership_principal_id = $2` + + db := dbtx.GetAccessor(ctx, s.db) + + dst := &membership{} + if err := db.GetContext(ctx, dst, sqlQuery, key.SpaceID, key.PrincipalID); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed to find membership") + } + + result := mapToMembership(dst) + + return &result, nil +} + +func (s *MembershipStore) FindUser(ctx context.Context, key types.MembershipKey) (*types.MembershipUser, error) { + m, err := s.Find(ctx, key) + if err != nil { + return nil, err + } + + result, err := s.addPrincipalInfos(ctx, m) + if err != nil { + return nil, err + } + + return &result, nil +} + +// Create creates a new membership. 
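
Create, implemented next, persists a membership keyed by space and principal. A hedged sketch of a service-level caller; the membershipCreator interface and addMember helper are illustrative, and the concrete enum.MembershipRole constants are defined outside this patch, so the role is passed through:

package example

import (
	"context"
	"time"

	"github.com/harness/gitness/types"
	"github.com/harness/gitness/types/enum"
)

// membershipCreator matches the Create method implemented below.
type membershipCreator interface {
	Create(ctx context.Context, membership *types.Membership) error
}

// addMember is a hypothetical service helper that grants a principal the
// given role in a space, using the UnixMilli timestamp convention of this store.
func addMember(ctx context.Context, memberships membershipCreator,
	spaceID, principalID, addedBy int64, role enum.MembershipRole,
) (*types.Membership, error) {
	now := time.Now().UnixMilli()
	membership := &types.Membership{
		MembershipKey: types.MembershipKey{
			SpaceID:     spaceID,
			PrincipalID: principalID,
		},
		CreatedBy: addedBy,
		Created:   now,
		Updated:   now,
		Role:      role,
	}
	if err := memberships.Create(ctx, membership); err != nil {
		return nil, err
	}
	return membership, nil
}
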
+func (s *MembershipStore) Create(ctx context.Context, membership *types.Membership) error { + const sqlQuery = ` + INSERT INTO memberships ( + membership_space_id + ,membership_principal_id + ,membership_created_by + ,membership_created + ,membership_updated + ,membership_role + ) values ( + :membership_space_id + ,:membership_principal_id + ,:membership_created_by + ,:membership_created + ,:membership_updated + ,:membership_role + )` + + db := dbtx.GetAccessor(ctx, s.db) + + query, arg, err := db.BindNamed(sqlQuery, mapToInternalMembership(membership)) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind membership object") + } + + if _, err = db.ExecContext(ctx, query, arg...); err != nil { + return database.ProcessSQLErrorf(err, "Failed to insert membership") + } + + return nil +} + +// Update updates the role of a member of a space. +func (s *MembershipStore) Update(ctx context.Context, membership *types.Membership) error { + const sqlQuery = ` + UPDATE memberships + SET + membership_updated = :membership_updated + ,membership_role = :membership_role + WHERE membership_space_id = :membership_space_id AND + membership_principal_id = :membership_principal_id` + + db := dbtx.GetAccessor(ctx, s.db) + + dbMembership := mapToInternalMembership(membership) + dbMembership.Updated = time.Now().UnixMilli() + + query, arg, err := db.BindNamed(sqlQuery, dbMembership) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind membership object") + } + + _, err = db.ExecContext(ctx, query, arg...) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to update membership role") + } + + membership.Updated = dbMembership.Updated + + return nil +} + +// Delete deletes the membership. +func (s *MembershipStore) Delete(ctx context.Context, key types.MembershipKey) error { + const sqlQuery = ` + DELETE from memberships + WHERE membership_space_id = $1 AND + membership_principal_id = $2` + + db := dbtx.GetAccessor(ctx, s.db) + + if _, err := db.ExecContext(ctx, sqlQuery, key.SpaceID, key.PrincipalID); err != nil { + return database.ProcessSQLErrorf(err, "delete membership query failed") + } + return nil +} + +// CountUsers returns a number of users memberships that matches the provided filter. +func (s *MembershipStore) CountUsers(ctx context.Context, + spaceID int64, + filter types.MembershipUserFilter, +) (int64, error) { + stmt := database.Builder. + Select("count(*)"). + From("memberships"). + InnerJoin("principals ON membership_principal_id = principal_id"). + Where("membership_space_id = ?", spaceID) + + stmt = applyMembershipUserFilter(stmt, filter) + + sql, args, err := stmt.ToSql() + if err != nil { + return 0, fmt.Errorf("failed to convert membership users count query to sql: %w", err) + } + + db := dbtx.GetAccessor(ctx, s.db) + + var count int64 + err = db.QueryRowContext(ctx, sql, args...).Scan(&count) + if err != nil { + return 0, database.ProcessSQLErrorf(err, "Failed executing membership users count query") + } + + return count, nil +} + +// ListUsers returns a list of memberships for a space or a user. +func (s *MembershipStore) ListUsers(ctx context.Context, + spaceID int64, + filter types.MembershipUserFilter, +) ([]types.MembershipUser, error) { + const columns = membershipColumns + "," + principalInfoCommonColumns + stmt := database.Builder. + Select(columns). + From("memberships"). + InnerJoin("principals ON membership_principal_id = principal_id"). 
+ Where("membership_space_id = ?", spaceID) + + stmt = applyMembershipUserFilter(stmt, filter) + stmt = stmt.Limit(database.Limit(filter.Size)) + stmt = stmt.Offset(database.Offset(filter.Page, filter.Size)) + + order := filter.Order + if order == enum.OrderDefault { + order = enum.OrderAsc + } + + switch filter.Sort { + case enum.MembershipUserSortName: + stmt = stmt.OrderBy("principal_display_name " + order.String()) + case enum.MembershipUserSortCreated: + stmt = stmt.OrderBy("membership_created " + order.String()) + } + + sql, args, err := stmt.ToSql() + if err != nil { + return nil, fmt.Errorf("failed to convert membership users list query to sql: %w", err) + } + + dst := make([]*membershipPrincipal, 0) + + db := dbtx.GetAccessor(ctx, s.db) + + if err = db.SelectContext(ctx, &dst, sql, args...); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed executing membership users list query") + } + + result, err := s.mapToMembershipUsers(ctx, dst) + if err != nil { + return nil, fmt.Errorf("failed to map memberships users to external type: %w", err) + } + + return result, nil +} + +func applyMembershipUserFilter( + stmt squirrel.SelectBuilder, + opts types.MembershipUserFilter, +) squirrel.SelectBuilder { + if opts.Query != "" { + searchTerm := "%%" + strings.ToLower(opts.Query) + "%%" + stmt = stmt.Where("LOWER(principal_display_name) LIKE ?", searchTerm) + } + + return stmt +} + +func (s *MembershipStore) CountSpaces(ctx context.Context, + userID int64, + filter types.MembershipSpaceFilter, +) (int64, error) { + stmt := database.Builder. + Select("count(*)"). + From("memberships"). + InnerJoin("spaces ON spaces.space_id = membership_space_id"). + Where("membership_principal_id = ?", userID) + + stmt = applyMembershipSpaceFilter(stmt, filter) + + sql, args, err := stmt.ToSql() + if err != nil { + return 0, fmt.Errorf("failed to convert membership spaces count query to sql: %w", err) + } + + db := dbtx.GetAccessor(ctx, s.db) + + var count int64 + err = db.QueryRowContext(ctx, sql, args...).Scan(&count) + if err != nil { + return 0, database.ProcessSQLErrorf(err, "Failed executing membership spaces count query") + } + + return count, nil +} + +// ListSpaces returns a list of spaces in which the provided user is a member. +func (s *MembershipStore) ListSpaces(ctx context.Context, + userID int64, + filter types.MembershipSpaceFilter, +) ([]types.MembershipSpace, error) { + const columns = membershipColumns + "," + spaceColumns + stmt := database.Builder. + Select(columns). + From("memberships"). + InnerJoin("spaces ON spaces.space_id = membership_space_id"). 
+ Where("membership_principal_id = ?", userID) + + stmt = applyMembershipSpaceFilter(stmt, filter) + stmt = stmt.Limit(database.Limit(filter.Size)) + stmt = stmt.Offset(database.Offset(filter.Page, filter.Size)) + + order := filter.Order + if order == enum.OrderDefault { + order = enum.OrderAsc + } + + switch filter.Sort { + case enum.MembershipSpaceSortUID: + stmt = stmt.OrderBy("space_uid " + order.String()) + case enum.MembershipSpaceSortCreated: + stmt = stmt.OrderBy("membership_created " + order.String()) + } + + sql, args, err := stmt.ToSql() + if err != nil { + return nil, fmt.Errorf("failed to convert membership spaces list query to sql: %w", err) + } + + db := dbtx.GetAccessor(ctx, s.db) + + dst := make([]*membershipSpace, 0) + if err = db.SelectContext(ctx, &dst, sql, args...); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed executing custom list query") + } + + result, err := s.mapToMembershipSpaces(ctx, dst) + if err != nil { + return nil, fmt.Errorf("failed to map memberships spaces to external type: %w", err) + } + + return result, nil +} + +func applyMembershipSpaceFilter( + stmt squirrel.SelectBuilder, + opts types.MembershipSpaceFilter, +) squirrel.SelectBuilder { + if opts.Query != "" { + searchTerm := "%%" + strings.ToLower(opts.Query) + "%%" + stmt = stmt.Where("LOWER(space_uid) LIKE ?", searchTerm) + } + + return stmt +} + +func mapToMembership(m *membership) types.Membership { + return types.Membership{ + MembershipKey: types.MembershipKey{ + SpaceID: m.SpaceID, + PrincipalID: m.PrincipalID, + }, + CreatedBy: m.CreatedBy, + Created: m.Created, + Updated: m.Updated, + Role: m.Role, + } +} + +func mapToInternalMembership(m *types.Membership) membership { + return membership{ + SpaceID: m.SpaceID, + PrincipalID: m.PrincipalID, + CreatedBy: m.CreatedBy, + Created: m.Created, + Updated: m.Updated, + Role: m.Role, + } +} + +func (s *MembershipStore) addPrincipalInfos(ctx context.Context, m *types.Membership) (types.MembershipUser, error) { + var result types.MembershipUser + + // pull principal infos from cache + infoMap, err := s.pCache.Map(ctx, []int64{m.CreatedBy, m.PrincipalID}) + if err != nil { + return result, fmt.Errorf("failed to load membership principal infos: %w", err) + } + + if user, ok := infoMap[m.PrincipalID]; ok { + result.Principal = *user + } else { + return result, fmt.Errorf("failed to find membership principal info: %w", err) + } + + if addedBy, ok := infoMap[m.CreatedBy]; ok { + result.AddedBy = *addedBy + } + + result.Membership = *m + + return result, nil +} + +func (s *MembershipStore) mapToMembershipUsers(ctx context.Context, + ms []*membershipPrincipal, +) ([]types.MembershipUser, error) { + // collect all principal IDs + ids := make([]int64, 0, len(ms)) + for _, m := range ms { + ids = append(ids, m.membership.CreatedBy) + } + + // pull principal infos from cache + infoMap, err := s.pCache.Map(ctx, ids) + if err != nil { + return nil, fmt.Errorf("failed to load membership principal infos: %w", err) + } + + // attach the principal infos back to the slice items + res := make([]types.MembershipUser, len(ms)) + for i, m := range ms { + res[i].Membership = mapToMembership(&m.membership) + res[i].Principal = mapToPrincipalInfo(&m.principalInfo) + if addedBy, ok := infoMap[m.membership.CreatedBy]; ok { + res[i].AddedBy = *addedBy + } + } + + return res, nil +} + +func (s *MembershipStore) mapToMembershipSpaces(ctx context.Context, + ms []*membershipSpace, +) ([]types.MembershipSpace, error) { + // collect all principal IDs + ids := 
make([]int64, 0, len(ms)) + for _, m := range ms { + ids = append(ids, m.membership.CreatedBy) + } + + // pull principal infos from cache + infoMap, err := s.pCache.Map(ctx, ids) + if err != nil { + return nil, fmt.Errorf("failed to load membership principal infos: %w", err) + } + + // attach the principal infos back to the slice items + res := make([]types.MembershipSpace, len(ms)) + for i, m := range ms { + res[i].Membership = mapToMembership(&m.membership) + space, err := mapToSpace(ctx, s.spacePathStore, &m.space) + if err != nil { + return nil, fmt.Errorf("faild to map space %d: %w", m.space.ID, err) + } + res[i].Space = *space + if addedBy, ok := infoMap[m.membership.CreatedBy]; ok { + res[i].AddedBy = *addedBy + } + } + + return res, nil +} diff --git a/internal/store/database/migrate/migrate.go b/internal/store/database/migrate/migrate.go new file mode 100644 index 0000000000..284a7520c4 --- /dev/null +++ b/internal/store/database/migrate/migrate.go @@ -0,0 +1,135 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package migrate + +import ( + "context" + "database/sql" + "embed" + "fmt" + "io/fs" + + "github.com/jmoiron/sqlx" + "github.com/maragudk/migrate" + "github.com/rs/zerolog/log" +) + +//go:embed postgres/*.sql +var postgres embed.FS + +//go:embed sqlite/*.sql +var sqlite embed.FS + +const ( + tableName = "migrations" + + postgresDriverName = "postgres" + postgresSourceDir = "postgres" + + sqliteDriverName = "sqlite3" + sqliteSourceDir = "sqlite" +) + +// Migrate performs the database migration. +func Migrate(ctx context.Context, db *sqlx.DB) error { + opts, err := getMigrator(db) + if err != nil { + return fmt.Errorf("failed to get migrator: %w", err) + } + return migrate.New(opts).MigrateUp(ctx) +} + +// To performs the database migration to the specific version. +func To(ctx context.Context, db *sqlx.DB, version string) error { + opts, err := getMigrator(db) + if err != nil { + return fmt.Errorf("failed to get migrator: %w", err) + } + return migrate.New(opts).MigrateTo(ctx, version) +} + +// Current returns the current version ID (the latest migration applied) of the database. +func Current(ctx context.Context, db *sqlx.DB) (string, error) { + var ( + query string + migrationTableCount int + ) + + switch db.DriverName() { + case sqliteDriverName: + query = ` + SELECT count(*) + FROM sqlite_master + WHERE name = ? and type = 'table'` + case postgresDriverName: + query = ` + SELECT count(*) + FROM information_schema.tables + WHERE table_name = ? 
and table_schema = 'public'` + default: + return "", fmt.Errorf("unsupported driver '%s'", db.DriverName()) + } + + if err := db.QueryRowContext(ctx, query, tableName).Scan(&migrationTableCount); err != nil { + return "", fmt.Errorf("failed to check migration table existence: %w", err) + } + + if migrationTableCount == 0 { + return "", nil + } + + var version string + + query = "select version from " + tableName + " limit 1" + if err := db.QueryRowContext(ctx, query).Scan(&version); err != nil { + return "", fmt.Errorf("failed to read current DB version from migration table: %w", err) + } + + return version, nil +} + +func getMigrator(db *sqlx.DB) (migrate.Options, error) { + before := func(_ context.Context, _ *sql.Tx, version string) error { + log.Trace().Str("version", version).Msg("migration started") + return nil + } + + after := func(_ context.Context, _ *sql.Tx, version string) error { + log.Trace().Str("version", version).Msg("migration complete") + return nil + } + + opts := migrate.Options{ + After: after, + Before: before, + DB: db.DB, + FS: sqlite, + Table: tableName, + } + + switch db.DriverName() { + case sqliteDriverName: + folder, _ := fs.Sub(sqlite, sqliteSourceDir) + opts.FS = folder + case postgresDriverName: + folder, _ := fs.Sub(postgres, postgresSourceDir) + opts.FS = folder + + default: + return migrate.Options{}, fmt.Errorf("unsupported driver '%s'", db.DriverName()) + } + + return opts, nil +} diff --git a/internal/store/database/migrate/postgres/0000_create_extension_btree.up.sql b/internal/store/database/migrate/postgres/0000_create_extension_btree.up.sql new file mode 100644 index 0000000000..ee2f06101a --- /dev/null +++ b/internal/store/database/migrate/postgres/0000_create_extension_btree.up.sql @@ -0,0 +1 @@ +CREATE EXTENSION IF NOT EXISTS btree_gin; diff --git a/internal/store/database/migrate/postgres/0000_create_extension_citext.up.sql b/internal/store/database/migrate/postgres/0000_create_extension_citext.up.sql new file mode 100644 index 0000000000..1f9a5441f9 --- /dev/null +++ b/internal/store/database/migrate/postgres/0000_create_extension_citext.up.sql @@ -0,0 +1 @@ +CREATE EXTENSION IF NOT EXISTS citext; \ No newline at end of file diff --git a/internal/store/database/migrate/postgres/0000_create_extension_trgm.up.sql b/internal/store/database/migrate/postgres/0000_create_extension_trgm.up.sql new file mode 100644 index 0000000000..588aec00f4 --- /dev/null +++ b/internal/store/database/migrate/postgres/0000_create_extension_trgm.up.sql @@ -0,0 +1 @@ +CREATE EXTENSION IF NOT EXISTS pg_trgm; diff --git a/internal/store/database/migrate/postgres/0001_create_table_a_principals.up.sql b/internal/store/database/migrate/postgres/0001_create_table_a_principals.up.sql new file mode 100644 index 0000000000..1cad6c0020 --- /dev/null +++ b/internal/store/database/migrate/postgres/0001_create_table_a_principals.up.sql @@ -0,0 +1,20 @@ +CREATE TABLE principals ( +principal_id SERIAL PRIMARY KEY +,principal_uid TEXT +,principal_uid_unique TEXT +,principal_email TEXT +,principal_type TEXT +,principal_display_name TEXT +,principal_admin BOOLEAN +,principal_blocked BOOLEAN +,principal_salt TEXT +,principal_created BIGINT +,principal_updated BIGINT + +,principal_user_password TEXT + +,principal_sa_parent_type TEXT +,principal_sa_parent_id INTEGER + +,UNIQUE(principal_uid_unique) +); diff --git a/internal/store/database/migrate/postgres/0001_create_table_b_spaces.up.sql b/internal/store/database/migrate/postgres/0001_create_table_b_spaces.up.sql new file mode 100644 
index 0000000000..3e4e161d0d --- /dev/null +++ b/internal/store/database/migrate/postgres/0001_create_table_b_spaces.up.sql @@ -0,0 +1,16 @@ +CREATE TABLE spaces ( + space_id SERIAL PRIMARY KEY +,space_version INTEGER NOT NULL DEFAULT 0 +,space_parent_id INTEGER DEFAULT NULL +,space_uid TEXT NOT NULL +,space_description TEXT +,space_is_public BOOLEAN NOT NULL +,space_created_by INTEGER NOT NULL +,space_created BIGINT NOT NULL +,space_updated BIGINT NOT NULL + +,CONSTRAINT fk_space_parent_id FOREIGN KEY (space_parent_id) + REFERENCES spaces (space_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +); \ No newline at end of file diff --git a/internal/store/database/migrate/postgres/0001_create_table_c_repositories.up.sql b/internal/store/database/migrate/postgres/0001_create_table_c_repositories.up.sql new file mode 100644 index 0000000000..4237854b64 --- /dev/null +++ b/internal/store/database/migrate/postgres/0001_create_table_c_repositories.up.sql @@ -0,0 +1,28 @@ +CREATE TABLE repositories ( + repo_id SERIAL PRIMARY KEY +,repo_version INTEGER NOT NULL DEFAULT 0 +,repo_parent_id INTEGER NOT NULL +,repo_uid TEXT NOT NULL +,repo_description TEXT +,repo_is_public BOOLEAN NOT NULL +,repo_created_by INTEGER NOT NULL +,repo_created BIGINT NOT NULL +,repo_updated BIGINT NOT NULL +,repo_git_uid TEXT NOT NULL +,repo_default_branch TEXT NOT NULL +,repo_fork_id INTEGER +,repo_pullreq_seq INTEGER NOT NULL +,repo_num_forks INTEGER NOT NULL +,repo_num_pulls INTEGER NOT NULL +,repo_num_closed_pulls INTEGER NOT NULL +,repo_num_open_pulls INTEGER NOT NULL +,repo_num_merged_pulls INTEGER NOT NULL + +,UNIQUE(repo_git_uid) + +,CONSTRAINT fk_repo_parent_id FOREIGN KEY (repo_parent_id) + REFERENCES spaces (space_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +); + diff --git a/internal/store/database/migrate/postgres/0001_create_table_d_paths.up.sql b/internal/store/database/migrate/postgres/0001_create_table_d_paths.up.sql new file mode 100644 index 0000000000..fb340a3944 --- /dev/null +++ b/internal/store/database/migrate/postgres/0001_create_table_d_paths.up.sql @@ -0,0 +1,26 @@ +CREATE TABLE paths ( + path_id SERIAL PRIMARY KEY +,path_version INTEGER NOT NULL DEFAULT 0 +,path_value TEXT NOT NULL +,path_value_unique TEXT NOT NULL +,path_is_primary BOOLEAN DEFAULT NULL +,path_repo_id INTEGER +,path_space_id INTEGER +,path_created_by INTEGER NOT NULL +,path_created BIGINT NOT NULL +,path_updated BIGINT NOT NULL +,UNIQUE(path_value_unique) + +,CONSTRAINT fk_path_created_by FOREIGN KEY (path_created_by) + REFERENCES principals (principal_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION +,CONSTRAINT fk_path_space_id FOREIGN KEY (path_space_id) + REFERENCES spaces (space_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +,CONSTRAINT fk_path_repo_id FOREIGN KEY (path_repo_id) + REFERENCES repositories (repo_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +); \ No newline at end of file diff --git a/internal/store/database/migrate/postgres/0001_create_table_e_tokens.up.sql b/internal/store/database/migrate/postgres/0001_create_table_e_tokens.up.sql new file mode 100644 index 0000000000..438a174097 --- /dev/null +++ b/internal/store/database/migrate/postgres/0001_create_table_e_tokens.up.sql @@ -0,0 +1,16 @@ +CREATE TABLE tokens ( + token_id SERIAL PRIMARY KEY +,token_type TEXT +,token_uid TEXT +,token_principal_id INTEGER +,token_expires_at BIGINT +,token_grants BIGINT +,token_issued_at BIGINT +,token_created_by INTEGER +,UNIQUE(token_principal_id, token_uid) + 
+,CONSTRAINT fk_token_principal_id FOREIGN KEY (token_principal_id) + REFERENCES principals (principal_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +); diff --git a/internal/store/database/migrate/postgres/0002_create_index_paths_repo_id_is_primary.up.sql b/internal/store/database/migrate/postgres/0002_create_index_paths_repo_id_is_primary.up.sql new file mode 100644 index 0000000000..8112642f67 --- /dev/null +++ b/internal/store/database/migrate/postgres/0002_create_index_paths_repo_id_is_primary.up.sql @@ -0,0 +1,2 @@ +CREATE UNIQUE INDEX paths_repo_id_is_primary +ON paths(path_repo_id, path_is_primary); diff --git a/internal/store/database/migrate/postgres/0002_create_index_paths_space_id_is_primary.up.sql b/internal/store/database/migrate/postgres/0002_create_index_paths_space_id_is_primary.up.sql new file mode 100644 index 0000000000..775afb1cb6 --- /dev/null +++ b/internal/store/database/migrate/postgres/0002_create_index_paths_space_id_is_primary.up.sql @@ -0,0 +1,2 @@ +CREATE UNIQUE INDEX paths_space_id_is_primary +ON paths(path_space_id, path_is_primary); diff --git a/internal/store/database/migrate/postgres/0002_create_index_principals_lower_email.up.sql b/internal/store/database/migrate/postgres/0002_create_index_principals_lower_email.up.sql new file mode 100644 index 0000000000..4ad7abeb9f --- /dev/null +++ b/internal/store/database/migrate/postgres/0002_create_index_principals_lower_email.up.sql @@ -0,0 +1,2 @@ +CREATE UNIQUE INDEX principals_lower_email +ON principals(LOWER(principal_email)); diff --git a/internal/store/database/migrate/postgres/0002_create_index_principals_sa_parent_id_sa_parent_type.up.sql b/internal/store/database/migrate/postgres/0002_create_index_principals_sa_parent_id_sa_parent_type.up.sql new file mode 100644 index 0000000000..df7d7bfcc5 --- /dev/null +++ b/internal/store/database/migrate/postgres/0002_create_index_principals_sa_parent_id_sa_parent_type.up.sql @@ -0,0 +1,2 @@ +CREATE INDEX principals_sa_parent_id_sa_parent_type +ON principals(principal_sa_parent_id, principal_sa_parent_type); diff --git a/internal/store/database/migrate/postgres/0002_create_index_repositories_parent_id.up.sql b/internal/store/database/migrate/postgres/0002_create_index_repositories_parent_id.up.sql new file mode 100644 index 0000000000..be95956a2a --- /dev/null +++ b/internal/store/database/migrate/postgres/0002_create_index_repositories_parent_id.up.sql @@ -0,0 +1,2 @@ +CREATE INDEX repositories_parent_id +ON repositories(repo_parent_id); diff --git a/internal/store/database/migrate/postgres/0002_create_index_spaces_parent_id.up.sql b/internal/store/database/migrate/postgres/0002_create_index_spaces_parent_id.up.sql new file mode 100644 index 0000000000..8483589147 --- /dev/null +++ b/internal/store/database/migrate/postgres/0002_create_index_spaces_parent_id.up.sql @@ -0,0 +1,2 @@ +CREATE INDEX spaces_parent_id +ON spaces(space_parent_id); diff --git a/internal/store/database/migrate/postgres/0002_create_index_tokens_principal_id.up.sql b/internal/store/database/migrate/postgres/0002_create_index_tokens_principal_id.up.sql new file mode 100644 index 0000000000..61306e8921 --- /dev/null +++ b/internal/store/database/migrate/postgres/0002_create_index_tokens_principal_id.up.sql @@ -0,0 +1,2 @@ +CREATE INDEX tokens_principal_id +ON tokens(token_principal_id); diff --git a/internal/store/database/migrate/postgres/0003_create_table_pullreqs.up.sql b/internal/store/database/migrate/postgres/0003_create_table_pullreqs.up.sql new file mode 100644 index 
0000000000..d2362bde66 --- /dev/null +++ b/internal/store/database/migrate/postgres/0003_create_table_pullreqs.up.sql @@ -0,0 +1,44 @@ +CREATE TABLE pullreqs ( +pullreq_id SERIAL PRIMARY KEY +,pullreq_version INTEGER NOT NULL DEFAULT 0 +,pullreq_created_by INTEGER NOT NULL +,pullreq_created BIGINT NOT NULL +,pullreq_updated BIGINT NOT NULL +,pullreq_edited BIGINT NOT NULL +,pullreq_number INTEGER NOT NULL +,pullreq_state TEXT NOT NULL +,pullreq_is_draft TEXT NOT NULL DEFAULT FALSE +,pullreq_comment_count INTEGER NOT NULL DEFAULT 0 +,pullreq_title TEXT NOT NULL +,pullreq_description TEXT NOT NULL +,pullreq_source_repo_id INTEGER NOT NULL +,pullreq_source_branch TEXT NOT NULL +,pullreq_source_sha TEXT NOT NULL +,pullreq_target_repo_id INTEGER NOT NULL +,pullreq_target_branch TEXT NOT NULL +,pullreq_activity_seq INTEGER DEFAULT 0 +,pullreq_merged_by INTEGER +,pullreq_merged BIGINT +,pullreq_merge_method TEXT +,pullreq_merge_check_status TEXT NOT NULL +,pullreq_merge_target_sha TEXT +,pullreq_merge_base_sha TEXT +,pullreq_merge_sha TEXT +,pullreq_merge_conflicts TEXT +,CONSTRAINT fk_pullreq_created_by FOREIGN KEY (pullreq_created_by) + REFERENCES principals (principal_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION +,CONSTRAINT fk_pullreq_source_repo_id FOREIGN KEY (pullreq_source_repo_id) + REFERENCES repositories (repo_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE SET NULL +,CONSTRAINT fk_pullreq_target_repo_id FOREIGN KEY (pullreq_target_repo_id) + REFERENCES repositories (repo_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +,CONSTRAINT fk_pullreq_merged_by FOREIGN KEY (pullreq_merged_by) + REFERENCES principals (principal_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION +); diff --git a/internal/store/database/migrate/postgres/0004_create_index_pullreqs_source_repo_branch_target_repo_branch.up.sql b/internal/store/database/migrate/postgres/0004_create_index_pullreqs_source_repo_branch_target_repo_branch.up.sql new file mode 100644 index 0000000000..544387e114 --- /dev/null +++ b/internal/store/database/migrate/postgres/0004_create_index_pullreqs_source_repo_branch_target_repo_branch.up.sql @@ -0,0 +1,3 @@ +CREATE UNIQUE INDEX pullreqs_source_repo_branch_target_repo_branch + ON pullreqs(pullreq_source_repo_id, pullreq_source_branch, pullreq_target_repo_id, pullreq_target_branch) + WHERE pullreq_state = 'open'; diff --git a/internal/store/database/migrate/postgres/0004_create_index_pullreqs_target_repo_id_number.up.sql b/internal/store/database/migrate/postgres/0004_create_index_pullreqs_target_repo_id_number.up.sql new file mode 100644 index 0000000000..a7e9be14d2 --- /dev/null +++ b/internal/store/database/migrate/postgres/0004_create_index_pullreqs_target_repo_id_number.up.sql @@ -0,0 +1,2 @@ +CREATE UNIQUE INDEX pullreqs_target_repo_id_number +ON pullreqs(pullreq_target_repo_id, pullreq_number); diff --git a/internal/store/database/migrate/postgres/0005_create_table_pullreq_activities.up.sql b/internal/store/database/migrate/postgres/0005_create_table_pullreq_activities.up.sql new file mode 100644 index 0000000000..087b78933e --- /dev/null +++ b/internal/store/database/migrate/postgres/0005_create_table_pullreq_activities.up.sql @@ -0,0 +1,42 @@ +CREATE TABLE pullreq_activities ( + pullreq_activity_id SERIAL PRIMARY KEY +,pullreq_activity_version BIGINT NOT NULL +,pullreq_activity_created_by INTEGER +,pullreq_activity_created BIGINT NOT NULL +,pullreq_activity_updated BIGINT NOT NULL +,pullreq_activity_edited BIGINT NOT NULL 
+,pullreq_activity_deleted BIGINT +,pullreq_activity_parent_id INTEGER +,pullreq_activity_repo_id INTEGER NOT NULL +,pullreq_activity_pullreq_id INTEGER NOT NULL +,pullreq_activity_order INTEGER NOT NULL +,pullreq_activity_sub_order INTEGER NOT NULL +,pullreq_activity_reply_seq INTEGER NOT NULL +,pullreq_activity_type TEXT NOT NULL +,pullreq_activity_kind TEXT NOT NULL +,pullreq_activity_text TEXT NOT NULL +,pullreq_activity_payload JSONB NOT NULL DEFAULT '{}' +,pullreq_activity_metadata JSONB NOT NULL DEFAULT '{}' +,pullreq_activity_resolved_by INTEGER DEFAULT 0 +,pullreq_activity_resolved BIGINT NULL +,CONSTRAINT fk_pullreq_activities_created_by FOREIGN KEY (pullreq_activity_created_by) + REFERENCES principals (principal_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION +,CONSTRAINT fk_pullreq_activities_parent_id FOREIGN KEY (pullreq_activity_parent_id) + REFERENCES pullreq_activities (pullreq_activity_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +,CONSTRAINT fk_pullreq_activities_repo_id FOREIGN KEY (pullreq_activity_repo_id) + REFERENCES repositories (repo_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +,CONSTRAINT fk_pullreq_activities_pullreq_id FOREIGN KEY (pullreq_activity_pullreq_id) + REFERENCES pullreqs (pullreq_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +,CONSTRAINT fk_pullreq_activities_resolved_by FOREIGN KEY (pullreq_activity_resolved_by) + REFERENCES principals (principal_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION +); diff --git a/internal/store/database/migrate/postgres/0006_create_index_pullreq_activities_pullreq_id_order_sub_order.up.sql b/internal/store/database/migrate/postgres/0006_create_index_pullreq_activities_pullreq_id_order_sub_order.up.sql new file mode 100644 index 0000000000..73b0d5361f --- /dev/null +++ b/internal/store/database/migrate/postgres/0006_create_index_pullreq_activities_pullreq_id_order_sub_order.up.sql @@ -0,0 +1,2 @@ +CREATE UNIQUE INDEX pullreq_activities_pullreq_id_order_sub_order +ON pullreq_activities(pullreq_activity_pullreq_id, pullreq_activity_order, pullreq_activity_sub_order); diff --git a/internal/store/database/migrate/postgres/0007_create_table_webhooks.up.sql b/internal/store/database/migrate/postgres/0007_create_table_webhooks.up.sql new file mode 100644 index 0000000000..ece6b97cbe --- /dev/null +++ b/internal/store/database/migrate/postgres/0007_create_table_webhooks.up.sql @@ -0,0 +1,29 @@ +CREATE TABLE webhooks ( +webhook_id SERIAL PRIMARY KEY +,webhook_version INTEGER NOT NULL DEFAULT 0 +,webhook_created_by INTEGER NOT NULL +,webhook_created BIGINT NOT NULL +,webhook_updated BIGINT NOT NULL +,webhook_space_id INTEGER +,webhook_repo_id INTEGER +,webhook_display_name TEXT NOT NULL +,webhook_description TEXT NOT NULL +,webhook_url TEXT NOT NULL +,webhook_secret TEXT NOT NULL +,webhook_enabled BOOLEAN NOT NULL +,webhook_insecure BOOLEAN NOT NULL +,webhook_triggers TEXT NOT NULL +,webhook_latest_execution_result TEXT +,CONSTRAINT fk_webhook_created_by FOREIGN KEY (webhook_created_by) + REFERENCES principals (principal_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION +,CONSTRAINT fk_webhook_space_id FOREIGN KEY (webhook_space_id) + REFERENCES spaces (space_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +,CONSTRAINT fk_webhook_repo_id FOREIGN KEY (webhook_repo_id) + REFERENCES repositories (repo_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +); diff --git 
a/internal/store/database/migrate/postgres/0008_create_index_webhooks_repo_id.up.sql b/internal/store/database/migrate/postgres/0008_create_index_webhooks_repo_id.up.sql new file mode 100644 index 0000000000..1c277b98f7 --- /dev/null +++ b/internal/store/database/migrate/postgres/0008_create_index_webhooks_repo_id.up.sql @@ -0,0 +1,2 @@ +CREATE INDEX webhooks_repo_id +ON webhooks(webhook_repo_id); diff --git a/internal/store/database/migrate/postgres/0008_create_index_webhooks_space_id.up.sql b/internal/store/database/migrate/postgres/0008_create_index_webhooks_space_id.up.sql new file mode 100644 index 0000000000..090c64bbd4 --- /dev/null +++ b/internal/store/database/migrate/postgres/0008_create_index_webhooks_space_id.up.sql @@ -0,0 +1,2 @@ +CREATE INDEX webhooks_space_id +ON webhooks(webhook_space_id); diff --git a/internal/store/database/migrate/postgres/0009_create_table_webhook_executions.up.sql b/internal/store/database/migrate/postgres/0009_create_table_webhook_executions.up.sql new file mode 100644 index 0000000000..06a3718efa --- /dev/null +++ b/internal/store/database/migrate/postgres/0009_create_table_webhook_executions.up.sql @@ -0,0 +1,19 @@ +CREATE TABLE webhook_executions ( +webhook_execution_id SERIAL PRIMARY KEY +,webhook_execution_retrigger_of INTEGER +,webhook_execution_retriggerable BOOLEAN NOT NULL +,webhook_execution_webhook_id INTEGER NOT NULL +,webhook_execution_trigger_type TEXT NOT NULL +,webhook_execution_trigger_id TEXT NOT NULL +,webhook_execution_result TEXT NOT NULL +,webhook_execution_created BIGINT NOT NULL +,webhook_execution_duration BIGINT NOT NULL +,webhook_execution_error TEXT NOT NULL +,webhook_execution_request_url TEXT NOT NULL +,webhook_execution_request_headers TEXT NOT NULL +,webhook_execution_request_body TEXT NOT NULL +,webhook_execution_response_status_code INTEGER NOT NULL +,webhook_execution_response_status TEXT NOT NULL +,webhook_execution_response_headers TEXT NOT NULL +,webhook_execution_response_body TEXT NOT NULL +); diff --git a/internal/store/database/migrate/postgres/0010_create_index_webhook_executions_webhook_id.up.sql b/internal/store/database/migrate/postgres/0010_create_index_webhook_executions_webhook_id.up.sql new file mode 100644 index 0000000000..2c008da018 --- /dev/null +++ b/internal/store/database/migrate/postgres/0010_create_index_webhook_executions_webhook_id.up.sql @@ -0,0 +1,2 @@ +CREATE INDEX webhook_executions_webhook_id +ON webhook_executions(webhook_execution_webhook_id); diff --git a/internal/store/database/migrate/postgres/0011_create_table_pullreq_reviews.up.sql b/internal/store/database/migrate/postgres/0011_create_table_pullreq_reviews.up.sql new file mode 100644 index 0000000000..a1fd0aa8a5 --- /dev/null +++ b/internal/store/database/migrate/postgres/0011_create_table_pullreq_reviews.up.sql @@ -0,0 +1,17 @@ +CREATE TABLE pullreq_reviews ( +pullreq_review_id SERIAL PRIMARY KEY +,pullreq_review_created_by INTEGER NOT NULL +,pullreq_review_created BIGINT NOT NULL +,pullreq_review_updated BIGINT NOT NULL +,pullreq_review_pullreq_id INTEGER NOT NULL +,pullreq_review_decision TEXT NOT NULL +,pullreq_review_sha TEXT NOT NULL +,CONSTRAINT fk_pullreq_review_created_by FOREIGN KEY (pullreq_review_created_by) + REFERENCES principals (principal_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION +,CONSTRAINT fk_pullreq_review_pullreq_id FOREIGN KEY (pullreq_review_pullreq_id) + REFERENCES pullreqs (pullreq_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +); \ No newline at end of file diff --git 
a/internal/store/database/migrate/postgres/0012_create_index_pullreq_reviews_pullreq_id.up.sql b/internal/store/database/migrate/postgres/0012_create_index_pullreq_reviews_pullreq_id.up.sql new file mode 100644 index 0000000000..01db26c1a2 --- /dev/null +++ b/internal/store/database/migrate/postgres/0012_create_index_pullreq_reviews_pullreq_id.up.sql @@ -0,0 +1,2 @@ +CREATE INDEX index_pullreq_review_pullreq_id +ON pullreq_reviews(pullreq_review_pullreq_id); diff --git a/internal/store/database/migrate/postgres/0013_create_table_pullreq_reviewers.down.sql b/internal/store/database/migrate/postgres/0013_create_table_pullreq_reviewers.down.sql new file mode 100644 index 0000000000..8ef30bf989 --- /dev/null +++ b/internal/store/database/migrate/postgres/0013_create_table_pullreq_reviewers.down.sql @@ -0,0 +1,2 @@ +-- Can't migrate down from this point. +-- This file must be present here. \ No newline at end of file diff --git a/internal/store/database/migrate/postgres/0013_create_table_pullreq_reviewers.up.sql b/internal/store/database/migrate/postgres/0013_create_table_pullreq_reviewers.up.sql new file mode 100644 index 0000000000..334b47f8d0 --- /dev/null +++ b/internal/store/database/migrate/postgres/0013_create_table_pullreq_reviewers.up.sql @@ -0,0 +1,33 @@ +CREATE TABLE pullreq_reviewers ( +pullreq_reviewer_pullreq_id INTEGER NOT NULL +,pullreq_reviewer_principal_id INTEGER NOT NULL +,pullreq_reviewer_created_by INTEGER NOT NULL +,pullreq_reviewer_created BIGINT NOT NULL +,pullreq_reviewer_updated BIGINT NOT NULL +,pullreq_reviewer_repo_id INTEGER NOT NULL +,pullreq_reviewer_type TEXT NOT NULL +,pullreq_reviewer_latest_review_id INTEGER +,pullreq_reviewer_review_decision TEXT NOT NULL +,pullreq_reviewer_sha TEXT NOT NULL +,CONSTRAINT pk_pullreq_reviewers PRIMARY KEY (pullreq_reviewer_pullreq_id, pullreq_reviewer_principal_id) +,CONSTRAINT fk_pullreq_reviewer_pullreq_id FOREIGN KEY (pullreq_reviewer_pullreq_id) + REFERENCES pullreqs (pullreq_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +,CONSTRAINT fk_pullreq_reviewer_user_id FOREIGN KEY (pullreq_reviewer_principal_id) + REFERENCES principals (principal_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION +,CONSTRAINT fk_pullreq_reviewer_created_by FOREIGN KEY (pullreq_reviewer_created_by) + REFERENCES principals (principal_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION +,CONSTRAINT fk_pullreq_reviewer_repo_id FOREIGN KEY (pullreq_reviewer_repo_id) + REFERENCES repositories (repo_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +,CONSTRAINT fk_pullreq_reviewer_latest_review_id FOREIGN KEY (pullreq_reviewer_latest_review_id) + REFERENCES pullreq_reviews (pullreq_review_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE SET NULL +); \ No newline at end of file diff --git a/internal/store/database/migrate/postgres/0014_alter_pullreq_activity_code_comments.down.sql b/internal/store/database/migrate/postgres/0014_alter_pullreq_activity_code_comments.down.sql new file mode 100644 index 0000000000..e816c57a93 --- /dev/null +++ b/internal/store/database/migrate/postgres/0014_alter_pullreq_activity_code_comments.down.sql @@ -0,0 +1,9 @@ +ALTER TABLE pullreq_activities + DROP COLUMN pullreq_activity_outdated, + DROP COLUMN pullreq_activity_code_comment_merge_base_sha, + DROP COLUMN pullreq_activity_code_comment_source_sha, + DROP COLUMN pullreq_activity_code_comment_path, + DROP COLUMN pullreq_activity_code_comment_line_new, + DROP COLUMN pullreq_activity_code_comment_span_new, + DROP COLUMN 
pullreq_activity_code_comment_line_old, + DROP COLUMN pullreq_activity_code_comment_span_old; diff --git a/internal/store/database/migrate/postgres/0014_alter_pullreq_activity_code_comments.up.sql b/internal/store/database/migrate/postgres/0014_alter_pullreq_activity_code_comments.up.sql new file mode 100644 index 0000000000..5f16dfad8a --- /dev/null +++ b/internal/store/database/migrate/postgres/0014_alter_pullreq_activity_code_comments.up.sql @@ -0,0 +1,9 @@ +ALTER TABLE pullreq_activities + ADD COLUMN pullreq_activity_outdated BOOLEAN, + ADD COLUMN pullreq_activity_code_comment_merge_base_sha TEXT, + ADD COLUMN pullreq_activity_code_comment_source_sha TEXT, + ADD COLUMN pullreq_activity_code_comment_path TEXT, + ADD COLUMN pullreq_activity_code_comment_line_new INTEGER, + ADD COLUMN pullreq_activity_code_comment_span_new INTEGER, + ADD COLUMN pullreq_activity_code_comment_line_old INTEGER, + ADD COLUMN pullreq_activity_code_comment_span_old INTEGER; \ No newline at end of file diff --git a/internal/store/database/migrate/postgres/0015_alter_pullreq_merge_base_not_nullable.down.sql b/internal/store/database/migrate/postgres/0015_alter_pullreq_merge_base_not_nullable.down.sql new file mode 100644 index 0000000000..b3501caff4 --- /dev/null +++ b/internal/store/database/migrate/postgres/0015_alter_pullreq_merge_base_not_nullable.down.sql @@ -0,0 +1,4 @@ +ALTER TABLE pullreqs + ALTER COLUMN pullreq_merge_base_sha DROP DEFAULT, + ALTER COLUMN pullreq_merge_base_sha DROP NOT NULL; +UPDATE pullreqs SET pullreq_merge_base_sha = NULL WHERE pullreq_merge_base_sha = ''; \ No newline at end of file diff --git a/internal/store/database/migrate/postgres/0015_alter_pullreq_merge_base_not_nullable.up.sql b/internal/store/database/migrate/postgres/0015_alter_pullreq_merge_base_not_nullable.up.sql new file mode 100644 index 0000000000..e0985cd6be --- /dev/null +++ b/internal/store/database/migrate/postgres/0015_alter_pullreq_merge_base_not_nullable.up.sql @@ -0,0 +1,4 @@ +UPDATE pullreqs SET pullreq_merge_base_sha = '' WHERE pullreq_merge_base_sha IS NULL; +ALTER TABLE pullreqs + ALTER COLUMN pullreq_merge_base_sha SET DEFAULT '', + ALTER COLUMN pullreq_merge_base_sha SET NOT NULL; \ No newline at end of file diff --git a/internal/store/database/migrate/postgres/0016_alter_pullreq_add_unresolved.down.sql b/internal/store/database/migrate/postgres/0016_alter_pullreq_add_unresolved.down.sql new file mode 100644 index 0000000000..e596101935 --- /dev/null +++ b/internal/store/database/migrate/postgres/0016_alter_pullreq_add_unresolved.down.sql @@ -0,0 +1 @@ +ALTER TABLE pullreqs DROP COLUMN pullreq_unresolved_count; diff --git a/internal/store/database/migrate/postgres/0016_alter_pullreq_add_unresolved.up.sql b/internal/store/database/migrate/postgres/0016_alter_pullreq_add_unresolved.up.sql new file mode 100644 index 0000000000..18449aa105 --- /dev/null +++ b/internal/store/database/migrate/postgres/0016_alter_pullreq_add_unresolved.up.sql @@ -0,0 +1,18 @@ +ALTER TABLE pullreqs ADD COLUMN pullreq_unresolved_count INTEGER NOT NULL DEFAULT 0; + +WITH unresolved_counts AS ( + SELECT + pullreq_activity_pullreq_id AS "unresolved_pullreq_id", + COUNT(*) AS "unresolved_count" + FROM pullreq_activities + WHERE + pullreq_activity_sub_order = 0 AND + pullreq_activity_resolved IS NULL AND + pullreq_activity_deleted IS NULL AND + pullreq_activity_kind <> 'system' + GROUP BY pullreq_activity_pullreq_id +) +UPDATE pullreqs +SET pullreq_unresolved_count = unresolved_counts.unresolved_count +FROM unresolved_counts +WHERE 
pullreq_id = unresolved_pullreq_id; diff --git a/internal/store/database/migrate/postgres/0017_create_table_checks.down.sql b/internal/store/database/migrate/postgres/0017_create_table_checks.down.sql new file mode 100644 index 0000000000..fae645b93d --- /dev/null +++ b/internal/store/database/migrate/postgres/0017_create_table_checks.down.sql @@ -0,0 +1,6 @@ +DROP INDEX reqchecks_repo_id; +DROP TABLE reqchecks; + +DROP INDEX checks_repo_id_created; +DROP INDEX checks_repo_id_commit_sha_uid; +DROP TABLE checks; diff --git a/internal/store/database/migrate/postgres/0017_create_table_checks.up.sql b/internal/store/database/migrate/postgres/0017_create_table_checks.up.sql new file mode 100644 index 0000000000..f9d962f5c2 --- /dev/null +++ b/internal/store/database/migrate/postgres/0017_create_table_checks.up.sql @@ -0,0 +1,49 @@ +CREATE TABLE checks ( + check_id SERIAL PRIMARY KEY +,check_created_by INTEGER NOT NULL +,check_created BIGINT NOT NULL +,check_updated BIGINT NOT NULL +,check_repo_id INTEGER NOT NULL +,check_commit_sha TEXT NOT NULL +,check_type TEXT NOT NULL +,check_uid TEXT NOT NULL +,check_status TEXT NOT NULL +,check_summary TEXT NOT NULL +,check_link TEXT NOT NULL +,check_payload JSON NOT NULL +,check_metadata JSON NOT NULL +,CONSTRAINT fk_check_created_by FOREIGN KEY (check_created_by) + REFERENCES principals (principal_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION +,CONSTRAINT fk_check_repo_id FOREIGN KEY (check_repo_id) + REFERENCES repositories (repo_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +); + +CREATE UNIQUE INDEX checks_repo_id_commit_sha_uid + ON checks(check_repo_id, check_commit_sha, check_uid); + +CREATE INDEX checks_repo_id_created + ON checks(check_repo_id, check_created); + +CREATE TABLE reqchecks ( + reqcheck_id SERIAL PRIMARY KEY +,reqcheck_created_by INTEGER NOT NULL +,reqcheck_created BIGINT NOT NULL +,reqcheck_repo_id INTEGER NOT NULL +,reqcheck_branch_pattern TEXT NOT NULL +,reqcheck_check_uid TEXT NOT NULL +,CONSTRAINT fk_check_created_by FOREIGN KEY (reqcheck_created_by) + REFERENCES principals (principal_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION +,CONSTRAINT fk_check_repo_id FOREIGN KEY (reqcheck_repo_id) + REFERENCES repositories (repo_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +); + +CREATE INDEX reqchecks_repo_id + ON reqchecks(reqcheck_repo_id); diff --git a/internal/store/database/migrate/postgres/0018_alter_check_add_payload_version.down.sql b/internal/store/database/migrate/postgres/0018_alter_check_add_payload_version.down.sql new file mode 100644 index 0000000000..5bf0a515ac --- /dev/null +++ b/internal/store/database/migrate/postgres/0018_alter_check_add_payload_version.down.sql @@ -0,0 +1,4 @@ +ALTER TABLE checks + ADD COLUMN check_type TEXT NOT NULL DEFAULT '', + DROP COLUMN check_payload_version, + DROP COLUMN check_payload_kind; diff --git a/internal/store/database/migrate/postgres/0018_alter_check_add_payload_version.up.sql b/internal/store/database/migrate/postgres/0018_alter_check_add_payload_version.up.sql new file mode 100644 index 0000000000..c4e6502c04 --- /dev/null +++ b/internal/store/database/migrate/postgres/0018_alter_check_add_payload_version.up.sql @@ -0,0 +1,4 @@ +ALTER TABLE checks + ADD COLUMN check_payload_version TEXT NOT NULL DEFAULT '', + ADD COLUMN check_payload_kind TEXT NOT NULL DEFAULT '', + DROP COLUMN check_type; diff --git a/internal/store/database/migrate/postgres/0019_create_table_memberships.down.sql 
b/internal/store/database/migrate/postgres/0019_create_table_memberships.down.sql new file mode 100644 index 0000000000..cb23bba6b0 --- /dev/null +++ b/internal/store/database/migrate/postgres/0019_create_table_memberships.down.sql @@ -0,0 +1,2 @@ +DROP TABLE memberships; + diff --git a/internal/store/database/migrate/postgres/0019_create_table_memberships.up.sql b/internal/store/database/migrate/postgres/0019_create_table_memberships.up.sql new file mode 100644 index 0000000000..e0b9ef5b76 --- /dev/null +++ b/internal/store/database/migrate/postgres/0019_create_table_memberships.up.sql @@ -0,0 +1,21 @@ +CREATE TABLE memberships ( + membership_space_id INTEGER NOT NULL +,membership_principal_id INTEGER NOT NULL +,membership_created_by INTEGER NOT NULL +,membership_created BIGINT NOT NULL +,membership_updated BIGINT NOT NULL +,membership_role TEXT NOT NULL +,CONSTRAINT pk_memberships PRIMARY KEY (membership_space_id, membership_principal_id) +,CONSTRAINT fk_membership_space_id FOREIGN KEY (membership_space_id) + REFERENCES spaces (space_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +,CONSTRAINT fk_membership_principal_id FOREIGN KEY (membership_principal_id) + REFERENCES principals (principal_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +,CONSTRAINT fk_membership_created_by FOREIGN KEY (membership_created_by) + REFERENCES principals (principal_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION +); diff --git a/internal/store/database/migrate/postgres/0020_alter_pullreq_source_repo_id_constraint.down.sql b/internal/store/database/migrate/postgres/0020_alter_pullreq_source_repo_id_constraint.down.sql new file mode 100644 index 0000000000..6c6e2c61f3 --- /dev/null +++ b/internal/store/database/migrate/postgres/0020_alter_pullreq_source_repo_id_constraint.down.sql @@ -0,0 +1,8 @@ +ALTER TABLE pullreqs + DROP CONSTRAINT fk_pullreq_source_repo_id; + +ALTER TABLE pullreqs + ADD CONSTRAINT fk_pullreq_source_repo_id FOREIGN KEY (pullreq_source_repo_id) + REFERENCES repositories (repo_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE SET NULL; \ No newline at end of file diff --git a/internal/store/database/migrate/postgres/0020_alter_pullreq_source_repo_id_constraint.up.sql b/internal/store/database/migrate/postgres/0020_alter_pullreq_source_repo_id_constraint.up.sql new file mode 100644 index 0000000000..80ea9d5507 --- /dev/null +++ b/internal/store/database/migrate/postgres/0020_alter_pullreq_source_repo_id_constraint.up.sql @@ -0,0 +1,8 @@ +ALTER TABLE pullreqs + DROP CONSTRAINT fk_pullreq_source_repo_id; + +ALTER TABLE pullreqs + ADD CONSTRAINT fk_pullreq_source_repo_id FOREIGN KEY (pullreq_source_repo_id) + REFERENCES repositories (repo_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE; \ No newline at end of file diff --git a/internal/store/database/migrate/postgres/0021_alter_table_webhook_add_internal_down.sql b/internal/store/database/migrate/postgres/0021_alter_table_webhook_add_internal_down.sql new file mode 100644 index 0000000000..68d596a51c --- /dev/null +++ b/internal/store/database/migrate/postgres/0021_alter_table_webhook_add_internal_down.sql @@ -0,0 +1 @@ +ALTER TABLE webhooks DROP COLUMN webhook_internal; diff --git a/internal/store/database/migrate/postgres/0021_alter_table_webhook_add_internal_up.sql b/internal/store/database/migrate/postgres/0021_alter_table_webhook_add_internal_up.sql new file mode 100644 index 0000000000..f11891414c --- /dev/null +++ 
b/internal/store/database/migrate/postgres/0021_alter_table_webhook_add_internal_up.sql @@ -0,0 +1,2 @@ +ALTER TABLE webhooks + ADD COLUMN webhook_internal BOOLEAN NOT NULL DEFAULT false; diff --git a/internal/store/database/migrate/postgres/0022_create_table_jobs.down.sql b/internal/store/database/migrate/postgres/0022_create_table_jobs.down.sql new file mode 100644 index 0000000000..7c5d0fba0d --- /dev/null +++ b/internal/store/database/migrate/postgres/0022_create_table_jobs.down.sql @@ -0,0 +1,4 @@ +DROP INDEX jobs_last_executed; +DROP INDEX jobs_run_deadline; +DROP INDEX jobs_scheduled; +DROP TABLE jobs; diff --git a/internal/store/database/migrate/postgres/0022_create_table_jobs.up.sql b/internal/store/database/migrate/postgres/0022_create_table_jobs.up.sql new file mode 100644 index 0000000000..0f6d6f60d7 --- /dev/null +++ b/internal/store/database/migrate/postgres/0022_create_table_jobs.up.sql @@ -0,0 +1,35 @@ +CREATE TABLE jobs ( + job_uid TEXT NOT NULL +,job_created BIGINT NOT NULL +,job_updated BIGINT NOT NULL +,job_type TEXT NOT NULL +,job_priority INTEGER NOT NULL +,job_data TEXT NOT NULL +,job_result TEXT NOT NULL +,job_max_duration_seconds INTEGER NOT NULL +,job_max_retries INTEGER NOT NULL +,job_state TEXT NOT NULL +,job_scheduled BIGINT NOT NULL +,job_total_executions INTEGER +,job_run_by TEXT NOT NULL +,job_run_deadline BIGINT +,job_run_progress INTEGER NOT NULL +,job_last_executed BIGINT +,job_is_recurring BOOLEAN NOT NULL +,job_recurring_cron TEXT NOT NULL +,job_consecutive_failures INTEGER NOT NULL +,job_last_failure_error TEXT NOT NULL +,CONSTRAINT pk_jobs_uid PRIMARY KEY (job_uid) +); + +CREATE INDEX jobs_scheduled + ON jobs(job_scheduled) + WHERE job_state = 'scheduled'; + +CREATE INDEX jobs_run_deadline + ON jobs(job_run_deadline) + WHERE job_state = 'running'; + +CREATE INDEX jobs_last_executed + ON jobs(job_last_executed) + WHERE job_state = 'finished' OR job_state = 'failed'; diff --git a/internal/store/database/migrate/postgres/0023_index_jobs_last_executed.down.sql b/internal/store/database/migrate/postgres/0023_index_jobs_last_executed.down.sql new file mode 100644 index 0000000000..098e193fe3 --- /dev/null +++ b/internal/store/database/migrate/postgres/0023_index_jobs_last_executed.down.sql @@ -0,0 +1,4 @@ +DROP INDEX jobs_last_executed; +CREATE INDEX jobs_last_executed + ON jobs(job_last_executed) + WHERE job_state = 'finished' OR job_state = 'failed'; diff --git a/internal/store/database/migrate/postgres/0023_index_jobs_last_executed.up.sql b/internal/store/database/migrate/postgres/0023_index_jobs_last_executed.up.sql new file mode 100644 index 0000000000..a612ba8feb --- /dev/null +++ b/internal/store/database/migrate/postgres/0023_index_jobs_last_executed.up.sql @@ -0,0 +1,4 @@ +DROP INDEX jobs_last_executed; +CREATE INDEX jobs_last_executed + ON jobs(job_last_executed) + WHERE job_is_recurring = FALSE AND (job_state = 'finished' OR job_state = 'failed' OR job_state = 'canceled'); diff --git a/internal/store/database/migrate/postgres/0024_alter_repo_add_importing.down.sql b/internal/store/database/migrate/postgres/0024_alter_repo_add_importing.down.sql new file mode 100644 index 0000000000..313e773934 --- /dev/null +++ b/internal/store/database/migrate/postgres/0024_alter_repo_add_importing.down.sql @@ -0,0 +1,4 @@ +ALTER TABLE repositories + DROP CONSTRAINT fk_repo_importing_job_uid, + DROP COLUMN repo_importing_job_uid, + DROP COLUMN repo_importing; diff --git a/internal/store/database/migrate/postgres/0024_alter_repo_add_importing.up.sql 
b/internal/store/database/migrate/postgres/0024_alter_repo_add_importing.up.sql new file mode 100644 index 0000000000..18efc07401 --- /dev/null +++ b/internal/store/database/migrate/postgres/0024_alter_repo_add_importing.up.sql @@ -0,0 +1,8 @@ +ALTER TABLE repositories + ADD COLUMN repo_importing BOOLEAN NOT NULL DEFAULT false, + ADD COLUMN repo_importing_job_uid TEXT, + ADD CONSTRAINT fk_repo_importing_job_uid + FOREIGN KEY (repo_importing_job_uid) + REFERENCES jobs(job_uid) + ON DELETE SET NULL + ON UPDATE NO ACTION; diff --git a/internal/store/database/migrate/postgres/0025_alter_table_job_add_group_id.down.sql b/internal/store/database/migrate/postgres/0025_alter_table_job_add_group_id.down.sql new file mode 100644 index 0000000000..a740594cc3 --- /dev/null +++ b/internal/store/database/migrate/postgres/0025_alter_table_job_add_group_id.down.sql @@ -0,0 +1 @@ +ALTER TABLE jobs DROP COLUMN job_group_id; diff --git a/internal/store/database/migrate/postgres/0025_alter_table_job_add_group_id.up.sql b/internal/store/database/migrate/postgres/0025_alter_table_job_add_group_id.up.sql new file mode 100644 index 0000000000..86a19161b2 --- /dev/null +++ b/internal/store/database/migrate/postgres/0025_alter_table_job_add_group_id.up.sql @@ -0,0 +1 @@ +ALTER TABLE jobs ADD COLUMN job_group_id TEXT NOT NULL DEFAULT ''; diff --git a/internal/store/database/migrate/postgres/0026_alter_repo_drop_job_id.up.sql b/internal/store/database/migrate/postgres/0026_alter_repo_drop_job_id.up.sql new file mode 100644 index 0000000000..db6a8e9433 --- /dev/null +++ b/internal/store/database/migrate/postgres/0026_alter_repo_drop_job_id.up.sql @@ -0,0 +1,3 @@ +ALTER TABLE repositories + DROP CONSTRAINT fk_repo_importing_job_uid, + DROP COLUMN repo_importing_job_uid; diff --git a/internal/store/database/migrate/postgres/0026_alter_repo_drop_join_id.down.sql b/internal/store/database/migrate/postgres/0026_alter_repo_drop_join_id.down.sql new file mode 100644 index 0000000000..4f44adcc9f --- /dev/null +++ b/internal/store/database/migrate/postgres/0026_alter_repo_drop_join_id.down.sql @@ -0,0 +1,7 @@ +ALTER TABLE repositories + ADD COLUMN repo_importing_job_uid TEXT, + ADD CONSTRAINT fk_repo_importing_job_uid + FOREIGN KEY (repo_importing_job_uid) + REFERENCES jobs(job_uid) + ON DELETE SET NULL + ON UPDATE NO ACTION; diff --git a/internal/store/database/migrate/postgres/0027_create_ci_tables.down.sql b/internal/store/database/migrate/postgres/0027_create_ci_tables.down.sql new file mode 100644 index 0000000000..3200f9ccce --- /dev/null +++ b/internal/store/database/migrate/postgres/0027_create_ci_tables.down.sql @@ -0,0 +1,10 @@ +DROP TABLE pipelines; +DROP TABLE executions; +DROP TABLE stages; +DROP TABLE secrets; +DROP TABLE steps; +DROP TABLE logs; +DROP TABLE plugins; +DROP TABLE connectors; +DROP TABLE templates; +DROP TABLE triggers; \ No newline at end of file diff --git a/internal/store/database/migrate/postgres/0027_create_ci_tables.up.sql b/internal/store/database/migrate/postgres/0027_create_ci_tables.up.sql new file mode 100644 index 0000000000..8e6e07dd6d --- /dev/null +++ b/internal/store/database/migrate/postgres/0027_create_ci_tables.up.sql @@ -0,0 +1,206 @@ +CREATE TABLE pipelines ( + pipeline_id SERIAL PRIMARY KEY, + pipeline_description TEXT NOT NULL, + pipeline_uid TEXT NOT NULL, + pipeline_seq INTEGER NOT NULL DEFAULT 0, + pipeline_disabled BOOLEAN NOT NULL, + pipeline_repo_id INTEGER NOT NULL, + pipeline_default_branch TEXT NOT NULL, + pipeline_created_by INTEGER NOT NULL, + pipeline_config_path 
TEXT NOT NULL, + pipeline_created BIGINT NOT NULL, + pipeline_updated BIGINT NOT NULL, + pipeline_version INTEGER NOT NULL, + UNIQUE (pipeline_repo_id, pipeline_uid), + CONSTRAINT fk_pipelines_repo_id FOREIGN KEY (pipeline_repo_id) + REFERENCES repositories (repo_id) ON DELETE CASCADE, + CONSTRAINT fk_pipelines_created_by FOREIGN KEY (pipeline_created_by) + REFERENCES principals (principal_id) ON DELETE NO ACTION +); + +CREATE TABLE executions ( + execution_id SERIAL PRIMARY KEY, + execution_pipeline_id INTEGER NOT NULL, + execution_repo_id INTEGER NOT NULL, + execution_created_by INTEGER NOT NULL, + execution_trigger TEXT NOT NULL, + execution_number INTEGER NOT NULL, + execution_parent INTEGER NOT NULL, + execution_status TEXT NOT NULL, + execution_error TEXT NOT NULL, + execution_event TEXT NOT NULL, + execution_action TEXT NOT NULL, + execution_link TEXT NOT NULL, + execution_timestamp INTEGER NOT NULL, + execution_title TEXT NOT NULL, + execution_message TEXT NOT NULL, + execution_before TEXT NOT NULL, + execution_after TEXT NOT NULL, + execution_ref TEXT NOT NULL, + execution_source_repo TEXT NOT NULL, + execution_source TEXT NOT NULL, + execution_target TEXT NOT NULL, + execution_author TEXT NOT NULL, + execution_author_name TEXT NOT NULL, + execution_author_email TEXT NOT NULL, + execution_author_avatar TEXT NOT NULL, + execution_sender TEXT NOT NULL, + execution_params TEXT NOT NULL, + execution_cron TEXT NOT NULL, + execution_deploy TEXT NOT NULL, + execution_deploy_id INTEGER NOT NULL, + execution_debug BOOLEAN NOT NULL DEFAULT false, + execution_started BIGINT NOT NULL, + execution_finished BIGINT NOT NULL, + execution_created BIGINT NOT NULL, + execution_updated BIGINT NOT NULL, + execution_version INTEGER NOT NULL, + UNIQUE (execution_pipeline_id, execution_number), + CONSTRAINT fk_executions_pipeline_id FOREIGN KEY (execution_pipeline_id) + REFERENCES pipelines (pipeline_id) ON DELETE CASCADE, + CONSTRAINT fk_executions_repo_id FOREIGN KEY (execution_repo_id) + REFERENCES repositories (repo_id) ON DELETE CASCADE, + CONSTRAINT fk_executions_created_by FOREIGN KEY (execution_created_by) + REFERENCES principals (principal_id) ON DELETE NO ACTION +); + +CREATE TABLE secrets ( + secret_id SERIAL PRIMARY KEY, + secret_uid TEXT NOT NULL, + secret_space_id INTEGER NOT NULL, + secret_description TEXT NOT NULL, + secret_data BYTEA NOT NULL, + secret_created_by INTEGER NOT NULL, + secret_created BIGINT NOT NULL, + secret_updated BIGINT NOT NULL, + secret_version INTEGER NOT NULL, + UNIQUE (secret_space_id, secret_uid), + CONSTRAINT fk_secrets_space_id FOREIGN KEY (secret_space_id) + REFERENCES spaces (space_id) ON DELETE CASCADE, + CONSTRAINT fk_secrets_created_by FOREIGN KEY (secret_created_by) + REFERENCES principals (principal_id) ON DELETE NO ACTION +); + +CREATE TABLE stages ( + stage_id SERIAL PRIMARY KEY, + stage_execution_id INTEGER NOT NULL, + stage_repo_id INTEGER NOT NULL, + stage_number INTEGER NOT NULL, + stage_kind TEXT NOT NULL, + stage_type TEXT NOT NULL, + stage_name TEXT NOT NULL, + stage_status TEXT NOT NULL, + stage_error TEXT NOT NULL, + stage_parent_group_id INTEGER NOT NULL, + stage_errignore BOOLEAN NOT NULL, + stage_exit_code INTEGER NOT NULL, + stage_limit INTEGER NOT NULL, + stage_os TEXT NOT NULL, + stage_arch TEXT NOT NULL, + stage_variant TEXT NOT NULL, + stage_kernel TEXT NOT NULL, + stage_machine TEXT NOT NULL, + stage_started BIGINT NOT NULL, + stage_stopped BIGINT NOT NULL, + stage_created BIGINT NOT NULL, + stage_updated BIGINT NOT NULL, + 
stage_version INTEGER NOT NULL, + stage_on_success BOOLEAN NOT NULL, + stage_on_failure BOOLEAN NOT NULL, + stage_depends_on TEXT NOT NULL, + stage_labels TEXT NOT NULL, + stage_limit_repo INTEGER NOT NULL DEFAULT 0, + UNIQUE (stage_execution_id, stage_number), + CONSTRAINT fk_stages_execution_id FOREIGN KEY (stage_execution_id) + REFERENCES executions (execution_id) ON DELETE CASCADE +); + +CREATE INDEX ix_stage_in_progress ON stages (stage_status) +WHERE stage_status IN ('pending', 'running'); + +CREATE TABLE steps ( + step_id SERIAL PRIMARY KEY, + step_stage_id INTEGER NOT NULL, + step_number INTEGER NOT NULL, + step_name TEXT NOT NULL, + step_status TEXT NOT NULL, + step_error TEXT NOT NULL, + step_parent_group_id INTEGER NOT NULL, + step_errignore BOOLEAN NOT NULL, + step_exit_code INTEGER NOT NULL, + step_started BIGINT NOT NULL, + step_stopped BIGINT NOT NULL, + step_version INTEGER NOT NULL, + step_depends_on TEXT NOT NULL, + step_image TEXT NOT NULL, + step_detached BOOLEAN NOT NULL, + step_schema TEXT NOT NULL, + UNIQUE (step_stage_id, step_number), + CONSTRAINT fk_steps_stage_id FOREIGN KEY (step_stage_id) + REFERENCES stages (stage_id) ON DELETE CASCADE +); + +CREATE TABLE logs ( + log_id SERIAL PRIMARY KEY, + log_data BYTEA NOT NULL, + CONSTRAINT fk_logs_id FOREIGN KEY (log_id) + REFERENCES steps (step_id) ON DELETE CASCADE +); + +CREATE TABLE connectors ( + connector_id SERIAL PRIMARY KEY, + connector_uid TEXT NOT NULL, + connector_description TEXT NOT NULL, + connector_type TEXT NOT NULL, + connector_space_id INTEGER NOT NULL, + connector_data TEXT NOT NULL, + connector_created BIGINT NOT NULL, + connector_updated BIGINT NOT NULL, + connector_version INTEGER NOT NULL, + UNIQUE (connector_space_id, connector_uid), + CONSTRAINT fk_connectors_space_id FOREIGN KEY (connector_space_id) + REFERENCES spaces (space_id) ON DELETE CASCADE +); + +CREATE TABLE templates ( + template_id SERIAL PRIMARY KEY, + template_uid TEXT NOT NULL, + template_description TEXT NOT NULL, + template_space_id INTEGER NOT NULL, + template_data TEXT NOT NULL, + template_created BIGINT NOT NULL, + template_updated BIGINT NOT NULL, + template_version INTEGER NOT NULL, + UNIQUE (template_space_id, template_uid), + CONSTRAINT fk_templates_space_id FOREIGN KEY (template_space_id) + REFERENCES spaces (space_id) ON DELETE CASCADE +); + +CREATE TABLE triggers ( + trigger_id SERIAL PRIMARY KEY, + trigger_uid TEXT NOT NULL, + trigger_pipeline_id INTEGER NOT NULL, + trigger_type TEXT NOT NULL, + trigger_repo_id INTEGER NOT NULL, + trigger_secret TEXT NOT NULL, + trigger_description TEXT NOT NULL, + trigger_disabled BOOLEAN NOT NULL, + trigger_created_by INTEGER NOT NULL, + trigger_actions TEXT NOT NULL, + trigger_created BIGINT NOT NULL, + trigger_updated BIGINT NOT NULL, + trigger_version INTEGER NOT NULL, + UNIQUE (trigger_pipeline_id, trigger_uid), + CONSTRAINT fk_triggers_pipeline_id FOREIGN KEY (trigger_pipeline_id) + REFERENCES pipelines (pipeline_id) ON DELETE CASCADE, + CONSTRAINT fk_triggers_repo_id FOREIGN KEY (trigger_repo_id) + REFERENCES repositories (repo_id) ON DELETE CASCADE +); + +CREATE TABLE plugins ( + plugin_uid TEXT NOT NULL, + plugin_description TEXT NOT NULL, + plugin_logo TEXT NOT NULL, + plugin_spec BYTEA NOT NULL, + UNIQUE (plugin_uid) +); \ No newline at end of file diff --git a/internal/store/database/migrate/postgres/0028_alter_token_drop_grants.down.sql b/internal/store/database/migrate/postgres/0028_alter_token_drop_grants.down.sql new file mode 100644 index 0000000000..3339b927b4 --- 
/dev/null +++ b/internal/store/database/migrate/postgres/0028_alter_token_drop_grants.down.sql @@ -0,0 +1 @@ +ALTER TABLE tokens ADD COLUMN token_grants BIGINT DEFAULT 0; \ No newline at end of file diff --git a/internal/store/database/migrate/postgres/0028_alter_token_drop_grants.up.sql b/internal/store/database/migrate/postgres/0028_alter_token_drop_grants.up.sql new file mode 100644 index 0000000000..376b0fa634 --- /dev/null +++ b/internal/store/database/migrate/postgres/0028_alter_token_drop_grants.up.sql @@ -0,0 +1 @@ +ALTER TABLE tokens DROP COLUMN token_grants; diff --git a/internal/store/database/migrate/postgres/0029_create_index_job_job_group_id_down.sql b/internal/store/database/migrate/postgres/0029_create_index_job_job_group_id_down.sql new file mode 100644 index 0000000000..c3d826768c --- /dev/null +++ b/internal/store/database/migrate/postgres/0029_create_index_job_job_group_id_down.sql @@ -0,0 +1 @@ +DROP INDEX job_group_id; diff --git a/internal/store/database/migrate/postgres/0029_create_index_job_job_group_id_up.sql b/internal/store/database/migrate/postgres/0029_create_index_job_job_group_id_up.sql new file mode 100644 index 0000000000..0805d82dec --- /dev/null +++ b/internal/store/database/migrate/postgres/0029_create_index_job_job_group_id_up.sql @@ -0,0 +1 @@ +CREATE INDEX job_group_id ON jobs(job_group_id); \ No newline at end of file diff --git a/internal/store/database/migrate/postgres/0030_create_table_space_paths.down.sql b/internal/store/database/migrate/postgres/0030_create_table_space_paths.down.sql new file mode 100644 index 0000000000..9f8ce0062e --- /dev/null +++ b/internal/store/database/migrate/postgres/0030_create_table_space_paths.down.sql @@ -0,0 +1,2 @@ +-- we fall back to the non-deleted paths table (risk that new entries are missing) +DROP TABLE space_paths; \ No newline at end of file diff --git a/internal/store/database/migrate/postgres/0030_create_table_space_paths.up.sql b/internal/store/database/migrate/postgres/0030_create_table_space_paths.up.sql new file mode 100644 index 0000000000..cfbf09b881 --- /dev/null +++ b/internal/store/database/migrate/postgres/0030_create_table_space_paths.up.sql @@ -0,0 +1,61 @@ +CREATE TABLE space_paths ( + space_path_id SERIAL PRIMARY KEY +,space_path_uid TEXT NOT NULL +,space_path_uid_unique TEXT NOT NULL +,space_path_is_primary BOOLEAN DEFAULT NULL +,space_path_space_id INTEGER NOT NULL +,space_path_parent_id INTEGER +,space_path_created_by INTEGER NOT NULL +,space_path_created BIGINT NOT NULL +,space_path_updated BIGINT NOT NULL + +,CONSTRAINT fk_space_path_created_by FOREIGN KEY (space_path_created_by) + REFERENCES principals (principal_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION +,CONSTRAINT fk_space_path_space_id FOREIGN KEY (space_path_space_id) + REFERENCES spaces (space_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +,CONSTRAINT fk_space_path_parent_id FOREIGN KEY (space_path_parent_id) + REFERENCES spaces (space_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +); + +CREATE UNIQUE INDEX space_paths_space_id_is_primary +ON space_paths(space_path_space_id, space_path_is_primary); + +CREATE UNIQUE INDEX space_paths_uid_unique_no_parent +ON space_paths(space_path_uid_unique) +WHERE space_path_parent_id IS NULL; + +CREATE UNIQUE INDEX space_paths_uid_unique +ON space_paths(space_path_parent_id, space_path_uid_unique) +WHERE space_path_parent_id IS NOT NULL; + +-- assume no alias paths were created - create fresh primary entries for each space. 
+INSERT INTO space_paths ( + space_path_uid + ,space_path_uid_unique + ,space_path_is_primary + ,space_path_parent_id + ,space_path_space_id + ,space_path_created_by + ,space_path_created + ,space_path_updated +) +SELECT + space_uid + -- we assume postgres is used by harness - accountID is case sensitive, rest isn't + ,CASE WHEN space_parent_id IS NULL + THEN space_uid + ELSE LOWER(space_uid) + END + ,TRUE + ,space_parent_id + ,space_id + ,space_created_by + ,space_created + ,space_updated +FROM spaces; \ No newline at end of file diff --git a/internal/store/database/migrate/postgres/0031_alter_index_repositories.down.sql b/internal/store/database/migrate/postgres/0031_alter_index_repositories.down.sql new file mode 100644 index 0000000000..1777cd2400 --- /dev/null +++ b/internal/store/database/migrate/postgres/0031_alter_index_repositories.down.sql @@ -0,0 +1,2 @@ +DROP INDEX repositories_parent_id_uid; +CREATE INDEX repositories_parent_id ON repositories(repo_parent_id); \ No newline at end of file diff --git a/internal/store/database/migrate/postgres/0031_alter_index_repositories.up.sql b/internal/store/database/migrate/postgres/0031_alter_index_repositories.up.sql new file mode 100644 index 0000000000..71a1251058 --- /dev/null +++ b/internal/store/database/migrate/postgres/0031_alter_index_repositories.up.sql @@ -0,0 +1,2 @@ +DROP INDEX repositories_parent_id; +CREATE UNIQUE INDEX repositories_parent_id_uid ON repositories(repo_parent_id, LOWER(repo_uid)); \ No newline at end of file diff --git a/internal/store/database/migrate/postgres/0032_create_table_pullreq_file_views.down.sql b/internal/store/database/migrate/postgres/0032_create_table_pullreq_file_views.down.sql new file mode 100644 index 0000000000..450455f210 --- /dev/null +++ b/internal/store/database/migrate/postgres/0032_create_table_pullreq_file_views.down.sql @@ -0,0 +1 @@ +DROP TABLE pullreq_file_views; \ No newline at end of file diff --git a/internal/store/database/migrate/postgres/0032_create_table_pullreq_file_views.up.sql b/internal/store/database/migrate/postgres/0032_create_table_pullreq_file_views.up.sql new file mode 100644 index 0000000000..07fb3439cd --- /dev/null +++ b/internal/store/database/migrate/postgres/0032_create_table_pullreq_file_views.up.sql @@ -0,0 +1,26 @@ +CREATE TABLE pullreq_file_views ( + pullreq_file_view_pullreq_id INTEGER NOT NULL +,pullreq_file_view_principal_id INTEGER NOT NULL +,pullreq_file_view_path TEXT NOT NULL +,pullreq_file_view_sha TEXT NOT NULL +,pullreq_file_view_obsolete BOOLEAN NOT NULL +,pullreq_file_view_created BIGINT NOT NULL +,pullreq_file_view_updated BIGINT NOT NULL + +-- for every pr and user at most one entry per file (existing entries are overwritten) +-- this index is also used for quick lookup of viewed files of a user for a given pr +,CONSTRAINT pk_pullreq_file_views PRIMARY KEY (pullreq_file_view_pullreq_id, pullreq_file_view_principal_id, pullreq_file_view_path) + +,CONSTRAINT fk_pullreq_file_view_pullreq_id FOREIGN KEY (pullreq_file_view_pullreq_id) + REFERENCES pullreqs (pullreq_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +,CONSTRAINT fk_pullreq_file_view_principal_id FOREIGN KEY (pullreq_file_view_principal_id) + REFERENCES principals (principal_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +); + +-- this index is used to mark entries obsolete on branch update +CREATE INDEX pullreq_file_views_pullreq_id_file_path + ON pullreq_file_views(pullreq_file_view_pullreq_id, pullreq_file_view_path); \ No newline at end of file diff --git 
a/internal/store/database/migrate/postgres/0033_alter_ci_tables.up.sql b/internal/store/database/migrate/postgres/0033_alter_ci_tables.up.sql new file mode 100644 index 0000000000..68980e2a7b --- /dev/null +++ b/internal/store/database/migrate/postgres/0033_alter_ci_tables.up.sql @@ -0,0 +1,3 @@ +ALTER TABLE plugins + ADD COLUMN plugin_type TEXT NOT NULL, + ADD COLUMN plugin_version TEXT NOT NULL; \ No newline at end of file diff --git a/internal/store/database/migrate/sqlite/0001_create_table_a_principals.up.sql b/internal/store/database/migrate/sqlite/0001_create_table_a_principals.up.sql new file mode 100644 index 0000000000..31e20dedaf --- /dev/null +++ b/internal/store/database/migrate/sqlite/0001_create_table_a_principals.up.sql @@ -0,0 +1,20 @@ +CREATE TABLE principals ( +principal_id INTEGER PRIMARY KEY AUTOINCREMENT +,principal_uid TEXT +,principal_uid_unique TEXT +,principal_email TEXT +,principal_type TEXT +,principal_display_name TEXT +,principal_admin BOOLEAN +,principal_blocked BOOLEAN +,principal_salt TEXT +,principal_created BIGINT +,principal_updated BIGINT + +,principal_user_password TEXT + +,principal_sa_parent_type TEXT +,principal_sa_parent_id INTEGER + +,UNIQUE(principal_uid_unique) +); diff --git a/internal/store/database/migrate/sqlite/0001_create_table_b_spaces.up.sql b/internal/store/database/migrate/sqlite/0001_create_table_b_spaces.up.sql new file mode 100644 index 0000000000..f1a6cc7ce6 --- /dev/null +++ b/internal/store/database/migrate/sqlite/0001_create_table_b_spaces.up.sql @@ -0,0 +1,16 @@ +CREATE TABLE spaces ( + space_id INTEGER PRIMARY KEY AUTOINCREMENT +,space_version INTEGER NOT NULL DEFAULT 0 +,space_parent_id INTEGER DEFAULT NULL +,space_uid TEXT NOT NULL +,space_description TEXT +,space_is_public BOOLEAN NOT NULL +,space_created_by INTEGER NOT NULL +,space_created BIGINT NOT NULL +,space_updated BIGINT NOT NULL + +,CONSTRAINT fk_space_parent_id FOREIGN KEY (space_parent_id) + REFERENCES spaces (space_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +); \ No newline at end of file diff --git a/internal/store/database/migrate/sqlite/0001_create_table_c_repositories.up.sql b/internal/store/database/migrate/sqlite/0001_create_table_c_repositories.up.sql new file mode 100644 index 0000000000..20acb529e5 --- /dev/null +++ b/internal/store/database/migrate/sqlite/0001_create_table_c_repositories.up.sql @@ -0,0 +1,27 @@ +CREATE TABLE repositories ( + repo_id INTEGER PRIMARY KEY AUTOINCREMENT +,repo_version INTEGER NOT NULL DEFAULT 0 +,repo_parent_id INTEGER NOT NULL +,repo_uid TEXT NOT NULL +,repo_description TEXT +,repo_is_public BOOLEAN NOT NULL +,repo_created_by INTEGER NOT NULL +,repo_created BIGINT NOT NULL +,repo_updated BIGINT NOT NULL +,repo_git_uid TEXT NOT NULL +,repo_default_branch TEXT NOT NULL +,repo_fork_id INTEGER +,repo_pullreq_seq INTEGER NOT NULL +,repo_num_forks INTEGER NOT NULL +,repo_num_pulls INTEGER NOT NULL +,repo_num_closed_pulls INTEGER NOT NULL +,repo_num_open_pulls INTEGER NOT NULL +,repo_num_merged_pulls INTEGER NOT NULL + +,UNIQUE(repo_git_uid) + +,CONSTRAINT fk_repo_parent_id FOREIGN KEY (repo_parent_id) + REFERENCES spaces (space_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +); diff --git a/internal/store/database/migrate/sqlite/0001_create_table_d_paths.up.sql b/internal/store/database/migrate/sqlite/0001_create_table_d_paths.up.sql new file mode 100644 index 0000000000..a506ee67b3 --- /dev/null +++ b/internal/store/database/migrate/sqlite/0001_create_table_d_paths.up.sql @@ -0,0 +1,27 @@ +CREATE 
TABLE paths ( + path_id INTEGER PRIMARY KEY AUTOINCREMENT +,path_version INTEGER NOT NULL DEFAULT 0 +,path_value TEXT NOT NULL +,path_value_unique TEXT NOT NULL +,path_is_primary BOOLEAN DEFAULT NULL +,path_repo_id INTEGER +,path_space_id INTEGER +,path_created_by INTEGER NOT NULL +,path_created BIGINT NOT NULL +,path_updated BIGINT NOT NULL + +,UNIQUE(path_value_unique) + +,CONSTRAINT fk_path_created_by FOREIGN KEY (path_created_by) + REFERENCES principals (principal_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION +,CONSTRAINT fk_path_space_id FOREIGN KEY (path_space_id) + REFERENCES spaces (space_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +,CONSTRAINT fk_path_repo_id FOREIGN KEY (path_repo_id) + REFERENCES repositories (repo_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +); \ No newline at end of file diff --git a/internal/store/database/migrate/sqlite/0001_create_table_e_tokens.up.sql b/internal/store/database/migrate/sqlite/0001_create_table_e_tokens.up.sql new file mode 100644 index 0000000000..908e4d3319 --- /dev/null +++ b/internal/store/database/migrate/sqlite/0001_create_table_e_tokens.up.sql @@ -0,0 +1,16 @@ +CREATE TABLE tokens ( + token_id INTEGER PRIMARY KEY AUTOINCREMENT +,token_type TEXT COLLATE NOCASE +,token_uid TEXT COLLATE NOCASE +,token_principal_id INTEGER +,token_expires_at BIGINT +,token_grants BIGINT +,token_issued_at BIGINT +,token_created_by INTEGER +,UNIQUE(token_principal_id, token_uid COLLATE NOCASE) + +,CONSTRAINT fk_token_principal_id FOREIGN KEY (token_principal_id) + REFERENCES principals (principal_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +); diff --git a/internal/store/database/migrate/sqlite/0002_create_index_paths_repo_id_is_primary.up.sql b/internal/store/database/migrate/sqlite/0002_create_index_paths_repo_id_is_primary.up.sql new file mode 100644 index 0000000000..8112642f67 --- /dev/null +++ b/internal/store/database/migrate/sqlite/0002_create_index_paths_repo_id_is_primary.up.sql @@ -0,0 +1,2 @@ +CREATE UNIQUE INDEX paths_repo_id_is_primary +ON paths(path_repo_id, path_is_primary); diff --git a/internal/store/database/migrate/sqlite/0002_create_index_paths_space_id_is_primary.up.sql b/internal/store/database/migrate/sqlite/0002_create_index_paths_space_id_is_primary.up.sql new file mode 100644 index 0000000000..775afb1cb6 --- /dev/null +++ b/internal/store/database/migrate/sqlite/0002_create_index_paths_space_id_is_primary.up.sql @@ -0,0 +1,2 @@ +CREATE UNIQUE INDEX paths_space_id_is_primary +ON paths(path_space_id, path_is_primary); diff --git a/internal/store/database/migrate/sqlite/0002_create_index_principals_lower_email.up.sql b/internal/store/database/migrate/sqlite/0002_create_index_principals_lower_email.up.sql new file mode 100644 index 0000000000..4ad7abeb9f --- /dev/null +++ b/internal/store/database/migrate/sqlite/0002_create_index_principals_lower_email.up.sql @@ -0,0 +1,2 @@ +CREATE UNIQUE INDEX principals_lower_email +ON principals(LOWER(principal_email)); diff --git a/internal/store/database/migrate/sqlite/0002_create_index_principals_sa_parent_id_sa_parent_type.up.sql b/internal/store/database/migrate/sqlite/0002_create_index_principals_sa_parent_id_sa_parent_type.up.sql new file mode 100644 index 0000000000..df7d7bfcc5 --- /dev/null +++ b/internal/store/database/migrate/sqlite/0002_create_index_principals_sa_parent_id_sa_parent_type.up.sql @@ -0,0 +1,2 @@ +CREATE INDEX principals_sa_parent_id_sa_parent_type +ON principals(principal_sa_parent_id, principal_sa_parent_type); 
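A note on the two paths index migrations above (paths_repo_id_is_primary and paths_space_id_is_primary): path_is_primary is declared BOOLEAN DEFAULT NULL, and SQLite treats NULLs as distinct in unique indexes, so any number of alias paths with a NULL flag can coexist for the same repository or space while at most one row per owner can carry the primary flag (assuming the application only ever writes TRUE or NULL into the column). The following is a minimal sketch with hypothetical values; it is not part of the migration set.

-- primary path for repository 1: path_is_primary = TRUE
INSERT INTO paths (path_value, path_value_unique, path_is_primary, path_repo_id, path_created_by, path_created, path_updated)
VALUES ('acme/gitness', 'acme/gitness', TRUE, 1, 1, 0, 0);

-- alias path for the same repository: path_is_primary stays NULL and therefore
-- never collides with the primary row in paths_repo_id_is_primary
INSERT INTO paths (path_value, path_value_unique, path_is_primary, path_repo_id, path_created_by, path_created, path_updated)
VALUES ('acme/old-name', 'acme/old-name', NULL, 1, 1, 0, 0);

-- a second primary path for repository 1 would fail the unique index:
-- VALUES ('acme/renamed', 'acme/renamed', TRUE, 1, 1, 0, 0);

The space_paths table introduced in migration 0030 reuses the same pattern for its space_paths_space_id_is_primary index.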
diff --git a/internal/store/database/migrate/sqlite/0002_create_index_repositories_parent_id.up.sql b/internal/store/database/migrate/sqlite/0002_create_index_repositories_parent_id.up.sql new file mode 100644 index 0000000000..be95956a2a --- /dev/null +++ b/internal/store/database/migrate/sqlite/0002_create_index_repositories_parent_id.up.sql @@ -0,0 +1,2 @@ +CREATE INDEX repositories_parent_id +ON repositories(repo_parent_id); diff --git a/internal/store/database/migrate/sqlite/0002_create_index_spaces_parent_id.up.sql b/internal/store/database/migrate/sqlite/0002_create_index_spaces_parent_id.up.sql new file mode 100644 index 0000000000..8483589147 --- /dev/null +++ b/internal/store/database/migrate/sqlite/0002_create_index_spaces_parent_id.up.sql @@ -0,0 +1,2 @@ +CREATE INDEX spaces_parent_id +ON spaces(space_parent_id); diff --git a/internal/store/database/migrate/sqlite/0002_create_index_tokens_principal_id.up.sql b/internal/store/database/migrate/sqlite/0002_create_index_tokens_principal_id.up.sql new file mode 100644 index 0000000000..61306e8921 --- /dev/null +++ b/internal/store/database/migrate/sqlite/0002_create_index_tokens_principal_id.up.sql @@ -0,0 +1,2 @@ +CREATE INDEX tokens_principal_id +ON tokens(token_principal_id); diff --git a/internal/store/database/migrate/sqlite/0003_create_table_pullreqs.up.sql b/internal/store/database/migrate/sqlite/0003_create_table_pullreqs.up.sql new file mode 100644 index 0000000000..94a8b52868 --- /dev/null +++ b/internal/store/database/migrate/sqlite/0003_create_table_pullreqs.up.sql @@ -0,0 +1,44 @@ +CREATE TABLE pullreqs ( +pullreq_id INTEGER PRIMARY KEY AUTOINCREMENT +,pullreq_version INTEGER NOT NULL DEFAULT 0 +,pullreq_created_by INTEGER NOT NULL +,pullreq_created BIGINT NOT NULL +,pullreq_updated BIGINT NOT NULL +,pullreq_edited BIGINT NOT NULL +,pullreq_number INTEGER NOT NULL +,pullreq_state TEXT NOT NULL +,pullreq_is_draft TEXT NOT NULL DEFAULT FALSE +,pullreq_comment_count INTEGER NOT NULL DEFAULT 0 +,pullreq_title TEXT NOT NULL +,pullreq_description TEXT NOT NULL +,pullreq_source_repo_id INTEGER NOT NULL +,pullreq_source_branch TEXT NOT NULL +,pullreq_source_sha TEXT NOT NULL +,pullreq_target_repo_id INTEGER NOT NULL +,pullreq_target_branch TEXT NOT NULL +,pullreq_activity_seq INTEGER DEFAULT 0 +,pullreq_merged_by INTEGER +,pullreq_merged BIGINT +,pullreq_merge_method TEXT +,pullreq_merge_check_status TEXT NOT NULL +,pullreq_merge_target_sha TEXT +,pullreq_merge_base_sha TEXT +,pullreq_merge_sha TEXT +,pullreq_merge_conflicts TEXT +,CONSTRAINT fk_pullreq_created_by FOREIGN KEY (pullreq_created_by) + REFERENCES principals (principal_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION +,CONSTRAINT fk_pullreq_source_repo_id FOREIGN KEY (pullreq_source_repo_id) + REFERENCES repositories (repo_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE SET NULL +,CONSTRAINT fk_pullreq_target_repo_id FOREIGN KEY (pullreq_target_repo_id) + REFERENCES repositories (repo_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +,CONSTRAINT fk_pullreq_merged_by FOREIGN KEY (pullreq_merged_by) + REFERENCES principals (principal_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION +); diff --git a/internal/store/database/migrate/sqlite/0004_create_index_pullreqs_source_repo_branch_target_repo_branch.up.sql b/internal/store/database/migrate/sqlite/0004_create_index_pullreqs_source_repo_branch_target_repo_branch.up.sql new file mode 100644 index 0000000000..544387e114 --- /dev/null +++ 
b/internal/store/database/migrate/sqlite/0004_create_index_pullreqs_source_repo_branch_target_repo_branch.up.sql @@ -0,0 +1,3 @@ +CREATE UNIQUE INDEX pullreqs_source_repo_branch_target_repo_branch + ON pullreqs(pullreq_source_repo_id, pullreq_source_branch, pullreq_target_repo_id, pullreq_target_branch) + WHERE pullreq_state = 'open'; diff --git a/internal/store/database/migrate/sqlite/0004_create_index_pullreqs_target_repo_id_number.up.sql b/internal/store/database/migrate/sqlite/0004_create_index_pullreqs_target_repo_id_number.up.sql new file mode 100644 index 0000000000..a7e9be14d2 --- /dev/null +++ b/internal/store/database/migrate/sqlite/0004_create_index_pullreqs_target_repo_id_number.up.sql @@ -0,0 +1,2 @@ +CREATE UNIQUE INDEX pullreqs_target_repo_id_number +ON pullreqs(pullreq_target_repo_id, pullreq_number); diff --git a/internal/store/database/migrate/sqlite/0005_create_table_pullreq_activities.up.sql b/internal/store/database/migrate/sqlite/0005_create_table_pullreq_activities.up.sql new file mode 100644 index 0000000000..4b3479591f --- /dev/null +++ b/internal/store/database/migrate/sqlite/0005_create_table_pullreq_activities.up.sql @@ -0,0 +1,42 @@ +CREATE TABLE pullreq_activities ( + pullreq_activity_id INTEGER PRIMARY KEY AUTOINCREMENT +,pullreq_activity_version BIGINT NOT NULL +,pullreq_activity_created_by INTEGER +,pullreq_activity_created BIGINT NOT NULL +,pullreq_activity_updated BIGINT NOT NULL +,pullreq_activity_edited BIGINT NOT NULL +,pullreq_activity_deleted BIGINT +,pullreq_activity_parent_id INTEGER +,pullreq_activity_repo_id INTEGER NOT NULL +,pullreq_activity_pullreq_id INTEGER NOT NULL +,pullreq_activity_order INTEGER NOT NULL +,pullreq_activity_sub_order INTEGER NOT NULL +,pullreq_activity_reply_seq INTEGER NOT NULL +,pullreq_activity_type TEXT NOT NULL +,pullreq_activity_kind TEXT NOT NULL +,pullreq_activity_text TEXT NOT NULL +,pullreq_activity_payload TEXT NOT NULL DEFAULT '{}' +,pullreq_activity_metadata TEXT NOT NULL DEFAULT '{}' +,pullreq_activity_resolved_by INTEGER DEFAULT 0 +,pullreq_activity_resolved BIGINT NULL +,CONSTRAINT fk_pullreq_activities_created_by FOREIGN KEY (pullreq_activity_created_by) + REFERENCES principals (principal_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION +,CONSTRAINT fk_pullreq_activities_parent_id FOREIGN KEY (pullreq_activity_parent_id) + REFERENCES pullreq_activities (pullreq_activity_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +,CONSTRAINT fk_pullreq_activities_repo_id FOREIGN KEY (pullreq_activity_repo_id) + REFERENCES repositories (repo_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +,CONSTRAINT fk_pullreq_activities_pullreq_id FOREIGN KEY (pullreq_activity_pullreq_id) + REFERENCES pullreqs (pullreq_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +,CONSTRAINT fk_pullreq_activities_resolved_by FOREIGN KEY (pullreq_activity_resolved_by) + REFERENCES principals (principal_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION +); diff --git a/internal/store/database/migrate/sqlite/0006_create_index_pullreq_activities_pullreq_id_order_sub_order.up.sql b/internal/store/database/migrate/sqlite/0006_create_index_pullreq_activities_pullreq_id_order_sub_order.up.sql new file mode 100644 index 0000000000..73b0d5361f --- /dev/null +++ b/internal/store/database/migrate/sqlite/0006_create_index_pullreq_activities_pullreq_id_order_sub_order.up.sql @@ -0,0 +1,2 @@ +CREATE UNIQUE INDEX pullreq_activities_pullreq_id_order_sub_order +ON 
pullreq_activities(pullreq_activity_pullreq_id, pullreq_activity_order, pullreq_activity_sub_order); diff --git a/internal/store/database/migrate/sqlite/0007_create_table_webhooks.up.sql b/internal/store/database/migrate/sqlite/0007_create_table_webhooks.up.sql new file mode 100644 index 0000000000..178b0ed77c --- /dev/null +++ b/internal/store/database/migrate/sqlite/0007_create_table_webhooks.up.sql @@ -0,0 +1,29 @@ +CREATE TABLE webhooks ( +webhook_id INTEGER PRIMARY KEY AUTOINCREMENT +,webhook_version INTEGER NOT NULL DEFAULT 0 +,webhook_created_by INTEGER NOT NULL +,webhook_created BIGINT NOT NULL +,webhook_updated BIGINT NOT NULL +,webhook_space_id INTEGER +,webhook_repo_id INTEGER +,webhook_display_name TEXT NOT NULL +,webhook_description TEXT NOT NULL +,webhook_url TEXT NOT NULL +,webhook_secret TEXT NOT NULL +,webhook_enabled BOOLEAN NOT NULL +,webhook_insecure BOOLEAN NOT NULL +,webhook_triggers TEXT NOT NULL +,webhook_latest_execution_result TEXT +,CONSTRAINT fk_webhook_created_by FOREIGN KEY (webhook_created_by) + REFERENCES principals (principal_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION +,CONSTRAINT fk_webhook_space_id FOREIGN KEY (webhook_space_id) + REFERENCES spaces (space_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +,CONSTRAINT fk_webhook_repo_id FOREIGN KEY (webhook_repo_id) + REFERENCES repositories (repo_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +); diff --git a/internal/store/database/migrate/sqlite/0008_create_index_webhooks_repo_id.up.sql b/internal/store/database/migrate/sqlite/0008_create_index_webhooks_repo_id.up.sql new file mode 100644 index 0000000000..1c277b98f7 --- /dev/null +++ b/internal/store/database/migrate/sqlite/0008_create_index_webhooks_repo_id.up.sql @@ -0,0 +1,2 @@ +CREATE INDEX webhooks_repo_id +ON webhooks(webhook_repo_id); diff --git a/internal/store/database/migrate/sqlite/0008_create_index_webhooks_space_id.up.sql b/internal/store/database/migrate/sqlite/0008_create_index_webhooks_space_id.up.sql new file mode 100644 index 0000000000..090c64bbd4 --- /dev/null +++ b/internal/store/database/migrate/sqlite/0008_create_index_webhooks_space_id.up.sql @@ -0,0 +1,2 @@ +CREATE INDEX webhooks_space_id +ON webhooks(webhook_space_id); diff --git a/internal/store/database/migrate/sqlite/0009_create_table_webhook_executions.up.sql b/internal/store/database/migrate/sqlite/0009_create_table_webhook_executions.up.sql new file mode 100644 index 0000000000..bdef648cf2 --- /dev/null +++ b/internal/store/database/migrate/sqlite/0009_create_table_webhook_executions.up.sql @@ -0,0 +1,19 @@ +CREATE TABLE webhook_executions ( +webhook_execution_id INTEGER PRIMARY KEY AUTOINCREMENT +,webhook_execution_retrigger_of INTEGER +,webhook_execution_retriggerable BOOLEAN NOT NULL +,webhook_execution_webhook_id INTEGER NOT NULL +,webhook_execution_trigger_type TEXT NOT NULL +,webhook_execution_trigger_id TEXT NOT NULL +,webhook_execution_result TEXT NOT NULL +,webhook_execution_created BIGINT NOT NULL +,webhook_execution_duration BIGINT NOT NULL +,webhook_execution_error TEXT NOT NULL +,webhook_execution_request_url TEXT NOT NULL +,webhook_execution_request_headers TEXT NOT NULL +,webhook_execution_request_body TEXT NOT NULL +,webhook_execution_response_status_code INTEGER NOT NULL +,webhook_execution_response_status TEXT NOT NULL +,webhook_execution_response_headers TEXT NOT NULL +,webhook_execution_response_body TEXT NOT NULL +); diff --git 
a/internal/store/database/migrate/sqlite/0010_create_index_webhook_executions_webhook_id.up.sql b/internal/store/database/migrate/sqlite/0010_create_index_webhook_executions_webhook_id.up.sql new file mode 100644 index 0000000000..2c008da018 --- /dev/null +++ b/internal/store/database/migrate/sqlite/0010_create_index_webhook_executions_webhook_id.up.sql @@ -0,0 +1,2 @@ +CREATE INDEX webhook_executions_webhook_id +ON webhook_executions(webhook_execution_webhook_id); diff --git a/internal/store/database/migrate/sqlite/0011_create_table_pullreq_reviews.up.sql b/internal/store/database/migrate/sqlite/0011_create_table_pullreq_reviews.up.sql new file mode 100644 index 0000000000..be04baf49a --- /dev/null +++ b/internal/store/database/migrate/sqlite/0011_create_table_pullreq_reviews.up.sql @@ -0,0 +1,17 @@ +CREATE TABLE pullreq_reviews ( +pullreq_review_id INTEGER PRIMARY KEY AUTOINCREMENT +,pullreq_review_created_by INTEGER NOT NULL +,pullreq_review_created BIGINT NOT NULL +,pullreq_review_updated BIGINT NOT NULL +,pullreq_review_pullreq_id INTEGER NOT NULL +,pullreq_review_decision TEXT NOT NULL +,pullreq_review_sha TEXT NOT NULL +,CONSTRAINT fk_pullreq_review_created_by FOREIGN KEY (pullreq_review_created_by) + REFERENCES principals (principal_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION +,CONSTRAINT fk_pullreq_review_pullreq_id FOREIGN KEY (pullreq_review_pullreq_id) + REFERENCES pullreqs (pullreq_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +); \ No newline at end of file diff --git a/internal/store/database/migrate/sqlite/0012_create_index_pullreq_reviews_pullreq_id.up.sql b/internal/store/database/migrate/sqlite/0012_create_index_pullreq_reviews_pullreq_id.up.sql new file mode 100644 index 0000000000..01db26c1a2 --- /dev/null +++ b/internal/store/database/migrate/sqlite/0012_create_index_pullreq_reviews_pullreq_id.up.sql @@ -0,0 +1,2 @@ +CREATE INDEX index_pullreq_review_pullreq_id +ON pullreq_reviews(pullreq_review_pullreq_id); diff --git a/internal/store/database/migrate/sqlite/0013_create_table_pullreq_reviewers.down.sql b/internal/store/database/migrate/sqlite/0013_create_table_pullreq_reviewers.down.sql new file mode 100644 index 0000000000..8ef30bf989 --- /dev/null +++ b/internal/store/database/migrate/sqlite/0013_create_table_pullreq_reviewers.down.sql @@ -0,0 +1,2 @@ +-- Can't migrate down from this point. +-- This file must be present here. 
\ No newline at end of file diff --git a/internal/store/database/migrate/sqlite/0013_create_table_pullreq_reviewers.up.sql b/internal/store/database/migrate/sqlite/0013_create_table_pullreq_reviewers.up.sql new file mode 100644 index 0000000000..334b47f8d0 --- /dev/null +++ b/internal/store/database/migrate/sqlite/0013_create_table_pullreq_reviewers.up.sql @@ -0,0 +1,33 @@ +CREATE TABLE pullreq_reviewers ( +pullreq_reviewer_pullreq_id INTEGER NOT NULL +,pullreq_reviewer_principal_id INTEGER NOT NULL +,pullreq_reviewer_created_by INTEGER NOT NULL +,pullreq_reviewer_created BIGINT NOT NULL +,pullreq_reviewer_updated BIGINT NOT NULL +,pullreq_reviewer_repo_id INTEGER NOT NULL +,pullreq_reviewer_type TEXT NOT NULL +,pullreq_reviewer_latest_review_id INTEGER +,pullreq_reviewer_review_decision TEXT NOT NULL +,pullreq_reviewer_sha TEXT NOT NULL +,CONSTRAINT pk_pullreq_reviewers PRIMARY KEY (pullreq_reviewer_pullreq_id, pullreq_reviewer_principal_id) +,CONSTRAINT fk_pullreq_reviewer_pullreq_id FOREIGN KEY (pullreq_reviewer_pullreq_id) + REFERENCES pullreqs (pullreq_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +,CONSTRAINT fk_pullreq_reviewer_user_id FOREIGN KEY (pullreq_reviewer_principal_id) + REFERENCES principals (principal_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION +,CONSTRAINT fk_pullreq_reviewer_created_by FOREIGN KEY (pullreq_reviewer_created_by) + REFERENCES principals (principal_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION +,CONSTRAINT fk_pullreq_reviewer_repo_id FOREIGN KEY (pullreq_reviewer_repo_id) + REFERENCES repositories (repo_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +,CONSTRAINT fk_pullreq_reviewer_latest_review_id FOREIGN KEY (pullreq_reviewer_latest_review_id) + REFERENCES pullreq_reviews (pullreq_review_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE SET NULL +); \ No newline at end of file diff --git a/internal/store/database/migrate/sqlite/0014_alter_pullreq_activity_code_comments.down.sql b/internal/store/database/migrate/sqlite/0014_alter_pullreq_activity_code_comments.down.sql new file mode 100644 index 0000000000..87ee284694 --- /dev/null +++ b/internal/store/database/migrate/sqlite/0014_alter_pullreq_activity_code_comments.down.sql @@ -0,0 +1,8 @@ +ALTER TABLE pullreq_activities DROP COLUMN pullreq_activity_outdated; +ALTER TABLE pullreq_activities DROP COLUMN pullreq_activity_code_comment_merge_base_sha; +ALTER TABLE pullreq_activities DROP COLUMN pullreq_activity_code_comment_source_sha; +ALTER TABLE pullreq_activities DROP COLUMN pullreq_activity_code_comment_path; +ALTER TABLE pullreq_activities DROP COLUMN pullreq_activity_code_comment_line_new; +ALTER TABLE pullreq_activities DROP COLUMN pullreq_activity_code_comment_span_new; +ALTER TABLE pullreq_activities DROP COLUMN pullreq_activity_code_comment_line_old; +ALTER TABLE pullreq_activities DROP COLUMN pullreq_activity_code_comment_span_old; diff --git a/internal/store/database/migrate/sqlite/0014_alter_pullreq_activity_code_comments.up.sql b/internal/store/database/migrate/sqlite/0014_alter_pullreq_activity_code_comments.up.sql new file mode 100644 index 0000000000..3bddb0ce84 --- /dev/null +++ b/internal/store/database/migrate/sqlite/0014_alter_pullreq_activity_code_comments.up.sql @@ -0,0 +1,8 @@ +ALTER TABLE pullreq_activities ADD COLUMN pullreq_activity_outdated BOOLEAN; +ALTER TABLE pullreq_activities ADD COLUMN pullreq_activity_code_comment_merge_base_sha TEXT; +ALTER TABLE pullreq_activities ADD COLUMN 
pullreq_activity_code_comment_source_sha TEXT; +ALTER TABLE pullreq_activities ADD COLUMN pullreq_activity_code_comment_path TEXT; +ALTER TABLE pullreq_activities ADD COLUMN pullreq_activity_code_comment_line_new INTEGER; +ALTER TABLE pullreq_activities ADD COLUMN pullreq_activity_code_comment_span_new INTEGER; +ALTER TABLE pullreq_activities ADD COLUMN pullreq_activity_code_comment_line_old INTEGER; +ALTER TABLE pullreq_activities ADD COLUMN pullreq_activity_code_comment_span_old INTEGER; \ No newline at end of file diff --git a/internal/store/database/migrate/sqlite/0015_alter_pullreq_merge_base_not_nullable.down.sql b/internal/store/database/migrate/sqlite/0015_alter_pullreq_merge_base_not_nullable.down.sql new file mode 100644 index 0000000000..e2648bb562 --- /dev/null +++ b/internal/store/database/migrate/sqlite/0015_alter_pullreq_merge_base_not_nullable.down.sql @@ -0,0 +1,4 @@ +ALTER TABLE pullreqs ADD COLUMN pullreq_merge_base_sha_nullable TEXT; +UPDATE pullreqs SET pullreq_merge_base_sha_nullable = pullreq_merge_base_sha WHERE pullreq_merge_base_sha <> ''; +ALTER TABLE pullreqs DROP COLUMN pullreq_merge_base_sha; +ALTER TABLE pullreqs RENAME COLUMN pullreq_merge_base_sha_nullable TO pullreq_merge_base_sha; diff --git a/internal/store/database/migrate/sqlite/0015_alter_pullreq_merge_base_not_nullable.up.sql b/internal/store/database/migrate/sqlite/0015_alter_pullreq_merge_base_not_nullable.up.sql new file mode 100644 index 0000000000..8f3ec955d0 --- /dev/null +++ b/internal/store/database/migrate/sqlite/0015_alter_pullreq_merge_base_not_nullable.up.sql @@ -0,0 +1,4 @@ +ALTER TABLE pullreqs ADD COLUMN pullreq_merge_base_sha_not_nullable TEXT NOT NULL DEFAULT ''; +UPDATE pullreqs SET pullreq_merge_base_sha_not_nullable = pullreq_merge_base_sha WHERE pullreq_merge_base_sha IS NOT NULL; +ALTER TABLE pullreqs DROP COLUMN pullreq_merge_base_sha; +ALTER TABLE pullreqs RENAME COLUMN pullreq_merge_base_sha_not_nullable TO pullreq_merge_base_sha; diff --git a/internal/store/database/migrate/sqlite/0016_alter_pullreq_add_unresolved.down.sql b/internal/store/database/migrate/sqlite/0016_alter_pullreq_add_unresolved.down.sql new file mode 100644 index 0000000000..e596101935 --- /dev/null +++ b/internal/store/database/migrate/sqlite/0016_alter_pullreq_add_unresolved.down.sql @@ -0,0 +1 @@ +ALTER TABLE pullreqs DROP COLUMN pullreq_unresolved_count; diff --git a/internal/store/database/migrate/sqlite/0016_alter_pullreq_add_unresolved.up.sql b/internal/store/database/migrate/sqlite/0016_alter_pullreq_add_unresolved.up.sql new file mode 100644 index 0000000000..18449aa105 --- /dev/null +++ b/internal/store/database/migrate/sqlite/0016_alter_pullreq_add_unresolved.up.sql @@ -0,0 +1,18 @@ +ALTER TABLE pullreqs ADD COLUMN pullreq_unresolved_count INTEGER NOT NULL DEFAULT 0; + +WITH unresolved_counts AS ( + SELECT + pullreq_activity_pullreq_id AS "unresolved_pullreq_id", + COUNT(*) AS "unresolved_count" + FROM pullreq_activities + WHERE + pullreq_activity_sub_order = 0 AND + pullreq_activity_resolved IS NULL AND + pullreq_activity_deleted IS NULL AND + pullreq_activity_kind <> 'system' + GROUP BY pullreq_activity_pullreq_id +) +UPDATE pullreqs +SET pullreq_unresolved_count = unresolved_counts.unresolved_count +FROM unresolved_counts +WHERE pullreq_id = unresolved_pullreq_id; diff --git a/internal/store/database/migrate/sqlite/0017_create_table_checks.down.sql b/internal/store/database/migrate/sqlite/0017_create_table_checks.down.sql new file mode 100644 index 0000000000..fae645b93d --- /dev/null +++ 
b/internal/store/database/migrate/sqlite/0017_create_table_checks.down.sql @@ -0,0 +1,6 @@ +DROP INDEX reqchecks_repo_id; +DROP TABLE reqchecks; + +DROP INDEX checks_repo_id_created; +DROP INDEX checks_repo_id_commit_sha_uid; +DROP TABLE checks; diff --git a/internal/store/database/migrate/sqlite/0017_create_table_checks.up.sql b/internal/store/database/migrate/sqlite/0017_create_table_checks.up.sql new file mode 100644 index 0000000000..3139ce243a --- /dev/null +++ b/internal/store/database/migrate/sqlite/0017_create_table_checks.up.sql @@ -0,0 +1,49 @@ +CREATE TABLE checks ( + check_id INTEGER PRIMARY KEY AUTOINCREMENT +,check_created_by INTEGER NOT NULL +,check_created BIGINT NOT NULL +,check_updated BIGINT NOT NULL +,check_repo_id INTEGER NOT NULL +,check_commit_sha TEXT NOT NULL +,check_type TEXT NOT NULL +,check_uid TEXT NOT NULL +,check_status TEXT NOT NULL +,check_summary TEXT NOT NULL +,check_link TEXT NOT NULL +,check_payload TEXT NOT NULL +,check_metadata TEXT NOT NULL +,CONSTRAINT fk_check_created_by FOREIGN KEY (check_created_by) + REFERENCES principals (principal_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION +,CONSTRAINT fk_check_repo_id FOREIGN KEY (check_repo_id) + REFERENCES repositories (repo_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +); + +CREATE UNIQUE INDEX checks_repo_id_commit_sha_uid + ON checks(check_repo_id, check_commit_sha, check_uid); + +CREATE INDEX checks_repo_id_created + ON checks(check_repo_id, check_created); + +CREATE TABLE reqchecks ( + reqcheck_id INTEGER PRIMARY KEY AUTOINCREMENT +,reqcheck_created_by INTEGER NOT NULL +,reqcheck_created BIGINT NOT NULL +,reqcheck_repo_id INTEGER NOT NULL +,reqcheck_branch_pattern TEXT NOT NULL +,reqcheck_check_uid TEXT NOT NULL +,CONSTRAINT fk_check_created_by FOREIGN KEY (reqcheck_created_by) + REFERENCES principals (principal_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION +,CONSTRAINT fk_check_repo_id FOREIGN KEY (reqcheck_repo_id) + REFERENCES repositories (repo_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +); + +CREATE INDEX reqchecks_repo_id + ON reqchecks(reqcheck_repo_id); diff --git a/internal/store/database/migrate/sqlite/0018_alter_check_add_payload_version.down.sql b/internal/store/database/migrate/sqlite/0018_alter_check_add_payload_version.down.sql new file mode 100644 index 0000000000..4298c5757b --- /dev/null +++ b/internal/store/database/migrate/sqlite/0018_alter_check_add_payload_version.down.sql @@ -0,0 +1,3 @@ +ALTER TABLE checks ADD COLUMN check_type TEXT NOT NULL DEFAULT ''; +ALTER TABLE checks DROP COLUMN check_payload_version; +ALTER TABLE checks DROP COLUMN check_payload_kind; diff --git a/internal/store/database/migrate/sqlite/0018_alter_check_add_payload_version.up.sql b/internal/store/database/migrate/sqlite/0018_alter_check_add_payload_version.up.sql new file mode 100644 index 0000000000..42f2c613c0 --- /dev/null +++ b/internal/store/database/migrate/sqlite/0018_alter_check_add_payload_version.up.sql @@ -0,0 +1,3 @@ +ALTER TABLE checks ADD COLUMN check_payload_version TEXT NOT NULL DEFAULT ''; +ALTER TABLE checks ADD COLUMN check_payload_kind TEXT NOT NULL DEFAULT ''; +ALTER TABLE checks DROP COLUMN check_type; diff --git a/internal/store/database/migrate/sqlite/0019_create_table_memberships.down.sql b/internal/store/database/migrate/sqlite/0019_create_table_memberships.down.sql new file mode 100644 index 0000000000..cb23bba6b0 --- /dev/null +++ b/internal/store/database/migrate/sqlite/0019_create_table_memberships.down.sql @@ -0,0 
+1,2 @@ +DROP TABLE memberships; + diff --git a/internal/store/database/migrate/sqlite/0019_create_table_memberships.up.sql b/internal/store/database/migrate/sqlite/0019_create_table_memberships.up.sql new file mode 100644 index 0000000000..e0b9ef5b76 --- /dev/null +++ b/internal/store/database/migrate/sqlite/0019_create_table_memberships.up.sql @@ -0,0 +1,21 @@ +CREATE TABLE memberships ( + membership_space_id INTEGER NOT NULL +,membership_principal_id INTEGER NOT NULL +,membership_created_by INTEGER NOT NULL +,membership_created BIGINT NOT NULL +,membership_updated BIGINT NOT NULL +,membership_role TEXT NOT NULL +,CONSTRAINT pk_memberships PRIMARY KEY (membership_space_id, membership_principal_id) +,CONSTRAINT fk_membership_space_id FOREIGN KEY (membership_space_id) + REFERENCES spaces (space_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +,CONSTRAINT fk_membership_principal_id FOREIGN KEY (membership_principal_id) + REFERENCES principals (principal_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +,CONSTRAINT fk_membership_created_by FOREIGN KEY (membership_created_by) + REFERENCES principals (principal_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION +); diff --git a/internal/store/database/migrate/sqlite/0020_alter_pullreq_source_repo_id_constraint.down.sql b/internal/store/database/migrate/sqlite/0020_alter_pullreq_source_repo_id_constraint.down.sql new file mode 100644 index 0000000000..b534b9b562 --- /dev/null +++ b/internal/store/database/migrate/sqlite/0020_alter_pullreq_source_repo_id_constraint.down.sql @@ -0,0 +1,116 @@ +CREATE TABLE pullreqs_new ( + pullreq_id INTEGER PRIMARY KEY AUTOINCREMENT +,pullreq_version INTEGER NOT NULL DEFAULT 0 +,pullreq_created_by INTEGER NOT NULL +,pullreq_created BIGINT NOT NULL +,pullreq_updated BIGINT NOT NULL +,pullreq_edited BIGINT NOT NULL +,pullreq_number INTEGER NOT NULL +,pullreq_state TEXT NOT NULL +,pullreq_is_draft TEXT NOT NULL DEFAULT FALSE +,pullreq_comment_count INTEGER NOT NULL DEFAULT 0 +,pullreq_title TEXT NOT NULL +,pullreq_description TEXT NOT NULL +,pullreq_source_repo_id INTEGER NOT NULL +,pullreq_source_branch TEXT NOT NULL +,pullreq_source_sha TEXT NOT NULL +,pullreq_target_repo_id INTEGER NOT NULL +,pullreq_target_branch TEXT NOT NULL +,pullreq_activity_seq INTEGER DEFAULT 0 +,pullreq_merged_by INTEGER +,pullreq_merged BIGINT +,pullreq_merge_method TEXT +,pullreq_merge_check_status TEXT NOT NULL +,pullreq_merge_target_sha TEXT +,pullreq_merge_sha TEXT +,pullreq_merge_conflicts TEXT +,pullreq_merge_base_sha TEXT NOT NULL DEFAULT '' +,pullreq_unresolved_count INTEGER NOT NULL DEFAULT 0 +,CONSTRAINT fk_pullreq_created_by FOREIGN KEY (pullreq_created_by) + REFERENCES principals + ON UPDATE NO ACTION + ON DELETE NO ACTION +,CONSTRAINT fk_pullreq_source_repo_id FOREIGN KEY (pullreq_source_repo_id) + REFERENCES repositories + ON UPDATE NO ACTION + ON DELETE SET NULL +,CONSTRAINT fk_pullreq_target_repo_id FOREIGN KEY (pullreq_target_repo_id) + REFERENCES repositories + ON UPDATE NO ACTION + ON DELETE CASCADE +,CONSTRAINT fk_pullreq_merged_by FOREIGN KEY (pullreq_merged_by) + REFERENCES principals + ON UPDATE NO ACTION + ON DELETE NO ACTION +); + +INSERT INTO pullreqs_new( + pullreq_id +,pullreq_version +,pullreq_created_by +,pullreq_created +,pullreq_updated +,pullreq_edited +,pullreq_number +,pullreq_state +,pullreq_is_draft +,pullreq_comment_count +,pullreq_title +,pullreq_description +,pullreq_source_repo_id +,pullreq_source_branch +,pullreq_source_sha +,pullreq_target_repo_id 
+,pullreq_target_branch +,pullreq_activity_seq +,pullreq_merged_by +,pullreq_merged +,pullreq_merge_method +,pullreq_merge_check_status +,pullreq_merge_target_sha +,pullreq_merge_sha +,pullreq_merge_conflicts +,pullreq_merge_base_sha +,pullreq_unresolved_count +) +SELECT + pullreq_id +,pullreq_version +,pullreq_created_by +,pullreq_created +,pullreq_updated +,pullreq_edited +,pullreq_number +,pullreq_state +,pullreq_is_draft +,pullreq_comment_count +,pullreq_title +,pullreq_description +,pullreq_source_repo_id +,pullreq_source_branch +,pullreq_source_sha +,pullreq_target_repo_id +,pullreq_target_branch +,pullreq_activity_seq +,pullreq_merged_by +,pullreq_merged +,pullreq_merge_method +,pullreq_merge_check_status +,pullreq_merge_target_sha +,pullreq_merge_sha +,pullreq_merge_conflicts +,pullreq_merge_base_sha +,pullreq_unresolved_count +FROM pullreqs; + +DROP TABLE pullreqs; + +ALTER TABLE pullreqs_new + RENAME TO pullreqs; + +CREATE UNIQUE INDEX pullreqs_source_repo_branch_target_repo_branch + ON pullreqs (pullreq_source_repo_id, pullreq_source_branch, pullreq_target_repo_id, pullreq_target_branch) + WHERE pullreq_state = 'open'; + +CREATE UNIQUE INDEX pullreqs_target_repo_id_number + ON pullreqs (pullreq_target_repo_id, pullreq_number); diff --git a/internal/store/database/migrate/sqlite/0020_alter_pullreq_source_repo_id_constraint.up.sql b/internal/store/database/migrate/sqlite/0020_alter_pullreq_source_repo_id_constraint.up.sql new file mode 100644 index 0000000000..b3dbc9e65f --- /dev/null +++ b/internal/store/database/migrate/sqlite/0020_alter_pullreq_source_repo_id_constraint.up.sql @@ -0,0 +1,116 @@ +CREATE TABLE pullreqs_new ( + pullreq_id INTEGER PRIMARY KEY AUTOINCREMENT +,pullreq_version INTEGER NOT NULL DEFAULT 0 +,pullreq_created_by INTEGER NOT NULL +,pullreq_created BIGINT NOT NULL +,pullreq_updated BIGINT NOT NULL +,pullreq_edited BIGINT NOT NULL +,pullreq_number INTEGER NOT NULL +,pullreq_state TEXT NOT NULL +,pullreq_is_draft TEXT NOT NULL DEFAULT FALSE +,pullreq_comment_count INTEGER NOT NULL DEFAULT 0 +,pullreq_title TEXT NOT NULL +,pullreq_description TEXT NOT NULL +,pullreq_source_repo_id INTEGER NOT NULL +,pullreq_source_branch TEXT NOT NULL +,pullreq_source_sha TEXT NOT NULL +,pullreq_target_repo_id INTEGER NOT NULL +,pullreq_target_branch TEXT NOT NULL +,pullreq_activity_seq INTEGER DEFAULT 0 +,pullreq_merged_by INTEGER +,pullreq_merged BIGINT +,pullreq_merge_method TEXT +,pullreq_merge_check_status TEXT NOT NULL +,pullreq_merge_target_sha TEXT +,pullreq_merge_sha TEXT +,pullreq_merge_conflicts TEXT +,pullreq_merge_base_sha TEXT NOT NULL DEFAULT '' +,pullreq_unresolved_count INTEGER NOT NULL DEFAULT 0 +,CONSTRAINT fk_pullreq_created_by FOREIGN KEY (pullreq_created_by) + REFERENCES principals + ON UPDATE NO ACTION + ON DELETE NO ACTION +,CONSTRAINT fk_pullreq_source_repo_id FOREIGN KEY (pullreq_source_repo_id) + REFERENCES repositories + ON UPDATE NO ACTION + ON DELETE CASCADE +,CONSTRAINT fk_pullreq_target_repo_id FOREIGN KEY (pullreq_target_repo_id) + REFERENCES repositories + ON UPDATE NO ACTION + ON DELETE CASCADE +,CONSTRAINT fk_pullreq_merged_by FOREIGN KEY (pullreq_merged_by) + REFERENCES principals + ON UPDATE NO ACTION + ON DELETE NO ACTION +); + +INSERT INTO pullreqs_new( + pullreq_id +,pullreq_version +,pullreq_created_by +,pullreq_created +,pullreq_updated +,pullreq_edited +,pullreq_number +,pullreq_state +,pullreq_is_draft +,pullreq_comment_count +,pullreq_title +,pullreq_description +,pullreq_source_repo_id +,pullreq_source_branch +,pullreq_source_sha 
+,pullreq_target_repo_id +,pullreq_target_branch +,pullreq_activity_seq +,pullreq_merged_by +,pullreq_merged +,pullreq_merge_method +,pullreq_merge_check_status +,pullreq_merge_target_sha +,pullreq_merge_sha +,pullreq_merge_conflicts +,pullreq_merge_base_sha +,pullreq_unresolved_count +) +SELECT + pullreq_id +,pullreq_version +,pullreq_created_by +,pullreq_created +,pullreq_updated +,pullreq_edited +,pullreq_number +,pullreq_state +,pullreq_is_draft +,pullreq_comment_count +,pullreq_title +,pullreq_description +,pullreq_source_repo_id +,pullreq_source_branch +,pullreq_source_sha +,pullreq_target_repo_id +,pullreq_target_branch +,pullreq_activity_seq +,pullreq_merged_by +,pullreq_merged +,pullreq_merge_method +,pullreq_merge_check_status +,pullreq_merge_target_sha +,pullreq_merge_sha +,pullreq_merge_conflicts +,pullreq_merge_base_sha +,pullreq_unresolved_count +FROM pullreqs; + +DROP TABLE pullreqs; + +ALTER TABLE pullreqs_new + RENAME TO pullreqs; + +CREATE UNIQUE INDEX pullreqs_source_repo_branch_target_repo_branch + ON pullreqs (pullreq_source_repo_id, pullreq_source_branch, pullreq_target_repo_id, pullreq_target_branch) + WHERE pullreq_state = 'open'; + +CREATE UNIQUE INDEX pullreqs_target_repo_id_number + ON pullreqs (pullreq_target_repo_id, pullreq_number); diff --git a/internal/store/database/migrate/sqlite/0021_alter_table_webhook_add_internal_down.sql b/internal/store/database/migrate/sqlite/0021_alter_table_webhook_add_internal_down.sql new file mode 100644 index 0000000000..68d596a51c --- /dev/null +++ b/internal/store/database/migrate/sqlite/0021_alter_table_webhook_add_internal_down.sql @@ -0,0 +1 @@ +ALTER TABLE webhooks DROP COLUMN webhook_internal; diff --git a/internal/store/database/migrate/sqlite/0021_alter_table_webhook_add_internal_up.sql b/internal/store/database/migrate/sqlite/0021_alter_table_webhook_add_internal_up.sql new file mode 100644 index 0000000000..e0d32cb7c7 --- /dev/null +++ b/internal/store/database/migrate/sqlite/0021_alter_table_webhook_add_internal_up.sql @@ -0,0 +1,2 @@ +ALTER TABLE webhooks + ADD COLUMN webhook_internal BOOLEAN NOT NULL DEFAULT false; \ No newline at end of file diff --git a/internal/store/database/migrate/sqlite/0022_create_table_jobs.down.sql b/internal/store/database/migrate/sqlite/0022_create_table_jobs.down.sql new file mode 100644 index 0000000000..7c5d0fba0d --- /dev/null +++ b/internal/store/database/migrate/sqlite/0022_create_table_jobs.down.sql @@ -0,0 +1,4 @@ +DROP INDEX jobs_last_executed; +DROP INDEX jobs_run_deadline; +DROP INDEX jobs_scheduled; +DROP TABLE jobs; diff --git a/internal/store/database/migrate/sqlite/0022_create_table_jobs.up.sql b/internal/store/database/migrate/sqlite/0022_create_table_jobs.up.sql new file mode 100644 index 0000000000..9b625f378c --- /dev/null +++ b/internal/store/database/migrate/sqlite/0022_create_table_jobs.up.sql @@ -0,0 +1,35 @@ +CREATE TABLE jobs ( + job_uid TEXT NOT NULL +,job_created BIGINT NOT NULL +,job_updated BIGINT NOT NULL +,job_type TEXT NOT NULL +,job_priority INTEGER NOT NULL +,job_data TEXT NOT NULL +,job_result TEXT NOT NULL +,job_max_duration_seconds INTEGER NOT NULL +,job_max_retries INTEGER NOT NULL +,job_state TEXT NOT NULL +,job_scheduled BIGINT NOT NULL +,job_total_executions INTEGER +,job_run_by TEXT NOT NULL +,job_run_deadline BIGINT +,job_run_progress INTEGER NOT NULL +,job_last_executed BIGINT +,job_is_recurring BOOLEAN NOT NULL +,job_recurring_cron TEXT NOT NULL +,job_consecutive_failures INTEGER NOT NULL +,job_last_failure_error TEXT NOT NULL +,CONSTRAINT 
pk_jobs_uid PRIMARY KEY (job_uid) +); + +CREATE INDEX jobs_scheduled + ON jobs(job_scheduled) + WHERE job_state = 'scheduled'; + +CREATE INDEX jobs_run_deadline + ON jobs(job_run_deadline) + WHERE job_state = 'running'; + +CREATE INDEX jobs_last_executed + ON jobs(job_last_executed) + WHERE job_state = 'finished' OR job_state = 'failed'; diff --git a/internal/store/database/migrate/sqlite/0023_index_jobs_last_executed.down.sql b/internal/store/database/migrate/sqlite/0023_index_jobs_last_executed.down.sql new file mode 100644 index 0000000000..098e193fe3 --- /dev/null +++ b/internal/store/database/migrate/sqlite/0023_index_jobs_last_executed.down.sql @@ -0,0 +1,4 @@ +DROP INDEX jobs_last_executed; +CREATE INDEX jobs_last_executed + ON jobs(job_last_executed) + WHERE job_state = 'finished' OR job_state = 'failed'; diff --git a/internal/store/database/migrate/sqlite/0023_index_jobs_last_executed.up.sql b/internal/store/database/migrate/sqlite/0023_index_jobs_last_executed.up.sql new file mode 100644 index 0000000000..a612ba8feb --- /dev/null +++ b/internal/store/database/migrate/sqlite/0023_index_jobs_last_executed.up.sql @@ -0,0 +1,4 @@ +DROP INDEX jobs_last_executed; +CREATE INDEX jobs_last_executed + ON jobs(job_last_executed) + WHERE job_is_recurring = FALSE AND (job_state = 'finished' OR job_state = 'failed' OR job_state = 'canceled'); diff --git a/internal/store/database/migrate/sqlite/0024_alter_repo_add_importing.down.sql b/internal/store/database/migrate/sqlite/0024_alter_repo_add_importing.down.sql new file mode 100644 index 0000000000..802f19b7af --- /dev/null +++ b/internal/store/database/migrate/sqlite/0024_alter_repo_add_importing.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE repositories DROP COLUMN repo_importing_job_uid; +ALTER TABLE repositories DROP COLUMN repo_importing; diff --git a/internal/store/database/migrate/sqlite/0024_alter_repo_add_importing.up.sql b/internal/store/database/migrate/sqlite/0024_alter_repo_add_importing.up.sql new file mode 100644 index 0000000000..38ad144dc1 --- /dev/null +++ b/internal/store/database/migrate/sqlite/0024_alter_repo_add_importing.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE repositories ADD COLUMN repo_importing BOOLEAN NOT NULL DEFAULT false; +ALTER TABLE repositories ADD COLUMN repo_importing_job_uid TEXT; diff --git a/internal/store/database/migrate/sqlite/0025_alter_table_job_add_group_id.down.sql b/internal/store/database/migrate/sqlite/0025_alter_table_job_add_group_id.down.sql new file mode 100644 index 0000000000..a740594cc3 --- /dev/null +++ b/internal/store/database/migrate/sqlite/0025_alter_table_job_add_group_id.down.sql @@ -0,0 +1 @@ +ALTER TABLE jobs DROP COLUMN job_group_id; diff --git a/internal/store/database/migrate/sqlite/0025_alter_table_job_add_group_id.up.sql b/internal/store/database/migrate/sqlite/0025_alter_table_job_add_group_id.up.sql new file mode 100644 index 0000000000..86a19161b2 --- /dev/null +++ b/internal/store/database/migrate/sqlite/0025_alter_table_job_add_group_id.up.sql @@ -0,0 +1 @@ +ALTER TABLE jobs ADD COLUMN job_group_id TEXT NOT NULL DEFAULT ''; diff --git a/internal/store/database/migrate/sqlite/0026_alter_repo_drop_job_id.down.sql b/internal/store/database/migrate/sqlite/0026_alter_repo_drop_job_id.down.sql new file mode 100644 index 0000000000..799a215c6a --- /dev/null +++ b/internal/store/database/migrate/sqlite/0026_alter_repo_drop_job_id.down.sql @@ -0,0 +1 @@ +ALTER TABLE repositories ADD COLUMN repo_importing_job_uid TEXT; diff --git 
a/internal/store/database/migrate/sqlite/0026_alter_repo_drop_job_id.up.sql b/internal/store/database/migrate/sqlite/0026_alter_repo_drop_job_id.up.sql new file mode 100644 index 0000000000..9849283e9d --- /dev/null +++ b/internal/store/database/migrate/sqlite/0026_alter_repo_drop_job_id.up.sql @@ -0,0 +1 @@ +ALTER TABLE repositories DROP COLUMN repo_importing_job_uid; diff --git a/internal/store/database/migrate/sqlite/0027_create_ci_tables.down.sql b/internal/store/database/migrate/sqlite/0027_create_ci_tables.down.sql new file mode 100644 index 0000000000..3200f9ccce --- /dev/null +++ b/internal/store/database/migrate/sqlite/0027_create_ci_tables.down.sql @@ -0,0 +1,10 @@ +DROP TABLE pipelines; +DROP TABLE executions; +DROP TABLE stages; +DROP TABLE secrets; +DROP TABLE steps; +DROP TABLE logs; +DROP TABLE plugins; +DROP TABLE connectors; +DROP TABLE templates; +DROP TABLE triggers; \ No newline at end of file diff --git a/internal/store/database/migrate/sqlite/0027_create_ci_tables.up.sql b/internal/store/database/migrate/sqlite/0027_create_ci_tables.up.sql new file mode 100644 index 0000000000..8caad8d498 --- /dev/null +++ b/internal/store/database/migrate/sqlite/0027_create_ci_tables.up.sql @@ -0,0 +1,284 @@ +CREATE TABLE pipelines ( + pipeline_id INTEGER PRIMARY KEY AUTOINCREMENT + ,pipeline_description TEXT NOT NULL + ,pipeline_uid TEXT NOT NULL + ,pipeline_seq INTEGER NOT NULL DEFAULT 0 + ,pipeline_disabled BOOLEAN NOT NULL + ,pipeline_repo_id INTEGER NOT NULL + ,pipeline_default_branch TEXT NOT NULL + ,pipeline_created_by INTEGER NOT NULL + ,pipeline_config_path TEXT NOT NULL + ,pipeline_created INTEGER NOT NULL + ,pipeline_updated INTEGER NOT NULL + ,pipeline_version INTEGER NOT NULL + + -- Ensure unique combination of UID and repo ID + ,UNIQUE (pipeline_repo_id, pipeline_uid) + + -- Foreign key to repositories table + ,CONSTRAINT fk_pipelines_repo_id FOREIGN KEY (pipeline_repo_id) + REFERENCES repositories (repo_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE + + -- Foreign key to principals table + ,CONSTRAINT fk_pipelines_created_by FOREIGN KEY (pipeline_created_by) + REFERENCES principals (principal_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION +); + +CREATE TABLE executions ( + execution_id INTEGER PRIMARY KEY AUTOINCREMENT + ,execution_pipeline_id INTEGER NOT NULL + ,execution_repo_id INTEGER NOT NULL + ,execution_created_by INTEGER NOT NULL + ,execution_trigger TEXT NOT NULL + ,execution_number INTEGER NOT NULL + ,execution_parent INTEGER NOT NULL + ,execution_status TEXT NOT NULL + ,execution_error TEXT NOT NULL + ,execution_event TEXT NOT NULL + ,execution_action TEXT NOT NULL + ,execution_link TEXT NOT NULL + ,execution_timestamp INTEGER NOT NULL + ,execution_title TEXT NOT NULL + ,execution_message TEXT NOT NULL + ,execution_before TEXT NOT NULL + ,execution_after TEXT NOT NULL + ,execution_ref TEXT NOT NULL + ,execution_source_repo TEXT NOT NULL + ,execution_source TEXT NOT NULL + ,execution_target TEXT NOT NULL + ,execution_author TEXT NOT NULL + ,execution_author_name TEXT NOT NULL + ,execution_author_email TEXT NOT NULL + ,execution_author_avatar TEXT NOT NULL + ,execution_sender TEXT NOT NULL + ,execution_params TEXT NOT NULL + ,execution_cron TEXT NOT NULL + ,execution_deploy TEXT NOT NULL + ,execution_deploy_id INTEGER NOT NULL + ,execution_debug BOOLEAN NOT NULL DEFAULT 0 + ,execution_started INTEGER NOT NULL + ,execution_finished INTEGER NOT NULL + ,execution_created INTEGER NOT NULL + ,execution_updated INTEGER NOT NULL + 
,execution_version INTEGER NOT NULL + + -- Ensure unique combination of pipeline ID and number + ,UNIQUE (execution_pipeline_id, execution_number) + + -- Foreign key to pipelines table + ,CONSTRAINT fk_executions_pipeline_id FOREIGN KEY (execution_pipeline_id) + REFERENCES pipelines (pipeline_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE + + -- Foreign key to repositories table + ,CONSTRAINT fk_executions_repo_id FOREIGN KEY (execution_repo_id) + REFERENCES repositories (repo_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE + + -- Foreign key to principals table + ,CONSTRAINT fk_executions_created_by FOREIGN KEY (execution_created_by) + REFERENCES principals (principal_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION +); + +CREATE TABLE secrets ( + secret_id INTEGER PRIMARY KEY AUTOINCREMENT + ,secret_uid TEXT NOT NULL + ,secret_space_id INTEGER NOT NULL + ,secret_description TEXT NOT NULL + ,secret_data BLOB NOT NULL + ,secret_created INTEGER NOT NULL + ,secret_updated INTEGER NOT NULL + ,secret_version INTEGER NOT NULL + ,secret_created_by INTEGER NOT NULL + + -- Ensure unique combination of space ID and UID + ,UNIQUE (secret_space_id, secret_uid) + + -- Foreign key to spaces table + ,CONSTRAINT fk_secrets_space_id FOREIGN KEY (secret_space_id) + REFERENCES spaces (space_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE + + -- Foreign key to principals table + ,CONSTRAINT fk_secrets_created_by FOREIGN KEY (secret_created_by) + REFERENCES principals (principal_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION +); + +CREATE TABLE stages ( + stage_id INTEGER PRIMARY KEY AUTOINCREMENT + ,stage_execution_id INTEGER NOT NULL + ,stage_repo_id INTEGER NOT NULL + ,stage_number INTEGER NOT NULL + ,stage_kind TEXT NOT NULL + ,stage_type TEXT NOT NULL + ,stage_name TEXT NOT NULL + ,stage_status TEXT NOT NULL + ,stage_error TEXT NOT NULL + ,stage_parent_group_id INTEGER NOT NULL + ,stage_errignore BOOLEAN NOT NULL + ,stage_exit_code INTEGER NOT NULL + ,stage_limit INTEGER NOT NULL + ,stage_os TEXT NOT NULL + ,stage_arch TEXT NOT NULL + ,stage_variant TEXT NOT NULL + ,stage_kernel TEXT NOT NULL + ,stage_machine TEXT NOT NULL + ,stage_started INTEGER NOT NULL + ,stage_stopped INTEGER NOT NULL + ,stage_created INTEGER NOT NULL + ,stage_updated INTEGER NOT NULL + ,stage_version INTEGER NOT NULL + ,stage_on_success BOOLEAN NOT NULL + ,stage_on_failure BOOLEAN NOT NULL + ,stage_depends_on TEXT NOT NULL + ,stage_labels TEXT NOT NULL + ,stage_limit_repo INTEGER NOT NULL DEFAULT 0 + + -- Ensure unique combination of stage execution ID and stage number + ,UNIQUE(stage_execution_id, stage_number) + + -- Foreign key to executions table + ,CONSTRAINT fk_stages_execution_id FOREIGN KEY (stage_execution_id) + REFERENCES executions (execution_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +); + +-- name: create-index-stages-status + +CREATE INDEX ix_stage_in_progress ON stages (stage_status) +WHERE stage_status IN ('pending', 'running'); + + +CREATE TABLE steps ( + step_id INTEGER PRIMARY KEY AUTOINCREMENT + ,step_stage_id INTEGER NOT NULL + ,step_number INTEGER NOT NULL + ,step_name VARCHAR(100) NOT NULL + ,step_status VARCHAR(50) NOT NULL + ,step_error VARCHAR(500) NOT NULL + ,step_parent_group_id INTEGER NOT NULL + ,step_errignore BOOLEAN NOT NULL + ,step_exit_code INTEGER NOT NULL + ,step_started INTEGER NOT NULL + ,step_stopped INTEGER NOT NULL + ,step_version INTEGER NOT NULL + ,step_depends_on TEXT NOT NULL + ,step_image TEXT NOT NULL + 
,step_detached BOOLEAN NOT NULL + ,step_schema TEXT NOT NULL + + -- Ensure unique combination of stage ID and step number + ,UNIQUE(step_stage_id, step_number) + + -- Foreign key to stages table + ,CONSTRAINT fk_steps_stage_id FOREIGN KEY (step_stage_id) + REFERENCES stages (stage_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +); + + +CREATE TABLE logs ( + log_id INTEGER PRIMARY KEY + ,log_data BLOB NOT NULL + + -- Foreign key to steps table + ,CONSTRAINT fk_logs_id FOREIGN KEY (log_id) + REFERENCES steps (step_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +); + +CREATE TABLE connectors ( + connector_id INTEGER PRIMARY KEY AUTOINCREMENT + ,connector_uid TEXT NOT NULL + ,connector_description TEXT NOT NULL + ,connector_type TEXT NOT NULL + ,connector_space_id INTEGER NOT NULL + ,connector_data TEXT NOT NULL + ,connector_created INTEGER NOT NULL + ,connector_updated INTEGER NOT NULL + ,connector_version INTEGER NOT NULL + + -- Ensure unique combination of space ID and UID + ,UNIQUE (connector_space_id, connector_uid) + + -- Foreign key to spaces table + ,CONSTRAINT fk_connectors_space_id FOREIGN KEY (connector_space_id) + REFERENCES spaces (space_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +); + +CREATE TABLE templates ( + template_id INTEGER PRIMARY KEY AUTOINCREMENT + ,template_uid TEXT NOT NULL + ,template_description TEXT NOT NULL + ,template_space_id INTEGER NOT NULL + ,template_data TEXT NOT NULL + ,template_created INTEGER NOT NULL + ,template_updated INTEGER NOT NULL + ,template_version INTEGER NOT NULL + + -- Ensure unique combination of space ID and UID + ,UNIQUE (template_space_id, template_uid) + + -- Foreign key to spaces table + ,CONSTRAINT fk_templates_space_id FOREIGN KEY (template_space_id) + REFERENCES spaces (space_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +); + +CREATE TABLE triggers ( + trigger_id INTEGER PRIMARY KEY AUTOINCREMENT + ,trigger_uid TEXT NOT NULL + ,trigger_pipeline_id INTEGER NOT NULL + ,trigger_type TEXT NOT NULL + ,trigger_repo_id INTEGER NOT NULL + ,trigger_secret TEXT NOT NULL + ,trigger_description TEXT NOT NULL + ,trigger_disabled BOOLEAN NOT NULL + ,trigger_created_by INTEGER NOT NULL + ,trigger_actions TEXT NOT NULL + ,trigger_created INTEGER NOT NULL + ,trigger_updated INTEGER NOT NULL + ,trigger_version INTEGER NOT NULL + + -- Ensure unique combination of pipeline ID and UID + ,UNIQUE (trigger_pipeline_id, trigger_uid) + + -- Foreign key to pipelines table + ,CONSTRAINT fk_triggers_pipeline_id FOREIGN KEY (trigger_pipeline_id) + REFERENCES pipelines (pipeline_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE + + -- Foreign key to repositories table + ,CONSTRAINT fk_triggers_repo_id FOREIGN KEY (trigger_repo_id) + REFERENCES repositories (repo_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +); + +CREATE TABLE plugins ( + plugin_uid TEXT NOT NULL + ,plugin_description TEXT NOT NULL + ,plugin_logo TEXT NOT NULL + ,plugin_spec BLOB NOT NULL + + -- Ensure unique plugin names + ,UNIQUE(plugin_uid) +); \ No newline at end of file diff --git a/internal/store/database/migrate/sqlite/0028_alter_token_drop_grants.down.sql b/internal/store/database/migrate/sqlite/0028_alter_token_drop_grants.down.sql new file mode 100644 index 0000000000..3339b927b4 --- /dev/null +++ b/internal/store/database/migrate/sqlite/0028_alter_token_drop_grants.down.sql @@ -0,0 +1 @@ +ALTER TABLE tokens ADD COLUMN token_grants BIGINT DEFAULT 0; \ No newline at end of file diff --git
a/internal/store/database/migrate/sqlite/0028_alter_token_drop_grants.up.sql b/internal/store/database/migrate/sqlite/0028_alter_token_drop_grants.up.sql new file mode 100644 index 0000000000..376b0fa634 --- /dev/null +++ b/internal/store/database/migrate/sqlite/0028_alter_token_drop_grants.up.sql @@ -0,0 +1 @@ +ALTER TABLE tokens DROP COLUMN token_grants; diff --git a/internal/store/database/migrate/sqlite/0029_create_index_job_job_group_id_down.sql b/internal/store/database/migrate/sqlite/0029_create_index_job_job_group_id_down.sql new file mode 100644 index 0000000000..c3d826768c --- /dev/null +++ b/internal/store/database/migrate/sqlite/0029_create_index_job_job_group_id_down.sql @@ -0,0 +1 @@ +DROP INDEX job_group_id; diff --git a/internal/store/database/migrate/sqlite/0029_create_index_job_job_group_id_up.sql b/internal/store/database/migrate/sqlite/0029_create_index_job_job_group_id_up.sql new file mode 100644 index 0000000000..0805d82dec --- /dev/null +++ b/internal/store/database/migrate/sqlite/0029_create_index_job_job_group_id_up.sql @@ -0,0 +1 @@ +CREATE INDEX job_group_id ON jobs(job_group_id); \ No newline at end of file diff --git a/internal/store/database/migrate/sqlite/0030_create_table_space_paths.down.sql b/internal/store/database/migrate/sqlite/0030_create_table_space_paths.down.sql new file mode 100644 index 0000000000..9f8ce0062e --- /dev/null +++ b/internal/store/database/migrate/sqlite/0030_create_table_space_paths.down.sql @@ -0,0 +1,2 @@ +-- we fall back to the non-deleted paths table (risk that new entries are missing) +DROP TABLE space_paths; \ No newline at end of file diff --git a/internal/store/database/migrate/sqlite/0030_create_table_space_paths.up.sql b/internal/store/database/migrate/sqlite/0030_create_table_space_paths.up.sql new file mode 100644 index 0000000000..99305b6f90 --- /dev/null +++ b/internal/store/database/migrate/sqlite/0030_create_table_space_paths.up.sql @@ -0,0 +1,57 @@ +CREATE TABLE space_paths ( + space_path_id INTEGER PRIMARY KEY AUTOINCREMENT +,space_path_uid TEXT NOT NULL +,space_path_uid_unique TEXT NOT NULL +,space_path_is_primary BOOLEAN DEFAULT NULL +,space_path_space_id INTEGER NOT NULL +,space_path_parent_id INTEGER +,space_path_created_by INTEGER NOT NULL +,space_path_created BIGINT NOT NULL +,space_path_updated BIGINT NOT NULL + +,CONSTRAINT fk_space_path_created_by FOREIGN KEY (space_path_created_by) + REFERENCES principals (principal_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE NO ACTION +,CONSTRAINT fk_space_path_space_id FOREIGN KEY (space_path_space_id) + REFERENCES spaces (space_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +,CONSTRAINT fk_space_path_parent_id FOREIGN KEY (space_path_parent_id) + REFERENCES spaces (space_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +); + +CREATE UNIQUE INDEX space_paths_space_id_is_primary +ON space_paths(space_path_space_id, space_path_is_primary); + +CREATE UNIQUE INDEX space_paths_uid_unique_no_parent +ON space_paths(space_path_uid_unique) +WHERE space_path_parent_id IS NULL; + +CREATE UNIQUE INDEX space_paths_uid_unique +ON space_paths(space_path_parent_id, space_path_uid_unique) +WHERE space_path_parent_id IS NOT NULL; + +-- assume no alias paths were created - create fresh primary entries for each space.
+INSERT INTO space_paths ( + space_path_uid + ,space_path_uid_unique + ,space_path_is_primary + ,space_path_parent_id + ,space_path_space_id + ,space_path_created_by + ,space_path_created + ,space_path_updated +) +SELECT + space_uid + ,LOWER(space_uid) + ,TRUE + ,space_parent_id + ,space_id + ,space_created_by + ,space_created + ,space_updated +FROM spaces; \ No newline at end of file diff --git a/internal/store/database/migrate/sqlite/0031_alter_index_repositories.down.sql b/internal/store/database/migrate/sqlite/0031_alter_index_repositories.down.sql new file mode 100644 index 0000000000..1777cd2400 --- /dev/null +++ b/internal/store/database/migrate/sqlite/0031_alter_index_repositories.down.sql @@ -0,0 +1,2 @@ +DROP INDEX repositories_parent_id_uid; +CREATE INDEX repositories_parent_id ON repositories(repo_parent_id); \ No newline at end of file diff --git a/internal/store/database/migrate/sqlite/0031_alter_index_repositories.up.sql b/internal/store/database/migrate/sqlite/0031_alter_index_repositories.up.sql new file mode 100644 index 0000000000..71a1251058 --- /dev/null +++ b/internal/store/database/migrate/sqlite/0031_alter_index_repositories.up.sql @@ -0,0 +1,2 @@ +DROP INDEX repositories_parent_id; +CREATE UNIQUE INDEX repositories_parent_id_uid ON repositories(repo_parent_id, LOWER(repo_uid)); \ No newline at end of file diff --git a/internal/store/database/migrate/sqlite/0032_create_table_pullreq_file_views.down.sql b/internal/store/database/migrate/sqlite/0032_create_table_pullreq_file_views.down.sql new file mode 100644 index 0000000000..450455f210 --- /dev/null +++ b/internal/store/database/migrate/sqlite/0032_create_table_pullreq_file_views.down.sql @@ -0,0 +1 @@ +DROP TABLE pullreq_file_views; \ No newline at end of file diff --git a/internal/store/database/migrate/sqlite/0032_create_table_pullreq_file_views.up.sql b/internal/store/database/migrate/sqlite/0032_create_table_pullreq_file_views.up.sql new file mode 100644 index 0000000000..07fb3439cd --- /dev/null +++ b/internal/store/database/migrate/sqlite/0032_create_table_pullreq_file_views.up.sql @@ -0,0 +1,26 @@ +CREATE TABLE pullreq_file_views ( + pullreq_file_view_pullreq_id INTEGER NOT NULL +,pullreq_file_view_principal_id INTEGER NOT NULL +,pullreq_file_view_path TEXT NOT NULL +,pullreq_file_view_sha TEXT NOT NULL +,pullreq_file_view_obsolete BOOLEAN NOT NULL +,pullreq_file_view_created BIGINT NOT NULL +,pullreq_file_view_updated BIGINT NOT NULL + +-- for every pr and user at most one entry per file (existing entries are overwritten) +-- this index is also used for quick lookup of viewed files of a user for a given pr +,CONSTRAINT pk_pullreq_file_views PRIMARY KEY (pullreq_file_view_pullreq_id, pullreq_file_view_principal_id, pullreq_file_view_path) + +,CONSTRAINT fk_pullreq_file_view_pullreq_id FOREIGN KEY (pullreq_file_view_pullreq_id) + REFERENCES pullreqs (pullreq_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +,CONSTRAINT fk_pullreq_file_view_principal_id FOREIGN KEY (pullreq_file_view_principal_id) + REFERENCES principals (principal_id) MATCH SIMPLE + ON UPDATE NO ACTION + ON DELETE CASCADE +); + +-- this index is used to mark entries obsolete on branch update
CREATE INDEX pullreq_file_views_pullreq_id_file_path + ON pullreq_file_views(pullreq_file_view_pullreq_id, pullreq_file_view_path); \ No newline at end of file diff --git a/internal/store/database/migrate/sqlite/0033_alter_ci_tables.up.sql b/internal/store/database/migrate/sqlite/0033_alter_ci_tables.up.sql new file mode 100644 index
0000000000..9dae2d417c --- /dev/null +++ b/internal/store/database/migrate/sqlite/0033_alter_ci_tables.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE plugins ADD COLUMN plugin_type TEXT NOT NULL; +ALTER TABLE plugins ADD COLUMN plugin_version TEXT NOT NULL; \ No newline at end of file diff --git a/internal/store/database/mutex/mutex.go b/internal/store/database/mutex/mutex.go new file mode 100644 index 0000000000..9ad95f6da5 --- /dev/null +++ b/internal/store/database/mutex/mutex.go @@ -0,0 +1,32 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package mutex provides a global mutex. +package mutex + +import "sync" + +var m sync.RWMutex + +// RLock locks the global mutex for reads. +func RLock() { m.RLock() } + +// RUnlock unlocks the global mutex. +func RUnlock() { m.RUnlock() } + +// Lock locks the global mutex for writes. +func Lock() { m.Lock() } + +// Unlock unlocks the global mutex. +func Unlock() { m.Unlock() } diff --git a/internal/store/database/pipeline.go b/internal/store/database/pipeline.go new file mode 100644 index 0000000000..83558cd583 --- /dev/null +++ b/internal/store/database/pipeline.go @@ -0,0 +1,392 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package database + +import ( + "context" + "fmt" + "strings" + "time" + + "github.com/harness/gitness/internal/store" + gitness_store "github.com/harness/gitness/store" + "github.com/harness/gitness/store/database" + "github.com/harness/gitness/store/database/dbtx" + "github.com/harness/gitness/types" + + "github.com/jmoiron/sqlx" + "github.com/pkg/errors" +) + +var _ store.PipelineStore = (*pipelineStore)(nil) + +const ( + pipelineQueryBase = ` + SELECT` + + pipelineColumns + ` + FROM pipelines` + + pipelineColumns = ` + pipeline_id + ,pipeline_description + ,pipeline_created_by + ,pipeline_disabled + ,pipeline_uid + ,pipeline_seq + ,pipeline_repo_id + ,pipeline_default_branch + ,pipeline_config_path + ,pipeline_created + ,pipeline_updated + ,pipeline_version + ` +) + +// NewPipelineStore returns a new PipelineStore. +func NewPipelineStore(db *sqlx.DB) *pipelineStore { + return &pipelineStore{ + db: db, + } +} + +type pipelineStore struct { + db *sqlx.DB +} + +// Find returns a pipeline given a pipeline ID. 
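The mutex package above wraps a single process-wide sync.RWMutex in free functions. A minimal usage sketch, assuming a caller that wants to serialize writes (for example against a SQLite database) while still allowing concurrent reads; the package name and helper functions below are hypothetical and not part of this patch:

package example // hypothetical caller, not part of this patch

import (
	"github.com/harness/gitness/internal/store/database/mutex"
)

// withGlobalWriteLock runs fn while holding the process-wide write lock,
// excluding both readers and other writers.
func withGlobalWriteLock(fn func() error) error {
	mutex.Lock()
	defer mutex.Unlock()
	return fn()
}

// withGlobalReadLock runs fn while holding the shared read lock, so multiple
// readers may proceed concurrently while writers are blocked.
func withGlobalReadLock(fn func() error) error {
	mutex.RLock()
	defer mutex.RUnlock()
	return fn()
}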
+func (s *pipelineStore) Find(ctx context.Context, id int64) (*types.Pipeline, error) { + const findQueryStmt = pipelineQueryBase + ` + WHERE pipeline_id = $1` + db := dbtx.GetAccessor(ctx, s.db) + + dst := new(types.Pipeline) + if err := db.GetContext(ctx, dst, findQueryStmt, id); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed to find pipeline") + } + return dst, nil +} + +// FindByUID returns a pipeline for a given repo with a given UID. +func (s *pipelineStore) FindByUID(ctx context.Context, repoID int64, uid string) (*types.Pipeline, error) { + const findQueryStmt = pipelineQueryBase + ` + WHERE pipeline_repo_id = $1 AND pipeline_uid = $2` + db := dbtx.GetAccessor(ctx, s.db) + + dst := new(types.Pipeline) + if err := db.GetContext(ctx, dst, findQueryStmt, repoID, uid); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed to find pipeline") + } + return dst, nil +} + +// Create creates a pipeline. +func (s *pipelineStore) Create(ctx context.Context, pipeline *types.Pipeline) error { + const pipelineInsertStmt = ` + INSERT INTO pipelines ( + pipeline_description + ,pipeline_uid + ,pipeline_seq + ,pipeline_repo_id + ,pipeline_disabled + ,pipeline_created_by + ,pipeline_default_branch + ,pipeline_config_path + ,pipeline_created + ,pipeline_updated + ,pipeline_version + ) VALUES ( + :pipeline_description, + :pipeline_uid, + :pipeline_seq, + :pipeline_repo_id, + :pipeline_disabled, + :pipeline_created_by, + :pipeline_default_branch, + :pipeline_config_path, + :pipeline_created, + :pipeline_updated, + :pipeline_version + ) RETURNING pipeline_id` + db := dbtx.GetAccessor(ctx, s.db) + + query, arg, err := db.BindNamed(pipelineInsertStmt, pipeline) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind pipeline object") + } + + if err = db.QueryRowContext(ctx, query, arg...).Scan(&pipeline.ID); err != nil { + return database.ProcessSQLErrorf(err, "Pipeline query failed") + } + + return nil +} + +// Update updates a pipeline. +func (s *pipelineStore) Update(ctx context.Context, p *types.Pipeline) error { + const pipelineUpdateStmt = ` + UPDATE pipelines + SET + pipeline_description = :pipeline_description, + pipeline_uid = :pipeline_uid, + pipeline_seq = :pipeline_seq, + pipeline_disabled = :pipeline_disabled, + pipeline_default_branch = :pipeline_default_branch, + pipeline_config_path = :pipeline_config_path, + pipeline_updated = :pipeline_updated, + pipeline_version = :pipeline_version + WHERE pipeline_id = :pipeline_id AND pipeline_version = :pipeline_version - 1` + updatedAt := time.Now() + pipeline := *p + + pipeline.Version++ + pipeline.Updated = updatedAt.UnixMilli() + + db := dbtx.GetAccessor(ctx, s.db) + + query, arg, err := db.BindNamed(pipelineUpdateStmt, pipeline) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind pipeline object") + } + + result, err := db.ExecContext(ctx, query, arg...) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to update pipeline") + } + + count, err := result.RowsAffected() + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to get number of updated rows") + } + + if count == 0 { + return gitness_store.ErrVersionConflict + } + + p.Updated = pipeline.Updated + p.Version = pipeline.Version + return nil +} + +// List lists all the pipelines for a repository. +func (s *pipelineStore) List( + ctx context.Context, + repoID int64, + filter types.ListQueryFilter, +) ([]*types.Pipeline, error) { + stmt := database.Builder. + Select(pipelineColumns). 
+ From("pipelines"). + Where("pipeline_repo_id = ?", fmt.Sprint(repoID)) + + if filter.Query != "" { + stmt = stmt.Where("LOWER(pipeline_uid) LIKE ?", fmt.Sprintf("%%%s%%", strings.ToLower(filter.Query))) + } + + stmt = stmt.Limit(database.Limit(filter.Size)) + stmt = stmt.Offset(database.Offset(filter.Page, filter.Size)) + + sql, args, err := stmt.ToSql() + if err != nil { + return nil, errors.Wrap(err, "Failed to convert query to sql") + } + + db := dbtx.GetAccessor(ctx, s.db) + + dst := []*types.Pipeline{} + if err = db.SelectContext(ctx, &dst, sql, args...); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed executing custom list query") + } + + return dst, nil +} + +// ListLatest lists all the pipelines under a repository with information +// about the latest build if available. +func (s *pipelineStore) ListLatest( + ctx context.Context, + repoID int64, + filter types.ListQueryFilter, +) ([]*types.Pipeline, error) { + const pipelineExecutionColumns = pipelineColumns + ` + ,executions.execution_id + ,executions.execution_pipeline_id + ,execution_repo_id + ,execution_trigger + ,execution_number + ,execution_status + ,execution_error + ,execution_link + ,execution_message + ,execution_after + ,execution_timestamp + ,execution_title + ,execution_author + ,execution_author_name + ,execution_author_email + ,execution_author_avatar + ,execution_source + ,execution_target + ,execution_source_repo + ,execution_started + ,execution_finished + ,execution_created + ,execution_updated + ` + // Create a subquery to get max execution IDs for each unique execution pipeline ID. + subquery := database.Builder. + Select("execution_pipeline_id, MAX(execution_id) AS execution_id"). + From("executions"). + Where("execution_repo_id = ?"). + GroupBy("execution_pipeline_id") + + // Convert the subquery to SQL. + subquerySQL, _, err := subquery.ToSql() + if err != nil { + return nil, err + } + + // Left join the previous table with executions and pipelines table. + stmt := database.Builder. + Select(pipelineExecutionColumns). + From("pipelines"). + LeftJoin("("+subquerySQL+") AS max_executions ON pipelines.pipeline_id = max_executions.execution_pipeline_id"). + LeftJoin("executions ON executions.execution_id = max_executions.execution_id"). + Where("pipeline_repo_id = ?", fmt.Sprint(repoID)) + + if filter.Query != "" { + stmt = stmt.Where("LOWER(pipeline_uid) LIKE ?", fmt.Sprintf("%%%s%%", strings.ToLower(filter.Query))) + } + stmt = stmt.Limit(database.Limit(filter.Size)) + stmt = stmt.Offset(database.Offset(filter.Page, filter.Size)) + + sql, args, err := stmt.ToSql() + if err != nil { + return nil, errors.Wrap(err, "Failed to convert query to sql") + } + + db := dbtx.GetAccessor(ctx, s.db) + + dst := []*pipelineExecutionJoin{} + if err = db.SelectContext(ctx, &dst, sql, args...); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed executing custom list query") + } + + return convert(dst), nil +} + +// UpdateOptLock updates the pipeline using the optimistic locking mechanism. 
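For context on the List query built above, a sketch of a caller paging through a repository's pipelines. The Query/Page/Size fields on types.ListQueryFilter are taken from their usage in this file; the package, function name, repo ID and search term are illustrative assumptions:

package example // hypothetical caller, not part of this patch

import (
	"context"

	"github.com/harness/gitness/internal/store"
	"github.com/harness/gitness/types"
)

// listDeployPipelines returns the first page of pipelines whose UID contains
// "deploy" (matched case-insensitively via the LOWER(...) LIKE clause above).
func listDeployPipelines(ctx context.Context, pipelines store.PipelineStore, repoID int64) ([]*types.Pipeline, error) {
	var filter types.ListQueryFilter
	filter.Query = "deploy"
	filter.Page = 1
	filter.Size = 20
	return pipelines.List(ctx, repoID, filter)
}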
+func (s *pipelineStore) UpdateOptLock(ctx context.Context, + pipeline *types.Pipeline, + mutateFn func(pipeline *types.Pipeline) error) (*types.Pipeline, error) { + for { + dup := *pipeline + + err := mutateFn(&dup) + if err != nil { + return nil, err + } + + err = s.Update(ctx, &dup) + if err == nil { + return &dup, nil + } + if !errors.Is(err, gitness_store.ErrVersionConflict) { + return nil, err + } + + pipeline, err = s.Find(ctx, pipeline.ID) + if err != nil { + return nil, err + } + } +} + +// Count of pipelines under a repo, if repoID is zero it will count all pipelines in the system. +func (s *pipelineStore) Count(ctx context.Context, repoID int64, filter types.ListQueryFilter) (int64, error) { + stmt := database.Builder. + Select("count(*)"). + From("pipelines") + + if repoID > 0 { + stmt = stmt.Where("pipeline_repo_id = ?", repoID) + } + + if filter.Query != "" { + stmt = stmt.Where("LOWER(pipeline_uid) LIKE ?", fmt.Sprintf("%%%s%%", strings.ToLower(filter.Query))) + } + + sql, args, err := stmt.ToSql() + if err != nil { + return 0, errors.Wrap(err, "Failed to convert query to sql") + } + + db := dbtx.GetAccessor(ctx, s.db) + + var count int64 + err = db.QueryRowContext(ctx, sql, args...).Scan(&count) + if err != nil { + return 0, database.ProcessSQLErrorf(err, "Failed executing count query") + } + return count, nil +} + +// Delete deletes a pipeline given a pipeline ID. +func (s *pipelineStore) Delete(ctx context.Context, id int64) error { + const pipelineDeleteStmt = ` + DELETE FROM pipelines + WHERE pipeline_id = $1` + + db := dbtx.GetAccessor(ctx, s.db) + + if _, err := db.ExecContext(ctx, pipelineDeleteStmt, id); err != nil { + return database.ProcessSQLErrorf(err, "Could not delete pipeline") + } + + return nil +} + +// DeleteByUID deletes a pipeline with a given UID under a given repo. +func (s *pipelineStore) DeleteByUID(ctx context.Context, repoID int64, uid string) error { + const pipelineDeleteStmt = ` + DELETE FROM pipelines + WHERE pipeline_repo_id = $1 AND pipeline_uid = $2` + + db := dbtx.GetAccessor(ctx, s.db) + + if _, err := db.ExecContext(ctx, pipelineDeleteStmt, repoID, uid); err != nil { + return database.ProcessSQLErrorf(err, "Could not delete pipeline") + } + + return nil +} + +// Increment increments the pipeline sequence number. It will keep retrying in case +// of optimistic lock errors. +func (s *pipelineStore) IncrementSeqNum(ctx context.Context, pipeline *types.Pipeline) (*types.Pipeline, error) { + for { + var err error + pipeline.Seq++ + err = s.Update(ctx, pipeline) + if err == nil { + return pipeline, nil + } else if !errors.Is(err, gitness_store.ErrVersionConflict) { + return pipeline, errors.Wrap(err, "could not increment pipeline sequence number") + } + pipeline, err = s.Find(ctx, pipeline.ID) + if err != nil { + return nil, errors.Wrap(err, "could not increment pipeline sequence number") + } + } +} diff --git a/internal/store/database/pipeline_join.go b/internal/store/database/pipeline_join.go new file mode 100644 index 0000000000..585ed04324 --- /dev/null +++ b/internal/store/database/pipeline_join.go @@ -0,0 +1,94 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
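UpdateOptLock above retries the version-checked Update until it succeeds, re-reading the row after every version conflict. A usage sketch; the Description field is assumed to back the pipeline_description column, and the surrounding package is hypothetical:

package example // hypothetical caller, not part of this patch

import (
	"context"

	"github.com/harness/gitness/internal/store"
	"github.com/harness/gitness/types"
)

// updatePipelineDescription applies a mutation under optimistic locking.
// The mutate function may run several times, so it must be idempotent and
// must not have side effects outside the copy it receives.
func updatePipelineDescription(ctx context.Context, pipelines store.PipelineStore,
	pipeline *types.Pipeline, description string) (*types.Pipeline, error) {
	return pipelines.UpdateOptLock(ctx, pipeline, func(p *types.Pipeline) error {
		p.Description = description // assumption: field backing pipeline_description
		return nil
	})
}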
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package database + +import ( + "database/sql" + + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" +) + +// pipelineExecutionjoin struct represents a joined row between pipelines and executions +type pipelineExecutionJoin struct { + *types.Pipeline + ID sql.NullInt64 `db:"execution_id"` + PipelineID sql.NullInt64 `db:"execution_pipeline_id"` + Action sql.NullString `db:"execution_action"` + Message sql.NullString `db:"execution_message"` + After sql.NullString `db:"execution_after"` + RepoID sql.NullInt64 `db:"execution_repo_id"` + Trigger sql.NullString `db:"execution_trigger"` + Number sql.NullInt64 `db:"execution_number"` + Status sql.NullString `db:"execution_status"` + Error sql.NullString `db:"execution_error"` + Link sql.NullString `db:"execution_link"` + Timestamp sql.NullInt64 `db:"execution_timestamp"` + Title sql.NullString `db:"execution_title"` + Fork sql.NullString `db:"execution_source_repo"` + Source sql.NullString `db:"execution_source"` + Target sql.NullString `db:"execution_target"` + Author sql.NullString `db:"execution_author"` + AuthorName sql.NullString `db:"execution_author_name"` + AuthorEmail sql.NullString `db:"execution_author_email"` + AuthorAvatar sql.NullString `db:"execution_author_avatar"` + Started sql.NullInt64 `db:"execution_started"` + Finished sql.NullInt64 `db:"execution_finished"` + Created sql.NullInt64 `db:"execution_created"` + Updated sql.NullInt64 `db:"execution_updated"` +} + +func convert(rows []*pipelineExecutionJoin) []*types.Pipeline { + pipelines := []*types.Pipeline{} + for _, k := range rows { + pipeline := convertPipelineJoin(k) + pipelines = append(pipelines, pipeline) + } + return pipelines +} + +func convertPipelineJoin(join *pipelineExecutionJoin) *types.Pipeline { + ret := join.Pipeline + if !join.ID.Valid { + return ret + } + ret.Execution = &types.Execution{ + ID: join.ID.Int64, + PipelineID: join.PipelineID.Int64, + RepoID: join.RepoID.Int64, + Action: join.Action.String, + Trigger: join.Trigger.String, + Number: join.Number.Int64, + After: join.After.String, + Message: join.Message.String, + Status: enum.ParseCIStatus(join.Status.String), + Error: join.Error.String, + Link: join.Link.String, + Timestamp: join.Timestamp.Int64, + Title: join.Title.String, + Fork: join.Fork.String, + Source: join.Source.String, + Target: join.Target.String, + Author: join.Author.String, + AuthorName: join.AuthorName.String, + AuthorEmail: join.AuthorEmail.String, + AuthorAvatar: join.AuthorAvatar.String, + Started: join.Started.Int64, + Finished: join.Finished.Int64, + Created: join.Created.Int64, + Updated: join.Updated.Int64, + } + return ret +} diff --git a/internal/store/database/plugin.go b/internal/store/database/plugin.go new file mode 100644 index 0000000000..db2efed58f --- /dev/null +++ b/internal/store/database/plugin.go @@ -0,0 +1,208 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
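Because ListLatest left-joins executions, convertPipelineJoin above only attaches an Execution when execution_id is non-NULL, so callers must treat it as optional. A small sketch; the UID field on types.Pipeline is assumed to back pipeline_uid, and the package is hypothetical:

package example // hypothetical caller, not part of this patch

import (
	"context"

	"github.com/harness/gitness/internal/store"
	"github.com/harness/gitness/types"
)

// latestExecutionsByUID maps pipeline UIDs to their most recent execution,
// skipping pipelines that have never run (nil Execution from the LEFT JOIN).
func latestExecutionsByUID(ctx context.Context, pipelines store.PipelineStore,
	repoID int64) (map[string]*types.Execution, error) {
	list, err := pipelines.ListLatest(ctx, repoID, types.ListQueryFilter{})
	if err != nil {
		return nil, err
	}
	byUID := make(map[string]*types.Execution, len(list))
	for _, p := range list {
		if p.Execution == nil {
			continue // never executed
		}
		byUID[p.UID] = p.Execution
	}
	return byUID, nil
}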
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package database + +import ( + "context" + "fmt" + "strings" + + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/store/database" + "github.com/harness/gitness/store/database/dbtx" + "github.com/harness/gitness/types" + + "github.com/jmoiron/sqlx" + "github.com/pkg/errors" +) + +var _ store.PluginStore = (*pluginStore)(nil) + +const ( + pluginColumns = ` + plugin_uid + ,plugin_description + ,plugin_type + ,plugin_version + ,plugin_logo + ,plugin_spec + ` +) + +// NewPluginStore returns a new PluginStore. +func NewPluginStore(db *sqlx.DB) *pluginStore { + return &pluginStore{ + db: db, + } +} + +type pluginStore struct { + db *sqlx.DB +} + +// Create creates a new entry in the plugin datastore. +func (s *pluginStore) Create(ctx context.Context, plugin *types.Plugin) error { + const pluginInsertStmt = ` + INSERT INTO plugins ( + plugin_uid + ,plugin_description + ,plugin_type + ,plugin_version + ,plugin_logo + ,plugin_spec + ) VALUES ( + :plugin_uid + ,:plugin_description + ,:plugin_type + ,:plugin_version + ,:plugin_logo + ,:plugin_spec + ) RETURNING plugin_uid` + + db := dbtx.GetAccessor(ctx, s.db) + + query, arg, err := db.BindNamed(pluginInsertStmt, plugin) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind plugin object") + } + + if err = db.QueryRowContext(ctx, query, arg...).Scan(&plugin.UID); err != nil { + return database.ProcessSQLErrorf(err, "plugin query failed") + } + + return nil +} + +// Find finds a version of a plugin +func (s *pluginStore) Find(ctx context.Context, name, version string) (*types.Plugin, error) { + const pluginFindStmt = ` + SELECT` + pluginColumns + + `FROM plugins + WHERE plugin_uid = $1 AND plugin_version = $2 + ` + db := dbtx.GetAccessor(ctx, s.db) + + dst := new(types.Plugin) + if err := db.GetContext(ctx, dst, pluginFindStmt, name, version); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed to find pipeline") + } + + return dst, nil +} + +// List returns back the list of plugins along with their associated schemas. +func (s *pluginStore) List( + ctx context.Context, + filter types.ListQueryFilter, +) ([]*types.Plugin, error) { + stmt := database.Builder. + Select(pluginColumns). + From("plugins") + + if filter.Query != "" { + stmt = stmt.Where("LOWER(plugin_uid) LIKE ?", fmt.Sprintf("%%%s%%", strings.ToLower(filter.Query))) + } + + stmt = stmt.Limit(database.Limit(filter.Size)) + stmt = stmt.Offset(database.Offset(filter.Page, filter.Size)) + + sql, args, err := stmt.ToSql() + if err != nil { + return nil, errors.Wrap(err, "Failed to convert query to sql") + } + + db := dbtx.GetAccessor(ctx, s.db) + + dst := []*types.Plugin{} + if err = db.SelectContext(ctx, &dst, sql, args...); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed executing custom list query") + } + + return dst, nil +} + +// ListAll returns back the full list of plugins in the database. +func (s *pluginStore) ListAll( + ctx context.Context, +) ([]*types.Plugin, error) { + stmt := database.Builder. + Select(pluginColumns). 
+ From("plugins") + + sql, args, err := stmt.ToSql() + if err != nil { + return nil, errors.Wrap(err, "Failed to convert query to sql") + } + + db := dbtx.GetAccessor(ctx, s.db) + + dst := []*types.Plugin{} + if err = db.SelectContext(ctx, &dst, sql, args...); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed executing custom list query") + } + + return dst, nil +} + +// Count of plugins matching the filter criteria. +func (s *pluginStore) Count(ctx context.Context, filter types.ListQueryFilter) (int64, error) { + stmt := database.Builder. + Select("count(*)"). + From("plugins") + + if filter.Query != "" { + stmt = stmt.Where("LOWER(plugin_uid) LIKE ?", fmt.Sprintf("%%%s%%", filter.Query)) + } + + sql, args, err := stmt.ToSql() + if err != nil { + return 0, errors.Wrap(err, "Failed to convert query to sql") + } + + db := dbtx.GetAccessor(ctx, s.db) + + var count int64 + err = db.QueryRowContext(ctx, sql, args...).Scan(&count) + if err != nil { + return 0, database.ProcessSQLErrorf(err, "Failed executing count query") + } + return count, nil +} + +// Update updates a plugin row. +func (s *pluginStore) Update(ctx context.Context, p *types.Plugin) error { + const pluginUpdateStmt = ` + UPDATE plugins + SET + plugin_description = :plugin_description + ,plugin_type = :plugin_type + ,plugin_version = :plugin_version + ,plugin_logo = :plugin_logo + ,plugin_spec = :plugin_spec + WHERE plugin_uid = :plugin_uid` + db := dbtx.GetAccessor(ctx, s.db) + + query, arg, err := db.BindNamed(pluginUpdateStmt, p) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind plugin object") + } + + _, err = db.ExecContext(ctx, query, arg...) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to update plugin") + } + + return nil +} diff --git a/internal/store/database/principal.go b/internal/store/database/principal.go new file mode 100644 index 0000000000..f039c762b1 --- /dev/null +++ b/internal/store/database/principal.go @@ -0,0 +1,218 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package database + +import ( + "context" + "fmt" + "strings" + + "github.com/harness/gitness/internal/store" + gitness_store "github.com/harness/gitness/store" + "github.com/harness/gitness/store/database" + "github.com/harness/gitness/store/database/dbtx" + "github.com/harness/gitness/types" + + "github.com/Masterminds/squirrel" + "github.com/jmoiron/sqlx" + "github.com/pkg/errors" + "github.com/rs/zerolog/log" +) + +var _ store.PrincipalStore = (*PrincipalStore)(nil) + +// NewPrincipalStore returns a new PrincipalStore. +func NewPrincipalStore(db *sqlx.DB, uidTransformation store.PrincipalUIDTransformation) *PrincipalStore { + return &PrincipalStore{ + db: db, + uidTransformation: uidTransformation, + } +} + +// PrincipalStore implements a PrincipalStore backed by a relational database. +type PrincipalStore struct { + db *sqlx.DB + uidTransformation store.PrincipalUIDTransformation +} + +// principal is a DB representation of a principal. 
+// It is required to allow storing transformed UIDs used for uniqueness constraints and searching. +type principal struct { + types.Principal + UIDUnique string `db:"principal_uid_unique"` +} + +// principalCommonColumns defines the columns that are the same across all principals. +const principalCommonColumns = ` + principal_id + ,principal_uid + ,principal_uid_unique + ,principal_email + ,principal_display_name + ,principal_admin + ,principal_blocked + ,principal_salt + ,principal_created + ,principal_updated` + +// principalColumns defines the columns that are used only in a principal itself +// (for explicit principals the type is implicit, only the generic principal struct stores it explicitly). +const principalColumns = principalCommonColumns + ` + ,principal_type` + +const principalSelectBase = ` + SELECT` + principalColumns + ` + FROM principals` + +// Find finds the principal by id. +func (s *PrincipalStore) Find(ctx context.Context, id int64) (*types.Principal, error) { + const sqlQuery = principalSelectBase + ` + WHERE principal_id = $1` + + db := dbtx.GetAccessor(ctx, s.db) + + dst := new(principal) + if err := db.GetContext(ctx, dst, sqlQuery, id); err != nil { + return nil, database.ProcessSQLErrorf(err, "Select by id query failed") + } + + return s.mapDBPrincipal(dst), nil +} + +// FindByUID finds the principal by uid. +func (s *PrincipalStore) FindByUID(ctx context.Context, uid string) (*types.Principal, error) { + const sqlQuery = principalSelectBase + ` + WHERE principal_uid_unique = $1` + + // map the UID to unique UID before searching! + uidUnique, err := s.uidTransformation(uid) + if err != nil { + // in case we fail to transform, return a not found (as it can't exist in the first place) + log.Ctx(ctx).Debug().Msgf("failed to transform uid '%s': %s", uid, err.Error()) + return nil, gitness_store.ErrResourceNotFound + } + + db := dbtx.GetAccessor(ctx, s.db) + + dst := new(principal) + if err = db.GetContext(ctx, dst, sqlQuery, uidUnique); err != nil { + return nil, database.ProcessSQLErrorf(err, "Select by uid query failed") + } + + return s.mapDBPrincipal(dst), nil +} + +// FindManyByUID returns all principals found for the provided UIDs. +// If a UID isn't found, it's not returned in the list. +func (s *PrincipalStore) FindManyByUID(ctx context.Context, uids []string) ([]*types.Principal, error) { + // map the UIDs to unique UIDs before searching! + uniqueUIDs := make([]string, len(uids)) + for i := range uids { + var err error + uniqueUIDs[i], err = s.uidTransformation(uids[i]) + if err != nil { + // in case we fail to transform, skip the entry (as it can't exist in the first place) + log.Ctx(ctx).Warn().Msgf("failed to transform uid '%s': %s", uids[i], err.Error()) + } + } + + stmt := database.Builder. + Select(principalColumns). + From("principals"). + Where(squirrel.Eq{"principal_uid_unique": uniqueUIDs}) + db := dbtx.GetAccessor(ctx, s.db) + + sqlQuery, params, err := stmt.ToSql() + if err != nil { + return nil, database.ProcessSQLErrorf(err, "failed to generate find many principal query") + } + + dst := []*principal{} + if err := db.SelectContext(ctx, &dst, sqlQuery, params...); err != nil { + return nil, database.ProcessSQLErrorf(err, "find many by uid for principals query failed") + } + + return s.mapDBPrincipals(dst), nil +} + +// FindByEmail finds the principal by email.
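All principal lookups above go through uidTransformation and query principal_uid_unique rather than principal_uid. A sketch of wiring a transformation into the store constructor; the lowercasing transformation is only an illustrative assumption about what such a transformation might do, and the exact signature of store.PrincipalUIDTransformation is inferred from its usage in this file:

package example // hypothetical caller, not part of this patch

import (
	"strings"

	"github.com/harness/gitness/internal/store"
	"github.com/harness/gitness/internal/store/database"

	"github.com/jmoiron/sqlx"
)

// newPrincipalStore builds a PrincipalStore whose unique UID is simply the
// lowercased UID, making lookups case-insensitive while principal_uid keeps
// the original casing.
func newPrincipalStore(db *sqlx.DB) *database.PrincipalStore {
	var transform store.PrincipalUIDTransformation = func(uid string) (string, error) {
		return strings.ToLower(uid), nil // assumption: illustrative transformation only
	}
	return database.NewPrincipalStore(db, transform)
}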
+func (s *PrincipalStore) FindByEmail(ctx context.Context, email string) (*types.Principal, error) { + const sqlQuery = principalSelectBase + ` + WHERE LOWER(principal_email) = $1` + + db := dbtx.GetAccessor(ctx, s.db) + + dst := new(principal) + if err := db.GetContext(ctx, dst, sqlQuery, strings.ToLower(email)); err != nil { + return nil, database.ProcessSQLErrorf(err, "Select by email query failed") + } + + return s.mapDBPrincipal(dst), nil +} + +// List lists the principals matching the provided filter. +func (s *PrincipalStore) List(ctx context.Context, + opts *types.PrincipalFilter) ([]*types.Principal, error) { + stmt := database.Builder. + Select(principalColumns). + From("principals") + + if len(opts.Types) == 1 { + stmt = stmt.Where("principal_type = ?", opts.Types[0]) + } else if len(opts.Types) > 1 { + stmt = stmt.Where(squirrel.Eq{"principal_type": opts.Types}) + } + + if opts.Query != "" { + // TODO: optimize performance + // https://harness.atlassian.net/browse/CODE-522 + searchTerm := fmt.Sprintf("%%%s%%", strings.ToLower(opts.Query)) + stmt = stmt.Where( + "(LOWER(principal_uid) LIKE ? OR LOWER(principal_email) LIKE ? OR LOWER(principal_display_name) LIKE ?)", + searchTerm, + searchTerm, + searchTerm, + ) + } + + stmt = stmt.Limit(database.Limit(opts.Size)) + stmt = stmt.Offset(database.Offset(opts.Page, opts.Size)) + + sql, args, err := stmt.ToSql() + if err != nil { + return nil, errors.Wrap(err, "Failed to convert query to sql") + } + + db := dbtx.GetAccessor(ctx, s.db) + + dst := []*principal{} + if err := db.SelectContext(ctx, &dst, sql, args...); err != nil { + return nil, database.ProcessSQLErrorf(err, "Search by display_name and email query failed") + } + + return s.mapDBPrincipals(dst), nil +} + +func (s *PrincipalStore) mapDBPrincipal(dbPrincipal *principal) *types.Principal { + return &dbPrincipal.Principal +} + +func (s *PrincipalStore) mapDBPrincipals(dbPrincipals []*principal) []*types.Principal { + res := make([]*types.Principal, len(dbPrincipals)) + for i := range dbPrincipals { + res[i] = s.mapDBPrincipal(dbPrincipals[i]) + } + return res +} diff --git a/internal/store/database/principal_info.go b/internal/store/database/principal_info.go new file mode 100644 index 0000000000..8b9a6908e7 --- /dev/null +++ b/internal/store/database/principal_info.go @@ -0,0 +1,142 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package database + +import ( + "context" + + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/store/database" + "github.com/harness/gitness/store/database/dbtx" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/Masterminds/squirrel" + "github.com/jmoiron/sqlx" +) + +var _ store.PrincipalInfoView = (*PrincipalInfoView)(nil) + +// NewPrincipalInfoView returns a new PrincipalInfoView. +// It's used by the principal info cache. 
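The List method above searches uid, email and display name with one case-insensitive term and optionally restricts by principal type. A caller sketch; the element type of Types and the PrincipalTypeUser constant are assumptions based on how the filter is consumed above:

package example // hypothetical caller, not part of this patch

import (
	"context"

	"github.com/harness/gitness/internal/store"
	"github.com/harness/gitness/types"
	"github.com/harness/gitness/types/enum"
)

// searchUsers returns the first 50 user principals whose uid, email or
// display name contains the given term (case-insensitive).
func searchUsers(ctx context.Context, principals store.PrincipalStore, term string) ([]*types.Principal, error) {
	var opts types.PrincipalFilter
	opts.Query = term
	opts.Types = []enum.PrincipalType{enum.PrincipalTypeUser} // assumption: constant name for the 'user' type
	opts.Page = 1
	opts.Size = 50
	return principals.List(ctx, &opts)
}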
+func NewPrincipalInfoView(db *sqlx.DB) *PrincipalInfoView { + return &PrincipalInfoView{ + db: db, + } +} + +type PrincipalInfoView struct { + db *sqlx.DB +} + +const ( + principalInfoCommonColumns = ` + principal_id + ,principal_uid + ,principal_email + ,principal_display_name + ,principal_type + ,principal_created + ,principal_updated` +) + +type principalInfo struct { + ID int64 `db:"principal_id"` + UID string `db:"principal_uid"` + DisplayName string `db:"principal_display_name"` + Email string `db:"principal_email"` + Type enum.PrincipalType `db:"principal_type"` + Created int64 `db:"principal_created"` + Updated int64 `db:"principal_updated"` +} + +// Find returns a single principal info object by id from the `principals` database table. +func (s *PrincipalInfoView) Find(ctx context.Context, id int64) (*types.PrincipalInfo, error) { + const sqlQuery = ` + SELECT ` + principalInfoCommonColumns + ` + FROM principals + WHERE principal_id = $1` + + db := dbtx.GetAccessor(ctx, s.db) + + v := db.QueryRowContext(ctx, sqlQuery, id) + if err := v.Err(); err != nil { + return nil, database.ProcessSQLErrorf(err, "failed to find principal info") + } + + info := &types.PrincipalInfo{} + + if err := v.Scan(&info.ID, &info.UID, &info.Email, &info.DisplayName, + &info.Type, &info.Created, &info.Updated); err != nil { + return nil, database.ProcessSQLErrorf(err, "failed to scan principal info") + } + + return info, nil +} + +// FindMany returns a several principal info objects by id from the `principals` database table. +func (s *PrincipalInfoView) FindMany(ctx context.Context, ids []int64) ([]*types.PrincipalInfo, error) { + db := dbtx.GetAccessor(ctx, s.db) + + stmt := database.Builder. + Select(principalInfoCommonColumns). + From("principals"). + Where(squirrel.Eq{"principal_id": ids}) + + sqlQuery, params, err := stmt.ToSql() + if err != nil { + return nil, database.ProcessSQLErrorf(err, "failed to generate find many principal info SQL query") + } + + rows, err := db.QueryContext(ctx, sqlQuery, params...) + if err != nil { + return nil, database.ProcessSQLErrorf(err, "failed to query find many principal info") + } + defer func() { + _ = rows.Close() + }() + + result := make([]*types.PrincipalInfo, 0, len(ids)) + + for rows.Next() { + info := &types.PrincipalInfo{} + err = rows.Scan(&info.ID, &info.UID, &info.Email, &info.DisplayName, + &info.Type, &info.Created, &info.Updated) + if err != nil { + return nil, database.ProcessSQLErrorf(err, "failed to scan principal info") + } + + result = append(result, info) + } + + err = rows.Err() + if err != nil { + return nil, database.ProcessSQLErrorf(err, "failed to read principal info data") + } + + return result, nil +} + +func mapToPrincipalInfo(p *principalInfo) types.PrincipalInfo { + return types.PrincipalInfo{ + ID: p.ID, + UID: p.UID, + DisplayName: p.DisplayName, + Email: p.Email, + Type: p.Type, + Created: p.Created, + Updated: p.Updated, + } +} diff --git a/internal/store/database/principal_service.go b/internal/store/database/principal_service.go new file mode 100644 index 0000000000..c41cb846f0 --- /dev/null +++ b/internal/store/database/principal_service.go @@ -0,0 +1,238 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
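FindMany above is the batch counterpart of Find and silently drops IDs that do not resolve. A sketch of turning the result into a lookup map, which is the shape most callers (for example the principal info cache mentioned above) tend to need; the package and function name are hypothetical:

package example // hypothetical caller, not part of this patch

import (
	"context"

	"github.com/harness/gitness/internal/store"
	"github.com/harness/gitness/types"
)

// principalInfosByID resolves a set of principal IDs in a single query and
// indexes the results by ID; IDs that were not found are simply absent.
func principalInfosByID(ctx context.Context, view store.PrincipalInfoView,
	ids []int64) (map[int64]*types.PrincipalInfo, error) {
	infos, err := view.FindMany(ctx, ids)
	if err != nil {
		return nil, err
	}
	byID := make(map[int64]*types.PrincipalInfo, len(infos))
	for _, info := range infos {
		byID[info.ID] = info
	}
	return byID, nil
}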
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package database + +import ( + "context" + "fmt" + + gitness_store "github.com/harness/gitness/store" + "github.com/harness/gitness/store/database" + "github.com/harness/gitness/store/database/dbtx" + "github.com/harness/gitness/types" + + "github.com/rs/zerolog/log" +) + +// service is a DB representation of a service principal. +// It is required to allow storing transformed UIDs used for uniquness constraints and searching. +type service struct { + types.Service + UIDUnique string `db:"principal_uid_unique"` +} + +// service doesn't have any extra columns. +const serviceColumns = principalCommonColumns + +const serviceSelectBase = ` + SELECT` + serviceColumns + ` + FROM principals` + +// FindService finds the service by id. +func (s *PrincipalStore) FindService(ctx context.Context, id int64) (*types.Service, error) { + const sqlQuery = serviceSelectBase + ` + WHERE principal_type = 'service' AND principal_id = $1` + + db := dbtx.GetAccessor(ctx, s.db) + + dst := new(service) + if err := db.GetContext(ctx, dst, sqlQuery, id); err != nil { + return nil, database.ProcessSQLErrorf(err, "Select by id query failed") + } + + return s.mapDBService(dst), nil +} + +// FindServiceByUID finds the service by uid. +func (s *PrincipalStore) FindServiceByUID(ctx context.Context, uid string) (*types.Service, error) { + const sqlQuery = serviceSelectBase + ` + WHERE principal_type = 'service' AND principal_uid_unique = $1` + + // map the UID to unique UID before searching! + uidUnique, err := s.uidTransformation(uid) + if err != nil { + // in case we fail to transform, return a not found (as it can't exist in the first place) + log.Ctx(ctx).Debug().Msgf("failed to transform uid '%s': %s", uid, err.Error()) + return nil, gitness_store.ErrResourceNotFound + } + + db := dbtx.GetAccessor(ctx, s.db) + + dst := new(service) + if err = db.GetContext(ctx, dst, sqlQuery, uidUnique); err != nil { + return nil, database.ProcessSQLErrorf(err, "Select by uid query failed") + } + + return s.mapDBService(dst), nil +} + +// CreateService saves the service. 
+func (s *PrincipalStore) CreateService(ctx context.Context, svc *types.Service) error { + const sqlQuery = ` + INSERT INTO principals ( + principal_type + ,principal_uid + ,principal_uid_unique + ,principal_email + ,principal_display_name + ,principal_admin + ,principal_blocked + ,principal_salt + ,principal_created + ,principal_updated + ) values ( + 'service' + ,:principal_uid + ,:principal_uid_unique + ,:principal_email + ,:principal_display_name + ,:principal_admin + ,:principal_blocked + ,:principal_salt + ,:principal_created + ,:principal_updated + ) RETURNING principal_id` + + dbSVC, err := s.mapToDBservice(svc) + if err != nil { + return fmt.Errorf("failed to map db service: %w", err) + } + + db := dbtx.GetAccessor(ctx, s.db) + + query, arg, err := db.BindNamed(sqlQuery, dbSVC) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind service object") + } + + if err = db.QueryRowContext(ctx, query, arg...).Scan(&svc.ID); err != nil { + return database.ProcessSQLErrorf(err, "Insert query failed") + } + + return nil +} + +// UpdateService updates the service. +func (s *PrincipalStore) UpdateService(ctx context.Context, svc *types.Service) error { + const sqlQuery = ` + UPDATE principals + SET + principal_email = :principal_email + ,principal_display_name = :principal_display_name + ,principal_admin = :principal_admin + ,principal_blocked = :principal_blocked + ,principal_updated = :principal_updated + WHERE principal_type = 'service' AND principal_id = :principal_id` + + dbSVC, err := s.mapToDBservice(svc) + if err != nil { + return fmt.Errorf("failed to map db service: %w", err) + } + + db := dbtx.GetAccessor(ctx, s.db) + + query, arg, err := db.BindNamed(sqlQuery, dbSVC) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind service object") + } + + if _, err = db.ExecContext(ctx, query, arg...); err != nil { + return database.ProcessSQLErrorf(err, "Update query failed") + } + + return err +} + +// DeleteService deletes the service. +func (s *PrincipalStore) DeleteService(ctx context.Context, id int64) error { + const sqlQuery = ` + DELETE FROM principals + WHERE principal_type = 'service' AND principal_id = $1` + + db := dbtx.GetAccessor(ctx, s.db) + + // delete the service + if _, err := db.ExecContext(ctx, sqlQuery, id); err != nil { + return database.ProcessSQLErrorf(err, "The delete query failed") + } + + return nil +} + +// ListServices returns a list of service for a specific parent. +func (s *PrincipalStore) ListServices(ctx context.Context) ([]*types.Service, error) { + const sqlQuery = serviceSelectBase + ` + WHERE principal_type = 'service' + ORDER BY principal_uid ASC` + + db := dbtx.GetAccessor(ctx, s.db) + + dst := []*service{} + + err := db.SelectContext(ctx, &dst, sqlQuery) + if err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed executing default list query") + } + + return s.mapDBServices(dst), nil +} + +// CountServices returns a count of service for a specific parent. 
+func (s *PrincipalStore) CountServices(ctx context.Context) (int64, error) { + const sqlQuery = ` + SELECT count(*) + FROM principals + WHERE principal_type = 'service'` + + db := dbtx.GetAccessor(ctx, s.db) + + var count int64 + err := db.QueryRowContext(ctx, sqlQuery).Scan(&count) + if err != nil { + return 0, database.ProcessSQLErrorf(err, "Failed executing count query") + } + + return count, nil +} + +func (s *PrincipalStore) mapDBService(dbSvc *service) *types.Service { + return &dbSvc.Service +} + +func (s *PrincipalStore) mapDBServices(dbSVCs []*service) []*types.Service { + res := make([]*types.Service, len(dbSVCs)) + for i := range dbSVCs { + res[i] = s.mapDBService(dbSVCs[i]) + } + return res +} + +func (s *PrincipalStore) mapToDBservice(svc *types.Service) (*service, error) { + // service comes from outside. + if svc == nil { + return nil, fmt.Errorf("service is nil") + } + + uidUnique, err := s.uidTransformation(svc.UID) + if err != nil { + return nil, fmt.Errorf("failed to transform service UID: %w", err) + } + dbService := &service{ + Service: *svc, + UIDUnique: uidUnique, + } + + return dbService, nil +} diff --git a/internal/store/database/principal_service_account.go b/internal/store/database/principal_service_account.go new file mode 100644 index 0000000000..9c8931aaf2 --- /dev/null +++ b/internal/store/database/principal_service_account.go @@ -0,0 +1,243 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package database + +import ( + "context" + "fmt" + + gitness_store "github.com/harness/gitness/store" + "github.com/harness/gitness/store/database" + "github.com/harness/gitness/store/database/dbtx" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/rs/zerolog/log" +) + +// serviceAccount is a DB representation of a service account principal. +// It is required to allow storing transformed UIDs used for uniquness constraints and searching. +type serviceAccount struct { + types.ServiceAccount + UIDUnique string `db:"principal_uid_unique"` +} + +const serviceAccountColumns = principalCommonColumns + ` + ,principal_sa_parent_type + ,principal_sa_parent_id` + +const serviceAccountSelectBase = ` + SELECT` + serviceAccountColumns + ` + FROM principals` + +// FindServiceAccount finds the service account by id. +func (s *PrincipalStore) FindServiceAccount(ctx context.Context, id int64) (*types.ServiceAccount, error) { + const sqlQuery = serviceAccountSelectBase + ` + WHERE principal_type = 'serviceaccount' AND principal_id = $1` + + db := dbtx.GetAccessor(ctx, s.db) + + dst := new(serviceAccount) + if err := db.GetContext(ctx, dst, sqlQuery, id); err != nil { + return nil, database.ProcessSQLErrorf(err, "Select by id query failed") + } + return s.mapDBServiceAccount(dst), nil +} + +// FindServiceAccountByUID finds the service account by uid. 
+func (s *PrincipalStore) FindServiceAccountByUID(ctx context.Context, uid string) (*types.ServiceAccount, error) { + const sqlQuery = serviceAccountSelectBase + ` + WHERE principal_type = 'serviceaccount' AND principal_uid_unique = $1` + + // map the UID to unique UID before searching! + uidUnique, err := s.uidTransformation(uid) + if err != nil { + // in case we fail to transform, return a not found (as it can't exist in the first place) + log.Ctx(ctx).Debug().Msgf("failed to transform uid '%s': %s", uid, err.Error()) + return nil, gitness_store.ErrResourceNotFound + } + + db := dbtx.GetAccessor(ctx, s.db) + + dst := new(serviceAccount) + if err = db.GetContext(ctx, dst, sqlQuery, uidUnique); err != nil { + return nil, database.ProcessSQLErrorf(err, "Select by uid query failed") + } + + return s.mapDBServiceAccount(dst), nil +} + +// CreateServiceAccount saves the service account. +func (s *PrincipalStore) CreateServiceAccount(ctx context.Context, sa *types.ServiceAccount) error { + const sqlQuery = ` + INSERT INTO principals ( + principal_type + ,principal_uid + ,principal_uid_unique + ,principal_email + ,principal_display_name + ,principal_admin + ,principal_blocked + ,principal_salt + ,principal_created + ,principal_updated + ,principal_sa_parent_type + ,principal_sa_parent_id + ) values ( + 'serviceaccount' + ,:principal_uid + ,:principal_uid_unique + ,:principal_email + ,:principal_display_name + ,false + ,:principal_blocked + ,:principal_salt + ,:principal_created + ,:principal_updated + ,:principal_sa_parent_type + ,:principal_sa_parent_id + ) RETURNING principal_id` + + dbSA, err := s.mapToDBserviceAccount(sa) + if err != nil { + return fmt.Errorf("failed to map db service account: %w", err) + } + + db := dbtx.GetAccessor(ctx, s.db) + + query, arg, err := db.BindNamed(sqlQuery, dbSA) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind service account object") + } + + if err = db.QueryRowContext(ctx, query, arg...).Scan(&sa.ID); err != nil { + return database.ProcessSQLErrorf(err, "Insert query failed") + } + + return nil +} + +// UpdateServiceAccount updates the service account details. +func (s *PrincipalStore) UpdateServiceAccount(ctx context.Context, sa *types.ServiceAccount) error { + const sqlQuery = ` + UPDATE principals + SET + principal_email = :principal_email + ,principal_display_name = :principal_display_name + ,principal_blocked = :principal_blocked + ,principal_salt = :principal_salt + ,principal_updated = :principal_updated + WHERE principal_type = 'serviceaccount' AND principal_id = :principal_id` + + dbSA, err := s.mapToDBserviceAccount(sa) + if err != nil { + return fmt.Errorf("failed to map db service account: %w", err) + } + + db := dbtx.GetAccessor(ctx, s.db) + + query, arg, err := db.BindNamed(sqlQuery, dbSA) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind service account object") + } + + if _, err = db.ExecContext(ctx, query, arg...); err != nil { + return database.ProcessSQLErrorf(err, "Update query failed") + } + + return err +} + +// DeleteServiceAccount deletes the service account. 
+func (s *PrincipalStore) DeleteServiceAccount(ctx context.Context, id int64) error { + const sqlQuery = ` + DELETE FROM principals + WHERE principal_type = 'serviceaccount' AND principal_id = $1` + + db := dbtx.GetAccessor(ctx, s.db) + + if _, err := db.ExecContext(ctx, sqlQuery, id); err != nil { + return database.ProcessSQLErrorf(err, "The delete query failed") + } + + return nil +} + +// ListServiceAccounts returns a list of service accounts for a specific parent. +func (s *PrincipalStore) ListServiceAccounts(ctx context.Context, parentType enum.ParentResourceType, + parentID int64) ([]*types.ServiceAccount, error) { + const sqlQuery = serviceAccountSelectBase + ` + WHERE principal_type = 'serviceaccount' AND principal_sa_parent_type = $1 AND principal_sa_parent_id = $2 + ORDER BY principal_uid ASC` + + db := dbtx.GetAccessor(ctx, s.db) + + dst := []*serviceAccount{} + err := db.SelectContext(ctx, &dst, sqlQuery, parentType, parentID) + if err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed executing default list query") + } + + return s.mapDBServiceAccounts(dst), nil +} + +// CountServiceAccounts returns a count of service accounts for a specific parent. +func (s *PrincipalStore) CountServiceAccounts(ctx context.Context, + parentType enum.ParentResourceType, parentID int64) (int64, error) { + const sqlQuery = ` + SELECT count(*) + FROM principals + WHERE principal_type = 'serviceaccount' AND principal_sa_parent_type = $1 AND principal_sa_parent_id = $2` + + db := dbtx.GetAccessor(ctx, s.db) + + var count int64 + err := db.QueryRowContext(ctx, sqlQuery, parentType, parentID).Scan(&count) + if err != nil { + return 0, database.ProcessSQLErrorf(err, "Failed executing count query") + } + + return count, nil +} + +func (s *PrincipalStore) mapDBServiceAccount(dbSA *serviceAccount) *types.ServiceAccount { + return &dbSA.ServiceAccount +} + +func (s *PrincipalStore) mapDBServiceAccounts(dbSAs []*serviceAccount) []*types.ServiceAccount { + res := make([]*types.ServiceAccount, len(dbSAs)) + for i := range dbSAs { + res[i] = s.mapDBServiceAccount(dbSAs[i]) + } + return res +} + +func (s *PrincipalStore) mapToDBserviceAccount(sa *types.ServiceAccount) (*serviceAccount, error) { + // service account comes from outside. + if sa == nil { + return nil, fmt.Errorf("service account is nil") + } + + uidUnique, err := s.uidTransformation(sa.UID) + if err != nil { + return nil, fmt.Errorf("failed to transform service account UID: %w", err) + } + dbSA := &serviceAccount{ + ServiceAccount: *sa, + UIDUnique: uidUnique, + } + + return dbSA, nil +} diff --git a/internal/store/database/principal_user.go b/internal/store/database/principal_user.go new file mode 100644 index 0000000000..b44a2e4d88 --- /dev/null +++ b/internal/store/database/principal_user.go @@ -0,0 +1,297 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License.
+ +package database + +import ( + "context" + "fmt" + "strings" + + gitness_store "github.com/harness/gitness/store" + "github.com/harness/gitness/store/database" + "github.com/harness/gitness/store/database/dbtx" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/pkg/errors" + "github.com/rs/zerolog/log" +) + +// user is a DB representation of a user principal. +// It is required to allow storing transformed UIDs used for uniquness constraints and searching. +type user struct { + types.User + UIDUnique string `db:"principal_uid_unique"` +} + +const userColumns = principalCommonColumns + ` + ,principal_user_password` + +const userSelectBase = ` + SELECT` + userColumns + ` + FROM principals` + +// FindUser finds the user by id. +func (s *PrincipalStore) FindUser(ctx context.Context, id int64) (*types.User, error) { + const sqlQuery = userSelectBase + ` + WHERE principal_type = 'user' AND principal_id = $1` + + db := dbtx.GetAccessor(ctx, s.db) + + dst := new(user) + if err := db.GetContext(ctx, dst, sqlQuery, id); err != nil { + return nil, database.ProcessSQLErrorf(err, "Select by id query failed") + } + + return s.mapDBUser(dst), nil +} + +// FindUserByUID finds the user by uid. +func (s *PrincipalStore) FindUserByUID(ctx context.Context, uid string) (*types.User, error) { + const sqlQuery = userSelectBase + ` + WHERE principal_type = 'user' AND principal_uid_unique = $1` + + // map the UID to unique UID before searching! + uidUnique, err := s.uidTransformation(uid) + if err != nil { + // in case we fail to transform, return a not found (as it can't exist in the first place) + log.Ctx(ctx).Debug().Msgf("failed to transform uid '%s': %s", uid, err.Error()) + return nil, gitness_store.ErrResourceNotFound + } + + db := dbtx.GetAccessor(ctx, s.db) + + dst := new(user) + if err = db.GetContext(ctx, dst, sqlQuery, uidUnique); err != nil { + return nil, database.ProcessSQLErrorf(err, "Select by uid query failed") + } + + return s.mapDBUser(dst), nil +} + +// FindUserByEmail finds the user by email. +func (s *PrincipalStore) FindUserByEmail(ctx context.Context, email string) (*types.User, error) { + const sqlQuery = userSelectBase + ` + WHERE principal_type = 'user' AND LOWER(principal_email) = $1` + + db := dbtx.GetAccessor(ctx, s.db) + + dst := new(user) + if err := db.GetContext(ctx, dst, sqlQuery, strings.ToLower(email)); err != nil { + return nil, database.ProcessSQLErrorf(err, "Select by email query failed") + } + + return s.mapDBUser(dst), nil +} + +// CreateUser saves the user details. 
+func (s *PrincipalStore) CreateUser(ctx context.Context, user *types.User) error { + const sqlQuery = ` + INSERT INTO principals ( + principal_type + ,principal_uid + ,principal_uid_unique + ,principal_email + ,principal_display_name + ,principal_admin + ,principal_blocked + ,principal_salt + ,principal_created + ,principal_updated + ,principal_user_password + ) values ( + 'user' + ,:principal_uid + ,:principal_uid_unique + ,:principal_email + ,:principal_display_name + ,:principal_admin + ,:principal_blocked + ,:principal_salt + ,:principal_created + ,:principal_updated + ,:principal_user_password + ) RETURNING principal_id` + + dbUser, err := s.mapToDBUser(user) + if err != nil { + return fmt.Errorf("failed to map db user: %w", err) + } + + db := dbtx.GetAccessor(ctx, s.db) + + query, arg, err := db.BindNamed(sqlQuery, dbUser) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind user object") + } + + if err = db.QueryRowContext(ctx, query, arg...).Scan(&user.ID); err != nil { + return database.ProcessSQLErrorf(err, "Insert query failed") + } + + return nil +} + +// UpdateUser updates an existing user. +func (s *PrincipalStore) UpdateUser(ctx context.Context, user *types.User) error { + const sqlQuery = ` + UPDATE principals + SET + principal_email = :principal_email + ,principal_display_name = :principal_display_name + ,principal_admin = :principal_admin + ,principal_blocked = :principal_blocked + ,principal_salt = :principal_salt + ,principal_updated = :principal_updated + ,principal_user_password = :principal_user_password + WHERE principal_type = 'user' AND principal_id = :principal_id` + + dbUser, err := s.mapToDBUser(user) + if err != nil { + return fmt.Errorf("failed to map db user: %w", err) + } + + db := dbtx.GetAccessor(ctx, s.db) + + query, arg, err := db.BindNamed(sqlQuery, dbUser) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind user object") + } + + if _, err = db.ExecContext(ctx, query, arg...); err != nil { + return database.ProcessSQLErrorf(err, "Update query failed") + } + + return err +} + +// DeleteUser deletes the user. +func (s *PrincipalStore) DeleteUser(ctx context.Context, id int64) error { + const sqlQuery = ` + DELETE FROM principals + WHERE principal_type = 'user' AND principal_id = $1` + + db := dbtx.GetAccessor(ctx, s.db) + + if _, err := db.ExecContext(ctx, sqlQuery, id); err != nil { + return database.ProcessSQLErrorf(err, "The delete query failed") + } + + return nil +} + +// ListUsers returns a list of users. +func (s *PrincipalStore) ListUsers(ctx context.Context, opts *types.UserFilter) ([]*types.User, error) { + db := dbtx.GetAccessor(ctx, s.db) + dst := []*user{} + + stmt := database.Builder. + Select(userColumns). + From("principals"). + Where("principal_type = 'user'") + stmt = stmt.Limit(database.Limit(opts.Size)) + stmt = stmt.Offset(database.Offset(opts.Page, opts.Size)) + + order := opts.Order + if order == enum.OrderDefault { + order = enum.OrderAsc + } + + switch opts.Sort { + case enum.UserAttrName, enum.UserAttrNone: + // NOTE: string concatenation is safe because the + // order attribute is an enum and is not user-defined, + // and is therefore not subject to injection attacks. 
+ stmt = stmt.OrderBy("principal_display_name " + order.String()) + case enum.UserAttrCreated: + stmt = stmt.OrderBy("principal_created " + order.String()) + case enum.UserAttrUpdated: + stmt = stmt.OrderBy("principal_updated " + order.String()) + case enum.UserAttrEmail: + stmt = stmt.OrderBy("LOWER(principal_email) " + order.String()) + case enum.UserAttrUID: + stmt = stmt.OrderBy("principal_uid " + order.String()) + case enum.UserAttrAdmin: + stmt = stmt.OrderBy("principal_admin " + order.String()) + } + + sql, _, err := stmt.ToSql() + if err != nil { + return nil, errors.Wrap(err, "Failed to convert query to sql") + } + + if err = db.SelectContext(ctx, &dst, sql); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed executing custom list query") + } + + return s.mapDBUsers(dst), nil +} + +// CountUsers returns a count of users matching the given filter. +func (s *PrincipalStore) CountUsers(ctx context.Context, opts *types.UserFilter) (int64, error) { + stmt := database.Builder. + Select("count(*)"). + From("principals"). + Where("principal_type = 'user'") + + if opts.Admin { + stmt = stmt.Where("principal_admin = ?", opts.Admin) + } + + sql, args, err := stmt.ToSql() + if err != nil { + return 0, errors.Wrap(err, "Failed to convert query to sql") + } + + db := dbtx.GetAccessor(ctx, s.db) + + var count int64 + err = db.QueryRowContext(ctx, sql, args...).Scan(&count) + if err != nil { + return 0, database.ProcessSQLErrorf(err, "Failed executing count query") + } + + return count, nil +} + +func (s *PrincipalStore) mapDBUser(dbUser *user) *types.User { + return &dbUser.User +} + +func (s *PrincipalStore) mapDBUsers(dbUsers []*user) []*types.User { + res := make([]*types.User, len(dbUsers)) + for i := range dbUsers { + res[i] = s.mapDBUser(dbUsers[i]) + } + return res +} + +func (s *PrincipalStore) mapToDBUser(usr *types.User) (*user, error) { + // user comes from outside. + if usr == nil { + return nil, fmt.Errorf("user is nil") + } + + uidUnique, err := s.uidTransformation(usr.UID) + if err != nil { + return nil, fmt.Errorf("failed to transform user UID: %w", err) + } + dbUser := &user{ + User: *usr, + UIDUnique: uidUnique, + } + + return dbUser, nil +} diff --git a/internal/store/database/pullreq.go b/internal/store/database/pullreq.go new file mode 100644 index 0000000000..7027d2c4aa --- /dev/null +++ b/internal/store/database/pullreq.go @@ -0,0 +1,642 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
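ListUsers above whitelists the sortable attributes through the enum switch before concatenating them into ORDER BY. A caller sketch requesting users ordered by creation time; the filter fields and enum constants are taken from the code above, while the package and function name are hypothetical:

package example // hypothetical caller, not part of this patch

import (
	"context"

	"github.com/harness/gitness/internal/store"
	"github.com/harness/gitness/types"
	"github.com/harness/gitness/types/enum"
)

// listOldestUsers returns the first page of users ordered by creation time,
// ascending (which is also what the store falls back to for OrderDefault).
func listOldestUsers(ctx context.Context, principals store.PrincipalStore) ([]*types.User, error) {
	var opts types.UserFilter
	opts.Page = 1
	opts.Size = 30
	opts.Sort = enum.UserAttrCreated
	opts.Order = enum.OrderAsc
	return principals.ListUsers(ctx, &opts)
}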
+ +package database + +import ( + "context" + "fmt" + "strings" + "time" + + "github.com/harness/gitness/internal/store" + gitness_store "github.com/harness/gitness/store" + "github.com/harness/gitness/store/database" + "github.com/harness/gitness/store/database/dbtx" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/Masterminds/squirrel" + "github.com/guregu/null" + "github.com/jmoiron/sqlx" + "github.com/pkg/errors" + "github.com/rs/zerolog/log" +) + +var _ store.PullReqStore = (*PullReqStore)(nil) + +// NewPullReqStore returns a new PullReqStore. +func NewPullReqStore(db *sqlx.DB, + pCache store.PrincipalInfoCache) *PullReqStore { + return &PullReqStore{ + db: db, + pCache: pCache, + } +} + +// PullReqStore implements store.PullReqStore backed by a relational database. +type PullReqStore struct { + db *sqlx.DB + pCache store.PrincipalInfoCache +} + +// pullReq is used to fetch pull request data from the database. +// The object should be later re-packed into a different struct to return it as an API response. +type pullReq struct { + ID int64 `db:"pullreq_id"` + Version int64 `db:"pullreq_version"` + Number int64 `db:"pullreq_number"` + + CreatedBy int64 `db:"pullreq_created_by"` + Created int64 `db:"pullreq_created"` + Updated int64 `db:"pullreq_updated"` + Edited int64 `db:"pullreq_edited"` + + State enum.PullReqState `db:"pullreq_state"` + IsDraft bool `db:"pullreq_is_draft"` + + CommentCount int `db:"pullreq_comment_count"` + UnresolvedCount int `db:"pullreq_unresolved_count"` + + Title string `db:"pullreq_title"` + Description string `db:"pullreq_description"` + + SourceRepoID int64 `db:"pullreq_source_repo_id"` + SourceBranch string `db:"pullreq_source_branch"` + SourceSHA string `db:"pullreq_source_sha"` + TargetRepoID int64 `db:"pullreq_target_repo_id"` + TargetBranch string `db:"pullreq_target_branch"` + + ActivitySeq int64 `db:"pullreq_activity_seq"` + + MergedBy null.Int `db:"pullreq_merged_by"` + Merged null.Int `db:"pullreq_merged"` + MergeMethod null.String `db:"pullreq_merge_method"` + + MergeCheckStatus enum.MergeCheckStatus `db:"pullreq_merge_check_status"` + MergeTargetSHA null.String `db:"pullreq_merge_target_sha"` + MergeBaseSHA string `db:"pullreq_merge_base_sha"` + MergeSHA null.String `db:"pullreq_merge_sha"` + MergeConflicts null.String `db:"pullreq_merge_conflicts"` +} + +const ( + pullReqColumns = ` + pullreq_id + ,pullreq_version + ,pullreq_number + ,pullreq_created_by + ,pullreq_created + ,pullreq_updated + ,pullreq_edited + ,pullreq_state + ,pullreq_is_draft + ,pullreq_comment_count + ,pullreq_unresolved_count + ,pullreq_title + ,pullreq_description + ,pullreq_source_repo_id + ,pullreq_source_branch + ,pullreq_source_sha + ,pullreq_target_repo_id + ,pullreq_target_branch + ,pullreq_activity_seq + ,pullreq_merged_by + ,pullreq_merged + ,pullreq_merge_method + ,pullreq_merge_check_status + ,pullreq_merge_target_sha + ,pullreq_merge_base_sha + ,pullreq_merge_sha + ,pullreq_merge_conflicts` + + pullReqSelectBase = ` + SELECT` + pullReqColumns + ` + FROM pullreqs` +) + +// Find finds the pull request by id. 
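+// The id is the internal numeric primary key, not the per-repository pull
+// request number; use FindByNumber for lookups by (repoID, number).
+// Illustrative calls (hypothetical variables):
+//
+//	pr, err := prStore.Find(ctx, prID)              // by internal id
+//	pr, err = prStore.FindByNumber(ctx, repoID, 17) // by repo + number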
+func (s *PullReqStore) Find(ctx context.Context, id int64) (*types.PullReq, error) { + const sqlQuery = pullReqSelectBase + ` + WHERE pullreq_id = $1` + + db := dbtx.GetAccessor(ctx, s.db) + + dst := &pullReq{} + if err := db.GetContext(ctx, dst, sqlQuery, id); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed to find pull request") + } + + return s.mapPullReq(ctx, dst), nil +} + +func (s *PullReqStore) findByNumberInternal( + ctx context.Context, + repoID, + number int64, + lock bool, +) (*types.PullReq, error) { + sqlQuery := pullReqSelectBase + ` + WHERE pullreq_target_repo_id = $1 AND pullreq_number = $2` + + if lock && !strings.HasPrefix(s.db.DriverName(), "sqlite") { + sqlQuery += "\n" + database.SQLForUpdate + } + + db := dbtx.GetAccessor(ctx, s.db) + + dst := &pullReq{} + if err := db.GetContext(ctx, dst, sqlQuery, repoID, number); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed to find pull request by number") + } + + return s.mapPullReq(ctx, dst), nil +} + +// FindByNumberWithLock finds the pull request by repo ID and pull request number +// and locks the pull request for the duration of the transaction. +func (s *PullReqStore) FindByNumberWithLock( + ctx context.Context, + repoID, + number int64, +) (*types.PullReq, error) { + return s.findByNumberInternal(ctx, repoID, number, true) +} + +// FindByNumber finds the pull request by repo ID and pull request number. +func (s *PullReqStore) FindByNumber(ctx context.Context, repoID, number int64) (*types.PullReq, error) { + return s.findByNumberInternal(ctx, repoID, number, false) +} + +// Create creates a new pull request. +func (s *PullReqStore) Create(ctx context.Context, pr *types.PullReq) error { + const sqlQuery = ` + INSERT INTO pullreqs ( + pullreq_version + ,pullreq_number + ,pullreq_created_by + ,pullreq_created + ,pullreq_updated + ,pullreq_edited + ,pullreq_state + ,pullreq_is_draft + ,pullreq_comment_count + ,pullreq_unresolved_count + ,pullreq_title + ,pullreq_description + ,pullreq_source_repo_id + ,pullreq_source_branch + ,pullreq_source_sha + ,pullreq_target_repo_id + ,pullreq_target_branch + ,pullreq_activity_seq + ,pullreq_merged_by + ,pullreq_merged + ,pullreq_merge_method + ,pullreq_merge_check_status + ,pullreq_merge_target_sha + ,pullreq_merge_base_sha + ,pullreq_merge_sha + ,pullreq_merge_conflicts + ) values ( + :pullreq_version + ,:pullreq_number + ,:pullreq_created_by + ,:pullreq_created + ,:pullreq_updated + ,:pullreq_edited + ,:pullreq_state + ,:pullreq_is_draft + ,:pullreq_comment_count + ,:pullreq_unresolved_count + ,:pullreq_title + ,:pullreq_description + ,:pullreq_source_repo_id + ,:pullreq_source_branch + ,:pullreq_source_sha + ,:pullreq_target_repo_id + ,:pullreq_target_branch + ,:pullreq_activity_seq + ,:pullreq_merged_by + ,:pullreq_merged + ,:pullreq_merge_method + ,:pullreq_merge_check_status + ,:pullreq_merge_target_sha + ,:pullreq_merge_base_sha + ,:pullreq_merge_sha + ,:pullreq_merge_conflicts + ) RETURNING pullreq_id` + + db := dbtx.GetAccessor(ctx, s.db) + + query, arg, err := db.BindNamed(sqlQuery, mapInternalPullReq(pr)) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind pullReq object") + } + + if err = db.QueryRowContext(ctx, query, arg...).Scan(&pr.ID); err != nil { + return database.ProcessSQLErrorf(err, "Insert query failed") + } + + return nil +} + +// Update updates the pull request. 
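+// It bumps pullreq_version and refreshes pullreq_updated before writing; the
+// version check in the WHERE clause makes the write fail with
+// gitness_store.ErrVersionConflict when the row changed concurrently. Callers
+// that want automatic retries should go through UpdateOptLock instead, e.g.
+// (sketch with a hypothetical caller and title):
+//
+//	pr, err := prStore.UpdateOptLock(ctx, pr, func(pr *types.PullReq) error {
+//		pr.Title = newTitle
+//		return nil
+//	})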
+func (s *PullReqStore) Update(ctx context.Context, pr *types.PullReq) error { + const sqlQuery = ` + UPDATE pullreqs + SET + pullreq_version = :pullreq_version + ,pullreq_updated = :pullreq_updated + ,pullreq_edited = :pullreq_edited + ,pullreq_state = :pullreq_state + ,pullreq_is_draft = :pullreq_is_draft + ,pullreq_comment_count = :pullreq_comment_count + ,pullreq_unresolved_count = :pullreq_unresolved_count + ,pullreq_title = :pullreq_title + ,pullreq_description = :pullreq_description + ,pullreq_activity_seq = :pullreq_activity_seq + ,pullreq_source_sha = :pullreq_source_sha + ,pullreq_merged_by = :pullreq_merged_by + ,pullreq_merged = :pullreq_merged + ,pullreq_merge_method = :pullreq_merge_method + ,pullreq_merge_check_status = :pullreq_merge_check_status + ,pullreq_merge_target_sha = :pullreq_merge_target_sha + ,pullreq_merge_base_sha = :pullreq_merge_base_sha + ,pullreq_merge_sha = :pullreq_merge_sha + ,pullreq_merge_conflicts = :pullreq_merge_conflicts + WHERE pullreq_id = :pullreq_id AND pullreq_version = :pullreq_version - 1` + + db := dbtx.GetAccessor(ctx, s.db) + + updatedAt := time.Now() + + dbPR := mapInternalPullReq(pr) + dbPR.Version++ + dbPR.Updated = updatedAt.UnixMilli() + + query, arg, err := db.BindNamed(sqlQuery, dbPR) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind pull request object") + } + + result, err := db.ExecContext(ctx, query, arg...) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to update pull request") + } + + count, err := result.RowsAffected() + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to get number of updated rows") + } + + if count == 0 { + return gitness_store.ErrVersionConflict + } + + *pr = *s.mapPullReq(ctx, dbPR) + + return nil +} + +// UpdateOptLock the pull request details using the optimistic locking mechanism. +func (s *PullReqStore) UpdateOptLock(ctx context.Context, pr *types.PullReq, + mutateFn func(pr *types.PullReq) error, +) (*types.PullReq, error) { + for { + dup := *pr + + err := mutateFn(&dup) + if err != nil { + return nil, err + } + + err = s.Update(ctx, &dup) + if err == nil { + return &dup, nil + } + if !errors.Is(err, gitness_store.ErrVersionConflict) { + return nil, err + } + + pr, err = s.Find(ctx, pr.ID) + if err != nil { + return nil, err + } + } +} + +// UpdateActivitySeq updates the pull request's activity sequence. +func (s *PullReqStore) UpdateActivitySeq(ctx context.Context, pr *types.PullReq) (*types.PullReq, error) { + return s.UpdateOptLock(ctx, pr, func(pr *types.PullReq) error { + pr.ActivitySeq++ + return nil + }) +} + +// UpdateMergeCheckStatus updates the pull request's mergeability status +// for all pr which target branch points to targetBranch. +func (s *PullReqStore) UpdateMergeCheckStatus( + ctx context.Context, + targetRepo int64, + targetBranch string, + status enum.MergeCheckStatus, +) error { + const query = ` + UPDATE pullreqs + SET + pullreq_updated = $1 + ,pullreq_merge_check_status = $2 + ,pullreq_version = pullreq_version + 1 + WHERE pullreq_target_repo_id = $3 AND + pullreq_target_branch = $4 AND + pullreq_state not in ($5, $6)` + + db := dbtx.GetAccessor(ctx, s.db) + + now := time.Now().UnixMilli() + + _, err := db.ExecContext(ctx, query, now, status, targetRepo, targetBranch, + enum.PullReqStateClosed, enum.PullReqStateMerged) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to update mergeable status check %s in pull requests", status) + } + + return nil +} + +// Delete the pull request. 
+func (s *PullReqStore) Delete(ctx context.Context, id int64) error { + const pullReqDelete = `DELETE FROM pullreqs WHERE pullreq_id = $1` + + db := dbtx.GetAccessor(ctx, s.db) + + if _, err := db.ExecContext(ctx, pullReqDelete, id); err != nil { + return database.ProcessSQLErrorf(err, "the delete query failed") + } + + return nil +} + +// Count of pull requests for a repo. +func (s *PullReqStore) Count(ctx context.Context, opts *types.PullReqFilter) (int64, error) { + stmt := database.Builder. + Select("count(*)"). + From("pullreqs") + + if len(opts.States) == 1 { + stmt = stmt.Where("pullreq_state = ?", opts.States[0]) + } else if len(opts.States) > 1 { + stmt = stmt.Where(squirrel.Eq{"pullreq_state": opts.States}) + } + + if opts.SourceRepoID != 0 { + stmt = stmt.Where("pullreq_source_repo_id = ?", opts.SourceRepoID) + } + + if opts.SourceBranch != "" { + stmt = stmt.Where("pullreq_source_branch = ?", opts.SourceBranch) + } + + if opts.TargetRepoID != 0 { + stmt = stmt.Where("pullreq_target_repo_id = ?", opts.TargetRepoID) + } + + if opts.TargetBranch != "" { + stmt = stmt.Where("pullreq_target_branch = ?", opts.TargetBranch) + } + + if opts.Query != "" { + stmt = stmt.Where("LOWER(pullreq_title) LIKE ?", fmt.Sprintf("%%%s%%", strings.ToLower(opts.Query))) + } + + if opts.CreatedBy != 0 { + stmt = stmt.Where("pullreq_created_by = ?", opts.CreatedBy) + } + + sql, args, err := stmt.ToSql() + if err != nil { + return 0, errors.Wrap(err, "Failed to convert query to sql") + } + + db := dbtx.GetAccessor(ctx, s.db) + + var count int64 + err = db.QueryRowContext(ctx, sql, args...).Scan(&count) + if err != nil { + return 0, database.ProcessSQLErrorf(err, "Failed executing count query") + } + + return count, nil +} + +// List returns a list of pull requests for a repo. +func (s *PullReqStore) List(ctx context.Context, opts *types.PullReqFilter) ([]*types.PullReq, error) { + stmt := database.Builder. + Select(pullReqColumns). + From("pullreqs") + + if len(opts.States) == 1 { + stmt = stmt.Where("pullreq_state = ?", opts.States[0]) + } else if len(opts.States) > 1 { + stmt = stmt.Where(squirrel.Eq{"pullreq_state": opts.States}) + } + + if opts.SourceRepoID != 0 { + stmt = stmt.Where("pullreq_source_repo_id = ?", opts.SourceRepoID) + } + + if opts.SourceBranch != "" { + stmt = stmt.Where("pullreq_source_branch = ?", opts.SourceBranch) + } + + if opts.TargetRepoID != 0 { + stmt = stmt.Where("pullreq_target_repo_id = ?", opts.TargetRepoID) + } + + if opts.TargetBranch != "" { + stmt = stmt.Where("pullreq_target_branch = ?", opts.TargetBranch) + } + + if opts.Query != "" { + stmt = stmt.Where("LOWER(pullreq_title) LIKE ?", fmt.Sprintf("%%%s%%", strings.ToLower(opts.Query))) + } + + if opts.CreatedBy != 0 { + stmt = stmt.Where("pullreq_created_by = ?", opts.CreatedBy) + } + + stmt = stmt.Limit(database.Limit(opts.Size)) + stmt = stmt.Offset(database.Offset(opts.Page, opts.Size)) + + // NOTE: string concatenation is safe because the + // order attribute is an enum and is not user-defined, + // and is therefore not subject to injection attacks. 
+ opts.Sort, _ = opts.Sort.Sanitize() + stmt = stmt.OrderBy("pullreq_" + string(opts.Sort) + " " + opts.Order.String()) + + sql, args, err := stmt.ToSql() + if err != nil { + return nil, errors.Wrap(err, "Failed to convert query to sql") + } + + dst := make([]*pullReq, 0) + + db := dbtx.GetAccessor(ctx, s.db) + + if err = db.SelectContext(ctx, &dst, sql, args...); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed executing custom list query") + } + + result, err := s.mapSlicePullReq(ctx, dst) + if err != nil { + return nil, err + } + + return result, nil +} + +func mapPullReq(pr *pullReq) *types.PullReq { + return &types.PullReq{ + ID: pr.ID, + Version: pr.Version, + Number: pr.Number, + CreatedBy: pr.CreatedBy, + Created: pr.Created, + Updated: pr.Updated, + Edited: pr.Edited, + State: pr.State, + IsDraft: pr.IsDraft, + CommentCount: pr.CommentCount, + UnresolvedCount: pr.UnresolvedCount, + Title: pr.Title, + Description: pr.Description, + SourceRepoID: pr.SourceRepoID, + SourceBranch: pr.SourceBranch, + SourceSHA: pr.SourceSHA, + TargetRepoID: pr.TargetRepoID, + TargetBranch: pr.TargetBranch, + ActivitySeq: pr.ActivitySeq, + MergedBy: pr.MergedBy.Ptr(), + Merged: pr.Merged.Ptr(), + MergeMethod: (*enum.MergeMethod)(pr.MergeMethod.Ptr()), + MergeCheckStatus: pr.MergeCheckStatus, + MergeTargetSHA: pr.MergeTargetSHA.Ptr(), + MergeBaseSHA: pr.MergeBaseSHA, + MergeSHA: pr.MergeSHA.Ptr(), + MergeConflicts: pr.MergeConflicts.Ptr(), + Author: types.PrincipalInfo{}, + Merger: nil, + Stats: types.PullReqStats{ + Conversations: pr.CommentCount, + UnresolvedCount: pr.UnresolvedCount, + DiffStats: types.DiffStats{ + Commits: 0, + FilesChanged: 0, + }, + }, + } +} + +func mapInternalPullReq(pr *types.PullReq) *pullReq { + m := &pullReq{ + ID: pr.ID, + Version: pr.Version, + Number: pr.Number, + CreatedBy: pr.CreatedBy, + Created: pr.Created, + Updated: pr.Updated, + Edited: pr.Edited, + State: pr.State, + IsDraft: pr.IsDraft, + CommentCount: pr.CommentCount, + UnresolvedCount: pr.UnresolvedCount, + Title: pr.Title, + Description: pr.Description, + SourceRepoID: pr.SourceRepoID, + SourceBranch: pr.SourceBranch, + SourceSHA: pr.SourceSHA, + TargetRepoID: pr.TargetRepoID, + TargetBranch: pr.TargetBranch, + ActivitySeq: pr.ActivitySeq, + MergedBy: null.IntFromPtr(pr.MergedBy), + Merged: null.IntFromPtr(pr.Merged), + MergeMethod: null.StringFromPtr((*string)(pr.MergeMethod)), + MergeCheckStatus: pr.MergeCheckStatus, + MergeTargetSHA: null.StringFromPtr(pr.MergeTargetSHA), + MergeBaseSHA: pr.MergeBaseSHA, + MergeSHA: null.StringFromPtr(pr.MergeSHA), + MergeConflicts: null.StringFromPtr(pr.MergeConflicts), + } + + return m +} + +func (s *PullReqStore) mapPullReq(ctx context.Context, pr *pullReq) *types.PullReq { + m := mapPullReq(pr) + + var author, merger *types.PrincipalInfo + var err error + + author, err = s.pCache.Get(ctx, pr.CreatedBy) + if err != nil { + log.Ctx(ctx).Err(err).Msg("failed to load PR author") + } + if author != nil { + m.Author = *author + } + + if pr.MergedBy.Valid { + merger, err = s.pCache.Get(ctx, pr.MergedBy.Int64) + if err != nil { + log.Ctx(ctx).Err(err).Msg("failed to load PR merger") + } + m.Merger = merger + } + + return m +} + +func (s *PullReqStore) mapSlicePullReq(ctx context.Context, prs []*pullReq) ([]*types.PullReq, error) { + // collect all principal IDs + ids := make([]int64, 0, 2*len(prs)) + for _, pr := range prs { + ids = append(ids, pr.CreatedBy) + if pr.MergedBy.Valid { + ids = append(ids, pr.MergedBy.Int64) + } + } + + // pull principal infos from 
cache + infoMap, err := s.pCache.Map(ctx, ids) + if err != nil { + return nil, fmt.Errorf("failed to load PR principal infos: %w", err) + } + + // attach the principal infos back to the slice items + m := make([]*types.PullReq, len(prs)) + for i, pr := range prs { + m[i] = mapPullReq(pr) + if author, ok := infoMap[pr.CreatedBy]; ok { + m[i].Author = *author + } + if pr.MergedBy.Valid { + if merger, ok := infoMap[pr.MergedBy.Int64]; ok { + m[i].Merger = merger + } + } + } + + return m, nil +} diff --git a/internal/store/database/pullreq_activity.go b/internal/store/database/pullreq_activity.go new file mode 100644 index 0000000000..bf2de62ef4 --- /dev/null +++ b/internal/store/database/pullreq_activity.go @@ -0,0 +1,607 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package database + +import ( + "context" + "encoding/json" + "fmt" + "time" + + "github.com/harness/gitness/internal/store" + gitness_store "github.com/harness/gitness/store" + "github.com/harness/gitness/store/database" + "github.com/harness/gitness/store/database/dbtx" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/Masterminds/squirrel" + "github.com/guregu/null" + "github.com/jmoiron/sqlx" + "github.com/pkg/errors" + "github.com/rs/zerolog/log" +) + +var _ store.PullReqActivityStore = (*PullReqActivityStore)(nil) + +// NewPullReqActivityStore returns a new PullReqJournalStore. +func NewPullReqActivityStore( + db *sqlx.DB, + pCache store.PrincipalInfoCache, +) *PullReqActivityStore { + return &PullReqActivityStore{ + db: db, + pCache: pCache, + } +} + +// PullReqActivityStore implements store.PullReqActivityStore backed by a relational database. +type PullReqActivityStore struct { + db *sqlx.DB + pCache store.PrincipalInfoCache +} + +// journal is used to fetch pull request data from the database. +// The object should be later re-packed into a different struct to return it as an API response. 
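+// Each instance maps a single row of the pullreq_activities table: payload and
+// metadata are stored as raw JSON; metadata is unmarshalled into a map when the
+// row is converted to types.PullReqActivity, while the payload is passed
+// through as raw bytes. The code-comment columns are only populated for
+// code-comment activities.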
+type pullReqActivity struct { + ID int64 `db:"pullreq_activity_id"` + Version int64 `db:"pullreq_activity_version"` + + CreatedBy int64 `db:"pullreq_activity_created_by"` + Created int64 `db:"pullreq_activity_created"` + Updated int64 `db:"pullreq_activity_updated"` + Edited int64 `db:"pullreq_activity_edited"` + Deleted null.Int `db:"pullreq_activity_deleted"` + + ParentID null.Int `db:"pullreq_activity_parent_id"` + RepoID int64 `db:"pullreq_activity_repo_id"` + PullReqID int64 `db:"pullreq_activity_pullreq_id"` + + Order int64 `db:"pullreq_activity_order"` + SubOrder int64 `db:"pullreq_activity_sub_order"` + ReplySeq int64 `db:"pullreq_activity_reply_seq"` + + Type enum.PullReqActivityType `db:"pullreq_activity_type"` + Kind enum.PullReqActivityKind `db:"pullreq_activity_kind"` + + Text string `db:"pullreq_activity_text"` + Payload json.RawMessage `db:"pullreq_activity_payload"` + Metadata json.RawMessage `db:"pullreq_activity_metadata"` + + ResolvedBy null.Int `db:"pullreq_activity_resolved_by"` + Resolved null.Int `db:"pullreq_activity_resolved"` + + Outdated null.Bool `db:"pullreq_activity_outdated"` + CodeCommentMergeBaseSHA null.String `db:"pullreq_activity_code_comment_merge_base_sha"` + CodeCommentSourceSHA null.String `db:"pullreq_activity_code_comment_source_sha"` + CodeCommentPath null.String `db:"pullreq_activity_code_comment_path"` + CodeCommentLineNew null.Int `db:"pullreq_activity_code_comment_line_new"` + CodeCommentSpanNew null.Int `db:"pullreq_activity_code_comment_span_new"` + CodeCommentLineOld null.Int `db:"pullreq_activity_code_comment_line_old"` + CodeCommentSpanOld null.Int `db:"pullreq_activity_code_comment_span_old"` +} + +const ( + pullreqActivityColumns = ` + pullreq_activity_id + ,pullreq_activity_version + ,pullreq_activity_created_by + ,pullreq_activity_created + ,pullreq_activity_updated + ,pullreq_activity_edited + ,pullreq_activity_deleted + ,pullreq_activity_parent_id + ,pullreq_activity_repo_id + ,pullreq_activity_pullreq_id + ,pullreq_activity_order + ,pullreq_activity_sub_order + ,pullreq_activity_reply_seq + ,pullreq_activity_type + ,pullreq_activity_kind + ,pullreq_activity_text + ,pullreq_activity_payload + ,pullreq_activity_metadata + ,pullreq_activity_resolved_by + ,pullreq_activity_resolved + ,pullreq_activity_outdated + ,pullreq_activity_code_comment_merge_base_sha + ,pullreq_activity_code_comment_source_sha + ,pullreq_activity_code_comment_path + ,pullreq_activity_code_comment_line_new + ,pullreq_activity_code_comment_span_new + ,pullreq_activity_code_comment_line_old + ,pullreq_activity_code_comment_span_old` + + pullreqActivitySelectBase = ` + SELECT` + pullreqActivityColumns + ` + FROM pullreq_activities` +) + +// Find finds the pull request activity by id. +func (s *PullReqActivityStore) Find(ctx context.Context, id int64) (*types.PullReqActivity, error) { + const sqlQuery = pullreqActivitySelectBase + ` + WHERE pullreq_activity_id = $1` + + db := dbtx.GetAccessor(ctx, s.db) + + dst := &pullReqActivity{} + if err := db.GetContext(ctx, dst, sqlQuery, id); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed to find pull request activity") + } + + return s.mapPullReqActivity(ctx, dst), nil +} + +// Create creates a new pull request. 
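+// More precisely, it inserts a new pullreq_activities row and writes the
+// generated id back to act.ID. System activities are usually written through
+// CreateWithPayload below, which fills the standard fields, serializes the
+// payload via SetPayload and then calls Create, e.g. (hypothetical variables):
+//
+//	act, err := activityStore.CreateWithPayload(ctx, pr, session.Principal.ID, payload)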
+func (s *PullReqActivityStore) Create(ctx context.Context, act *types.PullReqActivity) error { + const sqlQuery = ` + INSERT INTO pullreq_activities ( + pullreq_activity_version + ,pullreq_activity_created_by + ,pullreq_activity_created + ,pullreq_activity_updated + ,pullreq_activity_edited + ,pullreq_activity_deleted + ,pullreq_activity_parent_id + ,pullreq_activity_repo_id + ,pullreq_activity_pullreq_id + ,pullreq_activity_order + ,pullreq_activity_sub_order + ,pullreq_activity_reply_seq + ,pullreq_activity_type + ,pullreq_activity_kind + ,pullreq_activity_text + ,pullreq_activity_payload + ,pullreq_activity_metadata + ,pullreq_activity_resolved_by + ,pullreq_activity_resolved + ,pullreq_activity_outdated + ,pullreq_activity_code_comment_merge_base_sha + ,pullreq_activity_code_comment_source_sha + ,pullreq_activity_code_comment_path + ,pullreq_activity_code_comment_line_new + ,pullreq_activity_code_comment_span_new + ,pullreq_activity_code_comment_line_old + ,pullreq_activity_code_comment_span_old + ) values ( + :pullreq_activity_version + ,:pullreq_activity_created_by + ,:pullreq_activity_created + ,:pullreq_activity_updated + ,:pullreq_activity_edited + ,:pullreq_activity_deleted + ,:pullreq_activity_parent_id + ,:pullreq_activity_repo_id + ,:pullreq_activity_pullreq_id + ,:pullreq_activity_order + ,:pullreq_activity_sub_order + ,:pullreq_activity_reply_seq + ,:pullreq_activity_type + ,:pullreq_activity_kind + ,:pullreq_activity_text + ,:pullreq_activity_payload + ,:pullreq_activity_metadata + ,:pullreq_activity_resolved_by + ,:pullreq_activity_resolved + ,:pullreq_activity_outdated + ,:pullreq_activity_code_comment_merge_base_sha + ,:pullreq_activity_code_comment_source_sha + ,:pullreq_activity_code_comment_path + ,:pullreq_activity_code_comment_line_new + ,:pullreq_activity_code_comment_span_new + ,:pullreq_activity_code_comment_line_old + ,:pullreq_activity_code_comment_span_old + ) RETURNING pullreq_activity_id` + + db := dbtx.GetAccessor(ctx, s.db) + + query, arg, err := db.BindNamed(sqlQuery, mapInternalPullReqActivity(act)) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind pull request activity object") + } + + if err = db.QueryRowContext(ctx, query, arg...).Scan(&act.ID); err != nil { + return database.ProcessSQLErrorf(err, "Failed to insert pull request activity") + } + + return nil +} + +func (s *PullReqActivityStore) CreateWithPayload(ctx context.Context, + pr *types.PullReq, principalID int64, payload types.PullReqActivityPayload, +) (*types.PullReqActivity, error) { + now := time.Now().UnixMilli() + act := &types.PullReqActivity{ + CreatedBy: principalID, + Created: now, + Updated: now, + Edited: now, + RepoID: pr.TargetRepoID, + PullReqID: pr.ID, + Order: pr.ActivitySeq, + SubOrder: 0, + ReplySeq: 0, + Type: payload.ActivityType(), + Kind: enum.PullReqActivityKindSystem, + Text: "", + } + + _ = act.SetPayload(payload) + + err := s.Create(ctx, act) + if err != nil { + err = fmt.Errorf("failed to write pull request system '%s' activity: %w", payload.ActivityType(), err) + return nil, err + } + + return act, nil +} + +// Update updates the pull request. 
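+// More precisely, it updates a pull request activity using the same
+// version-check pattern as PullReqStore.Update, returning
+// gitness_store.ErrVersionConflict when the row changed concurrently.
+// UpdateOptLock below wraps this in a retry loop, e.g. to resolve a comment
+// thread (sketch, hypothetical caller):
+//
+//	now := time.Now().UnixMilli()
+//	act, err := activityStore.UpdateOptLock(ctx, act, func(act *types.PullReqActivity) error {
+//		act.Resolved = &now
+//		act.ResolvedBy = &session.Principal.ID
+//		return nil
+//	})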
+func (s *PullReqActivityStore) Update(ctx context.Context, act *types.PullReqActivity) error { + const sqlQuery = ` + UPDATE pullreq_activities + SET + pullreq_activity_version = :pullreq_activity_version + ,pullreq_activity_updated = :pullreq_activity_updated + ,pullreq_activity_edited = :pullreq_activity_edited + ,pullreq_activity_deleted = :pullreq_activity_deleted + ,pullreq_activity_reply_seq = :pullreq_activity_reply_seq + ,pullreq_activity_text = :pullreq_activity_text + ,pullreq_activity_payload = :pullreq_activity_payload + ,pullreq_activity_metadata = :pullreq_activity_metadata + ,pullreq_activity_resolved_by = :pullreq_activity_resolved_by + ,pullreq_activity_resolved = :pullreq_activity_resolved + ,pullreq_activity_outdated = :pullreq_activity_outdated + ,pullreq_activity_code_comment_merge_base_sha = :pullreq_activity_code_comment_merge_base_sha + ,pullreq_activity_code_comment_source_sha = :pullreq_activity_code_comment_source_sha + ,pullreq_activity_code_comment_path = :pullreq_activity_code_comment_path + ,pullreq_activity_code_comment_line_new = :pullreq_activity_code_comment_line_new + ,pullreq_activity_code_comment_span_new = :pullreq_activity_code_comment_span_new + ,pullreq_activity_code_comment_line_old = :pullreq_activity_code_comment_line_old + ,pullreq_activity_code_comment_span_old = :pullreq_activity_code_comment_span_old + WHERE pullreq_activity_id = :pullreq_activity_id AND pullreq_activity_version = :pullreq_activity_version - 1` + + db := dbtx.GetAccessor(ctx, s.db) + + updatedAt := time.Now() + + dbAct := mapInternalPullReqActivity(act) + dbAct.Version++ + dbAct.Updated = updatedAt.UnixMilli() + + query, arg, err := db.BindNamed(sqlQuery, dbAct) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind pull request activity object") + } + + result, err := db.ExecContext(ctx, query, arg...) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to update pull request activity") + } + + count, err := result.RowsAffected() + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to get number of updated rows") + } + + if count == 0 { + return gitness_store.ErrVersionConflict + } + + *act = *s.mapPullReqActivity(ctx, dbAct) + + return nil +} + +// UpdateOptLock updates the pull request using the optimistic locking mechanism. +func (s *PullReqActivityStore) UpdateOptLock(ctx context.Context, + act *types.PullReqActivity, + mutateFn func(act *types.PullReqActivity) error, +) (*types.PullReqActivity, error) { + for { + dup := *act + + err := mutateFn(&dup) + if err != nil { + return nil, err + } + + err = s.Update(ctx, &dup) + if err == nil { + return &dup, nil + } + if !errors.Is(err, gitness_store.ErrVersionConflict) { + return nil, err + } + + act, err = s.Find(ctx, act.ID) + if err != nil { + return nil, err + } + } +} + +// Count of pull requests for a repo. +func (s *PullReqActivityStore) Count(ctx context.Context, + prID int64, + opts *types.PullReqActivityFilter, +) (int64, error) { + stmt := database.Builder. + Select("count(*)"). + From("pullreq_activities"). 
+ Where("pullreq_activity_pullreq_id = ?", prID) + + if len(opts.Types) == 1 { + stmt = stmt.Where("pullreq_activity_type = ?", opts.Types[0]) + } else if len(opts.Types) > 1 { + stmt = stmt.Where(squirrel.Eq{"pullreq_activity_type": opts.Types}) + } + + if len(opts.Kinds) == 1 { + stmt = stmt.Where("pullreq_activity_kind = ?", opts.Kinds[0]) + } else if len(opts.Kinds) > 1 { + stmt = stmt.Where(squirrel.Eq{"pullreq_activity_kind": opts.Kinds}) + } + + if opts.After != 0 { + stmt = stmt.Where("pullreq_activity_created > ?", opts.After) + } + + if opts.Before != 0 { + stmt = stmt.Where("pullreq_activity_created < ?", opts.Before) + } + + sql, args, err := stmt.ToSql() + if err != nil { + return 0, errors.Wrap(err, "Failed to convert query to sql") + } + + db := dbtx.GetAccessor(ctx, s.db) + + var count int64 + err = db.QueryRowContext(ctx, sql, args...).Scan(&count) + if err != nil { + return 0, database.ProcessSQLErrorf(err, "Failed executing count query") + } + + return count, nil +} + +// List returns a list of pull requests for a repo. +func (s *PullReqActivityStore) List(ctx context.Context, + prID int64, + opts *types.PullReqActivityFilter, +) ([]*types.PullReqActivity, error) { + stmt := database.Builder. + Select(pullreqActivityColumns). + From("pullreq_activities"). + Where("pullreq_activity_pullreq_id = ?", prID) + + if len(opts.Types) == 1 { + stmt = stmt.Where("pullreq_activity_type = ?", opts.Types[0]) + } else if len(opts.Types) > 1 { + stmt = stmt.Where(squirrel.Eq{"pullreq_activity_type": opts.Types}) + } + + if len(opts.Kinds) == 1 { + stmt = stmt.Where("pullreq_activity_kind = ?", opts.Kinds[0]) + } else if len(opts.Kinds) > 1 { + stmt = stmt.Where(squirrel.Eq{"pullreq_activity_kind": opts.Kinds}) + } + + if opts.After != 0 { + stmt = stmt.Where("pullreq_activity_created > ?", opts.After) + } + + if opts.Before != 0 { + stmt = stmt.Where("pullreq_activity_created < ?", opts.Before) + } + + if opts.Limit > 0 { + stmt = stmt.Limit(database.Limit(opts.Limit)) + } + + stmt = stmt.OrderBy("pullreq_activity_order asc", "pullreq_activity_sub_order asc") + + sql, args, err := stmt.ToSql() + if err != nil { + return nil, errors.Wrap(err, "Failed to convert pull request activity query to sql") + } + + dst := make([]*pullReqActivity, 0) + + db := dbtx.GetAccessor(ctx, s.db) + + if err = db.SelectContext(ctx, &dst, sql, args...); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed executing pull request activity list query") + } + + result, err := s.mapSlicePullReqActivity(ctx, dst) + if err != nil { + return nil, err + } + + return result, nil +} + +func (s *PullReqActivityStore) CountUnresolved(ctx context.Context, prID int64) (int, error) { + stmt := database.Builder. + Select("count(*)"). + From("pullreq_activities"). + Where("pullreq_activity_pullreq_id = ?", prID). + Where("pullreq_activity_sub_order = 0"). + Where("pullreq_activity_resolved IS NULL"). + Where("pullreq_activity_deleted IS NULL"). 
+ Where("pullreq_activity_kind <> ?", enum.PullReqActivityKindSystem) + + sql, args, err := stmt.ToSql() + if err != nil { + return 0, errors.Wrap(err, "Failed to convert query to sql") + } + + db := dbtx.GetAccessor(ctx, s.db) + + var count int + err = db.QueryRowContext(ctx, sql, args...).Scan(&count) + if err != nil { + return 0, database.ProcessSQLErrorf(err, "Failed executing count unresolved query") + } + + return count, nil +} + +func mapPullReqActivity(act *pullReqActivity) *types.PullReqActivity { + m := &types.PullReqActivity{ + ID: act.ID, + Version: act.Version, + CreatedBy: act.CreatedBy, + Created: act.Created, + Updated: act.Updated, + Edited: act.Edited, + Deleted: act.Deleted.Ptr(), + ParentID: act.ParentID.Ptr(), + RepoID: act.RepoID, + PullReqID: act.PullReqID, + Order: act.Order, + SubOrder: act.SubOrder, + ReplySeq: act.ReplySeq, + Type: act.Type, + Kind: act.Kind, + Text: act.Text, + PayloadRaw: act.Payload, + Metadata: make(map[string]interface{}), + ResolvedBy: act.ResolvedBy.Ptr(), + Resolved: act.Resolved.Ptr(), + Author: types.PrincipalInfo{}, + Resolver: nil, + } + if m.Type == enum.PullReqActivityTypeCodeComment && m.Kind == enum.PullReqActivityKindChangeComment { + m.CodeComment = &types.CodeCommentFields{ + Outdated: act.Outdated.Bool, + MergeBaseSHA: act.CodeCommentMergeBaseSHA.String, + SourceSHA: act.CodeCommentSourceSHA.String, + Path: act.CodeCommentPath.String, + LineNew: int(act.CodeCommentLineNew.Int64), + SpanNew: int(act.CodeCommentSpanNew.Int64), + LineOld: int(act.CodeCommentLineOld.Int64), + SpanOld: int(act.CodeCommentSpanOld.Int64), + } + } + + _ = json.Unmarshal(act.Metadata, &m.Metadata) + + return m +} + +func mapInternalPullReqActivity(act *types.PullReqActivity) *pullReqActivity { + m := &pullReqActivity{ + ID: act.ID, + Version: act.Version, + CreatedBy: act.CreatedBy, + Created: act.Created, + Updated: act.Updated, + Edited: act.Edited, + Deleted: null.IntFromPtr(act.Deleted), + ParentID: null.IntFromPtr(act.ParentID), + RepoID: act.RepoID, + PullReqID: act.PullReqID, + Order: act.Order, + SubOrder: act.SubOrder, + ReplySeq: act.ReplySeq, + Type: act.Type, + Kind: act.Kind, + Text: act.Text, + Payload: act.PayloadRaw, + Metadata: nil, + ResolvedBy: null.IntFromPtr(act.ResolvedBy), + Resolved: null.IntFromPtr(act.Resolved), + } + if act.IsValidCodeComment() { + m.Outdated = null.BoolFrom(act.CodeComment.Outdated) + m.CodeCommentMergeBaseSHA = null.StringFrom(act.CodeComment.MergeBaseSHA) + m.CodeCommentSourceSHA = null.StringFrom(act.CodeComment.SourceSHA) + m.CodeCommentPath = null.StringFrom(act.CodeComment.Path) + m.CodeCommentLineNew = null.IntFrom(int64(act.CodeComment.LineNew)) + m.CodeCommentSpanNew = null.IntFrom(int64(act.CodeComment.SpanNew)) + m.CodeCommentLineOld = null.IntFrom(int64(act.CodeComment.LineOld)) + m.CodeCommentSpanOld = null.IntFrom(int64(act.CodeComment.SpanOld)) + } + + m.Metadata, _ = json.Marshal(act.Metadata) + + return m +} + +func (s *PullReqActivityStore) mapPullReqActivity(ctx context.Context, act *pullReqActivity) *types.PullReqActivity { + m := mapPullReqActivity(act) + + var author, resolver *types.PrincipalInfo + var err error + + author, err = s.pCache.Get(ctx, act.CreatedBy) + if err != nil { + log.Ctx(ctx).Err(err).Msg("failed to load PR activity author") + } + if author != nil { + m.Author = *author + } + + if act.ResolvedBy.Valid { + resolver, err = s.pCache.Get(ctx, act.ResolvedBy.Int64) + if err != nil { + log.Ctx(ctx).Err(err).Msg("failed to load PR activity resolver") + } + m.Resolver = 
resolver + } + + return m +} + +func (s *PullReqActivityStore) mapSlicePullReqActivity( + ctx context.Context, + activities []*pullReqActivity, +) ([]*types.PullReqActivity, error) { + // collect all principal IDs + ids := make([]int64, 0, 2*len(activities)) + for _, act := range activities { + ids = append(ids, act.CreatedBy) + if act.ResolvedBy.Valid { + ids = append(ids, act.ResolvedBy.Int64) + } + } + + // pull principal infos from cache + infoMap, err := s.pCache.Map(ctx, ids) + if err != nil { + return nil, fmt.Errorf("failed to load PR principal infos: %w", err) + } + + // attach the principal infos back to the slice items + m := make([]*types.PullReqActivity, len(activities)) + for i, act := range activities { + m[i] = mapPullReqActivity(act) + if author, ok := infoMap[act.CreatedBy]; ok { + m[i].Author = *author + } + if act.ResolvedBy.Valid { + if merger, ok := infoMap[act.ResolvedBy.Int64]; ok { + m[i].Resolver = merger + } + } + } + + return m, nil +} diff --git a/internal/store/database/pullreq_file_view_store.go b/internal/store/database/pullreq_file_view_store.go new file mode 100644 index 0000000000..a4d0a29205 --- /dev/null +++ b/internal/store/database/pullreq_file_view_store.go @@ -0,0 +1,205 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package database + +import ( + "context" + "time" + + "github.com/Masterminds/squirrel" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/store/database" + "github.com/harness/gitness/store/database/dbtx" + "github.com/harness/gitness/types" + + "github.com/jmoiron/sqlx" + "github.com/pkg/errors" +) + +var _ store.PullReqFileViewStore = (*PullReqFileViewStore)(nil) + +// NewPullReqFileViewStore returns a new PullReqFileViewStore. +func NewPullReqFileViewStore( + db *sqlx.DB, +) *PullReqFileViewStore { + return &PullReqFileViewStore{ + db: db, + } +} + +// PullReqFileViewStore implements store.PullReqFileViewStore backed by a relational database. +type PullReqFileViewStore struct { + db *sqlx.DB +} + +type pullReqFileView struct { + PullReqID int64 `db:"pullreq_file_view_pullreq_id"` + PrincipalID int64 `db:"pullreq_file_view_principal_id"` + + Path string `db:"pullreq_file_view_path"` + SHA string `db:"pullreq_file_view_sha"` + Obsolete bool `db:"pullreq_file_view_obsolete"` + + Created int64 `db:"pullreq_file_view_created"` + Updated int64 `db:"pullreq_file_view_updated"` +} + +const ( + pullReqFileViewsColumn = ` + pullreq_file_view_pullreq_id + ,pullreq_file_view_principal_id + ,pullreq_file_view_path + ,pullreq_file_view_sha + ,pullreq_file_view_obsolete + ,pullreq_file_view_created + ,pullreq_file_view_updated` +) + +// Upsert inserts or updates the latest viewed sha for a file in a PR. 
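+// The conflict target is the (pull request, principal, path) triple, so
+// repeated calls by the same viewer for the same file refresh the sha, the
+// obsolete flag and the updated timestamp while keeping the original created
+// timestamp (returned via RETURNING and written back to view.Created). Sketch
+// of a caller (hypothetical values):
+//
+//	err := fileViewStore.Upsert(ctx, &types.PullReqFileView{
+//		PullReqID:   pr.ID,
+//		PrincipalID: session.Principal.ID,
+//		Path:        "README.md",
+//		SHA:         fileSHA,
+//		Created:     now,
+//		Updated:     now,
+//	})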
+func (s *PullReqFileViewStore) Upsert(ctx context.Context, view *types.PullReqFileView) error { + const sqlQuery = ` + INSERT INTO pullreq_file_views ( + pullreq_file_view_pullreq_id + ,pullreq_file_view_principal_id + ,pullreq_file_view_path + ,pullreq_file_view_sha + ,pullreq_file_view_obsolete + ,pullreq_file_view_created + ,pullreq_file_view_updated + ) VALUES ( + :pullreq_file_view_pullreq_id + ,:pullreq_file_view_principal_id + ,:pullreq_file_view_path + ,:pullreq_file_view_sha + ,:pullreq_file_view_obsolete + ,:pullreq_file_view_created + ,:pullreq_file_view_updated + ) + ON CONFLICT (pullreq_file_view_pullreq_id, pullreq_file_view_principal_id, pullreq_file_view_path) DO + UPDATE SET + pullreq_file_view_updated = :pullreq_file_view_updated + ,pullreq_file_view_sha = :pullreq_file_view_sha + ,pullreq_file_view_obsolete = :pullreq_file_view_obsolete + RETURNING pullreq_file_view_created` + + db := dbtx.GetAccessor(ctx, s.db) + + query, arg, err := db.BindNamed(sqlQuery, mapToInternalPullreqFileView(view)) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind pullreq file view object") + } + + if err = db.QueryRowContext(ctx, query, arg...).Scan(&view.Created); err != nil { + return database.ProcessSQLErrorf(err, "Upsert query failed") + } + + return nil +} + +// DeleteByFileForPrincipal deletes the entry for the specified PR, principal, and file. +func (s *PullReqFileViewStore) DeleteByFileForPrincipal(ctx context.Context, prID int64, principalID int64, filePath string) error { + const sqlQuery = ` + DELETE from pullreq_file_views + WHERE pullreq_file_view_pullreq_id = $1 AND + pullreq_file_view_principal_id = $2 AND + pullreq_file_view_path = $3` + + db := dbtx.GetAccessor(ctx, s.db) + + if _, err := db.ExecContext(ctx, sqlQuery, prID, principalID, filePath); err != nil { + return database.ProcessSQLErrorf(err, "delete query failed") + } + + return nil +} + +// MarkObsolete updates all entries of the files as obsolete for the PR. +func (s *PullReqFileViewStore) MarkObsolete(ctx context.Context, prID int64, filePaths []string) error { + stmt := database.Builder. + Update("pullreq_file_views"). + Set("pullreq_file_view_obsolete", true). + Set("pullreq_file_view_updated", time.Now().UnixMilli()). + Where("pullreq_file_view_pullreq_id = ?", prID). + Where(squirrel.Eq{"pullreq_file_view_path": filePaths}). + Where("pullreq_file_view_obsolete = ?", false) + + sql, args, err := stmt.ToSql() + if err != nil { + return errors.Wrap(err, "Failed to create sql query") + } + + db := dbtx.GetAccessor(ctx, s.db) + + if _, err := db.ExecContext(ctx, sql, args...); err != nil { + return database.ProcessSQLErrorf(err, "failed to execute update query") + } + + return nil +} + +// List lists all files marked as viewed by the user for the specified PR. +func (s *PullReqFileViewStore) List(ctx context.Context, prID int64, principalID int64) ([]*types.PullReqFileView, error) { + stmt := database.Builder. + Select(pullReqFileViewsColumn). + From("pullreq_file_views"). + Where("pullreq_file_view_pullreq_id = ?", prID). 
+ Where("pullreq_file_view_principal_id = ?", principalID) + + sql, args, err := stmt.ToSql() + if err != nil { + return nil, errors.Wrap(err, "Failed to convert query to sql") + } + + db := dbtx.GetAccessor(ctx, s.db) + + var dst []*pullReqFileView + if err = db.SelectContext(ctx, &dst, sql, args...); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed to execute list query") + } + + return mapToPullreqFileViews(dst), nil +} + +func mapToInternalPullreqFileView(view *types.PullReqFileView) *pullReqFileView { + return &pullReqFileView{ + PullReqID: view.PullReqID, + PrincipalID: view.PrincipalID, + Path: view.Path, + SHA: view.SHA, + Obsolete: view.Obsolete, + Created: view.Created, + Updated: view.Updated, + } +} + +func mapToPullreqFileView(view *pullReqFileView) *types.PullReqFileView { + return &types.PullReqFileView{ + PullReqID: view.PullReqID, + PrincipalID: view.PrincipalID, + Path: view.Path, + SHA: view.SHA, + Obsolete: view.Obsolete, + Created: view.Created, + Updated: view.Updated, + } +} + +func mapToPullreqFileViews(views []*pullReqFileView) []*types.PullReqFileView { + m := make([]*types.PullReqFileView, len(views)) + for i, view := range views { + m[i] = mapToPullreqFileView(view) + } + return m +} diff --git a/internal/store/database/pullreq_reviewers.go b/internal/store/database/pullreq_reviewers.go new file mode 100644 index 0000000000..17396b97bb --- /dev/null +++ b/internal/store/database/pullreq_reviewers.go @@ -0,0 +1,315 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package database + +import ( + "context" + "fmt" + "time" + + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/store/database" + "github.com/harness/gitness/store/database/dbtx" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/guregu/null" + "github.com/jmoiron/sqlx" + "github.com/pkg/errors" + "github.com/rs/zerolog/log" +) + +var _ store.PullReqReviewerStore = (*PullReqReviewerStore)(nil) + +const maxPullRequestReviewers = 100 + +// NewPullReqReviewerStore returns a new PullReqReviewerStore. +func NewPullReqReviewerStore(db *sqlx.DB, + pCache store.PrincipalInfoCache) *PullReqReviewerStore { + return &PullReqReviewerStore{ + db: db, + pCache: pCache, + } +} + +// PullReqReviewerStore implements store.PullReqReviewerStore backed by a relational database. +type PullReqReviewerStore struct { + db *sqlx.DB + pCache store.PrincipalInfoCache +} + +// pullReqReviewer is used to fetch pull request reviewer data from the database. 
+type pullReqReviewer struct { + PullReqID int64 `db:"pullreq_reviewer_pullreq_id"` + PrincipalID int64 `db:"pullreq_reviewer_principal_id"` + CreatedBy int64 `db:"pullreq_reviewer_created_by"` + Created int64 `db:"pullreq_reviewer_created"` + Updated int64 `db:"pullreq_reviewer_updated"` + + RepoID int64 `db:"pullreq_reviewer_repo_id"` + Type enum.PullReqReviewerType `db:"pullreq_reviewer_type"` + LatestReviewID null.Int `db:"pullreq_reviewer_latest_review_id"` + + ReviewDecision enum.PullReqReviewDecision `db:"pullreq_reviewer_review_decision"` + SHA string `db:"pullreq_reviewer_sha"` +} + +const ( + pullreqReviewerColumns = ` + pullreq_reviewer_pullreq_id + ,pullreq_reviewer_principal_id + ,pullreq_reviewer_created_by + ,pullreq_reviewer_created + ,pullreq_reviewer_updated + ,pullreq_reviewer_repo_id + ,pullreq_reviewer_type + ,pullreq_reviewer_latest_review_id + ,pullreq_reviewer_review_decision + ,pullreq_reviewer_sha` + + pullreqReviewerSelectBase = ` + SELECT` + pullreqReviewerColumns + ` + FROM pullreq_reviewers` +) + +// Find finds the pull request reviewer by pull request id and principal id. +func (s *PullReqReviewerStore) Find(ctx context.Context, prID, principalID int64) (*types.PullReqReviewer, error) { + const sqlQuery = pullreqReviewerSelectBase + ` + WHERE pullreq_reviewer_pullreq_id = $1 AND pullreq_reviewer_principal_id = $2` + + db := dbtx.GetAccessor(ctx, s.db) + + dst := &pullReqReviewer{} + if err := db.GetContext(ctx, dst, sqlQuery, prID, principalID); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed to find pull request reviewer") + } + + return s.mapPullReqReviewer(ctx, dst), nil +} + +// Create creates a new pull request reviewer. +func (s *PullReqReviewerStore) Create(ctx context.Context, v *types.PullReqReviewer) error { + const sqlQuery = ` + INSERT INTO pullreq_reviewers ( + pullreq_reviewer_pullreq_id + ,pullreq_reviewer_principal_id + ,pullreq_reviewer_created_by + ,pullreq_reviewer_created + ,pullreq_reviewer_updated + ,pullreq_reviewer_repo_id + ,pullreq_reviewer_type + ,pullreq_reviewer_latest_review_id + ,pullreq_reviewer_review_decision + ,pullreq_reviewer_sha + ) values ( + :pullreq_reviewer_pullreq_id + ,:pullreq_reviewer_principal_id + ,:pullreq_reviewer_created_by + ,:pullreq_reviewer_created + ,:pullreq_reviewer_updated + ,:pullreq_reviewer_repo_id + ,:pullreq_reviewer_type + ,:pullreq_reviewer_latest_review_id + ,:pullreq_reviewer_review_decision + ,:pullreq_reviewer_sha + )` + + db := dbtx.GetAccessor(ctx, s.db) + + query, arg, err := db.BindNamed(sqlQuery, mapInternalPullReqReviewer(v)) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind pull request reviewer object") + } + + if _, err = db.ExecContext(ctx, query, arg...); err != nil { + return database.ProcessSQLErrorf(err, "Failed to insert pull request reviewer") + } + + return nil +} + +// Update updates the pull request reviewer. 
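+// Only the latest review id, the review decision, the sha and the updated
+// timestamp are written, and there is no version column, so the last write
+// wins. Typical flow after a review has been stored (sketch, hypothetical
+// caller):
+//
+//	reviewer.LatestReviewID = &review.ID
+//	reviewer.ReviewDecision = review.Decision
+//	reviewer.SHA = review.SHA
+//	err = reviewerStore.Update(ctx, reviewer)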
+func (s *PullReqReviewerStore) Update(ctx context.Context, v *types.PullReqReviewer) error { + const sqlQuery = ` + UPDATE pullreq_reviewers + SET + pullreq_reviewer_updated = :pullreq_reviewer_updated + ,pullreq_reviewer_latest_review_id = :pullreq_reviewer_latest_review_id + ,pullreq_reviewer_review_decision = :pullreq_reviewer_review_decision + ,pullreq_reviewer_sha = :pullreq_reviewer_sha + WHERE pullreq_reviewer_pullreq_id = :pullreq_reviewer_pullreq_id AND + pullreq_reviewer_principal_id = :pullreq_reviewer_principal_id` + + db := dbtx.GetAccessor(ctx, s.db) + + updatedAt := time.Now() + + dbv := mapInternalPullReqReviewer(v) + dbv.Updated = updatedAt.UnixMilli() + + query, arg, err := db.BindNamed(sqlQuery, dbv) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind pull request activity object") + } + + _, err = db.ExecContext(ctx, query, arg...) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to update pull request activity") + } + + v.Updated = dbv.Updated + + return nil +} + +// Delete deletes the pull request reviewer. +func (s *PullReqReviewerStore) Delete(ctx context.Context, prID, reviewerID int64) error { + const sqlQuery = ` + DELETE from pullreq_reviewers + WHERE pullreq_reviewer_pullreq_id = $1 AND + pullreq_reviewer_principal_id = $2` + + db := dbtx.GetAccessor(ctx, s.db) + + if _, err := db.ExecContext(ctx, sqlQuery, prID, reviewerID); err != nil { + return database.ProcessSQLErrorf(err, "delete reviewer query failed") + } + return nil +} + +// List returns a list of pull reviewers for a pull request. +func (s *PullReqReviewerStore) List(ctx context.Context, prID int64) ([]*types.PullReqReviewer, error) { + stmt := database.Builder. + Select(pullreqReviewerColumns). + From("pullreq_reviewers"). + Where("pullreq_reviewer_pullreq_id = ?", prID). + OrderBy("pullreq_reviewer_created asc"). 
+ Limit(maxPullRequestReviewers) // memory safety limit + + sql, args, err := stmt.ToSql() + if err != nil { + return nil, errors.Wrap(err, "Failed to convert pull request reviewer list query to sql") + } + + dst := make([]*pullReqReviewer, 0) + + db := dbtx.GetAccessor(ctx, s.db) + + if err = db.SelectContext(ctx, &dst, sql, args...); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed executing pull request reviewer list query") + } + + result, err := s.mapSlicePullReqReviewer(ctx, dst) + if err != nil { + return nil, err + } + + return result, nil +} + +func mapPullReqReviewer(v *pullReqReviewer) *types.PullReqReviewer { + m := &types.PullReqReviewer{ + PullReqID: v.PullReqID, + PrincipalID: v.PrincipalID, + CreatedBy: v.CreatedBy, + Created: v.Created, + Updated: v.Updated, + RepoID: v.RepoID, + Type: v.Type, + LatestReviewID: v.LatestReviewID.Ptr(), + ReviewDecision: v.ReviewDecision, + SHA: v.SHA, + } + return m +} + +func mapInternalPullReqReviewer(v *types.PullReqReviewer) *pullReqReviewer { + m := &pullReqReviewer{ + PullReqID: v.PullReqID, + PrincipalID: v.PrincipalID, + CreatedBy: v.CreatedBy, + Created: v.Created, + Updated: v.Updated, + RepoID: v.RepoID, + Type: v.Type, + LatestReviewID: null.IntFromPtr(v.LatestReviewID), + ReviewDecision: v.ReviewDecision, + SHA: v.SHA, + } + return m +} + +func (s *PullReqReviewerStore) mapPullReqReviewer(ctx context.Context, v *pullReqReviewer) *types.PullReqReviewer { + m := &types.PullReqReviewer{ + PullReqID: v.PullReqID, + PrincipalID: v.PrincipalID, + CreatedBy: v.CreatedBy, + Created: v.Created, + Updated: v.Updated, + RepoID: v.RepoID, + Type: v.Type, + LatestReviewID: v.LatestReviewID.Ptr(), + ReviewDecision: v.ReviewDecision, + SHA: v.SHA, + } + + addedBy, err := s.pCache.Get(ctx, v.CreatedBy) + if err != nil { + log.Ctx(ctx).Err(err).Msg("failed to load PR reviewer addedBy") + } + if addedBy != nil { + m.AddedBy = *addedBy + } + + reviewer, err := s.pCache.Get(ctx, v.PrincipalID) + if err != nil { + log.Ctx(ctx).Err(err).Msg("failed to load PR reviewer principal") + } + if reviewer != nil { + m.Reviewer = *reviewer + } + + return m +} + +func (s *PullReqReviewerStore) mapSlicePullReqReviewer(ctx context.Context, + reviewers []*pullReqReviewer) ([]*types.PullReqReviewer, error) { + // collect all principal IDs + ids := make([]int64, 0, 2*len(reviewers)) + for _, v := range reviewers { + ids = append(ids, v.CreatedBy) + ids = append(ids, v.PrincipalID) + } + + // pull principal infos from cache + infoMap, err := s.pCache.Map(ctx, ids) + if err != nil { + return nil, fmt.Errorf("failed to load PR principal infos: %w", err) + } + + // attach the principal infos back to the slice items + m := make([]*types.PullReqReviewer, len(reviewers)) + for i, v := range reviewers { + m[i] = mapPullReqReviewer(v) + if addedBy, ok := infoMap[v.CreatedBy]; ok { + m[i].AddedBy = *addedBy + } + if reviewer, ok := infoMap[v.PrincipalID]; ok { + m[i].Reviewer = *reviewer + } + } + + return m, nil +} diff --git a/internal/store/database/pullreq_reviews.go b/internal/store/database/pullreq_reviews.go new file mode 100644 index 0000000000..ce4a86374e --- /dev/null +++ b/internal/store/database/pullreq_reviews.go @@ -0,0 +1,126 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package database + +import ( + "context" + + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/store/database" + "github.com/harness/gitness/store/database/dbtx" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/jmoiron/sqlx" +) + +var _ store.PullReqReviewStore = (*PullReqReviewStore)(nil) + +// NewPullReqReviewStore returns a new PullReqReviewStore. +func NewPullReqReviewStore(db *sqlx.DB) *PullReqReviewStore { + return &PullReqReviewStore{ + db: db, + } +} + +// PullReqReviewStore implements store.PullReqReviewStore backed by a relational database. +type PullReqReviewStore struct { + db *sqlx.DB +} + +// pullReqReview is used to fetch pull request review data from the database. +type pullReqReview struct { + ID int64 `db:"pullreq_review_id"` + + CreatedBy int64 `db:"pullreq_review_created_by"` + Created int64 `db:"pullreq_review_created"` + Updated int64 `db:"pullreq_review_updated"` + + PullReqID int64 `db:"pullreq_review_pullreq_id"` + + Decision enum.PullReqReviewDecision `db:"pullreq_review_decision"` + SHA string `db:"pullreq_review_sha"` +} + +const ( + pullreqReviewColumns = ` + pullreq_review_id + ,pullreq_review_created_by + ,pullreq_review_created + ,pullreq_review_updated + ,pullreq_review_pullreq_id + ,pullreq_review_decision + ,pullreq_review_sha` + + pullreqReviewSelectBase = ` + SELECT` + pullreqReviewColumns + ` + FROM pullreq_reviews` +) + +// Find finds the pull request activity by id. +func (s *PullReqReviewStore) Find(ctx context.Context, id int64) (*types.PullReqReview, error) { + const sqlQuery = pullreqReviewSelectBase + ` + WHERE pullreq_review_id = $1` + + db := dbtx.GetAccessor(ctx, s.db) + + dst := &pullReqReview{} + if err := db.GetContext(ctx, dst, sqlQuery, id); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed to find pull request activity") + } + + return mapPullReqReview(dst), nil +} + +// Create creates a new pull request. 
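+// More precisely, it inserts a new pull request review row and writes the
+// generated id back to v.ID, e.g. (sketch, hypothetical caller; decision is an
+// enum.PullReqReviewDecision value):
+//
+//	review := &types.PullReqReview{
+//		CreatedBy: session.Principal.ID,
+//		Created:   now,
+//		Updated:   now,
+//		PullReqID: pr.ID,
+//		Decision:  decision,
+//		SHA:       sourceSHA,
+//	}
+//	err := reviewStore.Create(ctx, review)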
+func (s *PullReqReviewStore) Create(ctx context.Context, v *types.PullReqReview) error { + const sqlQuery = ` + INSERT INTO pullreq_reviews ( + pullreq_review_created_by + ,pullreq_review_created + ,pullreq_review_updated + ,pullreq_review_pullreq_id + ,pullreq_review_decision + ,pullreq_review_sha + ) values ( + :pullreq_review_created_by + ,:pullreq_review_created + ,:pullreq_review_updated + ,:pullreq_review_pullreq_id + ,:pullreq_review_decision + ,:pullreq_review_sha + ) RETURNING pullreq_review_id` + + db := dbtx.GetAccessor(ctx, s.db) + + query, arg, err := db.BindNamed(sqlQuery, mapInternalPullReqReview(v)) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind pull request review object") + } + + if err = db.QueryRowContext(ctx, query, arg...).Scan(&v.ID); err != nil { + return database.ProcessSQLErrorf(err, "Failed to insert pull request review") + } + + return nil +} + +func mapPullReqReview(v *pullReqReview) *types.PullReqReview { + return (*types.PullReqReview)(v) // the two types are identical, except for the tags +} + +func mapInternalPullReqReview(v *types.PullReqReview) *pullReqReview { + return (*pullReqReview)(v) // the two types are identical, except for the tags +} diff --git a/internal/store/database/repo.go b/internal/store/database/repo.go new file mode 100644 index 0000000000..78cecd6175 --- /dev/null +++ b/internal/store/database/repo.go @@ -0,0 +1,476 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package database + +import ( + "context" + "fmt" + "strconv" + "strings" + "time" + + "github.com/harness/gitness/internal/paths" + "github.com/harness/gitness/internal/store" + gitness_store "github.com/harness/gitness/store" + "github.com/harness/gitness/store/database" + "github.com/harness/gitness/store/database/dbtx" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/jmoiron/sqlx" + "github.com/pkg/errors" +) + +var _ store.RepoStore = (*RepoStore)(nil) + +// NewRepoStore returns a new RepoStore. +func NewRepoStore( + db *sqlx.DB, + spacePathCache store.SpacePathCache, + spacePathStore store.SpacePathStore, +) *RepoStore { + return &RepoStore{ + db: db, + spacePathCache: spacePathCache, + spacePathStore: spacePathStore, + } +} + +// RepoStore implements a store.RepoStore backed by a relational database. 
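+// The space path cache and store are used to resolve a repository's full path
+// (parent space path plus uid), which is not stored on the repository row
+// itself, e.g. when looking a repository up by reference (hypothetical calls):
+//
+//	repo, err := repoStore.FindByRef(ctx, "myspace/myrepo") // space path + uid
+//	repo, err = repoStore.FindByRef(ctx, "42")              // digits only: internal id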
+type RepoStore struct {
+	db             *sqlx.DB
+	spacePathCache store.SpacePathCache
+	spacePathStore store.SpacePathStore
+}
+
+type repository struct {
+	// TODO: int64 ID doesn't match DB
+	ID          int64  `db:"repo_id"`
+	Version     int64  `db:"repo_version"`
+	ParentID    int64  `db:"repo_parent_id"`
+	UID         string `db:"repo_uid"`
+	Description string `db:"repo_description"`
+	IsPublic    bool   `db:"repo_is_public"`
+	CreatedBy   int64  `db:"repo_created_by"`
+	Created     int64  `db:"repo_created"`
+	Updated     int64  `db:"repo_updated"`
+
+	GitUID        string `db:"repo_git_uid"`
+	DefaultBranch string `db:"repo_default_branch"`
+	ForkID        int64  `db:"repo_fork_id"`
+	PullReqSeq    int64  `db:"repo_pullreq_seq"`
+
+	NumForks       int `db:"repo_num_forks"`
+	NumPulls       int `db:"repo_num_pulls"`
+	NumClosedPulls int `db:"repo_num_closed_pulls"`
+	NumOpenPulls   int `db:"repo_num_open_pulls"`
+	NumMergedPulls int `db:"repo_num_merged_pulls"`
+
+	Importing bool `db:"repo_importing"`
+}
+
+const (
+	repoColumnsForJoin = `
+	 repo_id
+	,repo_version
+	,repo_parent_id
+	,repo_uid
+	,repo_description
+	,repo_is_public
+	,repo_created_by
+	,repo_created
+	,repo_updated
+	,repo_git_uid
+	,repo_default_branch
+	,repo_pullreq_seq
+	,repo_fork_id
+	,repo_num_forks
+	,repo_num_pulls
+	,repo_num_closed_pulls
+	,repo_num_open_pulls
+	,repo_num_merged_pulls
+	,repo_importing`
+
+	repoSelectBase = `
+	SELECT` + repoColumnsForJoin + `
+	FROM repositories`
+)
+
+// Find finds the repo by id.
+func (s *RepoStore) Find(ctx context.Context, id int64) (*types.Repository, error) {
+	const sqlQuery = repoSelectBase + `
+	WHERE repo_id = $1`
+
+	db := dbtx.GetAccessor(ctx, s.db)
+
+	dst := new(repository)
+	if err := db.GetContext(ctx, dst, sqlQuery, id); err != nil {
+		return nil, database.ProcessSQLErrorf(err, "Failed to find repo")
+	}
+
+	return s.mapToRepo(ctx, dst)
+}
+
+// FindByUID finds the repo with the given UID in the given space ID.
+func (s *RepoStore) FindByUID(ctx context.Context, spaceID int64, uid string) (*types.Repository, error) {
+	const sqlQuery = repoSelectBase + `
+	WHERE repo_parent_id = $1 AND LOWER(repo_uid) = $2`
+
+	db := dbtx.GetAccessor(ctx, s.db)
+
+	dst := new(repository)
+	if err := db.GetContext(ctx, dst, sqlQuery, spaceID, strings.ToLower(uid)); err != nil {
+		return nil, database.ProcessSQLErrorf(err, "Failed to find repo")
+	}
+
+	return s.mapToRepo(ctx, dst)
+}
+
+// FindByRef finds the repo using the repoRef as either the id or the repo path.
+func (s *RepoStore) FindByRef(ctx context.Context, repoRef string) (*types.Repository, error) {
+	// ASSUMPTION: digits only is not a valid repo path
+	id, err := strconv.ParseInt(repoRef, 10, 64)
+	if err != nil {
+		spacePath, repoUID, err := paths.DisectLeaf(repoRef)
+		if err != nil {
+			return nil, fmt.Errorf("failed to split repo ref into space path and repo uid: %w", err)
+		}
+
+		pathObject, err := s.spacePathCache.Get(ctx, spacePath)
+		if err != nil {
+			return nil, fmt.Errorf("failed to get space path: %w", err)
+		}
+
+		return s.FindByUID(ctx, pathObject.SpaceID, repoUID)
+	}
+
+	return s.Find(ctx, id)
+}
+
+// Create creates a new repository.
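+// A minimal usage sketch (illustrative only, not part of this change; variable names are
+// assumed): on success the passed struct is updated in place - ID comes from the
+// RETURNING clause and Path is resolved from the parent space's primary path.
+//
+//	repo := &types.Repository{ParentID: spaceID, UID: "my-repo", DefaultBranch: "main"}
+//	if err := repoStore.Create(ctx, repo); err != nil {
+//		return err
+//	}
+//	// repo.ID and repo.Path are now populated.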
+func (s *RepoStore) Create(ctx context.Context, repo *types.Repository) error { + const sqlQuery = ` + INSERT INTO repositories ( + repo_version + ,repo_parent_id + ,repo_uid + ,repo_description + ,repo_is_public + ,repo_created_by + ,repo_created + ,repo_updated + ,repo_git_uid + ,repo_default_branch + ,repo_fork_id + ,repo_pullreq_seq + ,repo_num_forks + ,repo_num_pulls + ,repo_num_closed_pulls + ,repo_num_open_pulls + ,repo_num_merged_pulls + ,repo_importing + ) values ( + :repo_version + ,:repo_parent_id + ,:repo_uid + ,:repo_description + ,:repo_is_public + ,:repo_created_by + ,:repo_created + ,:repo_updated + ,:repo_git_uid + ,:repo_default_branch + ,:repo_fork_id + ,:repo_pullreq_seq + ,:repo_num_forks + ,:repo_num_pulls + ,:repo_num_closed_pulls + ,:repo_num_open_pulls + ,:repo_num_merged_pulls + ,:repo_importing + ) RETURNING repo_id` + + db := dbtx.GetAccessor(ctx, s.db) + + // insert repo first so we get id + query, arg, err := db.BindNamed(sqlQuery, mapToInternalRepo(repo)) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind repo object") + } + + if err = db.QueryRowContext(ctx, query, arg...).Scan(&repo.ID); err != nil { + return database.ProcessSQLErrorf(err, "Insert query failed") + } + + repo.Path, err = s.getRepoPath(ctx, repo.ParentID, repo.UID) + if err != nil { + return err + } + + return nil +} + +// Update updates the repo details. +func (s *RepoStore) Update(ctx context.Context, repo *types.Repository) error { + const sqlQuery = ` + UPDATE repositories + SET + repo_version = :repo_version + ,repo_updated = :repo_updated + ,repo_parent_id = :repo_parent_id + ,repo_uid = :repo_uid + ,repo_git_uid = :repo_git_uid + ,repo_description = :repo_description + ,repo_is_public = :repo_is_public + ,repo_default_branch = :repo_default_branch + ,repo_pullreq_seq = :repo_pullreq_seq + ,repo_num_forks = :repo_num_forks + ,repo_num_pulls = :repo_num_pulls + ,repo_num_closed_pulls = :repo_num_closed_pulls + ,repo_num_open_pulls = :repo_num_open_pulls + ,repo_num_merged_pulls = :repo_num_merged_pulls + ,repo_importing = :repo_importing + WHERE repo_id = :repo_id AND repo_version = :repo_version - 1` + + dbRepo := mapToInternalRepo(repo) + + // update Version (used for optimistic locking) and Updated time + dbRepo.Version++ + dbRepo.Updated = time.Now().UnixMilli() + + db := dbtx.GetAccessor(ctx, s.db) + + query, arg, err := db.BindNamed(sqlQuery, dbRepo) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind repo object") + } + + result, err := db.ExecContext(ctx, query, arg...) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to update repository") + } + + count, err := result.RowsAffected() + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to get number of updated rows") + } + + if count == 0 { + return gitness_store.ErrVersionConflict + } + + repo.Version = dbRepo.Version + repo.Updated = dbRepo.Updated + + // update path in case parent/uid changed (its most likely cached anyway) + repo.Path, err = s.getRepoPath(ctx, repo.ParentID, repo.UID) + if err != nil { + return err + } + + return nil +} + +// UpdateOptLock updates the repository using the optimistic locking mechanism. 
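+// A minimal usage sketch (illustrative only, not part of this change; variable names are
+// assumed): callers only supply the mutation, while the retry-on-version-conflict loop is
+// handled here.
+//
+//	updated, err := repoStore.UpdateOptLock(ctx, repo, func(r *types.Repository) error {
+//		r.Description = "updated description"
+//		return nil
+//	})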
+func (s *RepoStore) UpdateOptLock(ctx context.Context, + repo *types.Repository, + mutateFn func(repository *types.Repository) error, +) (*types.Repository, error) { + for { + dup := *repo + + err := mutateFn(&dup) + if err != nil { + return nil, err + } + + err = s.Update(ctx, &dup) + if err == nil { + return &dup, nil + } + if !errors.Is(err, gitness_store.ErrVersionConflict) { + return nil, err + } + + repo, err = s.Find(ctx, repo.ID) + if err != nil { + return nil, err + } + } +} + +// Delete the repository. +func (s *RepoStore) Delete(ctx context.Context, id int64) error { + const repoDelete = ` + DELETE FROM repositories + WHERE repo_id = $1` + + db := dbtx.GetAccessor(ctx, s.db) + + if _, err := db.ExecContext(ctx, repoDelete, id); err != nil { + return database.ProcessSQLErrorf(err, "the delete query failed") + } + + return nil +} + +// Count of repos in a space. if parentID (space) is zero then it will count all repositories in the system. +func (s *RepoStore) Count(ctx context.Context, parentID int64, opts *types.RepoFilter) (int64, error) { + stmt := database.Builder. + Select("count(*)"). + From("repositories") + + if parentID > 0 { + stmt = stmt.Where("repo_parent_id = ?", parentID) + } + + if opts.Query != "" { + stmt = stmt.Where("LOWER(repo_uid) LIKE ?", fmt.Sprintf("%%%s%%", strings.ToLower(opts.Query))) + } + + sql, args, err := stmt.ToSql() + if err != nil { + return 0, errors.Wrap(err, "Failed to convert query to sql") + } + + db := dbtx.GetAccessor(ctx, s.db) + + var count int64 + err = db.QueryRowContext(ctx, sql, args...).Scan(&count) + if err != nil { + return 0, database.ProcessSQLErrorf(err, "Failed executing count query") + } + return count, nil +} + +// List returns a list of repos in a space. +func (s *RepoStore) List(ctx context.Context, parentID int64, opts *types.RepoFilter) ([]*types.Repository, error) { + stmt := database.Builder. + Select(repoColumnsForJoin). + From("repositories"). + Where("repo_parent_id = ?", fmt.Sprint(parentID)) + + if opts.Query != "" { + stmt = stmt.Where("LOWER(repo_uid) LIKE ?", fmt.Sprintf("%%%s%%", strings.ToLower(opts.Query))) + } + + stmt = stmt.Limit(database.Limit(opts.Size)) + stmt = stmt.Offset(database.Offset(opts.Page, opts.Size)) + + switch opts.Sort { + case enum.RepoAttrUID, enum.RepoAttrNone: + // NOTE: string concatenation is safe because the + // order attribute is an enum and is not user-defined, + // and is therefore not subject to injection attacks. 
+ stmt = stmt.OrderBy("repo_importing desc, repo_uid " + opts.Order.String()) + case enum.RepoAttrCreated: + stmt = stmt.OrderBy("repo_created " + opts.Order.String()) + case enum.RepoAttrUpdated: + stmt = stmt.OrderBy("repo_updated " + opts.Order.String()) + } + + sql, args, err := stmt.ToSql() + if err != nil { + return nil, errors.Wrap(err, "Failed to convert query to sql") + } + + db := dbtx.GetAccessor(ctx, s.db) + + dst := []*repository{} + if err = db.SelectContext(ctx, &dst, sql, args...); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed executing custom list query") + } + + return s.mapToRepos(ctx, dst) +} + +func (s *RepoStore) mapToRepo( + ctx context.Context, + in *repository, +) (*types.Repository, error) { + var err error + res := &types.Repository{ + ID: in.ID, + Version: in.Version, + ParentID: in.ParentID, + UID: in.UID, + Description: in.Description, + IsPublic: in.IsPublic, + Created: in.Created, + CreatedBy: in.CreatedBy, + Updated: in.Updated, + GitUID: in.GitUID, + DefaultBranch: in.DefaultBranch, + ForkID: in.ForkID, + PullReqSeq: in.PullReqSeq, + NumForks: in.NumForks, + NumPulls: in.NumPulls, + NumClosedPulls: in.NumClosedPulls, + NumOpenPulls: in.NumOpenPulls, + NumMergedPulls: in.NumMergedPulls, + Importing: in.Importing, + // Path: is set below + } + + res.Path, err = s.getRepoPath(ctx, in.ParentID, in.UID) + if err != nil { + return nil, err + } + + return res, nil +} + +func (s *RepoStore) getRepoPath(ctx context.Context, parentID int64, repoUID string) (string, error) { + spacePath, err := s.spacePathStore.FindPrimaryBySpaceID(ctx, parentID) + if err != nil { + return "", fmt.Errorf("failed to get primary path for space %d: %w", parentID, err) + } + return paths.Concatinate(spacePath.Value, repoUID), nil +} + +func (s *RepoStore) mapToRepos( + ctx context.Context, + repos []*repository, +) ([]*types.Repository, error) { + var err error + res := make([]*types.Repository, len(repos)) + for i := range repos { + res[i], err = s.mapToRepo(ctx, repos[i]) + if err != nil { + return nil, err + } + } + return res, nil +} + +func mapToInternalRepo(in *types.Repository) *repository { + return &repository{ + ID: in.ID, + Version: in.Version, + ParentID: in.ParentID, + UID: in.UID, + Description: in.Description, + IsPublic: in.IsPublic, + Created: in.Created, + CreatedBy: in.CreatedBy, + Updated: in.Updated, + GitUID: in.GitUID, + DefaultBranch: in.DefaultBranch, + ForkID: in.ForkID, + PullReqSeq: in.PullReqSeq, + NumForks: in.NumForks, + NumPulls: in.NumPulls, + NumClosedPulls: in.NumClosedPulls, + NumOpenPulls: in.NumOpenPulls, + NumMergedPulls: in.NumMergedPulls, + Importing: in.Importing, + } +} diff --git a/internal/store/database/repo_git_info.go b/internal/store/database/repo_git_info.go new file mode 100644 index 0000000000..51cd2f4471 --- /dev/null +++ b/internal/store/database/repo_git_info.go @@ -0,0 +1,62 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package database + +import ( + "context" + + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/store/database" + "github.com/harness/gitness/store/database/dbtx" + "github.com/harness/gitness/types" + + "github.com/jmoiron/sqlx" +) + +var _ store.RepoGitInfoView = (*RepoGitInfoView)(nil) + +// NewRepoGitInfoView returns a new RepoGitInfoView. +// It's used by the repository git UID cache. +func NewRepoGitInfoView(db *sqlx.DB) *RepoGitInfoView { + return &RepoGitInfoView{ + db: db, + } +} + +type RepoGitInfoView struct { + db *sqlx.DB +} + +func (s *RepoGitInfoView) Find(ctx context.Context, id int64) (*types.RepositoryGitInfo, error) { + const sqlQuery = ` + SELECT repo_git_uid, repo_parent_id + FROM repositories + WHERE repo_id = $1` + + db := dbtx.GetAccessor(ctx, s.db) + + v := db.QueryRowContext(ctx, sqlQuery, id) + if err := v.Err(); err != nil { + return nil, database.ProcessSQLErrorf(err, "failed to find git uid by repository id") + } + + var result = types.RepositoryGitInfo{ID: id} + + if err := v.Scan(&result.GitUID, &result.ParentID); err != nil { + return nil, database.ProcessSQLErrorf(err, "failed to scan git uid") + } + + return &result, nil +} diff --git a/internal/store/database/secret.go b/internal/store/database/secret.go new file mode 100644 index 0000000000..3777f330b1 --- /dev/null +++ b/internal/store/database/secret.go @@ -0,0 +1,301 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package database + +import ( + "context" + "fmt" + "strings" + "time" + + "github.com/harness/gitness/internal/store" + gitness_store "github.com/harness/gitness/store" + "github.com/harness/gitness/store/database" + "github.com/harness/gitness/store/database/dbtx" + "github.com/harness/gitness/types" + + "github.com/jmoiron/sqlx" + "github.com/pkg/errors" +) + +var _ store.SecretStore = (*secretStore)(nil) + +const ( + secretQueryBase = ` + SELECT` + secretColumns + ` + FROM secrets` + + secretColumns = ` + secret_id, + secret_description, + secret_space_id, + secret_created_by, + secret_uid, + secret_data, + secret_created, + secret_updated, + secret_version + ` +) + +// NewSecretStore returns a new SecretStore. +func NewSecretStore(db *sqlx.DB) *secretStore { + return &secretStore{ + db: db, + } +} + +type secretStore struct { + db *sqlx.DB +} + +// Find returns a secret given a secret ID. +func (s *secretStore) Find(ctx context.Context, id int64) (*types.Secret, error) { + const findQueryStmt = secretQueryBase + ` + WHERE secret_id = $1` + db := dbtx.GetAccessor(ctx, s.db) + + dst := new(types.Secret) + if err := db.GetContext(ctx, dst, findQueryStmt, id); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed to find secret") + } + return dst, nil +} + +// FindByUID returns a secret in a given space with a given UID. 
+func (s *secretStore) FindByUID(ctx context.Context, spaceID int64, uid string) (*types.Secret, error) {
+	const findQueryStmt = secretQueryBase + `
+		WHERE secret_space_id = $1 AND secret_uid = $2`
+	db := dbtx.GetAccessor(ctx, s.db)
+
+	dst := new(types.Secret)
+	if err := db.GetContext(ctx, dst, findQueryStmt, spaceID, uid); err != nil {
+		return nil, database.ProcessSQLErrorf(err, "Failed to find secret")
+	}
+	return dst, nil
+}
+
+// Create creates a secret.
+func (s *secretStore) Create(ctx context.Context, secret *types.Secret) error {
+	const secretInsertStmt = `
+	INSERT INTO secrets (
+		secret_description,
+		secret_space_id,
+		secret_created_by,
+		secret_uid,
+		secret_data,
+		secret_created,
+		secret_updated,
+		secret_version
+	) VALUES (
+		:secret_description,
+		:secret_space_id,
+		:secret_created_by,
+		:secret_uid,
+		:secret_data,
+		:secret_created,
+		:secret_updated,
+		:secret_version
+	) RETURNING secret_id`
+	db := dbtx.GetAccessor(ctx, s.db)
+
+	query, arg, err := db.BindNamed(secretInsertStmt, secret)
+	if err != nil {
+		return database.ProcessSQLErrorf(err, "Failed to bind secret object")
+	}
+
+	if err = db.QueryRowContext(ctx, query, arg...).Scan(&secret.ID); err != nil {
+		return database.ProcessSQLErrorf(err, "secret query failed")
+	}
+
+	return nil
+}
+
+// Update updates the secret details.
+func (s *secretStore) Update(ctx context.Context, p *types.Secret) error {
+	const secretUpdateStmt = `
+	UPDATE secrets
+	SET
+		secret_description = :secret_description,
+		secret_uid = :secret_uid,
+		secret_data = :secret_data,
+		secret_updated = :secret_updated,
+		secret_version = :secret_version
+	WHERE secret_id = :secret_id AND secret_version = :secret_version - 1`
+	updatedAt := time.Now()
+	secret := *p
+
+	secret.Version++
+	secret.Updated = updatedAt.UnixMilli()
+
+	db := dbtx.GetAccessor(ctx, s.db)
+
+	query, arg, err := db.BindNamed(secretUpdateStmt, secret)
+	if err != nil {
+		return database.ProcessSQLErrorf(err, "Failed to bind secret object")
+	}
+
+	result, err := db.ExecContext(ctx, query, arg...)
+	if err != nil {
+		return database.ProcessSQLErrorf(err, "Failed to update secret")
+	}
+
+	count, err := result.RowsAffected()
+	if err != nil {
+		return database.ProcessSQLErrorf(err, "Failed to get number of updated rows")
+	}
+
+	if count == 0 {
+		return gitness_store.ErrVersionConflict
+	}
+
+	p.Version = secret.Version
+	p.Updated = secret.Updated
+	return nil
+}
+
+// UpdateOptLock updates the secret using the optimistic locking mechanism.
+func (s *secretStore) UpdateOptLock(ctx context.Context,
+	secret *types.Secret,
+	mutateFn func(secret *types.Secret) error,
+) (*types.Secret, error) {
+	for {
+		dup := *secret
+
+		err := mutateFn(&dup)
+		if err != nil {
+			return nil, err
+		}
+
+		err = s.Update(ctx, &dup)
+		if err == nil {
+			return &dup, nil
+		}
+		if !errors.Is(err, gitness_store.ErrVersionConflict) {
+			return nil, err
+		}
+
+		secret, err = s.Find(ctx, secret.ID)
+		if err != nil {
+			return nil, err
+		}
+	}
+}
+
+// List lists all the secrets present in a space.
+func (s *secretStore) List(ctx context.Context, parentID int64, filter types.ListQueryFilter) ([]*types.Secret, error) {
+	stmt := database.Builder.
+		Select(secretColumns).
+		From("secrets").
+ Where("secret_space_id = ?", fmt.Sprint(parentID)) + + if filter.Query != "" { + stmt = stmt.Where("LOWER(secret_uid) LIKE ?", fmt.Sprintf("%%%s%%", strings.ToLower(filter.Query))) + } + + stmt = stmt.Limit(database.Limit(filter.Size)) + stmt = stmt.Offset(database.Offset(filter.Page, filter.Size)) + + sql, args, err := stmt.ToSql() + if err != nil { + return nil, errors.Wrap(err, "Failed to convert query to sql") + } + + db := dbtx.GetAccessor(ctx, s.db) + + dst := []*types.Secret{} + if err = db.SelectContext(ctx, &dst, sql, args...); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed executing custom list query") + } + + return dst, nil +} + +// ListAll lists all the secrets present in a space. +func (s *secretStore) ListAll(ctx context.Context, parentID int64) ([]*types.Secret, error) { + stmt := database.Builder. + Select(secretColumns). + From("secrets"). + Where("secret_space_id = ?", fmt.Sprint(parentID)) + + sql, args, err := stmt.ToSql() + if err != nil { + return nil, errors.Wrap(err, "Failed to convert query to sql") + } + + db := dbtx.GetAccessor(ctx, s.db) + + dst := []*types.Secret{} + if err = db.SelectContext(ctx, &dst, sql, args...); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed executing custom list query") + } + + return dst, nil +} + +// Delete deletes a secret given a secret ID. +func (s *secretStore) Delete(ctx context.Context, id int64) error { + const secretDeleteStmt = ` + DELETE FROM secrets + WHERE secret_id = $1` + + db := dbtx.GetAccessor(ctx, s.db) + + if _, err := db.ExecContext(ctx, secretDeleteStmt, id); err != nil { + return database.ProcessSQLErrorf(err, "Could not delete secret") + } + + return nil +} + +// DeleteByUID deletes a secret with a given UID in a space. +func (s *secretStore) DeleteByUID(ctx context.Context, spaceID int64, uid string) error { + const secretDeleteStmt = ` + DELETE FROM secrets + WHERE secret_space_id = $1 AND secret_uid = $2` + + db := dbtx.GetAccessor(ctx, s.db) + + if _, err := db.ExecContext(ctx, secretDeleteStmt, spaceID, uid); err != nil { + return database.ProcessSQLErrorf(err, "Could not delete secret") + } + + return nil +} + +// Count of secrets in a space. +func (s *secretStore) Count(ctx context.Context, parentID int64, filter types.ListQueryFilter) (int64, error) { + stmt := database.Builder. + Select("count(*)"). + From("secrets"). + Where("secret_space_id = ?", parentID) + + if filter.Query != "" { + stmt = stmt.Where("LOWER(secret_uid) LIKE ?", fmt.Sprintf("%%%s%%", strings.ToLower(filter.Query))) + } + + sql, args, err := stmt.ToSql() + if err != nil { + return 0, errors.Wrap(err, "Failed to convert query to sql") + } + + db := dbtx.GetAccessor(ctx, s.db) + + var count int64 + err = db.QueryRowContext(ctx, sql, args...).Scan(&count) + if err != nil { + return 0, database.ProcessSQLErrorf(err, "Failed executing count query") + } + return count, nil +} diff --git a/internal/store/database/space.go b/internal/store/database/space.go new file mode 100644 index 0000000000..bf07e1698b --- /dev/null +++ b/internal/store/database/space.go @@ -0,0 +1,409 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package database + +import ( + "context" + "fmt" + "strconv" + "strings" + "time" + + "github.com/harness/gitness/internal/store" + gitness_store "github.com/harness/gitness/store" + "github.com/harness/gitness/store/database" + "github.com/harness/gitness/store/database/dbtx" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/guregu/null" + "github.com/jmoiron/sqlx" + "github.com/pkg/errors" +) + +var _ store.SpaceStore = (*SpaceStore)(nil) + +// NewSpaceStore returns a new SpaceStore. +func NewSpaceStore( + db *sqlx.DB, + spacePathCache store.SpacePathCache, + spacePathStore store.SpacePathStore, +) *SpaceStore { + return &SpaceStore{ + db: db, + spacePathCache: spacePathCache, + spacePathStore: spacePathStore, + } +} + +// SpaceStore implements a SpaceStore backed by a relational database. +type SpaceStore struct { + db *sqlx.DB + spacePathCache store.SpacePathCache + spacePathStore store.SpacePathStore +} + +// space is an internal representation used to store space data in DB. +type space struct { + ID int64 `db:"space_id"` + Version int64 `db:"space_version"` + // IMPORTANT: We need to make parentID optional for spaces to allow it to be a foreign key. + ParentID null.Int `db:"space_parent_id"` + UID string `db:"space_uid"` + Description string `db:"space_description"` + IsPublic bool `db:"space_is_public"` + CreatedBy int64 `db:"space_created_by"` + Created int64 `db:"space_created"` + Updated int64 `db:"space_updated"` +} + +const ( + spaceColumns = ` + space_id + ,space_version + ,space_parent_id + ,space_uid + ,space_description + ,space_is_public + ,space_created_by + ,space_created + ,space_updated` + + spaceSelectBase = ` + SELECT` + spaceColumns + ` + FROM spaces` +) + +// Find the space by id. +func (s *SpaceStore) Find(ctx context.Context, id int64) (*types.Space, error) { + const sqlQuery = spaceSelectBase + ` + WHERE space_id = $1` + + db := dbtx.GetAccessor(ctx, s.db) + + dst := new(space) + if err := db.GetContext(ctx, dst, sqlQuery, id); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed to find space") + } + + return mapToSpace(ctx, s.spacePathStore, dst) +} + +// FindByRef finds the space using the spaceRef as either the id or the space path. +func (s *SpaceStore) FindByRef(ctx context.Context, spaceRef string) (*types.Space, error) { + // ASSUMPTION: digits only is not a valid space path + id, err := strconv.ParseInt(spaceRef, 10, 64) + if err != nil { + var path *types.SpacePath + path, err = s.spacePathCache.Get(ctx, spaceRef) + if err != nil { + return nil, fmt.Errorf("failed to get path: %w", err) + } + + id = path.SpaceID + } + + return s.Find(ctx, id) +} + +// Create a new space. 
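+// A minimal usage sketch (illustrative only, not part of this change; variable names are
+// assumed): a root space is created with ParentID 0, which the store translates to a
+// NULL space_parent_id so the parent foreign key constraint is satisfied.
+//
+//	space := &types.Space{UID: "my-org", CreatedBy: principalID, Created: now, Updated: now}
+//	if err := spaceStore.Create(ctx, space); err != nil {
+//		return err
+//	}
+//	// space.ID is populated from the RETURNING clause.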
+func (s *SpaceStore) Create(ctx context.Context, space *types.Space) error { + if space == nil { + return errors.New("space is nil") + } + + const sqlQuery = ` + INSERT INTO spaces ( + space_version + ,space_parent_id + ,space_uid + ,space_description + ,space_is_public + ,space_created_by + ,space_created + ,space_updated + ) values ( + :space_version + ,:space_parent_id + ,:space_uid + ,:space_description + ,:space_is_public + ,:space_created_by + ,:space_created + ,:space_updated + ) RETURNING space_id` + + db := dbtx.GetAccessor(ctx, s.db) + + query, args, err := db.BindNamed(sqlQuery, mapToInternalSpace(space)) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind space object") + } + + if err = db.QueryRowContext(ctx, query, args...).Scan(&space.ID); err != nil { + return database.ProcessSQLErrorf(err, "Insert query failed") + } + + return nil +} + +// Update updates the space details. +func (s *SpaceStore) Update(ctx context.Context, space *types.Space) error { + if space == nil { + return errors.New("space is nil") + } + + const sqlQuery = ` + UPDATE spaces + SET + space_version = :space_version + ,space_updated = :space_updated + ,space_parent_id = :space_parent_id + ,space_uid = :space_uid + ,space_description = :space_description + ,space_is_public = :space_is_public + WHERE space_id = :space_id AND space_version = :space_version - 1` + + dbSpace := mapToInternalSpace(space) + + // update Version (used for optimistic locking) and Updated time + dbSpace.Version++ + dbSpace.Updated = time.Now().UnixMilli() + + db := dbtx.GetAccessor(ctx, s.db) + + query, arg, err := db.BindNamed(sqlQuery, dbSpace) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind space object") + } + + result, err := db.ExecContext(ctx, query, arg...) + if err != nil { + return database.ProcessSQLErrorf(err, "Update query failed") + } + + count, err := result.RowsAffected() + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to get number of updated rows") + } + + if count == 0 { + return gitness_store.ErrVersionConflict + } + + space.Version = dbSpace.Version + space.Updated = dbSpace.Updated + + // update path in case parent/uid changed + space.Path, err = getSpacePath(ctx, s.spacePathStore, space.ID) + if err != nil { + return err + } + + return nil +} + +// UpdateOptLock updates the space using the optimistic locking mechanism. +func (s *SpaceStore) UpdateOptLock(ctx context.Context, + space *types.Space, + mutateFn func(space *types.Space) error, +) (*types.Space, error) { + for { + dup := *space + + err := mutateFn(&dup) + if err != nil { + return nil, err + } + + err = s.Update(ctx, &dup) + if err == nil { + return &dup, nil + } + if !errors.Is(err, gitness_store.ErrVersionConflict) { + return nil, err + } + + space, err = s.Find(ctx, space.ID) + if err != nil { + return nil, err + } + } +} + +// Delete deletes a space. +func (s *SpaceStore) Delete(ctx context.Context, id int64) error { + const sqlQuery = ` + DELETE FROM spaces + WHERE space_id = $1` + + db := dbtx.GetAccessor(ctx, s.db) + + if _, err := db.ExecContext(ctx, sqlQuery, id); err != nil { + return database.ProcessSQLErrorf(err, "The delete query failed") + } + + return nil +} + +// Count the child spaces of a space. +func (s *SpaceStore) Count(ctx context.Context, id int64, opts *types.SpaceFilter) (int64, error) { + stmt := database.Builder. + Select("count(*)"). + From("spaces"). 
+ Where("space_parent_id = ?", id) + + if opts.Query != "" { + stmt = stmt.Where("LOWER(space_uid) LIKE ?", fmt.Sprintf("%%%s%%", strings.ToLower(opts.Query))) + } + + sql, args, err := stmt.ToSql() + if err != nil { + return 0, errors.Wrap(err, "Failed to convert query to sql") + } + + db := dbtx.GetAccessor(ctx, s.db) + + var count int64 + err = db.QueryRowContext(ctx, sql, args...).Scan(&count) + if err != nil { + return 0, database.ProcessSQLErrorf(err, "Failed executing count query") + } + + return count, nil +} + +// List returns a list of spaces under the parent space. +func (s *SpaceStore) List(ctx context.Context, id int64, opts *types.SpaceFilter) ([]*types.Space, error) { + stmt := database.Builder. + Select(spaceColumns). + From("spaces"). + Where("space_parent_id = ?", fmt.Sprint(id)) + + stmt = stmt.Limit(database.Limit(opts.Size)) + stmt = stmt.Offset(database.Offset(opts.Page, opts.Size)) + + if opts.Query != "" { + stmt = stmt.Where("LOWER(space_uid) LIKE ?", fmt.Sprintf("%%%s%%", strings.ToLower(opts.Query))) + } + + switch opts.Sort { + case enum.SpaceAttrUID, enum.SpaceAttrNone: + // NOTE: string concatenation is safe because the + // order attribute is an enum and is not user-defined, + // and is therefore not subject to injection attacks. + stmt = stmt.OrderBy("space_uid " + opts.Order.String()) + //TODO: Postgres does not support COLLATE NOCASE for UTF8 + // stmt = stmt.OrderBy("space_uid COLLATE NOCASE " + opts.Order.String()) + case enum.SpaceAttrCreated: + stmt = stmt.OrderBy("space_created " + opts.Order.String()) + case enum.SpaceAttrUpdated: + stmt = stmt.OrderBy("space_updated " + opts.Order.String()) + } + + sql, args, err := stmt.ToSql() + if err != nil { + return nil, errors.Wrap(err, "Failed to convert query to sql") + } + + db := dbtx.GetAccessor(ctx, s.db) + + var dst []*space + if err = db.SelectContext(ctx, &dst, sql, args...); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed executing custom list query") + } + + return s.mapToSpaces(ctx, dst) +} + +func mapToSpace( + ctx context.Context, + spacePathStore store.SpacePathStore, + in *space, +) (*types.Space, error) { + var err error + res := &types.Space{ + ID: in.ID, + Version: in.Version, + UID: in.UID, + Description: in.Description, + IsPublic: in.IsPublic, + Created: in.Created, + CreatedBy: in.CreatedBy, + Updated: in.Updated, + } + + // Only overwrite ParentID if it's not a root space + if in.ParentID.Valid { + res.ParentID = in.ParentID.Int64 + } + + // backfill path + res.Path, err = getSpacePath(ctx, spacePathStore, in.ID) + if err != nil { + return nil, fmt.Errorf("failed to get primary path for space %d: %w", in.ID, err) + } + + return res, nil +} + +func getSpacePath( + ctx context.Context, + spacePathStore store.SpacePathStore, + spaceID int64, +) (string, error) { + spacePath, err := spacePathStore.FindPrimaryBySpaceID(ctx, spaceID) + if err != nil { + return "", fmt.Errorf("failed to get primary path for space %d: %w", spaceID, err) + } + + return spacePath.Value, nil +} + +func (s *SpaceStore) mapToSpaces( + ctx context.Context, + spaces []*space, +) ([]*types.Space, error) { + var err error + res := make([]*types.Space, len(spaces)) + for i := range spaces { + res[i], err = mapToSpace(ctx, s.spacePathStore, spaces[i]) + if err != nil { + return nil, err + } + } + return res, nil +} + +func mapToInternalSpace(s *types.Space) *space { + res := &space{ + ID: s.ID, + Version: s.Version, + UID: s.UID, + Description: s.Description, + IsPublic: s.IsPublic, + Created: 
s.Created,
+		CreatedBy:   s.CreatedBy,
+		Updated:     s.Updated,
+	}
+
+	// Only overwrite ParentID if it's not a root space
+	// IMPORTANT: s.ParentID==0 has to be translated to nil as otherwise the foreign key fails
+	if s.ParentID > 0 {
+		res.ParentID = null.IntFrom(s.ParentID)
+	}
+
+	return res
+}
diff --git a/internal/store/database/space_path.go b/internal/store/database/space_path.go
new file mode 100644
index 0000000000..0945822792
--- /dev/null
+++ b/internal/store/database/space_path.go
@@ -0,0 +1,220 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//	http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package database
+
+import (
+	"context"
+	"fmt"
+
+	"github.com/harness/gitness/internal/paths"
+	"github.com/harness/gitness/internal/store"
+	"github.com/harness/gitness/store/database"
+	"github.com/harness/gitness/store/database/dbtx"
+	"github.com/harness/gitness/types"
+
+	"github.com/guregu/null"
+	"github.com/jmoiron/sqlx"
+)
+
+var _ store.SpacePathStore = (*SpacePathStore)(nil)
+
+// NewSpacePathStore returns a new SpacePathStore.
+func NewSpacePathStore(db *sqlx.DB, pathTransformation store.SpacePathTransformation) *SpacePathStore {
+	return &SpacePathStore{
+		db:                      db,
+		spacePathTransformation: pathTransformation,
+	}
+}
+
+// SpacePathStore implements a store.SpacePathStore backed by a relational database.
+type SpacePathStore struct {
+	db                      *sqlx.DB
+	spacePathTransformation store.SpacePathTransformation
+}
+
+// spacePathSegment is an internal representation of a segment of a space path.
+type spacePathSegment struct {
+	ID int64 `db:"space_path_id"`
+	// UID is the original uid that was provided
+	UID string `db:"space_path_uid"`
+	// UIDUnique is a transformed version of UID which is used to ensure uniqueness guarantees
+	UIDUnique string `db:"space_path_uid_unique"`
+	// IsPrimary indicates whether the path is the primary path of the space
+	// IMPORTANT: to allow DB enforcement of at most one primary path per repo/space
+	// we have a unique index on spaceID + IsPrimary and set IsPrimary to true
+	// for primary paths and to nil for non-primary paths.
+	IsPrimary null.Bool `db:"space_path_is_primary"`
+	ParentID  null.Int  `db:"space_path_parent_id"`
+	SpaceID   int64     `db:"space_path_space_id"`
+	CreatedBy int64     `db:"space_path_created_by"`
+	Created   int64     `db:"space_path_created"`
+	Updated   int64     `db:"space_path_updated"`
+}
+
+const (
+	spacePathColumns = `
+	 space_path_uid
+	,space_path_uid_unique
+	,space_path_is_primary
+	,space_path_parent_id
+	,space_path_space_id
+	,space_path_created_by
+	,space_path_created
+	,space_path_updated`
+
+	spacePathSelectBase = `
+	SELECT` + spacePathColumns + `
+	FROM space_paths`
+)
+
+// InsertSegment inserts a space path segment into the space_paths table.
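+// A minimal usage sketch (illustrative only, not part of this change; variable names are
+// assumed): a top-level segment carries ParentID 0 (stored as NULL) and, if it is the
+// primary path, IsPrimary true (stored as TRUE; non-primary segments keep NULL so the
+// unique index on space id + primary flag still permits multiple aliases).
+//
+//	err := spacePathStore.InsertSegment(ctx, &types.SpacePathSegment{
+//		UID:       "my-org",
+//		IsPrimary: true,
+//		SpaceID:   space.ID,
+//		CreatedBy: principalID,
+//		Created:   now,
+//		Updated:   now,
+//	})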
+func (s *SpacePathStore) InsertSegment(ctx context.Context, segment *types.SpacePathSegment) error { + const sqlQuery = ` + INSERT INTO space_paths ( + space_path_uid + ,space_path_uid_unique + ,space_path_is_primary + ,space_path_parent_id + ,space_path_space_id + ,space_path_created_by + ,space_path_created + ,space_path_updated + ) values ( + :space_path_uid + ,:space_path_uid_unique + ,:space_path_is_primary + ,:space_path_parent_id + ,:space_path_space_id + ,:space_path_created_by + ,:space_path_created + ,:space_path_updated + )` + + db := dbtx.GetAccessor(ctx, s.db) + + query, arg, err := db.BindNamed(sqlQuery, s.mapToInternalSpacePathSegment(segment)) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind path segment object") + } + + if _, err = db.ExecContext(ctx, query, arg...); err != nil { + return database.ProcessSQLErrorf(err, "Insert query failed") + } + + return nil +} + +func (s *SpacePathStore) FindPrimaryBySpaceID(ctx context.Context, spaceID int64) (*types.SpacePath, error) { + sqlQuery := spacePathSelectBase + ` + where space_path_space_id = $1 AND space_path_is_primary = TRUE` + + db := dbtx.GetAccessor(ctx, s.db) + dst := new(spacePathSegment) + + path := "" + nextSpaceID := null.IntFrom(spaceID) + + for nextSpaceID.Valid { + err := db.GetContext(ctx, dst, sqlQuery, nextSpaceID.Int64) + if err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed to find primary segment for %d", nextSpaceID.Int64) + } + + path = paths.Concatinate(dst.UID, path) + nextSpaceID = dst.ParentID + } + + return &types.SpacePath{ + SpaceID: spaceID, + Value: path, + IsPrimary: true, + }, nil +} +func (s *SpacePathStore) FindByPath(ctx context.Context, path string) (*types.SpacePath, error) { + const sqlQueryParent = spacePathSelectBase + ` WHERE space_path_uid_unique = $1 AND space_path_parent_id = $2` + const sqlQueryNoParent = spacePathSelectBase + ` WHERE space_path_uid_unique = $1 AND space_path_parent_id IS NULL` + + db := dbtx.GetAccessor(ctx, s.db) + segment := new(spacePathSegment) + + segmentUIDs := paths.Segments(path) + if len(segmentUIDs) == 0 { + return nil, fmt.Errorf("path with no segments was passed '%s'", path) + } + + var parentID int64 + sqlquery := sqlQueryNoParent + originalPath := "" + isPrimary := true + for i, segmentUID := range segmentUIDs { + uniqueSegmentUID := s.spacePathTransformation(segmentUID, i == 0) + err := db.GetContext(ctx, segment, sqlquery, uniqueSegmentUID, parentID) + if err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed to find segment for '%s' in '%s'", uniqueSegmentUID, path) + } + + originalPath = paths.Concatinate(originalPath, segment.UID) + parentID = segment.SpaceID + isPrimary = isPrimary && segment.IsPrimary.ValueOrZero() + sqlquery = sqlQueryParent + } + + return &types.SpacePath{ + Value: originalPath, + IsPrimary: isPrimary, + SpaceID: segment.SpaceID, + }, nil +} + +// DeletePrimarySegment deletes the primary segment of the space. 
+func (s *SpacePathStore) DeletePrimarySegment(ctx context.Context, spaceID int64) error { + const sqlQuery = ` + DELETE FROM space_paths + WHERE space_path_space_id = $1 AND space_path_is_primary = TRUE` + + db := dbtx.GetAccessor(ctx, s.db) + + if _, err := db.ExecContext(ctx, sqlQuery, spaceID); err != nil { + return database.ProcessSQLErrorf(err, "the delete query failed") + } + + return nil +} + +func (s *SpacePathStore) mapToInternalSpacePathSegment(p *types.SpacePathSegment) *spacePathSegment { + res := &spacePathSegment{ + ID: p.ID, + UID: p.UID, + UIDUnique: s.spacePathTransformation(p.UID, p.ParentID == 0), + SpaceID: p.SpaceID, + Created: p.Created, + CreatedBy: p.CreatedBy, + Updated: p.Updated, + + // ParentID: is set below + // IsPrimary: is set below + } + + // only set IsPrimary to a value if it's true (Unique Index doesn't allow multiple false, hence keep it nil) + if p.IsPrimary { + res.IsPrimary = null.BoolFrom(true) + } + + if p.ParentID > 0 { + res.ParentID = null.IntFrom(p.ParentID) + } + + return res +} diff --git a/internal/store/database/stage.go b/internal/store/database/stage.go new file mode 100644 index 0000000000..a7d0517ee1 --- /dev/null +++ b/internal/store/database/stage.go @@ -0,0 +1,329 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package database + +import ( + "context" + "fmt" + "time" + + "github.com/harness/gitness/internal/store" + gitness_store "github.com/harness/gitness/store" + "github.com/harness/gitness/store/database" + "github.com/harness/gitness/store/database/dbtx" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/jmoiron/sqlx" + sqlxtypes "github.com/jmoiron/sqlx/types" +) + +var _ store.StageStore = (*stageStore)(nil) + +const ( + stageColumns = ` + stage_id + ,stage_execution_id + ,stage_repo_id + ,stage_number + ,stage_name + ,stage_kind + ,stage_type + ,stage_status + ,stage_error + ,stage_errignore + ,stage_exit_code + ,stage_machine + ,stage_os + ,stage_arch + ,stage_variant + ,stage_kernel + ,stage_limit + ,stage_limit_repo + ,stage_started + ,stage_stopped + ,stage_created + ,stage_updated + ,stage_version + ,stage_on_success + ,stage_on_failure + ,stage_depends_on + ,stage_labels + ` +) + +type stage struct { + ID int64 `db:"stage_id"` + ExecutionID int64 `db:"stage_execution_id"` + RepoID int64 `db:"stage_repo_id"` + Number int64 `db:"stage_number"` + Name string `db:"stage_name"` + Kind string `db:"stage_kind"` + Type string `db:"stage_type"` + Status enum.CIStatus `db:"stage_status"` + Error string `db:"stage_error"` + ParentGroupID int64 `db:"stage_parent_group_id"` + ErrIgnore bool `db:"stage_errignore"` + ExitCode int `db:"stage_exit_code"` + Machine string `db:"stage_machine"` + OS string `db:"stage_os"` + Arch string `db:"stage_arch"` + Variant string `db:"stage_variant"` + Kernel string `db:"stage_kernel"` + Limit int `db:"stage_limit"` + LimitRepo int `db:"stage_limit_repo"` + Started int64 `db:"stage_started"` + Stopped int64 `db:"stage_stopped"` + Created int64 `db:"stage_created"` + Updated int64 `db:"stage_updated"` + Version int64 `db:"stage_version"` + OnSuccess bool `db:"stage_on_success"` + OnFailure bool `db:"stage_on_failure"` + DependsOn sqlxtypes.JSONText `db:"stage_depends_on"` + Labels sqlxtypes.JSONText `db:"stage_labels"` +} + +// NewStageStore returns a new StageStore. +func NewStageStore(db *sqlx.DB) *stageStore { + return &stageStore{ + db: db, + } +} + +type stageStore struct { + db *sqlx.DB +} + +// FindByNumber returns a stage given an execution ID and a stage number. +func (s *stageStore) FindByNumber(ctx context.Context, executionID int64, stageNum int) (*types.Stage, error) { + const findQueryStmt = ` + SELECT` + stageColumns + ` + FROM stages + WHERE stage_execution_id = $1 AND stage_number = $2` + db := dbtx.GetAccessor(ctx, s.db) + + dst := new(stage) + if err := db.GetContext(ctx, dst, findQueryStmt, executionID, stageNum); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed to find stage") + } + return mapInternalToStage(dst) +} + +// Create adds a stage in the database. 
+func (s *stageStore) Create(ctx context.Context, st *types.Stage) error { + const stageInsertStmt = ` + INSERT INTO stages ( + stage_execution_id + ,stage_repo_id + ,stage_number + ,stage_name + ,stage_kind + ,stage_type + ,stage_status + ,stage_error + ,stage_errignore + ,stage_exit_code + ,stage_machine + ,stage_parent_group_id + ,stage_os + ,stage_arch + ,stage_variant + ,stage_kernel + ,stage_limit + ,stage_limit_repo + ,stage_started + ,stage_stopped + ,stage_created + ,stage_updated + ,stage_version + ,stage_on_success + ,stage_on_failure + ,stage_depends_on + ,stage_labels + ) VALUES ( + :stage_execution_id + ,:stage_repo_id + ,:stage_number + ,:stage_name + ,:stage_kind + ,:stage_type + ,:stage_status + ,:stage_error + ,:stage_errignore + ,:stage_exit_code + ,:stage_machine + ,:stage_parent_group_id + ,:stage_os + ,:stage_arch + ,:stage_variant + ,:stage_kernel + ,:stage_limit + ,:stage_limit_repo + ,:stage_started + ,:stage_stopped + ,:stage_created + ,:stage_updated + ,:stage_version + ,:stage_on_success + ,:stage_on_failure + ,:stage_depends_on + ,:stage_labels + + ) RETURNING stage_id` + db := dbtx.GetAccessor(ctx, s.db) + + stage := mapStageToInternal(st) + query, arg, err := db.BindNamed(stageInsertStmt, stage) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind stage object") + } + if err = db.QueryRowContext(ctx, query, arg...).Scan(&stage.ID); err != nil { + return database.ProcessSQLErrorf(err, "Stage query failed") + } + return nil +} + +// ListWithSteps returns a stage with information about all its containing steps. +func (s *stageStore) ListWithSteps(ctx context.Context, executionID int64) ([]*types.Stage, error) { + const queryNumberWithSteps = ` + SELECT` + stageColumns + "," + stepColumns + ` + FROM stages + LEFT JOIN steps + ON stages.stage_id=steps.step_stage_id + WHERE stages.stage_execution_id = $1 + ORDER BY + stage_id ASC + ,step_id ASC + ` + db := dbtx.GetAccessor(ctx, s.db) + + rows, err := db.QueryContext(ctx, queryNumberWithSteps, executionID) + if err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed to query stages and steps") + } + return scanRowsWithSteps(rows) +} + +// Find returns a stage given the stage ID. +func (s *stageStore) Find(ctx context.Context, stageID int64) (*types.Stage, error) { + const queryFind = ` + SELECT` + stageColumns + ` + FROM stages + WHERE stage_id = $1 + ` + db := dbtx.GetAccessor(ctx, s.db) + + dst := new(stage) + if err := db.GetContext(ctx, dst, queryFind, stageID); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed to find stage") + } + return mapInternalToStage(dst) +} + +// ListIncomplete returns a list of stages with a pending status. +func (s *stageStore) ListIncomplete(ctx context.Context) ([]*types.Stage, error) { + const queryListIncomplete = ` + SELECT` + stageColumns + ` + FROM stages + WHERE stage_status IN ('pending','running') + ORDER BY stage_id ASC + ` + db := dbtx.GetAccessor(ctx, s.db) + + dst := []*stage{} + if err := db.SelectContext(ctx, &dst, queryListIncomplete); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed to find incomplete stages") + } + // map stages list + return mapInternalToStageList(dst) +} + +// List returns a list of stages corresponding to an execution ID. 
+func (s *stageStore) List(ctx context.Context, executionID int64) ([]*types.Stage, error) { + const queryList = ` + SELECT` + stageColumns + ` + FROM stages + WHERE stage_execution_id = $1 + ORDER BY stage_number ASC + ` + db := dbtx.GetAccessor(ctx, s.db) + + dst := []*stage{} + if err := db.SelectContext(ctx, &dst, queryList, executionID); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed to find stages") + } + // map stages list + return mapInternalToStageList(dst) +} + +// Update tries to update a stage in the datastore and returns a locking error +// if it was unable to do so. +func (s *stageStore) Update(ctx context.Context, st *types.Stage) error { + const stageUpdateStmt = ` + UPDATE stages + SET + stage_status = :stage_status + ,stage_machine = :stage_machine + ,stage_started = :stage_started + ,stage_stopped = :stage_stopped + ,stage_exit_code = :stage_exit_code + ,stage_updated = :stage_updated + ,stage_version = :stage_version + ,stage_error = :stage_error + ,stage_on_success = :stage_on_success + ,stage_on_failure = :stage_on_failure + ,stage_errignore = :stage_errignore + ,stage_depends_on = :stage_depends_on + ,stage_labels = :stage_labels + WHERE stage_id = :stage_id AND stage_version = :stage_version - 1` + updatedAt := time.Now() + steps := st.Steps + + stage := mapStageToInternal(st) + + stage.Version++ + stage.Updated = updatedAt.UnixMilli() + + db := dbtx.GetAccessor(ctx, s.db) + + query, arg, err := db.BindNamed(stageUpdateStmt, stage) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind stage object") + } + + result, err := db.ExecContext(ctx, query, arg...) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to update stage") + } + + count, err := result.RowsAffected() + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to get number of updated rows") + } + + if count == 0 { + return gitness_store.ErrVersionConflict + } + + m, err := mapInternalToStage(stage) + if err != nil { + return fmt.Errorf("Could not map stage object: %w", err) + } + *st = *m + st.Version = stage.Version + st.Updated = stage.Updated + st.Steps = steps // steps is not mapped in database. + return nil +} diff --git a/internal/store/database/stage_map.go b/internal/store/database/stage_map.go new file mode 100644 index 0000000000..b147284e60 --- /dev/null +++ b/internal/store/database/stage_map.go @@ -0,0 +1,243 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package database + +import ( + "database/sql" + "encoding/json" + "fmt" + + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + sqlxtypes "github.com/jmoiron/sqlx/types" + "github.com/pkg/errors" +) + +type nullstep struct { + ID sql.NullInt64 `db:"step_id"` + StageID sql.NullInt64 `db:"step_stage_id"` + Number sql.NullInt64 `db:"step_number"` + ParentGroupID sql.NullInt64 `db:"step_parent_group_id"` + Name sql.NullString `db:"step_name"` + Status sql.NullString `db:"step_status"` + Error sql.NullString `db:"step_error"` + ErrIgnore sql.NullBool `db:"step_errignore"` + ExitCode sql.NullInt64 `db:"step_exit_code"` + Started sql.NullInt64 `db:"step_started"` + Stopped sql.NullInt64 `db:"step_stopped"` + Version sql.NullInt64 `db:"step_version"` + DependsOn sqlxtypes.JSONText `db:"step_depends_on"` + Image sql.NullString `db:"step_image"` + Detached sql.NullBool `db:"step_detached"` + Schema sql.NullString `db:"step_schema"` +} + +// used for join operations where fields may be null +func convertFromNullStep(nullstep *nullstep) (*types.Step, error) { + var dependsOn []string + err := json.Unmarshal(nullstep.DependsOn, &dependsOn) + if err != nil { + return nil, fmt.Errorf("could not unmarshal step.depends_on: %w", err) + } + return &types.Step{ + ID: nullstep.ID.Int64, + StageID: nullstep.StageID.Int64, + Number: nullstep.Number.Int64, + Name: nullstep.Name.String, + Status: enum.ParseCIStatus(nullstep.Status.String), + Error: nullstep.Error.String, + ErrIgnore: nullstep.ErrIgnore.Bool, + ExitCode: int(nullstep.ExitCode.Int64), + Started: nullstep.Started.Int64, + Stopped: nullstep.Stopped.Int64, + Version: nullstep.Version.Int64, + DependsOn: dependsOn, + Image: nullstep.Image.String, + Detached: nullstep.Detached.Bool, + Schema: nullstep.Schema.String, + }, nil +} + +func mapInternalToStage(in *stage) (*types.Stage, error) { + var dependsOn []string + err := json.Unmarshal(in.DependsOn, &dependsOn) + if err != nil { + return nil, errors.Wrap(err, "could not unmarshal stage.depends_on") + } + var labels map[string]string + err = json.Unmarshal(in.Labels, &labels) + if err != nil { + return nil, errors.Wrap(err, "could not unmarshal stage.labels") + } + return &types.Stage{ + ID: in.ID, + ExecutionID: in.ExecutionID, + RepoID: in.RepoID, + Number: in.Number, + Name: in.Name, + Kind: in.Kind, + Type: in.Type, + Status: in.Status, + Error: in.Error, + ErrIgnore: in.ErrIgnore, + ExitCode: in.ExitCode, + Machine: in.Machine, + OS: in.OS, + Arch: in.Arch, + Variant: in.Variant, + Kernel: in.Kernel, + Limit: in.Limit, + LimitRepo: in.LimitRepo, + Started: in.Started, + Stopped: in.Stopped, + Created: in.Created, + Updated: in.Updated, + Version: in.Version, + OnSuccess: in.OnSuccess, + OnFailure: in.OnFailure, + DependsOn: dependsOn, + Labels: labels, + }, nil +} + +func mapStageToInternal(in *types.Stage) *stage { + return &stage{ + ID: in.ID, + ExecutionID: in.ExecutionID, + RepoID: in.RepoID, + Number: in.Number, + Name: in.Name, + Kind: in.Kind, + Type: in.Type, + Status: in.Status, + Error: in.Error, + ErrIgnore: in.ErrIgnore, + ExitCode: in.ExitCode, + Machine: in.Machine, + OS: in.OS, + Arch: in.Arch, + Variant: in.Variant, + Kernel: in.Kernel, + Limit: in.Limit, + LimitRepo: in.LimitRepo, + Started: in.Started, + Stopped: in.Stopped, + Created: in.Created, + Updated: in.Updated, + Version: in.Version, + OnSuccess: in.OnSuccess, + OnFailure: in.OnFailure, + DependsOn: EncodeToSQLXJSON(in.DependsOn), + Labels: EncodeToSQLXJSON(in.Labels), + } +} + +func 
mapInternalToStageList(in []*stage) ([]*types.Stage, error) { + stages := make([]*types.Stage, len(in)) + for i, k := range in { + s, err := mapInternalToStage(k) + if err != nil { + return nil, err + } + stages[i] = s + } + return stages, nil +} + +// helper function scans the sql.Row and copies the column +// values to the destination object. +func scanRowsWithSteps(rows *sql.Rows) ([]*types.Stage, error) { + defer rows.Close() + + stages := []*types.Stage{} + var curr *types.Stage + for rows.Next() { + stage := new(types.Stage) + step := new(nullstep) + err := scanRowStep(rows, stage, step) + if err != nil { + return nil, err + } + if curr == nil || curr.ID != stage.ID { + curr = stage + stages = append(stages, curr) + } + if step.ID.Valid { + convertedStep, err := convertFromNullStep(step) + if err != nil { + return nil, err + } + curr.Steps = append(curr.Steps, convertedStep) + } + } + return stages, nil +} + +// helper function scans the sql.Row and copies the column +// values to the destination object. +func scanRowStep(rows *sql.Rows, stage *types.Stage, step *nullstep) error { + depJSON := sqlxtypes.JSONText{} + labJSON := sqlxtypes.JSONText{} + stepDepJSON := sqlxtypes.JSONText{} + err := rows.Scan( + &stage.ID, + &stage.ExecutionID, + &stage.RepoID, + &stage.Number, + &stage.Name, + &stage.Kind, + &stage.Type, + &stage.Status, + &stage.Error, + &stage.ErrIgnore, + &stage.ExitCode, + &stage.Machine, + &stage.OS, + &stage.Arch, + &stage.Variant, + &stage.Kernel, + &stage.Limit, + &stage.LimitRepo, + &stage.Started, + &stage.Stopped, + &stage.Created, + &stage.Updated, + &stage.Version, + &stage.OnSuccess, + &stage.OnFailure, + &depJSON, + &labJSON, + &step.ID, + &step.StageID, + &step.Number, + &step.Name, + &step.Status, + &step.Error, + &step.ErrIgnore, + &step.ExitCode, + &step.Started, + &step.Stopped, + &step.Version, + &stepDepJSON, + &step.Image, + &step.Detached, + &step.Schema, + ) + json.Unmarshal(depJSON, &stage.DependsOn) + json.Unmarshal(labJSON, &stage.Labels) + json.Unmarshal(stepDepJSON, &step.DependsOn) + return err +} diff --git a/internal/store/database/step.go b/internal/store/database/step.go new file mode 100644 index 0000000000..64e5223756 --- /dev/null +++ b/internal/store/database/step.go @@ -0,0 +1,200 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package database + +import ( + "context" + "fmt" + + "github.com/harness/gitness/internal/store" + gitness_store "github.com/harness/gitness/store" + "github.com/harness/gitness/store/database" + "github.com/harness/gitness/store/database/dbtx" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/jmoiron/sqlx" + sqlxtypes "github.com/jmoiron/sqlx/types" +) + +var _ store.StepStore = (*stepStore)(nil) + +const ( + stepColumns = ` + step_id + ,step_stage_id + ,step_number + ,step_name + ,step_status + ,step_error + ,step_errignore + ,step_exit_code + ,step_started + ,step_stopped + ,step_version + ,step_depends_on + ,step_image + ,step_detached + ,step_schema + ` +) + +type step struct { + ID int64 `db:"step_id"` + StageID int64 `db:"step_stage_id"` + Number int64 `db:"step_number"` + ParentGroupID int64 `db:"step_parent_group_id"` + Name string `db:"step_name"` + Status enum.CIStatus `db:"step_status"` + Error string `db:"step_error"` + ErrIgnore bool `db:"step_errignore"` + ExitCode int `db:"step_exit_code"` + Started int64 `db:"step_started"` + Stopped int64 `db:"step_stopped"` + Version int64 `db:"step_version"` + DependsOn sqlxtypes.JSONText `db:"step_depends_on"` + Image string `db:"step_image"` + Detached bool `db:"step_detached"` + Schema string `db:"step_schema"` +} + +// NewStepStore returns a new StepStore. +func NewStepStore(db *sqlx.DB) *stepStore { + return &stepStore{ + db: db, + } +} + +type stepStore struct { + db *sqlx.DB +} + +// FindByNumber returns a step given a stage ID and a step number. +func (s *stepStore) FindByNumber(ctx context.Context, stageID int64, stepNum int) (*types.Step, error) { + const findQueryStmt = ` + SELECT` + stepColumns + ` + FROM steps + WHERE step_stage_id = $1 AND step_number = $2` + db := dbtx.GetAccessor(ctx, s.db) + + dst := new(step) + if err := db.GetContext(ctx, dst, findQueryStmt, stageID, stepNum); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed to find step") + } + return mapInternalToStep(dst) +} + +// Create creates a step. +func (s *stepStore) Create(ctx context.Context, step *types.Step) error { + const stepInsertStmt = ` + INSERT INTO steps ( + step_stage_id + ,step_number + ,step_name + ,step_status + ,step_error + ,step_parent_group_id + ,step_errignore + ,step_exit_code + ,step_started + ,step_stopped + ,step_version + ,step_depends_on + ,step_image + ,step_detached + ,step_schema + ) VALUES ( + :step_stage_id + ,:step_number + ,:step_name + ,:step_status + ,:step_error + ,:step_parent_group_id + ,:step_errignore + ,:step_exit_code + ,:step_started + ,:step_stopped + ,:step_version + ,:step_depends_on + ,:step_image + ,:step_detached + ,:step_schema + ) RETURNING step_id` + db := dbtx.GetAccessor(ctx, s.db) + + query, arg, err := db.BindNamed(stepInsertStmt, mapStepToInternal(step)) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind step object") + } + + if err = db.QueryRowContext(ctx, query, arg...).Scan(&step.ID); err != nil { + return database.ProcessSQLErrorf(err, "Step query failed") + } + + return nil +} + +// Update tries to update a step in the datastore and returns a locking error +// if it was unable to do so. 
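+// A minimal usage sketch (illustrative only, not part of this change; variable names are
+// assumed): callers detect the optimistic-locking failure via the sentinel error and
+// re-read the step before retrying.
+//
+//	if err := stepStore.Update(ctx, step); errors.Is(err, gitness_store.ErrVersionConflict) {
+//		// reload the step, re-apply the change, and call Update again
+//	}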
+func (s *stepStore) Update(ctx context.Context, e *types.Step) error { + const stepUpdateStmt = ` + UPDATE steps + SET + step_name = :step_name + ,step_status = :step_status + ,step_error = :step_error + ,step_errignore = :step_errignore + ,step_exit_code = :step_exit_code + ,step_started = :step_started + ,step_stopped = :step_stopped + ,step_depends_on = :step_depends_on + ,step_image = :step_image + ,step_detached = :step_detached + ,step_schema = :step_schema + ,step_version = :step_version + WHERE step_id = :step_id AND step_version = :step_version - 1` + step := mapStepToInternal(e) + + step.Version++ + + db := dbtx.GetAccessor(ctx, s.db) + + query, arg, err := db.BindNamed(stepUpdateStmt, step) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind step object") + } + + result, err := db.ExecContext(ctx, query, arg...) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to update step") + } + + count, err := result.RowsAffected() + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to get number of updated rows") + } + + if count == 0 { + return gitness_store.ErrVersionConflict + } + + m, err := mapInternalToStep(step) + if err != nil { + return fmt.Errorf("Could not map step object: %w", err) + } + *e = *m + e.Version = step.Version + return nil +} diff --git a/internal/store/database/step_map.go b/internal/store/database/step_map.go new file mode 100644 index 0000000000..780b7795fd --- /dev/null +++ b/internal/store/database/step_map.go @@ -0,0 +1,67 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package database + +import ( + "encoding/json" + "fmt" + + "github.com/harness/gitness/types" +) + +func mapInternalToStep(in *step) (*types.Step, error) { + var dependsOn []string + err := json.Unmarshal(in.DependsOn, &dependsOn) + if err != nil { + return nil, fmt.Errorf("could not unmarshal step.DependsOn: %w", err) + } + return &types.Step{ + ID: in.ID, + StageID: in.StageID, + Number: in.Number, + Name: in.Name, + Status: in.Status, + Error: in.Error, + ErrIgnore: in.ErrIgnore, + ExitCode: in.ExitCode, + Started: in.Started, + Stopped: in.Stopped, + Version: in.Version, + DependsOn: dependsOn, + Image: in.Image, + Detached: in.Detached, + Schema: in.Schema, + }, nil +} + +func mapStepToInternal(in *types.Step) *step { + return &step{ + ID: in.ID, + StageID: in.StageID, + Number: in.Number, + Name: in.Name, + Status: in.Status, + Error: in.Error, + ErrIgnore: in.ErrIgnore, + ExitCode: in.ExitCode, + Started: in.Started, + Stopped: in.Stopped, + Version: in.Version, + DependsOn: EncodeToSQLXJSON(in.DependsOn), + Image: in.Image, + Detached: in.Detached, + Schema: in.Schema, + } +} diff --git a/internal/store/database/store_test.go b/internal/store/database/store_test.go new file mode 100644 index 0000000000..588ec7bdbd --- /dev/null +++ b/internal/store/database/store_test.go @@ -0,0 +1,81 @@ +// Copyright 2023 Harness, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package database + +import ( + "context" + "encoding/json" + "os" + + "github.com/harness/gitness/store/database" + + "github.com/jmoiron/sqlx" + + _ "github.com/lib/pq" + _ "github.com/mattn/go-sqlite3" +) + +// connect opens a new test database connection. +func connect() (*sqlx.DB, error) { + var ( + driver = "sqlite3" + config = ":memory:" + ) + if os.Getenv("DATABASE_CONFIG") != "" { + driver = os.Getenv("DATABASE_DRIVER") + config = os.Getenv("DATABASE_CONFIG") + } + return database.Connect(context.Background(), driver, config) +} + +// seed seed the database state. +func seed(db *sqlx.DB) error { + /* + _, err := db.Exec("DELETE FROM executions") + if err != nil { + return err + } + _, err = db.Exec("DELETE FROM pipelines") + if err != nil { + return err + } + _, err = db.Exec("DELETE FROM users") + if err != nil { + return err + } + _, err = db.Exec("ALTER SEQUENCE users_user_id_seq RESTART WITH 1") + if err != nil { + return err + } + _, err = db.Exec("ALTER SEQUENCE pipelines_pipeline_id_seq RESTART WITH 1") + if err != nil { + return err + } + _, err = db.Exec("ALTER SEQUENCE executions_execution_id_seq RESTART WITH 1") + return err + */ + return nil +} + +// unmarshal a testdata file. +// +//nolint:unparam // expected to be called for other paths in the future. +func unmarshal(path string, v interface{}) error { + out, err := os.ReadFile(path) + if err != nil { + return err + } + return json.Unmarshal(out, v) +} diff --git a/internal/store/database/template.go b/internal/store/database/template.go new file mode 100644 index 0000000000..1e50f59d63 --- /dev/null +++ b/internal/store/database/template.go @@ -0,0 +1,276 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
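The step Update above (and the template, trigger, and webhook updates later in this patch) relies on optimistic locking: the bound object's version is incremented, the UPDATE only matches rows whose stored version equals the incremented value minus one, and zero affected rows is surfaced as a version conflict that UpdateOptLock-style callers resolve by re-reading and retrying. Below is a minimal, self-contained sketch of that pattern with a toy items table and plain database/sql on SQLite; the names are illustrative, and unlike the real stores it bumps the version inside the SQL rather than on the Go side before binding.

// Sketch of version-guarded updates plus the re-read-and-retry loop.
package main

import (
    "database/sql"
    "errors"
    "fmt"
    "log"

    _ "github.com/mattn/go-sqlite3"
)

var errVersionConflict = errors.New("version conflict")

func updateName(db *sql.DB, id, version int64, name string) error {
    // Bump the version and require that the row still carries the version we loaded.
    res, err := db.Exec(
        `UPDATE items SET item_name = ?, item_version = item_version + 1
         WHERE item_id = ? AND item_version = ?`,
        name, id, version,
    )
    if err != nil {
        return err
    }
    count, err := res.RowsAffected()
    if err != nil {
        return err
    }
    if count == 0 {
        return errVersionConflict
    }
    return nil
}

func main() {
    db, err := sql.Open("sqlite3", ":memory:")
    if err != nil {
        log.Fatal(err)
    }
    defer db.Close()

    if _, err := db.Exec(`CREATE TABLE items (item_id INTEGER PRIMARY KEY, item_name TEXT, item_version INTEGER);
        INSERT INTO items VALUES (1, 'old', 1);`); err != nil {
        log.Fatal(err)
    }

    // Retry loop in the spirit of UpdateOptLock: load the current version, attempt
    // the write, and on a conflict (a concurrent writer won the race) re-read and retry.
    for {
        var version int64
        if err := db.QueryRow(`SELECT item_version FROM items WHERE item_id = 1`).Scan(&version); err != nil {
            log.Fatal(err)
        }
        err := updateName(db, 1, version, "new")
        if err == nil {
            break
        }
        if !errors.Is(err, errVersionConflict) {
            log.Fatal(err)
        }
    }
    fmt.Println("update applied")
}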
+ +package database + +import ( + "context" + "fmt" + "strings" + "time" + + "github.com/harness/gitness/internal/store" + gitness_store "github.com/harness/gitness/store" + "github.com/harness/gitness/store/database" + "github.com/harness/gitness/store/database/dbtx" + "github.com/harness/gitness/types" + + "github.com/jmoiron/sqlx" + "github.com/pkg/errors" +) + +var _ store.TemplateStore = (*templateStore)(nil) + +const ( + templateQueryBase = ` + SELECT` + templateColumns + ` + FROM templates` + + templateColumns = ` + template_id, + template_description, + template_space_id, + template_uid, + template_data, + template_created, + template_updated, + template_version + ` +) + +// NewTemplateStore returns a new TemplateStore. +func NewTemplateStore(db *sqlx.DB) *templateStore { + return &templateStore{ + db: db, + } +} + +type templateStore struct { + db *sqlx.DB +} + +// Find returns a template given a template ID. +func (s *templateStore) Find(ctx context.Context, id int64) (*types.Template, error) { + const findQueryStmt = templateQueryBase + ` + WHERE template_id = $1` + db := dbtx.GetAccessor(ctx, s.db) + + dst := new(types.Template) + if err := db.GetContext(ctx, dst, findQueryStmt, id); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed to find template") + } + return dst, nil +} + +// FindByUID returns a template in a given space with a given UID. +func (s *templateStore) FindByUID(ctx context.Context, spaceID int64, uid string) (*types.Template, error) { + const findQueryStmt = templateQueryBase + ` + WHERE template_space_id = $1 AND template_uid = $2` + db := dbtx.GetAccessor(ctx, s.db) + + dst := new(types.Template) + if err := db.GetContext(ctx, dst, findQueryStmt, spaceID, uid); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed to find template") + } + return dst, nil +} + +// Create creates a template. +func (s *templateStore) Create(ctx context.Context, template *types.Template) error { + const templateInsertStmt = ` + INSERT INTO templates ( + template_description, + template_space_id, + template_uid, + template_data, + template_created, + template_updated, + template_version + ) VALUES ( + :template_description, + :template_space_id, + :template_uid, + :template_data, + :template_created, + :template_updated, + :template_version + ) RETURNING template_id` + db := dbtx.GetAccessor(ctx, s.db) + + query, arg, err := db.BindNamed(templateInsertStmt, template) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind template object") + } + + if err = db.QueryRowContext(ctx, query, arg...).Scan(&template.ID); err != nil { + return database.ProcessSQLErrorf(err, "template query failed") + } + + return nil +} + +func (s *templateStore) Update(ctx context.Context, p *types.Template) error { + const templateUpdateStmt = ` + UPDATE templates + SET + template_description = :template_description, + template_uid = :template_uid, + template_data = :template_data, + template_updated = :template_updated, + template_version = :template_version + WHERE template_id = :template_id AND template_version = :template_version - 1` + updatedAt := time.Now() + template := *p + + template.Version++ + template.Updated = updatedAt.UnixMilli() + + db := dbtx.GetAccessor(ctx, s.db) + + query, arg, err := db.BindNamed(templateUpdateStmt, template) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind template object") + } + + result, err := db.ExecContext(ctx, query, arg...) 
+ if err != nil { + return database.ProcessSQLErrorf(err, "Failed to update template") + } + + count, err := result.RowsAffected() + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to get number of updated rows") + } + + if count == 0 { + return gitness_store.ErrVersionConflict + } + + p.Version = template.Version + p.Updated = template.Updated + return nil +} + +// UpdateOptLock updates the pipeline using the optimistic locking mechanism. +func (s *templateStore) UpdateOptLock(ctx context.Context, + template *types.Template, + mutateFn func(template *types.Template) error, +) (*types.Template, error) { + for { + dup := *template + + err := mutateFn(&dup) + if err != nil { + return nil, err + } + + err = s.Update(ctx, &dup) + if err == nil { + return &dup, nil + } + if !errors.Is(err, gitness_store.ErrVersionConflict) { + return nil, err + } + + template, err = s.Find(ctx, template.ID) + if err != nil { + return nil, err + } + } +} + +// List lists all the templates present in a space. +func (s *templateStore) List(ctx context.Context, parentID int64, filter types.ListQueryFilter) ([]*types.Template, error) { + stmt := database.Builder. + Select(templateColumns). + From("templates"). + Where("template_space_id = ?", fmt.Sprint(parentID)) + + if filter.Query != "" { + stmt = stmt.Where("LOWER(template_uid) LIKE ?", fmt.Sprintf("%%%s%%", strings.ToLower(filter.Query))) + } + + stmt = stmt.Limit(database.Limit(filter.Size)) + stmt = stmt.Offset(database.Offset(filter.Page, filter.Size)) + + sql, args, err := stmt.ToSql() + if err != nil { + return nil, errors.Wrap(err, "Failed to convert query to sql") + } + + db := dbtx.GetAccessor(ctx, s.db) + + dst := []*types.Template{} + if err = db.SelectContext(ctx, &dst, sql, args...); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed executing custom list query") + } + + return dst, nil +} + +// Delete deletes a template given a template ID. +func (s *templateStore) Delete(ctx context.Context, id int64) error { + const templateDeleteStmt = ` + DELETE FROM templates + WHERE template_id = $1` + + db := dbtx.GetAccessor(ctx, s.db) + + if _, err := db.ExecContext(ctx, templateDeleteStmt, id); err != nil { + return database.ProcessSQLErrorf(err, "Could not delete template") + } + + return nil +} + +// DeleteByUID deletes a template with a given UID in a space. +func (s *templateStore) DeleteByUID(ctx context.Context, spaceID int64, uid string) error { + const templateDeleteStmt = ` + DELETE FROM templates + WHERE template_space_id = $1 AND template_uid = $2` + + db := dbtx.GetAccessor(ctx, s.db) + + if _, err := db.ExecContext(ctx, templateDeleteStmt, spaceID, uid); err != nil { + return database.ProcessSQLErrorf(err, "Could not delete template") + } + + return nil +} + +// Count of templates in a space. +func (s *templateStore) Count(ctx context.Context, parentID int64, filter types.ListQueryFilter) (int64, error) { + stmt := database.Builder. + Select("count(*)"). + From("templates"). 
+ Where("template_space_id = ?", parentID) + + if filter.Query != "" { + stmt = stmt.Where("LOWER(template_uid) LIKE ?", fmt.Sprintf("%%%s%%", filter.Query)) + } + + sql, args, err := stmt.ToSql() + if err != nil { + return 0, errors.Wrap(err, "Failed to convert query to sql") + } + + db := dbtx.GetAccessor(ctx, s.db) + + var count int64 + err = db.QueryRowContext(ctx, sql, args...).Scan(&count) + if err != nil { + return 0, database.ProcessSQLErrorf(err, "Failed executing count query") + } + return count, nil +} diff --git a/internal/store/database/testdata/repos.json b/internal/store/database/testdata/repos.json new file mode 100644 index 0000000000..3f818b8116 --- /dev/null +++ b/internal/store/database/testdata/repos.json @@ -0,0 +1,32 @@ +[ + { + "id": 1, + "uid": "repo1", + "parentId": 1, + "description": "Some repository.", + "isPublic": true, + "createdBy": 1, + "created": 1662427496787, + "updated": 1662427496787, + "forkId": 0, + "numForks": 0, + "numPulls": 0, + "numClosedPulls": 0, + "numOpenPulls": 0 + }, + { + "id": 2, + "uid": "repo2", + "parentId": 2, + "description": "Some other repository.", + "isPublic": true, + "createdBy": 1, + "created": 1662427602488, + "updated": 1662427602488, + "forkId": 0, + "numForks": 0, + "numPulls": 0, + "numClosedPulls": 0, + "numOpenPulls": 0 + } +] \ No newline at end of file diff --git a/internal/store/database/testdata/spaces.json b/internal/store/database/testdata/spaces.json new file mode 100644 index 0000000000..a246ac1d47 --- /dev/null +++ b/internal/store/database/testdata/spaces.json @@ -0,0 +1,22 @@ +[ + { + "id": 1, + "uid": "space1", + "parentId": 0, + "description": "Some space.", + "isPublic": true, + "createdBy": 1, + "created": 1662427417128, + "updated": 1662427417128 + }, + { + "id": 2, + "uid": "space2", + "parentId": 1, + "description": "Some subspace.", + "isPublic": true, + "createdBy": 1, + "created": 1662427428536, + "updated": 1662427428536 + } +] \ No newline at end of file diff --git a/internal/store/database/testdata/users.json b/internal/store/database/testdata/users.json new file mode 100644 index 0000000000..d87a14cf9c --- /dev/null +++ b/internal/store/database/testdata/users.json @@ -0,0 +1,26 @@ +[ + { + "id": 1, + "uid": "jane21", + "email": "jane@example.com", + "name": "jane", + "company": "acme", + "admin": true, + "blocked": false, + "created": 0, + "updated": 0, + "authed": 0 + }, + { + "id": 2, + "uid": "john21", + "email": "john@example.com", + "name": "john", + "company": "acme", + "admin": false, + "blocked": false, + "created": 0, + "updated": 0, + "authed": 0 + } +] \ No newline at end of file diff --git a/internal/store/database/token.go b/internal/store/database/token.go new file mode 100644 index 0000000000..fea8c0dd75 --- /dev/null +++ b/internal/store/database/token.go @@ -0,0 +1,190 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
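The JSON documents above are fixtures intended to be loaded through the unmarshal helper from store_test.go. A hedged, self-contained sketch of that pattern follows: it writes a users.json-shaped document to a temporary file and decodes it into a local testUser type, since the json tags of the real principal/user type are not part of this patch.

// Sketch of the testdata-fixture loading pattern used by the store tests.
package main

import (
    "encoding/json"
    "fmt"
    "log"
    "os"
    "path/filepath"
)

type testUser struct {
    ID    int64  `json:"id"`
    UID   string `json:"uid"`
    Email string `json:"email"`
    Name  string `json:"name"`
    Admin bool   `json:"admin"`
}

// unmarshal mirrors the helper from store_test.go: read a file and decode it into v.
func unmarshal(path string, v interface{}) error {
    out, err := os.ReadFile(path)
    if err != nil {
        return err
    }
    return json.Unmarshal(out, v)
}

func main() {
    fixture := `[
  {"id": 1, "uid": "jane21", "email": "jane@example.com", "name": "jane", "admin": true},
  {"id": 2, "uid": "john21", "email": "john@example.com", "name": "john", "admin": false}
]`

    path := filepath.Join(os.TempDir(), "users.json")
    if err := os.WriteFile(path, []byte(fixture), 0o600); err != nil {
        log.Fatal(err)
    }
    defer os.Remove(path)

    var users []*testUser
    if err := unmarshal(path, &users); err != nil {
        log.Fatal(err)
    }
    fmt.Printf("loaded %d fixture users; first is %s (%s)\n", len(users), users[0].Name, users[0].Email)
}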
+ +package database + +import ( + "context" + + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/store/database" + "github.com/harness/gitness/store/database/dbtx" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/jmoiron/sqlx" +) + +var _ store.TokenStore = (*TokenStore)(nil) + +// NewTokenStore returns a new TokenStore. +func NewTokenStore(db *sqlx.DB) *TokenStore { + return &TokenStore{db} +} + +// TokenStore implements a TokenStore backed by a relational database. +type TokenStore struct { + db *sqlx.DB +} + +// Find finds the token by id. +func (s *TokenStore) Find(ctx context.Context, id int64) (*types.Token, error) { + db := dbtx.GetAccessor(ctx, s.db) + + dst := new(types.Token) + if err := db.GetContext(ctx, dst, TokenSelectByID, id); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed to find token") + } + + return dst, nil +} + +// FindByUID finds the token by principalId and tokenUID. +func (s *TokenStore) FindByUID(ctx context.Context, principalID int64, tokenUID string) (*types.Token, error) { + db := dbtx.GetAccessor(ctx, s.db) + + dst := new(types.Token) + if err := db.GetContext(ctx, dst, TokenSelectByPrincipalIDAndUID, principalID, tokenUID); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed to find token by UID") + } + + return dst, nil +} + +// Create saves the token details. +func (s *TokenStore) Create(ctx context.Context, token *types.Token) error { + db := dbtx.GetAccessor(ctx, s.db) + + query, arg, err := db.BindNamed(tokenInsert, token) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind token object") + } + + if err = db.QueryRowContext(ctx, query, arg...).Scan(&token.ID); err != nil { + return database.ProcessSQLErrorf(err, "Insert query failed") + } + + return nil +} + +// Delete deletes the token with the given id. +func (s *TokenStore) Delete(ctx context.Context, id int64) error { + db := dbtx.GetAccessor(ctx, s.db) + + if _, err := db.ExecContext(ctx, tokenDelete, id); err != nil { + return database.ProcessSQLErrorf(err, "The delete query failed") + } + + return nil +} + +// DeleteForPrincipal deletes all tokens for a specific principal. +func (s *TokenStore) DeleteForPrincipal(ctx context.Context, principalID int64) error { + db := dbtx.GetAccessor(ctx, s.db) + + if _, err := db.ExecContext(ctx, tokenDeleteForPrincipal, principalID); err != nil { + return database.ProcessSQLErrorf(err, "The delete query failed") + } + + return nil +} + +// Count returns a count of tokens of a specifc type for a specific principal. +func (s *TokenStore) Count(ctx context.Context, + principalID int64, tokenType enum.TokenType) (int64, error) { + db := dbtx.GetAccessor(ctx, s.db) + + var count int64 + err := db.QueryRowContext(ctx, tokenCountForPrincipalIDOfType, principalID, tokenType).Scan(&count) + if err != nil { + return 0, database.ProcessSQLErrorf(err, "Failed executing count query") + } + + return count, nil +} + +// List returns a list of tokens of a specific type for a specific principal. +func (s *TokenStore) List(ctx context.Context, + principalID int64, tokenType enum.TokenType) ([]*types.Token, error) { + db := dbtx.GetAccessor(ctx, s.db) + + dst := []*types.Token{} + + // TODO: custom filters / sorting for tokens. 
+ + err := db.SelectContext(ctx, &dst, tokenSelectForPrincipalIDOfType, principalID, tokenType) + if err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed executing token list query") + } + return dst, nil +} + +const tokenSelectBase = ` +SELECT +token_id +,token_type +,token_uid +,token_principal_id +,token_expires_at +,token_issued_at +,token_created_by +FROM tokens +` //#nosec G101 + +const tokenSelectForPrincipalIDOfType = tokenSelectBase + ` +WHERE token_principal_id = $1 AND token_type = $2 +ORDER BY token_issued_at DESC +` //#nosec G101 + +const tokenCountForPrincipalIDOfType = ` +SELECT count(*) +FROM tokens +WHERE token_principal_id = $1 AND token_type = $2 +` //#nosec G101 + +const TokenSelectByID = tokenSelectBase + ` +WHERE token_id = $1 +` + +const TokenSelectByPrincipalIDAndUID = tokenSelectBase + ` +WHERE token_principal_id = $1 AND token_uid = $2 +` + +const tokenDelete = ` +DELETE FROM tokens +WHERE token_id = $1 +` + +const tokenDeleteForPrincipal = ` +DELETE FROM tokens +WHERE token_principal_id = $1 +` + +const tokenInsert = ` +INSERT INTO tokens ( + token_type + ,token_uid + ,token_principal_id + ,token_expires_at + ,token_issued_at + ,token_created_by +) values ( + :token_type + ,:token_uid + ,:token_principal_id + ,:token_expires_at + ,:token_issued_at + ,:token_created_by +) RETURNING token_id +` diff --git a/internal/store/database/trigger.go b/internal/store/database/trigger.go new file mode 100644 index 0000000000..4bef2b86f7 --- /dev/null +++ b/internal/store/database/trigger.go @@ -0,0 +1,361 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
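The token insert above shows the named-parameter style used by every store in this patch: the statement uses :column placeholders, sqlx's BindNamed rewrites them into the driver's positional form and pulls the argument values from the struct's db tags, and a RETURNING clause feeds the generated ID back into the object. A small standalone sketch of the same round trip follows, using a toy tokens table on in-memory SQLite; it assumes a bundled SQLite build new enough to support RETURNING.

// Sketch of the BindNamed + RETURNING insert round trip.
package main

import (
    "context"
    "fmt"
    "log"

    "github.com/jmoiron/sqlx"
    _ "github.com/mattn/go-sqlite3"
)

type token struct {
    ID          int64  `db:"token_id"`
    UID         string `db:"token_uid"`
    PrincipalID int64  `db:"token_principal_id"`
}

func main() {
    ctx := context.Background()

    db := sqlx.MustOpen("sqlite3", ":memory:")
    defer db.Close()

    db.MustExec(`CREATE TABLE tokens (
        token_id INTEGER PRIMARY KEY AUTOINCREMENT,
        token_uid TEXT,
        token_principal_id INTEGER)`)

    const insert = `
    INSERT INTO tokens (token_uid, token_principal_id)
    VALUES (:token_uid, :token_principal_id)
    RETURNING token_id`

    t := &token{UID: "pat-1", PrincipalID: 42}

    // BindNamed converts the :named placeholders and extracts args from the db tags.
    query, args, err := db.BindNamed(insert, t)
    if err != nil {
        log.Fatal(err)
    }
    // Scan the RETURNING column straight back into the object, as the stores do.
    if err := db.QueryRowContext(ctx, query, args...).Scan(&t.ID); err != nil {
        log.Fatal(err)
    }
    fmt.Printf("inserted token %q with id %d\n", t.UID, t.ID)
}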
+ +package database + +import ( + "context" + "encoding/json" + "fmt" + "strings" + "time" + + "github.com/harness/gitness/internal/store" + gitness_store "github.com/harness/gitness/store" + "github.com/harness/gitness/store/database" + "github.com/harness/gitness/store/database/dbtx" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/jmoiron/sqlx" + sqlxtypes "github.com/jmoiron/sqlx/types" + "github.com/pkg/errors" +) + +var _ store.TriggerStore = (*triggerStore)(nil) + +type trigger struct { + ID int64 `db:"trigger_id"` + UID string `db:"trigger_uid"` + Description string `db:"trigger_description"` + Type string `db:"trigger_type"` + Secret string `db:"trigger_secret"` + PipelineID int64 `db:"trigger_pipeline_id"` + RepoID int64 `db:"trigger_repo_id"` + CreatedBy int64 `db:"trigger_created_by"` + Disabled bool `db:"trigger_disabled"` + Actions sqlxtypes.JSONText `db:"trigger_actions"` + Created int64 `db:"trigger_created"` + Updated int64 `db:"trigger_updated"` + Version int64 `db:"trigger_version"` +} + +func mapInternalToTrigger(trigger *trigger) (*types.Trigger, error) { + var actions []enum.TriggerAction + err := json.Unmarshal(trigger.Actions, &actions) + if err != nil { + return nil, errors.Wrap(err, "could not unmarshal trigger.actions") + } + + return &types.Trigger{ + ID: trigger.ID, + Description: trigger.Description, + Type: trigger.Type, + Secret: trigger.Secret, + PipelineID: trigger.PipelineID, + RepoID: trigger.RepoID, + CreatedBy: trigger.CreatedBy, + Disabled: trigger.Disabled, + Actions: actions, + UID: trigger.UID, + Created: trigger.Created, + Updated: trigger.Updated, + Version: trigger.Version, + }, nil +} + +func mapInternalToTriggerList(triggers []*trigger) ([]*types.Trigger, error) { + ret := make([]*types.Trigger, len(triggers)) + for i, t := range triggers { + trigger, err := mapInternalToTrigger(t) + if err != nil { + return nil, err + } + ret[i] = trigger + } + return ret, nil +} + +func mapTriggerToInternal(t *types.Trigger) *trigger { + return &trigger{ + ID: t.ID, + UID: t.UID, + Description: t.Description, + Type: t.Type, + PipelineID: t.PipelineID, + Secret: t.Secret, + RepoID: t.RepoID, + CreatedBy: t.CreatedBy, + Disabled: t.Disabled, + Actions: EncodeToSQLXJSON(t.Actions), + Created: t.Created, + Updated: t.Updated, + Version: t.Version, + } +} + +// NewTriggerStore returns a new TriggerStore. +func NewTriggerStore(db *sqlx.DB) *triggerStore { + return &triggerStore{ + db: db, + } +} + +type triggerStore struct { + db *sqlx.DB +} + +const ( + triggerColumns = ` + trigger_id + ,trigger_uid + ,trigger_disabled + ,trigger_actions + ,trigger_description + ,trigger_pipeline_id + ,trigger_created + ,trigger_updated + ,trigger_version + ` +) + +// Find returns an trigger given a pipeline ID and a trigger UID. +func (s *triggerStore) FindByUID(ctx context.Context, pipelineID int64, uid string) (*types.Trigger, error) { + const findQueryStmt = ` + SELECT` + triggerColumns + ` + FROM triggers + WHERE trigger_pipeline_id = $1 AND trigger_uid = $2` + db := dbtx.GetAccessor(ctx, s.db) + + dst := new(trigger) + if err := db.GetContext(ctx, dst, findQueryStmt, pipelineID, uid); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed to find trigger") + } + return mapInternalToTrigger(dst) +} + +// Create creates a new trigger in the datastore. 
+func (s *triggerStore) Create(ctx context.Context, t *types.Trigger) error { + const triggerInsertStmt = ` + INSERT INTO triggers ( + trigger_uid + ,trigger_description + ,trigger_actions + ,trigger_disabled + ,trigger_type + ,trigger_secret + ,trigger_created_by + ,trigger_pipeline_id + ,trigger_repo_id + ,trigger_created + ,trigger_updated + ,trigger_version + ) VALUES ( + :trigger_uid + ,:trigger_description + ,:trigger_actions + ,:trigger_disabled + ,:trigger_type + ,:trigger_secret + ,:trigger_created_by + ,:trigger_pipeline_id + ,:trigger_repo_id + ,:trigger_created + ,:trigger_updated + ,:trigger_version + ) RETURNING trigger_id` + db := dbtx.GetAccessor(ctx, s.db) + + trigger := mapTriggerToInternal(t) + query, arg, err := db.BindNamed(triggerInsertStmt, trigger) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind trigger object") + } + + if err = db.QueryRowContext(ctx, query, arg...).Scan(&trigger.ID); err != nil { + return database.ProcessSQLErrorf(err, "Trigger query failed") + } + + return nil +} + +// Update tries to update an trigger in the datastore with optimistic locking. +func (s *triggerStore) Update(ctx context.Context, t *types.Trigger) error { + const triggerUpdateStmt = ` + UPDATE triggers + SET + trigger_uid = :trigger_uid + ,trigger_description = :trigger_description + ,trigger_disabled = :trigger_disabled + ,trigger_updated = :trigger_updated + ,trigger_actions = :trigger_actions + ,trigger_version = :trigger_version + WHERE trigger_id = :trigger_id AND trigger_version = :trigger_version - 1` + updatedAt := time.Now() + trigger := mapTriggerToInternal(t) + + trigger.Version++ + trigger.Updated = updatedAt.UnixMilli() + + db := dbtx.GetAccessor(ctx, s.db) + + query, arg, err := db.BindNamed(triggerUpdateStmt, trigger) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind trigger object") + } + + result, err := db.ExecContext(ctx, query, arg...) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to update trigger") + } + + count, err := result.RowsAffected() + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to get number of updated rows") + } + + if count == 0 { + return gitness_store.ErrVersionConflict + } + + t.Version = trigger.Version + t.Updated = trigger.Updated + return nil +} + +// UpdateOptLock updates the pipeline using the optimistic locking mechanism. +func (s *triggerStore) UpdateOptLock(ctx context.Context, + trigger *types.Trigger, + mutateFn func(trigger *types.Trigger) error) (*types.Trigger, error) { + for { + dup := *trigger + + err := mutateFn(&dup) + if err != nil { + return nil, err + } + + err = s.Update(ctx, &dup) + if err == nil { + return &dup, nil + } + if !errors.Is(err, gitness_store.ErrVersionConflict) { + return nil, err + } + + trigger, err = s.FindByUID(ctx, trigger.PipelineID, trigger.UID) + if err != nil { + return nil, err + } + } +} + +// List lists the triggers for a given pipeline ID. +func (s *triggerStore) List( + ctx context.Context, + pipelineID int64, + filter types.ListQueryFilter, +) ([]*types.Trigger, error) { + stmt := database.Builder. + Select(triggerColumns). + From("triggers"). 
+ Where("trigger_pipeline_id = ?", fmt.Sprint(pipelineID)) + + stmt = stmt.Limit(database.Limit(filter.Size)) + stmt = stmt.Offset(database.Offset(filter.Page, filter.Size)) + + if filter.Query != "" { + stmt = stmt.Where("LOWER(trigger_uid) LIKE ?", fmt.Sprintf("%%%s%%", strings.ToLower(filter.Query))) + } + + sql, args, err := stmt.ToSql() + if err != nil { + return nil, errors.Wrap(err, "Failed to convert query to sql") + } + + db := dbtx.GetAccessor(ctx, s.db) + + dst := []*trigger{} + if err = db.SelectContext(ctx, &dst, sql, args...); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed executing custom list query") + } + + return mapInternalToTriggerList(dst) +} + +// ListAllEnabled lists all enabled triggers for a given repo without pagination +func (s *triggerStore) ListAllEnabled( + ctx context.Context, + repoID int64, +) ([]*types.Trigger, error) { + stmt := database.Builder. + Select(triggerColumns). + From("triggers"). + Where("trigger_repo_id = ? AND trigger_disabled = false", fmt.Sprint(repoID)) + + sql, args, err := stmt.ToSql() + if err != nil { + return nil, errors.Wrap(err, "Failed to convert query to sql") + } + + db := dbtx.GetAccessor(ctx, s.db) + + dst := []*trigger{} + if err = db.SelectContext(ctx, &dst, sql, args...); err != nil { + return nil, database.ProcessSQLErrorf(err, "Failed executing custom list query") + } + + return mapInternalToTriggerList(dst) +} + +// Count of triggers under a given pipeline. +func (s *triggerStore) Count(ctx context.Context, pipelineID int64, filter types.ListQueryFilter) (int64, error) { + stmt := database.Builder. + Select("count(*)"). + From("triggers"). + Where("trigger_pipeline_id = ?", pipelineID) + + if filter.Query != "" { + stmt = stmt.Where("LOWER(trigger_uid) LIKE ?", fmt.Sprintf("%%%s%%", filter.Query)) + } + + sql, args, err := stmt.ToSql() + if err != nil { + return 0, errors.Wrap(err, "Failed to convert query to sql") + } + + db := dbtx.GetAccessor(ctx, s.db) + + var count int64 + err = db.QueryRowContext(ctx, sql, args...).Scan(&count) + if err != nil { + return 0, database.ProcessSQLErrorf(err, "Failed executing count query") + } + return count, nil +} + +// Delete deletes an trigger given a pipeline ID and a trigger UID. +func (s *triggerStore) DeleteByUID(ctx context.Context, pipelineID int64, uid string) error { + const triggerDeleteStmt = ` + DELETE FROM triggers + WHERE trigger_pipeline_id = $1 AND trigger_uid = $2` + + db := dbtx.GetAccessor(ctx, s.db) + + if _, err := db.ExecContext(ctx, triggerDeleteStmt, pipelineID, uid); err != nil { + return database.ProcessSQLErrorf(err, "Could not delete trigger") + } + + return nil +} diff --git a/internal/store/database/webhook.go b/internal/store/database/webhook.go new file mode 100644 index 0000000000..54b28444e2 --- /dev/null +++ b/internal/store/database/webhook.go @@ -0,0 +1,458 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
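The trigger List, ListAllEnabled, and Count methods compose their SQL with a squirrel-style builder: a base SELECT, conditional WHERE clauses for the case-insensitive UID filter, and Limit/Offset for pagination, finished off with ToSql. database.Builder itself is not part of this patch, so the sketch below uses github.com/Masterminds/squirrel directly, which exposes the same chaining; the real builder may use a different placeholder format, for example $1 for Postgres.

// Sketch of composing a filtered, paginated list query with a statement builder.
package main

import (
    "fmt"
    "log"
    "strings"

    sq "github.com/Masterminds/squirrel"
)

func main() {
    pipelineID := int64(7)
    query := "Nightly"

    stmt := sq.Select("trigger_id", "trigger_uid", "trigger_disabled").
        From("triggers").
        Where("trigger_pipeline_id = ?", pipelineID)

    // Optional case-insensitive filter on the UID, mirroring the ListQueryFilter handling.
    if query != "" {
        stmt = stmt.Where("LOWER(trigger_uid) LIKE ?", fmt.Sprintf("%%%s%%", strings.ToLower(query)))
    }

    // Pagination: page 2 with a page size of 20.
    stmt = stmt.Limit(20).Offset(20)

    sql, args, err := stmt.ToSql()
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(sql)
    fmt.Println(args)
}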
+ +package database + +import ( + "context" + "fmt" + "strings" + "time" + + "github.com/harness/gitness/internal/store" + gitness_store "github.com/harness/gitness/store" + "github.com/harness/gitness/store/database" + "github.com/harness/gitness/store/database/dbtx" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/guregu/null" + "github.com/jmoiron/sqlx" + "github.com/pkg/errors" +) + +var _ store.WebhookStore = (*WebhookStore)(nil) + +// NewWebhookStore returns a new WebhookStore. +func NewWebhookStore(db *sqlx.DB) *WebhookStore { + return &WebhookStore{ + db: db, + } +} + +// WebhookStore implements store.Webhook backed by a relational database. +type WebhookStore struct { + db *sqlx.DB +} + +// webhook is an internal representation used to store webhook data in the database. +type webhook struct { + ID int64 `db:"webhook_id"` + Version int64 `db:"webhook_version"` + RepoID null.Int `db:"webhook_repo_id"` + SpaceID null.Int `db:"webhook_space_id"` + CreatedBy int64 `db:"webhook_created_by"` + Created int64 `db:"webhook_created"` + Updated int64 `db:"webhook_updated"` + Internal bool `db:"webhook_internal"` + + DisplayName string `db:"webhook_display_name"` + Description string `db:"webhook_description"` + URL string `db:"webhook_url"` + Secret string `db:"webhook_secret"` + Enabled bool `db:"webhook_enabled"` + Insecure bool `db:"webhook_insecure"` + Triggers string `db:"webhook_triggers"` + LatestExecutionResult null.String `db:"webhook_latest_execution_result"` +} + +const ( + webhookColumns = ` + webhook_id + ,webhook_version + ,webhook_repo_id + ,webhook_space_id + ,webhook_created_by + ,webhook_created + ,webhook_updated + ,webhook_display_name + ,webhook_description + ,webhook_url + ,webhook_secret + ,webhook_enabled + ,webhook_insecure + ,webhook_triggers + ,webhook_latest_execution_result + ,webhook_internal` + + webhookSelectBase = ` + SELECT` + webhookColumns + ` + FROM webhooks` +) + +// Find finds the webhook by id. +func (s *WebhookStore) Find(ctx context.Context, id int64) (*types.Webhook, error) { + const sqlQuery = webhookSelectBase + ` + WHERE webhook_id = $1` + + db := dbtx.GetAccessor(ctx, s.db) + + dst := &webhook{} + if err := db.GetContext(ctx, dst, sqlQuery, id); err != nil { + return nil, database.ProcessSQLErrorf(err, "Select query failed") + } + + res, err := mapToWebhook(dst) + if err != nil { + return nil, fmt.Errorf("failed to map webhook to external type: %w", err) + } + + return res, nil +} + +// Create creates a new webhook. 
+func (s *WebhookStore) Create(ctx context.Context, hook *types.Webhook) error { + const sqlQuery = ` + INSERT INTO webhooks ( + webhook_repo_id + ,webhook_space_id + ,webhook_created_by + ,webhook_created + ,webhook_updated + ,webhook_display_name + ,webhook_description + ,webhook_url + ,webhook_secret + ,webhook_enabled + ,webhook_insecure + ,webhook_triggers + ,webhook_latest_execution_result + ,webhook_internal + ) values ( + :webhook_repo_id + ,:webhook_space_id + ,:webhook_created_by + ,:webhook_created + ,:webhook_updated + ,:webhook_display_name + ,:webhook_description + ,:webhook_url + ,:webhook_secret + ,:webhook_enabled + ,:webhook_insecure + ,:webhook_triggers + ,:webhook_latest_execution_result + ,:webhook_internal + ) RETURNING webhook_id` + + db := dbtx.GetAccessor(ctx, s.db) + + dbHook, err := mapToInternalWebhook(hook) + if err != nil { + return fmt.Errorf("failed to map webhook to internal db type: %w", err) + } + + query, arg, err := db.BindNamed(sqlQuery, dbHook) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind webhook object") + } + + if err = db.QueryRowContext(ctx, query, arg...).Scan(&hook.ID); err != nil { + return database.ProcessSQLErrorf(err, "Insert query failed") + } + + return nil +} + +// Update updates an existing webhook. +func (s *WebhookStore) Update(ctx context.Context, hook *types.Webhook) error { + const sqlQuery = ` + UPDATE webhooks + SET + webhook_version = :webhook_version + ,webhook_updated = :webhook_updated + ,webhook_display_name = :webhook_display_name + ,webhook_description = :webhook_description + ,webhook_url = :webhook_url + ,webhook_secret = :webhook_secret + ,webhook_enabled = :webhook_enabled + ,webhook_insecure = :webhook_insecure + ,webhook_triggers = :webhook_triggers + ,webhook_latest_execution_result = :webhook_latest_execution_result + ,webhook_internal = :webhook_internal + WHERE webhook_id = :webhook_id and webhook_version = :webhook_version - 1` + + db := dbtx.GetAccessor(ctx, s.db) + + dbHook, err := mapToInternalWebhook(hook) + if err != nil { + return fmt.Errorf("failed to map webhook to internal db type: %w", err) + } + + // update Version (used for optimistic locking) and Updated time + dbHook.Version++ + dbHook.Updated = time.Now().UnixMilli() + + query, arg, err := db.BindNamed(sqlQuery, dbHook) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind webhook object") + } + + result, err := db.ExecContext(ctx, query, arg...) + if err != nil { + return database.ProcessSQLErrorf(err, "failed to update webhook") + } + + count, err := result.RowsAffected() + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to get number of updated rows") + } + + if count == 0 { + return gitness_store.ErrVersionConflict + } + + hook.Version = dbHook.Version + hook.Updated = dbHook.Updated + + return nil +} + +// UpdateOptLock updates the webhook using the optimistic locking mechanism. 
+func (s *WebhookStore) UpdateOptLock(ctx context.Context, hook *types.Webhook, + mutateFn func(hook *types.Webhook) error) (*types.Webhook, error) { + for { + dup := *hook + + err := mutateFn(&dup) + if err != nil { + return nil, fmt.Errorf("failed to mutate the webhook: %w", err) + } + + err = s.Update(ctx, &dup) + if err == nil { + return &dup, nil + } + if !errors.Is(err, gitness_store.ErrVersionConflict) { + return nil, fmt.Errorf("failed to update the webhook: %w", err) + } + + hook, err = s.Find(ctx, hook.ID) + if err != nil { + return nil, fmt.Errorf("failed to find the latst version of the webhook: %w", err) + } + } +} + +// Delete deletes the webhook for the given id. +func (s *WebhookStore) Delete(ctx context.Context, id int64) error { + const sqlQuery = ` + DELETE FROM webhooks + WHERE webhook_id = $1` + + if _, err := s.db.ExecContext(ctx, sqlQuery, id); err != nil { + return database.ProcessSQLErrorf(err, "The delete query failed") + } + + return nil +} + +// Count counts the webhooks for a given parent type and id. +func (s *WebhookStore) Count(ctx context.Context, parentType enum.WebhookParent, parentID int64, + opts *types.WebhookFilter) (int64, error) { + stmt := database.Builder. + Select("count(*)"). + From("webhooks") + + switch parentType { + case enum.WebhookParentRepo: + stmt = stmt.Where("webhook_repo_id = ?", parentID) + case enum.WebhookParentSpace: + stmt = stmt.Where("webhook_space_id = ?", parentID) + default: + return 0, fmt.Errorf("webhook parent type '%s' is not supported", parentType) + } + + if opts.Query != "" { + stmt = stmt.Where("LOWER(webhook_display_name) LIKE ?", fmt.Sprintf("%%%s%%", strings.ToLower(opts.Query))) + } + + sql, args, err := stmt.ToSql() + if err != nil { + return 0, fmt.Errorf("failed to convert query to sql: %w", err) + } + + db := dbtx.GetAccessor(ctx, s.db) + + var count int64 + err = db.QueryRowContext(ctx, sql, args...).Scan(&count) + if err != nil { + return 0, database.ProcessSQLErrorf(err, "Failed executing count query") + } + + return count, nil +} + +// List lists the webhooks for a given parent type and id. +func (s *WebhookStore) List(ctx context.Context, parentType enum.WebhookParent, parentID int64, + opts *types.WebhookFilter) ([]*types.Webhook, error) { + stmt := database.Builder. + Select(webhookColumns). + From("webhooks") + + switch parentType { + case enum.WebhookParentRepo: + stmt = stmt.Where("webhook_repo_id = ?", parentID) + case enum.WebhookParentSpace: + stmt = stmt.Where("webhook_space_id = ?", parentID) + default: + return nil, fmt.Errorf("webhook parent type '%s' is not supported", parentType) + } + + if opts.Query != "" { + stmt = stmt.Where("LOWER(webhook_display_name) LIKE ?", fmt.Sprintf("%%%s%%", strings.ToLower(opts.Query))) + } + + stmt = stmt.Limit(database.Limit(opts.Size)) + stmt = stmt.Offset(database.Offset(opts.Page, opts.Size)) + + switch opts.Sort { + case enum.WebhookAttrID, enum.WebhookAttrNone: + // NOTE: string concatenation is safe because the + // order attribute is an enum and is not user-defined, + // and is therefore not subject to injection attacks. 
+ stmt = stmt.OrderBy("webhook_id " + opts.Order.String()) + case enum.WebhookAttrDisplayName: + stmt = stmt.OrderBy("webhook_display_name " + opts.Order.String()) + //TODO: Postgres does not support COLLATE NOCASE for UTF8 + case enum.WebhookAttrCreated: + stmt = stmt.OrderBy("webhook_created " + opts.Order.String()) + case enum.WebhookAttrUpdated: + stmt = stmt.OrderBy("webhook_updated " + opts.Order.String()) + } + + sql, args, err := stmt.ToSql() + if err != nil { + return nil, fmt.Errorf("failed to convert query to sql: %w", err) + } + + db := dbtx.GetAccessor(ctx, s.db) + + dst := []*webhook{} + if err = db.SelectContext(ctx, &dst, sql, args...); err != nil { + return nil, database.ProcessSQLErrorf(err, "Select query failed") + } + + res, err := mapToWebhooks(dst) + if err != nil { + return nil, fmt.Errorf("failed to map webhooks to external type: %w", err) + } + + return res, nil +} + +func mapToWebhook(hook *webhook) (*types.Webhook, error) { + res := &types.Webhook{ + ID: hook.ID, + Version: hook.Version, + CreatedBy: hook.CreatedBy, + Created: hook.Created, + Updated: hook.Updated, + DisplayName: hook.DisplayName, + Description: hook.Description, + URL: hook.URL, + Secret: hook.Secret, + Enabled: hook.Enabled, + Insecure: hook.Insecure, + Triggers: triggersFromString(hook.Triggers), + LatestExecutionResult: (*enum.WebhookExecutionResult)(hook.LatestExecutionResult.Ptr()), + Internal: hook.Internal, + } + + switch { + case hook.RepoID.Valid && hook.SpaceID.Valid: + return nil, fmt.Errorf("both repoID and spaceID are set for hook %d", hook.ID) + case hook.RepoID.Valid: + res.ParentType = enum.WebhookParentRepo + res.ParentID = hook.RepoID.Int64 + case hook.SpaceID.Valid: + res.ParentType = enum.WebhookParentSpace + res.ParentID = hook.SpaceID.Int64 + default: + return nil, fmt.Errorf("neither repoID nor spaceID are set for hook %d", hook.ID) + } + + return res, nil +} + +func mapToInternalWebhook(hook *types.Webhook) (*webhook, error) { + res := &webhook{ + ID: hook.ID, + Version: hook.Version, + CreatedBy: hook.CreatedBy, + Created: hook.Created, + Updated: hook.Updated, + DisplayName: hook.DisplayName, + Description: hook.Description, + URL: hook.URL, + Secret: hook.Secret, + Enabled: hook.Enabled, + Insecure: hook.Insecure, + Triggers: triggersToString(hook.Triggers), + LatestExecutionResult: null.StringFromPtr((*string)(hook.LatestExecutionResult)), + Internal: hook.Internal, + } + + switch hook.ParentType { + case enum.WebhookParentRepo: + res.RepoID = null.IntFrom(hook.ParentID) + case enum.WebhookParentSpace: + res.SpaceID = null.IntFrom(hook.ParentID) + default: + return nil, fmt.Errorf("webhook parent type '%s' is not supported", hook.ParentType) + } + + return res, nil +} + +func mapToWebhooks(hooks []*webhook) ([]*types.Webhook, error) { + var err error + m := make([]*types.Webhook, len(hooks)) + for i, hook := range hooks { + m[i], err = mapToWebhook(hook) + if err != nil { + return nil, err + } + } + return m, nil +} + +// triggersSeparator defines the character that's used to join triggers for storing them in the DB +// ASSUMPTION: triggers are defined in an enum and don't contain ",". 
+const triggersSeparator = "," + +func triggersFromString(triggersString string) []enum.WebhookTrigger { + if triggersString == "" { + return []enum.WebhookTrigger{} + } + + rawTriggers := strings.Split(triggersString, triggersSeparator) + + triggers := make([]enum.WebhookTrigger, len(rawTriggers)) + for i, rawTrigger := range rawTriggers { + // ASSUMPTION: trigger is valid value (as we wrote it to DB) + triggers[i] = enum.WebhookTrigger(rawTrigger) + } + + return triggers +} + +func triggersToString(triggers []enum.WebhookTrigger) string { + rawTriggers := make([]string, len(triggers)) + for i := range triggers { + rawTriggers[i] = string(triggers[i]) + } + + return strings.Join(rawTriggers, triggersSeparator) +} diff --git a/internal/store/database/webhook_execution.go b/internal/store/database/webhook_execution.go new file mode 100644 index 0000000000..106a77acaf --- /dev/null +++ b/internal/store/database/webhook_execution.go @@ -0,0 +1,260 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package database + +import ( + "context" + "fmt" + + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/store/database" + "github.com/harness/gitness/store/database/dbtx" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/guregu/null" + "github.com/jmoiron/sqlx" +) + +var _ store.WebhookExecutionStore = (*WebhookExecutionStore)(nil) + +// NewWebhookExecutionStore returns a new WebhookExecutionStore. +func NewWebhookExecutionStore(db *sqlx.DB) *WebhookExecutionStore { + return &WebhookExecutionStore{ + db: db, + } +} + +// WebhookExecutionStore implements store.WebhookExecution backed by a relational database. +type WebhookExecutionStore struct { + db *sqlx.DB +} + +// webhookExecution is used to store executions of webhooks +// The object should be later re-packed into a different struct to return it as an API response. 
+type webhookExecution struct { + ID int64 `db:"webhook_execution_id"` + RetriggerOf null.Int `db:"webhook_execution_retrigger_of"` + Retriggerable bool `db:"webhook_execution_retriggerable"` + WebhookID int64 `db:"webhook_execution_webhook_id"` + TriggerType enum.WebhookTrigger `db:"webhook_execution_trigger_type"` + TriggerID string `db:"webhook_execution_trigger_id"` + Result enum.WebhookExecutionResult `db:"webhook_execution_result"` + Created int64 `db:"webhook_execution_created"` + Duration int64 `db:"webhook_execution_duration"` + Error string `db:"webhook_execution_error"` + RequestURL string `db:"webhook_execution_request_url"` + RequestHeaders string `db:"webhook_execution_request_headers"` + RequestBody string `db:"webhook_execution_request_body"` + ResponseStatusCode int `db:"webhook_execution_response_status_code"` + ResponseStatus string `db:"webhook_execution_response_status"` + ResponseHeaders string `db:"webhook_execution_response_headers"` + ResponseBody string `db:"webhook_execution_response_body"` +} + +const ( + webhookExecutionColumns = ` + webhook_execution_id + ,webhook_execution_retrigger_of + ,webhook_execution_retriggerable + ,webhook_execution_webhook_id + ,webhook_execution_trigger_type + ,webhook_execution_trigger_id + ,webhook_execution_result + ,webhook_execution_created + ,webhook_execution_duration + ,webhook_execution_error + ,webhook_execution_request_url + ,webhook_execution_request_headers + ,webhook_execution_request_body + ,webhook_execution_response_status_code + ,webhook_execution_response_status + ,webhook_execution_response_headers + ,webhook_execution_response_body` + + webhookExecutionSelectBase = ` + SELECT` + webhookExecutionColumns + ` + FROM webhook_executions` +) + +// Find finds the webhook execution by id. +func (s *WebhookExecutionStore) Find(ctx context.Context, id int64) (*types.WebhookExecution, error) { + const sqlQuery = webhookExecutionSelectBase + ` + WHERE webhook_execution_id = $1` + + db := dbtx.GetAccessor(ctx, s.db) + + dst := &webhookExecution{} + if err := db.GetContext(ctx, dst, sqlQuery, id); err != nil { + return nil, database.ProcessSQLErrorf(err, "Select query failed") + } + + return mapToWebhookExecution(dst), nil +} + +// Create creates a new webhook execution entry. 
+func (s *WebhookExecutionStore) Create(ctx context.Context, execution *types.WebhookExecution) error { + const sqlQuery = ` + INSERT INTO webhook_executions ( + webhook_execution_retrigger_of + ,webhook_execution_retriggerable + ,webhook_execution_webhook_id + ,webhook_execution_trigger_type + ,webhook_execution_trigger_id + ,webhook_execution_result + ,webhook_execution_created + ,webhook_execution_duration + ,webhook_execution_error + ,webhook_execution_request_url + ,webhook_execution_request_headers + ,webhook_execution_request_body + ,webhook_execution_response_status_code + ,webhook_execution_response_status + ,webhook_execution_response_headers + ,webhook_execution_response_body + ) values ( + :webhook_execution_retrigger_of + ,:webhook_execution_retriggerable + ,:webhook_execution_webhook_id + ,:webhook_execution_trigger_type + ,:webhook_execution_trigger_id + ,:webhook_execution_result + ,:webhook_execution_created + ,:webhook_execution_duration + ,:webhook_execution_error + ,:webhook_execution_request_url + ,:webhook_execution_request_headers + ,:webhook_execution_request_body + ,:webhook_execution_response_status_code + ,:webhook_execution_response_status + ,:webhook_execution_response_headers + ,:webhook_execution_response_body + ) RETURNING webhook_execution_id` + + db := dbtx.GetAccessor(ctx, s.db) + + query, arg, err := db.BindNamed(sqlQuery, mapToInternalWebhookExecution(execution)) + if err != nil { + return database.ProcessSQLErrorf(err, "Failed to bind webhook execution object") + } + + if err = db.QueryRowContext(ctx, query, arg...).Scan(&execution.ID); err != nil { + return database.ProcessSQLErrorf(err, "Insert query failed") + } + + return nil +} + +// ListForWebhook lists the webhook executions for a given webhook id. +func (s *WebhookExecutionStore) ListForWebhook(ctx context.Context, webhookID int64, + opts *types.WebhookExecutionFilter) ([]*types.WebhookExecution, error) { + stmt := database.Builder. + Select(webhookExecutionColumns). + From("webhook_executions"). + Where("webhook_execution_webhook_id = ?", webhookID) + + stmt = stmt.Limit(database.Limit(opts.Size)) + stmt = stmt.Offset(database.Offset(opts.Page, opts.Size)) + + // fixed ordering by desc id (new ones first) - add customized ordering if deemed necessary. + stmt = stmt.OrderBy("webhook_execution_id DESC") + + sql, args, err := stmt.ToSql() + if err != nil { + return nil, fmt.Errorf("failed to convert query to sql: %w", err) + } + + db := dbtx.GetAccessor(ctx, s.db) + + dst := []*webhookExecution{} + if err = db.SelectContext(ctx, &dst, sql, args...); err != nil { + return nil, database.ProcessSQLErrorf(err, "Select query failed") + } + + return mapToWebhookExecutions(dst), nil +} + +// ListForTrigger lists the webhook executions for a given trigger id. 
+func (s *WebhookExecutionStore) ListForTrigger(ctx context.Context, + triggerID string) ([]*types.WebhookExecution, error) { + const sqlQuery = webhookExecutionSelectBase + ` + WHERE webhook_execution_trigger_id = $1` + + db := dbtx.GetAccessor(ctx, s.db) + + dst := []*webhookExecution{} + if err := db.SelectContext(ctx, &dst, sqlQuery, triggerID); err != nil { + return nil, database.ProcessSQLErrorf(err, "Select query failed") + } + + return mapToWebhookExecutions(dst), nil +} + +func mapToWebhookExecution(execution *webhookExecution) *types.WebhookExecution { + return &types.WebhookExecution{ + ID: execution.ID, + RetriggerOf: execution.RetriggerOf.Ptr(), + Retriggerable: execution.Retriggerable, + Created: execution.Created, + WebhookID: execution.WebhookID, + TriggerType: execution.TriggerType, + TriggerID: execution.TriggerID, + Result: execution.Result, + Error: execution.Error, + Duration: execution.Duration, + Request: types.WebhookExecutionRequest{ + URL: execution.RequestURL, + Headers: execution.RequestHeaders, + Body: execution.RequestBody, + }, + Response: types.WebhookExecutionResponse{ + StatusCode: execution.ResponseStatusCode, + Status: execution.ResponseStatus, + Headers: execution.ResponseHeaders, + Body: execution.ResponseBody, + }, + } +} + +func mapToInternalWebhookExecution(execution *types.WebhookExecution) *webhookExecution { + return &webhookExecution{ + ID: execution.ID, + RetriggerOf: null.IntFromPtr(execution.RetriggerOf), + Retriggerable: execution.Retriggerable, + Created: execution.Created, + WebhookID: execution.WebhookID, + TriggerType: execution.TriggerType, + TriggerID: execution.TriggerID, + Result: execution.Result, + Error: execution.Error, + Duration: execution.Duration, + RequestURL: execution.Request.URL, + RequestHeaders: execution.Request.Headers, + RequestBody: execution.Request.Body, + ResponseStatusCode: execution.Response.StatusCode, + ResponseStatus: execution.Response.Status, + ResponseHeaders: execution.Response.Headers, + ResponseBody: execution.Response.Body, + } +} + +func mapToWebhookExecutions(executions []*webhookExecution) []*types.WebhookExecution { + m := make([]*types.WebhookExecution, len(executions)) + for i, hook := range executions { + m[i] = mapToWebhookExecution(hook) + } + + return m +} diff --git a/internal/store/database/wire.go b/internal/store/database/wire.go new file mode 100644 index 0000000000..098b6419e9 --- /dev/null +++ b/internal/store/database/wire.go @@ -0,0 +1,239 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package database + +import ( + "context" + + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/internal/store/database/migrate" + "github.com/harness/gitness/store/database" + + "github.com/google/wire" + "github.com/jmoiron/sqlx" +) + +// WireSet provides a wire set for this package. 
+var WireSet = wire.NewSet( + ProvideDatabase, + ProvidePrincipalStore, + ProvidePrincipalInfoView, + ProvideSpacePathStore, + ProvideSpaceStore, + ProvideRepoStore, + ProvideJobStore, + ProvideExecutionStore, + ProvidePipelineStore, + ProvideStageStore, + ProvideStepStore, + ProvideSecretStore, + ProvideRepoGitInfoView, + ProvideMembershipStore, + ProvideTokenStore, + ProvidePullReqStore, + ProvidePullReqActivityStore, + ProvideCodeCommentView, + ProvidePullReqReviewStore, + ProvidePullReqReviewerStore, + ProvidePullReqFileViewStore, + ProvideWebhookStore, + ProvideWebhookExecutionStore, + ProvideCheckStore, + ProvideReqCheckStore, + ProvideConnectorStore, + ProvideTemplateStore, + ProvideTriggerStore, + ProvidePluginStore, +) + +// migrator is helper function to set up the database by performing automated +// database migration steps. +func migrator(ctx context.Context, db *sqlx.DB) error { + return migrate.Migrate(ctx, db) +} + +// ProvideDatabase provides a database connection. +func ProvideDatabase(ctx context.Context, config database.Config) (*sqlx.DB, error) { + return database.ConnectAndMigrate( + ctx, + config.Driver, + config.Datasource, + migrator, + ) +} + +// ProvidePrincipalStore provides a principal store. +func ProvidePrincipalStore(db *sqlx.DB, uidTransformation store.PrincipalUIDTransformation) store.PrincipalStore { + return NewPrincipalStore(db, uidTransformation) +} + +// ProvidePrincipalInfoView provides a principal info store. +func ProvidePrincipalInfoView(db *sqlx.DB) store.PrincipalInfoView { + return NewPrincipalInfoView(db) +} + +// ProvideSpacePathStore provides a space path store. +func ProvideSpacePathStore( + db *sqlx.DB, + spacePathTransformation store.SpacePathTransformation, +) store.SpacePathStore { + return NewSpacePathStore(db, spacePathTransformation) +} + +// ProvideSpaceStore provides a space store. +func ProvideSpaceStore( + db *sqlx.DB, + spacePathCache store.SpacePathCache, + spacePathStore store.SpacePathStore, +) store.SpaceStore { + return NewSpaceStore(db, spacePathCache, spacePathStore) +} + +// ProvideRepoStore provides a repo store. +func ProvideRepoStore( + db *sqlx.DB, + spacePathCache store.SpacePathCache, + spacePathStore store.SpacePathStore, +) store.RepoStore { + return NewRepoStore(db, spacePathCache, spacePathStore) +} + +// ProvideJobStore provides a job store. +func ProvideJobStore(db *sqlx.DB) store.JobStore { + return NewJobStore(db) +} + +// ProvidePipelineStore provides a pipeline store. +func ProvidePipelineStore(db *sqlx.DB) store.PipelineStore { + return NewPipelineStore(db) +} + +// ProvideStageStore provides a stage store. +func ProvideStageStore(db *sqlx.DB) store.StageStore { + return NewStageStore(db) +} + +// ProvideStepStore provides a step store. +func ProvideStepStore(db *sqlx.DB) store.StepStore { + return NewStepStore(db) +} + +// ProvideSecretStore provides a secret store. +func ProvideSecretStore(db *sqlx.DB) store.SecretStore { + return NewSecretStore(db) +} + +// ProvideConnectorStore provides a connector store. +func ProvideConnectorStore(db *sqlx.DB) store.ConnectorStore { + return NewConnectorStore(db) +} + +// ProvideTemplateStore provides a template store. +func ProvideTemplateStore(db *sqlx.DB) store.TemplateStore { + return NewTemplateStore(db) +} + +// ProvideTriggerStore provides a trigger store. +func ProvideTriggerStore(db *sqlx.DB) store.TriggerStore { + return NewTriggerStore(db) +} + +// ProvideExecutionStore provides an execution store. 
+func ProvideExecutionStore(db *sqlx.DB) store.ExecutionStore { + return NewExecutionStore(db) +} + +// ProvidePluginStore provides a plugin store. +func ProvidePluginStore(db *sqlx.DB) store.PluginStore { + return NewPluginStore(db) +} + +// ProvideRepoGitInfoView provides a repo git UID view. +func ProvideRepoGitInfoView(db *sqlx.DB) store.RepoGitInfoView { + return NewRepoGitInfoView(db) +} + +func ProvideMembershipStore( + db *sqlx.DB, + principalInfoCache store.PrincipalInfoCache, + spacePathStore store.SpacePathStore, +) store.MembershipStore { + return NewMembershipStore(db, principalInfoCache, spacePathStore) +} + +// ProvideTokenStore provides a token store. +func ProvideTokenStore(db *sqlx.DB) store.TokenStore { + return NewTokenStore(db) +} + +// ProvidePullReqStore provides a pull request store. +func ProvidePullReqStore(db *sqlx.DB, + principalInfoCache store.PrincipalInfoCache, +) store.PullReqStore { + return NewPullReqStore(db, principalInfoCache) +} + +// ProvidePullReqActivityStore provides a pull request activity store. +func ProvidePullReqActivityStore(db *sqlx.DB, + principalInfoCache store.PrincipalInfoCache, +) store.PullReqActivityStore { + return NewPullReqActivityStore(db, principalInfoCache) +} + +// ProvideCodeCommentView provides a code comment view. +func ProvideCodeCommentView(db *sqlx.DB) store.CodeCommentView { + return NewCodeCommentView(db) +} + +// ProvidePullReqReviewStore provides a pull request review store. +func ProvidePullReqReviewStore(db *sqlx.DB) store.PullReqReviewStore { + return NewPullReqReviewStore(db) +} + +// ProvidePullReqReviewerStore provides a pull request reviewer store. +func ProvidePullReqReviewerStore(db *sqlx.DB, + principalInfoCache store.PrincipalInfoCache, +) store.PullReqReviewerStore { + return NewPullReqReviewerStore(db, principalInfoCache) +} + +// ProvidePullReqFileViewStore provides a pull request file view store. +func ProvidePullReqFileViewStore(db *sqlx.DB) store.PullReqFileViewStore { + return NewPullReqFileViewStore(db) +} + +// ProvideWebhookStore provides a webhook store. +func ProvideWebhookStore(db *sqlx.DB) store.WebhookStore { + return NewWebhookStore(db) +} + +// ProvideWebhookExecutionStore provides a webhook execution store. +func ProvideWebhookExecutionStore(db *sqlx.DB) store.WebhookExecutionStore { + return NewWebhookExecutionStore(db) +} + +// ProvideCheckStore provides a status check result store. +func ProvideCheckStore(db *sqlx.DB, + principalInfoCache store.PrincipalInfoCache, +) store.CheckStore { + return NewCheckStore(db, principalInfoCache) +} + +// ProvideReqCheckStore provides a required status check store. +func ProvideReqCheckStore(db *sqlx.DB, + principalInfoCache store.PrincipalInfoCache, +) store.ReqCheckStore { + return NewReqCheckStore(db, principalInfoCache) +} diff --git a/internal/store/logs.go b/internal/store/logs.go new file mode 100644 index 0000000000..69d0ff5ea2 --- /dev/null +++ b/internal/store/logs.go @@ -0,0 +1,35 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package store
+
+import (
+	"context"
+	"io"
+)
+
+// LogStore provides an interface for the persistent log store backend.
+type LogStore interface {
+	// Find returns a log stream from the datastore.
+	Find(ctx context.Context, stepID int64) (io.ReadCloser, error)
+
+	// Create copies the log stream from Reader r to the datastore.
+	Create(ctx context.Context, stepID int64, r io.Reader) error
+
+	// Update copies the log stream from Reader r to the datastore.
+	Update(ctx context.Context, stepID int64, r io.Reader) error
+
+	// Delete purges the log stream from the datastore.
+	Delete(ctx context.Context, stepID int64) error
+}
diff --git a/internal/store/logs/combine.go b/internal/store/logs/combine.go
new file mode 100644
index 0000000000..0ef6a5c3c7
--- /dev/null
+++ b/internal/store/logs/combine.go
@@ -0,0 +1,61 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package logs
+
+import (
+	"context"
+	"io"
+
+	"github.com/harness/gitness/internal/store"
+)
+
+// NewCombined returns a new combined log store that will fallback
+// to a secondary log store when necessary. This can be useful when
+// migrating from database logs to s3, where logs for older builds
+// are still being stored in the database, and newer logs in s3.
+func NewCombined(primary, secondary store.LogStore) store.LogStore {
+	return &combined{
+		primary:   primary,
+		secondary: secondary,
+	}
+}
+
+type combined struct {
+	primary, secondary store.LogStore
+}
+
+func (s *combined) Find(ctx context.Context, step int64) (io.ReadCloser, error) {
+	rc, err := s.primary.Find(ctx, step)
+	if err == nil {
+		return rc, nil
+	}
+	return s.secondary.Find(ctx, step)
+}
+
+func (s *combined) Create(ctx context.Context, step int64, r io.Reader) error {
+	return s.primary.Create(ctx, step, r)
+}
+
+func (s *combined) Update(ctx context.Context, step int64, r io.Reader) error {
+	return s.primary.Update(ctx, step, r)
+}
+
+func (s *combined) Delete(ctx context.Context, step int64) error {
+	err := s.primary.Delete(ctx, step)
+	if err != nil {
+		err = s.secondary.Delete(ctx, step)
+	}
+	return err
+}
diff --git a/internal/store/logs/db.go b/internal/store/logs/db.go
new file mode 100644
index 0000000000..a8e193bf7b
--- /dev/null
+++ b/internal/store/logs/db.go
@@ -0,0 +1,141 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package logs
+
+import (
+	"bytes"
+	"context"
+	"fmt"
+	"io"
+
+	"github.com/harness/gitness/internal/store"
+	"github.com/harness/gitness/store/database"
+	"github.com/harness/gitness/store/database/dbtx"
+
+	"github.com/jmoiron/sqlx"
+)
+
+var _ store.LogStore = (*logStore)(nil)
+
+// logs is not used outside of this package.
+type logs struct {
+	ID   int64  `db:"log_id"`
+	Data []byte `db:"log_data"`
+}
+
+// NewDatabaseLogStore returns a new LogStore that is backed by the database.
+func NewDatabaseLogStore(db *sqlx.DB) *logStore {
+	return &logStore{
+		db: db,
+	}
+}
+
+type logStore struct {
+	db *sqlx.DB
+}
+
+// Find returns a log given a log ID.
+func (s *logStore) Find(ctx context.Context, stepID int64) (io.ReadCloser, error) {
+	const findQueryStmt = `
+		SELECT
+			log_id, log_data
+		FROM logs
+		WHERE log_id = $1`
+	db := dbtx.GetAccessor(ctx, s.db)
+
+	var err error
+	dst := new(logs)
+	if err = db.GetContext(ctx, dst, findQueryStmt, stepID); err != nil {
+		return nil, database.ProcessSQLErrorf(err, "Failed to find log")
+	}
+	return io.NopCloser(
+		bytes.NewBuffer(dst.Data),
+	), err
+}
+
+// Create creates a log.
+func (s *logStore) Create(ctx context.Context, stepID int64, r io.Reader) error {
+	const logInsertStmt = `
+		INSERT INTO logs (
+			log_id
+			,log_data
+		) values (
+			:log_id
+			,:log_data
+		)`
+	data, err := io.ReadAll(r)
+	if err != nil {
+		return fmt.Errorf("could not read log data: %w", err)
+	}
+	params := &logs{
+		ID:   stepID,
+		Data: data,
+	}
+
+	db := dbtx.GetAccessor(ctx, s.db)
+
+	query, arg, err := db.BindNamed(logInsertStmt, params)
+	if err != nil {
+		return database.ProcessSQLErrorf(err, "Failed to bind log object")
+	}
+
+	if _, err := db.ExecContext(ctx, query, arg...); err != nil {
+		return database.ProcessSQLErrorf(err, "log query failed")
+	}
+
+	return nil
+}
+
+// Update overwrites the existing log data.
+func (s *logStore) Update(ctx context.Context, stepID int64, r io.Reader) error {
+	const logUpdateStmt = `
+		UPDATE logs
+		SET
+			log_data = :log_data
+		WHERE log_id = :log_id`
+	data, err := io.ReadAll(r)
+	if err != nil {
+		return fmt.Errorf("could not read log data: %w", err)
+	}
+
+	db := dbtx.GetAccessor(ctx, s.db)
+
+	query, arg, err := db.BindNamed(logUpdateStmt, &logs{ID: stepID, Data: data})
+	if err != nil {
+		return database.ProcessSQLErrorf(err, "Failed to bind log object")
+	}
+
+	_, err = db.ExecContext(ctx, query, arg...)
+	if err != nil {
+		return database.ProcessSQLErrorf(err, "Failed to update log")
+	}
+
+	return nil
+}
+
+// Delete deletes a log given a log ID.
+func (s *logStore) Delete(ctx context.Context, stepID int64) error {
+	const logDeleteStmt = `
+		DELETE FROM logs
+		WHERE log_id = $1`
+
+	db := dbtx.GetAccessor(ctx, s.db)
+
+	if _, err := db.ExecContext(ctx, logDeleteStmt, stepID); err != nil {
+		return database.ProcessSQLErrorf(err, "Could not delete log")
+	}
+
+	return nil
+}
diff --git a/internal/store/logs/s3.go b/internal/store/logs/s3.go
new file mode 100644
index 0000000000..70be620676
--- /dev/null
+++ b/internal/store/logs/s3.go
@@ -0,0 +1,98 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package logs
+
+import (
+	"context"
+	"fmt"
+	"io"
+	"path"
+	"strings"
+
+	"github.com/harness/gitness/internal/store"
+
+	"github.com/aws/aws-sdk-go/aws"
+	"github.com/aws/aws-sdk-go/aws/session"
+	"github.com/aws/aws-sdk-go/service/s3"
+	"github.com/aws/aws-sdk-go/service/s3/s3manager"
+)
+
+// NewS3LogStore returns a new S3-backed log store.
+func NewS3LogStore(bucket, prefix, endpoint string, pathStyle bool) store.LogStore {
+	disableSSL := false
+
+	if endpoint != "" {
+		disableSSL = !strings.HasPrefix(endpoint, "https://")
+	}
+
+	return &s3store{
+		bucket: bucket,
+		prefix: prefix,
+		session: session.Must(
+			session.NewSession(&aws.Config{
+				Endpoint:         aws.String(endpoint),
+				DisableSSL:       aws.Bool(disableSSL),
+				S3ForcePathStyle: aws.Bool(pathStyle),
+			}),
+		),
+	}
+}
+
+type s3store struct {
+	bucket  string
+	prefix  string
+	session *session.Session
+}
+
+func (s *s3store) Find(ctx context.Context, step int64) (io.ReadCloser, error) {
+	svc := s3.New(s.session)
+	out, err := svc.GetObject(&s3.GetObjectInput{
+		Bucket: aws.String(s.bucket),
+		Key:    aws.String(s.key(step)),
+	})
+	if err != nil {
+		return nil, err
+	}
+	return out.Body, nil
+}
+
+func (s *s3store) Create(ctx context.Context, step int64, r io.Reader) error {
+	uploader := s3manager.NewUploader(s.session)
+	input := &s3manager.UploadInput{
+		ACL:    aws.String("private"),
+		Bucket: aws.String(s.bucket),
+		Key:    aws.String(s.key(step)),
+		Body:   r,
+	}
+	_, err := uploader.Upload(input)
+	return err
+}
+
+func (s *s3store) Update(ctx context.Context, step int64, r io.Reader) error {
+	return s.Create(ctx, step, r)
+}
+
+func (s *s3store) Delete(ctx context.Context, step int64) error {
+	svc := s3.New(s.session)
+	_, err := svc.DeleteObject(&s3.DeleteObjectInput{
+		Bucket: aws.String(s.bucket),
+		Key:    aws.String(s.key(step)),
+	})
+	return err
+}
+
+func (s *s3store) key(step int64) string {
+	return path.Join("/", s.prefix, fmt.Sprint(step))
+}
diff --git a/internal/store/logs/wire.go b/internal/store/logs/wire.go
new file mode 100644
index 0000000000..d0b7fe0fa8
--- /dev/null
+++ b/internal/store/logs/wire.go
@@ -0,0 +1,42 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package logs
+
+import (
+	"github.com/harness/gitness/internal/store"
+	"github.com/harness/gitness/types"
+
+	"github.com/google/wire"
+	"github.com/jmoiron/sqlx"
+)
+
+// WireSet provides a wire set for this package.
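+// It currently only exposes ProvideLogStore, which selects the log storage
+// backend (database only, or database combined with S3) from the configuration below.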
+var WireSet = wire.NewSet( + ProvideLogStore, +) + +func ProvideLogStore(db *sqlx.DB, config *types.Config) store.LogStore { + s := NewDatabaseLogStore(db) + if config.Logs.S3.Bucket != "" { + p := NewS3LogStore( + config.Logs.S3.Bucket, + config.Logs.S3.Prefix, + config.Logs.S3.Endpoint, + config.Logs.S3.PathStyle, + ) + return NewCombined(p, s) + } + return s +} diff --git a/internal/store/store_test.go b/internal/store/store_test.go new file mode 100644 index 0000000000..fb2b0bc34c --- /dev/null +++ b/internal/store/store_test.go @@ -0,0 +1,15 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package store diff --git a/internal/store/transformation.go b/internal/store/transformation.go new file mode 100644 index 0000000000..836a0b88c1 --- /dev/null +++ b/internal/store/transformation.go @@ -0,0 +1,35 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package store + +import ( + "strings" +) + +// PrincipalUIDTransformation transforms a principalUID to a value that should be duplicate free. +// This allows us to simply switch between principalUIDs being case sensitive, insensitive or anything in between. +type PrincipalUIDTransformation func(uid string) (string, error) + +func ToLowerPrincipalUIDTransformation(uid string) (string, error) { + return strings.ToLower(uid), nil +} + +// SpacePathTransformation transforms a path to a value that should be duplicate free. +// This allows us to simply switch between paths being case sensitive, insensitive or anything in between. +type SpacePathTransformation func(original string, isRoot bool) string + +func ToLowerSpacePathTransformation(original string, _ bool) string { + return strings.ToLower(original) +} diff --git a/internal/store/wire.go b/internal/store/wire.go new file mode 100644 index 0000000000..0a4c0fa215 --- /dev/null +++ b/internal/store/wire.go @@ -0,0 +1,33 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package store + +import ( + "github.com/google/wire" +) + +// WireSet provides a wire set for this package. +var WireSet = wire.NewSet( + ProvidePathTransformation, + ProvidePrincipalUIDTransformation, +) + +func ProvidePathTransformation() SpacePathTransformation { + return ToLowerSpacePathTransformation +} + +func ProvidePrincipalUIDTransformation() PrincipalUIDTransformation { + return ToLowerPrincipalUIDTransformation +} diff --git a/internal/testing/integration/integration.go b/internal/testing/integration/integration.go new file mode 100644 index 0000000000..36471e618b --- /dev/null +++ b/internal/testing/integration/integration.go @@ -0,0 +1,15 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package integration diff --git a/internal/testing/testing.go b/internal/testing/testing.go new file mode 100644 index 0000000000..3485fd3f75 --- /dev/null +++ b/internal/testing/testing.go @@ -0,0 +1,15 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package testing diff --git a/internal/token/token.go b/internal/token/token.go new file mode 100644 index 0000000000..bd644ba21a --- /dev/null +++ b/internal/token/token.go @@ -0,0 +1,130 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package token + +import ( + "context" + "fmt" + "time" + + "github.com/harness/gitness/internal/jwt" + "github.com/harness/gitness/internal/store" + "github.com/harness/gitness/types" + "github.com/harness/gitness/types/enum" + + "github.com/gotidy/ptr" +) + +const ( + // userSessionTokenLifeTime is the duration a login / register token is valid. + // NOTE: Users can list / delete session tokens via rest API if they want to cleanup earlier. + userSessionTokenLifeTime time.Duration = 30 * 24 * time.Hour // 30 days. 
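+
+	// NOTE: PAT / SAT tokens are not bound to this constant; their lifetime is
+	// supplied by the caller (see CreatePAT and CreateSAT below).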
+) + +func CreateUserSession( + ctx context.Context, + tokenStore store.TokenStore, + user *types.User, + uid string, +) (*types.Token, string, error) { + principal := user.ToPrincipal() + return create( + ctx, + tokenStore, + enum.TokenTypeSession, + principal, + principal, + uid, + ptr.Duration(userSessionTokenLifeTime), + ) +} + +func CreatePAT( + ctx context.Context, + tokenStore store.TokenStore, + createdBy *types.Principal, + createdFor *types.User, + uid string, + lifetime *time.Duration, +) (*types.Token, string, error) { + return create( + ctx, + tokenStore, + enum.TokenTypePAT, + createdBy, + createdFor.ToPrincipal(), + uid, + lifetime, + ) +} + +func CreateSAT( + ctx context.Context, + tokenStore store.TokenStore, + createdBy *types.Principal, + createdFor *types.ServiceAccount, + uid string, + lifetime *time.Duration, +) (*types.Token, string, error) { + return create( + ctx, + tokenStore, + enum.TokenTypeSAT, + createdBy, + createdFor.ToPrincipal(), + uid, + lifetime, + ) +} + +func create( + ctx context.Context, + tokenStore store.TokenStore, + tokenType enum.TokenType, + createdBy *types.Principal, + createdFor *types.Principal, + uid string, + lifetime *time.Duration, +) (*types.Token, string, error) { + issuedAt := time.Now() + + var expiresAt *int64 + if lifetime != nil { + expiresAt = ptr.Int64(issuedAt.Add(*lifetime).UnixMilli()) + } + + // create db entry first so we get the id. + token := types.Token{ + Type: tokenType, + UID: uid, + PrincipalID: createdFor.ID, + IssuedAt: issuedAt.UnixMilli(), + ExpiresAt: expiresAt, + CreatedBy: createdBy.ID, + } + + err := tokenStore.Create(ctx, &token) + if err != nil { + return nil, "", fmt.Errorf("failed to store token in db: %w", err) + } + + // create jwt token. + jwtToken, err := jwt.GenerateForToken(&token, createdFor.Salt) + if err != nil { + return nil, "", fmt.Errorf("failed to create jwt token: %w", err) + } + + return &token, jwtToken, nil +} diff --git a/internal/url/provider.go b/internal/url/provider.go new file mode 100644 index 0000000000..c5381a7bdf --- /dev/null +++ b/internal/url/provider.go @@ -0,0 +1,117 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package url + +import ( + "fmt" + "net/url" + "path" + "strings" +) + +// Provider provides the URLs of the gitness system. +type Provider struct { + // apiURLRaw stores the raw URL the api endpoints are reachable at publicly. + // NOTE: url is guaranteed to not have any trailing '/'. + apiURLRaw string + + // apiURLInternalRaw stores the raw URL the api endpoints are reachable at internally + // (no need for internal services to go via public route). + // NOTE: url is guaranteed to not have any trailing '/'. + apiURLInternalRaw string + + // ciURL stores the rawURL that can be used to communicate with gitness from inside a + // container. + ciURL *url.URL + + // gitURL stores the URL the git endpoints are available at. + // NOTE: we store it as url.URL so we can derive clone URLS without errors. 
+	gitURL *url.URL
+
+	// harnessCodeApiUrl stores the URL for communicating with SaaS harness code.
+	harnessCodeApiUrl *url.URL
+}
+
+// NewProvider creates a new Provider from the given raw URLs.
+func NewProvider(apiURLRaw string, apiURLInternalRaw, gitURLRaw, ciURLRaw string, harnessCodeApiUrlRaw string) (*Provider, error) {
+	// remove trailing '/' to make usage easier
+	apiURLRaw = strings.TrimRight(apiURLRaw, "/")
+	apiURLInternalRaw = strings.TrimRight(apiURLInternalRaw, "/")
+	gitURLRaw = strings.TrimRight(gitURLRaw, "/")
+	ciURLRaw = strings.TrimRight(ciURLRaw, "/")
+
+	// parse gitURL
+	gitURL, err := url.Parse(gitURLRaw)
+	if err != nil {
+		return nil, fmt.Errorf("provided gitURLRaw '%s' is invalid: %w", gitURLRaw, err)
+	}
+
+	// parse ciURL
+	ciURL, err := url.Parse(ciURLRaw)
+	if err != nil {
+		return nil, fmt.Errorf("provided ciURLRaw '%s' is invalid: %w", ciURLRaw, err)
+	}
+
+	harnessCodeApiUrlRaw = strings.TrimRight(harnessCodeApiUrlRaw, "/")
+	harnessCodeApiUrl, err := url.Parse(harnessCodeApiUrlRaw)
+	if err != nil {
+		return nil, fmt.Errorf("provided harnessCodeApiUrlRaw '%s' is invalid: %w", harnessCodeApiUrlRaw, err)
+	}
+
+	return &Provider{
+		apiURLRaw:         apiURLRaw,
+		apiURLInternalRaw: apiURLInternalRaw,
+		gitURL:            gitURL,
+		ciURL:             ciURL,
+		harnessCodeApiUrl: harnessCodeApiUrl,
+	}, nil
+}
+
+// GetAPIBaseURL returns the publicly reachable base url of the api server.
+// NOTE: url is guaranteed to not have any trailing '/'.
+func (p *Provider) GetAPIBaseURL() string {
+	return p.apiURLRaw
+}
+
+// GetAPIBaseURLInternal returns the internally reachable base url of the api server.
+// NOTE: url is guaranteed to not have any trailing '/'.
+func (p *Provider) GetAPIBaseURLInternal() string {
+	return p.apiURLInternalRaw
+}
+
+// GenerateRepoCloneURL generates the public git clone URL for the provided repo path.
+// NOTE: url is guaranteed to not have any trailing '/'.
+func (p *Provider) GenerateRepoCloneURL(repoPath string) string {
+	repoPath = path.Clean(repoPath)
+	if !strings.HasSuffix(repoPath, ".git") {
+		repoPath += ".git"
+	}
+
+	return p.gitURL.JoinPath(repoPath).String()
+}
+
+// GenerateCICloneURL generates a URL that can be used by CI container builds to
+// interact with gitness and clone a repo.
+func (p *Provider) GenerateCICloneURL(repoPath string) string {
+	repoPath = path.Clean(repoPath)
+	if !strings.HasSuffix(repoPath, ".git") {
+		repoPath += ".git"
+	}
+
+	return p.ciURL.JoinPath(repoPath).String()
+}
+
+// GetHarnessCodeInternalUrl returns the internal URL of the SaaS harness code API.
+func (p *Provider) GetHarnessCodeInternalUrl() string {
+	return p.harnessCodeApiUrl.String()
+}
diff --git a/internal/url/wire.go b/internal/url/wire.go
new file mode 100644
index 0000000000..af1ccbfc8e
--- /dev/null
+++ b/internal/url/wire.go
@@ -0,0 +1,35 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package url
+
+import (
+	"github.com/google/wire"
+	"github.com/harness/gitness/types"
+)
+
+// WireSet provides a wire set for this package.
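+// It exposes ProvideURLProvider, which builds the Provider from the URL values
+// in the service configuration plus the fixed Harness Code API URL below.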
+var WireSet = wire.NewSet(ProvideURLProvider)
+
+const harnessCodeAPIURLRaw = "https://app.harness.io/gateway/code/api/"
+
+func ProvideURLProvider(config *types.Config) (*Provider, error) {
+	return NewProvider(
+		config.URL.API,
+		config.URL.APIInternal,
+		config.URL.Git,
+		config.URL.CIURL,
+		harnessCodeAPIURLRaw,
+	)
+}
diff --git a/internal/writer/writeflush.go b/internal/writer/writeflush.go
new file mode 100644
index 0000000000..c31c88b1a3
--- /dev/null
+++ b/internal/writer/writeflush.go
@@ -0,0 +1,47 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package writer
+
+import "io"
+
+type Flusher interface {
+	Flush()
+}
+
+type writeWithFlusher struct {
+	writer  io.Writer
+	flusher Flusher
+}
+
+type WriterFlusher interface {
+	io.Writer
+	Flusher
+}
+
+func NewWriterFlusher(writer io.Writer, flusher Flusher) WriterFlusher {
+	return &writeWithFlusher{
+		writer:  writer,
+		flusher: flusher,
+	}
+}
+
+func (w *writeWithFlusher) Write(p []byte) (int, error) {
+	n, err := w.writer.Write(p)
+	return n, err
+}
+
+func (w *writeWithFlusher) Flush() {
+	w.flusher.Flush()
+}
diff --git a/livelog/livelog.go b/livelog/livelog.go
new file mode 100644
index 0000000000..623db204cf
--- /dev/null
+++ b/livelog/livelog.go
@@ -0,0 +1,52 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package livelog
+
+import "context"
+
+// Line represents a line in the logs.
+type Line struct {
+	Number    int    `json:"pos"`
+	Message   string `json:"out"`
+	Timestamp int64  `json:"time"`
+}
+
+// LogStreamInfo provides internal stream information. This can
+// be used to monitor the number of registered streams and
+// subscribers.
+type LogStreamInfo struct {
+	// Streams is a key-value pair where the key is the step
+	// identifier, and the value is the count of subscribers
+	// streaming the logs.
+	Streams map[int64]int `json:"streams"`
+}
+
+// LogStream manages a live stream of logs.
+type LogStream interface {
+	// Create creates the log stream for the step ID.
+	Create(context.Context, int64) error
+
+	// Delete deletes the log stream for the step ID.
+	Delete(context.Context, int64) error
+
+	// Write writes to the log stream.
+	Write(context.Context, int64, *Line) error
+
+	// Tail tails the log stream.
+	Tail(context.Context, int64) (<-chan *Line, <-chan error)
+
+	// Info returns internal stream information.
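+	// The snapshot maps each step ID to the number of subscribers currently tailing it.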
+ Info(context.Context) *LogStreamInfo +} diff --git a/livelog/memory.go b/livelog/memory.go new file mode 100644 index 0000000000..670bbe197c --- /dev/null +++ b/livelog/memory.go @@ -0,0 +1,92 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package livelog + +import ( + "context" + "errors" + "sync" +) + +// error returned when a stream is not registered with +// the streamer. +var errStreamNotFound = errors.New("stream: not found") + +type streamer struct { + sync.Mutex + + streams map[int64]*stream +} + +// New returns a new in-memory log streamer. +func NewMemory() LogStream { + return &streamer{ + streams: make(map[int64]*stream), + } +} + +func (s *streamer) Create(ctx context.Context, id int64) error { + s.Lock() + s.streams[id] = newStream() + s.Unlock() + return nil +} + +func (s *streamer) Delete(ctx context.Context, id int64) error { + s.Lock() + stream, ok := s.streams[id] + if ok { + delete(s.streams, id) + } + s.Unlock() + if !ok { + return errStreamNotFound + } + return stream.close() +} + +func (s *streamer) Write(ctx context.Context, id int64, line *Line) error { + s.Lock() + stream, ok := s.streams[id] + s.Unlock() + if !ok { + return errStreamNotFound + } + return stream.write(line) +} + +func (s *streamer) Tail(ctx context.Context, id int64) (<-chan *Line, <-chan error) { + s.Lock() + stream, ok := s.streams[id] + s.Unlock() + if !ok { + return nil, nil + } + return stream.subscribe(ctx) +} + +func (s *streamer) Info(ctx context.Context) *LogStreamInfo { + s.Lock() + defer s.Unlock() + info := &LogStreamInfo{ + Streams: map[int64]int{}, + } + for id, stream := range s.streams { + stream.Lock() + info.Streams[id] = len(stream.list) + stream.Unlock() + } + return info +} diff --git a/livelog/stream.go b/livelog/stream.go new file mode 100644 index 0000000000..53a8e5e0fb --- /dev/null +++ b/livelog/stream.go @@ -0,0 +1,90 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package livelog + +import ( + "context" + "sync" +) + +// this is the amount of items that are stored in memory +// in the buffer. This should result in approximately 10kb +// of memory allocated per-stream and per-subscriber, not +// including any logdata stored in these structures. 
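+// The same constant bounds both the per-stream history and each subscriber's
+// buffered channel, so a slow consumer drops lines instead of blocking writers.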
+const bufferSize = 5000 + +type stream struct { + sync.Mutex + + hist []*Line + list map[*subscriber]struct{} +} + +func newStream() *stream { + return &stream{ + list: map[*subscriber]struct{}{}, + } +} + +func (s *stream) write(line *Line) error { + s.Lock() + s.hist = append(s.hist, line) + for l := range s.list { + l.publish(line) + } + // the history should not be unbounded. The history + // slice is capped and items are removed in a FIFO + // ordering when capacity is reached. + if size := len(s.hist); size >= bufferSize { + s.hist = s.hist[size-bufferSize:] + } + s.Unlock() + return nil +} + +func (s *stream) subscribe(ctx context.Context) (<-chan *Line, <-chan error) { + sub := &subscriber{ + handler: make(chan *Line, bufferSize), + closec: make(chan struct{}), + } + err := make(chan error) + + s.Lock() + for _, line := range s.hist { + sub.publish(line) + } + s.list[sub] = struct{}{} + s.Unlock() + + go func() { + defer close(err) + select { + case <-sub.closec: + case <-ctx.Done(): + sub.close() + } + }() + return sub.handler, err +} + +func (s *stream) close() error { + s.Lock() + defer s.Unlock() + for sub := range s.list { + delete(s.list, sub) + sub.close() + } + return nil +} diff --git a/livelog/sub.go b/livelog/sub.go new file mode 100644 index 0000000000..42f32cb0cd --- /dev/null +++ b/livelog/sub.go @@ -0,0 +1,48 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package livelog + +import ( + "sync" +) + +type subscriber struct { + sync.Mutex + + handler chan *Line + closec chan struct{} + closed bool +} + +func (s *subscriber) publish(line *Line) { + select { + case <-s.closec: + case s.handler <- line: + default: + // lines are sent on a buffered channel. If there + // is a slow consumer that is not processing events, + // the buffered channel will fill and newer messages + // are ignored. + } +} + +func (s *subscriber) close() { + s.Lock() + if !s.closed { + close(s.closec) + s.closed = true + } + s.Unlock() +} diff --git a/livelog/wire.go b/livelog/wire.go new file mode 100644 index 0000000000..4595335f18 --- /dev/null +++ b/livelog/wire.go @@ -0,0 +1,31 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package livelog + +import ( + "github.com/harness/gitness/types" + + "github.com/google/wire" +) + +// WireSet provides a wire set for this package. +var WireSet = wire.NewSet( + ProvideLogStream, +) + +// ProvideLogStream provides an implementation of a logs streamer. 
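+// The config parameter is currently unused; an in-memory streamer is always returned.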
+func ProvideLogStream(config *types.Config) LogStream { + return NewMemory() +} diff --git a/lock/config.go b/lock/config.go new file mode 100644 index 0000000000..725304840b --- /dev/null +++ b/lock/config.go @@ -0,0 +1,44 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package lock + +import "time" + +type Provider string + +const ( + MemoryProvider Provider = "inmemory" + RedisProvider Provider = "redis" +) + +// A DelayFunc is used to decide the amount of time to wait between retries. +type DelayFunc func(tries int) time.Duration + +type Config struct { + App string // app namespace prefix + Namespace string + Provider Provider + Expiry time.Duration + + Tries int + RetryDelay time.Duration + DelayFunc DelayFunc + + DriftFactor float64 + TimeoutFactor float64 + + GenValueFunc func() (string, error) + Value string +} diff --git a/lock/lock.go b/lock/lock.go new file mode 100644 index 0000000000..1a1ebc8020 --- /dev/null +++ b/lock/lock.go @@ -0,0 +1,75 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package lock + +import ( + "context" + "fmt" +) + +// KindError enum displays human readable message +// in error. +type KindError string + +const ( + LockHeld KindError = "lock already held" + LockNotHeld KindError = "lock not held" + ProviderError KindError = "lock provider error" + CannotLock KindError = "timeout while trying to acquire lock" + Context KindError = "context error while trying to acquire lock" + MaxRetriesExceeded KindError = "max retries exceeded to acquire lock" + GenerateTokenFailed KindError = "token generation failed" +) + +// Error is custom unique type for all type of errors. +type Error struct { + Kind KindError + Key string + Err error +} + +func NewError(kind KindError, key string, err error) *Error { + return &Error{ + Kind: kind, + Key: key, + Err: err, + } +} + +// Error implements error interface. +func (e Error) Error() string { + if e.Err != nil { + return fmt.Sprintf("%s on key %s with err: %v", e.Kind, e.Key, e.Err) + } + return fmt.Sprintf("%s on key %s", e.Kind, e.Key) +} + +// MutexManager describes a Distributed Lock Manager. +type MutexManager interface { + // NewMutex creates a mutex for the given key. The returned mutex is not held + // and must be acquired with a call to .Lock. + NewMutex(key string, options ...Option) (Mutex, error) +} + +type Mutex interface { + // Key returns the key to be locked. + Key() string + + // Lock acquires the lock. 
It fails with error if the lock is already held. + Lock(ctx context.Context) error + + // Unlock releases the lock. It fails with error if the lock is not currently held. + Unlock(ctx context.Context) error +} diff --git a/lock/memory.go b/lock/memory.go new file mode 100644 index 0000000000..54c0ae4541 --- /dev/null +++ b/lock/memory.go @@ -0,0 +1,226 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package lock + +import ( + "context" + "crypto/rand" + "encoding/base64" + "sync" + "time" +) + +// InMemory is a local implementation of a MutexManager that it's intended to be used during development. +type InMemory struct { + config Config // force value copy + mutex sync.Mutex + keys map[string]inMemEntry +} + +// NewInMemory creates a new InMemory instance only used for development. +func NewInMemory(config Config) *InMemory { + keys := make(map[string]inMemEntry) + + return &InMemory{ + config: config, + keys: keys, + } +} + +// NewMutex creates a mutex for the given key. The returned mutex is not held +// and must be acquired with a call to .Lock. +func (m *InMemory) NewMutex(key string, options ...Option) (Mutex, error) { + var ( + token string + err error + ) + + // copy default values + config := m.config + + // set default delayFunc + if config.DelayFunc == nil { + config.DelayFunc = func(i int) time.Duration { + return config.RetryDelay + } + } + + // override config with custom options + for _, opt := range options { + opt.Apply(&config) + } + + // format key + key = formatKey(config.App, config.Namespace, key) + + switch { + case config.Value != "": + token = config.Value + case config.GenValueFunc != nil: + token, err = config.GenValueFunc() + default: + token, err = randstr(32) + } + if err != nil { + return nil, NewError(GenerateTokenFailed, key, nil) + } + + // waitTime logic is similar to redis implementation: + // https://github.com/go-redsync/redsync/blob/e1e5da6654c81a2069d6a360f1a31c21f05cd22d/mutex.go#LL81C4-L81C100 + waitTime := config.Expiry + if config.TimeoutFactor > 0 { + waitTime = time.Duration(int64(float64(config.Expiry) * config.TimeoutFactor)) + } + + lock := inMemMutex{ + expiry: config.Expiry, + waitTime: waitTime, + tries: config.Tries, + delayFunc: config.DelayFunc, + provider: m, + key: key, + token: token, + } + + return &lock, nil +} + +func (m *InMemory) acquire(key, token string, ttl time.Duration) bool { + m.mutex.Lock() + defer m.mutex.Unlock() + + now := time.Now() + + entry, ok := m.keys[key] + if ok && entry.validUntil.After(now) { + return false + } + + m.keys[key] = inMemEntry{token, now.Add(ttl)} + + return true +} + +func (m *InMemory) release(key, token string) bool { + m.mutex.Lock() + defer m.mutex.Unlock() + + entry, ok := m.keys[key] + if !ok || entry.token != token { + return false + } + + delete(m.keys, key) + + return true +} + +type inMemEntry struct { + token string + validUntil time.Time +} + +type inMemMutex struct { + mutex sync.Mutex // Used while manipulating the internal state of 
the lock itself + + provider *InMemory + + expiry time.Duration + waitTime time.Duration + + tries int + delayFunc DelayFunc + + key string + token string // A random string used to safely release the lock + isHeld bool +} + +// Key returns the key to be locked. +func (m *inMemMutex) Key() string { + return m.key +} + +// Lock acquires the lock. It fails with error if the lock is already held. +func (m *inMemMutex) Lock(ctx context.Context) error { + m.mutex.Lock() + defer m.mutex.Unlock() + + if m.isHeld { + return NewError(LockHeld, m.key, nil) + } + + if m.provider.acquire(m.key, m.token, m.expiry) { + m.isHeld = true + return nil + } + + timeout := time.NewTimer(m.waitTime) + defer timeout.Stop() + + for i := 1; !m.isHeld && i <= m.tries; i++ { + if err := m.retry(ctx, i, timeout); err != nil { + return err + } + } + return nil +} + +func (m *inMemMutex) retry(ctx context.Context, attempt int, timeout *time.Timer) error { + if m.isHeld { + return nil + } + if attempt == m.tries { + return NewError(MaxRetriesExceeded, m.key, nil) + } + + delay := time.NewTimer(m.delayFunc(attempt)) + defer delay.Stop() + + select { + case <-ctx.Done(): + return NewError(Context, m.key, ctx.Err()) + case <-timeout.C: + return NewError(CannotLock, m.key, nil) + case <-delay.C: // just wait + } + + if m.provider.acquire(m.key, m.token, m.expiry) { + m.isHeld = true + } + return nil +} + +// Unlock releases the lock. It fails with error if the lock is not currently held. +func (m *inMemMutex) Unlock(_ context.Context) error { + m.mutex.Lock() + defer m.mutex.Unlock() + + if !m.isHeld || !m.provider.release(m.key, m.token) { + return NewError(LockNotHeld, m.key, nil) + } + + m.isHeld = false + return nil +} + +func randstr(size int) (string, error) { + buffer := make([]byte, size) + if _, err := rand.Read(buffer); err != nil { + return "", err + } + + return base64.URLEncoding.EncodeToString(buffer), nil +} diff --git a/lock/memory_test.go b/lock/memory_test.go new file mode 100644 index 0000000000..9e58dd73a9 --- /dev/null +++ b/lock/memory_test.go @@ -0,0 +1,143 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package lock + +import ( + "context" + "errors" + "sync" + "testing" + "time" +) + +func Test_inMemMutex_Lock(t *testing.T) { + manager := NewInMemory(Config{ + App: "gitness", + Namespace: "pullreq", + Expiry: 3 * time.Second, + Tries: 10, + RetryDelay: 300 * time.Millisecond, + }) + wg := sync.WaitGroup{} + wg.Add(1) + go func() { + defer wg.Done() + time.Sleep(500 * time.Millisecond) + mx, err := manager.NewMutex("key1") + if err != nil { + t.Errorf("mutex not created, err: %v", err) + return + } + if err := mx.Lock(context.Background()); err != nil { + t.Errorf("error from go routine while locking %s, err: %v", mx.Key(), err) + return + } + mx.Unlock(context.Background()) + }() + + mx, err := manager.NewMutex("key1") + if err != nil { + t.Errorf("mutex not created, err: %v", err) + return + } + if err := mx.Lock(context.Background()); err != nil { + t.Errorf("error while locking %v, err: %v", mx.Key(), err) + } + time.Sleep(1 * time.Second) + mx.Unlock(context.Background()) + wg.Wait() +} + +func Test_inMemMutex_MaxTries(t *testing.T) { + manager := NewInMemory(Config{ + App: "gitness", + Namespace: "pullreq", + Expiry: 1 * time.Second, + Tries: 2, + RetryDelay: 300 * time.Millisecond, + }) + wg := sync.WaitGroup{} + wg.Add(1) + go func() { + defer wg.Done() + time.Sleep(500 * time.Millisecond) + mx, err := manager.NewMutex("key1") + if err != nil { + t.Errorf("mutex not created, err: %v", err) + return + } + + err = mx.Lock(context.Background()) + if err == nil { + t.Errorf("error should be returned while locking %s instead of nil", mx.Key()) + return + } + var errLock *Error + if !errors.As(err, &errLock) { + t.Errorf("expected error lock.Error, got: %v", err) + return + } + if errLock.Kind != MaxRetriesExceeded { + t.Errorf("expected lock.MaxRetriesExceeded, got: %v", err) + return + } + }() + + mx, err := manager.NewMutex("key1") + if err != nil { + t.Errorf("mutex not created, err: %v", err) + return + } + if err := mx.Lock(context.Background()); err != nil { + t.Errorf("error while locking %v, err: %v", mx.Key(), err) + } + time.Sleep(1 * time.Second) + mx.Unlock(context.Background()) + wg.Wait() +} + +func Test_inMemMutex_LockAndWait(t *testing.T) { + wg := &sync.WaitGroup{} + manager := NewInMemory(Config{ + App: "gitness", + Namespace: "pullreq", + Expiry: 3 * time.Second, + Tries: 10, + RetryDelay: 300 * time.Millisecond, + }) + fn := func(n int) { + mx, err := manager.NewMutex("Key1") + if err != nil { + t.Errorf("mutex not created routine %d, err: %v", n, err) + return + } + defer func() { + if err := mx.Unlock(context.Background()); err != nil { + t.Errorf("failed to unlock %d", n) + } + wg.Done() + }() + if err := mx.Lock(context.Background()); err != nil { + t.Errorf("failed to lock %d", n) + } + time.Sleep(50 * time.Millisecond) + } + + wg.Add(3) + go fn(1) + go fn(2) + go fn(3) + wg.Wait() +} diff --git a/lock/options.go b/lock/options.go new file mode 100644 index 0000000000..0d8a9c45c6 --- /dev/null +++ b/lock/options.go @@ -0,0 +1,98 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package lock + +import ( + "time" +) + +// An Option configures a mutex. +type Option interface { + Apply(*Config) +} + +// OptionFunc is a function that configures a mutex. +type OptionFunc func(*Config) + +// Apply calls f(config). +func (f OptionFunc) Apply(config *Config) { + f(config) +} + +// WithNamespace returns an option that configures Mutex.ns. +func WithNamespace(ns string) Option { + return OptionFunc(func(m *Config) { + m.Namespace = ns + }) +} + +// WithExpiry can be used to set the expiry of a mutex to the given value. +func WithExpiry(expiry time.Duration) Option { + return OptionFunc(func(m *Config) { + m.Expiry = expiry + }) +} + +// WithTries can be used to set the number of times lock acquire is attempted. +func WithTries(tries int) Option { + return OptionFunc(func(m *Config) { + m.Tries = tries + }) +} + +// WithRetryDelay can be used to set the amount of time to wait between retries. +func WithRetryDelay(delay time.Duration) Option { + return OptionFunc(func(m *Config) { + m.DelayFunc = func(tries int) time.Duration { + return delay + } + }) +} + +// WithRetryDelayFunc can be used to override default delay behavior. +func WithRetryDelayFunc(delayFunc DelayFunc) Option { + return OptionFunc(func(m *Config) { + m.DelayFunc = delayFunc + }) +} + +// WithDriftFactor can be used to set the clock drift factor. +func WithDriftFactor(factor float64) Option { + return OptionFunc(func(m *Config) { + m.DriftFactor = factor + }) +} + +// WithTimeoutFactor can be used to set the timeout factor. +func WithTimeoutFactor(factor float64) Option { + return OptionFunc(func(m *Config) { + m.TimeoutFactor = factor + }) +} + +// WithGenValueFunc can be used to set the custom value generator. +func WithGenValueFunc(genValueFunc func() (string, error)) Option { + return OptionFunc(func(m *Config) { + m.GenValueFunc = genValueFunc + }) +} + +// WithValue can be used to assign the random value without having to call lock. +// This allows the ownership of a lock to be "transferred" and allows the lock to be unlocked from elsewhere. +func WithValue(v string) Option { + return OptionFunc(func(m *Config) { + m.Value = v + }) +} diff --git a/lock/redis.go b/lock/redis.go new file mode 100644 index 0000000000..9b4c91eff5 --- /dev/null +++ b/lock/redis.go @@ -0,0 +1,115 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package lock + +import ( + "context" + "errors" + + redislib "github.com/go-redis/redis/v8" + "github.com/go-redsync/redsync/v4" + "github.com/go-redsync/redsync/v4/redis/goredis/v8" +) + +// Redis wrapper for redsync. +type Redis struct { + config Config + rs *redsync.Redsync +} + +// NewRedis create an instance of redisync to be used to obtain a mutual exclusion +// lock. 
+func NewRedis(config Config, client redislib.UniversalClient) *Redis { + pool := goredis.NewPool(client) + return &Redis{ + config: config, + rs: redsync.New(pool), + } +} + +// Acquire new lock. +func (r *Redis) NewMutex(key string, options ...Option) (Mutex, error) { + // copy default values + config := r.config + // customize config + for _, opt := range options { + opt.Apply(&config) + } + + // convert to redis helper functions + args := make([]redsync.Option, 0, 8) + args = append(args, + redsync.WithExpiry(config.Expiry), + redsync.WithTimeoutFactor(config.TimeoutFactor), + redsync.WithTries(config.Tries), + redsync.WithRetryDelay(config.RetryDelay), + redsync.WithDriftFactor(config.DriftFactor), + ) + + if config.DelayFunc != nil { + args = append(args, redsync.WithRetryDelayFunc(redsync.DelayFunc(config.DelayFunc))) + } + + if config.GenValueFunc != nil { + args = append(args, redsync.WithGenValueFunc(config.GenValueFunc)) + } + + uniqKey := formatKey(config.App, config.Namespace, key) + mutex := r.rs.NewMutex(uniqKey, args...) + + return &RedisMutex{ + mutex: mutex, + }, nil +} + +type RedisMutex struct { + mutex *redsync.Mutex +} + +// Key returns the key to be locked. +func (l *RedisMutex) Key() string { + return l.mutex.Name() +} + +// Lock acquires the lock. It fails with error if the lock is already held. +func (l *RedisMutex) Lock(ctx context.Context) error { + err := l.mutex.LockContext(ctx) + if err != nil { + return translateRedisErr(err, l.Key()) + } + return nil +} + +// Unlock releases the lock. It fails with error if the lock is not currently held. +func (l *RedisMutex) Unlock(ctx context.Context) error { + _, err := l.mutex.UnlockContext(ctx) + if err != nil { + return translateRedisErr(err, l.Key()) + } + return nil +} + +func translateRedisErr(err error, key string) error { + var kind KindError + switch { + case errors.Is(err, redsync.ErrFailed): + kind = CannotLock + case errors.Is(err, redsync.ErrExtendFailed), errors.Is(err, &redsync.RedisError{}): + kind = ProviderError + case errors.Is(err, &redsync.ErrTaken{}), errors.Is(err, &redsync.ErrNodeTaken{}): + kind = LockHeld + } + return NewError(kind, key, err) +} diff --git a/lock/util.go b/lock/util.go new file mode 100644 index 0000000000..de82e69476 --- /dev/null +++ b/lock/util.go @@ -0,0 +1,31 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package lock + +import "strings" + +func formatKey(app, ns, key string) string { + return app + ":" + ns + ":" + key +} + +func SplitKey(uniqKey string) (namespace, key string) { + parts := strings.Split(uniqKey, ":") + key = uniqKey + if len(parts) > 2 { + namespace = parts[1] + key = parts[2] + } + return +} diff --git a/lock/wire.go b/lock/wire.go new file mode 100644 index 0000000000..7ed9ecabe6 --- /dev/null +++ b/lock/wire.go @@ -0,0 +1,34 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package lock + +import ( + "github.com/go-redis/redis/v8" + "github.com/google/wire" +) + +var WireSet = wire.NewSet( + ProvideMutexManager, +) + +func ProvideMutexManager(config Config, client redis.UniversalClient) MutexManager { + switch config.Provider { + case MemoryProvider: + return NewInMemory(config) + case RedisProvider: + return NewRedis(config, client) + } + return nil +} diff --git a/profiler/gcpprofiler.go b/profiler/gcpprofiler.go new file mode 100644 index 0000000000..3e218c9a86 --- /dev/null +++ b/profiler/gcpprofiler.go @@ -0,0 +1,35 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package profiler + +import ( + "cloud.google.com/go/profiler" + "github.com/rs/zerolog/log" +) + +type GCPProfiler struct { +} + +func (gcpProfiler *GCPProfiler) StartProfiling(serviceName, serviceVersion string) { + // Need to add env/namespace with service name to uniquely identify this + cfg := profiler.Config{ + Service: serviceName, + ServiceVersion: serviceVersion, + } + + if err := profiler.Start(cfg); err != nil { + log.Warn().Err(err).Msg("unable to start profiler") + } +} diff --git a/profiler/noopprofiler.go b/profiler/noopprofiler.go new file mode 100644 index 0000000000..772de21b4b --- /dev/null +++ b/profiler/noopprofiler.go @@ -0,0 +1,24 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package profiler + +import "github.com/rs/zerolog/log" + +type NoopProfiler struct { +} + +func (noopProfiler *NoopProfiler) StartProfiling(serviceName, serviceVersion string) { + log.Info().Msg("Not starting profiler") +} diff --git a/profiler/profiler.go b/profiler/profiler.go new file mode 100644 index 0000000000..7d36e5d6d7 --- /dev/null +++ b/profiler/profiler.go @@ -0,0 +1,48 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package profiler + +import ( + "fmt" + "strings" +) + +type Profiler interface { + StartProfiling(serviceName, serviceVersion string) +} + +type Type string + +const ( + TypeGCP Type = "gcp" +) + +func ParseType(profilerType string) (Type, bool) { + switch strings.ToLower(strings.TrimSpace(profilerType)) { + case string(TypeGCP): + return TypeGCP, true + default: + return "", false + } +} + +func New(profiler Type) (Profiler, error) { + switch profiler { + case TypeGCP: + return &GCPProfiler{}, nil + default: + return &NoopProfiler{}, fmt.Errorf("profiler '%s' not supported", profiler) + } +} diff --git a/profiler/profiler_test.go b/profiler/profiler_test.go new file mode 100644 index 0000000000..e79e1c6a77 --- /dev/null +++ b/profiler/profiler_test.go @@ -0,0 +1,52 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package profiler + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestParseType(t *testing.T) { + var tests = []struct { + raw string + expectedType Type + expectedOk bool + }{ + // basic invalid tests + {"", Type(""), false}, + {"a", Type(""), false}, + {"g cp", Type(""), false}, + + // ensure case insensitivity + {"gcp", TypeGCP, true}, + {"GCP", TypeGCP, true}, + + // ensure trim space works + {" gcp ", TypeGCP, true}, + {" GCP ", TypeGCP, true}, + + // testing all valid values + {"gcp", TypeGCP, true}, + } + + for i, test := range tests { + parsedType, ok := ParseType(test.raw) + + assert.Equal(t, test.expectedOk, ok, "test case %d with input '%s'", i, test.raw) + assert.Equal(t, test.expectedType, parsedType, "test case %d with input '%s'", i, test.raw) + } +} diff --git a/pubsub/config.go b/pubsub/config.go new file mode 100644 index 0000000000..d46c0df34d --- /dev/null +++ b/pubsub/config.go @@ -0,0 +1,35 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
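[Illustrative usage sketch, not part of this change: wiring the profiler factory above at service startup. The environment variable name and the service name/version are made-up placeholders.]

package main

import (
	"fmt"
	"os"

	"github.com/harness/gitness/profiler"
)

func main() {
	// hypothetical env var; ParseType accepts "gcp" in any casing, with surrounding whitespace
	typ, ok := profiler.ParseType(os.Getenv("GITNESS_PROFILER_TYPE"))
	if !ok {
		fmt.Println("no (or unknown) profiler configured, continuing without one")
	}

	p, err := profiler.New(typ)
	if err != nil {
		// New also returns a NoopProfiler alongside the error, so startup can continue
		fmt.Println("falling back to noop profiler:", err)
	}

	p.StartProfiling("gitness", "v0.0.0") // placeholder service name and version
}
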
+
+package pubsub
+
+import "time"
+
+type Provider string
+
+const (
+	ProviderMemory Provider = "inmemory"
+	ProviderRedis  Provider = "redis"
+)
+
+type Config struct {
+	app       string // app namespace prefix
+	namespace string
+
+	provider Provider
+
+	healthInterval time.Duration
+	sendTimeout    time.Duration
+	channelSize    int
+}
diff --git a/pubsub/inmem.go b/pubsub/inmem.go
new file mode 100644
index 0000000000..3cf7027933
--- /dev/null
+++ b/pubsub/inmem.go
@@ -0,0 +1,230 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package pubsub
+
+import (
+	"context"
+	"errors"
+	"sync"
+	"time"
+
+	"github.com/rs/zerolog/log"
+	"golang.org/x/exp/slices"
+)
+
+var (
+	ErrClosed = errors.New("pubsub: subscriber is closed")
+)
+
+type InMemory struct {
+	config   Config
+	mutex    sync.Mutex
+	registry []*inMemorySubscriber
+}
+
+// NewInMemory creates an instance of the in-memory pubsub implementation.
+func NewInMemory(options ...Option) *InMemory {
+	config := Config{
+		app:            "app",
+		namespace:      "default",
+		healthInterval: 3 * time.Second,
+		sendTimeout:    60 * time.Second, // a bare 60 would be 60ns and drop nearly every message
+		channelSize:    100,
+	}
+
+	for _, f := range options {
+		f.Apply(&config)
+	}
+	return &InMemory{
+		config:   config,
+		registry: make([]*inMemorySubscriber, 0, 16),
+	}
+}
+
+// Subscribe consumer to process the event with payload.
+func (r *InMemory) Subscribe(
+	ctx context.Context,
+	topic string,
+	handler func(payload []byte) error,
+	options ...SubscribeOption,
+) Consumer {
+	r.mutex.Lock()
+	defer r.mutex.Unlock()
+
+	config := SubscribeConfig{
+		topics:      make([]string, 0, 8),
+		app:         r.config.app,
+		namespace:   r.config.namespace,
+		sendTimeout: r.config.sendTimeout,
+		channelSize: r.config.channelSize,
+	}
+
+	for _, f := range options {
+		f.Apply(&config)
+	}
+
+	// create subscriber and map it to the registry
+	subscriber := &inMemorySubscriber{
+		config:  &config,
+		handler: handler,
+	}
+
+	config.topics = append(config.topics, topic)
+	subscriber.topics = subscriber.formatTopics(config.topics...)
+
+	// start subscriber
+	go subscriber.start(ctx)
+
+	// register subscriber
+	r.registry = append(r.registry, subscriber)
+
+	return subscriber
+}
+
+// Publish event to message broker with payload.
+func (r *InMemory) Publish(ctx context.Context, topic string, payload []byte, opts ...PublishOption) error {
+	if len(r.registry) == 0 {
+		log.Ctx(ctx).Warn().Msg("in pubsub Publish: no subscribers registered")
+		return nil
+	}
+	pubConfig := PublishConfig{
+		app:       r.config.app,
+		namespace: r.config.namespace,
+	}
+	for _, f := range opts {
+		f.Apply(&pubConfig)
+	}
+
+	topic = formatTopic(pubConfig.app, pubConfig.namespace, topic)
+	wg := sync.WaitGroup{}
+	for _, sub := range r.registry {
+		if slices.Contains(sub.topics, topic) && !sub.isClosed() {
+			wg.Add(1)
+			go func(subscriber *inMemorySubscriber) {
+				defer wg.Done()
+				// timer is based on subscriber data
+				t := time.NewTimer(subscriber.config.sendTimeout)
+				defer t.Stop()
+				select {
+				case <-ctx.Done():
+					return
+				case subscriber.channel <- payload:
+					log.Ctx(ctx).Trace().Msgf("in pubsub Publish: message %v sent to topic %s", string(payload), topic)
+				case <-t.C:
+					// channel is full for topic (message is dropped)
+					log.Ctx(ctx).Warn().Msgf("in pubsub Publish: %s topic is full for %s (message is dropped)",
+						topic, subscriber.config.sendTimeout)
+				}
+			}(sub)
+		}
+	}
+
+	// Wait for all subscribers to complete.
+	// Otherwise, we might fail notifying some subscribers due to context completion.
+	wg.Wait()
+
+	return nil
+}
+
+func (r *InMemory) Close(ctx context.Context) error {
+	for _, subscriber := range r.registry {
+		if err := subscriber.Close(); err != nil {
+			return err
+		}
+	}
+	return nil
+}
+
+type inMemorySubscriber struct {
+	config  *SubscribeConfig
+	handler func([]byte) error
+	channel chan []byte
+	once    sync.Once
+	mutex   sync.RWMutex
+	topics  []string
+	closed  bool
+}
+
+func (s *inMemorySubscriber) start(ctx context.Context) {
+	s.channel = make(chan []byte, s.config.channelSize)
+	for {
+		select {
+		case <-ctx.Done():
+			return
+		case msg, ok := <-s.channel:
+			if !ok {
+				return
+			}
+			if err := s.handler(msg); err != nil {
+				// TODO: bump err to caller
+				log.Ctx(ctx).Err(err).Msgf("in pubsub start: error while running handler for topic")
+			}
+		}
+	}
+}
+
+func (s *inMemorySubscriber) Subscribe(ctx context.Context, topics ...string) error {
+	s.mutex.Lock() // write lock: s.topics is mutated below
+	defer s.mutex.Unlock()
+	topics = s.formatTopics(topics...)
+	for _, ch := range topics {
+		if slices.Contains(s.topics, ch) {
+			continue
+		}
+		s.topics = append(s.topics, ch)
+	}
+	return nil
+}
+
+func (s *inMemorySubscriber) Unsubscribe(ctx context.Context, topics ...string) error {
+	s.mutex.Lock() // write lock: s.topics is mutated below
+	defer s.mutex.Unlock()
+	topics = s.formatTopics(topics...)
+	for _, ch := range topics {
+		if i := slices.Index(s.topics, ch); i >= 0 { // index into s.topics, not into the argument slice
+			s.topics[i] = s.topics[len(s.topics)-1]
+			s.topics = s.topics[:len(s.topics)-1]
+		}
+	}
+	return nil
+}
+
+func (s *inMemorySubscriber) Close() error {
+	s.mutex.Lock()
+	defer s.mutex.Unlock()
+
+	if s.closed {
+		return ErrClosed
+	}
+	s.closed = true
+	s.once.Do(func() {
+		close(s.channel)
+	})
+	return nil
+}
+
+func (s *inMemorySubscriber) isClosed() bool {
+	s.mutex.Lock()
+	defer s.mutex.Unlock()
+	return s.closed
+}
+
+func (s *inMemorySubscriber) formatTopics(topics ...string) []string {
+	result := make([]string, len(topics))
+	for i, topic := range topics {
+		result[i] = formatTopic(s.config.app, s.config.namespace, topic)
+	}
+	return result
+}
diff --git a/pubsub/options.go b/pubsub/options.go
new file mode 100644
index 0000000000..fc96c5c6ce
--- /dev/null
+++ b/pubsub/options.go
@@ -0,0 +1,168 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package pubsub
+
+import (
+	"time"
+)
+
+// An Option configures a pubsub instance.
+type Option interface {
+	Apply(*Config)
+}
+
+// OptionFunc is a function that configures a pubsub config.
+type OptionFunc func(*Config)
+
+// Apply calls f(config).
+func (f OptionFunc) Apply(config *Config) {
+	f(config)
+}
+
+// WithApp returns an option that sets the config app name.
+func WithApp(value string) Option {
+	return OptionFunc(func(m *Config) {
+		m.app = value
+	})
+}
+
+// WithNamespace returns an option that sets the config namespace.
+func WithNamespace(value string) Option {
+	return OptionFunc(func(m *Config) {
+		m.namespace = value
+	})
+}
+
+// WithHealthCheckInterval specifies the config health check interval.
+// PubSub will ping Server if it does not receive any messages
+// within the interval (redis, ...).
+// To disable health check, use zero interval.
+func WithHealthCheckInterval(value time.Duration) Option {
+	return OptionFunc(func(m *Config) {
+		m.healthInterval = value
+	})
+}
+
+// WithSendTimeout specifies the pubsub send timeout after which
+// the message is dropped.
+func WithSendTimeout(value time.Duration) Option {
+	return OptionFunc(func(m *Config) {
+		m.sendTimeout = value
+	})
+}
+
+// WithSize specifies the Go chan size in config that is used to buffer
+// incoming messages.
+func WithSize(value int) Option {
+	return OptionFunc(func(m *Config) {
+		m.channelSize = value
+	})
+}
+
+type SubscribeConfig struct {
+	topics         []string
+	app            string
+	namespace      string
+	healthInterval time.Duration
+	sendTimeout    time.Duration
+	channelSize    int
+}
+
+// SubscribeOption configures a subscription config.
+type SubscribeOption interface {
+	Apply(*SubscribeConfig)
+}
+
+// SubscribeOptionFunc is a function that configures a subscription config.
+type SubscribeOptionFunc func(*SubscribeConfig)
+
+// Apply calls f(subscribeConfig).
+func (f SubscribeOptionFunc) Apply(config *SubscribeConfig) {
+	f(config)
+}
+
+// WithTopics specifies the topics to subscribe to.
+func WithTopics(topics ...string) SubscribeOption {
+	return SubscribeOptionFunc(func(c *SubscribeConfig) {
+		c.topics = topics
+	})
+}
+
+// WithChannelNamespace returns a subscribe option that configures the namespace.
+func WithChannelNamespace(value string) SubscribeOption {
+	return SubscribeOptionFunc(func(c *SubscribeConfig) {
+		c.namespace = value
+	})
+}
+
+// WithChannelHealthCheckInterval specifies the channel health check interval.
+// PubSub will ping Server if it does not receive any messages
+// within the interval. To disable health check, use zero interval.
+func WithChannelHealthCheckInterval(value time.Duration) SubscribeOption {
+	return SubscribeOptionFunc(func(c *SubscribeConfig) {
+		c.healthInterval = value
+	})
+}
+
+// WithChannelSendTimeout specifies the channel send timeout after which
+// the message is dropped.
+func WithChannelSendTimeout(value time.Duration) SubscribeOption { + return SubscribeOptionFunc(func(c *SubscribeConfig) { + c.sendTimeout = value + }) +} + +// WithChannelSize specifies the Go chan size that is used to buffer +// incoming messages for subscriber. +func WithChannelSize(value int) SubscribeOption { + return SubscribeOptionFunc(func(c *SubscribeConfig) { + c.channelSize = value + }) +} + +type PublishConfig struct { + app string + namespace string +} + +type PublishOption interface { + Apply(*PublishConfig) +} + +// PublishOptionFunc is a function that configures a publish config. +type PublishOptionFunc func(*PublishConfig) + +// Apply calls f(publishConfig). +func (f PublishOptionFunc) Apply(config *PublishConfig) { + f(config) +} + +// WithPublishApp modifies publish config app identifier. +func WithPublishApp(value string) PublishOption { + return PublishOptionFunc(func(c *PublishConfig) { + c.app = value + }) +} + +// WithPublishNamespace modifies publish config namespace. +func WithPublishNamespace(value string) PublishOption { + return PublishOptionFunc(func(c *PublishConfig) { + c.namespace = value + }) +} + +func formatTopic(app, ns, topic string) string { + return app + ":" + ns + ":" + topic +} diff --git a/pubsub/pubsub.go b/pubsub/pubsub.go new file mode 100644 index 0000000000..7bcb4eff22 --- /dev/null +++ b/pubsub/pubsub.go @@ -0,0 +1,37 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pubsub + +import "context" + +type Publisher interface { + // Publish topic to message broker with payload. + Publish(ctx context.Context, topic string, payload []byte, + options ...PublishOption) error +} + +type PubSub interface { + Publisher + // Subscribe consumer to process the topic with payload, this should be + // blocking operation. + Subscribe(ctx context.Context, topic string, + handler func(payload []byte) error, options ...SubscribeOption) Consumer +} + +type Consumer interface { + Subscribe(ctx context.Context, topics ...string) error + Unsubscribe(ctx context.Context, topics ...string) error + Close() error +} diff --git a/pubsub/redis.go b/pubsub/redis.go new file mode 100644 index 0000000000..5d41d742d3 --- /dev/null +++ b/pubsub/redis.go @@ -0,0 +1,185 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
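[Illustrative usage sketch, not part of this change: wiring a handler through the in-memory implementation above via the PubSub/Consumer interfaces. The topic name and payload are made up, and the short sleep only exists so the asynchronous handler gets a chance to run before this throwaway program exits.]

package main

import (
	"context"
	"fmt"
	"time"

	"github.com/harness/gitness/pubsub"
)

func main() {
	ctx := context.Background()

	bus := pubsub.NewInMemory(pubsub.WithApp("gitness"), pubsub.WithNamespace("events"))

	consumer := bus.Subscribe(ctx, "repo.updated", func(payload []byte) error {
		fmt.Println("received:", string(payload)) // runs on the subscriber goroutine
		return nil
	})
	defer func() { _ = consumer.Close() }()

	if err := bus.Publish(ctx, "repo.updated", []byte(`{"repo_id":42}`)); err != nil {
		panic(err)
	}

	time.Sleep(100 * time.Millisecond) // crude wait; real services keep running
}
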
+
+package pubsub
+
+import (
+	"context"
+	"fmt"
+	"strings"
+	"sync"
+	"time"
+
+	"github.com/go-redis/redis/v8"
+	"github.com/rs/zerolog/log"
+)
+
+type Redis struct {
+	config   Config
+	client   redis.UniversalClient
+	mutex    sync.RWMutex
+	registry []Consumer
+}
+
+// NewRedis creates an instance of the Redis PubSub implementation.
+func NewRedis(client redis.UniversalClient, options ...Option) *Redis {
+	config := Config{
+		app:            "app",
+		namespace:      "default",
+		healthInterval: 3 * time.Second,
+		sendTimeout:    60 * time.Second, // a bare 60 would be 60ns and time out almost immediately
+		channelSize:    100,
+	}
+
+	for _, f := range options {
+		f.Apply(&config)
+	}
+	return &Redis{
+		config:   config,
+		client:   client,
+		registry: make([]Consumer, 0, 16),
+	}
+}
+
+// Subscribe consumer to process the event with payload.
+func (r *Redis) Subscribe(
+	ctx context.Context,
+	topic string,
+	handler func(payload []byte) error,
+	options ...SubscribeOption,
+) Consumer {
+	r.mutex.Lock()
+	defer r.mutex.Unlock()
+
+	config := SubscribeConfig{
+		topics:         make([]string, 0, 8),
+		app:            r.config.app,
+		namespace:      r.config.namespace,
+		healthInterval: r.config.healthInterval,
+		sendTimeout:    r.config.sendTimeout,
+		channelSize:    r.config.channelSize,
+	}
+
+	for _, f := range options {
+		f.Apply(&config)
+	}
+
+	// create subscriber and map it to the registry
+	subscriber := &redisSubscriber{
+		config:  &config,
+		handler: handler,
+	}
+
+	config.topics = append(config.topics, topic)
+
+	topics := subscriber.formatTopics(config.topics...)
+	subscriber.rdb = r.client.Subscribe(ctx, topics...)
+
+	// start subscriber
+	go subscriber.start(ctx)
+
+	// register subscriber
+	r.registry = append(r.registry, subscriber)
+
+	return subscriber
+}
+
+// Publish event topic to message broker with payload.
+func (r *Redis) Publish(ctx context.Context, topic string, payload []byte, opts ...PublishOption) error {
+	pubConfig := PublishConfig{
+		app:       r.config.app,
+		namespace: r.config.namespace,
+	}
+	for _, f := range opts {
+		f.Apply(&pubConfig)
+	}
+
+	topic = formatTopic(pubConfig.app, pubConfig.namespace, topic)
+
+	err := r.client.Publish(ctx, topic, payload).Err()
+	if err != nil {
+		return fmt.Errorf("failed to write to pubsub topic '%s'. Error: %w",
+			topic, err)
+	}
+	return nil
+}
+
+func (r *Redis) Close(ctx context.Context) error {
+	for _, subscriber := range r.registry {
+		err := subscriber.Close()
+		if err != nil {
+			return err
+		}
+	}
+	return nil
+}
+
+type redisSubscriber struct {
+	config  *SubscribeConfig
+	rdb     *redis.PubSub
+	handler func([]byte) error
+}
+
+func (s *redisSubscriber) start(ctx context.Context) {
+	// Go channel which receives messages.
+	ch := s.rdb.Channel(
+		redis.WithChannelHealthCheckInterval(s.config.healthInterval),
+		redis.WithChannelSendTimeout(s.config.sendTimeout),
+		redis.WithChannelSize(s.config.channelSize),
+	)
+	for {
+		select {
+		case <-ctx.Done():
+			return
+		case msg := <-ch:
+			if err := s.handler([]byte(msg.Payload)); err != nil {
+				log.Ctx(ctx).Err(err).Msg("received an error from handler function")
+			}
+		}
+	}
+}
+
+func (s *redisSubscriber) Subscribe(ctx context.Context, topics ...string) error {
+	err := s.rdb.Subscribe(ctx, s.formatTopics(topics...)...)
+	if err != nil {
+		return fmt.Errorf("subscribe failed for channels %v with error: %w",
+			strings.Join(topics, ","), err)
+	}
+	return nil
+}
+
+func (s *redisSubscriber) Unsubscribe(ctx context.Context, topics ...string) error {
+	err := s.rdb.Unsubscribe(ctx, s.formatTopics(topics...)...)
+ if err != nil { + return fmt.Errorf("unsubscribe failed for chanels %v with error: %w", + strings.Join(topics, ","), err) + } + return nil +} + +func (s *redisSubscriber) Close() error { + err := s.rdb.Close() + if err != nil { + return fmt.Errorf("failed while closing subscriber with error: %w", err) + } + return nil +} + +func (s *redisSubscriber) formatTopics(topics ...string) []string { + result := make([]string, len(topics)) + for i, topic := range topics { + result[i] = formatTopic(s.config.app, s.config.namespace, topic) + } + return result +} diff --git a/pubsub/wire.go b/pubsub/wire.go new file mode 100644 index 0000000000..627a245581 --- /dev/null +++ b/pubsub/wire.go @@ -0,0 +1,61 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pubsub + +import ( + "github.com/harness/gitness/types" + + "github.com/go-redis/redis/v8" + "github.com/google/wire" +) + +var WireSet = wire.NewSet( + ProvideConfig, + ProvidePubSub, +) + +func ProvideConfig(config *types.Config) Config { + return Config{ + app: config.PubSub.AppNamespace, + namespace: config.PubSub.DefaultNamespace, + provider: Provider(config.PubSub.Provider), + healthInterval: config.PubSub.HealthInterval, + sendTimeout: config.PubSub.SendTimeout, + channelSize: config.PubSub.ChannelSize, + } +} + +func ProvidePubSub(config Config, client redis.UniversalClient) PubSub { + switch config.provider { + case ProviderRedis: + return NewRedis(client, + WithApp(config.app), + WithNamespace(config.namespace), + WithHealthCheckInterval(config.healthInterval), + WithSendTimeout(config.sendTimeout), + WithSize(config.channelSize), + ) + case ProviderMemory: + fallthrough + default: + return NewInMemory( + WithApp(config.app), + WithNamespace(config.namespace), + WithHealthCheckInterval(config.healthInterval), + WithSendTimeout(config.sendTimeout), + WithSize(config.channelSize), + ) + } +} diff --git a/resources/embed.go b/resources/embed.go new file mode 100644 index 0000000000..e1906c4f85 --- /dev/null +++ b/resources/embed.go @@ -0,0 +1,61 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package resources + +import ( + "embed" + "fmt" + "strings" +) + +var ( + //go:embed gitignore + gitignore embed.FS + + //go:embed license + licence embed.FS +) + +// Licenses returns map of licences in license folder. +func Licenses() ([]byte, error) { + return licence.ReadFile("license/index.json") +} + +// ReadLicense reads licence from license folder. 
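[Illustrative usage sketch, not part of this change: listing and reading the embedded gitignore templates via the helpers in this file (GitIgnores and ReadGitIgnore are defined just below). "Go" is one of the templates added later in this patch.]

package main

import (
	"fmt"

	"github.com/harness/gitness/resources"
)

func main() {
	names, err := resources.GitIgnores() // template names, e.g. "AL", "Android", "Go", ...
	if err != nil {
		panic(err)
	}
	fmt.Println("gitignore templates available:", len(names))

	content, err := resources.ReadGitIgnore("Go")
	if err != nil {
		panic(err)
	}
	fmt.Print(string(content))
}
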
+func ReadLicense(name string) ([]byte, error) { + content, err := licence.ReadFile(fmt.Sprintf("license/%s.txt", name)) + if err != nil { + return nil, err + } + return content, err +} + +// GitIgnores lists all files in gitignore folder and return file names. +func GitIgnores() ([]string, error) { + entries, err := gitignore.ReadDir("gitignore") + files := make([]string, len(entries)) + if err != nil { + return []string{}, err + } + for i, filename := range entries { + files[i] = strings.ReplaceAll(filename.Name(), ".gitignore", "") + } + return files, nil +} + +// ReadGitIgnore reads gitignore file from license folder. +func ReadGitIgnore(name string) ([]byte, error) { + return gitignore.ReadFile(fmt.Sprintf("gitignore/%s.gitignore", name)) +} diff --git a/resources/gitignore/AL.gitignore b/resources/gitignore/AL.gitignore new file mode 100644 index 0000000000..02eac6989a --- /dev/null +++ b/resources/gitignore/AL.gitignore @@ -0,0 +1,22 @@ +### AL ### +#Template for AL projects for Dynamics 365 Business Central +#launch.json folder +.vscode/ +#Cache folder +.alcache/ +#Symbols folder +.alpackages/ +#Snapshots folder +.snapshots/ +#Testing Output folder +.output/ +#Extension App-file +*.app +#Rapid Application Development File +rad.json +#Translation Base-file +*.g.xlf +#License-file +*.flf +#Test results file +TestResults.xml \ No newline at end of file diff --git a/resources/gitignore/Actionscript.gitignore b/resources/gitignore/Actionscript.gitignore new file mode 100644 index 0000000000..5d947ca887 --- /dev/null +++ b/resources/gitignore/Actionscript.gitignore @@ -0,0 +1,18 @@ +# Build and Release Folders +bin-debug/ +bin-release/ +[Oo]bj/ +[Bb]in/ + +# Other files and folders +.settings/ + +# Executables +*.swf +*.air +*.ipa +*.apk + +# Project files, i.e. `.project`, `.actionScriptProperties` and `.flexProperties` +# should NOT be excluded as they contain compiler settings and other important +# information for Eclipse / Flash Builder. diff --git a/resources/gitignore/Ada.gitignore b/resources/gitignore/Ada.gitignore new file mode 100644 index 0000000000..b4d703968a --- /dev/null +++ b/resources/gitignore/Ada.gitignore @@ -0,0 +1,5 @@ +# Object file +*.o + +# Ada Library Information +*.ali diff --git a/resources/gitignore/Agda.gitignore b/resources/gitignore/Agda.gitignore new file mode 100644 index 0000000000..58ab67f071 --- /dev/null +++ b/resources/gitignore/Agda.gitignore @@ -0,0 +1,2 @@ +*.agdai +MAlonzo/** diff --git a/resources/gitignore/Android.gitignore b/resources/gitignore/Android.gitignore new file mode 100644 index 0000000000..347e252ef1 --- /dev/null +++ b/resources/gitignore/Android.gitignore @@ -0,0 +1,33 @@ +# Gradle files +.gradle/ +build/ + +# Local configuration file (sdk path, etc) +local.properties + +# Log/OS Files +*.log + +# Android Studio generated files and folders +captures/ +.externalNativeBuild/ +.cxx/ +*.apk +output.json + +# IntelliJ +*.iml +.idea/ +misc.xml +deploymentTargetDropDown.xml +render.experimental.xml + +# Keystore files +*.jks +*.keystore + +# Google Services (e.g. 
APIs or Firebase) +google-services.json + +# Android Profiling +*.hprof diff --git a/resources/gitignore/AppEngine.gitignore b/resources/gitignore/AppEngine.gitignore new file mode 100644 index 0000000000..6227345453 --- /dev/null +++ b/resources/gitignore/AppEngine.gitignore @@ -0,0 +1,2 @@ +# Google App Engine generated folder +appengine-generated/ diff --git a/resources/gitignore/AppceleratorTitanium.gitignore b/resources/gitignore/AppceleratorTitanium.gitignore new file mode 100644 index 0000000000..3abea55976 --- /dev/null +++ b/resources/gitignore/AppceleratorTitanium.gitignore @@ -0,0 +1,3 @@ +# Build folder and log file +build/ +build.log diff --git a/resources/gitignore/ArchLinuxPackages.gitignore b/resources/gitignore/ArchLinuxPackages.gitignore new file mode 100644 index 0000000000..b73905529f --- /dev/null +++ b/resources/gitignore/ArchLinuxPackages.gitignore @@ -0,0 +1,13 @@ +*.tar +*.tar.* +*.jar +*.exe +*.msi +*.zip +*.tgz +*.log +*.log.* +*.sig + +pkg/ +src/ diff --git a/resources/gitignore/Autotools.gitignore b/resources/gitignore/Autotools.gitignore new file mode 100644 index 0000000000..617156f819 --- /dev/null +++ b/resources/gitignore/Autotools.gitignore @@ -0,0 +1,52 @@ +# http://www.gnu.org/software/automake + +Makefile.in +/ar-lib +/mdate-sh +/py-compile +/test-driver +/ylwrap +.deps/ +.dirstamp + +# http://www.gnu.org/software/autoconf + +autom4te.cache +/autoscan.log +/autoscan-*.log +/aclocal.m4 +/compile +/config.cache +/config.guess +/config.h.in +/config.log +/config.status +/config.sub +/configure +/configure.scan +/depcomp +/install-sh +/missing +/stamp-h1 + +# https://www.gnu.org/software/libtool/ + +/ltmain.sh + +# http://www.gnu.org/software/texinfo + +/texinfo.tex + +# http://www.gnu.org/software/m4/ + +m4/libtool.m4 +m4/ltoptions.m4 +m4/ltsugar.m4 +m4/ltversion.m4 +m4/lt~obsolete.m4 + +# Generated Makefile +# (meta build system like autotools, +# can automatically generate from config.status script +# (which is called by configure script)) +Makefile diff --git a/resources/gitignore/C++.gitignore b/resources/gitignore/C++.gitignore new file mode 100644 index 0000000000..259148fa18 --- /dev/null +++ b/resources/gitignore/C++.gitignore @@ -0,0 +1,32 @@ +# Prerequisites +*.d + +# Compiled Object files +*.slo +*.lo +*.o +*.obj + +# Precompiled Headers +*.gch +*.pch + +# Compiled Dynamic libraries +*.so +*.dylib +*.dll + +# Fortran module files +*.mod +*.smod + +# Compiled Static libraries +*.lai +*.la +*.a +*.lib + +# Executables +*.exe +*.out +*.app diff --git a/resources/gitignore/C.gitignore b/resources/gitignore/C.gitignore new file mode 100644 index 0000000000..c6127b38c1 --- /dev/null +++ b/resources/gitignore/C.gitignore @@ -0,0 +1,52 @@ +# Prerequisites +*.d + +# Object files +*.o +*.ko +*.obj +*.elf + +# Linker output +*.ilk +*.map +*.exp + +# Precompiled Headers +*.gch +*.pch + +# Libraries +*.lib +*.a +*.la +*.lo + +# Shared objects (inc. 
Windows DLLs) +*.dll +*.so +*.so.* +*.dylib + +# Executables +*.exe +*.out +*.app +*.i*86 +*.x86_64 +*.hex + +# Debug files +*.dSYM/ +*.su +*.idb +*.pdb + +# Kernel Module Compile Results +*.mod* +*.cmd +.tmp_versions/ +modules.order +Module.symvers +Mkfile.old +dkms.conf diff --git a/resources/gitignore/CFWheels.gitignore b/resources/gitignore/CFWheels.gitignore new file mode 100644 index 0000000000..f2fec34ff8 --- /dev/null +++ b/resources/gitignore/CFWheels.gitignore @@ -0,0 +1,12 @@ +# unpacked plugin folders +plugins/**/* + +# files directory where uploads go +files + +# DBMigrate plugin: generated SQL +db/sql + +# AssetBundler plugin: generated bundles +javascripts/bundles +stylesheets/bundles diff --git a/resources/gitignore/CMake.gitignore b/resources/gitignore/CMake.gitignore new file mode 100644 index 0000000000..46f42f8f3c --- /dev/null +++ b/resources/gitignore/CMake.gitignore @@ -0,0 +1,11 @@ +CMakeLists.txt.user +CMakeCache.txt +CMakeFiles +CMakeScripts +Testing +Makefile +cmake_install.cmake +install_manifest.txt +compile_commands.json +CTestTestfile.cmake +_deps diff --git a/resources/gitignore/CONTRIBUTING.md b/resources/gitignore/CONTRIBUTING.md new file mode 100644 index 0000000000..c693838195 --- /dev/null +++ b/resources/gitignore/CONTRIBUTING.md @@ -0,0 +1,39 @@ +# Contributing guidelines + +We’d love you to help us improve this project. To help us keep this collection +high quality, we request that contributions adhere to the following guidelines. + +- **Provide a link to the application or project’s homepage**. Unless it’s + extremely popular, there’s a chance the maintainers don’t know about or use + the language, framework, editor, app, or project your change applies to. + +- **Provide links to documentation** supporting the change you’re making. + Current, canonical documentation mentioning the files being ignored is best. + If documentation isn’t available to support your change, do the best you can + to explain what the files being ignored are for. + +- **Explain why you’re making a change**. Even if it seems self-evident, please + take a sentence or two to tell us why your change or addition should happen. + It’s especially helpful to articulate why this change applies to *everyone* + who works with the applicable technology, rather than just you or your team. + +- **Please consider the scope of your change**. If your change specific to a + certain language or framework, then make sure the change is made to the + template for that language or framework, rather than to the template for an + editor, tool, or operating system. + +- **Please only modify *one template* per pull request**. This helps keep pull + requests and feedback focused on a specific project or technology. + +In general, the more you can do to help us understand the change you’re making, +the more likely we’ll be to accept your contribution quickly. + +If a template is mostly a list of files installed by a particular version of +some software (e.g. a PHP framework) then it's brittle and probably no more +helpful than a simple `ls`. If it's not possible to curate a small set of +useful rules, then the template might not be a good fit for this collection. + +Please also understand that we can’t list every tool that ever existed. +Our aim is to curate a collection of the *most common and helpful* templates, +not to make sure we cover every project possible. If we choose not to +include your language, tool, or project, it’s not because it’s not awesome. 
diff --git a/resources/gitignore/CUDA.gitignore b/resources/gitignore/CUDA.gitignore new file mode 100644 index 0000000000..cb385db83f --- /dev/null +++ b/resources/gitignore/CUDA.gitignore @@ -0,0 +1,6 @@ +*.i +*.ii +*.gpu +*.ptx +*.cubin +*.fatbin diff --git a/resources/gitignore/CakePHP.gitignore b/resources/gitignore/CakePHP.gitignore new file mode 100644 index 0000000000..c6597e4eab --- /dev/null +++ b/resources/gitignore/CakePHP.gitignore @@ -0,0 +1,25 @@ +# CakePHP 3 + +/vendor/* +/config/app.php + +/tmp/cache/models/* +!/tmp/cache/models/empty +/tmp/cache/persistent/* +!/tmp/cache/persistent/empty +/tmp/cache/views/* +!/tmp/cache/views/empty +/tmp/sessions/* +!/tmp/sessions/empty +/tmp/tests/* +!/tmp/tests/empty + +/logs/* +!/logs/empty + +# CakePHP 2 + +/app/tmp/* +/app/Config/core.php +/app/Config/database.php +/vendors/* diff --git a/resources/gitignore/ChefCookbook.gitignore b/resources/gitignore/ChefCookbook.gitignore new file mode 100644 index 0000000000..5ee7b7a9a1 --- /dev/null +++ b/resources/gitignore/ChefCookbook.gitignore @@ -0,0 +1,9 @@ +.vagrant +/cookbooks + +# Bundler +bin/* +.bundle/* + +.kitchen/ +.kitchen.local.yml diff --git a/resources/gitignore/Clojure.gitignore b/resources/gitignore/Clojure.gitignore new file mode 100644 index 0000000000..55cdb78821 --- /dev/null +++ b/resources/gitignore/Clojure.gitignore @@ -0,0 +1,14 @@ +pom.xml +pom.xml.asc +*.jar +*.class +/lib/ +/classes/ +/target/ +/checkouts/ +.lein-deps-sum +.lein-repl-history +.lein-plugins/ +.lein-failures +.nrepl-port +.cpcache/ \ No newline at end of file diff --git a/resources/gitignore/CodeIgniter.gitignore b/resources/gitignore/CodeIgniter.gitignore new file mode 100644 index 0000000000..f447dc307b --- /dev/null +++ b/resources/gitignore/CodeIgniter.gitignore @@ -0,0 +1,18 @@ +*/config/development +*/logs/log-*.php +!*/logs/index.html +*/cache/* +!system/cache/* +!*/cache/index.html +!*/cache/.htaccess + +user_guide_src/build/* +user_guide_src/cilexer/build/* +user_guide_src/cilexer/dist/* +user_guide_src/cilexer/pycilexer.egg-info/* + +#codeigniter 3 +application/logs/* +!application/logs/index.html +!application/logs/.htaccess +/vendor/ diff --git a/resources/gitignore/CommonLisp.gitignore b/resources/gitignore/CommonLisp.gitignore new file mode 100644 index 0000000000..e7de127b01 --- /dev/null +++ b/resources/gitignore/CommonLisp.gitignore @@ -0,0 +1,17 @@ +*.FASL +*.fasl +*.lisp-temp +*.dfsl +*.pfsl +*.d64fsl +*.p64fsl +*.lx64fsl +*.lx32fsl +*.dx64fsl +*.dx32fsl +*.fx64fsl +*.fx32fsl +*.sx64fsl +*.sx32fsl +*.wx64fsl +*.wx32fsl diff --git a/resources/gitignore/Composer.gitignore b/resources/gitignore/Composer.gitignore new file mode 100644 index 0000000000..a67d42b32f --- /dev/null +++ b/resources/gitignore/Composer.gitignore @@ -0,0 +1,6 @@ +composer.phar +/vendor/ + +# Commit your application's lock file https://getcomposer.org/doc/01-basic-usage.md#commit-your-composer-lock-file-to-version-control +# You may choose to ignore a library lock file http://getcomposer.org/doc/02-libraries.md#lock-file +# composer.lock diff --git a/resources/gitignore/Concrete5.gitignore b/resources/gitignore/Concrete5.gitignore new file mode 100644 index 0000000000..d6c11ad2b4 --- /dev/null +++ b/resources/gitignore/Concrete5.gitignore @@ -0,0 +1,21 @@ +# ignore the error log and .htaccess and others +error_log +.htaccess + +# concrete5 5.6 specific + +config/site.php +files/cache/* +files/tmp/* + +# concrete5 5.7 specific + +# ignore everything but the index.html +/application/files/* 
+!/application/files/index.html + +# ignore updates folder +/updates/* + +# ignore sitemap.xml +/sitemap.xml diff --git a/resources/gitignore/Coq.gitignore b/resources/gitignore/Coq.gitignore new file mode 100644 index 0000000000..66596b22ed --- /dev/null +++ b/resources/gitignore/Coq.gitignore @@ -0,0 +1,45 @@ +.*.aux +.*.d +*.a +*.cma +*.cmi +*.cmo +*.cmx +*.cmxa +*.cmxs +*.glob +*.ml.d +*.ml4.d +*.mlg.d +*.mli.d +*.mllib.d +*.mlpack.d +*.native +*.o +*.v.d +*.vio +*.vo +*.vok +*.vos +.coq-native +.csdp.cache +.lia.cache +.nia.cache +.nlia.cache +.nra.cache +csdp.cache +lia.cache +nia.cache +nlia.cache +nra.cache +native_compute_profile_*.data + +# generated timing files +*.timing.diff +*.v.after-timing +*.v.before-timing +*.v.timing +time-of-build-after.log +time-of-build-before.log +time-of-build-both.log +time-of-build-pretty.log diff --git a/resources/gitignore/CraftCMS.gitignore b/resources/gitignore/CraftCMS.gitignore new file mode 100644 index 0000000000..0d81b397e3 --- /dev/null +++ b/resources/gitignore/CraftCMS.gitignore @@ -0,0 +1,4 @@ +# Craft 2 Storage (https://craftcms.com/support/craft-storage-gitignore) +# not necessary for Craft 3 (https://github.com/craftcms/craft/issues/26) +/craft/storage/* +!/craft/storage/rebrand diff --git a/resources/gitignore/D.gitignore b/resources/gitignore/D.gitignore new file mode 100644 index 0000000000..74b926fc90 --- /dev/null +++ b/resources/gitignore/D.gitignore @@ -0,0 +1,24 @@ +# Compiled Object files +*.o +*.obj + +# Compiled Dynamic libraries +*.so +*.dylib +*.dll + +# Compiled Static libraries +*.a +*.lib + +# Executables +*.exe + +# DUB +.dub +docs.json +__dummy.html +docs/ + +# Code coverage +*.lst diff --git a/resources/gitignore/DM.gitignore b/resources/gitignore/DM.gitignore new file mode 100644 index 0000000000..ba5abdab83 --- /dev/null +++ b/resources/gitignore/DM.gitignore @@ -0,0 +1,5 @@ +*.dmb +*.rsc +*.int +*.lk +*.zip diff --git a/resources/gitignore/Dart.gitignore b/resources/gitignore/Dart.gitignore new file mode 100644 index 0000000000..3a83c2f087 --- /dev/null +++ b/resources/gitignore/Dart.gitignore @@ -0,0 +1,27 @@ +# See https://www.dartlang.org/guides/libraries/private-files + +# Files and directories created by pub +.dart_tool/ +.packages +build/ +# If you're building an application, you may want to check-in your pubspec.lock +pubspec.lock + +# Directory created by dartdoc +# If you don't generate documentation locally you can remove this line. +doc/api/ + +# dotenv environment variables file +.env* + +# Avoid committing generated Javascript files: +*.dart.js +*.info.json # Produced by the --dump-info flag. +*.js # When generated by dart2js. Don't specify *.js if your + # project includes source files written in JavaScript. +*.js_ +*.js.deps +*.js.map + +.flutter-plugins +.flutter-plugins-dependencies diff --git a/resources/gitignore/Delphi.gitignore b/resources/gitignore/Delphi.gitignore new file mode 100644 index 0000000000..9532800ba2 --- /dev/null +++ b/resources/gitignore/Delphi.gitignore @@ -0,0 +1,69 @@ +# Uncomment these types if you want even more clean repository. But be careful. +# It can make harm to an existing project source. Read explanations below. +# +# Resource files are binaries containing manifest, project icon and version info. +# They can not be viewed as text or compared by diff-tools. Consider replacing them with .rc files. +#*.res +# +# Type library file (binary). In old Delphi versions it should be stored. +# Since Delphi 2009 it is produced from .ridl file and can safely be ignored. 
+#*.tlb +# +# Diagram Portfolio file. Used by the diagram editor up to Delphi 7. +# Uncomment this if you are not using diagrams or use newer Delphi version. +#*.ddp +# +# Visual LiveBindings file. Added in Delphi XE2. +# Uncomment this if you are not using LiveBindings Designer. +#*.vlb +# +# Deployment Manager configuration file for your project. Added in Delphi XE2. +# Uncomment this if it is not mobile development and you do not use remote debug feature. +#*.deployproj +# +# C++ object files produced when C/C++ Output file generation is configured. +# Uncomment this if you are not using external objects (zlib library for example). +#*.obj +# + +# Delphi compiler-generated binaries (safe to delete) +*.exe +*.dll +*.bpl +*.bpi +*.dcp +*.so +*.apk +*.drc +*.map +*.dres +*.rsm +*.tds +*.dcu +*.lib +*.a +*.o +*.ocx + +# Delphi autogenerated files (duplicated info) +*.cfg +*.hpp +*Resource.rc + +# Delphi local files (user-specific info) +*.local +*.identcache +*.projdata +*.tvsconfig +*.dsk + +# Delphi history and backups +__history/ +__recovery/ +*.~* + +# Castalia statistics file (since XE7 Castalia is distributed with Delphi) +*.stat + +# Boss dependency manager vendor folder https://github.com/HashLoad/boss +modules/ diff --git a/resources/gitignore/Drupal.gitignore b/resources/gitignore/Drupal.gitignore new file mode 100644 index 0000000000..faae808384 --- /dev/null +++ b/resources/gitignore/Drupal.gitignore @@ -0,0 +1,62 @@ +# gitignore template for Drupal 8 projects +# +# earlier versions of Drupal are tracked in `community/PHP/` +# +# follows official upstream conventions: +# https://www.drupal.org/docs/develop/using-composer + +# Ignore configuration files that may contain sensitive information +/web/sites/*/*settings*.php +/web/sites/*/*services*.yml + +# Ignore paths that may contain user-generated content +/web/sites/*/files +/web/sites/*/public +/web/sites/*/private +/web/sites/*/files-public +/web/sites/*/files-private + +# Ignore paths that may contain temporary files +/web/sites/*/translations +/web/sites/*/tmp +/web/sites/*/cache + +# Ignore drupal core (if not versioning drupal sources) +/web/vendor +/web/core +/web/modules/README.txt +/web/profiles/README.txt +/web/sites/development.services.yml +/web/sites/example.settings.local.php +/web/sites/example.sites.php +/web/sites/README.txt +/web/themes/README.txt +/web/.csslintrc +/web/.editorconfig +/web/.eslintignore +/web/.eslintrc.json +/web/.gitattributes +/web/.htaccess +/web/.ht.router.php +/web/autoload.php +/web/composer.json +/web/composer.lock +/web/example.gitignore +/web/index.php +/web/INSTALL.txt +/web/LICENSE.txt +/web/README.txt +/web/robots.txt +/web/update.php +/web/web.config + +# Ignore vendor dependencies and scripts +/vendor +/composer.phar +/composer +/robo.phar +/robo +/drush.phar +/drush +/drupal.phar +/drupal diff --git a/resources/gitignore/EPiServer.gitignore b/resources/gitignore/EPiServer.gitignore new file mode 100644 index 0000000000..97037de743 --- /dev/null +++ b/resources/gitignore/EPiServer.gitignore @@ -0,0 +1,4 @@ +###################### +## EPiServer Files +###################### +*License.config diff --git a/resources/gitignore/Eagle.gitignore b/resources/gitignore/Eagle.gitignore new file mode 100644 index 0000000000..28f0b9715e --- /dev/null +++ b/resources/gitignore/Eagle.gitignore @@ -0,0 +1,51 @@ +# Ignore list for Eagle, a PCB layout tool + +# Backup files +*.s#? +*.b#? +*.l#? +*.b$? +*.s$? +*.l$? 
+ +# Eagle project file +# It contains a serial number and references to the file structure +# on your computer. +# comment the following line if you want to have your project file included. +eagle.epf + +# Autorouter files +*.pro +*.job + +# CAM files +*.$$$ +*.cmp +*.ly2 +*.l15 +*.sol +*.plc +*.stc +*.sts +*.crc +*.crs + +*.dri +*.drl +*.gpi +*.pls +*.ger +*.xln + +*.drd +*.drd.* + +*.s#* +*.b#* + +*.info + +*.eps + +# file locks introduced since 7.x +*.lck diff --git a/resources/gitignore/Elisp.gitignore b/resources/gitignore/Elisp.gitignore new file mode 100644 index 0000000000..206569dc66 --- /dev/null +++ b/resources/gitignore/Elisp.gitignore @@ -0,0 +1,11 @@ +# Compiled +*.elc + +# Packaging +.cask + +# Backup files +*~ + +# Undo-tree save-files +*.~undo-tree diff --git a/resources/gitignore/Elixir.gitignore b/resources/gitignore/Elixir.gitignore new file mode 100644 index 0000000000..b263cd10f3 --- /dev/null +++ b/resources/gitignore/Elixir.gitignore @@ -0,0 +1,10 @@ +/_build +/cover +/deps +/doc +/.fetch +erl_crash.dump +*.ez +*.beam +/config/*.secret.exs +.elixir_ls/ diff --git a/resources/gitignore/Elm.gitignore b/resources/gitignore/Elm.gitignore new file mode 100644 index 0000000000..8b631e7de0 --- /dev/null +++ b/resources/gitignore/Elm.gitignore @@ -0,0 +1,4 @@ +# elm-package generated files +elm-stuff +# elm-repl generated files +repl-temp-* diff --git a/resources/gitignore/Erlang.gitignore b/resources/gitignore/Erlang.gitignore new file mode 100644 index 0000000000..751a61d1a1 --- /dev/null +++ b/resources/gitignore/Erlang.gitignore @@ -0,0 +1,17 @@ +.eunit +*.o +*.beam +*.plt +erl_crash.dump +.concrete/DEV_MODE + +# rebar 2.x +.rebar +rel/example_project +ebin/*.beam +deps + +# rebar 3 +.rebar3 +_build/ +_checkouts/ diff --git a/resources/gitignore/ExpressionEngine.gitignore b/resources/gitignore/ExpressionEngine.gitignore new file mode 100644 index 0000000000..314e4df123 --- /dev/null +++ b/resources/gitignore/ExpressionEngine.gitignore @@ -0,0 +1,19 @@ +.DS_Store + +# Images +images/avatars/ +images/captchas/ +images/smileys/ +images/member_photos/ +images/signature_attachments/ +images/pm_attachments/ + +# For security do not publish the following files +system/expressionengine/config/database.php +system/expressionengine/config/config.php + +# Caches +sized/ +thumbs/ +_thumbs/ +*/expressionengine/cache/* diff --git a/resources/gitignore/ExtJs.gitignore b/resources/gitignore/ExtJs.gitignore new file mode 100644 index 0000000000..ab97a8cc3e --- /dev/null +++ b/resources/gitignore/ExtJs.gitignore @@ -0,0 +1,14 @@ +.architect +bootstrap.css +bootstrap.js +bootstrap.json +bootstrap.jsonp +build/ +classic.json +classic.jsonp +ext/ +modern.json +modern.jsonp +resources/sass/.sass-cache/ +resources/.arch-internal-preview.css +.arch-internal-preview.css diff --git a/resources/gitignore/Fancy.gitignore b/resources/gitignore/Fancy.gitignore new file mode 100644 index 0000000000..70d6e631e5 --- /dev/null +++ b/resources/gitignore/Fancy.gitignore @@ -0,0 +1,2 @@ +*.rbc +*.fyc diff --git a/resources/gitignore/Finale.gitignore b/resources/gitignore/Finale.gitignore new file mode 100644 index 0000000000..7ef08e0c34 --- /dev/null +++ b/resources/gitignore/Finale.gitignore @@ -0,0 +1,13 @@ +*.bak +*.db +*.avi +*.pdf +*.ps +*.mid +*.midi +*.mp3 +*.aif +*.wav +# Some versions of Finale have a bug and randomly save extra copies of +# the music source as " copy.mus" +*copy.mus diff --git a/resources/gitignore/FlaxEngine.gitignore b/resources/gitignore/FlaxEngine.gitignore new file mode 
100644 index 0000000000..b470568443 --- /dev/null +++ b/resources/gitignore/FlaxEngine.gitignore @@ -0,0 +1,45 @@ +# Ignore Flax project files +Binaries/ +Cache/ +Logs/ +Output/ +Screenshots/ +*.HotReload.* + +# Ignore Visual Studio project files (generated locally) +*.csproj +*.sln + +# Ignore thumbnails created by Windows +Thumbs.db + +# Ignore files built by Visual Studio +*.obj +*.exe +*.pdb +*.user +*.aps +*.pch +*.vspscc +*_i.c +*_p.c +*.ncb +*.suo +*.tlb +*.tlh +*.bak +*.cache +*.ilk +*.log +[Bb]in +[Dd]ebug*/ +*.lib +*.sbr +obj/ +[Rr]elease*/ +_ReSharper*/ +[Tt]est[Rr]esult* +.vs/ + +# Ignore Nuget packages folder +packages/ diff --git a/resources/gitignore/ForceDotCom.gitignore b/resources/gitignore/ForceDotCom.gitignore new file mode 100644 index 0000000000..3933cd4dd5 --- /dev/null +++ b/resources/gitignore/ForceDotCom.gitignore @@ -0,0 +1,4 @@ +.project +.settings +salesforce.schema +Referenced Packages diff --git a/resources/gitignore/Fortran.gitignore b/resources/gitignore/Fortran.gitignore new file mode 100644 index 0000000000..d99efa91a0 --- /dev/null +++ b/resources/gitignore/Fortran.gitignore @@ -0,0 +1,32 @@ +# Prerequisites +*.d + +# Compiled Object files +*.slo +*.lo +*.o +*.obj + +# Precompiled Headers +*.gch +*.pch + +# Compiled Dynamic libraries +*.so +*.dylib +*.dll + +# Fortran module files +*.mod +*.smod + +# Compiled Static libraries +*.lai +*.la +*.a +*.lib + +# Executables +*.exe +*.out +*.app \ No newline at end of file diff --git a/resources/gitignore/FuelPHP.gitignore b/resources/gitignore/FuelPHP.gitignore new file mode 100644 index 0000000000..d69f71f433 --- /dev/null +++ b/resources/gitignore/FuelPHP.gitignore @@ -0,0 +1,21 @@ +# the composer package lock file and install directory +# Commit your application's lock file http://getcomposer.org/doc/01-basic-usage.md#composer-lock-the-lock-file +# You may choose to ignore a library lock file http://getcomposer.org/doc/02-libraries.md#lock-file +# /composer.lock +/fuel/vendor + +# the fuelphp document +/docs/ + +# you may install these packages with `oil package`. 
+# http://fuelphp.com/docs/packages/oil/package.html +# /fuel/packages/auth/ +# /fuel/packages/email/ +# /fuel/packages/oil/ +# /fuel/packages/orm/ +# /fuel/packages/parser/ + +# dynamically generated files +/fuel/app/logs/*/*/* +/fuel/app/cache/*/* +/fuel/app/config/crypt.php diff --git a/resources/gitignore/GWT.gitignore b/resources/gitignore/GWT.gitignore new file mode 100644 index 0000000000..a01e7fcd92 --- /dev/null +++ b/resources/gitignore/GWT.gitignore @@ -0,0 +1,25 @@ +*.class + +# Package Files # +*.jar +*.war + +# gwt caches and compiled units # +war/gwt_bree/ +gwt-unitCache/ + +# boilerplate generated classes # +.apt_generated/ + +# more caches and things from deploy # +war/WEB-INF/deploy/ +war/WEB-INF/classes/ + +#compilation logs +.gwt/ + +#gwt junit compilation files +www-test/ + +#old GWT (1.5) created this dir +.gwt-tmp/ diff --git a/resources/gitignore/Gcov.gitignore b/resources/gitignore/Gcov.gitignore new file mode 100644 index 0000000000..a6451430e1 --- /dev/null +++ b/resources/gitignore/Gcov.gitignore @@ -0,0 +1,5 @@ +# gcc coverage testing tool files + +*.gcno +*.gcda +*.gcov diff --git a/resources/gitignore/GitBook.gitignore b/resources/gitignore/GitBook.gitignore new file mode 100644 index 0000000000..4cb12d8db7 --- /dev/null +++ b/resources/gitignore/GitBook.gitignore @@ -0,0 +1,16 @@ +# Node rules: +## Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) +.grunt + +## Dependency directory +## Commenting this out is preferred by some people, see +## https://docs.npmjs.com/misc/faq#should-i-check-my-node_modules-folder-into-git +node_modules + +# Book build output +_book + +# eBook build output +*.epub +*.mobi +*.pdf diff --git a/resources/gitignore/Go.gitignore b/resources/gitignore/Go.gitignore new file mode 100644 index 0000000000..3b735ec4a8 --- /dev/null +++ b/resources/gitignore/Go.gitignore @@ -0,0 +1,21 @@ +# If you prefer the allow list template instead of the deny list, see community template: +# https://github.com/github/gitignore/blob/main/community/Golang/Go.AllowList.gitignore +# +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib + +# Test binary, built with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +# Dependency directories (remove the comment below to include it) +# vendor/ + +# Go workspace file +go.work diff --git a/resources/gitignore/Godot.gitignore b/resources/gitignore/Godot.gitignore new file mode 100644 index 0000000000..4f48ad79f8 --- /dev/null +++ b/resources/gitignore/Godot.gitignore @@ -0,0 +1,11 @@ +# Godot-specific ignores +.import/ +export.cfg +export_presets.cfg + +# Imported translations (automatically generated from CSV files) +*.translation + +# Mono-specific ignores +.mono/ +data_*/ diff --git a/resources/gitignore/Gradle.gitignore b/resources/gitignore/Gradle.gitignore new file mode 100644 index 0000000000..a5b111377b --- /dev/null +++ b/resources/gitignore/Gradle.gitignore @@ -0,0 +1,21 @@ +.gradle +**/build/ +!src/**/build/ + +# Ignore Gradle GUI config +gradle-app.setting + +# Avoid ignoring Gradle wrapper jar file (.jar files are usually ignored) +!gradle-wrapper.jar + +# Avoid ignore Gradle wrappper properties +!gradle-wrapper.properties + +# Cache of project +.gradletasknamecache + +# Eclipse Gradle plugin generated files +# Eclipse Core +.project +# JDT-specific (Eclipse Java Development Tools) +.classpath diff --git a/resources/gitignore/Grails.gitignore b/resources/gitignore/Grails.gitignore new file mode 
100644 index 0000000000..9185f14c37 --- /dev/null +++ b/resources/gitignore/Grails.gitignore @@ -0,0 +1,33 @@ +# .gitignore for Grails 1.2 and 1.3 +# Although this should work for most versions of grails, it is +# suggested that you use the "grails integrate-with --git" command +# to generate your .gitignore file. + +# web application files +/web-app/WEB-INF/classes + +# default HSQL database files for production mode +/prodDb.* + +# general HSQL database files +*Db.properties +*Db.script + +# logs +/stacktrace.log +/test/reports +/logs + +# project release file +/*.war + +# plugin release files +/*.zip +/plugin.xml + +# older plugin install locations +/plugins +/web-app/plugins + +# "temporary" build files +/target diff --git a/resources/gitignore/Haskell.gitignore b/resources/gitignore/Haskell.gitignore new file mode 100644 index 0000000000..4c9e245b5d --- /dev/null +++ b/resources/gitignore/Haskell.gitignore @@ -0,0 +1,23 @@ +dist +dist-* +cabal-dev +*.o +*.hi +*.hie +*.chi +*.chs.h +*.dyn_o +*.dyn_hi +.hpc +.hsenv +.cabal-sandbox/ +cabal.sandbox.config +*.prof +*.aux +*.hp +*.eventlog +.stack-work/ +cabal.project.local +cabal.project.local~ +.HTF/ +.ghc.environment.* diff --git a/resources/gitignore/IGORPro.gitignore b/resources/gitignore/IGORPro.gitignore new file mode 100644 index 0000000000..c62be65003 --- /dev/null +++ b/resources/gitignore/IGORPro.gitignore @@ -0,0 +1,5 @@ +# Avoid including Experiment files: they can be created and edited locally to test the ipf files +*.pxp +*.pxt +*.uxp +*.uxt diff --git a/resources/gitignore/Idris.gitignore b/resources/gitignore/Idris.gitignore new file mode 100644 index 0000000000..0f4e72c71c --- /dev/null +++ b/resources/gitignore/Idris.gitignore @@ -0,0 +1,7 @@ +# Idris 2 +*.ttc +*.ttm + +# Idris 1 +*.ibc +*.o diff --git a/resources/gitignore/JBoss.gitignore b/resources/gitignore/JBoss.gitignore new file mode 100644 index 0000000000..75d1731ed9 --- /dev/null +++ b/resources/gitignore/JBoss.gitignore @@ -0,0 +1,19 @@ +jboss/server/all/deploy/project.ext +jboss/server/default/deploy/project.ext +jboss/server/minimal/deploy/project.ext +jboss/server/all/log/*.log +jboss/server/all/tmp/**/* +jboss/server/all/data/**/* +jboss/server/all/work/**/* +jboss/server/default/log/*.log +jboss/server/default/tmp/**/* +jboss/server/default/data/**/* +jboss/server/default/work/**/* +jboss/server/minimal/log/*.log +jboss/server/minimal/tmp/**/* +jboss/server/minimal/data/**/* +jboss/server/minimal/work/**/* + +# deployed package files # + +*.DEPLOYED diff --git a/resources/gitignore/JENKINS_HOME.gitignore b/resources/gitignore/JENKINS_HOME.gitignore new file mode 100644 index 0000000000..2516c09949 --- /dev/null +++ b/resources/gitignore/JENKINS_HOME.gitignore @@ -0,0 +1,50 @@ +# Learn more about Jenkins and JENKINS_HOME directory for which this file is +# intended. +# +# http://jenkins-ci.org/ +# https://wiki.jenkins-ci.org/display/JENKINS/Administering+Jenkins +# +# Note: secret.key is purposefully not tracked by git. This should be backed up +# separately because configs may contain secrets which were encrypted using the +# secret.key. To back up secrets use 'tar -czf /tmp/secrets.tgz secret*' and +# save the file separate from your repository. If you want secrets backed up +# with configuration, then see the bottom of this file for an example. + +# Ignore all JENKINS_HOME except jobs directory, root xml config, and +# .gitignore file. +/* +!/jobs +!/.gitignore +!/*.xml + +# Ignore all files in jobs subdirectories except for folders. 
+# Note: git doesn't track folders, only file content. +jobs/** +!jobs/**/ + +# Uncomment the following line to save next build numbers with config. + +#!jobs/**/nextBuildNumber + +# For performance reasons, we want to ignore builds in Jenkins jobs because it +# contains many tiny files on large installations. This can impact git +# performance when running even basic commands like 'git status'. +builds +indexing + +# Exclude only config.xml files in repository subdirectories. +!config.xml + +# Don't track workspaces (when users build on the master). +jobs/**/*workspace + +# Security warning: If secrets are included with your configuration, then an +# adversary will be able to decrypt all encrypted secrets within Jenkins +# config. Including secrets is a bad practice, but the example is included in +# case someone still wants it for convenience. Uncomment the following line to +# include secrets for decryption with repository configuration in Git. + +#!/secret* + +# As a result, only Jenkins settings and job config.xml files in JENKINS_HOME +# will be tracked by git. diff --git a/resources/gitignore/Java.gitignore b/resources/gitignore/Java.gitignore new file mode 100644 index 0000000000..524f0963bd --- /dev/null +++ b/resources/gitignore/Java.gitignore @@ -0,0 +1,24 @@ +# Compiled class file +*.class + +# Log file +*.log + +# BlueJ files +*.ctxt + +# Mobile Tools for Java (J2ME) +.mtj.tmp/ + +# Package Files # +*.jar +*.war +*.nar +*.ear +*.zip +*.tar.gz +*.rar + +# virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml +hs_err_pid* +replay_pid* diff --git a/resources/gitignore/Jekyll.gitignore b/resources/gitignore/Jekyll.gitignore new file mode 100644 index 0000000000..74a9223a45 --- /dev/null +++ b/resources/gitignore/Jekyll.gitignore @@ -0,0 +1,7 @@ +_site/ +.sass-cache/ +.jekyll-cache/ +.jekyll-metadata +# Ignore folders generated by Bundler +.bundle/ +vendor/ diff --git a/resources/gitignore/Joomla.gitignore b/resources/gitignore/Joomla.gitignore new file mode 100644 index 0000000000..41a9ba415c --- /dev/null +++ b/resources/gitignore/Joomla.gitignore @@ -0,0 +1,705 @@ +/.htaccess +/administrator/cache/* +/administrator/components/com_actionlogs/* +/administrator/components/com_admin/* +/administrator/components/com_ajax/* +/administrator/components/com_associations/* +/administrator/components/com_banners/* +/administrator/components/com_cache/* +/administrator/components/com_categories/* +/administrator/components/com_checkin/* +/administrator/components/com_config/* +/administrator/components/com_contact/* +/administrator/components/com_content/* +/administrator/components/com_contenthistory/* +/administrator/components/com_cpanel/* +/administrator/components/com_fields/* +/administrator/components/com_finder/* +/administrator/components/com_installer/* +/administrator/components/com_joomlaupdate/* +/administrator/components/com_languages/* +/administrator/components/com_login/* +/administrator/components/com_media/* +/administrator/components/com_menus/* +/administrator/components/com_messages/* +/administrator/components/com_modules/* +/administrator/components/com_newsfeeds/* +/administrator/components/com_plugins/* +/administrator/components/com_postinstall/* +/administrator/components/com_privacy/* +/administrator/components/com_redirect/* +/administrator/components/com_search/* +/administrator/components/com_tags/* +/administrator/components/com_templates/* +/administrator/components/com_users/* +/administrator/help/* 
+/administrator/includes/* +/administrator/index.php +/administrator/language/en-GB/en-GB.com_actionlogs.ini +/administrator/language/en-GB/en-GB.com_actionlogs.sys.ini +/administrator/language/en-GB/en-GB.com_admin.ini +/administrator/language/en-GB/en-GB.com_admin.sys.ini +/administrator/language/en-GB/en-GB.com_ajax.ini +/administrator/language/en-GB/en-GB.com_ajax.sys.ini +/administrator/language/en-GB/en-GB.com_associations.ini +/administrator/language/en-GB/en-GB.com_associations.sys.ini +/administrator/language/en-GB/en-GB.com_banners.ini +/administrator/language/en-GB/en-GB.com_banners.sys.ini +/administrator/language/en-GB/en-GB.com_cache.ini +/administrator/language/en-GB/en-GB.com_cache.sys.ini +/administrator/language/en-GB/en-GB.com_categories.ini +/administrator/language/en-GB/en-GB.com_categories.sys.ini +/administrator/language/en-GB/en-GB.com_checkin.ini +/administrator/language/en-GB/en-GB.com_checkin.sys.ini +/administrator/language/en-GB/en-GB.com_config.ini +/administrator/language/en-GB/en-GB.com_config.sys.ini +/administrator/language/en-GB/en-GB.com_contact.ini +/administrator/language/en-GB/en-GB.com_contact.sys.ini +/administrator/language/en-GB/en-GB.com_content.ini +/administrator/language/en-GB/en-GB.com_content.sys.ini +/administrator/language/en-GB/en-GB.com_contenthistory.ini +/administrator/language/en-GB/en-GB.com_contenthistory.sys.ini +/administrator/language/en-GB/en-GB.com_cpanel.ini +/administrator/language/en-GB/en-GB.com_cpanel.sys.ini +/administrator/language/en-GB/en-GB.com_fields.ini +/administrator/language/en-GB/en-GB.com_fields.sys.ini +/administrator/language/en-GB/en-GB.com_finder.ini +/administrator/language/en-GB/en-GB.com_finder.sys.ini +/administrator/language/en-GB/en-GB.com_installer.ini +/administrator/language/en-GB/en-GB.com_installer.sys.ini +/administrator/language/en-GB/en-GB.com_joomlaupdate.ini +/administrator/language/en-GB/en-GB.com_joomlaupdate.sys.ini +/administrator/language/en-GB/en-GB.com_languages.ini +/administrator/language/en-GB/en-GB.com_languages.sys.ini +/administrator/language/en-GB/en-GB.com_login.ini +/administrator/language/en-GB/en-GB.com_login.sys.ini +/administrator/language/en-GB/en-GB.com_mailto.sys.ini +/administrator/language/en-GB/en-GB.com_media.ini +/administrator/language/en-GB/en-GB.com_media.sys.ini +/administrator/language/en-GB/en-GB.com_menus.ini +/administrator/language/en-GB/en-GB.com_menus.sys.ini +/administrator/language/en-GB/en-GB.com_messages.ini +/administrator/language/en-GB/en-GB.com_messages.sys.ini +/administrator/language/en-GB/en-GB.com_modules.ini +/administrator/language/en-GB/en-GB.com_modules.sys.ini +/administrator/language/en-GB/en-GB.com_newsfeeds.ini +/administrator/language/en-GB/en-GB.com_newsfeeds.sys.ini +/administrator/language/en-GB/en-GB.com_plugins.ini +/administrator/language/en-GB/en-GB.com_plugins.sys.ini +/administrator/language/en-GB/en-GB.com_postinstall.ini +/administrator/language/en-GB/en-GB.com_postinstall.sys.ini +/administrator/language/en-GB/en-GB.com_privacy.ini +/administrator/language/en-GB/en-GB.com_privacy.sys.ini +/administrator/language/en-GB/en-GB.com_redirect.ini +/administrator/language/en-GB/en-GB.com_redirect.sys.ini +/administrator/language/en-GB/en-GB.com_search.ini +/administrator/language/en-GB/en-GB.com_search.sys.ini +/administrator/language/en-GB/en-GB.com_tags.ini +/administrator/language/en-GB/en-GB.com_tags.sys.ini +/administrator/language/en-GB/en-GB.com_templates.ini +/administrator/language/en-GB/en-GB.com_templates.sys.ini 
+/administrator/language/en-GB/en-GB.com_users.ini +/administrator/language/en-GB/en-GB.com_users.sys.ini +/administrator/language/en-GB/en-GB.com_weblinks.ini +/administrator/language/en-GB/en-GB.com_weblinks.sys.ini +/administrator/language/en-GB/en-GB.com_wrapper.ini +/administrator/language/en-GB/en-GB.com_wrapper.sys.ini +/administrator/language/en-GB/en-GB.ini +/administrator/language/en-GB/en-GB.lib_joomla.ini +/administrator/language/en-GB/en-GB.localise.php +/administrator/language/en-GB/en-GB.mod_custom.ini +/administrator/language/en-GB/en-GB.mod_custom.sys.ini +/administrator/language/en-GB/en-GB.mod_feed.ini +/administrator/language/en-GB/en-GB.mod_feed.sys.ini +/administrator/language/en-GB/en-GB.mod_latest.ini +/administrator/language/en-GB/en-GB.mod_latest.sys.ini +/administrator/language/en-GB/en-GB.mod_latestactions.ini +/administrator/language/en-GB/en-GB.mod_latestactions.sys.ini +/administrator/language/en-GB/en-GB.mod_logged.ini +/administrator/language/en-GB/en-GB.mod_logged.sys.ini +/administrator/language/en-GB/en-GB.mod_login.ini +/administrator/language/en-GB/en-GB.mod_login.sys.ini +/administrator/language/en-GB/en-GB.mod_menu.ini +/administrator/language/en-GB/en-GB.mod_menu.sys.ini +/administrator/language/en-GB/en-GB.mod_multilangstatus.ini +/administrator/language/en-GB/en-GB.mod_multilangstatus.sys.ini +/administrator/language/en-GB/en-GB.mod_online.ini +/administrator/language/en-GB/en-GB.mod_online.sys.ini +/administrator/language/en-GB/en-GB.mod_popular.ini +/administrator/language/en-GB/en-GB.mod_popular.sys.ini +/administrator/language/en-GB/en-GB.mod_privacy_dashboard.ini +/administrator/language/en-GB/en-GB.mod_privacy_dashboard.sys.ini +/administrator/language/en-GB/en-GB.mod_quickicon.ini +/administrator/language/en-GB/en-GB.mod_quickicon.sys.ini +/administrator/language/en-GB/en-GB.mod_sampledata.ini +/administrator/language/en-GB/en-GB.mod_sampledata.sys.ini +/administrator/language/en-GB/en-GB.mod_stats_admin.ini +/administrator/language/en-GB/en-GB.mod_stats_admin.sys.ini +/administrator/language/en-GB/en-GB.mod_status.ini +/administrator/language/en-GB/en-GB.mod_status.sys.ini +/administrator/language/en-GB/en-GB.mod_submenu.ini +/administrator/language/en-GB/en-GB.mod_submenu.sys.ini +/administrator/language/en-GB/en-GB.mod_title.ini +/administrator/language/en-GB/en-GB.mod_title.sys.ini +/administrator/language/en-GB/en-GB.mod_toolbar.ini +/administrator/language/en-GB/en-GB.mod_toolbar.sys.ini +/administrator/language/en-GB/en-GB.mod_unread.ini +/administrator/language/en-GB/en-GB.mod_unread.sys.ini +/administrator/language/en-GB/en-GB.mod_version.ini +/administrator/language/en-GB/en-GB.mod_version.sys.ini +/administrator/language/en-GB/en-GB.plg_actionlog_joomla.ini +/administrator/language/en-GB/en-GB.plg_actionlog_joomla.sys.ini +/administrator/language/en-GB/en-GB.plg_authentication_cookie.ini +/administrator/language/en-GB/en-GB.plg_authentication_cookie.sys.ini +/administrator/language/en-GB/en-GB.plg_authentication_example.ini +/administrator/language/en-GB/en-GB.plg_authentication_example.sys.ini +/administrator/language/en-GB/en-GB.plg_authentication_gmail.ini +/administrator/language/en-GB/en-GB.plg_authentication_gmail.sys.ini +/administrator/language/en-GB/en-GB.plg_authentication_joomla.ini +/administrator/language/en-GB/en-GB.plg_authentication_joomla.sys.ini +/administrator/language/en-GB/en-GB.plg_authentication_ldap.ini +/administrator/language/en-GB/en-GB.plg_authentication_ldap.sys.ini 
+/administrator/language/en-GB/en-GB.plg_captcha_recaptcha.ini +/administrator/language/en-GB/en-GB.plg_captcha_recaptcha.sys.ini +/administrator/language/en-GB/en-GB.plg_captcha_recaptcha_invisible.ini +/administrator/language/en-GB/en-GB.plg_captcha_recaptcha_invisible.sys.ini +/administrator/language/en-GB/en-GB.plg_content_confirmconsent.ini +/administrator/language/en-GB/en-GB.plg_content_confirmconsent.sys.ini +/administrator/language/en-GB/en-GB.plg_content_contact.ini +/administrator/language/en-GB/en-GB.plg_content_contact.sys.ini +/administrator/language/en-GB/en-GB.plg_content_emailcloak.ini +/administrator/language/en-GB/en-GB.plg_content_emailcloak.sys.ini +/administrator/language/en-GB/en-GB.plg_content_fields.ini +/administrator/language/en-GB/en-GB.plg_content_fields.sys.ini +/administrator/language/en-GB/en-GB.plg_content_finder.ini +/administrator/language/en-GB/en-GB.plg_content_finder.sys.ini +/administrator/language/en-GB/en-GB.plg_content_geshi.ini +/administrator/language/en-GB/en-GB.plg_content_geshi.sys.ini +/administrator/language/en-GB/en-GB.plg_content_joomla.ini +/administrator/language/en-GB/en-GB.plg_content_joomla.sys.ini +/administrator/language/en-GB/en-GB.plg_content_loadmodule.ini +/administrator/language/en-GB/en-GB.plg_content_loadmodule.sys.ini +/administrator/language/en-GB/en-GB.plg_content_pagebreak.ini +/administrator/language/en-GB/en-GB.plg_content_pagebreak.sys.ini +/administrator/language/en-GB/en-GB.plg_content_pagenavigation.ini +/administrator/language/en-GB/en-GB.plg_content_pagenavigation.sys.ini +/administrator/language/en-GB/en-GB.plg_content_vote.ini +/administrator/language/en-GB/en-GB.plg_content_vote.sys.ini +/administrator/language/en-GB/en-GB.plg_editors-xtd_article.ini +/administrator/language/en-GB/en-GB.plg_editors-xtd_article.sys.ini +/administrator/language/en-GB/en-GB.plg_editors-xtd_contact.ini +/administrator/language/en-GB/en-GB.plg_editors-xtd_contact.sys.ini +/administrator/language/en-GB/en-GB.plg_editors-xtd_fields.ini +/administrator/language/en-GB/en-GB.plg_editors-xtd_fields.sys.ini +/administrator/language/en-GB/en-GB.plg_editors-xtd_image.ini +/administrator/language/en-GB/en-GB.plg_editors-xtd_image.sys.ini +/administrator/language/en-GB/en-GB.plg_editors-xtd_menu.ini +/administrator/language/en-GB/en-GB.plg_editors-xtd_menu.sys.ini +/administrator/language/en-GB/en-GB.plg_editors-xtd_module.ini +/administrator/language/en-GB/en-GB.plg_editors-xtd_module.sys.ini +/administrator/language/en-GB/en-GB.plg_editors-xtd_pagebreak.ini +/administrator/language/en-GB/en-GB.plg_editors-xtd_pagebreak.sys.ini +/administrator/language/en-GB/en-GB.plg_editors-xtd_readmore.ini +/administrator/language/en-GB/en-GB.plg_editors-xtd_readmore.sys.ini +/administrator/language/en-GB/en-GB.plg_editors_codemirror.ini +/administrator/language/en-GB/en-GB.plg_editors_codemirror.sys.ini +/administrator/language/en-GB/en-GB.plg_editors_none.ini +/administrator/language/en-GB/en-GB.plg_editors_none.sys.ini +/administrator/language/en-GB/en-GB.plg_editors_tinymce.ini +/administrator/language/en-GB/en-GB.plg_editors_tinymce.sys.ini +/administrator/language/en-GB/en-GB.plg_extension_joomla.ini +/administrator/language/en-GB/en-GB.plg_extension_joomla.sys.ini +/administrator/language/en-GB/en-GB.plg_fields_calendar.ini +/administrator/language/en-GB/en-GB.plg_fields_calendar.sys.ini +/administrator/language/en-GB/en-GB.plg_fields_checkboxes.ini +/administrator/language/en-GB/en-GB.plg_fields_checkboxes.sys.ini 
+/administrator/language/en-GB/en-GB.plg_fields_color.ini +/administrator/language/en-GB/en-GB.plg_fields_color.sys.ini +/administrator/language/en-GB/en-GB.plg_fields_editor.ini +/administrator/language/en-GB/en-GB.plg_fields_editor.sys.ini +/administrator/language/en-GB/en-GB.plg_fields_image.ini +/administrator/language/en-GB/en-GB.plg_fields_image.sys.ini +/administrator/language/en-GB/en-GB.plg_fields_imagelist.ini +/administrator/language/en-GB/en-GB.plg_fields_imagelist.sys.ini +/administrator/language/en-GB/en-GB.plg_fields_integer.ini +/administrator/language/en-GB/en-GB.plg_fields_integer.sys.ini +/administrator/language/en-GB/en-GB.plg_fields_list.ini +/administrator/language/en-GB/en-GB.plg_fields_list.sys.ini +/administrator/language/en-GB/en-GB.plg_fields_media.ini +/administrator/language/en-GB/en-GB.plg_fields_media.sys.ini +/administrator/language/en-GB/en-GB.plg_fields_radio.ini +/administrator/language/en-GB/en-GB.plg_fields_radio.sys.ini +/administrator/language/en-GB/en-GB.plg_fields_repeatable.ini +/administrator/language/en-GB/en-GB.plg_fields_repeatable.sys.ini +/administrator/language/en-GB/en-GB.plg_fields_sql.ini +/administrator/language/en-GB/en-GB.plg_fields_sql.sys.ini +/administrator/language/en-GB/en-GB.plg_fields_text.ini +/administrator/language/en-GB/en-GB.plg_fields_text.sys.ini +/administrator/language/en-GB/en-GB.plg_fields_textarea.ini +/administrator/language/en-GB/en-GB.plg_fields_textarea.sys.ini +/administrator/language/en-GB/en-GB.plg_fields_url.ini +/administrator/language/en-GB/en-GB.plg_fields_url.sys.ini +/administrator/language/en-GB/en-GB.plg_fields_user.ini +/administrator/language/en-GB/en-GB.plg_fields_user.sys.ini +/administrator/language/en-GB/en-GB.plg_fields_usergrouplist.ini +/administrator/language/en-GB/en-GB.plg_fields_usergrouplist.sys.ini +/administrator/language/en-GB/en-GB.plg_finder_categories.ini +/administrator/language/en-GB/en-GB.plg_finder_categories.sys.ini +/administrator/language/en-GB/en-GB.plg_finder_contacts.ini +/administrator/language/en-GB/en-GB.plg_finder_contacts.sys.ini +/administrator/language/en-GB/en-GB.plg_finder_content.ini +/administrator/language/en-GB/en-GB.plg_finder_content.sys.ini +/administrator/language/en-GB/en-GB.plg_finder_newsfeeds.ini +/administrator/language/en-GB/en-GB.plg_finder_newsfeeds.sys.ini +/administrator/language/en-GB/en-GB.plg_finder_tags.ini +/administrator/language/en-GB/en-GB.plg_finder_tags.sys.ini +/administrator/language/en-GB/en-GB.plg_finder_weblinks.ini +/administrator/language/en-GB/en-GB.plg_finder_weblinks.sys.ini +/administrator/language/en-GB/en-GB.plg_installer_folderinstaller.ini +/administrator/language/en-GB/en-GB.plg_installer_folderinstaller.sys.ini +/administrator/language/en-GB/en-GB.plg_installer_packageinstaller.ini +/administrator/language/en-GB/en-GB.plg_installer_packageinstaller.sys.ini +/administrator/language/en-GB/en-GB.plg_installer_urlinstaller.ini +/administrator/language/en-GB/en-GB.plg_installer_urlinstaller.sys.ini +/administrator/language/en-GB/en-GB.plg_installer_webinstaller.ini +/administrator/language/en-GB/en-GB.plg_installer_webinstaller.sys.ini +/administrator/language/en-GB/en-GB.plg_privacy_actionlogs.ini +/administrator/language/en-GB/en-GB.plg_privacy_actionlogs.sys.ini +/administrator/language/en-GB/en-GB.plg_privacy_consents.ini +/administrator/language/en-GB/en-GB.plg_privacy_consents.sys.ini +/administrator/language/en-GB/en-GB.plg_privacy_contact.ini +/administrator/language/en-GB/en-GB.plg_privacy_contact.sys.ini 
+/administrator/language/en-GB/en-GB.plg_privacy_content.ini +/administrator/language/en-GB/en-GB.plg_privacy_content.sys.ini +/administrator/language/en-GB/en-GB.plg_privacy_message.ini +/administrator/language/en-GB/en-GB.plg_privacy_message.sys.ini +/administrator/language/en-GB/en-GB.plg_privacy_user.ini +/administrator/language/en-GB/en-GB.plg_privacy_user.sys.ini +/administrator/language/en-GB/en-GB.plg_quickicon_extensionupdate.ini +/administrator/language/en-GB/en-GB.plg_quickicon_extensionupdate.sys.ini +/administrator/language/en-GB/en-GB.plg_quickicon_joomlaupdate.ini +/administrator/language/en-GB/en-GB.plg_quickicon_joomlaupdate.sys.ini +/administrator/language/en-GB/en-GB.plg_quickicon_phpversioncheck.ini +/administrator/language/en-GB/en-GB.plg_quickicon_phpversioncheck.sys.ini +/administrator/language/en-GB/en-GB.plg_quickicon_privacycheck.ini +/administrator/language/en-GB/en-GB.plg_quickicon_privacycheck.sys.ini +/administrator/language/en-GB/en-GB.plg_sampledata_blog.ini +/administrator/language/en-GB/en-GB.plg_sampledata_blog.sys.ini +/administrator/language/en-GB/en-GB.plg_search_categories.ini +/administrator/language/en-GB/en-GB.plg_search_categories.sys.ini +/administrator/language/en-GB/en-GB.plg_search_contacts.ini +/administrator/language/en-GB/en-GB.plg_search_contacts.sys.ini +/administrator/language/en-GB/en-GB.plg_search_content.ini +/administrator/language/en-GB/en-GB.plg_search_content.sys.ini +/administrator/language/en-GB/en-GB.plg_search_newsfeeds.ini +/administrator/language/en-GB/en-GB.plg_search_newsfeeds.sys.ini +/administrator/language/en-GB/en-GB.plg_search_tags.ini +/administrator/language/en-GB/en-GB.plg_search_tags.sys.ini +/administrator/language/en-GB/en-GB.plg_search_weblinks.ini +/administrator/language/en-GB/en-GB.plg_search_weblinks.sys.ini +/administrator/language/en-GB/en-GB.plg_system_actionlogs.ini +/administrator/language/en-GB/en-GB.plg_system_actionlogs.sys.ini +/administrator/language/en-GB/en-GB.plg_system_cache.ini +/administrator/language/en-GB/en-GB.plg_system_cache.sys.ini +/administrator/language/en-GB/en-GB.plg_system_debug.ini +/administrator/language/en-GB/en-GB.plg_system_debug.sys.ini +/administrator/language/en-GB/en-GB.plg_system_fields.ini +/administrator/language/en-GB/en-GB.plg_system_fields.sys.ini +/administrator/language/en-GB/en-GB.plg_system_highlight.ini +/administrator/language/en-GB/en-GB.plg_system_highlight.sys.ini +/administrator/language/en-GB/en-GB.plg_system_languagecode.ini +/administrator/language/en-GB/en-GB.plg_system_languagecode.sys.ini +/administrator/language/en-GB/en-GB.plg_system_languagefilter.ini +/administrator/language/en-GB/en-GB.plg_system_languagefilter.sys.ini +/administrator/language/en-GB/en-GB.plg_system_log.ini +/administrator/language/en-GB/en-GB.plg_system_log.sys.ini +/administrator/language/en-GB/en-GB.plg_system_logout.ini +/administrator/language/en-GB/en-GB.plg_system_logout.sys.ini +/administrator/language/en-GB/en-GB.plg_system_logrotation.ini +/administrator/language/en-GB/en-GB.plg_system_logrotation.sys.ini +/administrator/language/en-GB/en-GB.plg_system_p3p.ini +/administrator/language/en-GB/en-GB.plg_system_p3p.sys.ini +/administrator/language/en-GB/en-GB.plg_system_privacyconsent.ini +/administrator/language/en-GB/en-GB.plg_system_privacyconsent.sys.ini +/administrator/language/en-GB/en-GB.plg_system_redirect.ini +/administrator/language/en-GB/en-GB.plg_system_redirect.sys.ini +/administrator/language/en-GB/en-GB.plg_system_remember.ini 
+/administrator/language/en-GB/en-GB.plg_system_remember.sys.ini +/administrator/language/en-GB/en-GB.plg_system_sef.ini +/administrator/language/en-GB/en-GB.plg_system_sef.sys.ini +/administrator/language/en-GB/en-GB.plg_system_sessiongc.ini +/administrator/language/en-GB/en-GB.plg_system_sessiongc.sys.ini +/administrator/language/en-GB/en-GB.plg_system_stats.ini +/administrator/language/en-GB/en-GB.plg_system_stats.sys.ini +/administrator/language/en-GB/en-GB.plg_system_updatenotification.ini +/administrator/language/en-GB/en-GB.plg_system_updatenotification.sys.ini +/administrator/language/en-GB/en-GB.plg_twofactorauth_totp.ini +/administrator/language/en-GB/en-GB.plg_twofactorauth_totp.sys.ini +/administrator/language/en-GB/en-GB.plg_twofactorauth_yubikey.ini +/administrator/language/en-GB/en-GB.plg_twofactorauth_yubikey.sys.ini +/administrator/language/en-GB/en-GB.plg_user_contactcreator.ini +/administrator/language/en-GB/en-GB.plg_user_contactcreator.sys.ini +/administrator/language/en-GB/en-GB.plg_user_joomla.ini +/administrator/language/en-GB/en-GB.plg_user_joomla.sys.ini +/administrator/language/en-GB/en-GB.plg_user_profile.ini +/administrator/language/en-GB/en-GB.plg_user_profile.sys.ini +/administrator/language/en-GB/en-GB.plg_user_terms.ini +/administrator/language/en-GB/en-GB.plg_user_terms.sys.ini +/administrator/language/en-GB/en-GB.tpl_hathor.ini +/administrator/language/en-GB/en-GB.tpl_hathor.sys.ini +/administrator/language/en-GB/en-GB.tpl_isis.ini +/administrator/language/en-GB/en-GB.tpl_isis.sys.ini +/administrator/language/en-GB/en-GB.xml +/administrator/language/en-GB/install.xml +/administrator/language/overrides/* +/administrator/language/index.html +/administrator/logs/* +/administrator/manifests/files/joomla.xml +/administrator/manifests/libraries/fof.xml +/administrator/manifests/libraries/idna_convert.xml +/administrator/manifests/libraries/joomla.xml +/administrator/manifests/libraries/phpass.xml +/administrator/manifests/libraries/phputf8.xml +/administrator/manifests/packages/pkg_en-GB.xml +/administrator/manifests/packages/index.html +/administrator/modules/mod_custom/* +/administrator/modules/mod_feed/* +/administrator/modules/mod_latest/* +/administrator/modules/mod_latestactions/* +/administrator/modules/mod_logged/* +/administrator/modules/mod_login/* +/administrator/modules/mod_menu/* +/administrator/modules/mod_multilangstatus/* +/administrator/modules/mod_online/* +/administrator/modules/mod_popular/* +/administrator/modules/mod_privacy_dashboard/* +/administrator/modules/mod_quickicon/* +/administrator/modules/mod_sampledata/* +/administrator/modules/mod_stats_admin/* +/administrator/modules/mod_status/* +/administrator/modules/mod_submenu/* +/administrator/modules/mod_title/* +/administrator/modules/mod_toolbar/* +/administrator/modules/mod_unread/* +/administrator/modules/mod_version/* +/administrator/templates/hathor/* +/administrator/templates/isis/* +/administrator/templates/system/* +/bin/* +/cache/* +/cli/* +/components/com_ajax/* +/components/com_banners/* +/components/com_config/* +/components/com_contact/* +/components/com_content/* +/components/com_contenthistory/* +/components/com_fields/* +/components/com_finder/* +/components/com_mailto/* +/components/com_media/* +/components/com_menus/* +/components/com_modules/* +/components/com_newsfeeds/* +/components/com_privacy/* +/components/com_search/* +/components/com_tags/* +/components/com_users/* +/components/com_wrapper/* +/components/index.html +/images/banners/* +/images/headers/* 
+/images/sampledata/* +/images/index.html +/images/joomla* +/images/powered_by.png +/includes/* +/installation/* +/language/en-GB/en-GB.com_ajax.ini +/language/en-GB/en-GB.com_config.ini +/language/en-GB/en-GB.com_contact.ini +/language/en-GB/en-GB.com_content.ini +/language/en-GB/en-GB.com_finder.ini +/language/en-GB/en-GB.com_mailto.ini +/language/en-GB/en-GB.com_media.ini +/language/en-GB/en-GB.com_messages.ini +/language/en-GB/en-GB.com_newsfeeds.ini +/language/en-GB/en-GB.com_privacy.ini +/language/en-GB/en-GB.com_search.ini +/language/en-GB/en-GB.com_tags.ini +/language/en-GB/en-GB.com_users.ini +/language/en-GB/en-GB.com_weblinks.ini +/language/en-GB/en-GB.com_wrapper.ini +/language/en-GB/en-GB.files_joomla.sys.ini +/language/en-GB/en-GB.finder_cli.ini +/language/en-GB/en-GB.ini +/language/en-GB/en-GB.lib_fof.ini +/language/en-GB/en-GB.lib_fof.sys.ini +/language/en-GB/en-GB.lib_idna_convert.sys.ini +/language/en-GB/en-GB.lib_joomla.ini +/language/en-GB/en-GB.lib_joomla.sys.ini +/language/en-GB/en-GB.lib_phpass.sys.ini +/language/en-GB/en-GB.lib_phpmailer.sys.ini +/language/en-GB/en-GB.lib_phputf8.sys.ini +/language/en-GB/en-GB.lib_simplepie.sys.ini +/language/en-GB/en-GB.localise.php +/language/en-GB/en-GB.mod_articles_archive.ini +/language/en-GB/en-GB.mod_articles_archive.sys.ini +/language/en-GB/en-GB.mod_articles_categories.ini +/language/en-GB/en-GB.mod_articles_categories.sys.ini +/language/en-GB/en-GB.mod_articles_category.ini +/language/en-GB/en-GB.mod_articles_category.sys.ini +/language/en-GB/en-GB.mod_articles_latest.ini +/language/en-GB/en-GB.mod_articles_latest.sys.ini +/language/en-GB/en-GB.mod_articles_news.ini +/language/en-GB/en-GB.mod_articles_news.sys.ini +/language/en-GB/en-GB.mod_articles_popular.ini +/language/en-GB/en-GB.mod_articles_popular.sys.ini +/language/en-GB/en-GB.mod_banners.ini +/language/en-GB/en-GB.mod_banners.sys.ini +/language/en-GB/en-GB.mod_breadcrumbs.ini +/language/en-GB/en-GB.mod_breadcrumbs.sys.ini +/language/en-GB/en-GB.mod_custom.ini +/language/en-GB/en-GB.mod_custom.sys.ini +/language/en-GB/en-GB.mod_feed.ini +/language/en-GB/en-GB.mod_feed.sys.ini +/language/en-GB/en-GB.mod_finder.ini +/language/en-GB/en-GB.mod_finder.sys.ini +/language/en-GB/en-GB.mod_footer.ini +/language/en-GB/en-GB.mod_footer.sys.ini +/language/en-GB/en-GB.mod_languages.ini +/language/en-GB/en-GB.mod_languages.sys.ini +/language/en-GB/en-GB.mod_login.ini +/language/en-GB/en-GB.mod_login.sys.ini +/language/en-GB/en-GB.mod_menu.ini +/language/en-GB/en-GB.mod_menu.sys.ini +/language/en-GB/en-GB.mod_random_image.ini +/language/en-GB/en-GB.mod_random_image.sys.ini +/language/en-GB/en-GB.mod_related_items.ini +/language/en-GB/en-GB.mod_related_items.sys.ini +/language/en-GB/en-GB.mod_search.ini +/language/en-GB/en-GB.mod_search.sys.ini +/language/en-GB/en-GB.mod_stats.ini +/language/en-GB/en-GB.mod_stats.sys.ini +/language/en-GB/en-GB.mod_syndicate.ini +/language/en-GB/en-GB.mod_syndicate.sys.ini +/language/en-GB/en-GB.mod_tags_popular.ini +/language/en-GB/en-GB.mod_tags_popular.sys.ini +/language/en-GB/en-GB.mod_tags_similar.ini +/language/en-GB/en-GB.mod_tags_similar.sys.ini +/language/en-GB/en-GB.mod_users_latest.ini +/language/en-GB/en-GB.mod_users_latest.sys.ini +/language/en-GB/en-GB.mod_weblinks.ini +/language/en-GB/en-GB.mod_weblinks.sys.ini +/language/en-GB/en-GB.mod_whosonline.ini +/language/en-GB/en-GB.mod_whosonline.sys.ini +/language/en-GB/en-GB.mod_wrapper.ini +/language/en-GB/en-GB.mod_wrapper.sys.ini +/language/en-GB/en-GB.tpl_atomic.ini 
+/language/en-GB/en-GB.tpl_atomic.sys.ini +/language/en-GB/en-GB.tpl_beez3.ini +/language/en-GB/en-GB.tpl_beez3.sys.ini +/language/en-GB/en-GB.tpl_beez5.ini +/language/en-GB/en-GB.tpl_beez5.sys.ini +/language/en-GB/en-GB.tpl_beez_20.ini +/language/en-GB/en-GB.tpl_beez_20.sys.ini +/language/en-GB/en-GB.tpl_protostar.ini +/language/en-GB/en-GB.tpl_protostar.sys.ini +/language/en-GB/en-GB.xml +/language/en-GB/install.xml +/language/overrides/* +/language/index.html +/layouts/joomla/* +/layouts/libraries/* +/layouts/plugins/* +/layouts/index.html +/libraries/cms/* +/libraries/fof/* +/libraries/idna_convert/* +/libraries/joomla/* +/libraries/legacy/* +/libraries/php-encryption/* +/libraries/phpass/* +/libraries/phpmailer/* +/libraries/phputf8/* +/libraries/simplepie/* +/libraries/src/* +/libraries/vendor/* +/libraries/classmap.php +/libraries/cms.php +/libraries/import.legacy.php +/libraries/import.php +/libraries/index.html +/libraries/loader.php +/media/cms/* +/media/com_associations/* +/media/com_contact/* +/media/com_content/* +/media/com_contenthistory/* +/media/com_fields/* +/media/com_finder/* +/media/com_joomlaupdate/* +/media/com_menus/* +/media/com_modules/* +/media/com_wrapper/* +/media/contacts/* +/media/editors/* +/media/jui/* +/media/mailto/* +/media/media/* +/media/mod_languages/* +/media/mod_sampledata/* +/media/overrider/* +/media/plg_captcha_recaptcha/* +/media/plg_captcha_recaptcha_invisible/* +/media/plg_quickicon_extensionupdate/* +/media/plg_quickicon_joomlaupdate/* +/media/plg_quickicon_privacycheck/* +/media/plg_system_highlight/* +/media/plg_system_stats/* +/media/plg_twofactorauth_totp/* +/media/system/* +/media/index.html +/modules/mod_articles_archive/* +/modules/mod_articles_categories/* +/modules/mod_articles_category/* +/modules/mod_articles_latest/* +/modules/mod_articles_news/* +/modules/mod_articles_popular/* +/modules/mod_banners/* +/modules/mod_breadcrumbs/* +/modules/mod_custom/* +/modules/mod_feed/* +/modules/mod_finder/* +/modules/mod_footer/* +/modules/mod_languages/* +/modules/mod_login/* +/modules/mod_menu/* +/modules/mod_random_image/* +/modules/mod_related_items/* +/modules/mod_search/* +/modules/mod_stats/* +/modules/mod_syndicate/* +/modules/mod_tags_popular/* +/modules/mod_tags_similar/* +/modules/mod_users_latest/* +/modules/mod_whosonline/* +/modules/mod_wrapper/* +/modules/index.html +/plugins/actionlog/joomla/* +/plugins/authentication/cookie/* +/plugins/authentication/example/* +/plugins/authentication/gmail/* +/plugins/authentication/joomla/* +/plugins/authentication/ldap/* +/plugins/captcha/recaptcha/* +/plugins/captcha/recaptcha_invisible/* +/plugins/content/confirmconsent/* +/plugins/content/contact/* +/plugins/content/emailcloak/* +/plugins/content/example/* +/plugins/content/fields/* +/plugins/content/finder/* +/plugins/content/geshi/* +/plugins/content/joomla/* +/plugins/content/loadmodule/* +/plugins/content/pagebreak/* +/plugins/content/pagenavigation/* +/plugins/content/vote/* +/plugins/editors/codemirror/* +/plugins/editors/none/* +/plugins/editors/tinymce/* +/plugins/editors-xtd/article/* +/plugins/editors-xtd/contact/* +/plugins/editors-xtd/fields/* +/plugins/editors-xtd/image/* +/plugins/editors-xtd/menu/* +/plugins/editors-xtd/module/* +/plugins/editors-xtd/pagebreak/* +/plugins/editors-xtd/readmore/* +/plugins/extension/example/* +/plugins/extension/joomla/* +/plugins/fields/calendar/* +/plugins/fields/checkboxes/* +/plugins/fields/color/* +/plugins/fields/editor/* +/plugins/fields/imagelist/* +/plugins/fields/integer/* 
+/plugins/fields/list/* +/plugins/fields/media/* +/plugins/fields/radio/* +/plugins/fields/repeatable/* +/plugins/fields/sql/* +/plugins/fields/text/* +/plugins/fields/textarea/* +/plugins/fields/url/* +/plugins/fields/user/* +/plugins/fields/usergrouplist/* +/plugins/finder/categories/* +/plugins/finder/contacts/* +/plugins/finder/content/* +/plugins/finder/newsfeeds/* +/plugins/finder/tags/* +/plugins/installer/folderinstaller/* +/plugins/installer/packageinstaller/* +/plugins/installer/urlinstaller/* +/plugins/privacy/actionlogs/* +/plugins/privacy/consents/* +/plugins/privacy/contact/* +/plugins/privacy/content/* +/plugins/privacy/message/* +/plugins/privacy/user/* +/plugins/quickicon/extensionupdate/* +/plugins/quickicon/joomlaupdate/* +/plugins/quickicon/phpversioncheck/* +/plugins/quickicon/privacycheck/* +/plugins/quickicon/index.html +/plugins/sampledata/blog/* +/plugins/search/categories/* +/plugins/search/contacts/* +/plugins/search/content/* +/plugins/search/newsfeeds/* +/plugins/search/tags/* +/plugins/search/weblinks/* +/plugins/search/index.html +/plugins/system/actionlogs/* +/plugins/system/cache/* +/plugins/system/debug/* +/plugins/system/fields/* +/plugins/system/highlight/* +/plugins/system/languagecode/* +/plugins/system/languagefilter/* +/plugins/system/log/* +/plugins/system/logout/* +/plugins/system/logrotation/* +/plugins/system/p3p/* +/plugins/system/privacyconsent/* +/plugins/system/redirect/* +/plugins/system/remember/* +/plugins/system/sef/* +/plugins/system/sessiongc/* +/plugins/system/stats/* +/plugins/system/updatenotification/* +/plugins/system/index.html +/plugins/twofactorauth/totp/* +/plugins/twofactorauth/yubikey/* +/plugins/user/contactcreator/* +/plugins/user/example/* +/plugins/user/joomla/* +/plugins/user/profile/* +/plugins/user/terms/* +/plugins/user/index.html +/plugins/index.html +/templates/beez3/* +/templates/protostar/* +/templates/system/* +/templates/index.html +/tmp/* +/configuration.php +/htaccess.txt +/index.php +/joomla.xml +/LICENSE.txt +/README.txt +/robots.txt.dist +/web.config.txt diff --git a/resources/gitignore/Julia.gitignore b/resources/gitignore/Julia.gitignore new file mode 100644 index 0000000000..29126e47b0 --- /dev/null +++ b/resources/gitignore/Julia.gitignore @@ -0,0 +1,24 @@ +# Files generated by invoking Julia with --code-coverage +*.jl.cov +*.jl.*.cov + +# Files generated by invoking Julia with --track-allocation +*.jl.mem + +# System-specific files and directories generated by the BinaryProvider and BinDeps packages +# They contain absolute paths specific to the host computer, and so should not be committed +deps/deps.jl +deps/build.log +deps/downloads/ +deps/usr/ +deps/src/ + +# Build artifacts for creating documentation generated by the Documenter package +docs/build/ +docs/site/ + +# File generated by Pkg, the package manager, based on a corresponding Project.toml +# It records a fixed state of all packages used by the project. As such, it should not be +# committed for packages, but should be committed for applications that require a static +# environment. 
+Manifest.toml diff --git a/resources/gitignore/KiCad.gitignore b/resources/gitignore/KiCad.gitignore new file mode 100644 index 0000000000..a63bc0e7f7 --- /dev/null +++ b/resources/gitignore/KiCad.gitignore @@ -0,0 +1,29 @@ +# For PCBs designed using KiCad: https://www.kicad.org/ +# Format documentation: https://kicad.org/help/file-formats/ + +# Temporary files +*.000 +*.bak +*.bck +*.kicad_pcb-bak +*.kicad_sch-bak +*-backups +*.kicad_prl +*.sch-bak +*~ +_autosave-* +*.tmp +*-save.pro +*-save.kicad_pcb +fp-info-cache + +# Netlist files (exported from Eeschema) +*.net + +# Autorouter files (exported from Pcbnew) +*.dsn +*.ses + +# Exported BOM files +*.xml +*.csv diff --git a/resources/gitignore/Kohana.gitignore b/resources/gitignore/Kohana.gitignore new file mode 100644 index 0000000000..8b2ab01a80 --- /dev/null +++ b/resources/gitignore/Kohana.gitignore @@ -0,0 +1,2 @@ +application/cache/* +application/logs/* diff --git a/resources/gitignore/Kotlin.gitignore b/resources/gitignore/Kotlin.gitignore new file mode 100644 index 0000000000..38105ec234 --- /dev/null +++ b/resources/gitignore/Kotlin.gitignore @@ -0,0 +1,24 @@ +# Compiled class file +*.class + +# Log file +*.log + +# BlueJ files +*.ctxt + +# Mobile Tools for Java (J2ME) +.mtj.tmp/ + +# Package Files # +*.jar +*.war +*.nar +*.ear +*.zip +*.tar.gz +*.rar + +# virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml +hs_err_pid* +replay_pid* \ No newline at end of file diff --git a/resources/gitignore/LICENSE b/resources/gitignore/LICENSE new file mode 100644 index 0000000000..670154e353 --- /dev/null +++ b/resources/gitignore/LICENSE @@ -0,0 +1,116 @@ +CC0 1.0 Universal + +Statement of Purpose + +The laws of most jurisdictions throughout the world automatically confer +exclusive Copyright and Related Rights (defined below) upon the creator and +subsequent owner(s) (each and all, an "owner") of an original work of +authorship and/or a database (each, a "Work"). + +Certain owners wish to permanently relinquish those rights to a Work for the +purpose of contributing to a commons of creative, cultural and scientific +works ("Commons") that the public can reliably and without fear of later +claims of infringement build upon, modify, incorporate in other works, reuse +and redistribute as freely as possible in any form whatsoever and for any +purposes, including without limitation commercial purposes. These owners may +contribute to the Commons to promote the ideal of a free culture and the +further production of creative, cultural and scientific works, or to gain +reputation or greater distribution for their Work in part through the use and +efforts of others. + +For these and/or other purposes and motivations, and without any expectation +of additional consideration or compensation, the person associating CC0 with a +Work (the "Affirmer"), to the extent that he or she is an owner of Copyright +and Related Rights in the Work, voluntarily elects to apply CC0 to the Work +and publicly distribute the Work under its terms, with knowledge of his or her +Copyright and Related Rights in the Work and the meaning and intended legal +effect of CC0 on those rights. + +1. Copyright and Related Rights. A Work made available under CC0 may be +protected by copyright and related or neighboring rights ("Copyright and +Related Rights"). Copyright and Related Rights include, but are not limited +to, the following: + + i. the right to reproduce, adapt, distribute, perform, display, communicate, + and translate a Work; + + ii. 
moral rights retained by the original author(s) and/or performer(s); + + iii. publicity and privacy rights pertaining to a person's image or likeness + depicted in a Work; + + iv. rights protecting against unfair competition in regards to a Work, + subject to the limitations in paragraph 4(a), below; + + v. rights protecting the extraction, dissemination, use and reuse of data in + a Work; + + vi. database rights (such as those arising under Directive 96/9/EC of the + European Parliament and of the Council of 11 March 1996 on the legal + protection of databases, and under any national implementation thereof, + including any amended or successor version of such directive); and + + vii. other similar, equivalent or corresponding rights throughout the world + based on applicable law or treaty, and any national implementations thereof. + +2. Waiver. To the greatest extent permitted by, but not in contravention of, +applicable law, Affirmer hereby overtly, fully, permanently, irrevocably and +unconditionally waives, abandons, and surrenders all of Affirmer's Copyright +and Related Rights and associated claims and causes of action, whether now +known or unknown (including existing as well as future claims and causes of +action), in the Work (i) in all territories worldwide, (ii) for the maximum +duration provided by applicable law or treaty (including future time +extensions), (iii) in any current or future medium and for any number of +copies, and (iv) for any purpose whatsoever, including without limitation +commercial, advertising or promotional purposes (the "Waiver"). Affirmer makes +the Waiver for the benefit of each member of the public at large and to the +detriment of Affirmer's heirs and successors, fully intending that such Waiver +shall not be subject to revocation, rescission, cancellation, termination, or +any other legal or equitable action to disrupt the quiet enjoyment of the Work +by the public as contemplated by Affirmer's express Statement of Purpose. + +3. Public License Fallback. Should any part of the Waiver for any reason be +judged legally invalid or ineffective under applicable law, then the Waiver +shall be preserved to the maximum extent permitted taking into account +Affirmer's express Statement of Purpose. In addition, to the extent the Waiver +is so judged Affirmer hereby grants to each affected person a royalty-free, +non transferable, non sublicensable, non exclusive, irrevocable and +unconditional license to exercise Affirmer's Copyright and Related Rights in +the Work (i) in all territories worldwide, (ii) for the maximum duration +provided by applicable law or treaty (including future time extensions), (iii) +in any current or future medium and for any number of copies, and (iv) for any +purpose whatsoever, including without limitation commercial, advertising or +promotional purposes (the "License"). The License shall be deemed effective as +of the date CC0 was applied by Affirmer to the Work. Should any part of the +License for any reason be judged legally invalid or ineffective under +applicable law, such partial invalidity or ineffectiveness shall not +invalidate the remainder of the License, and in such case Affirmer hereby +affirms that he or she will not (i) exercise any of his or her remaining +Copyright and Related Rights in the Work or (ii) assert any associated claims +and causes of action with respect to the Work, in either case contrary to +Affirmer's express Statement of Purpose. + +4. Limitations and Disclaimers. + + a. 
No trademark or patent rights held by Affirmer are waived, abandoned, + surrendered, licensed or otherwise affected by this document. + + b. Affirmer offers the Work as-is and makes no representations or warranties + of any kind concerning the Work, express, implied, statutory or otherwise, + including without limitation warranties of title, merchantability, fitness + for a particular purpose, non infringement, or the absence of latent or + other defects, accuracy, or the present or absence of errors, whether or not + discoverable, all to the greatest extent permissible under applicable law. + + c. Affirmer disclaims responsibility for clearing rights of other persons + that may apply to the Work or any use thereof, including without limitation + any person's Copyright and Related Rights in the Work. Further, Affirmer + disclaims responsibility for obtaining any necessary consents, permissions + or other rights required for any use of the Work. + + d. Affirmer understands and acknowledges that Creative Commons is not a + party to this document and has no duty or obligation with respect to this + CC0 or use of the Work. + +For more information, please see + diff --git a/resources/gitignore/LabVIEW.gitignore b/resources/gitignore/LabVIEW.gitignore new file mode 100644 index 0000000000..31619f5981 --- /dev/null +++ b/resources/gitignore/LabVIEW.gitignore @@ -0,0 +1,17 @@ +# Libraries +*.lvlibp +*.llb + +# Shared objects (inc. Windows DLLs) +*.dll +*.so +*.so.* +*.dylib + +# Executables +*.exe + +# Metadata +*.aliases +*.lvlps +.cache/ diff --git a/resources/gitignore/Laravel.gitignore b/resources/gitignore/Laravel.gitignore new file mode 100644 index 0000000000..297959a19e --- /dev/null +++ b/resources/gitignore/Laravel.gitignore @@ -0,0 +1,23 @@ +/vendor/ +node_modules/ +npm-debug.log +yarn-error.log + +# Laravel 4 specific +bootstrap/compiled.php +app/storage/ + +# Laravel 5 & Lumen specific +public/storage +public/hot + +# Laravel 5 & Lumen specific with changed public path +public_html/storage +public_html/hot + +storage/*.key +.env +Homestead.yaml +Homestead.json +/.vagrant +.phpunit.result.cache diff --git a/resources/gitignore/Leiningen.gitignore b/resources/gitignore/Leiningen.gitignore new file mode 100644 index 0000000000..a4cb69a32c --- /dev/null +++ b/resources/gitignore/Leiningen.gitignore @@ -0,0 +1,14 @@ +pom.xml +pom.xml.asc +*.jar +*.class +/lib/ +/classes/ +/target/ +/checkouts/ +.lein-deps-sum +.lein-repl-history +.lein-plugins/ +.lein-failures +.nrepl-port +.cpcache/ diff --git a/resources/gitignore/LemonStand.gitignore b/resources/gitignore/LemonStand.gitignore new file mode 100644 index 0000000000..c7d94ad34b --- /dev/null +++ b/resources/gitignore/LemonStand.gitignore @@ -0,0 +1,21 @@ +boot.php +index.php +install.php +/config/* +!/config/config.php +/controllers/* +/init/* +/logs/* +/phproad/* +/temp/* +/uploaded/* +/installer_files/* +/modules/backend/* +/modules/blog/* +/modules/cms/* +/modules/core/* +/modules/session/* +/modules/shop/* +/modules/system/* +/modules/users/* +# add content_*.php if you don't want erase client changes to content diff --git a/resources/gitignore/Lilypond.gitignore b/resources/gitignore/Lilypond.gitignore new file mode 100644 index 0000000000..513e6edd9c --- /dev/null +++ b/resources/gitignore/Lilypond.gitignore @@ -0,0 +1,6 @@ +*.pdf +*.ps +*.midi +*.mid +*.log +*~ diff --git a/resources/gitignore/Lithium.gitignore b/resources/gitignore/Lithium.gitignore new file mode 100644 index 0000000000..7b22568ea8 --- /dev/null +++ 
b/resources/gitignore/Lithium.gitignore @@ -0,0 +1,2 @@ +libraries/* +resources/tmp/* diff --git a/resources/gitignore/Lua.gitignore b/resources/gitignore/Lua.gitignore new file mode 100644 index 0000000000..6fd0a376de --- /dev/null +++ b/resources/gitignore/Lua.gitignore @@ -0,0 +1,41 @@ +# Compiled Lua sources +luac.out + +# luarocks build files +*.src.rock +*.zip +*.tar.gz + +# Object files +*.o +*.os +*.ko +*.obj +*.elf + +# Precompiled Headers +*.gch +*.pch + +# Libraries +*.lib +*.a +*.la +*.lo +*.def +*.exp + +# Shared objects (inc. Windows DLLs) +*.dll +*.so +*.so.* +*.dylib + +# Executables +*.exe +*.out +*.app +*.i*86 +*.x86_64 +*.hex + diff --git a/resources/gitignore/Magento.gitignore b/resources/gitignore/Magento.gitignore new file mode 100644 index 0000000000..abe6d79fed --- /dev/null +++ b/resources/gitignore/Magento.gitignore @@ -0,0 +1,45 @@ +#--------------------------# +# Magento Default Files # +#--------------------------# + +/PATCH_*.sh + +/app/etc/local.xml + +/media/* +!/media/.htaccess + +!/media/customer +/media/customer/* +!/media/customer/.htaccess + +!/media/dhl +/media/dhl/* +!/media/dhl/logo.jpg + +!/media/downloadable +/media/downloadable/* +!/media/downloadable/.htaccess + +!/media/xmlconnect +/media/xmlconnect/* + +!/media/xmlconnect/custom +/media/xmlconnect/custom/* +!/media/xmlconnect/custom/ok.gif + +!/media/xmlconnect/original +/media/xmlconnect/original/* +!/media/xmlconnect/original/ok.gif + +!/media/xmlconnect/system +/media/xmlconnect/system/* +!/media/xmlconnect/system/ok.gif + +/var/* +!/var/.htaccess + +!/var/package +/var/package/* +!/var/package/*.xml + diff --git a/resources/gitignore/Maven.gitignore b/resources/gitignore/Maven.gitignore new file mode 100644 index 0000000000..2f4353087f --- /dev/null +++ b/resources/gitignore/Maven.gitignore @@ -0,0 +1,17 @@ +target/ +pom.xml.tag +pom.xml.releaseBackup +pom.xml.versionsBackup +pom.xml.next +release.properties +dependency-reduced-pom.xml +buildNumber.properties +.mvn/timing.properties +# https://github.com/takari/maven-wrapper#usage-without-binary-jar +.mvn/wrapper/maven-wrapper.jar + +# Eclipse m2e generated files +# Eclipse Core +.project +# JDT-specific (Eclipse Java Development Tools) +.classpath diff --git a/resources/gitignore/Mercury.gitignore b/resources/gitignore/Mercury.gitignore new file mode 100644 index 0000000000..70ec869397 --- /dev/null +++ b/resources/gitignore/Mercury.gitignore @@ -0,0 +1,13 @@ +Mercury/ +Mercury.modules +*.mh +*.err +*.init +*.dll +*.exe +*.a +*.so +*.dylib +*.beams +*.d +*.c_date diff --git a/resources/gitignore/MetaProgrammingSystem.gitignore b/resources/gitignore/MetaProgrammingSystem.gitignore new file mode 100644 index 0000000000..3e75841041 --- /dev/null +++ b/resources/gitignore/MetaProgrammingSystem.gitignore @@ -0,0 +1,16 @@ +workspace.xml +junitvmwatcher*.properties +build.properties + +# generated java classes and java source files +# manually add any custom artifacts that can't be generated from the models +# http://confluence.jetbrains.com/display/MPSD25/HowTo+--+MPS+and+Git +classes_gen +source_gen +source_gen.caches + +# generated test code and test results +test_gen +test_gen.caches +TEST-*.xml +junit*.properties diff --git a/resources/gitignore/Nanoc.gitignore b/resources/gitignore/Nanoc.gitignore new file mode 100644 index 0000000000..6f35daaf47 --- /dev/null +++ b/resources/gitignore/Nanoc.gitignore @@ -0,0 +1,10 @@ +# For projects using Nanoc (http://nanoc.ws/) + +# Default location for output (needs to match output_dir's value found in 
nanoc.yaml) +output/ + +# Temporary file directory +tmp/nanoc/ + +# Crash Log +crash.log diff --git a/resources/gitignore/Nim.gitignore b/resources/gitignore/Nim.gitignore new file mode 100644 index 0000000000..32e1dc0fde --- /dev/null +++ b/resources/gitignore/Nim.gitignore @@ -0,0 +1,3 @@ +nimcache/ +nimblecache/ +htmldocs/ diff --git a/resources/gitignore/Node.gitignore b/resources/gitignore/Node.gitignore new file mode 100644 index 0000000000..c6bba59138 --- /dev/null +++ b/resources/gitignore/Node.gitignore @@ -0,0 +1,130 @@ +# Logs +logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* +lerna-debug.log* +.pnpm-debug.log* + +# Diagnostic reports (https://nodejs.org/api/report.html) +report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json + +# Runtime data +pids +*.pid +*.seed +*.pid.lock + +# Directory for instrumented libs generated by jscoverage/JSCover +lib-cov + +# Coverage directory used by tools like istanbul +coverage +*.lcov + +# nyc test coverage +.nyc_output + +# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) +.grunt + +# Bower dependency directory (https://bower.io/) +bower_components + +# node-waf configuration +.lock-wscript + +# Compiled binary addons (https://nodejs.org/api/addons.html) +build/Release + +# Dependency directories +node_modules/ +jspm_packages/ + +# Snowpack dependency directory (https://snowpack.dev/) +web_modules/ + +# TypeScript cache +*.tsbuildinfo + +# Optional npm cache directory +.npm + +# Optional eslint cache +.eslintcache + +# Optional stylelint cache +.stylelintcache + +# Microbundle cache +.rpt2_cache/ +.rts2_cache_cjs/ +.rts2_cache_es/ +.rts2_cache_umd/ + +# Optional REPL history +.node_repl_history + +# Output of 'npm pack' +*.tgz + +# Yarn Integrity file +.yarn-integrity + +# dotenv environment variable files +.env +.env.development.local +.env.test.local +.env.production.local +.env.local + +# parcel-bundler cache (https://parceljs.org/) +.cache +.parcel-cache + +# Next.js build output +.next +out + +# Nuxt.js build / generate output +.nuxt +dist + +# Gatsby files +.cache/ +# Comment in the public line in if your project uses Gatsby and not Next.js +# https://nextjs.org/blog/next-9-1#public-directory-support +# public + +# vuepress build output +.vuepress/dist + +# vuepress v2.x temp and cache directory +.temp +.cache + +# Docusaurus cache and generated files +.docusaurus + +# Serverless directories +.serverless/ + +# FuseBox cache +.fusebox/ + +# DynamoDB Local files +.dynamodb/ + +# TernJS port file +.tern-port + +# Stores VSCode versions used for testing VSCode extensions +.vscode-test + +# yarn v2 +.yarn/cache +.yarn/unplugged +.yarn/build-state.yml +.yarn/install-state.gz +.pnp.* diff --git a/resources/gitignore/OCaml.gitignore b/resources/gitignore/OCaml.gitignore new file mode 100644 index 0000000000..a18e08402b --- /dev/null +++ b/resources/gitignore/OCaml.gitignore @@ -0,0 +1,29 @@ +*.annot +*.cmo +*.cma +*.cmi +*.a +*.o +*.cmx +*.cmxs +*.cmxa + +# ocamlbuild working directory +_build/ + +# ocamlbuild targets +*.byte +*.native + +# oasis generated files +setup.data +setup.log + +# Merlin configuring file for Vim and Emacs +.merlin + +# Dune generated files +*.install + +# Local OPAM switch +_opam/ diff --git a/resources/gitignore/Objective-C.gitignore b/resources/gitignore/Objective-C.gitignore new file mode 100644 index 0000000000..7801c93000 --- /dev/null +++ b/resources/gitignore/Objective-C.gitignore @@ -0,0 +1,68 @@ +# Xcode +# +# gitignore contributors: remember to update Global/Xcode.gitignore, 
Objective-C.gitignore & Swift.gitignore + +## User settings +xcuserdata/ + +## compatibility with Xcode 8 and earlier (ignoring not required starting Xcode 9) +*.xcscmblueprint +*.xccheckout + +## compatibility with Xcode 3 and earlier (ignoring not required starting Xcode 4) +build/ +DerivedData/ +*.moved-aside +*.pbxuser +!default.pbxuser +*.mode1v3 +!default.mode1v3 +*.mode2v3 +!default.mode2v3 +*.perspectivev3 +!default.perspectivev3 + +## Obj-C/Swift specific +*.hmap + +## App packaging +*.ipa +*.dSYM.zip +*.dSYM + +# CocoaPods +# +# We recommend against adding the Pods directory to your .gitignore. However +# you should judge for yourself, the pros and cons are mentioned at: +# https://guides.cocoapods.org/using/using-cocoapods.html#should-i-check-the-pods-directory-into-source-control +# +# Pods/ +# +# Add this line if you want to avoid checking in source code from the Xcode workspace +# *.xcworkspace + +# Carthage +# +# Add this line if you want to avoid checking in source code from Carthage dependencies. +# Carthage/Checkouts + +Carthage/Build/ + +# fastlane +# +# It is recommended to not store the screenshots in the git repo. +# Instead, use fastlane to re-generate the screenshots whenever they are needed. +# For more information about the recommended setup visit: +# https://docs.fastlane.tools/best-practices/source-control/#source-control + +fastlane/report.xml +fastlane/Preview.html +fastlane/screenshots/**/*.png +fastlane/test_output + +# Code Injection +# +# After new code Injection tools there's a generated folder /iOSInjectionProject +# https://github.com/johnno1962/injectionforxcode + +iOSInjectionProject/ diff --git a/resources/gitignore/Opa.gitignore b/resources/gitignore/Opa.gitignore new file mode 100644 index 0000000000..74c6219ced --- /dev/null +++ b/resources/gitignore/Opa.gitignore @@ -0,0 +1,13 @@ +_build +_tracks + +opa-debug-js + +*.opp +*.opx +*.opx.broken +*.dump +*.api +*.api-txt +*.exe +*.log diff --git a/resources/gitignore/OpenCart.gitignore b/resources/gitignore/OpenCart.gitignore new file mode 100644 index 0000000000..97be41faa3 --- /dev/null +++ b/resources/gitignore/OpenCart.gitignore @@ -0,0 +1,20 @@ +.htaccess +/config.php +admin/config.php + +!index.html + +download/ +image/data/ +image/cache/ +system/cache/ +system/logs/ + +system/storage/ + +# vQmod log files +vqmod/logs/* +# vQmod cache files +vqmod/vqcache/* +vqmod/checked.cache +vqmod/mods.cache diff --git a/resources/gitignore/OracleForms.gitignore b/resources/gitignore/OracleForms.gitignore new file mode 100644 index 0000000000..699a494011 --- /dev/null +++ b/resources/gitignore/OracleForms.gitignore @@ -0,0 +1,8 @@ +# Compiled Form Modules +*.fmx + +# Compiled Menu Modules +*.mmx + +# Compiled Pre-Linked Libraries +*.plx diff --git a/resources/gitignore/Packer.gitignore b/resources/gitignore/Packer.gitignore new file mode 100644 index 0000000000..2cbc1ad079 --- /dev/null +++ b/resources/gitignore/Packer.gitignore @@ -0,0 +1,16 @@ +# Cache objects +packer_cache/ + +# Crash log +crash.log + +# https://www.packer.io/guides/hcl/variables +# Exclude all .pkrvars.hcl files, which are likely to contain sensitive data, +# such as password, private keys, and other secrets. These should not be part of +# version control as they are data points which are potentially sensitive and +# subject to change depending on the environment. 
+# +*.pkrvars.hcl + +# For built boxes +*.box diff --git a/resources/gitignore/Perl.gitignore b/resources/gitignore/Perl.gitignore new file mode 100644 index 0000000000..fb8b193173 --- /dev/null +++ b/resources/gitignore/Perl.gitignore @@ -0,0 +1,35 @@ +!Build/ +.last_cover_stats +/META.yml +/META.json +/MYMETA.* +*.o +*.pm.tdy +*.bs + +# Devel::Cover +cover_db/ + +# Devel::NYTProf +nytprof.out + +# Dist::Zilla +/.build/ + +# Module::Build +_build/ +Build +Build.bat + +# Module::Install +inc/ + +# ExtUtils::MakeMaker +/blib/ +/_eumm/ +/*.gz +/Makefile +/Makefile.old +/MANIFEST.bak +/pm_to_blib +/*.zip diff --git a/resources/gitignore/Phalcon.gitignore b/resources/gitignore/Phalcon.gitignore new file mode 100644 index 0000000000..6ffe3aa220 --- /dev/null +++ b/resources/gitignore/Phalcon.gitignore @@ -0,0 +1,2 @@ +/cache/ +/config/development/ diff --git a/resources/gitignore/PlayFramework.gitignore b/resources/gitignore/PlayFramework.gitignore new file mode 100644 index 0000000000..ae5ec9fe1d --- /dev/null +++ b/resources/gitignore/PlayFramework.gitignore @@ -0,0 +1,16 @@ +# Ignore Play! working directory # +bin/ +/db +.eclipse +/lib/ +/logs/ +/modules +/project/project +/project/target +/target +tmp/ +test-result +server.pid +*.eml +/dist/ +.cache diff --git a/resources/gitignore/Plone.gitignore b/resources/gitignore/Plone.gitignore new file mode 100644 index 0000000000..770a8681ac --- /dev/null +++ b/resources/gitignore/Plone.gitignore @@ -0,0 +1,18 @@ +*.pyc +*.pyo +*.tmp* +*.mo +*.egg +*.EGG +*.egg-info +*.EGG-INFO +.*.cfg +bin/ +build/ +develop-eggs/ +downloads/ +eggs/ +fake-eggs/ +parts/ +dist/ +var/ diff --git a/resources/gitignore/Prestashop.gitignore b/resources/gitignore/Prestashop.gitignore new file mode 100644 index 0000000000..9da6d29ad4 --- /dev/null +++ b/resources/gitignore/Prestashop.gitignore @@ -0,0 +1,173 @@ +# Cache, temp and personal files + +/.htaccess +*.log + +# Cache +/cache/* +!/cache/.htaccess +!/cache/cachefs/index.php +!/cache/deprecated.txt +!/cache/index.php +!/cache/purifier/index.php +!/cache/push/activity +!/cache/push/index.php +!/cache/push/trends +!/cache/sandbox/index.php +!/cache/smarty/cache/index.php +!/cache/smarty/compile/index.php +!/cache/smarty/index.php +!/cache/tcpdf/index.php + +# Download +/download/* +!/download/.htaccess +!/download/index.php + +# Images +/img/* +!/img/.htaccess +!/img/index.php +!/img/404.gif +!/img/bg_500.png +!/img/bg_loader.png +!/img/favicon.ico +!/img/loader.gif +!/img/loadingAnimation.gif +!/img/logo.jpg +!/img/logo.png +!/img/logo_invoice.jpg +!/img/logo_stores.png +!/img/macFFBgHack.png +!/img/prestashop-avatar.png +!/img/prestashop@2x.png +!/img/preston-login-wink@2x.png +!/img/preston-login@2x.png +!/img/questionmark.png +!/img/genders/index.php +!/img/admin/index.php +!/img/c/index.php +!/img/cms/index.php +!/img/co/index.php +!/img/jquery-ui +!/img/l/index.php +!/img/m/index.php +!/img/os/index.php +!/img/p/index.php +!/img/s/index.php +!/img/scenes +!/img/st/index.php +!/img/su/index.php +!/img/t/index.php +!/img/tmp/index.php + +# Upload +/upload/* +!/upload/.htaccess + +/vendor/* +/docs/phpdoc-sf/ +/composer.lock +*.hot-update.js +*.hot-update.json + + +/admin-dev/autoupgrade/* +!/admin-dev/autoupgrade/index.php +!/admin-dev/autoupgrade/backup/index.php + +/admin-dev/backups/* +!/admin-dev/backups/.htaccess + +/admin-dev/import/* +!/admin-dev/import/.htaccess +!/admin-dev/import/index.php + +/admin-dev/export/* +!/admin-dev/export/.htaccess +!/admin-dev/export/index.php + +# Downloaded RTL files 
+/admin-dev/themes/default/css/bundle/default_rtl.css +/admin-dev/themes/default/css/bundle/shared_rtl.css +/admin-dev/themes/default/css/font_rtl.css +/admin-dev/themes/default/css/overrides_rtl.css +/admin-dev/themes/default/css/vendor/font-awesome/font-awesome_rtl.css +/admin-dev/themes/default/css/vendor/nv.d3_rtl.css +/admin-dev/themes/default/css/vendor/titatoggle-min_rtl.css +/admin-dev/themes/default/public/theme_rtl.css +/admin-dev/themes/new-theme/css/module/drop_rtl.css +/admin-dev/themes/new-theme/css/right-sidebar_rtl.css + +themes/*/cache/* + +# Config + +config/settings.inc.php +config/settings.old.php +config/xml/* +config/themes/* +!config/xml/themes/default.xml +themes/*/config/settings_*.json +app/config/parameters.old.yml +app/config/config.php + +# Themes, modules and overrides + +modules/* +override/* +themes/*/ +!themes/classic +!themes/_core +!themes/_libraries + +# Vendors and dependencies + +bower_components/ +node_modules/ +composer.phar +php-cs-fixer +.grunt/* + +# Translations and emails templates + +translations/* +mails/* +!mails/themes/ +!mails/_partials/ +themes/default-bootstrap/lang/* +themes/default-bootstrap/modules/*/translations/*.php +themes/default-bootstrap/mails/* +!themes/default-bootstrap/mails/en/ +themes/default-bootstrap/modules/*/mails/* +!themes/default-bootstrap/modules/*/mails/en + +# MISC + +*sitemap.xml +/robots.txt + +# Symfony + +/bin/ +/app/Resources/geoip/GeoLite2-City.mmdb +/app/Resources/translations/* +!/app/Resources/translations/default +/app/config/parameters.yml +/app/config/parameters.php +/build/ +/phpunit.xml +/var/* +!/var/cache +/var/cache/* +!var/cache/.gitkeep +!/var/logs +/var/logs/* +!var/logs/.gitkeep +!/var/sessions +/var/sessions/* +!var/sessions/.gitkeep +!var/SymfonyRequirements.php +/vendor/ +/web/bundles/ + diff --git a/resources/gitignore/Processing.gitignore b/resources/gitignore/Processing.gitignore new file mode 100644 index 0000000000..942ebbccb5 --- /dev/null +++ b/resources/gitignore/Processing.gitignore @@ -0,0 +1,10 @@ +.DS_Store +applet +application.linux-arm64 +application.linux-armv6hf +application.linux32 +application.linux64 +application.windows32 +application.windows64 +application.macosx +out diff --git a/resources/gitignore/PureScript.gitignore b/resources/gitignore/PureScript.gitignore new file mode 100644 index 0000000000..de86604d37 --- /dev/null +++ b/resources/gitignore/PureScript.gitignore @@ -0,0 +1,9 @@ +# Dependencies +.psci_modules +.spago +bower_components +node_modules + +# Generated files +.psci +output diff --git a/resources/gitignore/Python.gitignore b/resources/gitignore/Python.gitignore new file mode 100644 index 0000000000..68bc17f9ff --- /dev/null +++ b/resources/gitignore/Python.gitignore @@ -0,0 +1,160 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/#use-with-ide +.pdm.toml + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. 
+#.idea/ diff --git a/resources/gitignore/Qooxdoo.gitignore b/resources/gitignore/Qooxdoo.gitignore new file mode 100644 index 0000000000..d0c64102d8 --- /dev/null +++ b/resources/gitignore/Qooxdoo.gitignore @@ -0,0 +1,5 @@ +cache +cache-downloads +inspector +api +source/inspector.html diff --git a/resources/gitignore/Qt.gitignore b/resources/gitignore/Qt.gitignore new file mode 100644 index 0000000000..7f4826b46f --- /dev/null +++ b/resources/gitignore/Qt.gitignore @@ -0,0 +1,54 @@ +# C++ objects and libs +*.slo +*.lo +*.o +*.a +*.la +*.lai +*.so +*.so.* +*.dll +*.dylib + +# Qt-es +object_script.*.Release +object_script.*.Debug +*_plugin_import.cpp +/.qmake.cache +/.qmake.stash +*.pro.user +*.pro.user.* +*.qbs.user +*.qbs.user.* +*.moc +moc_*.cpp +moc_*.h +qrc_*.cpp +ui_*.h +*.qmlc +*.jsc +Makefile* +*build-* +*.qm +*.prl + +# Qt unit tests +target_wrapper.* + +# QtCreator +*.autosave + +# QtCreator Qml +*.qmlproject.user +*.qmlproject.user.* + +# QtCreator CMake +CMakeLists.txt.user* + +# QtCreator 4.8< compilation database +compile_commands.json + +# QtCreator local machine specific files for imported projects +*creator.user* + +*_qmlcache.qrc diff --git a/resources/gitignore/R.gitignore b/resources/gitignore/R.gitignore new file mode 100644 index 0000000000..e75435c1b1 --- /dev/null +++ b/resources/gitignore/R.gitignore @@ -0,0 +1,49 @@ +# History files +.Rhistory +.Rapp.history + +# Session Data files +.RData +.RDataTmp + +# User-specific files +.Ruserdata + +# Example code in package build process +*-Ex.R + +# Output files from R CMD build +/*.tar.gz + +# Output files from R CMD check +/*.Rcheck/ + +# RStudio files +.Rproj.user/ + +# produced vignettes +vignettes/*.html +vignettes/*.pdf + +# OAuth2 token, see https://github.com/hadley/httr/releases/tag/v0.3 +.httr-oauth + +# knitr and R markdown default cache directories +*_cache/ +/cache/ + +# Temporary files created by R markdown +*.utf8.md +*.knit.md + +# R Environment Variables +.Renviron + +# pkgdown site +docs/ + +# translation temp files +po/*~ + +# RStudio Connect folder +rsconnect/ diff --git a/resources/gitignore/README.md b/resources/gitignore/README.md new file mode 100644 index 0000000000..7014eed78d --- /dev/null +++ b/resources/gitignore/README.md @@ -0,0 +1,155 @@ +# A collection of `.gitignore` templates + +This is GitHub’s collection of [`.gitignore`][man] file templates. +We use this list to populate the `.gitignore` template choosers available +in the GitHub.com interface when creating new repositories and files. + +For more information about how `.gitignore` files work, and how to use them, +the following resources are a great place to start: + +- The [Ignoring Files chapter][chapter] of the [Pro Git][progit] book. +- The [Ignoring Files article][help] on the GitHub Help site. +- The [gitignore(5)][man] manual page. + +[man]: http://git-scm.com/docs/gitignore +[help]: https://help.github.com/articles/ignoring-files +[chapter]: https://git-scm.com/book/en/Git-Basics-Recording-Changes-to-the-Repository#_ignoring +[progit]: http://git-scm.com/book + +## Folder structure + +We support a collection of templates, organized in this way: + +- The root folder contains templates in common use, to help people get started + with popular programming languages and technologies. These define a meaningful + set of rules to help get started, and ensure you are not committing + unimportant files into your repository. 
+- [`Global`](./Global) contains templates for various editors, tools and + operating systems that can be used in different situations. It is recommended + that you either [add these to your global template](https://docs.github.com/en/get-started/getting-started-with-git/ignoring-files#configuring-ignored-files-for-all-repositories-on-your-computer) + or merge these rules into your project-specific templates if you want to use + them permanently. +- [`community`](./community) contains specialized templates for other popular + languages, tools and projects, which don't currently belong in the mainstream + templates. These should be added to your project-specific templates when you + decide to adopt the framework or tool. + +## What makes a good template? + +A template should contain a set of rules to help Git repositories work with a +specific programming language, framework, tool or environment. + +If it's not possible to curate a small set of useful rules for this situation, +then the template is not a good fit for this collection. + +If a template is mostly a list of files installed by a particular version of +some software (e.g. a PHP framework), it could live under the `community` +directory. See [versioned templates](#versioned-templates) for more details. + +If you have a small set of rules, or want to support a technology that is not +widely in use, and still believe this will be helpful to others, please read the +section about [specialized templates](#specialized-templates) for more details. + +Include details when opening a pull request if the template is important and visible. We +may not accept it immediately, but we can promote it to the root at a later date +based on interest. + +Please also understand that we can’t list every tool that ever existed. +Our aim is to curate a collection of the _most common and helpful_ templates, +not to make sure we cover every project possible. If we choose not to +include your language, tool, or project, it’s not because it’s not awesome. + +## Contributing guidelines + +We’d love for you to help us improve this project. To help us keep this collection +high quality, we request that contributions adhere to the following guidelines. + +- **Provide a link to the application or project’s homepage**. Unless it’s + extremely popular, there’s a chance the maintainers don’t know about or use + the language, framework, editor, app, or project your change applies to. + +- **Provide links to documentation** supporting the change you’re making. + Current, canonical documentation mentioning the files being ignored is best. + If documentation isn’t available to support your change, do the best you can + to explain what the files being ignored are for. + +- **Explain why you’re making a change**. Even if it seems self-evident, please + take a sentence or two to tell us why your change or addition should happen. + It’s especially helpful to articulate why this change applies to _everyone_ + who works with the applicable technology, rather than just you or your team. + +- **Please consider the scope of your change**. If your change is specific to a + certain language or framework, then make sure the change is made to the + template for that language or framework, rather than to the template for an + editor, tool, or operating system. + +- **Please only modify _one template_ per pull request**. This helps keep pull + requests and feedback focused on a specific project or technology.
+ +In general, the more you can do to help us understand the change you’re making, +the more likely we’ll be to accept your contribution quickly. + +## Versioned templates + +Some templates can change greatly between versions, and if you wish to contribute +to this repository we need to follow this specific flow: + +- the template at the root should be the current supported version +- the template at the root should not have a version in the filename (i.e. + "evergreen") +- previous versions of templates should live under `community/` +- previous versions of the template should embed the version in the filename, + for readability + +This helps ensure users get the latest version (because they'll use whatever is +at the root) but helps maintainers support older versions still in the wild. + +## Specialized templates + +If you have a template that you would like to contribute, but it isn't quite +mainstream, please consider adding this to the `community` directory under a +folder that best suits where it belongs. + +The rules in your specialized template should be specific to the framework or +tool, and any additional templates should be mentioned in a comment in the +header of the template. + +For example, this template might live at `community/DotNet/InforCRM.gitignore`: + +``` +# gitignore template for InforCRM (formerly SalesLogix) +# website: https://www.infor.com/product-summary/cx/infor-crm/ +# +# Recommended: VisualStudio.gitignore + +# Ignore model files that are auto-generated +ModelIndex.xml +ExportedFiles.xml + +# Ignore deployment files +[Mm]odel/[Dd]eployment + +# Force include portal SupportFiles +!Model/Portal/*/SupportFiles/[Bb]in/ +!Model/Portal/PortalTemplates/*/SupportFiles/[Bb]in +``` + +## Contributing workflow + +Here’s how we suggest you go about proposing a change to this project: + +1. [Fork this project][fork] to your account. +2. [Create a branch][branch] for the change you intend to make. +3. Make your changes to your fork. +4. [Send a pull request][pr] from your fork’s branch to our `main` branch. + +Using the web-based interface to make changes is fine too, and will help you +by automatically forking the project and prompting to send a pull request too. + +[fork]: https://help.github.com/articles/fork-a-repo/ +[branch]: https://help.github.com/articles/creating-and-deleting-branches-within-your-repository +[pr]: https://help.github.com/articles/using-pull-requests/ + +## License + +[CC0-1.0](./LICENSE). 
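In this patch the templates above are added as plain files under `resources/gitignore/`. As a rough sketch only (this is not the implementation in this patch, and every identifier below is hypothetical), a Go file placed in the `resources` directory could embed the templates and expose them by name roughly like this:

```go
// Illustrative sketch, not code from this patch: one way the files under
// resources/gitignore/ could be embedded into the binary and listed by name.
// Assumes this file lives in the resources/ directory next to gitignore/.
package resources

import (
	"embed"
	"io/fs"
	"strings"
)

//go:embed gitignore/*.gitignore
var gitignoreFS embed.FS

// GitignoreNames lists the available template names, e.g. "Python" or "Rails".
func GitignoreNames() ([]string, error) {
	entries, err := fs.ReadDir(gitignoreFS, "gitignore")
	if err != nil {
		return nil, err
	}
	names := make([]string, 0, len(entries))
	for _, entry := range entries {
		names = append(names, strings.TrimSuffix(entry.Name(), ".gitignore"))
	}
	return names, nil
}

// Gitignore returns the raw template body for a given template name.
func Gitignore(name string) ([]byte, error) {
	return gitignoreFS.ReadFile("gitignore/" + name + ".gitignore")
}
```

A caller could then use `GitignoreNames()` to populate a `.gitignore` template chooser and `Gitignore("Python")` to fetch the corresponding file body; the exact wiring in this repository may differ.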
diff --git a/resources/gitignore/ROS.gitignore b/resources/gitignore/ROS.gitignore new file mode 100644 index 0000000000..35d74bb771 --- /dev/null +++ b/resources/gitignore/ROS.gitignore @@ -0,0 +1,51 @@ +devel/ +logs/ +build/ +bin/ +lib/ +msg_gen/ +srv_gen/ +msg/*Action.msg +msg/*ActionFeedback.msg +msg/*ActionGoal.msg +msg/*ActionResult.msg +msg/*Feedback.msg +msg/*Goal.msg +msg/*Result.msg +msg/_*.py +build_isolated/ +devel_isolated/ + +# Generated by dynamic reconfigure +*.cfgc +/cfg/cpp/ +/cfg/*.py + +# Ignore generated docs +*.dox +*.wikidoc + +# eclipse stuff +.project +.cproject + +# qcreator stuff +CMakeLists.txt.user + +srv/_*.py +*.pcd +*.pyc +qtcreator-* +*.user + +/planning/cfg +/planning/docs +/planning/src + +*~ + +# Emacs +.#* + +# Catkin custom files +CATKIN_IGNORE diff --git a/resources/gitignore/Racket.gitignore b/resources/gitignore/Racket.gitignore new file mode 100644 index 0000000000..93e407298d --- /dev/null +++ b/resources/gitignore/Racket.gitignore @@ -0,0 +1,7 @@ +.DS_Store +compiled/ +/doc/ +*~ +*.bak +\#* +.\#* diff --git a/resources/gitignore/Rails.gitignore b/resources/gitignore/Rails.gitignore new file mode 100644 index 0000000000..c55bb7132e --- /dev/null +++ b/resources/gitignore/Rails.gitignore @@ -0,0 +1,69 @@ +*.rbc +capybara-*.html +.rspec +/db/*.sqlite3 +/db/*.sqlite3-journal +/db/*.sqlite3-[0-9]* +/public/system +/coverage/ +/spec/tmp +*.orig +rerun.txt +pickle-email-*.html + +# Ignore all logfiles and tempfiles. +/log/* +/tmp/* +!/log/.keep +!/tmp/.keep + +# TODO Comment out this rule if you are OK with secrets being uploaded to the repo +config/initializers/secret_token.rb +config/master.key + +# Only include if you have production secrets in this file, which is no longer a Rails default +# config/secrets.yml + +# dotenv, dotenv-rails +# TODO Comment out these rules if environment variables can be committed +.env +.env*.local + +## Environment normalization: +/.bundle +/vendor/bundle + +# these should all be checked in to normalize the environment: +# Gemfile.lock, .ruby-version, .ruby-gemset + +# unless supporting rvm < 1.11.0 or doing something fancy, ignore this: +.rvmrc + +# if using bower-rails ignore default bower_components path bower.json files +/vendor/assets/bower_components +*.bowerrc +bower.json + +# Ignore pow environment settings +.powenv + +# Ignore Byebug command history file. 
+.byebug_history + +# Ignore node_modules +node_modules/ + +# Ignore precompiled javascript packs +/public/packs +/public/packs-test +/public/assets + +# Ignore yarn files +/yarn-error.log +yarn-debug.log* +.yarn-integrity + +# Ignore uploaded files in development +/storage/* +!/storage/.keep +/public/uploads diff --git a/resources/gitignore/Raku.gitignore b/resources/gitignore/Raku.gitignore new file mode 100644 index 0000000000..e792f6e466 --- /dev/null +++ b/resources/gitignore/Raku.gitignore @@ -0,0 +1,7 @@ +# Gitignore for Raku (https://raku.org) +# As part of https://github.com/github/gitignore + +# precompiled files +.precomp +lib/.precomp + diff --git a/resources/gitignore/RhodesRhomobile.gitignore b/resources/gitignore/RhodesRhomobile.gitignore new file mode 100644 index 0000000000..a211dcc3b0 --- /dev/null +++ b/resources/gitignore/RhodesRhomobile.gitignore @@ -0,0 +1,9 @@ +rholog-* +sim-* +bin/libs +bin/RhoBundle +bin/tmp +bin/target +bin/*.ap_ +*.o +*.jar diff --git a/resources/gitignore/Ruby.gitignore b/resources/gitignore/Ruby.gitignore new file mode 100644 index 0000000000..e3200e0f81 --- /dev/null +++ b/resources/gitignore/Ruby.gitignore @@ -0,0 +1,56 @@ +*.gem +*.rbc +/.config +/coverage/ +/InstalledFiles +/pkg/ +/spec/reports/ +/spec/examples.txt +/test/tmp/ +/test/version_tmp/ +/tmp/ + +# Used by dotenv library to load environment variables. +# .env + +# Ignore Byebug command history file. +.byebug_history + +## Specific to RubyMotion: +.dat* +.repl_history +build/ +*.bridgesupport +build-iPhoneOS/ +build-iPhoneSimulator/ + +## Specific to RubyMotion (use of CocoaPods): +# +# We recommend against adding the Pods directory to your .gitignore. However +# you should judge for yourself, the pros and cons are mentioned at: +# https://guides.cocoapods.org/using/using-cocoapods.html#should-i-check-the-pods-directory-into-source-control +# +# vendor/Pods/ + +## Documentation cache and generated files: +/.yardoc/ +/_yardoc/ +/doc/ +/rdoc/ + +## Environment normalization: +/.bundle/ +/vendor/bundle +/lib/bundler/man/ + +# for a library or gem, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# Gemfile.lock +# .ruby-version +# .ruby-gemset + +# unless supporting rvm < 1.11.0 or doing something fancy, ignore this: +.rvmrc + +# Used by RuboCop. Remote config files pulled in from inherit_from directive. 
+# .rubocop-https?--* diff --git a/resources/gitignore/Rust.gitignore b/resources/gitignore/Rust.gitignore new file mode 100644 index 0000000000..6985cf1bd0 --- /dev/null +++ b/resources/gitignore/Rust.gitignore @@ -0,0 +1,14 @@ +# Generated by Cargo +# will have compiled files and executables +debug/ +target/ + +# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries +# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html +Cargo.lock + +# These are backup files generated by rustfmt +**/*.rs.bk + +# MSVC Windows builds of rustc generate these, which store debugging information +*.pdb diff --git a/resources/gitignore/SCons.gitignore b/resources/gitignore/SCons.gitignore new file mode 100644 index 0000000000..84eee81b08 --- /dev/null +++ b/resources/gitignore/SCons.gitignore @@ -0,0 +1,6 @@ +# for projects that use SCons for building: http://http://www.scons.org/ +.sconsign.dblite + +# When configure fails, SCons outputs these +config.log +.sconf_temp diff --git a/resources/gitignore/Sass.gitignore b/resources/gitignore/Sass.gitignore new file mode 100644 index 0000000000..159f515170 --- /dev/null +++ b/resources/gitignore/Sass.gitignore @@ -0,0 +1,4 @@ +.sass-cache/ +*.css.map +*.sass.map +*.scss.map diff --git a/resources/gitignore/Scala.gitignore b/resources/gitignore/Scala.gitignore new file mode 100644 index 0000000000..7169cab195 --- /dev/null +++ b/resources/gitignore/Scala.gitignore @@ -0,0 +1,5 @@ +*.class +*.log + +# virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml +hs_err_pid* diff --git a/resources/gitignore/Scheme.gitignore b/resources/gitignore/Scheme.gitignore new file mode 100644 index 0000000000..cbb89d78da --- /dev/null +++ b/resources/gitignore/Scheme.gitignore @@ -0,0 +1,7 @@ +*.ss~ +*.ss#* +.#*.ss + +*.scm~ +*.scm#* +.#*.scm diff --git a/resources/gitignore/Scrivener.gitignore b/resources/gitignore/Scrivener.gitignore new file mode 100644 index 0000000000..5884b0c6c3 --- /dev/null +++ b/resources/gitignore/Scrivener.gitignore @@ -0,0 +1,8 @@ +*/Files/binder.autosave +*/Files/binder.backup +*/Files/search.indexes +*/Files/user.lock +*/Files/Docs/docs.checksum +*/Files/Data/docs.checksum +*/QuickLook/ +*/Settings/ui.plist diff --git a/resources/gitignore/Sdcc.gitignore b/resources/gitignore/Sdcc.gitignore new file mode 100644 index 0000000000..07ee7d59ab --- /dev/null +++ b/resources/gitignore/Sdcc.gitignore @@ -0,0 +1,8 @@ +# SDCC stuff +*.lnk +*.lst +*.map +*.mem +*.rel +*.rst +*.sym diff --git a/resources/gitignore/SeamGen.gitignore b/resources/gitignore/SeamGen.gitignore new file mode 100644 index 0000000000..a418cf376c --- /dev/null +++ b/resources/gitignore/SeamGen.gitignore @@ -0,0 +1,26 @@ +/bootstrap/data +/bootstrap/tmp +/classes/ +/dist/ +/exploded-archives/ +/test-build/ +/test-output/ +/test-report/ +/target/ +temp-testng-customsuite.xml + +# based on http://stackoverflow.com/a/8865858/422476 I am removing inline comments + +#/classes/ all class files +#/dist/ contains generated war files for deployment +#/exploded-archives/ war content generation during deploy (or explode) +#/test-build/ test compilation (ant target for Seam) +#/test-output/ test results +#/test-report/ test report generation for, e.g., Hudson +#/target/ maven output folder +#temp-testng-customsuite.xml generated when running test cases under Eclipse + +# Thanks to @VonC and @kraftan for their helpful answers on a related question +# on StackOverflow.com: +# 
http://stackoverflow.com/questions/4176687 +# /what-is-the-recommended-source-control-ignore-pattern-for-seam-projects diff --git a/resources/gitignore/SketchUp.gitignore b/resources/gitignore/SketchUp.gitignore new file mode 100644 index 0000000000..5160df3c6b --- /dev/null +++ b/resources/gitignore/SketchUp.gitignore @@ -0,0 +1 @@ +*.skb diff --git a/resources/gitignore/Smalltalk.gitignore b/resources/gitignore/Smalltalk.gitignore new file mode 100644 index 0000000000..178d87af45 --- /dev/null +++ b/resources/gitignore/Smalltalk.gitignore @@ -0,0 +1,31 @@ +# changes file +*.changes +*.chg + +# system image +*.image +*.img7 +*.img + +# Pharo Smalltalk Debug log file +PharoDebug.log + +# Squeak Smalltalk Debug log file +SqueakDebug.log + +# Dolphin Smalltalk source file +*.sml + +# Dolphin Smalltalk error file +*.errors + +# Monticello package cache +/package-cache + +# playground cache +/play-cache +/play-stash + +# Metacello-github cache +/github-cache +github-*.zip diff --git a/resources/gitignore/Stella.gitignore b/resources/gitignore/Stella.gitignore new file mode 100644 index 0000000000..402a543837 --- /dev/null +++ b/resources/gitignore/Stella.gitignore @@ -0,0 +1,12 @@ +# Atari 2600 (Stella) support for multiple assemblers +# - DASM +# - CC65 + +# Assembled binaries and object directories +obj/ +a.out +*.bin +*.a26 + +# Add in special Atari 7800-based binaries for good measure +*.a78 diff --git a/resources/gitignore/SugarCRM.gitignore b/resources/gitignore/SugarCRM.gitignore new file mode 100644 index 0000000000..6a183d1c74 --- /dev/null +++ b/resources/gitignore/SugarCRM.gitignore @@ -0,0 +1,27 @@ +## SugarCRM +# Ignore custom .htaccess stuff. +/.htaccess +# Ignore the cache directory completely. +# This will break the current behaviour. Which was often leading to +# the misuse of the repository as backup replacement. +# For development the cache directory can be safely ignored and +# therefore it is ignored. +/cache/* +!/cache/index.html +# Ignore some files and directories from the custom directory. +/custom/history/ +/custom/modulebuilder/ +/custom/working/ +/custom/modules/*/Ext/ +/custom/application/Ext/ +# Custom configuration should also be ignored. +/config.php +/config_override.php +# The silent upgrade scripts aren't needed. +/silentUpgrade*.php +# Logs files can safely be ignored. +*.log +# Ignore the new upload directories. +/upload/* +!/upload/index.html +/upload_backup/ diff --git a/resources/gitignore/Swift.gitignore b/resources/gitignore/Swift.gitignore new file mode 100644 index 0000000000..330d1674f3 --- /dev/null +++ b/resources/gitignore/Swift.gitignore @@ -0,0 +1,90 @@ +# Xcode +# +# gitignore contributors: remember to update Global/Xcode.gitignore, Objective-C.gitignore & Swift.gitignore + +## User settings +xcuserdata/ + +## compatibility with Xcode 8 and earlier (ignoring not required starting Xcode 9) +*.xcscmblueprint +*.xccheckout + +## compatibility with Xcode 3 and earlier (ignoring not required starting Xcode 4) +build/ +DerivedData/ +*.moved-aside +*.pbxuser +!default.pbxuser +*.mode1v3 +!default.mode1v3 +*.mode2v3 +!default.mode2v3 +*.perspectivev3 +!default.perspectivev3 + +## Obj-C/Swift specific +*.hmap + +## App packaging +*.ipa +*.dSYM.zip +*.dSYM + +## Playgrounds +timeline.xctimeline +playground.xcworkspace + +# Swift Package Manager +# +# Add this line if you want to avoid checking in source code from Swift Package Manager dependencies. 
+# Packages/ +# Package.pins +# Package.resolved +# *.xcodeproj +# +# Xcode automatically generates this directory with a .xcworkspacedata file and xcuserdata +# hence it is not needed unless you have added a package configuration file to your project +# .swiftpm + +.build/ + +# CocoaPods +# +# We recommend against adding the Pods directory to your .gitignore. However +# you should judge for yourself, the pros and cons are mentioned at: +# https://guides.cocoapods.org/using/using-cocoapods.html#should-i-check-the-pods-directory-into-source-control +# +# Pods/ +# +# Add this line if you want to avoid checking in source code from the Xcode workspace +# *.xcworkspace + +# Carthage +# +# Add this line if you want to avoid checking in source code from Carthage dependencies. +# Carthage/Checkouts + +Carthage/Build/ + +# Accio dependency management +Dependencies/ +.accio/ + +# fastlane +# +# It is recommended to not store the screenshots in the git repo. +# Instead, use fastlane to re-generate the screenshots whenever they are needed. +# For more information about the recommended setup visit: +# https://docs.fastlane.tools/best-practices/source-control/#source-control + +fastlane/report.xml +fastlane/Preview.html +fastlane/screenshots/**/*.png +fastlane/test_output + +# Code Injection +# +# After new code Injection tools there's a generated folder /iOSInjectionProject +# https://github.com/johnno1962/injectionforxcode + +iOSInjectionProject/ diff --git a/resources/gitignore/Symfony.gitignore b/resources/gitignore/Symfony.gitignore new file mode 100644 index 0000000000..3dab634c18 --- /dev/null +++ b/resources/gitignore/Symfony.gitignore @@ -0,0 +1,52 @@ +# Cache and logs (Symfony2) +/app/cache/* +/app/logs/* +!app/cache/.gitkeep +!app/logs/.gitkeep + +# Email spool folder +/app/spool/* + +# Cache, session files and logs (Symfony3) +/var/cache/* +/var/logs/* +/var/sessions/* +!var/cache/.gitkeep +!var/logs/.gitkeep +!var/sessions/.gitkeep + +# Logs (Symfony4) +/var/log/* +!var/log/.gitkeep + +# Parameters +/app/config/parameters.yml +/app/config/parameters.ini + +# Managed by Composer +/app/bootstrap.php.cache +/var/bootstrap.php.cache +/bin/* +!bin/console +!bin/symfony_requirements +/vendor/ + +# Assets and user uploads +/web/bundles/ +/web/uploads/ + +# PHPUnit +/app/phpunit.xml +/phpunit.xml + +# Build data +/build/ + +# Composer PHAR +/composer.phar + +# Backup entities generated with doctrine:generate:entities command +**/Entity/*~ + +# Embedded web-server pid file +/.web-server-pid diff --git a/resources/gitignore/SymphonyCMS.gitignore b/resources/gitignore/SymphonyCMS.gitignore new file mode 100644 index 0000000000..671c7ff9e3 --- /dev/null +++ b/resources/gitignore/SymphonyCMS.gitignore @@ -0,0 +1,6 @@ +manifest/cache/ +manifest/logs/ +manifest/tmp/ +symphony/ +workspace/uploads/ +install-log.txt diff --git a/resources/gitignore/TeX.gitignore b/resources/gitignore/TeX.gitignore new file mode 100644 index 0000000000..e964244133 --- /dev/null +++ b/resources/gitignore/TeX.gitignore @@ -0,0 +1,301 @@ +## Core latex/pdflatex auxiliary files: +*.aux +*.lof +*.log +*.lot +*.fls +*.out +*.toc +*.fmt +*.fot +*.cb +*.cb2 +.*.lb + +## Intermediate documents: +*.dvi +*.xdv +*-converted-to.* +# these rules might exclude image files for figures etc. 
+# *.ps +# *.eps +# *.pdf + +## Generated if empty string is given at "Please type another file name for output:" +.pdf + +## Bibliography auxiliary files (bibtex/biblatex/biber): +*.bbl +*.bcf +*.blg +*-blx.aux +*-blx.bib +*.run.xml + +## Build tool auxiliary files: +*.fdb_latexmk +*.synctex +*.synctex(busy) +*.synctex.gz +*.synctex.gz(busy) +*.pdfsync + +## Build tool directories for auxiliary files +# latexrun +latex.out/ + +## Auxiliary and intermediate files from other packages: +# algorithms +*.alg +*.loa + +# achemso +acs-*.bib + +# amsthm +*.thm + +# beamer +*.nav +*.pre +*.snm +*.vrb + +# changes +*.soc + +# comment +*.cut + +# cprotect +*.cpt + +# elsarticle (documentclass of Elsevier journals) +*.spl + +# endnotes +*.ent + +# fixme +*.lox + +# feynmf/feynmp +*.mf +*.mp +*.t[1-9] +*.t[1-9][0-9] +*.tfm + +#(r)(e)ledmac/(r)(e)ledpar +*.end +*.?end +*.[1-9] +*.[1-9][0-9] +*.[1-9][0-9][0-9] +*.[1-9]R +*.[1-9][0-9]R +*.[1-9][0-9][0-9]R +*.eledsec[1-9] +*.eledsec[1-9]R +*.eledsec[1-9][0-9] +*.eledsec[1-9][0-9]R +*.eledsec[1-9][0-9][0-9] +*.eledsec[1-9][0-9][0-9]R + +# glossaries +*.acn +*.acr +*.glg +*.glo +*.gls +*.glsdefs +*.lzo +*.lzs +*.slg +*.slo +*.sls + +# uncomment this for glossaries-extra (will ignore makeindex's style files!) +# *.ist + +# gnuplot +*.gnuplot +*.table + +# gnuplottex +*-gnuplottex-* + +# gregoriotex +*.gaux +*.glog +*.gtex + +# htlatex +*.4ct +*.4tc +*.idv +*.lg +*.trc +*.xref + +# hyperref +*.brf + +# knitr +*-concordance.tex +# TODO Uncomment the next line if you use knitr and want to ignore its generated tikz files +# *.tikz +*-tikzDictionary + +# listings +*.lol + +# luatexja-ruby +*.ltjruby + +# makeidx +*.idx +*.ilg +*.ind + +# minitoc +*.maf +*.mlf +*.mlt +*.mtc[0-9]* +*.slf[0-9]* +*.slt[0-9]* +*.stc[0-9]* + +# minted +_minted* +*.pyg + +# morewrites +*.mw + +# newpax +*.newpax + +# nomencl +*.nlg +*.nlo +*.nls + +# pax +*.pax + +# pdfpcnotes +*.pdfpc + +# sagetex +*.sagetex.sage +*.sagetex.py +*.sagetex.scmd + +# scrwfile +*.wrt + +# svg +svg-inkscape/ + +# sympy +*.sout +*.sympy +sympy-plots-for-*.tex/ + +# pdfcomment +*.upa +*.upb + +# pythontex +*.pytxcode +pythontex-files-*/ + +# tcolorbox +*.listing + +# thmtools +*.loe + +# TikZ & PGF +*.dpth +*.md5 +*.auxlock + +# titletoc +*.ptc + +# todonotes +*.tdo + +# vhistory +*.hst +*.ver + +# easy-todo +*.lod + +# xcolor +*.xcp + +# xmpincl +*.xmpi + +# xindy +*.xdy + +# xypic precompiled matrices and outlines +*.xyc +*.xyd + +# endfloat +*.ttt +*.fff + +# Latexian +TSWLatexianTemp* + +## Editors: +# WinEdt +*.bak +*.sav + +# Texpad +.texpadtmp + +# LyX +*.lyx~ + +# Kile +*.backup + +# gummi +.*.swp + +# KBibTeX +*~[0-9]* + +# TeXnicCenter +*.tps + +# auto folder when using emacs and auctex +./auto/* +*.el + +# expex forward references with \gathertags +*-tags.tex + +# standalone packages +*.sta + +# Makeindex log files +*.lpz + +# xwatermark package +*.xwm + +# REVTeX puts footnotes in the bibliography by default, unless the nofootinbib +# option is specified. Footnotes are the stored in a file with suffix Notes.bib. +# Uncomment the next line to have this generated file ignored. 
+#*Notes.bib diff --git a/resources/gitignore/Terraform.gitignore b/resources/gitignore/Terraform.gitignore new file mode 100644 index 0000000000..9b8a46e692 --- /dev/null +++ b/resources/gitignore/Terraform.gitignore @@ -0,0 +1,34 @@ +# Local .terraform directories +**/.terraform/* + +# .tfstate files +*.tfstate +*.tfstate.* + +# Crash log files +crash.log +crash.*.log + +# Exclude all .tfvars files, which are likely to contain sensitive data, such as +# password, private keys, and other secrets. These should not be part of version +# control as they are data points which are potentially sensitive and subject +# to change depending on the environment. +*.tfvars +*.tfvars.json + +# Ignore override files as they are usually used to override resources locally and so +# are not checked in +override.tf +override.tf.json +*_override.tf +*_override.tf.json + +# Include override files you do wish to add to version control using negated pattern +# !example_override.tf + +# Include tfplan files to ignore the plan output of command: terraform plan -out=tfplan +# example: *tfplan* + +# Ignore CLI configuration files +.terraformrc +terraform.rc diff --git a/resources/gitignore/Textpattern.gitignore b/resources/gitignore/Textpattern.gitignore new file mode 100644 index 0000000000..3805636d62 --- /dev/null +++ b/resources/gitignore/Textpattern.gitignore @@ -0,0 +1,11 @@ +.htaccess +css.php +rpc/ +sites/site*/admin/ +sites/site*/private/ +sites/site*/public/admin/ +sites/site*/public/setup/ +sites/site*/public/theme/ +textpattern/ +HISTORY.txt +README.txt diff --git a/resources/gitignore/TurboGears2.gitignore b/resources/gitignore/TurboGears2.gitignore new file mode 100644 index 0000000000..122b3de221 --- /dev/null +++ b/resources/gitignore/TurboGears2.gitignore @@ -0,0 +1,20 @@ +*.py[co] + +# Default development database +devdata.db + +# Default data directory +data/* + +# Packages +*.egg +*.egg-info +dist +build + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.tox diff --git a/resources/gitignore/TwinCAT3.gitignore b/resources/gitignore/TwinCAT3.gitignore new file mode 100644 index 0000000000..7bd6f87505 --- /dev/null +++ b/resources/gitignore/TwinCAT3.gitignore @@ -0,0 +1,25 @@ +# gitignore template for TwinCAT3 +# website: https://www.beckhoff.com/twincat3/ +# +# Recommended: VisualStudio.gitignore + +# TwinCAT files +*.tpy +*.tclrs +*.compiled-library +*.compileinfo +# Don't include the tmc-file rule if either of the following is true: +# 1. You've got TwinCAT C++ projects, as the information in the TMC-file is created manually for the C++ projects (in that case, only (manually) ignore the tmc-files for the PLC projects) +# 2. You've created a standalone PLC-project and added events to it, as these are stored in the TMC-file. +*.tmc +*.tmcRefac +*.library +*.project.~u +*.tsproj.bak +*.xti.bak +LineIDs.dbg +LineIDs.dbg.bak +_Boot/ +_CompileInfo/ +_Libraries/ +_ModuleInstall/ \ No newline at end of file diff --git a/resources/gitignore/Typo3.gitignore b/resources/gitignore/Typo3.gitignore new file mode 100644 index 0000000000..200c2a2bf7 --- /dev/null +++ b/resources/gitignore/Typo3.gitignore @@ -0,0 +1,23 @@ +## TYPO3 v6.2 +# Ignore several upload and file directories. 
+/fileadmin/user_upload/ +/fileadmin/_temp_/ +/fileadmin/_processed_/ +/uploads/ +# Ignore cache +/typo3conf/temp_CACHED* +/typo3conf/temp_fieldInfo.php +/typo3conf/deprecation_*.log +/typo3conf/ENABLE_INSTALL_TOOL +/typo3conf/realurl_autoconf.php +/FIRST_INSTALL +# Ignore system folders, you should have them symlinked. +# If not comment out the following entries. +/typo3 +/typo3_src +/typo3_src-* +/Packages +/.htaccess +/index.php +# Ignore temp directory. +/typo3temp/ diff --git a/resources/gitignore/Unity.gitignore b/resources/gitignore/Unity.gitignore new file mode 100644 index 0000000000..58cbc8256e --- /dev/null +++ b/resources/gitignore/Unity.gitignore @@ -0,0 +1,72 @@ +# This .gitignore file should be placed at the root of your Unity project directory +# +# Get latest from https://github.com/github/gitignore/blob/main/Unity.gitignore +# +/[Ll]ibrary/ +/[Tt]emp/ +/[Oo]bj/ +/[Bb]uild/ +/[Bb]uilds/ +/[Ll]ogs/ +/[Uu]ser[Ss]ettings/ + +# MemoryCaptures can get excessive in size. +# They also could contain extremely sensitive data +/[Mm]emoryCaptures/ + +# Recordings can get excessive in size +/[Rr]ecordings/ + +# Uncomment this line if you wish to ignore the asset store tools plugin +# /[Aa]ssets/AssetStoreTools* + +# Autogenerated Jetbrains Rider plugin +/[Aa]ssets/Plugins/Editor/JetBrains* + +# Visual Studio cache directory +.vs/ + +# Gradle cache directory +.gradle/ + +# Autogenerated VS/MD/Consulo solution and project files +ExportedObj/ +.consulo/ +*.csproj +*.unityproj +*.sln +*.suo +*.tmp +*.user +*.userprefs +*.pidb +*.booproj +*.svd +*.pdb +*.mdb +*.opendb +*.VC.db + +# Unity3D generated meta files +*.pidb.meta +*.pdb.meta +*.mdb.meta + +# Unity3D generated file on crash reports +sysinfo.txt + +# Builds +*.apk +*.aab +*.unitypackage +*.app + +# Crashlytics generated file +crashlytics-build.properties + +# Packed Addressables +/[Aa]ssets/[Aa]ddressable[Aa]ssets[Dd]ata/*/*.bin* + +# Temporary auto-generated Android Assets +/[Aa]ssets/[Ss]treamingAssets/aa.meta +/[Aa]ssets/[Ss]treamingAssets/aa/* diff --git a/resources/gitignore/UnrealEngine.gitignore b/resources/gitignore/UnrealEngine.gitignore new file mode 100644 index 0000000000..6582eaf9a1 --- /dev/null +++ b/resources/gitignore/UnrealEngine.gitignore @@ -0,0 +1,74 @@ +# Visual Studio 2015 user specific files +.vs/ + +# Compiled Object files +*.slo +*.lo +*.o +*.obj + +# Precompiled Headers +*.gch +*.pch + +# Compiled Dynamic libraries +*.so +*.dylib +*.dll + +# Fortran module files +*.mod + +# Compiled Static libraries +*.lai +*.la +*.a +*.lib + +# Executables +*.exe +*.out +*.app +*.ipa + +# These project files can be generated by the engine +*.xcodeproj +*.xcworkspace +*.sln +*.suo +*.opensdf +*.sdf +*.VC.db +*.VC.opendb + +# Precompiled Assets +SourceArt/**/*.png +SourceArt/**/*.tga + +# Binary Files +Binaries/* +Plugins/*/Binaries/* + +# Builds +Build/* + +# Whitelist PakBlacklist-.txt files +!Build/*/ +Build/*/** +!Build/*/PakBlacklist*.txt + +# Don't ignore icon files in Build +!Build/**/*.ico + +# Built data for maps +*_BuiltData.uasset + +# Configuration files generated by the Editor +Saved/* + +# Compiled source files for the engine to use +Intermediate/* +Plugins/*/Intermediate/* + +# Cache files for the editor to use +DerivedDataCache/* diff --git a/resources/gitignore/VVVV.gitignore b/resources/gitignore/VVVV.gitignore new file mode 100644 index 0000000000..5df4324603 --- /dev/null +++ b/resources/gitignore/VVVV.gitignore @@ -0,0 +1,6 @@ + +# .v4p backup files +*~.xml + +# Dynamic plugins .dll +bin/ diff --git 
a/resources/gitignore/VisualStudio.gitignore b/resources/gitignore/VisualStudio.gitignore new file mode 100644 index 0000000000..8a30d258ed --- /dev/null +++ b/resources/gitignore/VisualStudio.gitignore @@ -0,0 +1,398 @@ +## Ignore Visual Studio temporary files, build results, and +## files generated by popular Visual Studio add-ons. +## +## Get latest from https://github.com/github/gitignore/blob/main/VisualStudio.gitignore + +# User-specific files +*.rsuser +*.suo +*.user +*.userosscache +*.sln.docstates + +# User-specific files (MonoDevelop/Xamarin Studio) +*.userprefs + +# Mono auto generated files +mono_crash.* + +# Build results +[Dd]ebug/ +[Dd]ebugPublic/ +[Rr]elease/ +[Rr]eleases/ +x64/ +x86/ +[Ww][Ii][Nn]32/ +[Aa][Rr][Mm]/ +[Aa][Rr][Mm]64/ +bld/ +[Bb]in/ +[Oo]bj/ +[Ll]og/ +[Ll]ogs/ + +# Visual Studio 2015/2017 cache/options directory +.vs/ +# Uncomment if you have tasks that create the project's static files in wwwroot +#wwwroot/ + +# Visual Studio 2017 auto generated files +Generated\ Files/ + +# MSTest test Results +[Tt]est[Rr]esult*/ +[Bb]uild[Ll]og.* + +# NUnit +*.VisualState.xml +TestResult.xml +nunit-*.xml + +# Build Results of an ATL Project +[Dd]ebugPS/ +[Rr]eleasePS/ +dlldata.c + +# Benchmark Results +BenchmarkDotNet.Artifacts/ + +# .NET Core +project.lock.json +project.fragment.lock.json +artifacts/ + +# ASP.NET Scaffolding +ScaffoldingReadMe.txt + +# StyleCop +StyleCopReport.xml + +# Files built by Visual Studio +*_i.c +*_p.c +*_h.h +*.ilk +*.meta +*.obj +*.iobj +*.pch +*.pdb +*.ipdb +*.pgc +*.pgd +*.rsp +*.sbr +*.tlb +*.tli +*.tlh +*.tmp +*.tmp_proj +*_wpftmp.csproj +*.log +*.tlog +*.vspscc +*.vssscc +.builds +*.pidb +*.svclog +*.scc + +# Chutzpah Test files +_Chutzpah* + +# Visual C++ cache files +ipch/ +*.aps +*.ncb +*.opendb +*.opensdf +*.sdf +*.cachefile +*.VC.db +*.VC.VC.opendb + +# Visual Studio profiler +*.psess +*.vsp +*.vspx +*.sap + +# Visual Studio Trace Files +*.e2e + +# TFS 2012 Local Workspace +$tf/ + +# Guidance Automation Toolkit +*.gpState + +# ReSharper is a .NET coding add-in +_ReSharper*/ +*.[Rr]e[Ss]harper +*.DotSettings.user + +# TeamCity is a build add-in +_TeamCity* + +# DotCover is a Code Coverage Tool +*.dotCover + +# AxoCover is a Code Coverage Tool +.axoCover/* +!.axoCover/settings.json + +# Coverlet is a free, cross platform Code Coverage Tool +coverage*.json +coverage*.xml +coverage*.info + +# Visual Studio code coverage results +*.coverage +*.coveragexml + +# NCrunch +_NCrunch_* +.*crunch*.local.xml +nCrunchTemp_* + +# MightyMoose +*.mm.* +AutoTest.Net/ + +# Web workbench (sass) +.sass-cache/ + +# Installshield output folder +[Ee]xpress/ + +# DocProject is a documentation generator add-in +DocProject/buildhelp/ +DocProject/Help/*.HxT +DocProject/Help/*.HxC +DocProject/Help/*.hhc +DocProject/Help/*.hhk +DocProject/Help/*.hhp +DocProject/Help/Html2 +DocProject/Help/html + +# Click-Once directory +publish/ + +# Publish Web Output +*.[Pp]ublish.xml +*.azurePubxml +# Note: Comment the next line if you want to checkin your web deploy settings, +# but database connection strings (with potential passwords) will be unencrypted +*.pubxml +*.publishproj + +# Microsoft Azure Web App publish settings. 
Comment the next line if you want to +# checkin your Azure Web App publish settings, but sensitive information contained +# in these scripts will be unencrypted +PublishScripts/ + +# NuGet Packages +*.nupkg +# NuGet Symbol Packages +*.snupkg +# The packages folder can be ignored because of Package Restore +**/[Pp]ackages/* +# except build/, which is used as an MSBuild target. +!**/[Pp]ackages/build/ +# Uncomment if necessary however generally it will be regenerated when needed +#!**/[Pp]ackages/repositories.config +# NuGet v3's project.json files produces more ignorable files +*.nuget.props +*.nuget.targets + +# Microsoft Azure Build Output +csx/ +*.build.csdef + +# Microsoft Azure Emulator +ecf/ +rcf/ + +# Windows Store app package directories and files +AppPackages/ +BundleArtifacts/ +Package.StoreAssociation.xml +_pkginfo.txt +*.appx +*.appxbundle +*.appxupload + +# Visual Studio cache files +# files ending in .cache can be ignored +*.[Cc]ache +# but keep track of directories ending in .cache +!?*.[Cc]ache/ + +# Others +ClientBin/ +~$* +*~ +*.dbmdl +*.dbproj.schemaview +*.jfm +*.pfx +*.publishsettings +orleans.codegen.cs + +# Including strong name files can present a security risk +# (https://github.com/github/gitignore/pull/2483#issue-259490424) +#*.snk + +# Since there are multiple workflows, uncomment next line to ignore bower_components +# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) +#bower_components/ + +# RIA/Silverlight projects +Generated_Code/ + +# Backup & report files from converting an old project file +# to a newer Visual Studio version. Backup files are not needed, +# because we have git ;-) +_UpgradeReport_Files/ +Backup*/ +UpgradeLog*.XML +UpgradeLog*.htm +ServiceFabricBackup/ +*.rptproj.bak + +# SQL Server files +*.mdf +*.ldf +*.ndf + +# Business Intelligence projects +*.rdl.data +*.bim.layout +*.bim_*.settings +*.rptproj.rsuser +*- [Bb]ackup.rdl +*- [Bb]ackup ([0-9]).rdl +*- [Bb]ackup ([0-9][0-9]).rdl + +# Microsoft Fakes +FakesAssemblies/ + +# GhostDoc plugin setting file +*.GhostDoc.xml + +# Node.js Tools for Visual Studio +.ntvs_analysis.dat +node_modules/ + +# Visual Studio 6 build log +*.plg + +# Visual Studio 6 workspace options file +*.opt + +# Visual Studio 6 auto-generated workspace file (contains which files were open etc.) +*.vbw + +# Visual Studio 6 auto-generated project file (contains which files were open etc.) 
+*.vbp + +# Visual Studio 6 workspace and project file (working project files containing files to include in project) +*.dsw +*.dsp + +# Visual Studio 6 technical files +*.ncb +*.aps + +# Visual Studio LightSwitch build output +**/*.HTMLClient/GeneratedArtifacts +**/*.DesktopClient/GeneratedArtifacts +**/*.DesktopClient/ModelManifest.xml +**/*.Server/GeneratedArtifacts +**/*.Server/ModelManifest.xml +_Pvt_Extensions + +# Paket dependency manager +.paket/paket.exe +paket-files/ + +# FAKE - F# Make +.fake/ + +# CodeRush personal settings +.cr/personal + +# Python Tools for Visual Studio (PTVS) +__pycache__/ +*.pyc + +# Cake - Uncomment if you are using it +# tools/** +# !tools/packages.config + +# Tabs Studio +*.tss + +# Telerik's JustMock configuration file +*.jmconfig + +# BizTalk build output +*.btp.cs +*.btm.cs +*.odx.cs +*.xsd.cs + +# OpenCover UI analysis results +OpenCover/ + +# Azure Stream Analytics local run output +ASALocalRun/ + +# MSBuild Binary and Structured Log +*.binlog + +# NVidia Nsight GPU debugger configuration file +*.nvuser + +# MFractors (Xamarin productivity tool) working folder +.mfractor/ + +# Local History for Visual Studio +.localhistory/ + +# Visual Studio History (VSHistory) files +.vshistory/ + +# BeatPulse healthcheck temp database +healthchecksdb + +# Backup folder for Package Reference Convert tool in Visual Studio 2017 +MigrationBackup/ + +# Ionide (cross platform F# VS Code tools) working folder +.ionide/ + +# Fody - auto-generated XML schema +FodyWeavers.xsd + +# VS Code files for those working on multiple tools +.vscode/* +!.vscode/settings.json +!.vscode/tasks.json +!.vscode/launch.json +!.vscode/extensions.json +*.code-workspace + +# Local History for Visual Studio Code +.history/ + +# Windows Installer files from build outputs +*.cab +*.msi +*.msix +*.msm +*.msp + +# JetBrains Rider +*.sln.iml diff --git a/resources/gitignore/Waf.gitignore b/resources/gitignore/Waf.gitignore new file mode 100644 index 0000000000..dad2b56bdd --- /dev/null +++ b/resources/gitignore/Waf.gitignore @@ -0,0 +1,9 @@ +# For projects that use the Waf build system: https://waf.io/ +# Dot-hidden on Unix-like systems +.waf-*-*/ +.waf3-*-*/ +# Hidden directory on Windows (no dot) +waf-*-*/ +waf3-*-*/ +# Lockfile +.lock-waf_*_build diff --git a/resources/gitignore/WordPress.gitignore b/resources/gitignore/WordPress.gitignore new file mode 100644 index 0000000000..54696693a3 --- /dev/null +++ b/resources/gitignore/WordPress.gitignore @@ -0,0 +1,48 @@ +# Wordpress - ignore core, configuration, examples, uploads and logs. +# https://github.com/github/gitignore/blob/main/WordPress.gitignore + +# Core +# +# Note: if you want to stage/commit WP core files +# you can delete this whole section/until Configuration. 
+/wp-admin/ +/wp-content/index.php +/wp-content/languages +/wp-content/plugins/index.php +/wp-content/themes/index.php +/wp-includes/ +/index.php +/license.txt +/readme.html +/wp-*.php +/xmlrpc.php + +# Configuration +wp-config.php + +# Example themes +/wp-content/themes/twenty*/ + +# Example plugin +/wp-content/plugins/hello.php + +# Uploads +/wp-content/uploads/ + +# Log files +*.log + +# htaccess +/.htaccess + +# All plugins +# +# Note: If you wish to whitelist plugins, +# uncomment the next line +#/wp-content/plugins + +# All themes +# +# Note: If you wish to whitelist themes, +# uncomment the next line +#/wp-content/themes \ No newline at end of file diff --git a/resources/gitignore/Xojo.gitignore b/resources/gitignore/Xojo.gitignore new file mode 100644 index 0000000000..4915783bf0 --- /dev/null +++ b/resources/gitignore/Xojo.gitignore @@ -0,0 +1,11 @@ +# Xojo (formerly REALbasic and Real Studio) + +Builds* +*.debug +*.debug.app +Debug*.exe +Debug*/Debug*.exe +Debug*/Debug*\ Libs +*.rbuistate +*.xojo_uistate +*.obsolete* diff --git a/resources/gitignore/Yeoman.gitignore b/resources/gitignore/Yeoman.gitignore new file mode 100644 index 0000000000..7170d72018 --- /dev/null +++ b/resources/gitignore/Yeoman.gitignore @@ -0,0 +1,6 @@ +node_modules/ +bower_components/ +*.log + +build/ +dist/ diff --git a/resources/gitignore/Yii.gitignore b/resources/gitignore/Yii.gitignore new file mode 100644 index 0000000000..70f087546f --- /dev/null +++ b/resources/gitignore/Yii.gitignore @@ -0,0 +1,6 @@ +assets/* +!assets/.gitignore +protected/runtime/* +!protected/runtime/.gitignore +protected/data/*.db +themes/classic/views/ diff --git a/resources/gitignore/ZendFramework.gitignore b/resources/gitignore/ZendFramework.gitignore new file mode 100644 index 0000000000..f0b7d8585b --- /dev/null +++ b/resources/gitignore/ZendFramework.gitignore @@ -0,0 +1,24 @@ +# Composer files +composer.phar +vendor/ + +# Local configs +config/autoload/*.local.php + +# Binary gettext files +*.mo + +# Data +data/logs/ +data/cache/ +data/sessions/ +data/tmp/ +temp/ + +#Doctrine 2 +data/DoctrineORMModule/Proxy/ +data/DoctrineORMModule/cache/ + +# Legacy ZF1 +demos/ +extras/documentation diff --git a/resources/gitignore/Zephir.gitignore b/resources/gitignore/Zephir.gitignore new file mode 100644 index 0000000000..839cb5d707 --- /dev/null +++ b/resources/gitignore/Zephir.gitignore @@ -0,0 +1,26 @@ +# Cache files, generates by Zephir +.temp/ +.libs/ + +# Object files, generates by linker +*.lo +*.la +*.o +*.loT + +# Files generated by configure and Zephir, +# not required for extension compilation. +ext/build/ +ext/modules/ +ext/Makefile* +ext/config* +ext/acinclude.m4 +ext/aclocal.m4 +ext/autom4te* +ext/install-sh +ext/ltmain.sh +ext/missing +ext/mkinstalldirs +ext/run-tests.php +ext/.deps +ext/libtool diff --git a/resources/license/afl-3.0.txt b/resources/license/afl-3.0.txt new file mode 100644 index 0000000000..0c87cae5c8 --- /dev/null +++ b/resources/license/afl-3.0.txt @@ -0,0 +1,43 @@ +Academic Free License (“AFL”) v. 3.0 + +This Academic Free License (the "License") applies to any original work of authorship (the "Original Work") whose owner (the "Licensor") has placed the following licensing notice adjacent to the copyright notice for the Original Work: + + Licensed under the Academic Free License version 3.0 + +1) Grant of Copyright License. 
Licensor grants You a worldwide, royalty-free, non-exclusive, sublicensable license, for the duration of the copyright, to do the following: + + a) to reproduce the Original Work in copies, either alone or as part of a collective work; + b) to translate, adapt, alter, transform, modify, or arrange the Original Work, thereby creating derivative works ("Derivative Works") based upon the Original Work; + c) to distribute or communicate copies of the Original Work and Derivative Works to the public, under any license of your choice that does not contradict the terms and conditions, including Licensor’s reserved rights and remedies, in this Academic Free License; + d) to perform the Original Work publicly; and + e) to display the Original Work publicly. + +2) Grant of Patent License. Licensor grants You a worldwide, royalty-free, non-exclusive, sublicensable license, under patent claims owned or controlled by the Licensor that are embodied in the Original Work as furnished by the Licensor, for the duration of the patents, to make, use, sell, offer for sale, have made, and import the Original Work and Derivative Works. + +3) Grant of Source Code License. The term "Source Code" means the preferred form of the Original Work for making modifications to it and all available documentation describing how to modify the Original Work. Licensor agrees to provide a machine-readable copy of the Source Code of the Original Work along with each copy of the Original Work that Licensor distributes. Licensor reserves the right to satisfy this obligation by placing a machine-readable copy of the Source Code in an information repository reasonably calculated to permit inexpensive and convenient access by You for as long as Licensor continues to distribute the Original Work. + + 4) Exclusions From License Grant. Neither the names of Licensor, nor the names of any contributors to the Original Work, nor any of their trademarks or service marks, may be used to endorse or promote products derived from this Original Work without express prior permission of the Licensor. Except as expressly stated herein, nothing in this License grants any license to Licensor’s trademarks, copyrights, patents, trade secrets or any other intellectual property. No patent license is granted to make, use, sell, offer for sale, have made, or import embodiments of any patent claims other than the licensed claims defined in Section 2. No license is granted to the trademarks of Licensor even if such marks are included in the Original Work. Nothing in this License shall be interpreted to prohibit Licensor from licensing under terms different from this License any Original Work that Licensor otherwise would have a right to license. + +5) External Deployment. The term "External Deployment" means the use, distribution, or communication of the Original Work or Derivative Works in any way such that the Original Work or Derivative Works may be used by anyone other than You, whether those works are distributed or communicated to those persons or made available as an application intended for use over a network. As an express condition for the grants of license hereunder, You must treat any External Deployment by You of the Original Work or a Derivative Work as a distribution under section 1(c). + +6) Attribution Rights. 
You must retain, in the Source Code of any Derivative Works that You create, all copyright, patent, or trademark notices from the Source Code of the Original Work, as well as any notices of licensing and any descriptive text identified therein as an "Attribution Notice." You must cause the Source Code for any Derivative Works that You create to carry a prominent Attribution Notice reasonably calculated to inform recipients that You have modified the Original Work. + +7) Warranty of Provenance and Disclaimer of Warranty. Licensor warrants that the copyright in and to the Original Work and the patent rights granted herein by Licensor are owned by the Licensor or are sublicensed to You under the terms of this License with the permission of the contributor(s) of those copyrights and patent rights. Except as expressly stated in the immediately preceding sentence, the Original Work is provided under this License on an "AS IS" BASIS and WITHOUT WARRANTY, either express or implied, including, without limitation, the warranties of non-infringement, merchantability or fitness for a particular purpose. THE ENTIRE RISK AS TO THE QUALITY OF THE ORIGINAL WORK IS WITH YOU. This DISCLAIMER OF WARRANTY constitutes an essential part of this License. No license to the Original Work is granted by this License except under this disclaimer. + +8) Limitation of Liability. Under no circumstances and under no legal theory, whether in tort (including negligence), contract, or otherwise, shall the Licensor be liable to anyone for any indirect, special, incidental, or consequential damages of any character arising as a result of this License or the use of the Original Work including, without limitation, damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses. This limitation of liability shall not apply to the extent applicable law prohibits such limitation. + +9) Acceptance and Termination. If, at any time, You expressly assented to this License, that assent indicates your clear and irrevocable acceptance of this License and all of its terms and conditions. If You distribute or communicate copies of the Original Work or a Derivative Work, You must make a reasonable effort under the circumstances to obtain the express assent of recipients to the terms of this License. This License conditions your rights to undertake the activities listed in Section 1, including your right to create Derivative Works based upon the Original Work, and doing so without honoring these terms and conditions is prohibited by copyright law and international treaty. Nothing in this License is intended to affect copyright exceptions and limitations (including “fair use” or “fair dealing”). This License shall terminate immediately and You may no longer exercise any of the rights granted to You by this License upon your failure to honor the conditions in Section 1(c). + +10) Termination for Patent Action. This License shall terminate automatically and You may no longer exercise any of the rights granted to You by this License as of the date You commence an action, including a cross-claim or counterclaim, against Licensor or any licensee alleging that the Original Work infringes a patent. This termination provision shall not apply for an action alleging patent infringement by combinations of the Original Work with other software or hardware. + +11) Jurisdiction, Venue and Governing Law. 
Any action or suit relating to this License may be brought only in the courts of a jurisdiction wherein the Licensor resides or in which Licensor conducts its primary business, and under the laws of that jurisdiction excluding its conflict-of-law provisions. The application of the United Nations Convention on Contracts for the International Sale of Goods is expressly excluded. Any use of the Original Work outside the scope of this License or after its termination shall be subject to the requirements and penalties of copyright or patent law in the appropriate jurisdiction. This section shall survive the termination of this License. + +12) Attorneys’ Fees. In any action to enforce the terms of this License or seeking damages relating thereto, the prevailing party shall be entitled to recover its costs and expenses, including, without limitation, reasonable attorneys' fees and costs incurred in connection with such action, including any appeal of such action. This section shall survive the termination of this License. + +13) Miscellaneous. If any provision of this License is held to be unenforceable, such provision shall be reformed only to the extent necessary to make it enforceable. + +14) Definition of "You" in This License. "You" throughout this License, whether in upper or lower case, means an individual or a legal entity exercising rights under, and complying with all of the terms of, this License. For legal entities, "You" includes any entity that controls, is controlled by, or is under common control with you. For purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. + +15) Right to Use. You may use the Original Work in all ways not otherwise restricted or conditioned by this License or by law, and Licensor promises not to interfere with or be responsible for such uses by You. + +16) Modification of This License. This License is Copyright © 2005 Lawrence Rosen. Permission is granted to copy, distribute, or communicate this License without modification. Nothing in this License permits You to modify this License as applied to the Original Work or to Derivative Works. However, You may modify the text of this License and copy, distribute or communicate your modified version (the "Modified License") and apply it to other original works of authorship subject to the following conditions: (i) You may not indicate in any way that your Modified License is the "Academic Free License" or "AFL" and you may not use those names in the name of your Modified License; (ii) You must replace the notice specified in the first paragraph above with the notice "Licensed under <insert your license name here>" or with a notice of your own that is not confusingly similar to the notice in this License; and (iii) You may not claim that your original works are open source software unless your Modified License has been approved by Open Source Initiative (OSI) and You comply with its license review and certification process. \ No newline at end of file diff --git a/resources/license/agpl-3.0.txt b/resources/license/agpl-3.0.txt new file mode 100644 index 0000000000..4e2755a73c --- /dev/null +++ b/resources/license/agpl-3.0.txt @@ -0,0 +1,235 @@ +GNU AFFERO GENERAL PUBLIC LICENSE +Version 3, 19 November 2007 + +Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
+ +Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. + + Preamble + +The GNU Affero General Public License is a free, copyleft license for software and other kinds of works, specifically designed to ensure cooperation with the community in the case of network server software. + +The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, our General Public Licenses are intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. + +When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things. + +Developers that use our General Public Licenses protect your rights with two steps: (1) assert copyright on the software, and (2) offer you this License which gives you legal permission to copy, distribute and/or modify the software. + +A secondary benefit of defending all users' freedom is that improvements made in alternate versions of the program, if they receive widespread use, become available for other developers to incorporate. Many developers of free software are heartened and encouraged by the resulting cooperation. However, in the case of software used on network servers, this result may fail to come about. The GNU General Public License permits making a modified version and letting the public access it on a server without ever releasing its source code to the public. + +The GNU Affero General Public License is designed specifically to ensure that, in such cases, the modified source code becomes available to the community. It requires the operator of a network server to provide the source code of the modified version running there to the users of that server. Therefore, public use of a modified version, on a publicly accessible server, gives the public access to the source code of the modified version. + +An older license, called the Affero General Public License and published by Affero, was designed to accomplish similar goals. This is a different license, not a version of the Affero GPL, but Affero has released a new version of the Affero GPL which permits relicensing under this license. + +The precise terms and conditions for copying, distribution and modification follow. + + TERMS AND CONDITIONS + +0. Definitions. + +"This License" refers to version 3 of the GNU Affero General Public License. + +"Copyright" also means copyright-like laws that apply to other kinds of works, such as semiconductor masks. + +"The Program" refers to any copyrightable work licensed under this License. Each licensee is addressed as "you". "Licensees" and "recipients" may be individuals or organizations. + +To "modify" a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. The resulting work is called a "modified version" of the earlier work or a work "based on" the earlier work. + +A "covered work" means either the unmodified Program or a work based on the Program. 
+ +To "propagate" a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well. + +To "convey" a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying. + +An interactive user interface displays "Appropriate Legal Notices" to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion. + +1. Source Code. +The "source code" for a work means the preferred form of the work for making modifications to it. "Object code" means any non-source form of a work. + +A "Standard Interface" means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language. + +The "System Libraries" of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. A "Major Component", in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it. + +The "Corresponding Source" for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those +subprograms and other parts of the work. + +The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source. + +The Corresponding Source for a work in source code form is that same work. + +2. Basic Permissions. +All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. 
The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law. + +You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you. + +Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary. + +3. Protecting Users' Legal Rights From Anti-Circumvention Law. +No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures. + +When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures. + +4. Conveying Verbatim Copies. +You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program. + +You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee. + +5. Conveying Modified Source Versions. +You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to "keep intact all notices". + + c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it. 
+ + d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so. + +A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an "aggregate" if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate. + +6. Conveying Non-Source Forms. +You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways: + + a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either (1) a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or (2) access to copy the Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b. + + d) Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d. + +A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work. 
+ +A "User Product" is either (1) a "consumer product", which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a particular product received by a particular user, "normally used" refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product. + +"Installation Information" for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made. + +If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM). + +The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network. + +Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying. + +7. Additional Terms. +"Additional permissions" are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions. + +When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) 
You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission. + +Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or authors of the material; or + + e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors. + +All other non-permissive additional terms are considered "further restrictions" within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying. + +If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms. + +Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way. + +8. Termination. + +You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11). + +However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation. + +Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice. 
+ +Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10. + +9. Acceptance Not Required for Having Copies. + +You are not required to accept this License in order to receive or run a copy of the Program. Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so. + +10. Automatic Licensing of Downstream Recipients. + +Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License. + +An "entity transaction" is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts. + +You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it. + +11. Patents. + +A "contributor" is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. The work thus licensed is called the contributor's "contributor version". + +A contributor's "essential patent claims" are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, "control" includes the right to grant patent sublicenses in a manner consistent with the requirements of this License. + +Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version. 
+ +In the following three paragraphs, a "patent license" is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To "grant" such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party. + +If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either (1) cause the Corresponding Source to be so available, or (2) arrange to deprive yourself of the benefit of the patent license for this particular work, or (3) arrange, in a manner consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid. + +If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it. + +A patent license is "discriminatory" if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007. + +Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law. + +12. No Surrender of Others' Freedom. + +If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program. + +13. 
Remote Network Interaction; Use with the GNU General Public License. + +Notwithstanding any other provision of this License, if you modify the Program, your modified version must prominently offer all users interacting with it remotely through a computer network (if your version supports such interaction) an opportunity to receive the Corresponding Source of your version by providing access to the Corresponding Source from a network server at no charge, through some standard or customary means of facilitating copying of software. This Corresponding Source shall include the Corresponding Source for any work covered by version 3 of the GNU General Public License that is incorporated pursuant to the following paragraph. + +Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU General Public License into a single combined work, and to convey the resulting work. The terms of this License will continue to apply to the part which is the covered work, but the work with which it is combined will remain governed by version 3 of the GNU General Public License. + +14. Revised Versions of this License. + +The Free Software Foundation may publish revised and/or new versions of the GNU Affero General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. + +Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU Affero General Public License "or any later version" applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU Affero General Public License, you may choose any version ever published by the Free Software Foundation. + +If the Program specifies that a proxy can decide which future versions of the GNU Affero General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program. + +Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version. + +15. Disclaimer of Warranty. + +THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + +16. Limitation of Liability. 
+ +IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. + +17. Interpretation of Sections 15 and 16. + +If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee. + +END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + +If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. + +To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. + + <one line to give the program's name and a brief idea of what it does.> + Copyright (C) <year> <name of author> + + This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. + + This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details. + + You should have received a copy of the GNU Affero General Public License along with this program. If not, see <https://www.gnu.org/licenses/>. + +Also add information on how to contact you by electronic and paper mail. + +If your software can interact with users remotely through a computer network, you should also make sure that it provides a way for users to get its source. For example, if your program is a web application, its interface could display a "Source" link that leads users to an archive of the code. There are many ways you could offer source, and different solutions will be better for different programs; see section 13 for the specific requirements. + +You should also get your employer (if you work as a programmer) or school, if any, to sign a "copyright disclaimer" for the program, if necessary. For more information on this, and how to apply and follow the GNU AGPL, see <https://www.gnu.org/licenses/>. \ No newline at end of file diff --git a/resources/license/apache-2.0.txt b/resources/license/apache-2.0.txt new file mode 100644 index 0000000000..33666dddf7 --- /dev/null +++ b/resources/license/apache-2.0.txt @@ -0,0 +1,73 @@ +Apache License +Version 2.0, January 2004 +http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + +"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
+ +"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. + +"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. + +"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. + +"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). + +"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. + +"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." + +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: + + (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. + + You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. + +6. Trademarks. 
This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + +To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
\ No newline at end of file diff --git a/resources/license/artistic-2.0.txt b/resources/license/artistic-2.0.txt new file mode 100644 index 0000000000..3f9f0f5d91 --- /dev/null +++ b/resources/license/artistic-2.0.txt @@ -0,0 +1,85 @@ +The Artistic License 2.0 + +Copyright (c) 2000-2006, The Perl Foundation. + +Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. + +Preamble + +This license establishes the terms under which a given free software Package may be copied, modified, distributed, and/or redistributed. The intent is that the Copyright Holder maintains some artistic control over the development of that Package while still keeping the Package available as open source and free software. + +You are always permitted to make arrangements wholly outside of this license directly with the Copyright Holder of a given Package. If the terms of this license do not permit the full use that you propose to make of the Package, you should contact the Copyright Holder and seek a different licensing arrangement. + +Definitions + + "Copyright Holder" means the individual(s) or organization(s) named in the copyright notice for the entire Package. + + "Contributor" means any party that has contributed code or other material to the Package, in accordance with the Copyright Holder's procedures. + + "You" and "your" means any person who would like to copy, distribute, or modify the Package. + + "Package" means the collection of files distributed by the Copyright Holder, and derivatives of that collection and/or of those files. A given Package may consist of either the Standard Version, or a Modified Version. + + "Distribute" means providing a copy of the Package or making it accessible to anyone else, or in the case of a company or organization, to others outside of your company or organization. + + "Distributor Fee" means any fee that you charge for Distributing this Package or providing support for this Package to another party. It does not mean licensing fees. + + "Standard Version" refers to the Package if it has not been modified, or has been modified only in ways explicitly requested by the Copyright Holder. + + "Modified Version" means the Package, if it has been changed, and such changes were not explicitly requested by the Copyright Holder. + + "Original License" means this Artistic License as Distributed with the Standard Version of the Package, in its current version or as it may be modified by The Perl Foundation in the future. + + "Source" form means the source code, documentation source, and configuration files for the Package. + + "Compiled" form means the compiled bytecode, object code, binary, or any other form resulting from mechanical transformation or translation of the Source form. + +Permission for Use and Modification Without Distribution + +(1) You are permitted to use the Standard Version and create and use Modified Versions for any purpose without restriction, provided that you do not Distribute the Modified Version. + +Permissions for Redistribution of the Standard Version + +(2) You may Distribute verbatim copies of the Source form of the Standard Version of this Package in any medium without restriction, either gratis or for a Distributor Fee, provided that you duplicate all of the original copyright notices and associated disclaimers. At your discretion, such verbatim copies may or may not include a Compiled form of the Package. 
+ +(3) You may apply any bug fixes, portability changes, and other modifications made available from the Copyright Holder. The resulting Package will still be considered the Standard Version, and as such will be subject to the Original License. + +Distribution of Modified Versions of the Package as Source + +(4) You may Distribute your Modified Version as Source (either gratis or for a Distributor Fee, and with or without a Compiled form of the Modified Version) provided that you clearly document how it differs from the Standard Version, including, but not limited to, documenting any non-standard features, executables, or modules, and provided that you do at least ONE of the following: + + (a) make the Modified Version available to the Copyright Holder of the Standard Version, under the Original License, so that the Copyright Holder may include your modifications in the Standard Version. + (b) ensure that installation of your Modified Version does not prevent the user installing or running the Standard Version. In addition, the Modified Version must bear a name that is different from the name of the Standard Version. + (c) allow anyone who receives a copy of the Modified Version to make the Source form of the Modified Version available to others under + + (i) the Original License or + (ii) a license that permits the licensee to freely copy, modify and redistribute the Modified Version using the same licensing terms that apply to the copy that the licensee received, and requires that the Source form of the Modified Version, and of any works derived from it, be made freely available in that license fees are prohibited but Distributor Fees are allowed. + +Distribution of Compiled Forms of the Standard Version or Modified Versions without the Source + +(5) You may Distribute Compiled forms of the Standard Version without the Source, provided that you include complete instructions on how to get the Source of the Standard Version. Such instructions must be valid at the time of your distribution. If these instructions, at any time while you are carrying out such distribution, become invalid, you must provide new instructions on demand or cease further distribution. If you provide valid instructions or cease distribution within thirty days after you become aware that the instructions are invalid, then you do not forfeit any of your rights under this license. + +(6) You may Distribute a Modified Version in Compiled form without the Source, provided that you comply with Section 4 with respect to the Source of the Modified Version. + +Aggregating or Linking the Package + +(7) You may aggregate the Package (either the Standard Version or Modified Version) with other packages and Distribute the resulting aggregation provided that you do not charge a licensing fee for the Package. Distributor Fees are permitted, and licensing fees for other components in the aggregation are permitted. The terms of this license apply to the use and Distribution of the Standard or Modified Versions as included in the aggregation. + +(8) You are permitted to link Modified and Standard Versions with other works, to embed the Package in a larger work of your own, or to build stand-alone binary or bytecode versions of applications that include the Package, and Distribute the result without restriction, provided the result does not expose a direct interface to the Package. 
+ +Items That are Not Considered Part of a Modified Version + +(9) Works (including, but not limited to, modules and scripts) that merely extend or make use of the Package, do not, by themselves, cause the Package to be a Modified Version. In addition, such works are not considered parts of the Package itself, and are not subject to the terms of this license. + +General Provisions + +(10) Any use, modification, and distribution of the Standard or Modified Versions is governed by this Artistic License. By using, modifying or distributing the Package, you accept this license. Do not use, modify, or distribute the Package, if you do not accept this license. + +(11) If your Modified Version has been derived from a Modified Version made by someone other than you, you are nevertheless required to ensure that your Modified Version complies with the requirements of this license. + +(12) This license does not grant you the right to use any trademark, service mark, tradename, or logo of the Copyright Holder. + +(13) This license includes the non-exclusive, worldwide, free-of-charge patent license to make, have made, use, offer to sell, sell, import and otherwise transfer the Package with respect to any patent claims licensable by the Copyright Holder that are necessarily infringed by the Package. If you institute patent litigation (including a cross-claim or counterclaim) against any party alleging that the Package constitutes direct or contributory patent infringement, then this Artistic License to you shall terminate on the date that such litigation is filed. + +(14) Disclaimer of Warranty: +THE PACKAGE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS "AS IS" AND WITHOUT ANY EXPRESS OR IMPLIED WARRANTIES. THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR NON-INFRINGEMENT ARE DISCLAIMED TO THE EXTENT PERMITTED BY YOUR LOCAL LAW. UNLESS REQUIRED BY LAW, NO COPYRIGHT HOLDER OR CONTRIBUTOR WILL BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING IN ANY WAY OUT OF THE USE OF THE PACKAGE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. \ No newline at end of file diff --git a/resources/license/bsd-2-clause.txt b/resources/license/bsd-2-clause.txt new file mode 100644 index 0000000000..1e23d1f189 --- /dev/null +++ b/resources/license/bsd-2-clause.txt @@ -0,0 +1,9 @@ +Copyright (c) + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. \ No newline at end of file diff --git a/resources/license/bsd-3-clause-clear.txt b/resources/license/bsd-3-clause-clear.txt new file mode 100644 index 0000000000..4bd5c8c9cf --- /dev/null +++ b/resources/license/bsd-3-clause-clear.txt @@ -0,0 +1,14 @@ +The Clear BSD License + +Copyright (c) [xxxx]-[xxxx] [Owner Organization] +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, are permitted (subject to the limitations in the disclaimer below) provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + + * Neither the name of [Owner Organization] nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. + +NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. \ No newline at end of file diff --git a/resources/license/bsd-3-clause.txt b/resources/license/bsd-3-clause.txt new file mode 100644 index 0000000000..a33c6961e2 --- /dev/null +++ b/resources/license/bsd-3-clause.txt @@ -0,0 +1,11 @@ +Copyright (c) . + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. \ No newline at end of file diff --git a/resources/license/bsl-1.0.txt b/resources/license/bsl-1.0.txt new file mode 100644 index 0000000000..c2dc836bf7 --- /dev/null +++ b/resources/license/bsl-1.0.txt @@ -0,0 +1,7 @@ +Boost Software License - Version 1.0 - August 17th, 2003 + +Permission is hereby granted, free of charge, to any person or organization obtaining a copy of the software and accompanying documentation covered by this license (the "Software") to use, reproduce, display, distribute, execute, and transmit the Software, and to prepare derivative works of the Software, and to permit third-parties to whom the Software is furnished to do so, all subject to the following: + +The copyright notices in the Software and this entire statement, including the above license grant, this restriction and the following disclaimer, must be included in all copies of the Software, in whole or in part, and all derivative works of the Software, unless such copies or derivative works are solely in the form of machine-executable object code generated by a source language processor. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/resources/license/cc-by-4.0.txt b/resources/license/cc-by-4.0.txt new file mode 100644 index 0000000000..231f757079 --- /dev/null +++ b/resources/license/cc-by-4.0.txt @@ -0,0 +1,156 @@ +Creative Commons Attribution 4.0 International + + Creative Commons Corporation (“Creative Commons”) is not a law firm and does not provide legal services or legal advice. Distribution of Creative Commons public licenses does not create a lawyer-client or other relationship. Creative Commons makes its licenses and related information available on an “as-is” basis. Creative Commons gives no warranties regarding its licenses, any material licensed under their terms and conditions, or any related information. Creative Commons disclaims all liability for damages resulting from their use to the fullest extent possible. + +Using Creative Commons Public Licenses + +Creative Commons public licenses provide a standard set of terms and conditions that creators and other rights holders may use to share original works of authorship and other material subject to copyright and certain other rights specified in the public license below. 
The following considerations are for informational purposes only, are not exhaustive, and do not form part of our licenses. + +Considerations for licensors: Our public licenses are intended for use by those authorized to give the public permission to use material in ways otherwise restricted by copyright and certain other rights. Our licenses are irrevocable. Licensors should read and understand the terms and conditions of the license they choose before applying it. Licensors should also secure all rights necessary before applying our licenses so that the public can reuse the material as expected. Licensors should clearly mark any material not subject to the license. This includes other CC-licensed material, or material used under an exception or limitation to copyright. More considerations for licensors. + +Considerations for the public: By using one of our public licenses, a licensor grants the public permission to use the licensed material under specified terms and conditions. If the licensor’s permission is not necessary for any reason–for example, because of any applicable exception or limitation to copyright–then that use is not regulated by the license. Our licenses grant only permissions under copyright and certain other rights that a licensor has authority to grant. Use of the licensed material may still be restricted for other reasons, including because others have copyright or other rights in the material. A licensor may make special requests, such as asking that all changes be marked or described. Although not required by our licenses, you are encouraged to respect those requests where reasonable. More considerations for the public. + +Creative Commons Attribution 4.0 International Public License + +By exercising the Licensed Rights (defined below), You accept and agree to be bound by the terms and conditions of this Creative Commons Attribution 4.0 International Public License ("Public License"). To the extent this Public License may be interpreted as a contract, You are granted the Licensed Rights in consideration of Your acceptance of these terms and conditions, and the Licensor grants You such rights in consideration of benefits the Licensor receives from making the Licensed Material available under these terms and conditions. + +Section 1 – Definitions. + + a. Adapted Material means material subject to Copyright and Similar Rights that is derived from or based upon the Licensed Material and in which the Licensed Material is translated, altered, arranged, transformed, or otherwise modified in a manner requiring permission under the Copyright and Similar Rights held by the Licensor. For purposes of this Public License, where the Licensed Material is a musical work, performance, or sound recording, Adapted Material is always produced where the Licensed Material is synched in timed relation with a moving image. + + b. Adapter's License means the license You apply to Your Copyright and Similar Rights in Your contributions to Adapted Material in accordance with the terms and conditions of this Public License. + + c. Copyright and Similar Rights means copyright and/or similar rights closely related to copyright including, without limitation, performance, broadcast, sound recording, and Sui Generis Database Rights, without regard to how the rights are labeled or categorized. For purposes of this Public License, the rights specified in Section 2(b)(1)-(2) are not Copyright and Similar Rights. + + d. 
Effective Technological Measures means those measures that, in the absence of proper authority, may not be circumvented under laws fulfilling obligations under Article 11 of the WIPO Copyright Treaty adopted on December 20, 1996, and/or similar international agreements. + + e. Exceptions and Limitations means fair use, fair dealing, and/or any other exception or limitation to Copyright and Similar Rights that applies to Your use of the Licensed Material. + + f. Licensed Material means the artistic or literary work, database, or other material to which the Licensor applied this Public License. + + g. Licensed Rights means the rights granted to You subject to the terms and conditions of this Public License, which are limited to all Copyright and Similar Rights that apply to Your use of the Licensed Material and that the Licensor has authority to license. + + h. Licensor means the individual(s) or entity(ies) granting rights under this Public License. + + i. Share means to provide material to the public by any means or process that requires permission under the Licensed Rights, such as reproduction, public display, public performance, distribution, dissemination, communication, or importation, and to make material available to the public including in ways that members of the public may access the material from a place and at a time individually chosen by them. + + j. Sui Generis Database Rights means rights other than copyright resulting from Directive 96/9/EC of the European Parliament and of the Council of 11 March 1996 on the legal protection of databases, as amended and/or succeeded, as well as other essentially equivalent rights anywhere in the world. + + k. You means the individual or entity exercising the Licensed Rights under this Public License. Your has a corresponding meaning. + +Section 2 – Scope. + + a. License grant. + + 1. Subject to the terms and conditions of this Public License, the Licensor hereby grants You a worldwide, royalty-free, non-sublicensable, non-exclusive, irrevocable license to exercise the Licensed Rights in the Licensed Material to: + + A. reproduce and Share the Licensed Material, in whole or in part; and + + B. produce, reproduce, and Share Adapted Material. + + 2. Exceptions and Limitations. For the avoidance of doubt, where Exceptions and Limitations apply to Your use, this Public License does not apply, and You do not need to comply with its terms and conditions. + + 3. Term. The term of this Public License is specified in Section 6(a). + + 4. Media and formats; technical modifications allowed. The Licensor authorizes You to exercise the Licensed Rights in all media and formats whether now known or hereafter created, and to make technical modifications necessary to do so. The Licensor waives and/or agrees not to assert any right or authority to forbid You from making technical modifications necessary to exercise the Licensed Rights, including technical modifications necessary to circumvent Effective Technological Measures. For purposes of this Public License, simply making modifications authorized by this Section 2(a)(4) never produces Adapted Material. + + 5. Downstream recipients. + + A. Offer from the Licensor – Licensed Material. Every recipient of the Licensed Material automatically receives an offer from the Licensor to exercise the Licensed Rights under the terms and conditions of this Public License. + + B. No downstream restrictions. 
You may not offer or impose any additional or different terms or conditions on, or apply any Effective Technological Measures to, the Licensed Material if doing so restricts exercise of the Licensed Rights by any recipient of the Licensed Material. + + 6. No endorsement. Nothing in this Public License constitutes or may be construed as permission to assert or imply that You are, or that Your use of the Licensed Material is, connected with, or sponsored, endorsed, or granted official status by, the Licensor or others designated to receive attribution as provided in Section 3(a)(1)(A)(i). + +b. Other rights. + + 1. Moral rights, such as the right of integrity, are not licensed under this Public License, nor are publicity, privacy, and/or other similar personality rights; however, to the extent possible, the Licensor waives and/or agrees not to assert any such rights held by the Licensor to the limited extent necessary to allow You to exercise the Licensed Rights, but not otherwise. + + 2. Patent and trademark rights are not licensed under this Public License. + + 3. To the extent possible, the Licensor waives any right to collect royalties from You for the exercise of the Licensed Rights, whether directly or through a collecting society under any voluntary or waivable statutory or compulsory licensing scheme. In all other cases the Licensor expressly reserves any right to collect such royalties. + +Section 3 – License Conditions. + +Your exercise of the Licensed Rights is expressly made subject to the following conditions. + + a. Attribution. + + 1. If You Share the Licensed Material (including in modified form), You must: + + A. retain the following if it is supplied by the Licensor with the Licensed Material: + + i. identification of the creator(s) of the Licensed Material and any others designated to receive attribution, in any reasonable manner requested by the Licensor (including by pseudonym if designated); + + ii. a copyright notice; + + iii. a notice that refers to this Public License; + + iv. a notice that refers to the disclaimer of warranties; + + v. a URI or hyperlink to the Licensed Material to the extent reasonably practicable; + + B. indicate if You modified the Licensed Material and retain an indication of any previous modifications; and + + C. indicate the Licensed Material is licensed under this Public License, and include the text of, or the URI or hyperlink to, this Public License. + + 2. You may satisfy the conditions in Section 3(a)(1) in any reasonable manner based on the medium, means, and context in which You Share the Licensed Material. For example, it may be reasonable to satisfy the conditions by providing a URI or hyperlink to a resource that includes the required information. + + 3. If requested by the Licensor, You must remove any of the information required by Section 3(a)(1)(A) to the extent reasonably practicable. + + 4. If You Share Adapted Material You produce, the Adapter's License You apply must not prevent recipients of the Adapted Material from complying with this Public License. + +Section 4 – Sui Generis Database Rights. + +Where the Licensed Rights include Sui Generis Database Rights that apply to Your use of the Licensed Material: + + a. for the avoidance of doubt, Section 2(a)(1) grants You the right to extract, reuse, reproduce, and Share all or a substantial portion of the contents of the database; + + b. 
if You include all or a substantial portion of the database contents in a database in which You have Sui Generis Database Rights, then the database in which You have Sui Generis Database Rights (but not its individual contents) is Adapted Material; and + + c. You must comply with the conditions in Section 3(a) if You Share all or a substantial portion of the contents of the database. +For the avoidance of doubt, this Section 4 supplements and does not replace Your obligations under this Public License where the Licensed Rights include other Copyright and Similar Rights. + +Section 5 – Disclaimer of Warranties and Limitation of Liability. + + a. Unless otherwise separately undertaken by the Licensor, to the extent possible, the Licensor offers the Licensed Material as-is and as-available, and makes no representations or warranties of any kind concerning the Licensed Material, whether express, implied, statutory, or other. This includes, without limitation, warranties of title, merchantability, fitness for a particular purpose, non-infringement, absence of latent or other defects, accuracy, or the presence or absence of errors, whether or not known or discoverable. Where disclaimers of warranties are not allowed in full or in part, this disclaimer may not apply to You. + + b. To the extent possible, in no event will the Licensor be liable to You on any legal theory (including, without limitation, negligence) or otherwise for any direct, special, indirect, incidental, consequential, punitive, exemplary, or other losses, costs, expenses, or damages arising out of this Public License or use of the Licensed Material, even if the Licensor has been advised of the possibility of such losses, costs, expenses, or damages. Where a limitation of liability is not allowed in full or in part, this limitation may not apply to You. + + c. The disclaimer of warranties and limitation of liability provided above shall be interpreted in a manner that, to the extent possible, most closely approximates an absolute disclaimer and waiver of all liability. + +Section 6 – Term and Termination. + + a. This Public License applies for the term of the Copyright and Similar Rights licensed here. However, if You fail to comply with this Public License, then Your rights under this Public License terminate automatically. + + b. Where Your right to use the Licensed Material has terminated under Section 6(a), it reinstates: + + 1. automatically as of the date the violation is cured, provided it is cured within 30 days of Your discovery of the violation; or + + 2. upon express reinstatement by the Licensor. + + c. For the avoidance of doubt, this Section 6(b) does not affect any right the Licensor may have to seek remedies for Your violations of this Public License. + + d. For the avoidance of doubt, the Licensor may also offer the Licensed Material under separate terms or conditions or stop distributing the Licensed Material at any time; however, doing so will not terminate this Public License. + + e. Sections 1, 5, 6, 7, and 8 survive termination of this Public License. + +Section 7 – Other Terms and Conditions. + + a. The Licensor shall not be bound by any additional or different terms or conditions communicated by You unless expressly agreed. + + b. Any arrangements, understandings, or agreements regarding the Licensed Material not stated herein are separate from and independent of the terms and conditions of this Public License. + +Section 8 – Interpretation. + + a. 
For the avoidance of doubt, this Public License does not, and shall not be interpreted to, reduce, limit, restrict, or impose conditions on any use of the Licensed Material that could lawfully be made without permission under this Public License. + + b. To the extent possible, if any provision of this Public License is deemed unenforceable, it shall be automatically reformed to the minimum extent necessary to make it enforceable. If the provision cannot be reformed, it shall be severed from this Public License without affecting the enforceability of the remaining terms and conditions. + + c. No term or condition of this Public License will be waived and no failure to comply consented to unless expressly agreed to by the Licensor. + + d. Nothing in this Public License constitutes or may be interpreted as a limitation upon, or waiver of, any privileges and immunities that apply to the Licensor or You, including from the legal processes of any jurisdiction or authority. + +Creative Commons is not a party to its public licenses. Notwithstanding, Creative Commons may elect to apply one of its public licenses to material it publishes and in those instances will be considered the “Licensor.” Except for the limited purpose of indicating that material is shared under a Creative Commons public license or as otherwise permitted by the Creative Commons policies published at creativecommons.org/policies, Creative Commons does not authorize the use of the trademark “Creative Commons” or any other trademark or logo of Creative Commons without its prior written consent including, without limitation, in connection with any unauthorized modifications to any of its public licenses or any other arrangements, understandings, or agreements concerning use of licensed material. For the avoidance of doubt, this paragraph does not form part of the public licenses. + +Creative Commons may be contacted at creativecommons.org. \ No newline at end of file diff --git a/resources/license/cc-by-sa-4.0.txt b/resources/license/cc-by-sa-4.0.txt new file mode 100644 index 0000000000..21a54e727b --- /dev/null +++ b/resources/license/cc-by-sa-4.0.txt @@ -0,0 +1,170 @@ +Creative Commons Attribution-ShareAlike 4.0 International + + Creative Commons Corporation (“Creative Commons”) is not a law firm and does not provide legal services or legal advice. Distribution of Creative Commons public licenses does not create a lawyer-client or other relationship. Creative Commons makes its licenses and related information available on an “as-is” basis. Creative Commons gives no warranties regarding its licenses, any material licensed under their terms and conditions, or any related information. Creative Commons disclaims all liability for damages resulting from their use to the fullest extent possible. + +Using Creative Commons Public Licenses + +Creative Commons public licenses provide a standard set of terms and conditions that creators and other rights holders may use to share original works of authorship and other material subject to copyright and certain other rights specified in the public license below. The following considerations are for informational purposes only, are not exhaustive, and do not form part of our licenses. + +Considerations for licensors: Our public licenses are intended for use by those authorized to give the public permission to use material in ways otherwise restricted by copyright and certain other rights. Our licenses are irrevocable. 
Licensors should read and understand the terms and conditions of the license they choose before applying it. Licensors should also secure all rights necessary before applying our licenses so that the public can reuse the material as expected. Licensors should clearly mark any material not subject to the license. This includes other CC-licensed material, or material used under an exception or limitation to copyright. More considerations for licensors. + +Considerations for the public: By using one of our public licenses, a licensor grants the public permission to use the licensed material under specified terms and conditions. If the licensor’s permission is not necessary for any reason–for example, because of any applicable exception or limitation to copyright–then that use is not regulated by the license. Our licenses grant only permissions under copyright and certain other rights that a licensor has authority to grant. Use of the licensed material may still be restricted for other reasons, including because others have copyright or other rights in the material. A licensor may make special requests, such as asking that all changes be marked or described. + +Although not required by our licenses, you are encouraged to respect those requests where reasonable. More considerations for the public. + +Creative Commons Attribution-ShareAlike 4.0 International Public License + +By exercising the Licensed Rights (defined below), You accept and agree to be bound by the terms and conditions of this Creative Commons Attribution-ShareAlike 4.0 International Public License ("Public License"). To the extent this Public License may be interpreted as a contract, You are granted the Licensed Rights in consideration of Your acceptance of these terms and conditions, and the Licensor grants You such rights in consideration of benefits the Licensor receives from making the Licensed Material available under these terms and conditions. + +Section 1 – Definitions. + + a. Adapted Material means material subject to Copyright and Similar Rights that is derived from or based upon the Licensed Material and in which the Licensed Material is translated, altered, arranged, transformed, or otherwise modified in a manner requiring permission under the Copyright and Similar Rights held by the Licensor. For purposes of this Public License, where the Licensed Material is a musical work, performance, or sound recording, Adapted Material is always produced where the Licensed Material is synched in timed relation with a moving image. + + b. Adapter's License means the license You apply to Your Copyright and Similar Rights in Your contributions to Adapted Material in accordance with the terms and conditions of this Public License. + + c. BY-SA Compatible License means a license listed at creativecommons.org/compatiblelicenses, approved by Creative Commons as essentially the equivalent of this Public License. + + d. Copyright and Similar Rights means copyright and/or similar rights closely related to copyright including, without limitation, performance, broadcast, sound recording, and Sui Generis Database Rights, without regard to how the rights are labeled or categorized. For purposes of this Public License, the rights specified in Section 2(b)(1)-(2) are not Copyright and Similar Rights. + + e. 
Effective Technological Measures means those measures that, in the absence of proper authority, may not be circumvented under laws fulfilling obligations under Article 11 of the WIPO Copyright Treaty adopted on December 20, 1996, and/or similar international agreements. + + f. Exceptions and Limitations means fair use, fair dealing, and/or any other exception or limitation to Copyright and Similar Rights that applies to Your use of the Licensed Material. + + g. License Elements means the license attributes listed in the name of a Creative Commons Public License. The License Elements of this Public License are Attribution and ShareAlike. + + h. Licensed Material means the artistic or literary work, database, or other material to which the Licensor applied this Public License. + + i. Licensed Rights means the rights granted to You subject to the terms and conditions of this Public License, which are limited to all Copyright and Similar Rights that apply to Your use of the Licensed Material and that the Licensor has authority to license. + + j. Licensor means the individual(s) or entity(ies) granting rights under this Public License. + + k. Share means to provide material to the public by any means or process that requires permission under the Licensed Rights, such as reproduction, public display, public performance, distribution, dissemination, communication, or importation, and to make material available to the public including in ways that members of the public may access the material from a place and at a time individually chosen by them. + + l. Sui Generis Database Rights means rights other than copyright resulting from Directive 96/9/EC of the European Parliament and of the Council of 11 March 1996 on the legal protection of databases, as amended and/or succeeded, as well as other essentially equivalent rights anywhere in the world. + + m. You means the individual or entity exercising the Licensed Rights under this Public License. Your has a corresponding meaning. + +Section 2 – Scope. + + a. License grant. + + 1. Subject to the terms and conditions of this Public License, the Licensor hereby grants You a worldwide, royalty-free, non-sublicensable, non-exclusive, irrevocable license to exercise the Licensed Rights in the Licensed Material to: + + A. reproduce and Share the Licensed Material, in whole or in part; and + + B. produce, reproduce, and Share Adapted Material. + + 2. Exceptions and Limitations. For the avoidance of doubt, where Exceptions and Limitations apply to Your use, this Public License does not apply, and You do not need to comply with its terms and conditions. + + 3. Term. The term of this Public License is specified in Section 6(a). + + 4. Media and formats; technical modifications allowed. The Licensor authorizes You to exercise the Licensed Rights in all media and formats whether now known or hereafter created, and to make technical modifications necessary to do so. The Licensor waives and/or agrees not to assert any right or authority to forbid You from making technical modifications necessary to exercise the Licensed Rights, including technical modifications necessary to circumvent Effective Technological Measures. For purposes of this Public License, simply making modifications authorized by this Section 2(a)(4) never produces Adapted Material. + + 5. Downstream recipients. + + A. Offer from the Licensor – Licensed Material. 
Every recipient of the Licensed Material automatically receives an offer from the Licensor to exercise the Licensed Rights under the terms and conditions of this Public License. + + B. Additional offer from the Licensor – Adapted Material. Every recipient of Adapted Material from You automatically receives an offer from the Licensor to exercise the Licensed Rights in the Adapted Material under the conditions of the Adapter’s License You apply. + + C. No downstream restrictions. You may not offer or impose any additional or different terms or conditions on, or apply any Effective Technological Measures to, the Licensed Material if doing so restricts exercise of the Licensed Rights by any recipient of the Licensed Material. + + 6. No endorsement. Nothing in this Public License constitutes or may be construed as permission to assert or imply that You are, or that Your use of the Licensed Material is, connected with, or sponsored, endorsed, or granted official status by, the Licensor or others designated to receive attribution as provided in Section 3(a)(1)(A)(i). + + b. Other rights. + + 1. Moral rights, such as the right of integrity, are not licensed under this Public License, nor are publicity, privacy, and/or other similar personality rights; however, to the extent possible, the Licensor waives and/or agrees not to assert any such rights held by the Licensor to the limited extent necessary to allow You to exercise the Licensed Rights, but not otherwise. + + 2. Patent and trademark rights are not licensed under this Public License. + + 3. To the extent possible, the Licensor waives any right to collect royalties from You for the exercise of the Licensed Rights, whether directly or through a collecting society under any voluntary or waivable statutory or compulsory licensing scheme. In all other cases the Licensor expressly reserves any right to collect such royalties. + +Section 3 – License Conditions. + +Your exercise of the Licensed Rights is expressly made subject to the following conditions. + + a. Attribution. + + 1. If You Share the Licensed Material (including in modified form), You must: + + A. retain the following if it is supplied by the Licensor with the Licensed Material: + + i. identification of the creator(s) of the Licensed Material and any others designated to receive attribution, in any reasonable manner requested by the Licensor (including by pseudonym if designated); + + ii. a copyright notice; + + iii. a notice that refers to this Public License; + + iv. a notice that refers to the disclaimer of warranties; + + v. a URI or hyperlink to the Licensed Material to the extent reasonably practicable; + + B. indicate if You modified the Licensed Material and retain an indication of any previous modifications; and + + C. indicate the Licensed Material is licensed under this Public License, and include the text of, or the URI or hyperlink to, this Public License. + + 2. You may satisfy the conditions in Section 3(a)(1) in any reasonable manner based on the medium, means, and context in which You Share the Licensed Material. For example, it may be reasonable to satisfy the conditions by providing a URI or hyperlink to a resource that includes the required information. + + 3. If requested by the Licensor, You must remove any of the information required by Section 3(a)(1)(A) to the extent reasonably practicable. + + b. ShareAlike.In addition to the conditions in Section 3(a), if You Share Adapted Material You produce, the following conditions also apply. + + 1. 
The Adapter’s License You apply must be a Creative Commons license with the same License Elements, this version or later, or a BY-SA Compatible License. + + 2. You must include the text of, or the URI or hyperlink to, the Adapter's License You apply. You may satisfy this condition in any reasonable manner based on the medium, means, and context in which You Share Adapted Material. + + 3. You may not offer or impose any additional or different terms or conditions on, or apply any Effective Technological Measures to, Adapted Material that restrict exercise of the rights granted under the Adapter's License You apply. + +Section 4 – Sui Generis Database Rights. + +Where the Licensed Rights include Sui Generis Database Rights that apply to Your use of the Licensed Material: + + a. for the avoidance of doubt, Section 2(a)(1) grants You the right to extract, reuse, reproduce, and Share all or a substantial portion of the contents of the database; + + b. if You include all or a substantial portion of the database contents in a database in which You have Sui Generis Database Rights, then the database in which You have Sui Generis Database Rights (but not its individual contents) is Adapted Material, including for purposes of Section 3(b); and + + c. You must comply with the conditions in Section 3(a) if You Share all or a substantial portion of the contents of the database. +For the avoidance of doubt, this Section 4 supplements and does not replace Your obligations under this Public License where the Licensed Rights include other Copyright and Similar Rights. + +Section 5 – Disclaimer of Warranties and Limitation of Liability. + + a. Unless otherwise separately undertaken by the Licensor, to the extent possible, the Licensor offers the Licensed Material as-is and as-available, and makes no representations or warranties of any kind concerning the Licensed Material, whether express, implied, statutory, or other. This includes, without limitation, warranties of title, merchantability, fitness for a particular purpose, non-infringement, absence of latent or other defects, accuracy, or the presence or absence of errors, whether or not known or discoverable. Where disclaimers of warranties are not allowed in full or in part, this disclaimer may not apply to You. + + b. To the extent possible, in no event will the Licensor be liable to You on any legal theory (including, without limitation, negligence) or otherwise for any direct, special, indirect, incidental, consequential, punitive, exemplary, or other losses, costs, expenses, or damages arising out of this Public License or use of the Licensed Material, even if the Licensor has been advised of the possibility of such losses, costs, expenses, or damages. Where a limitation of liability is not allowed in full or in part, this limitation may not apply to You. + + c. The disclaimer of warranties and limitation of liability provided above shall be interpreted in a manner that, to the extent possible, most closely approximates an absolute disclaimer and waiver of all liability. + +Section 6 – Term and Termination. + + a. This Public License applies for the term of the Copyright and Similar Rights licensed here. However, if You fail to comply with this Public License, then Your rights under this Public License terminate automatically. + + b. Where Your right to use the Licensed Material has terminated under Section 6(a), it reinstates: + + 1. 
automatically as of the date the violation is cured, provided it is cured within 30 days of Your discovery of the violation; or + + 2. upon express reinstatement by the Licensor. + + c. For the avoidance of doubt, this Section 6(b) does not affect any right the Licensor may have to seek remedies for Your violations of this Public License. + + d. For the avoidance of doubt, the Licensor may also offer the Licensed Material under separate terms or conditions or stop distributing the Licensed Material at any time; however, doing so will not terminate this Public License. + + e. Sections 1, 5, 6, 7, and 8 survive termination of this Public License. + +Section 7 – Other Terms and Conditions. + + a. The Licensor shall not be bound by any additional or different terms or conditions communicated by You unless expressly agreed. + + b. Any arrangements, understandings, or agreements regarding the Licensed Material not stated herein are separate from and independent of the terms and conditions of this Public License. + +Section 8 – Interpretation. + + a. For the avoidance of doubt, this Public License does not, and shall not be interpreted to, reduce, limit, restrict, or impose conditions on any use of the Licensed Material that could lawfully be made without permission under this Public License. + + b. To the extent possible, if any provision of this Public License is deemed unenforceable, it shall be automatically reformed to the minimum extent necessary to make it enforceable. If the provision cannot be reformed, it shall be severed from this Public License without affecting the enforceability of the remaining terms and conditions. + + c. No term or condition of this Public License will be waived and no failure to comply consented to unless expressly agreed to by the Licensor. + + d. Nothing in this Public License constitutes or may be interpreted as a limitation upon, or waiver of, any privileges and immunities that apply to the Licensor or You, including from the legal processes of any jurisdiction or authority. + +Creative Commons is not a party to its public licenses. Notwithstanding, Creative Commons may elect to apply one of its public licenses to material it publishes and in those instances will be considered the “Licensor.” Except for the limited purpose of indicating that material is shared under a Creative Commons public license or as otherwise permitted by the Creative Commons policies published at creativecommons.org/policies, Creative Commons does not authorize the use of the trademark “Creative Commons” or any other trademark or logo of Creative Commons without its prior written consent including, without limitation, in connection with any unauthorized modifications to any of its public licenses or any other arrangements, understandings, or agreements concerning use of licensed material. For the avoidance of doubt, this paragraph does not form part of the public licenses. + +Creative Commons may be contacted at creativecommons.org. \ No newline at end of file diff --git a/resources/license/cc.txt b/resources/license/cc.txt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/resources/license/cc0-1.0.txt b/resources/license/cc0-1.0.txt new file mode 100644 index 0000000000..1625c17936 --- /dev/null +++ b/resources/license/cc0-1.0.txt @@ -0,0 +1,121 @@ +Creative Commons Legal Code + +CC0 1.0 Universal + + CREATIVE COMMONS CORPORATION IS NOT A LAW FIRM AND DOES NOT PROVIDE + LEGAL SERVICES. DISTRIBUTION OF THIS DOCUMENT DOES NOT CREATE AN + ATTORNEY-CLIENT RELATIONSHIP. 
CREATIVE COMMONS PROVIDES THIS + INFORMATION ON AN "AS-IS" BASIS. CREATIVE COMMONS MAKES NO WARRANTIES + REGARDING THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS + PROVIDED HEREUNDER, AND DISCLAIMS LIABILITY FOR DAMAGES RESULTING FROM + THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS PROVIDED + HEREUNDER. + +Statement of Purpose + +The laws of most jurisdictions throughout the world automatically confer +exclusive Copyright and Related Rights (defined below) upon the creator +and subsequent owner(s) (each and all, an "owner") of an original work of +authorship and/or a database (each, a "Work"). + +Certain owners wish to permanently relinquish those rights to a Work for +the purpose of contributing to a commons of creative, cultural and +scientific works ("Commons") that the public can reliably and without fear +of later claims of infringement build upon, modify, incorporate in other +works, reuse and redistribute as freely as possible in any form whatsoever +and for any purposes, including without limitation commercial purposes. +These owners may contribute to the Commons to promote the ideal of a free +culture and the further production of creative, cultural and scientific +works, or to gain reputation or greater distribution for their Work in +part through the use and efforts of others. + +For these and/or other purposes and motivations, and without any +expectation of additional consideration or compensation, the person +associating CC0 with a Work (the "Affirmer"), to the extent that he or she +is an owner of Copyright and Related Rights in the Work, voluntarily +elects to apply CC0 to the Work and publicly distribute the Work under its +terms, with knowledge of his or her Copyright and Related Rights in the +Work and the meaning and intended legal effect of CC0 on those rights. + +1. Copyright and Related Rights. A Work made available under CC0 may be +protected by copyright and related or neighboring rights ("Copyright and +Related Rights"). Copyright and Related Rights include, but are not +limited to, the following: + + i. the right to reproduce, adapt, distribute, perform, display, + communicate, and translate a Work; + ii. moral rights retained by the original author(s) and/or performer(s); +iii. publicity and privacy rights pertaining to a person's image or + likeness depicted in a Work; + iv. rights protecting against unfair competition in regards to a Work, + subject to the limitations in paragraph 4(a), below; + v. rights protecting the extraction, dissemination, use and reuse of data + in a Work; + vi. database rights (such as those arising under Directive 96/9/EC of the + European Parliament and of the Council of 11 March 1996 on the legal + protection of databases, and under any national implementation + thereof, including any amended or successor version of such + directive); and +vii. other similar, equivalent or corresponding rights throughout the + world based on applicable law or treaty, and any national + implementations thereof. + +2. Waiver. 
To the greatest extent permitted by, but not in contravention +of, applicable law, Affirmer hereby overtly, fully, permanently, +irrevocably and unconditionally waives, abandons, and surrenders all of +Affirmer's Copyright and Related Rights and associated claims and causes +of action, whether now known or unknown (including existing as well as +future claims and causes of action), in the Work (i) in all territories +worldwide, (ii) for the maximum duration provided by applicable law or +treaty (including future time extensions), (iii) in any current or future +medium and for any number of copies, and (iv) for any purpose whatsoever, +including without limitation commercial, advertising or promotional +purposes (the "Waiver"). Affirmer makes the Waiver for the benefit of each +member of the public at large and to the detriment of Affirmer's heirs and +successors, fully intending that such Waiver shall not be subject to +revocation, rescission, cancellation, termination, or any other legal or +equitable action to disrupt the quiet enjoyment of the Work by the public +as contemplated by Affirmer's express Statement of Purpose. + +3. Public License Fallback. Should any part of the Waiver for any reason +be judged legally invalid or ineffective under applicable law, then the +Waiver shall be preserved to the maximum extent permitted taking into +account Affirmer's express Statement of Purpose. In addition, to the +extent the Waiver is so judged Affirmer hereby grants to each affected +person a royalty-free, non transferable, non sublicensable, non exclusive, +irrevocable and unconditional license to exercise Affirmer's Copyright and +Related Rights in the Work (i) in all territories worldwide, (ii) for the +maximum duration provided by applicable law or treaty (including future +time extensions), (iii) in any current or future medium and for any number +of copies, and (iv) for any purpose whatsoever, including without +limitation commercial, advertising or promotional purposes (the +"License"). The License shall be deemed effective as of the date CC0 was +applied by Affirmer to the Work. Should any part of the License for any +reason be judged legally invalid or ineffective under applicable law, such +partial invalidity or ineffectiveness shall not invalidate the remainder +of the License, and in such case Affirmer hereby affirms that he or she +will not (i) exercise any of his or her remaining Copyright and Related +Rights in the Work or (ii) assert any associated claims and causes of +action with respect to the Work, in either case contrary to Affirmer's +express Statement of Purpose. + +4. Limitations and Disclaimers. + + a. No trademark or patent rights held by Affirmer are waived, abandoned, + surrendered, licensed or otherwise affected by this document. + b. Affirmer offers the Work as-is and makes no representations or + warranties of any kind concerning the Work, express, implied, + statutory or otherwise, including without limitation warranties of + title, merchantability, fitness for a particular purpose, non + infringement, or the absence of latent or other defects, accuracy, or + the present or absence of errors, whether or not discoverable, all to + the greatest extent permissible under applicable law. + c. Affirmer disclaims responsibility for clearing rights of other persons + that may apply to the Work or any use thereof, including without + limitation any person's Copyright and Related Rights in the Work. 
+ Further, Affirmer disclaims responsibility for obtaining any necessary + consents, permissions or other rights required for any use of the + Work. + d. Affirmer understands and acknowledges that Creative Commons is not a + party to this document and has no duty or obligation with respect to + this CC0 or use of the Work. \ No newline at end of file diff --git a/resources/license/ecl-2.0.txt b/resources/license/ecl-2.0.txt new file mode 100644 index 0000000000..d75f5b5006 --- /dev/null +++ b/resources/license/ecl-2.0.txt @@ -0,0 +1,98 @@ +Educational Community License +Version 2.0, April 2007 + +http://www.osedu.org/licenses/ + +The Educational Community License version 2.0 ("ECL") consists of the Apache 2.0 license, modified to change the scope of the patent grant in section 3 to be specific to the needs of the education communities using this license. The original Apache 2.0 license can be found at: http://www.apache.org/licenses/LICENSE-2.0 + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + +"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. + +"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. + +"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. + +"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. + +"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). + +"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. + +"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. 
For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." + +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. + +2. Grant of Copyright License. + +Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. + +Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. Any patent license granted hereby with respect to contributions by an individual employed by an institution or organization is limited to patent claims where the individual that is the author of the Work is also the inventor of the patent claims licensed, and where the organization or institution has the right to grant such license under applicable grant and research funding agreements. No other express or implied licenses are granted. + +4. Redistribution. + +You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: + + a. You must give any other recipients of the Work or Derivative Works a copy of this License; and + + b. You must cause any modified files to carry prominent notices stating that You changed the files; and + + c. You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and + + d. 
If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. + +You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. + +5. Submission of Contributions. + +Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. + +6. Trademarks. + +This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. + +Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. + +In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. 
+ +While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Educational Community License to your work + +To apply the Educational Community License to your work, attach +the following boilerplate notice, with the fields enclosed by +brackets "[]" replaced with your own identifying information. +(Don't include the brackets!) The text should be enclosed in the +appropriate comment syntax for the file format. We also recommend +that a file or class name and description of purpose be included on +the same "printed page" as the copyright notice for easier +identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] Licensed under the + Educational Community License, Version 2.0 (the "License"); you may + not use this file except in compliance with the License. You may + obtain a copy of the License at + + http://www.osedu.org/licenses/ECL-2.0 + + Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an "AS IS" + BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + or implied. See the License for the specific language governing + permissions and limitations under the License. \ No newline at end of file diff --git a/resources/license/epl-1.0.txt b/resources/license/epl-1.0.txt new file mode 100644 index 0000000000..069406a104 --- /dev/null +++ b/resources/license/epl-1.0.txt @@ -0,0 +1,73 @@ +Eclipse Public License - v 1.0 + +THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC LICENSE ("AGREEMENT"). ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT. + +1. DEFINITIONS + +"Contribution" means: + a) in the case of the initial Contributor, the initial code and documentation distributed under this Agreement, and + b) in the case of each subsequent Contributor: + i) changes to the Program, and + ii) additions to the Program; + +where such changes and/or additions to the Program originate from and are distributed by that particular Contributor. A Contribution 'originates' from a Contributor if it was added to the Program by such Contributor itself or anyone acting on such Contributor's behalf. Contributions do not include additions to the Program which: (i) are separate modules of software distributed in conjunction with the Program under their own license agreement, and (ii) are not derivative works of the Program. +"Contributor" means any person or entity that distributes the Program. + +"Licensed Patents" mean patent claims licensable by a Contributor which are necessarily infringed by the use or sale of its Contribution alone or when combined with the Program. + +"Program" means the Contributions distributed in accordance with this Agreement. + +"Recipient" means anyone who receives the Program under this Agreement, including all Contributors. + +2. 
GRANT OF RIGHTS + + a) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free copyright license to reproduce, prepare derivative works of, publicly display, publicly perform, distribute and sublicense the Contribution of such Contributor, if any, and such derivative works, in source code and object code form. + + b) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free patent license under Licensed Patents to make, use, sell, offer to sell, import and otherwise transfer the Contribution of such Contributor, if any, in source code and object code form. This patent license shall apply to the combination of the Contribution and the Program if, at the time the Contribution is added by the Contributor, such addition of the Contribution causes such combination to be covered by the Licensed Patents. The patent license shall not apply to any other combinations which include the Contribution. No hardware per se is licensed hereunder. + + c) Recipient understands that although each Contributor grants the licenses to its Contributions set forth herein, no assurances are provided by any Contributor that the Program does not infringe the patent or other intellectual property rights of any other entity. Each Contributor disclaims any liability to Recipient for claims brought by any other entity based on infringement of intellectual property rights or otherwise. As a condition to exercising the rights and licenses granted hereunder, each Recipient hereby assumes sole responsibility to secure any other intellectual property rights needed, if any. For example, if a third party patent license is required to allow Recipient to distribute the Program, it is Recipient's responsibility to acquire that license before distributing the Program. + + d) Each Contributor represents that to its knowledge it has sufficient copyright rights in its Contribution, if any, to grant the copyright license set forth in this Agreement. + +3. REQUIREMENTS +A Contributor may choose to distribute the Program in object code form under its own license agreement, provided that: + + a) it complies with the terms and conditions of this Agreement; and + + b) its license agreement: + i) effectively disclaims on behalf of all Contributors all warranties and conditions, express and implied, including warranties or conditions of title and non-infringement, and implied warranties or conditions of merchantability and fitness for a particular purpose; + ii) effectively excludes on behalf of all Contributors all liability for damages, including direct, indirect, special, incidental and consequential damages, such as lost profits; + iii) states that any provisions which differ from this Agreement are offered by that Contributor alone and not by any other party; and + iv) states that source code for the Program is available from such Contributor, and informs licensees how to obtain it in a reasonable manner on or through a medium customarily used for software exchange. + +When the Program is made available in source code form: + + a) it must be made available under this Agreement; and + + b) a copy of this Agreement must be included with each copy of the Program. +Contributors may not remove or alter any copyright notices contained within the Program. 
+ +Each Contributor must identify itself as the originator of its Contribution, if any, in a manner that reasonably allows subsequent Recipients to identify the originator of the Contribution. + +4. COMMERCIAL DISTRIBUTION +Commercial distributors of software may accept certain responsibilities with respect to end users, business partners and the like. While this license is intended to facilitate the commercial use of the Program, the Contributor who includes the Program in a commercial product offering should do so in a manner which does not create potential liability for other Contributors. Therefore, if a Contributor includes the Program in a commercial product offering, such Contributor ("Commercial Contributor") hereby agrees to defend and indemnify every other Contributor ("Indemnified Contributor") against any losses, damages and costs (collectively "Losses") arising from claims, lawsuits and other legal actions brought by a third party against the Indemnified Contributor to the extent caused by the acts or omissions of such Commercial Contributor in connection with its distribution of the Program in a commercial product offering. The obligations in this section do not apply to any claims or Losses relating to any actual or alleged intellectual property infringement. In order to qualify, an Indemnified Contributor must: a) promptly notify the Commercial Contributor in writing of such claim, and b) allow the Commercial Contributor to control, and cooperate with the Commercial Contributor in, the defense and any related settlement negotiations. The Indemnified Contributor may participate in any such claim at its own expense. + +For example, a Contributor might include the Program in a commercial product offering, Product X. That Contributor is then a Commercial Contributor. If that Commercial Contributor then makes performance claims, or offers warranties related to Product X, those performance claims and warranties are such Commercial Contributor's responsibility alone. Under this section, the Commercial Contributor would have to defend claims against the other Contributors related to those performance claims and warranties, and if a court requires any other Contributor to pay any damages as a result, the Commercial Contributor must pay those damages. + +5. NO WARRANTY +EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, THE PROGRAM IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each Recipient is solely responsible for determining the appropriateness of using and distributing the Program and assumes all risks associated with its exercise of rights under this Agreement , including but not limited to the risks and costs of program errors, compliance with applicable laws, damage to or loss of data, programs or equipment, and unavailability or interruption of operations. + +6. 
DISCLAIMER OF LIABILITY +EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, NEITHER RECIPIENT NOR ANY CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. + +7. GENERAL + +If any provision of this Agreement is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this Agreement, and without further action by the parties hereto, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable. + +If Recipient institutes patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Program itself (excluding combinations of the Program with other software or hardware) infringes such Recipient's patent(s), then such Recipient's rights granted under Section 2(b) shall terminate as of the date such litigation is filed. + +All Recipient's rights under this Agreement shall terminate if it fails to comply with any of the material terms or conditions of this Agreement and does not cure such failure in a reasonable period of time after becoming aware of such noncompliance. If all Recipient's rights under this Agreement terminate, Recipient agrees to cease use and distribution of the Program as soon as reasonably practicable. However, Recipient's obligations under this Agreement and any licenses granted by Recipient relating to the Program shall continue and survive. + +Everyone is permitted to copy and distribute copies of this Agreement, but in order to avoid inconsistency the Agreement is copyrighted and may only be modified in the following manner. The Agreement Steward reserves the right to publish new versions (including revisions) of this Agreement from time to time. No one other than the Agreement Steward has the right to modify this Agreement. The Eclipse Foundation is the initial Agreement Steward. The Eclipse Foundation may assign the responsibility to serve as the Agreement Steward to a suitable separate entity. Each new version of the Agreement will be given a distinguishing version number. The Program (including Contributions) may always be distributed subject to the version of the Agreement under which it was received. In addition, after a new version of the Agreement is published, Contributor may elect to distribute the Program (including its Contributions) under the new version. Except as expressly stated in Sections 2(a) and 2(b) above, Recipient receives no rights or licenses to the intellectual property of any Contributor under this Agreement, whether expressly, by implication, estoppel or otherwise. All rights in the Program not expressly granted under this Agreement are reserved. + +This Agreement is governed by the laws of the State of New York and the intellectual property laws of the United States of America. No party to this Agreement will bring a legal action under this Agreement more than one year after the cause of action arose. Each party waives its rights to a jury trial in any resulting litigation. 
\ No newline at end of file diff --git a/resources/license/epl-2.0.txt b/resources/license/epl-2.0.txt new file mode 100644 index 0000000000..add7d15ec5 --- /dev/null +++ b/resources/license/epl-2.0.txt @@ -0,0 +1,80 @@ +Eclipse Public License - v 2.0 +THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC LICENSE (“AGREEMENT”). ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT. + +1. DEFINITIONS +“Contribution” means: + +a) in the case of the initial Contributor, the initial content Distributed under this Agreement, and +b) in the case of each subsequent Contributor: +i) changes to the Program, and +ii) additions to the Program; +where such changes and/or additions to the Program originate from and are Distributed by that particular Contributor. A Contribution “originates” from a Contributor if it was added to the Program by such Contributor itself or anyone acting on such Contributor's behalf. Contributions do not include changes or additions to the Program that are not Modified Works. +“Contributor” means any person or entity that Distributes the Program. + +“Licensed Patents” mean patent claims licensable by a Contributor which are necessarily infringed by the use or sale of its Contribution alone or when combined with the Program. + +“Program” means the Contributions Distributed in accordance with this Agreement. + +“Recipient” means anyone who receives the Program under this Agreement or any Secondary License (as applicable), including Contributors. + +“Derivative Works” shall mean any work, whether in Source Code or other form, that is based on (or derived from) the Program and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. + +“Modified Works” shall mean any work in Source Code or other form that results from an addition to, deletion from, or modification of the contents of the Program, including, for purposes of clarity any new file in Source Code form that contains any contents of the Program. Modified Works shall not include works that contain only declarations, interfaces, types, classes, structures, or files of the Program solely in each case in order to link to, bind by name, or subclass the Program or Modified Works thereof. + +“Distribute” means the acts of a) distributing or b) making available in any manner that enables the transfer of a copy. + +“Source Code” means the form of a Program preferred for making modifications, including but not limited to software source code, documentation source, and configuration files. + +“Secondary License” means either the GNU General Public License, Version 2.0, or any later versions of that license, including any exceptions or additional permissions as identified by the initial Contributor. + +2. GRANT OF RIGHTS +a) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, Distribute and sublicense the Contribution of such Contributor, if any, and such Derivative Works. +b) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free patent license under Licensed Patents to make, use, sell, offer to sell, import and otherwise transfer the Contribution of such Contributor, if any, in Source Code or other form. 
This patent license shall apply to the combination of the Contribution and the Program if, at the time the Contribution is added by the Contributor, such addition of the Contribution causes such combination to be covered by the Licensed Patents. The patent license shall not apply to any other combinations which include the Contribution. No hardware per se is licensed hereunder. +c) Recipient understands that although each Contributor grants the licenses to its Contributions set forth herein, no assurances are provided by any Contributor that the Program does not infringe the patent or other intellectual property rights of any other entity. Each Contributor disclaims any liability to Recipient for claims brought by any other entity based on infringement of intellectual property rights or otherwise. As a condition to exercising the rights and licenses granted hereunder, each Recipient hereby assumes sole responsibility to secure any other intellectual property rights needed, if any. For example, if a third party patent license is required to allow Recipient to Distribute the Program, it is Recipient's responsibility to acquire that license before distributing the Program. +d) Each Contributor represents that to its knowledge it has sufficient copyright rights in its Contribution, if any, to grant the copyright license set forth in this Agreement. +e) Notwithstanding the terms of any Secondary License, no Contributor makes additional grants to any Recipient (other than those set forth in this Agreement) as a result of such Recipient's receipt of the Program under the terms of a Secondary License (if permitted under the terms of Section 3). +3. REQUIREMENTS +3.1 If a Contributor Distributes the Program in any form, then: + +a) the Program must also be made available as Source Code, in accordance with section 3.2, and the Contributor must accompany the Program with a statement that the Source Code for the Program is available under this Agreement, and informs Recipients how to obtain it in a reasonable manner on or through a medium customarily used for software exchange; and +b) the Contributor may Distribute the Program under a license different than this Agreement, provided that such license: +i) effectively disclaims on behalf of all other Contributors all warranties and conditions, express and implied, including warranties or conditions of title and non-infringement, and implied warranties or conditions of merchantability and fitness for a particular purpose; +ii) effectively excludes on behalf of all other Contributors all liability for damages, including direct, indirect, special, incidental and consequential damages, such as lost profits; +iii) does not attempt to limit or alter the recipients' rights in the Source Code under section 3.2; and +iv) requires any subsequent distribution of the Program by any party to be under a license that satisfies the requirements of this section 3. +3.2 When the Program is Distributed as Source Code: + +a) it must be made available under this Agreement, or if the Program (i) is combined with other material in a separate file or files made available under a Secondary License, and (ii) the initial Contributor attached to the Source Code the notice described in Exhibit A of this Agreement, then the Program may be made available under the terms of such Secondary Licenses, and +b) a copy of this Agreement must be included with each copy of the Program. 
+3.3 Contributors may not remove or alter any copyright, patent, trademark, attribution notices, disclaimers of warranty, or limitations of liability (‘notices’) contained within the Program from any copy of the Program which they Distribute, provided that Contributors may add their own appropriate notices. + +4. COMMERCIAL DISTRIBUTION +Commercial distributors of software may accept certain responsibilities with respect to end users, business partners and the like. While this license is intended to facilitate the commercial use of the Program, the Contributor who includes the Program in a commercial product offering should do so in a manner which does not create potential liability for other Contributors. Therefore, if a Contributor includes the Program in a commercial product offering, such Contributor (“Commercial Contributor”) hereby agrees to defend and indemnify every other Contributor (“Indemnified Contributor”) against any losses, damages and costs (collectively “Losses”) arising from claims, lawsuits and other legal actions brought by a third party against the Indemnified Contributor to the extent caused by the acts or omissions of such Commercial Contributor in connection with its distribution of the Program in a commercial product offering. The obligations in this section do not apply to any claims or Losses relating to any actual or alleged intellectual property infringement. In order to qualify, an Indemnified Contributor must: a) promptly notify the Commercial Contributor in writing of such claim, and b) allow the Commercial Contributor to control, and cooperate with the Commercial Contributor in, the defense and any related settlement negotiations. The Indemnified Contributor may participate in any such claim at its own expense. + +For example, a Contributor might include the Program in a commercial product offering, Product X. That Contributor is then a Commercial Contributor. If that Commercial Contributor then makes performance claims, or offers warranties related to Product X, those performance claims and warranties are such Commercial Contributor's responsibility alone. Under this section, the Commercial Contributor would have to defend claims against the other Contributors related to those performance claims and warranties, and if a court requires any other Contributor to pay any damages as a result, the Commercial Contributor must pay those damages. + +5. NO WARRANTY +EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT PERMITTED BY APPLICABLE LAW, THE PROGRAM IS PROVIDED ON AN “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each Recipient is solely responsible for determining the appropriateness of using and distributing the Program and assumes all risks associated with its exercise of rights under this Agreement, including but not limited to the risks and costs of program errors, compliance with applicable laws, damage to or loss of data, programs or equipment, and unavailability or interruption of operations. + +6. 
DISCLAIMER OF LIABILITY +EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT PERMITTED BY APPLICABLE LAW, NEITHER RECIPIENT NOR ANY CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. + +7. GENERAL +If any provision of this Agreement is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this Agreement, and without further action by the parties hereto, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable. + +If Recipient institutes patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Program itself (excluding combinations of the Program with other software or hardware) infringes such Recipient's patent(s), then such Recipient's rights granted under Section 2(b) shall terminate as of the date such litigation is filed. + +All Recipient's rights under this Agreement shall terminate if it fails to comply with any of the material terms or conditions of this Agreement and does not cure such failure in a reasonable period of time after becoming aware of such noncompliance. If all Recipient's rights under this Agreement terminate, Recipient agrees to cease use and distribution of the Program as soon as reasonably practicable. However, Recipient's obligations under this Agreement and any licenses granted by Recipient relating to the Program shall continue and survive. + +Everyone is permitted to copy and distribute copies of this Agreement, but in order to avoid inconsistency the Agreement is copyrighted and may only be modified in the following manner. The Agreement Steward reserves the right to publish new versions (including revisions) of this Agreement from time to time. No one other than the Agreement Steward has the right to modify this Agreement. The Eclipse Foundation is the initial Agreement Steward. The Eclipse Foundation may assign the responsibility to serve as the Agreement Steward to a suitable separate entity. Each new version of the Agreement will be given a distinguishing version number. The Program (including Contributions) may always be Distributed subject to the version of the Agreement under which it was received. In addition, after a new version of the Agreement is published, Contributor may elect to Distribute the Program (including its Contributions) under the new version. + +Except as expressly stated in Sections 2(a) and 2(b) above, Recipient receives no rights or licenses to the intellectual property of any Contributor under this Agreement, whether expressly, by implication, estoppel or otherwise. All rights in the Program not expressly granted under this Agreement are reserved. Nothing in this Agreement is intended to be enforceable by any entity that is not a Contributor or Recipient. No third-party beneficiary rights are created under this Agreement. 
+ +Exhibit A – Form of Secondary Licenses Notice +“This Source Code may also be made available under the following Secondary Licenses when the conditions for such availability set forth in the Eclipse Public License, v. 2.0 are satisfied: {name license(s), version(s), and exceptions or additional permissions here}.” + +Simply including a copy of this Agreement, including this Exhibit A is not sufficient to license the Source Code under Secondary Licenses. + +If it is not possible or desirable to put the notice in a particular file, then You may include the notice in a location (such as a LICENSE file in a relevant directory) where a recipient would be likely to look for such a notice. + +You may add additional accurate notices of copyright ownership. \ No newline at end of file diff --git a/resources/license/eupl-1.1.txt b/resources/license/eupl-1.1.txt new file mode 100644 index 0000000000..193980520a --- /dev/null +++ b/resources/license/eupl-1.1.txt @@ -0,0 +1,157 @@ +European Union Public Licence V. 1.1 + +EUPL (c) the European Community 2007 + +This European Union Public Licence (the "EUPL") applies to the Work or Software (as defined below) which is provided under the terms of this Licence. Any use of the Work, other than as authorised under this Licence is prohibited (to the extent such use is covered by a right of the copyright holder of the Work). + +The Original Work is provided under the terms of this Licence when the Licensor (as defined below) has placed the following notice immediately following the copyright notice for the Original Work: + + Licensed under the EUPL V.1.1 + +or has expressed by any other mean his willingness to license under the EUPL. + +1. Definitions + +In this Licence, the following terms have the following meaning: + + - The Licence: this Licence. + + - The Original Work or the Software: the software distributed and/or communicated by the Licensor under this Licence, available as Source Code and also as Executable Code as the case may be. + + - Derivative Works: the works or software that could be created by the Licensee, based upon the Original Work or modifications thereof. This Licence does not define the extent of modification or dependence on the Original Work required in order to classify a work as a Derivative Work; this extent is determined by copyright law applicable in the country mentioned in Article 15. + + - The Work: the Original Work and/or its Derivative Works. + + - The Source Code: the human-readable form of the Work which is the most convenient for people to study and modify. + + - The Executable Code: any code which has generally been compiled and which is meant to be interpreted by a computer as a program. + + - The Licensor: the natural or legal person that distributes and/or communicates the Work under the Licence. + + - Contributor(s): any natural or legal person who modifies the Work under the Licence, or otherwise contributes to the creation of a Derivative Work. + + - The Licensee or "You": any natural or legal person who makes any usage of the Software under the terms of the Licence. + + - Distribution and/or Communication: any act of selling, giving, lending, renting, distributing, communicating, transmitting, or otherwise making available, on-line or off-line, copies of the Work or providing access to its essential functionalities at the disposal of any other natural or legal person. + +2. 
Scope of the rights granted by the Licence + +The Licensor hereby grants You a world-wide, royalty-free, non-exclusive, sublicensable licence to do the following, for the duration of copyright vested in the Original Work: + + - use the Work in any circumstance and for all usage, + + - reproduce the Work, + + - modify the Original Work, and make Derivative Works based upon the Work, + + - communicate to the public, including the right to make available or display the Work or copies thereof to the public and perform publicly, as the case may be, the Work, + + - distribute the Work or copies thereof, + + - lend and rent the Work or copies thereof, + + - sub-license rights in the Work or copies thereof. + +Those rights can be exercised on any media, supports and formats, whether now known or later invented, as far as the applicable law permits so. + +In the countries where moral rights apply, the Licensor waives his right to exercise his moral right to the extent allowed by law in order to make effective the licence of the economic rights here above listed. + +The Licensor grants to the Licensee royalty-free, non exclusive usage rights to any patents held by the Licensor, to the extent necessary to make use of the rights granted on the Work under this Licence. + +3. Communication of the Source Code + +The Licensor may provide the Work either in its Source Code form, or as Executable Code. If the Work is provided as Executable Code, the Licensor provides in addition a machine-readable copy of the Source Code of the Work along with each copy of the Work that the Licensor distributes or indicates, in a notice following the copyright notice attached to the Work, a repository where the Source Code is easily and freely accessible for as long as the Licensor continues to distribute and/or communicate the Work. + +4. Limitations on copyright + +Nothing in this Licence is intended to deprive the Licensee of the benefits from any exception or limitation to the exclusive rights of the rights owners in the Original Work or Software, of the exhaustion of those rights or of other applicable limitations thereto. + +5. Obligations of the Licensee + +The grant of the rights mentioned above is subject to some restrictions and obligations imposed on the Licensee. Those obligations are the following: + +Attribution right: the Licensee shall keep intact all copyright, patent or trademarks notices and all notices that refer to the Licence and to the disclaimer of warranties. The Licensee must include a copy of such notices and a copy of the Licence with every copy of the Work he/she distributes and/or communicates. The Licensee must cause any Derivative Work to carry prominent notices stating that the Work has been modified and the date of modification. + +Copyleft clause: If the Licensee distributes and/or communicates copies of the Original Works or Derivative Works based upon the Original Work, this Distribution and/or Communication will be done under the terms of this Licence or of a later version of this Licence unless the Original Work is expressly distributed only under this version of the Licence. The Licensee (becoming Licensor) cannot offer or impose any additional terms or conditions on the Work or Derivative Work that alter or restrict the terms of the Licence. 
+ +Compatibility clause: If the Licensee Distributes and/or Communicates Derivative Works or copies thereof based upon both the Original Work and another work licensed under a Compatible Licence, this Distribution and/or Communication can be done under the terms of this Compatible Licence. For the sake of this clause, "Compatible Licence," refers to the licences listed in the appendix attached to this Licence. Should the Licensee's obligations under the Compatible Licence conflict with his/her obligations under this Licence, the obligations of the Compatible Licence shall prevail. + +Provision of Source Code: When distributing and/or communicating copies of the Work, the Licensee will provide a machine-readable copy of the Source Code or indicate a repository where this Source will be easily and freely available for as long as the Licensee continues to distribute and/or communicate the Work. + +Legal Protection: This Licence does not grant permission to use the trade names, trademarks, service marks, or names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the copyright notice. + +6. Chain of Authorship + +The original Licensor warrants that the copyright in the Original Work granted hereunder is owned by him/her or licensed to him/her and that he/she has the power and authority to grant the Licence. + +Each Contributor warrants that the copyright in the modifications he/she brings to the Work are owned by him/her or licensed to him/her and that he/she has the power and authority to grant the Licence. + +Each time You accept the Licence, the original Licensor and subsequent Contributors grant You a licence to their contributions to the Work, under the terms of this Licence. + +7. Disclaimer of Warranty + +The Work is a work in progress, which is continuously improved by numerous contributors. It is not a finished work and may therefore contain defects or "bugs" inherent to this type of software development. + +For the above reason, the Work is provided under the Licence on an "as is" basis and without warranties of any kind concerning the Work, including without limitation merchantability, fitness for a particular purpose, absence of defects or errors, accuracy, non-infringement of intellectual property rights other than copyright as stated in Article 6 of this Licence. + +This disclaimer of warranty is an essential part of the Licence and a condition for the grant of any rights to the Work. + +8. Disclaimer of Liability + +Except in the cases of wilful misconduct or damages directly caused to natural persons, the Licensor will in no event be liable for any direct or indirect, material or moral, damages of any kind, arising out of the Licence or of the use of the Work, including without limitation, damages for loss of goodwill, work stoppage, computer failure or malfunction, loss of data or any commercial damage, even if the Licensor has been advised of the possibility of such damage. However, the Licensor will be liable under statutory product liability laws as far such laws apply to the Work. + +9. Additional agreements + +While distributing the Original Work or Derivative Works, You may choose to conclude an additional agreement to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or services consistent with this Licence. 
However, in accepting such obligations, You may act only on your own behalf and on your sole responsibility, not on behalf of the original Licensor or any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against such Contributor by the fact You have accepted any such warranty or additional liability. + +10. Acceptance of the Licence + +The provisions of this Licence can be accepted by clicking on an icon "I agree" placed under the bottom of a window displaying the text of this Licence or by affirming consent in any other similar way, in accordance with the rules of applicable law. Clicking on that icon indicates your clear and irrevocable acceptance of this Licence and all of its terms and conditions. + +Similarly, you irrevocably accept this Licence and all of its terms and conditions by exercising any rights granted to You by Article 2 of this Licence, such as the use of the Work, the creation by You of a Derivative Work or the Distribution and/or Communication by You of the Work or copies thereof. + +11. Information to the public + +In case of any Distribution and/or Communication of the Work by means of electronic communication by You (for example, by offering to download the Work from a remote location) the distribution channel or media (for example, a website) must at least provide to the public the information requested by the applicable law regarding the Licensor, the Licence and the way it may be accessible, concluded, stored and reproduced by the Licensee. + +12. Termination of the Licence + +The Licence and the rights granted hereunder will terminate automatically upon any breach by the Licensee of the terms of the Licence. Such a termination will not terminate the licences of any person who has received the Work from the Licensee under the Licence, provided such persons remain in full compliance with the Licence. + +13. Miscellaneous + +Without prejudice of Article 9 above, the Licence represents the complete agreement between the Parties as to the Work licensed hereunder. + +If any provision of the Licence is invalid or unenforceable under applicable law, this will not affect the validity or enforceability of the Licence as a whole. Such provision will be construed and/or reformed so as necessary to make it valid and enforceable. + +The European Commission may publish other linguistic versions and/or new versions of this Licence, so far this is required and reasonable, without reducing the scope of the rights granted by the Licence. New versions of the Licence will be published with a unique version number. + +All linguistic versions of this Licence, approved by the European Commission, have identical value. Parties can take advantage of the linguistic version of their choice. + +14. Jurisdiction + +Any litigation resulting from the interpretation of this License, arising between the European Commission, as a Licensor, and any Licensee, will be subject to the jurisdiction of the Court of Justice of the European Communities, as laid down in article 238 of the Treaty establishing the European Community. + +Any litigation arising between Parties, other than the European Commission, and resulting from the interpretation of this License, will be subject to the exclusive jurisdiction of the competent court where the Licensor resides or conducts its primary business. + +15. 
Applicable Law + +This Licence shall be governed by the law of the European Union country where the Licensor resides or has his registered office. + +This licence shall be governed by the Belgian law if: + + - a litigation arises between the European Commission, as a Licensor, and any Licensee; + + - the Licensor, other than the European Commission, has no residence or registered office inside a European Union country. + + + +Appendix + +"Compatible Licences" according to article 5 EUPL are: + + - GNU General Public License (GNU GPL) v. 2 + - Open Software License (OSL) v. 2.1, v. 3.0 + - Common Public License v. 1.0 + - Eclipse Public License v. 1.0 + - Cecill v. 2.0 \ No newline at end of file diff --git a/resources/license/gpl-2.0.txt b/resources/license/gpl-2.0.txt new file mode 100644 index 0000000000..581d732b70 --- /dev/null +++ b/resources/license/gpl-2.0.txt @@ -0,0 +1,117 @@ +GNU GENERAL PUBLIC LICENSE +Version 2, June 1991 + +Copyright (C) 1989, 1991 Free Software Foundation, Inc. +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA + +Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. + +Preamble + +The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This General Public License applies to most of the Free Software Foundation's software and to any other program whose authors commit to using it. (Some other Free Software Foundation software is covered by the GNU Lesser General Public License instead.) You can apply it to your programs, too. + +When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs; and that you know you can do these things. + +To protect your rights, we need to make restrictions that forbid anyone to deny you these rights or to ask you to surrender the rights. These restrictions translate to certain responsibilities for you if you distribute copies of the software, or if you modify it. + +For example, if you distribute copies of such a program, whether gratis or for a fee, you must give the recipients all the rights that you have. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. + +We protect your rights with two steps: (1) copyright the software, and (2) offer you this license which gives you legal permission to copy, distribute and/or modify the software. + +Also, for each author's protection and ours, we want to make certain that everyone understands that there is no warranty for this free software. If the software is modified by someone else and passed on, we want its recipients to know that what they have is not the original, so that any problems introduced by others will not reflect on the original authors' reputations. + +Finally, any free program is threatened constantly by software patents. We wish to avoid the danger that redistributors of a free program will individually obtain patent licenses, in effect making the program proprietary. 
To prevent this, we have made it clear that any patent must be licensed for everyone's free use or not licensed at all. + +The precise terms and conditions for copying, distribution and modification follow. + +TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + +0. This License applies to any program or other work which contains a notice placed by the copyright holder saying it may be distributed under the terms of this General Public License. The "Program", below, refers to any such program or work, and a "work based on the Program" means either the Program or any derivative work under copyright law: that is to say, a work containing the Program or a portion of it, either verbatim or with modifications and/or translated into another language. (Hereinafter, translation is included without limitation in the term "modification".) Each licensee is addressed as "you". + +Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running the Program is not restricted, and the output from the Program is covered only if its contents constitute a work based on the Program (independent of having been made by running the Program). Whether that is true depends on what the Program does. + +1. You may copy and distribute verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and give any other recipients of the Program a copy of this License along with the Program. + +You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee. + +2. You may modify your copy or copies of the Program or any portion of it, thus forming a work based on the Program, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions: + + a) You must cause the modified files to carry prominent notices stating that you changed the files and the date of any change. + + b) You must cause any work that you distribute or publish, that in whole or in part contains or is derived from the Program or any part thereof, to be licensed as a whole at no charge to all third parties under the terms of this License. + + c) If the modified program normally reads commands interactively when run, you must cause it, when started running for such interactive use in the most ordinary way, to print or display an announcement including an appropriate copyright notice and a notice that there is no warranty (or else, saying that you provide a warranty) and that users may redistribute the program under these conditions, and telling the user how to view a copy of this License. (Exception: if the Program itself is interactive but does not normally print such an announcement, your work based on the Program is not required to print an announcement.) + +These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Program, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. 
But when you distribute the same sections as part of a whole which is a work based on the Program, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it. + +Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Program. + +In addition, mere aggregation of another work not based on the Program with the Program (or with a work based on the Program) on a volume of a storage or distribution medium does not bring the other work under the scope of this License. + +3. You may copy and distribute the Program (or a work based on it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you also do one of the following: + + a) Accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or, + + b) Accompany it with a written offer, valid for at least three years, to give any third party, for a charge no more than your cost of physically performing source distribution, a complete machine-readable copy of the corresponding source code, to be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or, + + c) Accompany it with the information you received as to the offer to distribute corresponding source code. (This alternative is allowed only for noncommercial distribution and only if you received the program in object code or executable form with such an offer, in accord with Subsection b above.) + +The source code for a work means the preferred form of the work for making modifications to it. For an executable work, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the executable. However, as a special exception, the source code distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable. + +If distribution of executable or object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place counts as distribution of the source code, even though third parties are not compelled to copy the source along with the object code. + +4. You may not copy, modify, sublicense, or distribute the Program except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense or distribute the Program is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance. + +5. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Program or its derivative works. These actions are prohibited by law if you do not accept this License. 
Therefore, by modifying or distributing the Program (or any work based on the Program), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Program or works based on it. + +6. Each time you redistribute the Program (or any work based on the Program), the recipient automatically receives a license from the original licensor to copy, distribute or modify the Program subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties to this License. + +7. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Program at all. For example, if a patent license would not permit royalty-free redistribution of the Program by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Program. + +If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply and the section as a whole is intended to apply in other circumstances. + +It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system, which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice. + +This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License. + +8. If the distribution and/or use of the Program is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Program under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License. + +9. The Free Software Foundation may publish revised and/or new versions of the General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. + +Each version is given a distinguishing version number. If the Program specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. 
If the Program does not specify a version number of this License, you may choose any version ever published by the Free Software Foundation. + +10. If you wish to incorporate parts of the Program into other free programs whose distribution conditions are different, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally. + +NO WARRANTY + +11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + +12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. + +END OF TERMS AND CONDITIONS + +How to Apply These Terms to Your New Programs + +If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. + +To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. + + one line to give the program's name and an idea of what it does. Copyright (C) yyyy name of author + + This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. + + This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. + + You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. Also add information on how to contact you by electronic and paper mail. + +If the program is interactive, make it output a short notice like this when it starts in an interactive mode: + + Gnomovision version 69, Copyright (C) year name of author Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. 
This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, the commands you use may be called something other than `show w' and `show c'; they could even be mouse-clicks or menu items--whatever suits your program. + +You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the program, if necessary. Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the program `Gnomovision' (which makes passes at compilers) written by James Hacker. + +signature of Ty Coon, 1 April 1989 Ty Coon, President of Vice \ No newline at end of file diff --git a/resources/license/gpl-3.0.txt b/resources/license/gpl-3.0.txt new file mode 100644 index 0000000000..20db68e3d1 --- /dev/null +++ b/resources/license/gpl-3.0.txt @@ -0,0 +1,232 @@ +GNU GENERAL PUBLIC LICENSE +Version 3, 29 June 2007 + +Copyright © 2007 Free Software Foundation, Inc. + +Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. + +Preamble + +The GNU General Public License is a free, copyleft license for software and other kinds of works. + +The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. We, the Free Software Foundation, use the GNU General Public License for most of our software; it applies also to any other work released this way by its authors. You can apply it to your programs, too. + +When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things. + +To protect your rights, we need to prevent others from denying you these rights or asking you to surrender the rights. Therefore, you have certain responsibilities if you distribute copies of the software, or if you modify it: responsibilities to respect the freedom of others. + +For example, if you distribute copies of such a program, whether gratis or for a fee, you must pass on to the recipients the same freedoms that you received. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. + +Developers that use the GNU GPL protect your rights with two steps: (1) assert copyright on the software, and (2) offer you this License giving you legal permission to copy, distribute and/or modify it. + +For the developers' and authors' protection, the GPL clearly explains that there is no warranty for this free software. For both users' and authors' sake, the GPL requires that modified versions be marked as changed, so that their problems will not be attributed erroneously to authors of previous versions. + +Some devices are designed to deny users access to install or run modified versions of the software inside them, although the manufacturer can do so. 
This is fundamentally incompatible with the aim of protecting users' freedom to change the software. The systematic pattern of such abuse occurs in the area of products for individuals to use, which is precisely where it is most unacceptable. Therefore, we have designed this version of the GPL to prohibit the practice for those products. If such problems arise substantially in other domains, we stand ready to extend this provision to those domains in future versions of the GPL, as needed to protect the freedom of users. + +Finally, every program is threatened constantly by software patents. States should not allow patents to restrict development and use of software on general-purpose computers, but in those that do, we wish to avoid the special danger that patents applied to a free program could make it effectively proprietary. To prevent this, the GPL assures that patents cannot be used to render the program non-free. + +The precise terms and conditions for copying, distribution and modification follow. + +TERMS AND CONDITIONS + +0. Definitions. + +“This License” refers to version 3 of the GNU General Public License. + +“Copyright” also means copyright-like laws that apply to other kinds of works, such as semiconductor masks. + +“The Program” refers to any copyrightable work licensed under this License. Each licensee is addressed as “you”. “Licensees” and “recipients” may be individuals or organizations. + +To “modify” a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. The resulting work is called a “modified version” of the earlier work or a work “based on” the earlier work. + +A “covered work” means either the unmodified Program or a work based on the Program. + +To “propagate” a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well. + +To “convey” a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying. + +An interactive user interface displays “Appropriate Legal Notices” to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion. + +1. Source Code. +The “source code” for a work means the preferred form of the work for making modifications to it. “Object code” means any non-source form of a work. + +A “Standard Interface” means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language. 
+ +The “System Libraries” of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. A “Major Component”, in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it. + +The “Corresponding Source” for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work. + +The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source. + +The Corresponding Source for a work in source code form is that same work. + +2. Basic Permissions. +All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law. + +You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you. + +Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary. + +3. Protecting Users' Legal Rights From Anti-Circumvention Law. +No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures. 
+ +When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures. + +4. Conveying Verbatim Copies. +You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program. + +You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee. + +5. Conveying Modified Source Versions. +You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to “keep intact all notices”. + + c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so. + +A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an “aggregate” if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate. + +6. Conveying Non-Source Forms. +You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways: + + a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange. 
+ + b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either (1) a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or (2) access to copy the Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b. + + d) Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d. + +A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work. + +A “User Product” is either (1) a “consumer product”, which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a particular product received by a particular user, “normally used” refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product. + +“Installation Information” for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made. 
+ +If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM). + +The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network. + +Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying. + +7. Additional Terms. +“Additional permissions” are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions. + +When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission. 
+ +Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or authors of the material; or + + e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors. + +All other non-permissive additional terms are considered “further restrictions” within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying. + +If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms. + +Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way. + +8. Termination. +You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11). + +However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation. + +Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice. + +Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. 
If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10. + +9. Acceptance Not Required for Having Copies. +You are not required to accept this License in order to receive or run a copy of the Program. Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so. + +10. Automatic Licensing of Downstream Recipients. +Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License. + +An “entity transaction” is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts. + +You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it. + +11. Patents. +A “contributor” is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. The work thus licensed is called the contributor's “contributor version”. + +A contributor's “essential patent claims” are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, “control” includes the right to grant patent sublicenses in a manner consistent with the requirements of this License. + +Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version. + +In the following three paragraphs, a “patent license” is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To “grant” such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party. 
+ +If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either (1) cause the Corresponding Source to be so available, or (2) arrange to deprive yourself of the benefit of the patent license for this particular work, or (3) arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. “Knowingly relying” means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid. + +If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it. + +A patent license is “discriminatory” if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007. + +Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law. + +12. No Surrender of Others' Freedom. +If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program. + +13. Use with the GNU Affero General Public License. +Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU Affero General Public License into a single combined work, and to convey the resulting work. 
The terms of this License will continue to apply to the part which is the covered work, but the special requirements of the GNU Affero General Public License, section 13, concerning interaction through a network will apply to the combination as such. + +14. Revised Versions of this License. +The Free Software Foundation may publish revised and/or new versions of the GNU General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. + +Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU General Public License “or any later version” applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU General Public License, you may choose any version ever published by the Free Software Foundation. + +If the Program specifies that a proxy can decide which future versions of the GNU General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program. + +Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version. + +15. Disclaimer of Warranty. +THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM “AS IS” WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + +16. Limitation of Liability. +IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. + +17. Interpretation of Sections 15 and 16. +If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee. + +END OF TERMS AND CONDITIONS + +How to Apply These Terms to Your New Programs + +If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. + +To do so, attach the following notices to the program. 
It is safest to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the “copyright” line and a pointer to where the full notice is found. + + <one line to give the program's name and a brief idea of what it does.> + Copyright (C) <year> <name of author> + + This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. + + This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. + + You should have received a copy of the GNU General Public License along with this program. If not, see <https://www.gnu.org/licenses/>. + +Also add information on how to contact you by electronic and paper mail. + +If the program does terminal interaction, make it output a short notice like this when it starts in an interactive mode: + + <program> Copyright (C) <year> <name of author> + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, your program's commands might be different; for a GUI interface, you would use an “about box”. + +You should also get your employer (if you work as a programmer) or school, if any, to sign a “copyright disclaimer” for the program, if necessary. For more information on this, and how to apply and follow the GNU GPL, see <https://www.gnu.org/licenses/>. + +The GNU General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License. But first, please read <https://www.gnu.org/licenses/why-not-lgpl.html>. \ No newline at end of file diff --git a/resources/license/gpl.txt b/resources/license/gpl.txt new file mode 100644 index 0000000000..b0d9c10cdd --- /dev/null +++ b/resources/license/gpl.txt @@ -0,0 +1,100 @@ +GNU GENERAL PUBLIC LICENSE +Version 1, February 1989 + +Copyright (C) 1989 Free Software Foundation, Inc. 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. + +Preamble + +The license agreements of most software companies try to keep users at the mercy of those companies. By contrast, our General Public License is intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. The General Public License applies to the Free Software Foundation's software and to any other program whose authors commit to using it. You can use it for your programs, too. + +When we speak of free software, we are referring to freedom, not price. Specifically, the General Public License is designed to make sure that you have the freedom to give away or sell copies of free software, that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs; and that you know you can do these things. + +To protect your rights, we need to make restrictions that forbid anyone to deny you these rights or to ask you to surrender the rights.
These restrictions translate to certain responsibilities for you if you distribute copies of the software, or if you modify it. + +For example, if you distribute copies of a such a program, whether gratis or for a fee, you must give the recipients all the rights that you have. You must make sure that they, too, receive or can get the source code. And you must tell them their rights. + +We protect your rights with two steps: (1) copyright the software, and (2) offer you this license which gives you legal permission to copy, distribute and/or modify the software. + +Also, for each author's protection and ours, we want to make certain that everyone understands that there is no warranty for this free software. If the software is modified by someone else and passed on, we want its recipients to know that what they have is not the original, so that any problems introduced by others will not reflect on the original authors' reputations. + +The precise terms and conditions for copying, distribution and modification follow. + +GNU GENERAL PUBLIC LICENSE TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + +0. This License Agreement applies to any program or other work which contains a notice placed by the copyright holder saying it may be distributed under the terms of this General Public License. The "Program", below, refers to any such program or work, and a "work based on the Program" means either the Program or any work containing the Program or a portion of it, either verbatim or with modifications. Each licensee is addressed as "you". + +1. You may copy and distribute verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this General Public License and to the absence of any warranty; and give any other recipients of the Program a copy of this General Public License along with the Program. You may charge a fee for the physical act of transferring a copy. + +2. You may modify your copy or copies of the Program or any portion of it, and copy and distribute such modifications under the terms of Paragraph 1 above, provided that you also do the following: + + a) cause the modified files to carry prominent notices stating that you changed the files and the date of any change; and + + b) cause the whole of any work that you distribute or publish, that in whole or in part contains the Program or any part thereof, either with or without modifications, to be licensed at no charge to all third parties under the terms of this General Public License (except that you may choose to grant warranty protection to some or all third parties, at your option). + + c) If the modified program normally reads commands interactively when run, you must cause it, when started running for such interactive use in the simplest and most usual way, to print or display an announcement including an appropriate copyright notice and a notice that there is no warranty (or else, saying that you provide a warranty) and that users may redistribute the program under these conditions, and telling the user how to view a copy of this General Public License. + + d) You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee. 
+ +Mere aggregation of another independent work with the Program (or its derivative) on a volume of a storage or distribution medium does not bring the other work under the scope of these terms. + +3. You may copy and distribute the Program (or a portion or derivative of it, under Paragraph 2) in object code or executable form under the terms of Paragraphs 1 and 2 above provided that you also do one of the following: + + a) accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Paragraphs 1 and 2 above; or, + + b) accompany it with a written offer, valid for at least three years, to give any third party free (except for a nominal charge for the cost of distribution) a complete machine-readable copy of the corresponding source code, to be distributed under the terms of Paragraphs 1 and 2 above; or, + + c) accompany it with the information you received as to where the corresponding source code may be obtained. (This alternative is allowed only for noncommercial distribution and only if you received the program in object code or executable form alone.) + +Source code for a work means the preferred form of the work for making modifications to it. For an executable file, complete source code means all the source code for all modules it contains; but, as a special exception, it need not include source code for modules which are standard libraries that accompany the operating system on which the executable file runs, or for standard header files or definitions files that accompany that operating system. + +4. You may not copy, modify, sublicense, distribute or transfer the Program except as expressly provided under this General Public License. Any attempt otherwise to copy, modify, sublicense, distribute or transfer the Program is void, and will automatically terminate your rights to use the Program under this License. However, parties who have received copies, or rights to use copies, from you under this General Public License will not have their licenses terminated so long as such parties remain in full compliance. + +5. By copying, distributing or modifying the Program (or any work based on the Program) you indicate your acceptance of this license to do so, and all its terms and conditions. + +6. Each time you redistribute the Program (or any work based on the Program), the recipient automatically receives a license from the original licensor to copy, distribute or modify the Program subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. + +7. The Free Software Foundation may publish revised and/or new versions of the General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. + +Each version is given a distinguishing version number. If the Program specifies a version number of the license which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the license, you may choose any version ever published by the Free Software Foundation. + +8. If you wish to incorporate parts of the Program into other free programs whose distribution conditions are different, write to the author to ask for permission. 
For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally. + +NO WARRANTY + +9. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + +10. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. + +END OF TERMS AND CONDITIONS + +Appendix: How to Apply These Terms to Your New Programs + +If you develop a new program, and you want it to be of the greatest possible use to humanity, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. + +To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. + + Copyright (C) 19yy + + This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 1, or (at your option) any later version. + + This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. + + You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. + +Also add information on how to contact you by electronic and paper mail. + +If the program is interactive, make it output a short notice like this when it starts in an interactive mode: + + Gnomovision version 69, Copyright (C) 19xx name of author Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, the commands you use may be called something other than `show w' and `show c'; they could even be mouse-clicks or menu items--whatever suits your program. 
+ +You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the program, if necessary. Here a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the program `Gnomovision' (a program to direct compilers to make passes at assemblers) written by James Hacker. + + , 1 April 1989 Ty Coon, President of Vice + +That's all there is to it! \ No newline at end of file diff --git a/resources/license/index.json b/resources/license/index.json new file mode 100644 index 0000000000..6439bf91c1 --- /dev/null +++ b/resources/license/index.json @@ -0,0 +1,31 @@ +[ + { "label": "None", "value": "none" }, + { "label": "Academic Free License v3.0", "value": "afl-3.0" }, + { "label": "Apache license 2.0", "value": "apache-2.0" }, + { "label": "Artistic license 2.0", "value": "artistic-2.0" }, + { "label": "Boost Software License 1.0", "value": "bsl-1.0" }, + { "label": "BSD 2-clause \"Simplified\" license", "value": "bsd-2-clause" }, + { "label": "BSD 3-clause \"New\" or \"Revised\" license", "value": "bsd-3-clause" }, + { "label": "BSD 3-clause Clear license", "value": "bsd-3-clause-clear" }, + { "label": "Creative Commons license family", "value": "cc" }, + { "label": "Creative Commons Zero v1.0 Universal", "value": "cc0-1.0" }, + { "label": "Creative Commons Attribution 4.0", "value": "cc-by-4.0" }, + { "label": "Creative Commons Attribution Share Alike 4.0", "value": "cc-by-sa-4.0" }, + { "label": "Educational Community License v2.0", "value": "ecl-2.0" }, + { "label": "Eclipse Public License 1.0", "value": "epl-1.0" }, + { "label": "Eclipse Public License 2.0", "value": "epl-2.0" }, + { "label": "European Union Public License 1.1", "value": "eupl-1.1" }, + { "label": "GNU Affero General Public License v3.0", "value": "agpl-3.0" }, + { "label": "GNU General Public License family", "value": "gpl" }, + { "label": "GNU General Public License v2.0", "value": "gpl-2.0" }, + { "label": "GNU General Public License v3.0", "value": "gpl-3.0" }, + { "label": "GNU Lesser General Public License family", "value": "lgpl" }, + { "label": "GNU Lesser General Public License v2.1", "value": "lgpl-2.1" }, + { "label": "GNU Lesser General Public License v3.0", "value": "lgpl-3.0" }, + { "label": "ISC", "value": "isc" }, + { "label": "MIT", "value": "mit" }, + { "label": "Mozilla Public License 2.0", "value": "mpl-2.0" }, + { "label": "Open Software License 3.0", "value": "osl-3.0" }, + { "label": "The Unlicense", "value": "unlicense" }, + { "label": "zLib License", "value": "zlib" } +] \ No newline at end of file diff --git a/resources/license/isc.txt b/resources/license/isc.txt new file mode 100644 index 0000000000..d50e0cf2a9 --- /dev/null +++ b/resources/license/isc.txt @@ -0,0 +1,8 @@ +ISC License: + +Copyright (c) 2004-2010 by Internet Systems Consortium, Inc. ("ISC") +Copyright (c) 1995-2003 by Internet Software Consortium + +Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. \ No newline at end of file diff --git a/resources/license/lgpl-2.1.txt b/resources/license/lgpl-2.1.txt new file mode 100644 index 0000000000..8212e9bd10 --- /dev/null +++ b/resources/license/lgpl-2.1.txt @@ -0,0 +1,175 @@ +GNU LESSER GENERAL PUBLIC LICENSE + +Version 2.1, February 1999 + +Copyright (C) 1991, 1999 Free Software Foundation, Inc. +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + +Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. + +[This is the first released version of the Lesser GPL. It also counts as the successor of the GNU Library Public License, version 2, hence the version number 2.1.] + +Preamble + +The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public Licenses are intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. + +This license, the Lesser General Public License, applies to some specially designated software packages--typically libraries--of the Free Software Foundation and other authors who decide to use it. You can use it too, but we suggest you first think carefully about whether this license or the ordinary General Public License is the better strategy to use in any particular case, based on the explanations below. + +When we speak of free software, we are referring to freedom of use, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish); that you receive source code or can get it if you want it; that you can change the software and use pieces of it in new free programs; and that you are informed that you can do these things. + +To protect your rights, we need to make restrictions that forbid distributors to deny you these rights or to ask you to surrender these rights. These restrictions translate to certain responsibilities for you if you distribute copies of the library or if you modify it. + +For example, if you distribute copies of the library, whether gratis or for a fee, you must give the recipients all the rights that we gave you. You must make sure that they, too, receive or can get the source code. If you link other code with the library, you must provide complete object files to the recipients, so that they can relink them with the library after making changes to the library and recompiling it. And you must show them these terms so they know their rights. + +We protect your rights with a two-step method: (1) we copyright the library, and (2) we offer you this license, which gives you legal permission to copy, distribute and/or modify the library. + +To protect each distributor, we want to make it very clear that there is no warranty for the free library. Also, if the library is modified by someone else and passed on, the recipients should know that what they have is not the original version, so that the original author's reputation will not be affected by problems that might be introduced by others. + +Finally, software patents pose a constant threat to the existence of any free program. 
We wish to make sure that a company cannot effectively restrict the users of a free program by obtaining a restrictive license from a patent holder. Therefore, we insist that any patent license obtained for a version of the library must be consistent with the full freedom of use specified in this license. + +Most GNU software, including some libraries, is covered by the ordinary GNU General Public License. This license, the GNU Lesser General Public License, applies to certain designated libraries, and is quite different from the ordinary General Public License. We use this license for certain libraries in order to permit linking those libraries into non-free programs. + +When a program is linked with a library, whether statically or using a shared library, the combination of the two is legally speaking a combined work, a derivative of the original library. The ordinary General Public License therefore permits such linking only if the entire combination fits its criteria of freedom. The Lesser General Public License permits more lax criteria for linking other code with the library. + +We call this license the "Lesser" General Public License because it does Less to protect the user's freedom than the ordinary General Public License. It also provides other free software developers Less of an advantage over competing non-free programs. These disadvantages are the reason we use the ordinary General Public License for many libraries. However, the Lesser license provides advantages in certain special circumstances. + +For example, on rare occasions, there may be a special need to encourage the widest possible use of a certain library, so that it becomes a de-facto standard. To achieve this, non-free programs must be allowed to use the library. A more frequent case is that a free library does the same job as widely used non-free libraries. In this case, there is little to gain by limiting the free library to free software only, so we use the Lesser General Public License. + +In other cases, permission to use a particular library in non-free programs enables a greater number of people to use a large body of free software. For example, permission to use the GNU C Library in non-free programs enables many more people to use the whole GNU operating system, as well as its variant, the GNU/Linux operating system. + +Although the Lesser General Public License is Less protective of the users' freedom, it does ensure that the user of a program that is linked with the Library has the freedom and the wherewithal to run that program using a modified version of the Library. + +The precise terms and conditions for copying, distribution and modification follow. Pay close attention to the difference between a "work based on the library" and a "work that uses the library". The former contains code derived from the library, whereas the latter must be combined with the library in order to run. + +TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + +0. This License Agreement applies to any software library or other program which contains a notice placed by the copyright holder or other authorized party saying it may be distributed under the terms of this Lesser General Public License (also called "this License"). Each licensee is addressed as "you". + +A "library" means a collection of software functions and/or data prepared so as to be conveniently linked with application programs (which use some of those functions and data) to form executables. 
+ +The "Library", below, refers to any such software library or work which has been distributed under these terms. A "work based on the Library" means either the Library or any derivative work under copyright law: that is to say, a work containing the Library or a portion of it, either verbatim or with modifications and/or translated straightforwardly into another language. (Hereinafter, translation is included without limitation in the term "modification".) + +"Source code" for a work means the preferred form of the work for making modifications to it. For a library, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the library. + +Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running a program using the Library is not restricted, and output from such a program is covered only if its contents constitute a work based on the Library (independent of the use of the Library in a tool for writing it). Whether that is true depends on what the Library does and what the program that uses the Library does. + +1. You may copy and distribute verbatim copies of the Library's complete source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and distribute a copy of this License along with the Library. + +You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee. + +2. You may modify your copy or copies of the Library or any portion of it, thus forming a work based on the Library, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions: + + a) The modified work must itself be a software library. + + b) You must cause the files modified to carry prominent notices stating that you changed the files and the date of any change. + + c) You must cause the whole of the work to be licensed at no charge to all third parties under the terms of this License. + + d) If a facility in the modified Library refers to a function or a table of data to be supplied by an application program that uses the facility, other than as an argument passed when the facility is invoked, then you must make a good faith effort to ensure that, in the event an application does not supply such function or table, the facility still operates, and performs whatever part of its purpose remains meaningful. + +(For example, a function in a library to compute square roots has a purpose that is entirely well-defined independent of the application. Therefore, Subsection 2d requires that any application-supplied function or table used by this function must be optional: if the application does not supply it, the square root function must still compute square roots.) + +These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Library, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. 
But when you distribute the same sections as part of a whole which is a work based on the Library, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it. + +Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Library. + +In addition, mere aggregation of another work not based on the Library with the Library (or with a work based on the Library) on a volume of a storage or distribution medium does not bring the other work under the scope of this License. + +3. You may opt to apply the terms of the ordinary GNU General Public License instead of this License to a given copy of the Library. To do this, you must alter all the notices that refer to this License, so that they refer to the ordinary GNU General Public License, version 2, instead of to this License. (If a newer version than version 2 of the ordinary GNU General Public License has appeared, then you can specify that version instead if you wish.) Do not make any other change in these notices. + +Once this change is made in a given copy, it is irreversible for that copy, so the ordinary GNU General Public License applies to all subsequent copies and derivative works made from that copy. + +This option is useful when you wish to copy part of the code of the Library into a program that is not a library. + +4. You may copy and distribute the Library (or a portion or derivative of it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange. + +If distribution of object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place satisfies the requirement to distribute the source code, even though third parties are not compelled to copy the source along with the object code. + +5. A program that contains no derivative of any portion of the Library, but is designed to work with the Library by being compiled or linked with it, is called a "work that uses the Library". Such a work, in isolation, is not a derivative work of the Library, and therefore falls outside the scope of this License. + +However, linking a "work that uses the Library" with the Library creates an executable that is a derivative of the Library (because it contains portions of the Library), rather than a "work that uses the library". The executable is therefore covered by this License. Section 6 states terms for distribution of such executables. + +When a "work that uses the Library" uses material from a header file that is part of the Library, the object code for the work may be a derivative work of the Library even though the source code is not. Whether this is true is especially significant if the work can be linked without the Library, or if the work is itself a library. The threshold for this to be true is not precisely defined by law. 
+ +If such an object file uses only numerical parameters, data structure layouts and accessors, and small macros and small inline functions (ten lines or less in length), then the use of the object file is unrestricted, regardless of whether it is legally a derivative work. (Executables containing this object code plus portions of the Library will still fall under Section 6.) + +Otherwise, if the work is a derivative of the Library, you may distribute the object code for the work under the terms of Section 6. Any executables containing that work also fall under Section 6, whether or not they are linked directly with the Library itself. + +6. As an exception to the Sections above, you may also combine or link a "work that uses the Library" with the Library to produce a work containing portions of the Library, and distribute that work under terms of your choice, provided that the terms permit modification of the work for the customer's own use and reverse engineering for debugging such modifications. + +You must give prominent notice with each copy of the work that the Library is used in it and that the Library and its use are covered by this License. You must supply a copy of this License. If the work during execution displays copyright notices, you must include the copyright notice for the Library among them, as well as a reference directing the user to the copy of this License. Also, you must do one of these things: + + a) Accompany the work with the complete corresponding machine-readable source code for the Library including whatever changes were used in the work (which must be distributed under Sections 1 and 2 above); and, if the work is an executable linked with the Library, with the complete machine-readable "work that uses the Library", as object code and/or source code, so that the user can modify the Library and then relink to produce a modified executable containing the modified Library. (It is understood that the user who changes the contents of definitions files in the Library will not necessarily be able to recompile the application to use the modified definitions.) + + b) Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that (1) uses at run time a copy of the library already present on the user's computer system, rather than copying library functions into the executable, and (2) will operate properly with a modified version of the library, if the user installs one, as long as the modified version is interface-compatible with the version that the work was made with. + + c) Accompany the work with a written offer, valid for at least three years, to give the same user the materials specified in Subsection 6a, above, for a charge no more than the cost of performing this distribution. + + d) If distribution of the work is made by offering access to copy from a designated place, offer equivalent access to copy the above specified materials from the same place. + + e) Verify that the user has already received a copy of these materials or that you have already sent this user a copy. + +For an executable, the required form of the "work that uses the Library" must include any data and utility programs needed for reproducing the executable from it. 
However, as a special exception, the materials to be distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable. + +It may happen that this requirement contradicts the license restrictions of other proprietary libraries that do not normally accompany the operating system. Such a contradiction means you cannot use both them and the Library together in an executable that you distribute. + +7. You may place library facilities that are a work based on the Library side-by-side in a single library together with other library facilities not covered by this License, and distribute such a combined library, provided that the separate distribution of the work based on the Library and of the other library facilities is otherwise permitted, and provided that you do these two things: + + a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities. This must be distributed under the terms of the Sections above. + + b) Give prominent notice with the combined library of the fact that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work. + +8. You may not copy, modify, sublicense, link with, or distribute the Library except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense, link with, or distribute the Library is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance. + +9. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Library or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Library (or any work based on the Library), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Library or works based on it. + +10. Each time you redistribute the Library (or any work based on the Library), the recipient automatically receives a license from the original licensor to copy, distribute, link with or modify the Library subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties with this License. + +11. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Library at all. 
For example, if a patent license would not permit royalty-free redistribution of the Library by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Library. + +If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply, and the section as a whole is intended to apply in other circumstances. + +It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice. + +This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License. + +12. If the distribution and/or use of the Library is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Library under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License. + +13. The Free Software Foundation may publish revised and/or new versions of the Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. + +Each version is given a distinguishing version number. If the Library specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Library does not specify a license version number, you may choose any version ever published by the Free Software Foundation. + +14. If you wish to incorporate parts of the Library into other free programs whose distribution conditions are incompatible with these, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally. + +NO WARRANTY + +15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + +16. 
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. + +END OF TERMS AND CONDITIONS + +How to Apply These Terms to Your New Libraries + +If you develop a new library, and you want it to be of the greatest possible use to the public, we recommend making it free software that everyone can redistribute and change. You can do so by permitting redistribution under these terms (or, alternatively, under the terms of the ordinary General Public License). + +To apply these terms, attach the following notices to the library. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. + + one line to give the library's name and an idea of what it does. + Copyright (C) year name of author + + This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. + + You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA Also add information on how to contact you by electronic and paper mail. + +You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the library, if necessary. Here is a sample; alter the names: + +Yoyodyne, Inc., hereby disclaims all copyright interest in +the library `Frob' (a library for tweaking knobs) written +by James Random Hacker. + +signature of Ty Coon, 1 April 1990 +Ty Coon, President of Vice +That's all there is to it! \ No newline at end of file diff --git a/resources/license/lgpl-3.0.txt b/resources/license/lgpl-3.0.txt new file mode 100644 index 0000000000..51ce144d37 --- /dev/null +++ b/resources/license/lgpl-3.0.txt @@ -0,0 +1,304 @@ +GNU LESSER GENERAL PUBLIC LICENSE +Version 3, 29 June 2007 + +Copyright (C) 2007 Free Software Foundation, Inc. + +Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. + +This version of the GNU Lesser General Public License incorporates the terms and conditions of version 3 of the GNU General Public License, supplemented by the additional permissions listed below. + +0. Additional Definitions. + +As used herein, "this License" refers to version 3 of the GNU Lesser General Public License, and the "GNU GPL" refers to version 3 of the GNU General Public License. 
+ +"The Library" refers to a covered work governed by this License, other than an Application or a Combined Work as defined below. + +An "Application" is any work that makes use of an interface provided by the Library, but which is not otherwise based on the Library. Defining a subclass of a class defined by the Library is deemed a mode of using an interface provided by the Library. + +A "Combined Work" is a work produced by combining or linking an Application with the Library. The particular version of the Library with which the Combined Work was made is also called the "Linked Version". + +The "Minimal Corresponding Source" for a Combined Work means the Corresponding Source for the Combined Work, excluding any source code for portions of the Combined Work that, considered in isolation, are based on the Application, and not on the Linked Version. + +The "Corresponding Application Code" for a Combined Work means the object code and/or source code for the Application, including any data and utility programs needed for reproducing the Combined Work from the Application, but excluding the System Libraries of the Combined Work. + +1. Exception to Section 3 of the GNU GPL. +You may convey a covered work under sections 3 and 4 of this License without being bound by section 3 of the GNU GPL. + +2. Conveying Modified Versions. +If you modify a copy of the Library, and, in your modifications, a facility refers to a function or data to be supplied by an Application that uses the facility (other than as an argument passed when the facility is invoked), then you may convey a copy of the modified version: + + a) under this License, provided that you make a good faith effort to ensure that, in the event an Application does not supply the function or data, the facility still operates, and performs whatever part of its purpose remains meaningful, or + + b) under the GNU GPL, with none of the additional permissions of this License applicable to that copy. + +3. Object Code Incorporating Material from Library Header Files. +The object code form of an Application may incorporate material from a header file that is part of the Library. You may convey such object code under terms of your choice, provided that, if the incorporated material is not limited to numerical parameters, data structure layouts and accessors, or small macros, inline functions and templates (ten or fewer lines in length), you do both of the following: + + a) Give prominent notice with each copy of the object code that the Library is used in it and that the Library and its use are covered by this License. + + b) Accompany the object code with a copy of the GNU GPL and this license document. + +4. Combined Works. +You may convey a Combined Work under terms of your choice that, taken together, effectively do not restrict modification of the portions of the Library contained in the Combined Work and reverse engineering for debugging such modifications, if you also do each of the following: + + a) Give prominent notice with each copy of the Combined Work that the Library is used in it and that the Library and its use are covered by this License. + + b) Accompany the Combined Work with a copy of the GNU GPL and this license document. + + c) For a Combined Work that displays copyright notices during execution, include the copyright notice for the Library among these notices, as well as a reference directing the user to the copies of the GNU GPL and this license document. 
+ + d) Do one of the following: + + 0) Convey the Minimal Corresponding Source under the terms of this License, and the Corresponding Application Code in a form suitable for, and under terms that permit, the user to recombine or relink the Application with a modified version of the Linked Version to produce a modified Combined Work, in the manner specified by section 6 of the GNU GPL for conveying Corresponding Source. + + 1) Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that (a) uses at run time a copy of the Library already present on the user's computer system, and (b) will operate properly with a modified version of the Library that is interface-compatible with the Linked Version. + + e) Provide Installation Information, but only if you would otherwise be required to provide such information under section 6 of the GNU GPL, and only to the extent that such information is necessary to install and execute a modified version of the Combined Work produced by recombining or relinking the Application with a modified version of the Linked Version. (If you use option 4d0, the Installation Information must accompany the Minimal Corresponding Source and Corresponding Application Code. If you use option 4d1, you must provide the Installation Information in the manner specified by section 6 of the GNU GPL for conveying Corresponding Source.) + +5. Combined Libraries. +You may place library facilities that are a work based on the Library side by side in a single library together with other library facilities that are not Applications and are not covered by this License, and convey such a combined library under terms of your choice, if you do both of the following: + + a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities, conveyed under the terms of this License. + + b) Give prominent notice with the combined library that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work. + +6. Revised Versions of the GNU Lesser General Public License. +The Free Software Foundation may publish revised and/or new versions of the GNU Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. + +Each version is given a distinguishing version number. If the Library as you received it specifies that a certain numbered version of the GNU Lesser General Public License "or any later version" applies to it, you have the option of following the terms and conditions either of that published version or of any later version published by the Free Software Foundation. If the Library as you received it does not specify a version number of the GNU Lesser General Public License, you may choose any version of the GNU Lesser General Public License ever published by the Free Software Foundation. + +If the Library as you received it specifies that a proxy can decide whether future versions of the GNU Lesser General Public License shall +apply, that proxy's public statement of acceptance of any version is permanent authorization for you to choose that version for the Library. + +GNU GENERAL PUBLIC LICENSE +Version 3, 29 June 2007 + +Copyright © 2007 Free Software Foundation, Inc. + +Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. 
+ +Preamble + +The GNU General Public License is a free, copyleft license for software and other kinds of works. + +The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. We, the Free Software Foundation, use the GNU General Public License for most of our software; it applies also to any other work released this way by its authors. You can apply it to your programs, too. + +When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things. + +To protect your rights, we need to prevent others from denying you these rights or asking you to surrender the rights. Therefore, you have certain responsibilities if you distribute copies of the software, or if you modify it: responsibilities to respect the freedom of others. + +For example, if you distribute copies of such a program, whether gratis or for a fee, you must pass on to the recipients the same freedoms that you received. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. + +Developers that use the GNU GPL protect your rights with two steps: (1) assert copyright on the software, and (2) offer you this License giving you legal permission to copy, distribute and/or modify it. + +For the developers' and authors' protection, the GPL clearly explains that there is no warranty for this free software. For both users' and authors' sake, the GPL requires that modified versions be marked as changed, so that their problems will not be attributed erroneously to authors of previous versions. + +Some devices are designed to deny users access to install or run modified versions of the software inside them, although the manufacturer can do so. This is fundamentally incompatible with the aim of protecting users' freedom to change the software. The systematic pattern of such abuse occurs in the area of products for individuals to use, which is precisely where it is most unacceptable. Therefore, we have designed this version of the GPL to prohibit the practice for those products. If such problems arise substantially in other domains, we stand ready to extend this provision to those domains in future versions of the GPL, as needed to protect the freedom of users. + +Finally, every program is threatened constantly by software patents. States should not allow patents to restrict development and use of software on general-purpose computers, but in those that do, we wish to avoid the special danger that patents applied to a free program could make it effectively proprietary. To prevent this, the GPL assures that patents cannot be used to render the program non-free. + +The precise terms and conditions for copying, distribution and modification follow. + +TERMS AND CONDITIONS + +0. Definitions. + +“This License” refers to version 3 of the GNU General Public License. + +“Copyright” also means copyright-like laws that apply to other kinds of works, such as semiconductor masks. 
+ +“The Program” refers to any copyrightable work licensed under this License. Each licensee is addressed as “you”. “Licensees” and “recipients” may be individuals or organizations. + +To “modify” a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. The resulting work is called a “modified version” of the earlier work or a work “based on” the earlier work. + +A “covered work” means either the unmodified Program or a work based on the Program. + +To “propagate” a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well. + +To “convey” a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying. + +An interactive user interface displays “Appropriate Legal Notices” to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion. + +1. Source Code. +The “source code” for a work means the preferred form of the work for making modifications to it. “Object code” means any non-source form of a work. + +A “Standard Interface” means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language. + +The “System Libraries” of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. A “Major Component”, in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it. + +The “Corresponding Source” for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. 
For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work. + +The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source. + +The Corresponding Source for a work in source code form is that same work. + +2. Basic Permissions. +All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law. + +You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you. + +Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary. + +3. Protecting Users' Legal Rights From Anti-Circumvention Law. +No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures. + +When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures. + +4. Conveying Verbatim Copies. +You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program. + +You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee. + +5. Conveying Modified Source Versions. 
+You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to “keep intact all notices”. + + c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so. + +A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an “aggregate” if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate. + +6. Conveying Non-Source Forms. +You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways: + + a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either (1) a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or (2) access to copy the Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b. + + d) Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. 
If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d. + +A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work. + +A “User Product” is either (1) a “consumer product”, which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a particular product received by a particular user, “normally used” refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product. + +“Installation Information” for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made. + +If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM). + +The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network. 
+ +Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying. + +7. Additional Terms. +“Additional permissions” are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions. + +When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission. + +Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or authors of the material; or + + e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors. + +All other non-permissive additional terms are considered “further restrictions” within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying. + +If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms. + +Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way. + +8. Termination. 
+You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11). + +However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation. + +Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice. + +Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10. + +9. Acceptance Not Required for Having Copies. +You are not required to accept this License in order to receive or run a copy of the Program. Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so. + +10. Automatic Licensing of Downstream Recipients. +Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License. + +An “entity transaction” is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts. + +You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it. + +11. Patents. +A “contributor” is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. The work thus licensed is called the contributor's “contributor version”. 
+ +A contributor's “essential patent claims” are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, “control” includes the right to grant patent sublicenses in a manner consistent with the requirements of this License. + +Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version. + +In the following three paragraphs, a “patent license” is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To “grant” such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party. + +If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either (1) cause the Corresponding Source to be so available, or (2) arrange to deprive yourself of the benefit of the patent license for this particular work, or (3) arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. “Knowingly relying” means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid. + +If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it. + +A patent license is “discriminatory” if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007. 
+ +Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law. + +12. No Surrender of Others' Freedom. +If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program. + +13. Use with the GNU Affero General Public License. +Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU Affero General Public License into a single combined work, and to convey the resulting work. The terms of this License will continue to apply to the part which is the covered work, but the special requirements of the GNU Affero General Public License, section 13, concerning interaction through a network will apply to the combination as such. + +14. Revised Versions of this License. +The Free Software Foundation may publish revised and/or new versions of the GNU General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. + +Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU General Public License “or any later version” applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU General Public License, you may choose any version ever published by the Free Software Foundation. + +If the Program specifies that a proxy can decide which future versions of the GNU General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program. + +Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version. + +15. Disclaimer of Warranty. +THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM “AS IS” WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + +16. Limitation of Liability. 
+IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. + +17. Interpretation of Sections 15 and 16. +If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee. + +END OF TERMS AND CONDITIONS + +How to Apply These Terms to Your New Programs + +If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. + +To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the “copyright” line and a pointer to where the full notice is found. + + + <one line to give the program's name and a brief idea of what it does.> + Copyright (C) <year> <name of author> + + This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. + + This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. + + You should have received a copy of the GNU General Public License along with this program. If not, see <https://www.gnu.org/licenses/>. + +Also add information on how to contact you by electronic and paper mail. + +If the program does terminal interaction, make it output a short notice like this when it starts in an interactive mode: + + <program> Copyright (C) <year> <name of author> + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, your program's commands might be different; for a GUI interface, you would use an “about box”. + +You should also get your employer (if you work as a programmer) or school, if any, to sign a “copyright disclaimer” for the program, if necessary. For more information on this, and how to apply and follow the GNU GPL, see <https://www.gnu.org/licenses/>. + +The GNU General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License. But first, please read <https://www.gnu.org/philosophy/why-not-lgpl.html>. 
\ No newline at end of file diff --git a/resources/license/lgpl.txt b/resources/license/lgpl.txt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/resources/license/mit.txt b/resources/license/mit.txt new file mode 100644 index 0000000000..63b4b681cb --- /dev/null +++ b/resources/license/mit.txt @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) [year] [fullname] + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/resources/license/mpl-2.0.txt b/resources/license/mpl-2.0.txt new file mode 100644 index 0000000000..dbdb0fa8e5 --- /dev/null +++ b/resources/license/mpl-2.0.txt @@ -0,0 +1,373 @@ +Mozilla Public License Version 2.0 +================================== + +1. Definitions +-------------- + +1.1. "Contributor" + means each individual or legal entity that creates, contributes to + the creation of, or owns Covered Software. + +1.2. "Contributor Version" + means the combination of the Contributions of others (if any) used + by a Contributor and that particular Contributor's Contribution. + +1.3. "Contribution" + means Covered Software of a particular Contributor. + +1.4. "Covered Software" + means Source Code Form to which the initial Contributor has attached + the notice in Exhibit A, the Executable Form of such Source Code + Form, and Modifications of such Source Code Form, in each case + including portions thereof. + +1.5. "Incompatible With Secondary Licenses" + means + + (a) that the initial Contributor has attached the notice described + in Exhibit B to the Covered Software; or + + (b) that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the + terms of a Secondary License. + +1.6. "Executable Form" + means any form of the work other than Source Code Form. + +1.7. "Larger Work" + means a work that combines Covered Software with other material, in + a separate file or files, that is not Covered Software. + +1.8. "License" + means this document. + +1.9. "Licensable" + means having the right to grant, to the maximum extent possible, + whether at the time of the initial grant or subsequently, any and + all of the rights conveyed by this License. + +1.10. "Modifications" + means any of the following: + + (a) any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered + Software; or + + (b) any new file in Source Code Form that contains any Covered + Software. + +1.11. 
"Patent Claims" of a Contributor + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the + License, by the making, using, selling, offering for sale, having + made, import, or transfer of either its Contributions or its + Contributor Version. + +1.12. "Secondary License" + means either the GNU General Public License, Version 2.0, the GNU + Lesser General Public License, Version 2.1, the GNU Affero General + Public License, Version 3.0, or any later versions of those + licenses. + +1.13. "Source Code Form" + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + means an individual or a legal entity exercising rights under this + License. For legal entities, "You" includes any entity that + controls, is controlled by, or is under common control with You. For + purposes of this definition, "control" means (a) the power, direct + or indirect, to cause the direction or management of such entity, + whether by contract or otherwise, or (b) ownership of more than + fifty percent (50%) of the outstanding shares or beneficial + ownership of such entity. + +2. License Grants and Conditions +-------------------------------- + +2.1. Grants + +Each Contributor hereby grants You a world-wide, royalty-free, +non-exclusive license: + +(a) under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + +(b) under Patent Claims of such Contributor to make, use, sell, offer + for sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. Effective Date + +The licenses granted in Section 2.1 with respect to any Contribution +become effective for each Contribution on the date the Contributor first +distributes such Contribution. + +2.3. Limitations on Grant Scope + +The licenses granted in this Section 2 are the only rights granted under +this License. No additional rights or licenses will be implied from the +distribution or licensing of Covered Software under this License. +Notwithstanding Section 2.1(b) above, no patent license is granted by a +Contributor: + +(a) for any code that a Contributor has removed from Covered Software; + or + +(b) for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + +(c) under Patent Claims infringed by Covered Software in the absence of + its Contributions. + +This License does not grant any rights in the trademarks, service marks, +or logos of any Contributor (except as may be necessary to comply with +the notice requirements in Section 3.4). + +2.4. Subsequent Licenses + +No Contributor makes additional grants as a result of Your choice to +distribute the Covered Software under a subsequent version of this +License (see Section 10.2) or under the terms of a Secondary License (if +permitted under the terms of Section 3.3). + +2.5. Representation + +Each Contributor represents that the Contributor believes its +Contributions are its original creation(s) or it has sufficient rights +to grant the rights to its Contributions conveyed by this License. 
+ +2.6. Fair Use + +This License is not intended to limit any rights You have under +applicable copyright doctrines of fair use, fair dealing, or other +equivalents. + +2.7. Conditions + +Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted +in Section 2.1. + +3. Responsibilities +------------------- + +3.1. Distribution of Source Form + +All distribution of Covered Software in Source Code Form, including any +Modifications that You create or to which You contribute, must be under +the terms of this License. You must inform recipients that the Source +Code Form of the Covered Software is governed by the terms of this +License, and how they can obtain a copy of this License. You may not +attempt to alter or restrict the recipients' rights in the Source Code +Form. + +3.2. Distribution of Executable Form + +If You distribute Covered Software in Executable Form then: + +(a) such Covered Software must also be made available in Source Code + Form, as described in Section 3.1, and You must inform recipients of + the Executable Form how they can obtain a copy of such Source Code + Form by reasonable means in a timely manner, at a charge no more + than the cost of distribution to the recipient; and + +(b) You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter + the recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + +You may create and distribute a Larger Work under terms of Your choice, +provided that You also comply with the requirements of this License for +the Covered Software. If the Larger Work is a combination of Covered +Software with a work governed by one or more Secondary Licenses, and the +Covered Software is not Incompatible With Secondary Licenses, this +License permits You to additionally distribute such Covered Software +under the terms of such Secondary License(s), so that the recipient of +the Larger Work may, at their option, further distribute the Covered +Software under the terms of either this License or such Secondary +License(s). + +3.4. Notices + +You may not remove or alter the substance of any license notices +(including copyright notices, patent notices, disclaimers of warranty, +or limitations of liability) contained within the Source Code Form of +the Covered Software, except that You may alter any license notices to +the extent required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + +You may choose to offer, and to charge a fee for, warranty, support, +indemnity or liability obligations to one or more recipients of Covered +Software. However, You may do so only on Your own behalf, and not on +behalf of any Contributor. You must make it absolutely clear that any +such warranty, support, indemnity, or liability obligation is offered by +You alone, and You hereby agree to indemnify every Contributor for any +liability incurred by such Contributor as a result of warranty, support, +indemnity or liability terms You offer. You may include additional +disclaimers of warranty and limitations of liability specific to any +jurisdiction. + +4. 
Inability to Comply Due to Statute or Regulation +--------------------------------------------------- + +If it is impossible for You to comply with any of the terms of this +License with respect to some or all of the Covered Software due to +statute, judicial order, or regulation then You must: (a) comply with +the terms of this License to the maximum extent possible; and (b) +describe the limitations and the code they affect. Such description must +be placed in a text file included with all distributions of the Covered +Software under this License. Except to the extent prohibited by statute +or regulation, such description must be sufficiently detailed for a +recipient of ordinary skill to be able to understand it. + +5. Termination +-------------- + +5.1. The rights granted under this License will terminate automatically +if You fail to comply with any of its terms. However, if You become +compliant, then the rights granted under this License from a particular +Contributor are reinstated (a) provisionally, unless and until such +Contributor explicitly and finally terminates Your grants, and (b) on an +ongoing basis, if such Contributor fails to notify You of the +non-compliance by some reasonable means prior to 60 days after You have +come back into compliance. Moreover, Your grants from a particular +Contributor are reinstated on an ongoing basis if such Contributor +notifies You of the non-compliance by some reasonable means, this is the +first time You have received notice of non-compliance with this License +from such Contributor, and You become compliant prior to 30 days after +Your receipt of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent +infringement claim (excluding declaratory judgment actions, +counter-claims, and cross-claims) alleging that a Contributor Version +directly or indirectly infringes any patent, then the rights granted to +You by any and all Contributors for the Covered Software under Section +2.1 of this License shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all +end user license agreements (excluding distributors and resellers) which +have been validly granted by You or Your distributors under this License +prior to termination shall survive termination. + +************************************************************************ +* * +* 6. Disclaimer of Warranty * +* ------------------------- * +* * +* Covered Software is provided under this License on an "as is" * +* basis, without warranty of any kind, either expressed, implied, or * +* statutory, including, without limitation, warranties that the * +* Covered Software is free of defects, merchantable, fit for a * +* particular purpose or non-infringing. The entire risk as to the * +* quality and performance of the Covered Software is with You. * +* Should any Covered Software prove defective in any respect, You * +* (not any Contributor) assume the cost of any necessary servicing, * +* repair, or correction. This disclaimer of warranty constitutes an * +* essential part of this License. No use of any Covered Software is * +* authorized under this License except under this disclaimer. * +* * +************************************************************************ + +************************************************************************ +* * +* 7. 
Limitation of Liability * +* -------------------------- * +* * +* Under no circumstances and under no legal theory, whether tort * +* (including negligence), contract, or otherwise, shall any * +* Contributor, or anyone who distributes Covered Software as * +* permitted above, be liable to You for any direct, indirect, * +* special, incidental, or consequential damages of any character * +* including, without limitation, damages for lost profits, loss of * +* goodwill, work stoppage, computer failure or malfunction, or any * +* and all other commercial damages or losses, even if such party * +* shall have been informed of the possibility of such damages. This * +* limitation of liability shall not apply to liability for death or * +* personal injury resulting from such party's negligence to the * +* extent applicable law prohibits such limitation. Some * +* jurisdictions do not allow the exclusion or limitation of * +* incidental or consequential damages, so this exclusion and * +* limitation may not apply to You. * +* * +************************************************************************ + +8. Litigation +------------- + +Any litigation relating to this License may be brought only in the +courts of a jurisdiction where the defendant maintains its principal +place of business and such litigation shall be governed by laws of that +jurisdiction, without reference to its conflict-of-law provisions. +Nothing in this Section shall prevent a party's ability to bring +cross-claims or counter-claims. + +9. Miscellaneous +---------------- + +This License represents the complete agreement concerning the subject +matter hereof. If any provision of this License is held to be +unenforceable, such provision shall be reformed only to the extent +necessary to make it enforceable. Any law or regulation which provides +that the language of a contract shall be construed against the drafter +shall not be used to construe this License against a Contributor. + +10. Versions of the License +--------------------------- + +10.1. New Versions + +Mozilla Foundation is the license steward. Except as provided in Section +10.3, no one other than the license steward has the right to modify or +publish new versions of this License. Each version will be given a +distinguishing version number. + +10.2. Effect of New Versions + +You may distribute the Covered Software under the terms of the version +of the License under which You originally received the Covered Software, +or under the terms of any subsequent version published by the license +steward. + +10.3. Modified Versions + +If you create software not governed by this License, and you want to +create a new license for such software, you may create and use a +modified version of this License if you rename the license and remove +any references to the name of the license steward (except to note that +such modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary +Licenses + +If You choose to distribute Source Code Form that is Incompatible With +Secondary Licenses under the terms of this version of the License, the +notice described in Exhibit B of this License must be attached. + +Exhibit A - Source Code Form License Notice +------------------------------------------- + + This Source Code Form is subject to the terms of the Mozilla Public + License, v. 2.0. If a copy of the MPL was not distributed with this + file, You can obtain one at https://mozilla.org/MPL/2.0/. 
+ +If it is not possible or desirable to put the notice in a particular +file, then You may include the notice in a location (such as a LICENSE +file in a relevant directory) where a recipient would be likely to look +for such a notice. + +You may add additional accurate notices of copyright ownership. + +Exhibit B - "Incompatible With Secondary Licenses" Notice +--------------------------------------------------------- + + This Source Code Form is "Incompatible With Secondary Licenses", as + defined by the Mozilla Public License, v. 2.0. \ No newline at end of file diff --git a/resources/license/osl-3.0.txt b/resources/license/osl-3.0.txt new file mode 100644 index 0000000000..6466b7af15 --- /dev/null +++ b/resources/license/osl-3.0.txt @@ -0,0 +1,47 @@ +Open Software License v. 3.0 (OSL-3.0) + +This Open Software License (the "License") applies to any original work of authorship (the "Original Work") whose owner (the "Licensor") has placed the following licensing notice adjacent to the copyright notice for the Original Work: + + Licensed under the Open Software License version 3.0 + +1) Grant of Copyright License. Licensor grants You a worldwide, royalty-free, non-exclusive, sublicensable license, for the duration of the copyright, to do the following: + + a) to reproduce the Original Work in copies, either alone or as part of a collective work; + + b) to translate, adapt, alter, transform, modify, or arrange the Original Work, thereby creating derivative works ("Derivative Works") based upon the Original Work; + + c) to distribute or communicate copies of the Original Work and Derivative Works to the public, with the proviso that copies of Original Work or Derivative Works that You distribute or communicate shall be licensed under this Open Software License; + + d) to perform the Original Work publicly; and + + e) to display the Original Work publicly. + +2) Grant of Patent License. Licensor grants You a worldwide, royalty-free, non-exclusive, sublicensable license, under patent claims owned or controlled by the Licensor that are embodied in the Original Work as furnished by the Licensor, for the duration of the patents, to make, use, sell, offer for sale, have made, and import the Original Work and Derivative Works. + +3) Grant of Source Code License. The term "Source Code" means the preferred form of the Original Work for making modifications to it and all available documentation describing how to modify the Original Work. Licensor agrees to provide a machine-readable copy of the Source Code of the Original Work along with each copy of the Original Work that Licensor distributes. Licensor reserves the right to satisfy this obligation by placing a machine-readable copy of the Source Code in an information repository reasonably calculated to permit inexpensive and convenient access by You for as long as Licensor continues to distribute the Original Work. + +4) Exclusions From License Grant. Neither the names of Licensor, nor the names of any contributors to the Original Work, nor any of their trademarks or service marks, may be used to endorse or promote products derived from this Original Work without express prior permission of the Licensor. Except as expressly stated herein, nothing in this License grants any license to Licensor’s trademarks, copyrights, patents, trade secrets or any other intellectual property. No patent license is granted to make, use, sell, offer for sale, have made, or import embodiments of any patent claims other than the licensed claims defined in Section 2. 
No license is granted to the trademarks of Licensor even if such marks are included in the Original Work. Nothing in this License shall be interpreted to prohibit Licensor from licensing under terms different from this License any Original Work that Licensor otherwise would have a right to license. + +5) External Deployment. The term "External Deployment" means the use, distribution, or communication of the Original Work or Derivative Works in any way such that the Original Work or Derivative Works may be used by anyone other than You, whether those works are distributed or communicated to those persons or made available as an application intended for use over a network. As an express condition for the grants of license hereunder, You must treat any External Deployment by You of the Original Work or a Derivative Work as a distribution under section 1(c). + +6) Attribution Rights. You must retain, in the Source Code of any Derivative Works that You create, all copyright, patent, or trademark notices from the Source Code of the Original Work, as well as any notices of licensing and any descriptive text identified therein as an "Attribution Notice." You must cause the Source Code for any Derivative Works that You create to carry a prominent Attribution Notice reasonably calculated to inform recipients that You have modified the Original Work. + +7) Warranty of Provenance and Disclaimer of Warranty. Licensor warrants that the copyright in and to the Original Work and the patent rights granted herein by Licensor are owned by the Licensor or are sublicensed to You under the terms of this License with the permission of the contributor(s) of those copyrights and patent rights. Except as expressly stated in the immediately preceding sentence, the Original Work is provided under this License on an "AS IS" BASIS and WITHOUT WARRANTY, either express or implied, including, without limitation, the warranties of non-infringement, merchantability or fitness for a particular purpose. THE ENTIRE RISK AS TO THE QUALITY OF THE ORIGINAL WORK IS WITH YOU. This DISCLAIMER OF WARRANTY constitutes an essential part of this License. No license to the Original Work is granted by this License except under this disclaimer. + +8) Limitation of Liability. Under no circumstances and under no legal theory, whether in tort (including negligence), contract, or otherwise, shall the Licensor be liable to anyone for any indirect, special, incidental, or consequential damages of any character arising as a result of this License or the use of the Original Work including, without limitation, damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses. This limitation of liability shall not apply to the extent applicable law prohibits such limitation. + +9) Acceptance and Termination. If, at any time, You expressly assented to this License, that assent indicates your clear and irrevocable acceptance of this License and all of its terms and conditions. If You distribute or communicate copies of the Original Work or a Derivative Work, You must make a reasonable effort under the circumstances to obtain the express assent of recipients to the terms of this License. This License conditions your rights to undertake the activities listed in Section 1, including your right to create Derivative Works based upon the Original Work, and doing so without honoring these terms and conditions is prohibited by copyright law and international treaty. 
Nothing in this License is intended to affect copyright exceptions and limitations (including “fair use” or “fair dealing”). This License shall terminate immediately and You may no longer exercise any of the rights granted to You by this License upon your failure to honor the conditions in Section 1(c). + +10) Termination for Patent Action. This License shall terminate automatically and You may no longer exercise any of the rights granted to You by this License as of the date You commence an action, including a cross-claim or counterclaim, against Licensor or any licensee alleging that the Original Work infringes a patent. This termination provision shall not apply for an action alleging patent infringement by combinations of the Original Work with other software or hardware. + +11) Jurisdiction, Venue and Governing Law. Any action or suit relating to this License may be brought only in the courts of a jurisdiction wherein the Licensor resides or in which Licensor conducts its primary business, and under the laws of that jurisdiction excluding its conflict-of-law provisions. The application of the United Nations Convention on Contracts for the International Sale of Goods is expressly excluded. Any use of the Original Work outside the scope of this License or after its termination shall be subject to the requirements and penalties of copyright or patent law in the appropriate jurisdiction. This section shall survive the termination of this License. + +12) Attorneys' Fees. In any action to enforce the terms of this License or seeking damages relating thereto, the prevailing party shall be entitled to recover its costs and expenses, including, without limitation, reasonable attorneys' fees and costs incurred in connection with such action, including any appeal of such action. This section shall survive the termination of this License. + +13) Miscellaneous. If any provision of this License is held to be unenforceable, such provision shall be reformed only to the extent necessary to make it enforceable. + +14) Definition of "You" in This License. "You" throughout this License, whether in upper or lower case, means an individual or a legal entity exercising rights under, and complying with all of the terms of, this License. For legal entities, "You" includes any entity that controls, is controlled by, or is under common control with you. For purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. + +15) Right to Use. You may use the Original Work in all ways not otherwise restricted or conditioned by this License or by law, and Licensor promises not to interfere with or be responsible for such uses by You. + +16) Modification of This License. This License is Copyright (c) 2005 Lawrence Rosen. Permission is granted to copy, distribute, or communicate this License without modification. Nothing in this License permits You to modify this License as applied to the Original Work or to Derivative Works. 
However, You may modify the text of this License and copy, distribute or communicate your modified version (the "Modified License") and apply it to other original works of authorship subject to the following conditions: (i) You may not indicate in any way that your Modified License is the "Open Software License" or "OSL" and you may not use those names in the name of your Modified License; (ii) You must replace the notice specified in the first paragraph above with the notice "Licensed under " or with a notice of your own that is not confusingly similar to the notice in this License; and (iii) You may not claim that your original works are open source software unless your Modified License has been approved by Open Source Initiative (OSI) and You comply with its license review and certification process. \ No newline at end of file diff --git a/resources/license/unlicense.txt b/resources/license/unlicense.txt new file mode 100644 index 0000000000..c675a17ccb --- /dev/null +++ b/resources/license/unlicense.txt @@ -0,0 +1,10 @@ +This is free and unencumbered software released into the public domain. + +Anyone is free to copy, modify, publish, use, compile, sell, or distribute this software, either in source code form or as a compiled binary, for any purpose, commercial or non-commercial, and by any means. + +In jurisdictions that recognize copyright laws, the author or authors of this software dedicate any and all copyright interest in the software to the public domain. We make this dedication for the benefit of the public at large and to the detriment of our heirs and +successors. We intend this dedication to be an overt act of relinquishment in perpetuity of all present and future rights to this software under copyright law. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +For more information, please refer to \ No newline at end of file diff --git a/resources/license/zlib.txt b/resources/license/zlib.txt new file mode 100644 index 0000000000..8d4358188d --- /dev/null +++ b/resources/license/zlib.txt @@ -0,0 +1,11 @@ +zlib License + +This software is provided 'as-is', without any express or implied warranty. In no event will the authors be held liable for any damages arising from the use of this software. + +Permission is granted to anyone to use this software for any purpose, including commercial applications, and to alter it and redistribute it freely, subject to the following restrictions: + + 1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required. + + 2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software. + + 3. This notice may not be removed or altered from any source distribution. 
\ No newline at end of file diff --git a/scripts/wire/gitrpcserver/wire.sh b/scripts/wire/gitrpcserver/wire.sh new file mode 100755 index 0000000000..480465fd89 --- /dev/null +++ b/scripts/wire/gitrpcserver/wire.sh @@ -0,0 +1,4 @@ +#!/usr/bin/env sh + +echo "Updating cmd/gitrpcserver/wire.go" +go run github.com/google/wire/cmd/wire gen github.com/harness/gitness/cmd/gitrpcserver \ No newline at end of file diff --git a/scripts/wire/server/standalone.sh b/scripts/wire/server/standalone.sh new file mode 100755 index 0000000000..ed3f588664 --- /dev/null +++ b/scripts/wire/server/standalone.sh @@ -0,0 +1,4 @@ +#!/usr/bin/env sh + +echo "Updating cmd/gitness/wire_gen.go" +go run github.com/google/wire/cmd/wire gen github.com/harness/gitness/cmd/gitness \ No newline at end of file diff --git a/store/database/config.go b/store/database/config.go new file mode 100644 index 0000000000..93acf01d93 --- /dev/null +++ b/store/database/config.go @@ -0,0 +1,21 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package database + +// Config specifies the config for the database package. +type Config struct { + Driver string + Datasource string +} diff --git a/store/database/dbtx/ctx.go b/store/database/dbtx/ctx.go new file mode 100644 index 0000000000..ae896b52b5 --- /dev/null +++ b/store/database/dbtx/ctx.go @@ -0,0 +1,55 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package dbtx + +import ( + "context" + + "github.com/jmoiron/sqlx" +) + +// ctxKeyDB is context key for storing and retrieving Transactor to and from a context. +type ctxKeyDB struct{} + +// PutTransactor places Transactor into the context. +func PutTransactor(ctx context.Context, t Transactor) context.Context { + return context.WithValue(ctx, ctxKeyDB{}, t) +} + +// WithTx starts a transaction with Transactor interface from the context. It will panic if there is no Transactor. +func WithTx(ctx context.Context, txFn func(ctx context.Context) error, opts ...interface{}) error { + return ctx.Value(ctxKeyDB{}).(Transactor).WithTx(ctx, txFn, opts...) +} + +// ctxKeyTx is context key for storing and retrieving Tx to and from a context. +type ctxKeyTx struct{} + +// GetAccessor returns Accessor interface from the context if it exists or creates a new one from the provided *sql.DB. +// It is intended to be used in data layer functions that might or might not be running inside a transaction. 
+func GetAccessor(ctx context.Context, db *sqlx.DB) Accessor { + if a, ok := ctx.Value(ctxKeyTx{}).(Accessor); ok { + return a + } + return New(db) +} + +// GetTransaction returns the Transaction interface from the context if it exists, or returns nil. +// It is intended to be used in service layer functions that need to explicitly commit or roll back a transaction. +func GetTransaction(ctx context.Context) Transaction { + if a, ok := ctx.Value(ctxKeyTx{}).(Transaction); ok { + return a + } + return nil +} diff --git a/store/database/dbtx/db.go b/store/database/dbtx/db.go new file mode 100644 index 0000000000..d1bff1d5ee --- /dev/null +++ b/store/database/dbtx/db.go @@ -0,0 +1,50 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package dbtx + +import ( + "context" + "database/sql" + + "github.com/jmoiron/sqlx" +) + +// New returns a new database Transactor. +func New(db *sqlx.DB) Transactor { + mx := getLocker(db) + run := &runnerDB{ + db: sqlDB{db}, + mx: mx, + } + return run +} + +// transactor combines data access capabilities with the ability to start a transaction. +type transactor interface { + Accessor + startTx(ctx context.Context, opts *sql.TxOptions) (Tx, error) +} + +// sqlDB is a wrapper for the sqlx.DB that implements the transactor interface. +type sqlDB struct { + *sqlx.DB +} + +var _ transactor = (*sqlDB)(nil) + +func (db sqlDB) startTx(ctx context.Context, opts *sql.TxOptions) (Tx, error) { + tx, err := db.DB.BeginTxx(ctx, opts) + return tx, err +} diff --git a/store/database/dbtx/interface.go b/store/database/dbtx/interface.go new file mode 100644 index 0000000000..94566ee0bd --- /dev/null +++ b/store/database/dbtx/interface.go @@ -0,0 +1,54 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package dbtx + +import ( + "context" + "database/sql" + + "github.com/jmoiron/sqlx" +) + +// Accessor is the SQLx database manipulation interface. 
+type Accessor interface { + sqlx.ExtContext // sqlx.binder + sqlx.QueryerContext + sqlx.ExecerContext + QueryRowContext(ctx context.Context, query string, args ...any) *sql.Row + + PrepareContext(ctx context.Context, query string) (*sql.Stmt, error) + PreparexContext(ctx context.Context, query string) (*sqlx.Stmt, error) + PrepareNamedContext(ctx context.Context, query string) (*sqlx.NamedStmt, error) + + GetContext(ctx context.Context, dest interface{}, query string, args ...interface{}) error + SelectContext(ctx context.Context, dest interface{}, query string, args ...interface{}) error +} + +// Transaction is the Go's standard sql transaction interface. +type Transaction interface { + Commit() error + Rollback() error +} + +// Transactor is used to access the database. It combines Accessor interface +// with capability to run functions in a transaction. +type Transactor interface { + Accessor + WithTx(ctx context.Context, txFn func(ctx context.Context) error, opts ...interface{}) error +} + +// Tx combines data access capabilities with the transaction commit and rollback. +type Tx interface { + Accessor + Transaction +} diff --git a/store/database/dbtx/locker.go b/store/database/dbtx/locker.go new file mode 100644 index 0000000000..02e58d0bd1 --- /dev/null +++ b/store/database/dbtx/locker.go @@ -0,0 +1,52 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package dbtx + +import ( + "sync" + + "github.com/jmoiron/sqlx" +) + +const ( + postgres = "postgres" +) + +type locker interface { + Lock() + Unlock() + RLock() + RUnlock() +} + +var globalMx sync.RWMutex + +func needsLocking(driver string) bool { + return driver != postgres +} + +func getLocker(db *sqlx.DB) locker { + if needsLocking(db.DriverName()) { + return &globalMx + } + return lockerNop{} +} + +type lockerNop struct{} + +func (lockerNop) RLock() {} +func (lockerNop) RUnlock() {} +func (lockerNop) Lock() {} +func (lockerNop) Unlock() {} diff --git a/store/database/dbtx/runner.go b/store/database/dbtx/runner.go new file mode 100644 index 0000000000..7e539450c4 --- /dev/null +++ b/store/database/dbtx/runner.go @@ -0,0 +1,186 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package dbtx + +import ( + "context" + "database/sql" + "errors" + + "github.com/jmoiron/sqlx" +) + +// runnerDB executes individual sqlx database calls wrapped with the locker calls (Lock/Unlock) +// or a transaction wrapped with the locker calls (RLock/RUnlock or Lock/Unlock depending on the transaction type). +type runnerDB struct { + db transactor + mx locker +} + +var _ Transactor = runnerDB{} + +func (r runnerDB) WithTx(ctx context.Context, txFn func(context.Context) error, opts ...interface{}) error { + var txOpts *sql.TxOptions + for _, opt := range opts { + if v, ok := opt.(*sql.TxOptions); ok { + txOpts = v + } + } + + if txOpts == nil { + txOpts = TxDefault + } + + if txOpts.ReadOnly { + r.mx.RLock() + defer r.mx.RUnlock() + } else { + r.mx.Lock() + defer r.mx.Unlock() + } + + tx, err := r.db.startTx(ctx, txOpts) + if err != nil { + return err + } + + rtx := &runnerTx{ + Tx: tx, + commit: false, + rollback: false, + } + + defer func() { + if rtx.commit || rtx.rollback { + return + } + _ = tx.Rollback() // ignoring the rollback error + }() + + err = txFn(context.WithValue(ctx, ctxKeyTx{}, Tx(rtx))) + if err != nil { + return err + } + + if !rtx.commit && !rtx.rollback { + err = rtx.Commit() + if errors.Is(err, sql.ErrTxDone) { + // Check if the transaction failed because of the context, if yes return the ctx error. + if ctxErr := ctx.Err(); errors.Is(ctxErr, context.Canceled) || errors.Is(ctxErr, context.DeadlineExceeded) { + err = ctxErr + } + } + } + + return err +} + +func (r runnerDB) DriverName() string { + return r.db.DriverName() +} + +func (r runnerDB) Rebind(query string) string { + return r.db.Rebind(query) +} + +func (r runnerDB) BindNamed(query string, arg interface{}) (string, []interface{}, error) { + return r.db.BindNamed(query, arg) +} + +func (r runnerDB) QueryContext(ctx context.Context, query string, args ...interface{}) (*sql.Rows, error) { + r.mx.Lock() + defer r.mx.Unlock() + return r.db.QueryContext(ctx, query, args...) +} + +func (r runnerDB) QueryxContext(ctx context.Context, query string, args ...interface{}) (*sqlx.Rows, error) { + r.mx.Lock() + defer r.mx.Unlock() + return r.db.QueryxContext(ctx, query, args...) +} + +func (r runnerDB) QueryRowxContext(ctx context.Context, query string, args ...interface{}) *sqlx.Row { + r.mx.Lock() + defer r.mx.Unlock() + return r.db.QueryRowxContext(ctx, query, args...) +} + +func (r runnerDB) ExecContext(ctx context.Context, query string, args ...interface{}) (sql.Result, error) { + r.mx.Lock() + defer r.mx.Unlock() + return r.db.ExecContext(ctx, query, args...) +} + +func (r runnerDB) QueryRowContext(ctx context.Context, query string, args ...any) *sql.Row { + r.mx.Lock() + defer r.mx.Unlock() + return r.db.QueryRowContext(ctx, query, args...) +} + +func (r runnerDB) PrepareContext(ctx context.Context, query string) (*sql.Stmt, error) { + r.mx.Lock() + defer r.mx.Unlock() + return r.db.PrepareContext(ctx, query) +} + +func (r runnerDB) PreparexContext(ctx context.Context, query string) (*sqlx.Stmt, error) { + r.mx.Lock() + defer r.mx.Unlock() + return r.db.PreparexContext(ctx, query) +} + +func (r runnerDB) PrepareNamedContext(ctx context.Context, query string) (*sqlx.NamedStmt, error) { + r.mx.Lock() + defer r.mx.Unlock() + return r.db.PrepareNamedContext(ctx, query) +} + +func (r runnerDB) GetContext(ctx context.Context, dest interface{}, query string, args ...interface{}) error { + r.mx.Lock() + defer r.mx.Unlock() + return r.db.GetContext(ctx, dest, query, args...) 
+} + +func (r runnerDB) SelectContext(ctx context.Context, dest interface{}, query string, args ...interface{}) error { + r.mx.Lock() + defer r.mx.Unlock() + return r.db.SelectContext(ctx, dest, query, args...) +} + +// runnerTx executes sqlx database transaction calls. +// Locking is not used because runnerDB locks the entire transaction. +type runnerTx struct { + Tx + commit bool + rollback bool +} + +var _ Tx = (*runnerTx)(nil) + +func (r *runnerTx) Commit() error { + err := r.Tx.Commit() + if err == nil { + r.commit = true + } + return err +} + +func (r *runnerTx) Rollback() error { + err := r.Tx.Rollback() + if err == nil { + r.rollback = true + } + return err +} diff --git a/store/database/dbtx/runner_test.go b/store/database/dbtx/runner_test.go new file mode 100644 index 0000000000..83cc7041d4 --- /dev/null +++ b/store/database/dbtx/runner_test.go @@ -0,0 +1,345 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package dbtx + +import ( + "context" + "database/sql" + "errors" + "testing" + + "github.com/jmoiron/sqlx" + "github.com/stretchr/testify/assert" +) + +//nolint:gocognit +func TestWithTx(t *testing.T) { + errTest := errors.New("dummy error") + + tests := []struct { + name string + fn func(tx Transaction) error + errCommit error + cancelCtx bool + expectErr error + expectCommitted bool + expectRollback bool + }{ + { + name: "successful", + fn: func(Transaction) error { return nil }, + expectCommitted: true, + }, + { + name: "err-in-transaction", + fn: func(Transaction) error { return errTest }, + expectErr: errTest, + expectRollback: true, + }, + { + name: "commit-failed", + fn: func(Transaction) error { return nil }, + errCommit: errTest, + expectErr: errTest, + expectRollback: true, + }, + { + name: "commit-failed-tx-done", + fn: func(Transaction) error { return nil }, + errCommit: sql.ErrTxDone, + expectErr: sql.ErrTxDone, + expectRollback: true, + }, + { + name: "commit-failed-ctx-cancelled", + fn: func(Transaction) error { return nil }, + errCommit: sql.ErrTxDone, + cancelCtx: true, + expectErr: context.Canceled, + expectRollback: true, + }, + { + name: "panic-in-transaction", + fn: func(Transaction) error { panic("dummy panic") }, + expectRollback: true, + }, + { + name: "commit-in-transaction", + fn: func(tx Transaction) error { + _ = tx.Commit() + return nil + }, + expectCommitted: true, + }, + { + name: "commit-in-transaction-fn-returns-err", + fn: func(tx Transaction) error { + _ = tx.Commit() + return errTest + }, + expectErr: errTest, + expectCommitted: true, + }, + { + name: "rollback-in-transaction", + fn: func(tx Transaction) error { + _ = tx.Rollback() + return nil + }, + expectRollback: true, + }, + { + name: "rollback-in-transaction-fn-returns-err", + fn: func(tx Transaction) error { + _ = tx.Rollback() + return errTest + }, + expectErr: errTest, + expectRollback: true, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + mock := &dbMock{ + t: t, + errCommit: test.errCommit, + } + run 
:= &runnerDB{ + db: mock, + mx: lockerNop{}, + } + + ctx, cancelFn := context.WithCancel(context.Background()) + defer cancelFn() + + var err error + + func() { + defer func() { + _ = recover() + }() + + err = run.WithTx(ctx, func(ctx context.Context) error { + if test.cancelCtx { + cancelFn() + } + return test.fn(GetTransaction(ctx)) + }) + }() + + tx := mock.createdTx + if tx == nil { + t.Error("did not start a transaction") + return + } + + if !tx.finished { + t.Error("transaction not finished") + } + + if want, got := test.expectErr, err; !errors.Is(got, want) { + t.Errorf("expected error %v, but got %v", want, got) + } + + if want, got := test.expectCommitted, tx.committed; want != got { + t.Errorf("expected committed %t, but got %t", want, got) + } + + if want, got := test.expectRollback, tx.rollback; want != got { + t.Errorf("expected rollback %t, but got %t", want, got) + } + }) + } +} + +type dbMock struct { + *sqlx.DB // only to fulfill the Accessor interface, will be nil + t *testing.T + errCommit error + createdTx *txMock +} + +var _ transactor = (*dbMock)(nil) + +func (d *dbMock) startTx(context.Context, *sql.TxOptions) (Tx, error) { + d.createdTx = &txMock{ + t: d.t, + errCommit: d.errCommit, + finished: false, + committed: false, + rollback: false, + } + return d.createdTx, nil +} + +type txMock struct { + *sqlx.Tx // only to fulfill the Accessor interface, will be nil + t *testing.T + errCommit error + finished bool + committed bool + rollback bool +} + +var _ Tx = (*txMock)(nil) + +func (tx *txMock) Commit() error { + if tx.finished { + tx.t.Error("Committing an already finished transaction") + return nil + } + if tx.errCommit == nil { + tx.finished = true + tx.committed = true + } + return tx.errCommit +} + +func (tx *txMock) Rollback() error { + if tx.finished { + tx.t.Error("Rolling back an already finished transaction") + return nil + } + tx.finished = true + tx.rollback = true + return nil +} + +func TestLocking(t *testing.T) { + const dummyQuery = "" + tests := []struct { + name string + fn func(db Transactor, l *lockerCounter) + }{ + { + name: "exec-lock", + fn: func(db Transactor, l *lockerCounter) { + ctx := context.Background() + _, _ = db.ExecContext(ctx, dummyQuery) + _, _ = db.ExecContext(ctx, dummyQuery) + _, _ = db.ExecContext(ctx, dummyQuery) + + assert.Zero(t, l.RLocks) + assert.Zero(t, l.RUnlocks) + assert.Equal(t, 3, l.Locks) + assert.Equal(t, 3, l.Unlocks) + }, + }, + { + name: "tx-lock", + fn: func(db Transactor, l *lockerCounter) { + ctx := context.Background() + _ = db.WithTx(ctx, func(ctx context.Context) error { + _, _ = GetAccessor(ctx, nil).ExecContext(ctx, dummyQuery) + _, _ = GetAccessor(ctx, nil).ExecContext(ctx, dummyQuery) + return nil + }) + + assert.Zero(t, l.RLocks) + assert.Zero(t, l.RUnlocks) + assert.Equal(t, 1, l.Locks) + assert.Equal(t, 1, l.Unlocks) + }, + }, + { + name: "tx-read-lock", + fn: func(db Transactor, l *lockerCounter) { + ctx := context.Background() + _ = db.WithTx(ctx, func(ctx context.Context) error { + _, _ = GetAccessor(ctx, nil).QueryContext(ctx, dummyQuery) + _, _ = GetAccessor(ctx, nil).QueryContext(ctx, dummyQuery) + return nil + }, TxDefaultReadOnly) + + assert.Equal(t, 1, l.RLocks) + assert.Equal(t, 1, l.RUnlocks) + assert.Zero(t, l.Locks) + assert.Zero(t, l.Unlocks) + }, + }, + } + + for _, test := range tests { + l := &lockerCounter{} + t.Run(test.name, func(t *testing.T) { + test.fn(runnerDB{ + db: dbMockNop{}, + mx: l, + }, l) + }) + } +} + +type lockerCounter struct { + Locks int + Unlocks int + RLocks int 
+ RUnlocks int +} + +func (l *lockerCounter) Lock() { l.Locks++ } +func (l *lockerCounter) Unlock() { l.Unlocks++ } +func (l *lockerCounter) RLock() { l.RLocks++ } +func (l *lockerCounter) RUnlock() { l.RUnlocks++ } + +type dbMockNop struct{} + +func (dbMockNop) DriverName() string { return "" } +func (dbMockNop) Rebind(string) string { return "" } +func (dbMockNop) BindNamed(string, interface{}) (string, []interface{}, error) { return "", nil, nil } + +//nolint:nilnil // it's a mock +func (dbMockNop) QueryContext(context.Context, string, ...interface{}) (*sql.Rows, error) { + return nil, nil +} + +//nolint:nilnil // it's a mock +func (dbMockNop) QueryxContext(context.Context, string, ...interface{}) (*sqlx.Rows, error) { + return nil, nil +} +func (dbMockNop) QueryRowxContext(context.Context, string, ...interface{}) *sqlx.Row { return nil } +func (dbMockNop) ExecContext(context.Context, string, ...interface{}) (sql.Result, error) { + return nil, nil +} +func (dbMockNop) QueryRowContext(context.Context, string, ...any) *sql.Row { + return nil +} + +//nolint:nilnil // it's a mock +func (dbMockNop) PrepareContext(context.Context, string) (*sql.Stmt, error) { + return nil, nil +} + +//nolint:nilnil // it's a mock +func (dbMockNop) PreparexContext(context.Context, string) (*sqlx.Stmt, error) { + return nil, nil +} + +//nolint:nilnil // it's a mock +func (dbMockNop) PrepareNamedContext(context.Context, string) (*sqlx.NamedStmt, error) { + return nil, nil +} +func (dbMockNop) GetContext(context.Context, interface{}, string, ...interface{}) error { + return nil +} +func (dbMockNop) SelectContext(context.Context, interface{}, string, ...interface{}) error { + return nil +} + +func (dbMockNop) Commit() error { return nil } +func (dbMockNop) Rollback() error { return nil } + +func (d dbMockNop) startTx(context.Context, *sql.TxOptions) (Tx, error) { return d, nil } diff --git a/store/database/dbtx/tx.go b/store/database/dbtx/tx.go new file mode 100644 index 0000000000..e2190653cb --- /dev/null +++ b/store/database/dbtx/tx.go @@ -0,0 +1,26 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package dbtx + +import "database/sql" + +// TxDefault represents default transaction options. +var TxDefault = &sql.TxOptions{Isolation: sql.LevelDefault, ReadOnly: false} + +// TxDefaultReadOnly represents default transaction options for read-only transactions. +var TxDefaultReadOnly = &sql.TxOptions{Isolation: sql.LevelDefault, ReadOnly: true} + +// TxSerializable represents serializable transaction options. +var TxSerializable = &sql.TxOptions{Isolation: sql.LevelSerializable, ReadOnly: false} diff --git a/store/database/store.go b/store/database/store.go new file mode 100644 index 0000000000..7294654873 --- /dev/null +++ b/store/database/store.go @@ -0,0 +1,136 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package database provides persistent data storage using +// a postgres or sqlite3 database. +package database + +import ( + "context" + "database/sql" + "errors" + "fmt" + "net/url" + "time" + + "github.com/Masterminds/squirrel" + "github.com/jmoiron/sqlx" + "github.com/rs/zerolog/log" +) + +const ( + // sqlForUpdate is the sql statement used for locking rows returned by select queries. + SQLForUpdate = "FOR UPDATE" +) + +type Migrator func(ctx context.Context, dbx *sqlx.DB) error + +// Builder is a global instance of the sql builder. we are able to +// hardcode to postgres since sqlite3 is compatible with postgres. +var Builder = squirrel.StatementBuilder.PlaceholderFormat(squirrel.Dollar) + +// Connect to a database and verify with a ping. +func Connect(ctx context.Context, driver string, datasource string) (*sqlx.DB, error) { + datasource, err := prepareDatasourceForDriver(driver, datasource) + if err != nil { + return nil, fmt.Errorf("failed to prepare datasource: %w", err) + } + + db, err := sql.Open(driver, datasource) + if err != nil { + return nil, fmt.Errorf("failed to open the db: %w", err) + } + + dbx := sqlx.NewDb(db, driver) + if err = pingDatabase(ctx, dbx); err != nil { + return nil, fmt.Errorf("failed to ping the db: %w", err) + } + + return dbx, nil +} + +// ConnectAndMigrate creates the database handle and migrates the database. +func ConnectAndMigrate(ctx context.Context, driver string, datasource string, migrator Migrator) (*sqlx.DB, error) { + dbx, err := Connect(ctx, driver, datasource) + if err != nil { + return nil, err + } + + if err = migrator(ctx, dbx); err != nil { + return nil, fmt.Errorf("failed to setup the db: %w", err) + } + + return dbx, nil +} + +// Must is a helper function that wraps a call to Connect +// and panics if the error is non-nil. +func Must(db *sqlx.DB, err error) *sqlx.DB { + if err != nil { + panic(err) + } + return db +} + +// prepareDatasourceForDriver ensures that required features are enabled on the +// datasource connection string based on the driver. +func prepareDatasourceForDriver(driver string, datasource string) (string, error) { + switch driver { + case "sqlite3": + url, err := url.Parse(datasource) + if err != nil { + return "", fmt.Errorf("datasource is of invalid format for driver sqlite3") + } + + // get original query and update it with required settings + query := url.Query() + + // ensure foreign keys are always enabled (disabled by default) + // See https://github.com/mattn/go-sqlite3#connection-string + query.Set("_foreign_keys", "on") + + // update url with updated query + url.RawQuery = query.Encode() + + return url.String(), nil + default: + return datasource, nil + } +} + +// helper function to ping the database with backoff to ensure +// a connection can be established before we proceed with the +// database setup and migration. 
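+// NOTE: the loop below makes up to 30 attempts with a one second pause between attempts
+// and returns early if a ping succeeds or the context gets canceled.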
+func pingDatabase(ctx context.Context, db *sqlx.DB) error { + var err error + for i := 1; i <= 30; i++ { + err = db.PingContext(ctx) + + // No point in continuing if context was cancelled + if errors.Is(err, context.Canceled) { + return err + } + + // We can complete on first successful ping + if err == nil { + return nil + } + + log.Debug().Err(err).Msgf("Ping attempt #%d failed", i) + + time.Sleep(time.Second) + } + + return fmt.Errorf("all 30 tries failed, last failure: %w", err) +} diff --git a/store/database/util.go b/store/database/util.go new file mode 100644 index 0000000000..5daa879cc1 --- /dev/null +++ b/store/database/util.go @@ -0,0 +1,70 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package database + +import ( + "database/sql" + "fmt" + + "github.com/harness/gitness/store" + + "github.com/pkg/errors" + "github.com/rs/zerolog/log" +) + +// default query range limit. +const defaultLimit = 100 + +// limit returns the page size to a sql limit. +func Limit(size int) uint64 { + if size == 0 { + size = defaultLimit + } + return uint64(size) +} + +// offset converts the page to a sql offset. +func Offset(page, size int) uint64 { + if page == 0 { + page = 1 + } + if size == 0 { + size = defaultLimit + } + page-- + return uint64(page * size) +} + +// Logs the error and message, returns either the provided message or a gitrpc equivalent if possible. +// Always logs the full message with error as warning. +// +//nolint:unparam // revisit error processing +func ProcessSQLErrorf(err error, format string, args ...interface{}) error { + // create fallback error returned if we can't map it + fallbackErr := fmt.Errorf(format, args...) + + // always log internal error together with message. + log.Debug().Msgf("%v: [SQL] %v", fallbackErr, err) + + // If it's a known error, return converted error instead. + switch { + case errors.Is(err, sql.ErrNoRows): + return store.ErrResourceNotFound + case isSQLUniqueConstraintError(err): + return store.ErrDuplicate + default: + return fallbackErr + } +} diff --git a/store/database/util_pq.go b/store/database/util_pq.go new file mode 100644 index 0000000000..4899e7ec9f --- /dev/null +++ b/store/database/util_pq.go @@ -0,0 +1,32 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +//go:build pq +// +build pq + +package database + +import ( + "github.com/lib/pq" + "github.com/pkg/errors" +) + +func isSQLUniqueConstraintError(original error) bool { + var pqErr *pq.Error + if errors.As(original, &pqErr) { + return pqErr.Code == "23505" // unique_violation + } + + return false +} diff --git a/store/database/util_sqlite.go b/store/database/util_sqlite.go new file mode 100644 index 0000000000..0a4db9a3a1 --- /dev/null +++ b/store/database/util_sqlite.go @@ -0,0 +1,33 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//go:build !pq +// +build !pq + +package database + +import ( + "github.com/mattn/go-sqlite3" + "github.com/pkg/errors" +) + +func isSQLUniqueConstraintError(original error) bool { + var sqliteErr sqlite3.Error + if errors.As(original, &sqliteErr) { + return errors.Is(sqliteErr.ExtendedCode, sqlite3.ErrConstraintUnique) || + errors.Is(sqliteErr.ExtendedCode, sqlite3.ErrConstraintPrimaryKey) + } + + return false +} diff --git a/store/database/util_test.go b/store/database/util_test.go new file mode 100644 index 0000000000..693d70892b --- /dev/null +++ b/store/database/util_test.go @@ -0,0 +1,88 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package database + +import ( + "testing" +) + +func TestOffset(t *testing.T) { + tests := []struct { + page int + size int + want uint64 + }{ + { + page: 0, + size: 10, + want: 0, + }, + { + page: 1, + size: 10, + want: 0, + }, + { + page: 2, + size: 10, + want: 10, + }, + { + page: 3, + size: 10, + want: 20, + }, + { + page: 4, + size: 100, + want: 300, + }, + { + page: 4, + size: 0, // unset, expect default 100 + want: 300, + }, + } + + for _, test := range tests { + got, want := Offset(test.page, test.size), test.want + if got != want { + t.Errorf("Got %d want %d for page %d, size %d", got, want, test.page, test.size) + } + } +} + +func TestLimit(t *testing.T) { + tests := []struct { + size int + want uint64 + }{ + { + size: 0, + want: 100, + }, + { + size: 10, + want: 10, + }, + } + + for _, test := range tests { + got, want := Limit(test.size), test.want + if got != want { + t.Errorf("Got %d want %d for size %d", got, want, test.size) + } + } +} diff --git a/store/errors.go b/store/errors.go new file mode 100644 index 0000000000..04907c94d7 --- /dev/null +++ b/store/errors.go @@ -0,0 +1,34 @@ +// Copyright 2023 Harness, Inc. 
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//	http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package store
+
+import "errors"
+
+var (
+	ErrResourceNotFound = errors.New("resource not found")
+	ErrDuplicate = errors.New("resource is a duplicate")
+	ErrVersionConflict = errors.New("resource version conflict")
+	ErrPathTooLong = errors.New("the path is too long")
+	ErrPrimaryPathAlreadyExists = errors.New("primary path already exists for resource")
+	ErrPrimaryPathRequired = errors.New("path has to be primary")
+	ErrAliasPathRequired = errors.New("path has to be an alias")
+	ErrPrimaryPathCantBeDeleted = errors.New("primary path can't be deleted")
+	ErrNoChangeInRequestedMove = errors.New("the requested move doesn't change anything")
+	ErrIllegalMoveCyclicHierarchy = errors.New("the requested move is not permitted as it would cause a " +
+		"cyclic dependency")
+	ErrSpaceWithChildsCantBeDeleted = errors.New("the space can't be deleted as it still contains " +
+		"spaces or repos")
+	ErrPreConditionFailed = errors.New("precondition failed")
+)
diff --git a/stream/memory_broker.go b/stream/memory_broker.go
new file mode 100644
index 0000000000..31878bfd26
--- /dev/null
+++ b/stream/memory_broker.go
@@ -0,0 +1,117 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//	http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package stream
+
+import (
+	"fmt"
+	"sync"
+	"sync/atomic"
+
+	"github.com/hashicorp/go-multierror"
+	gonanoid "github.com/matoous/go-nanoid"
+)
+
+const (
+	idPrefixUIDAlphabet = "abcdefghijklmnopqrstuvwxyz0123456789"
+	idPrefixUIDLength   = 8
+)
+
+// MemoryBroker is a very basic in-memory broker implementation that supports multiple streams and consumer groups.
+type MemoryBroker struct {
+	// idPrefix is a random prefix the memory broker is seeded with to avoid overlaps with previous runs.
+	idPrefix string
+	// latestID is used to generate unique, sequentially increasing message IDs
+	latestID uint64
+	// queueSize is the max size of the queues before enqueueing becomes blocking
+	queueSize int64
+	mx sync.RWMutex
+	// messageQueues contains all streams requested for a specific group
+	// messageQueues[streamID][groupName]: queue of messages for a specific stream and group
+	messageQueues map[string]map[string]chan message
+}
+
+func NewMemoryBroker(queueSize int64) (*MemoryBroker, error) {
+	idPrefix, err := gonanoid.Generate(idPrefixUIDAlphabet, idPrefixUIDLength)
+	if err != nil {
+		return nil, fmt.Errorf("failed to generate random prefix for in-memory event IDs: %w", err)
+	}
+	return &MemoryBroker{
+		idPrefix: idPrefix,
+		latestID: 0,
+		queueSize: queueSize,
+		mx: sync.RWMutex{},
+		messageQueues: make(map[string]map[string]chan message),
+	}, err
+}
+
+// enqueue enqueues a message to a stream.
+// NOTE: messages are only sent to existing groups - groups created after enqueue completes won't get the message.
+// NOTE: if any of the groups' message queues is full, an aggregated error is returned.
+// However, the error is only returned after attempting to send the message to all groups.
+func (b *MemoryBroker) enqueue(streamID string, m message) (string, error) {
+	// similar to redis, only populate the ID if it's empty or requested explicitly
+	if m.id == "" || m.id == "*" {
+		id := atomic.AddUint64(&b.latestID, 1)
+		m.id = fmt.Sprintf("%s-%d", b.idPrefix, id)
+	}
+
+	// the lock is for reading from the messageQueues map
+	// NOTE: this method isn't blocking anywhere so we should be safe from deadlocking
+	// NOTE: might be possible to optimize (potentially not even required), but okay for initial local solution.
+	b.mx.RLock()
+	defer b.mx.RUnlock()
+
+	// get all group queues for the stream - no lock needed as it's read-only
+	queues, ok := b.messageQueues[streamID]
+	if !ok {
+		// if there are no groups - discard message
+		return "", nil
+	}
+
+	// we have to queue up the message to ALL EXISTING groups of a stream
+	var errs error
+	for groupName, queue := range queues {
+		select {
+		case queue <- m:
+			continue
+		default:
+			errs = multierror.Append(errs, fmt.Errorf("queue for group '%s' is full", groupName))
+		}
+	}
+
+	return m.id, errs
+}
+
+// messages returns a read-only channel that can be used to receive messages of a stream under a specific group.
+// If no such queue exists yet, a new one is created; otherwise the existing one is returned.
+func (b *MemoryBroker) messages(streamID string, groupName string) <-chan message {
+	// the lock is for writing to the messageQueues map
+	b.mx.Lock()
+	defer b.mx.Unlock()
+
+	groups, ok := b.messageQueues[streamID]
+	if !ok {
+		groups = make(map[string]chan message)
+		b.messageQueues[streamID] = groups
+	}
+
+	groupQueue, ok := groups[groupName]
+	if !ok {
+		groupQueue = make(chan message, b.queueSize)
+		groups[groupName] = groupQueue
+	}
+
+	return groupQueue
+}
diff --git a/stream/memory_consumer.go b/stream/memory_consumer.go
new file mode 100644
index 0000000000..5c1d2abec7
--- /dev/null
+++ b/stream/memory_consumer.go
@@ -0,0 +1,236 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package stream + +import ( + "context" + "errors" + "fmt" + "runtime/debug" + "sync" + "time" +) + +// memoryMessage extends the message object to allow tracking retries. +type memoryMessage struct { + message + retries int64 +} + +// MemoryConsumer consumes streams from a MemoryBroker. +type MemoryConsumer struct { + broker *MemoryBroker + // namespace specifies the namespace of the keys - any stream key will be prefixed with it + namespace string + // groupName specifies the name of the consumer group. + groupName string + + // Config is the generic consumer configuration. + Config ConsumerConfig + + // streams is a map of all registered streams and their handlers. + streams map[string]handler + + isStarted bool + messageQueue chan memoryMessage + errorCh chan error + infoCh chan string +} + +func NewMemoryConsumer(broker *MemoryBroker, namespace string, groupName string) (*MemoryConsumer, error) { + if groupName == "" { + return nil, errors.New("groupName can't be empty") + } + + const queueCapacity = 500 + const errorChCapacity = 64 + const infoChCapacity = 64 + + return &MemoryConsumer{ + broker: broker, + namespace: namespace, + groupName: groupName, + streams: map[string]handler{}, + Config: defaultConfig, + isStarted: false, + messageQueue: make(chan memoryMessage, queueCapacity), + errorCh: make(chan error, errorChCapacity), + infoCh: make(chan string, infoChCapacity), + }, nil +} + +func (c *MemoryConsumer) Configure(opts ...ConsumerOption) { + if c.isStarted { + return + } + + for _, opt := range opts { + opt.apply(&c.Config) + } +} + +func (c *MemoryConsumer) Register(streamID string, fn HandlerFunc, opts ...HandlerOption) error { + if c.isStarted { + return ErrAlreadyStarted + } + if streamID == "" { + return errors.New("streamID can't be empty") + } + if fn == nil { + return errors.New("fn can't be empty") + } + + // transpose streamID to key namespace - no need to keep inner streamID + transposedStreamID := transposeStreamID(c.namespace, streamID) + if _, ok := c.streams[transposedStreamID]; ok { + return fmt.Errorf("consumer is already registered for '%s' (full stream '%s')", streamID, transposedStreamID) + } + + config := c.Config.DefaultHandlerConfig + for _, opt := range opts { + opt.apply(&config) + } + + c.streams[transposedStreamID] = handler{ + handle: fn, + config: config, + } + return nil +} + +func (c *MemoryConsumer) Start(ctx context.Context) error { + if c.isStarted { + return ErrAlreadyStarted + } + + if len(c.streams) == 0 { + return errors.New("no streams registered") + } + + // mark as started before starting go routines (can't error out from here) + c.isStarted = true + + wg := &sync.WaitGroup{} + + // start routines to read messages from broker + for streamID := range c.streams { + wg.Add(1) + go func(stream string) { + defer wg.Done() + c.reader(ctx, stream) + }(streamID) + } + + // start workers + for i := 0; i < c.Config.Concurrency; i++ { + wg.Add(1) + go func() { + defer wg.Done() + c.consume(ctx) + }() + } + + // start cleanup routing + go func() { + // wait for all go routines to complete + wg.Wait() + + close(c.messageQueue) + 
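+		// also close the info and error channels so that any listeners draining them can stop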
close(c.infoCh) + close(c.errorCh) + }() + + return nil +} + +// reader reads the messages of a specific stream from the broker and puts it +// into the single message queue monitored by the consumers. +func (c *MemoryConsumer) reader(ctx context.Context, streamID string) { + streamQueue := c.broker.messages(streamID, c.groupName) + for { + select { + case <-ctx.Done(): + return + case m := <-streamQueue: + c.messageQueue <- memoryMessage{ + message: m, + retries: 0, + } + } + } +} + +func (c *MemoryConsumer) consume(ctx context.Context) { + for { + select { + case <-ctx.Done(): + return + case m := <-c.messageQueue: + handler, ok := c.streams[m.streamID] + if !ok { + // we only take messages from registered streams, this should never happen. + // WARNING this will discard the message + c.pushError(fmt.Errorf("discard message with id '%s' from stream '%s' - doesn't belong to us", + m.id, m.streamID)) + continue + } + + err := func() (err error) { + // Ensure that handlers don't cause panic. + defer func() { + if r := recover(); r != nil { + c.pushError(fmt.Errorf("PANIC when processing message '%s' in stream '%s':\n%s", + m.id, m.streamID, debug.Stack())) + } + }() + + return handler.handle(ctx, m.id, m.values) + }() + + if err != nil { + c.pushError(fmt.Errorf("failed to process message with id '%s' in stream '%s' (retries: %d): %w", + m.id, m.streamID, m.retries, err)) + + if m.retries >= int64(handler.config.maxRetries) { + c.pushError(fmt.Errorf( + "discard message with id '%s' from stream '%s' - failed %d retries", + m.id, m.streamID, m.retries)) + continue + } + + // increase retry count + m.retries++ + + // requeue message for a retry (needs to be in a separate go func to avoid deadlock) + // IMPORTANT: this won't requeue to broker, only in this consumer's queue! + go func() { + // TODO: linear/exponential backoff relative to retry count might be good + time.Sleep(handler.config.idleTimeout) + c.messageQueue <- m + }() + } + } + } +} + +func (c *MemoryConsumer) Errors() <-chan error { return c.errorCh } +func (c *MemoryConsumer) Infos() <-chan string { return c.infoCh } + +func (c *MemoryConsumer) pushError(err error) { + select { + case c.errorCh <- err: + default: + } +} diff --git a/stream/memory_producer.go b/stream/memory_producer.go new file mode 100644 index 0000000000..75be82b504 --- /dev/null +++ b/stream/memory_producer.go @@ -0,0 +1,54 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package stream + +import ( + "context" + "fmt" +) + +// MemoryProducer sends messages to streams of a MemoryBroker. +type MemoryProducer struct { + broker *MemoryBroker + // namespace specifies the namespace of the keys - any stream key will be prefixed with it + namespace string +} + +func NewMemoryProducer(broker *MemoryBroker, namespace string) *MemoryProducer { + return &MemoryProducer{ + broker: broker, + namespace: namespace, + } +} + +// Send sends information to the Broker. +// Returns the message ID in case of success. 
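+//
+// Illustrative usage sketch (stream name and payload values are placeholders):
+//
+//	msgID, err := producer.Send(ctx, "events", map[string]interface{}{"key": "value"})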
+func (p *MemoryProducer) Send(ctx context.Context, streamID string, payload map[string]interface{}) (string, error) {
+	// ensure we transpose streamID using the key namespace
+	transposedStreamID := transposeStreamID(p.namespace, streamID)
+
+	msgID, err := p.broker.enqueue(
+		transposedStreamID,
+		message{
+			streamID: transposedStreamID,
+			values: payload,
+		})
+	if err != nil {
+		return "", fmt.Errorf("failed to write to stream '%s' (full stream '%s'). Error: %w",
+			streamID, transposedStreamID, err)
+	}
+
+	return msgID, nil
+}
diff --git a/stream/options.go b/stream/options.go
new file mode 100644
index 0000000000..164dfbd026
--- /dev/null
+++ b/stream/options.go
@@ -0,0 +1,100 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//	http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package stream
+
+import (
+	"fmt"
+	"time"
+)
+
+const (
+	// MaxConcurrency is the max number of concurrent go routines (for message handling) for a single stream consumer.
+	MaxConcurrency = 64
+
+	// MaxMaxRetries is the max number of retries of a message for a single consumer group.
+	MaxMaxRetries = 64
+
+	// MinIdleTimeout is the minimum time that can be configured as idle timeout for a stream consumer.
+	MinIdleTimeout = 5 * time.Second
+)
+
+// ConsumerOption is used to configure consumers.
+type ConsumerOption interface {
+	apply(*ConsumerConfig)
+}
+
+// consumerOptionFunc allows functions to implement the ConsumerOption interface.
+type consumerOptionFunc func(*ConsumerConfig)
+
+// apply calls f(config).
+func (f consumerOptionFunc) apply(config *ConsumerConfig) {
+	f(config)
+}
+
+// WithConcurrency sets up the concurrency of the stream consumer.
+func WithConcurrency(concurrency int) ConsumerOption {
+	if concurrency < 1 || concurrency > MaxConcurrency {
+		// misconfiguration - panic to keep options clean
+		panic(fmt.Sprintf("provided concurrency %d is invalid - has to be between 1 and %d",
+			concurrency, MaxConcurrency))
+	}
+	return consumerOptionFunc(func(c *ConsumerConfig) {
+		c.Concurrency = concurrency
+	})
+}
+
+// WithHandlerOptions sets up the default handler options of a stream consumer.
+func WithHandlerOptions(opts ...HandlerOption) ConsumerOption {
+	return consumerOptionFunc(func(c *ConsumerConfig) {
+		for _, opt := range opts {
+			opt.apply(&c.DefaultHandlerConfig)
+		}
+	})
+}
+
+// HandlerOption is used to configure the handler consuming a single stream.
+type HandlerOption interface {
+	apply(*HandlerConfig)
+}
+
+// handlerOptionFunc allows functions to implement the HandlerOption interface.
+type handlerOptionFunc func(*HandlerConfig)
+
+// apply calls f(config).
+func (f handlerOptionFunc) apply(config *HandlerConfig) {
+	f(config)
+}
+
+// WithMaxRetries can be used to set the max retry count for a specific handler.
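+// Passing 0 effectively disables retries - a failing message is discarded instead of being redelivered.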
+func WithMaxRetries(maxRetries int) HandlerOption {
+	if maxRetries < 0 || maxRetries > MaxMaxRetries {
+		// misconfiguration - panic to keep options clean
+		panic(fmt.Sprintf("provided maxRetries %d is invalid - has to be between 0 and %d", maxRetries, MaxMaxRetries))
+	}
+	return handlerOptionFunc(func(c *HandlerConfig) {
+		c.maxRetries = maxRetries
+	})
+}
+
+// WithIdleTimeout can be used to set the idle timeout for a specific handler.
+func WithIdleTimeout(timeout time.Duration) HandlerOption {
+	if timeout < MinIdleTimeout {
+		// misconfiguration - panic to keep options clean
+		panic(fmt.Sprintf("provided timeout %s is invalid - has to be at least %s", timeout, MinIdleTimeout))
+	}
+	return handlerOptionFunc(func(c *HandlerConfig) {
+		c.idleTimeout = timeout
+	})
+}
diff --git a/stream/redis_consumer.go b/stream/redis_consumer.go
new file mode 100644
index 0000000000..daf4c4d287
--- /dev/null
+++ b/stream/redis_consumer.go
@@ -0,0 +1,579 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//	http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package stream
+
+import (
+	"context"
+	"errors"
+	"fmt"
+	"net"
+	"runtime/debug"
+	"strings"
+	"sync"
+	"time"
+
+	"github.com/go-redis/redis/v8"
+)
+
+// RedisConsumer provides functionality to process Redis streams as part of a consumer group.
+type RedisConsumer struct {
+	rdb redis.UniversalClient
+	// namespace specifies the namespace of the keys - any stream key will be prefixed with it
+	namespace string
+	// groupName specifies the name of the consumer group.
+	groupName string
+	// consumerName specifies the name of the consumer.
+	consumerName string
+
+	// Config is the generic consumer configuration.
+	Config ConsumerConfig
+
+	// streams is a map of all registered streams and their handlers.
+	streams map[string]handler
+
+	isStarted bool
+	messageQueue chan message
+	errorCh chan error
+	infoCh chan string
+}
+
+// NewRedisConsumer creates a new Redis stream consumer. Streams are read with XREADGROUP.
+// It returns channels of info messages and errors. The caller should not block on these channels for too long.
+// These channels are provided mainly for logging.
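+//
+// Minimal usage sketch (namespace, group, consumer, and stream names are illustrative):
+//
+//	consumer, _ := NewRedisConsumer(rdb, "gitness", "my-group", "consumer-1")
+//	_ = consumer.Register("events", func(ctx context.Context, id string, values map[string]interface{}) error {
+//		return nil
+//	})
+//	_ = consumer.Start(ctx)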
+func NewRedisConsumer(rdb redis.UniversalClient, namespace string, + groupName string, consumerName string) (*RedisConsumer, error) { + if groupName == "" { + return nil, errors.New("groupName can't be empty") + } + if consumerName == "" { + return nil, errors.New("consumerName can't be empty") + } + + const queueCapacity = 500 + const errorChCapacity = 64 + const infoChCapacity = 64 + + return &RedisConsumer{ + rdb: rdb, + namespace: namespace, + groupName: groupName, + consumerName: consumerName, + streams: map[string]handler{}, + Config: defaultConfig, + isStarted: false, + messageQueue: make(chan message, queueCapacity), + errorCh: make(chan error, errorChCapacity), + infoCh: make(chan string, infoChCapacity), + }, nil +} + +func (c *RedisConsumer) Configure(opts ...ConsumerOption) { + if c.isStarted { + return + } + + for _, opt := range opts { + opt.apply(&c.Config) + } +} + +func (c *RedisConsumer) Register(streamID string, fn HandlerFunc, opts ...HandlerOption) error { + if c.isStarted { + return ErrAlreadyStarted + } + if streamID == "" { + return errors.New("streamID can't be empty") + } + if fn == nil { + return errors.New("fn can't be empty") + } + + // transpose streamID to key namespace - no need to keep inner streamID + transposedStreamID := transposeStreamID(c.namespace, streamID) + if _, ok := c.streams[transposedStreamID]; ok { + return fmt.Errorf("consumer is already registered for '%s' (redis stream '%s')", streamID, transposedStreamID) + } + + // create final config for handler + config := c.Config.DefaultHandlerConfig + for _, opt := range opts { + opt.apply(&config) + } + + c.streams[transposedStreamID] = handler{ + handle: fn, + config: config, + } + + return nil +} + +func (c *RedisConsumer) Start(ctx context.Context) error { + if c.isStarted { + return ErrAlreadyStarted + } + + if len(c.streams) == 0 { + return errors.New("no streams registered") + } + + var err error + + // Check if Redis is accessible, fail if it's not. + err = c.rdb.Ping(ctx).Err() + if err != nil && !errors.Is(err, redis.Nil) { + return fmt.Errorf("failed to ping redis server: %w", err) + } + + // Create consumer group for all streams, creates streams if they don't exist. + err = c.createGroupForAllStreams(ctx) + if err != nil { + return err + } + + // mark as started before starting go routines (can't error out from here) + c.isStarted = true + + wg := &sync.WaitGroup{} + + wg.Add(1) + go func() { + defer wg.Done() + c.removeStaleConsumers(ctx, time.Hour) + // launch redis reader, it will finish when the ctx is done + c.reader(ctx) + }() + + wg.Add(1) + go func() { + defer wg.Done() + // launch redis message reclaimer, it will finish when the ctx is done. + // IMPORTANT: Keep reclaim interval small for now to support faster retries => higher load on redis! + // TODO: Make retries local by default with opt-in cross-instance retries. + // https://harness.atlassian.net/browse/SCM-83 + const reclaimInterval = 10 * time.Second + c.reclaimer(ctx, reclaimInterval) + }() + + for i := 0; i < c.Config.Concurrency; i++ { + wg.Add(1) + go func() { + defer wg.Done() + // launch redis message consumer, it will finish when the ctx is done + c.consumer(ctx) + }() + } + + go func() { + // wait for all go routines to complete + wg.Wait() + + // close all channels + close(c.messageQueue) + close(c.errorCh) + close(c.infoCh) + }() + + return nil +} + +// reader method reads a Redis stream with XREADGROUP command to retrieve messages. 
+// The messages are then sent to a go channel passed as parameter for processing. +// If the stream already contains unassigned messages, those we'll be returned. +// Otherwise XREADGROUP blocks until either a new message arrives or block timeout happens. +// The method terminates when the provided context finishes. +// +//nolint:funlen,gocognit // refactor if needed +func (c *RedisConsumer) reader(ctx context.Context) { + delays := []time.Duration{1 * time.Millisecond, 5 * time.Second, 15 * time.Second, 30 * time.Second, time.Minute} + consecutiveFailures := 0 + + // pre-generate streams argument for XReadGroup + // NOTE: for the first call ever we want to get the history of the consumer (to allow for seamless restarts) + // ASSUMPTION: only one consumer with a given groupName+consumerName is running at a time + scanHistory := true + streamLen := len(c.streams) + streamsArg := make([]string, 2*streamLen) + i := 0 + for streamID := range c.streams { + streamsArg[i] = streamID + streamsArg[streamLen+i] = "0" + i++ + } + + for { + var delay time.Duration + if consecutiveFailures < len(delays) { + delay = delays[consecutiveFailures] + } else { + delay = delays[len(delays)-1] + } + readTimer := time.NewTimer(delay) + + select { + case <-ctx.Done(): + readTimer.Stop() + return + + case <-readTimer.C: + const count = 100 + + resReadStream, err := c.rdb.XReadGroup(ctx, &redis.XReadGroupArgs{ + Group: c.groupName, + Consumer: c.consumerName, + Streams: streamsArg, + Count: count, + Block: 5 * time.Minute, + }).Result() + + // if context is canceled, continue and next iteration will exit cleanly + if errors.Is(err, context.Canceled) { + continue + } + + // network timeout - log it and retry + var errNet net.Error + if ok := errors.As(err, &errNet); ok && errNet.Timeout() { + c.pushError(fmt.Errorf("encountered network failure: %w", errNet)) + consecutiveFailures++ + continue + } + + // group doesn't exist anymore - recreate it + if err != nil && strings.HasPrefix(err.Error(), "NOGROUP") { + cErr := c.createGroupForAllStreams(ctx) + if cErr != nil { + c.pushError(fmt.Errorf("failed to re-create group for at least one stream: %w", err)) + consecutiveFailures++ + } else { + c.pushInfo(fmt.Sprintf("re-created group for all streams where it got removed, original error: %s", + err)) + consecutiveFailures = 0 + } + continue + } + + // any other error we handle generically + if err != nil && !errors.Is(err, redis.Nil) { + consecutiveFailures++ + c.pushError(fmt.Errorf("failed to read redis streams %v (consecutive fails: %d): %w", + streamsArg, consecutiveFailures, err)) + continue + } + + // check if we are done with scanning the history of all streams + if scanHistory { + scanHistory = false + + // Getting history always returns all streams in the same order as queried + // (even a stream that doesn't have any history left, in that case redis returns an empty slice) + // Thus, we can use a simple incrementing index to get the streamArg for a stream in the response + x := 0 + for _, stream := range resReadStream { + // If the stream had messages in the history, continue scanning after the latest read message. 
+ if len(stream.Messages) > 0 { + scanHistory = true + streamsArg[streamLen+x] = stream.Messages[len(stream.Messages)-1].ID + } + x++ + } + + if !scanHistory { + c.pushInfo("completed scan of history") + + // Update stream args to read latest messages for all streams + for j := 0; j < streamLen; j++ { + streamsArg[streamLen+j] = ">" + } + + continue + } + } + + // reset fail count + consecutiveFailures = 0 + + // if no messages were read we can skip iteration + if len(resReadStream) == 0 { + continue + } + + // retrieve all messages across all streams and put them into the message queue + for _, stream := range resReadStream { + for _, m := range stream.Messages { + c.messageQueue <- message{ + streamID: stream.Stream, + id: m.ID, + values: m.Values, + } + } + } + } + } +} + +// reclaimer periodically inspects pending messages with XPENDING command. +// If a message sits longer than processingTimeout, we attempt to reclaim the message for this consumer +// and enqueue it for processing. +// +//nolint:funlen,gocognit // refactor if needed +func (c *RedisConsumer) reclaimer(ctx context.Context, reclaimInterval time.Duration) { + reclaimTimer := time.NewTimer(reclaimInterval) + defer func() { + reclaimTimer.Stop() + }() + + const ( + baseCount = 16 + maxCount = 1024 + ) + + // the minimum message ID which we are querying for. + // redis treats "-" as smaller than any valid message ID + start := "-" + // the maximum message ID which we are querying for. + // redis treats "+" as bigger than any valid message ID + end := "+" + count := baseCount + + for { + select { + case <-ctx.Done(): + return + case <-reclaimTimer.C: + for streamID, handler := range c.streams { + resPending, errPending := c.rdb.XPendingExt(ctx, &redis.XPendingExtArgs{ + Stream: streamID, + Group: c.groupName, + Start: start, + End: end, + Idle: handler.config.idleTimeout, + Count: int64(count), + }).Result() + if errPending != nil && !errors.Is(errPending, redis.Nil) { + c.pushError(fmt.Errorf("failed to fetch pending messages: %w", errPending)) + continue + } + + if len(resPending) == 0 { + continue + } + + // It's safe to change start of the requested range for the next iteration to oldest message. + start = resPending[0].ID + + for _, resMessage := range resPending { + if resMessage.RetryCount > int64(handler.config.maxRetries) { + // Retry count gets increased after every XCLAIM. + // Large retry count might mean there is something wrong with the message, so we'll XACK it. + // WARNING this will discard the message! + errAck := c.rdb.XAck(ctx, streamID, c.groupName, resMessage.ID).Err() + if errAck != nil { + c.pushError(fmt.Errorf( + "failed to force acknowledge (discard) message '%s' (Retries: %d) in stream '%s': %w", + resMessage.ID, resMessage.RetryCount, streamID, errAck)) + } else { + retryCount := resMessage.RetryCount - 1 // redis is counting this execution as retry + c.pushError(fmt.Errorf( + "force acknowledged (discarded) message '%s' (Retries: %d) in stream '%s'", + resMessage.ID, retryCount, streamID)) + } + continue + } + + // Otherwise, claim the message so we can retry it. + claimedMessages, errClaim := c.rdb.XClaim(ctx, &redis.XClaimArgs{ + Stream: streamID, + Group: c.groupName, + Consumer: c.consumerName, + MinIdle: handler.config.idleTimeout, + Messages: []string{resMessage.ID}, + }).Result() + + if errors.Is(errClaim, redis.Nil) { + // Receiving redis.Nil here means the message is removed from the stream (because of MAXLEN). + // The only option is to acknowledge it with XACK. 
+ errAck := c.rdb.XAck(ctx, streamID, c.groupName, resMessage.ID).Err() + if errAck != nil { + c.pushError(fmt.Errorf("failed to acknowledge failed message '%s' in stream '%s': %w", + resMessage.ID, streamID, errAck)) + } else { + c.pushInfo(fmt.Sprintf("acknowledged failed message '%s' in stream '%s'", + resMessage.ID, streamID)) + } + + continue + } + + if errClaim != nil { + // This can happen if two consumers try to claim the same message at once. + // One would succeed and the other will get an error. + c.pushError(fmt.Errorf("failed to claim message '%s' in stream '%s': %w", + resMessage.ID, streamID, errClaim)) + + continue + } + + // This is not expected to happen (message will be retried or eventually discarded) + if len(claimedMessages) == 0 { + c.pushError(fmt.Errorf( + "no error when claiming message '%s' in stream '%s', but redis returned no message", + resMessage.ID, streamID)) + + continue + } + + // we claimed only one message id so there is only one message in the slice + claimedMessage := claimedMessages[0] + c.messageQueue <- message{ + streamID: streamID, + id: claimedMessage.ID, + values: claimedMessage.Values, + } + } + + // If number of messages that we got is equal to the number that we requested + // it means that there's a lot for processing, so we'll increase number of messages + // that we'll pull in the next iteration. + if len(resPending) == count { + count *= 2 + if count > maxCount { + count = maxCount + } + } else { + count = baseCount + } + } + + reclaimTimer.Reset(reclaimInterval) + } + } +} + +// consumer method consumes messages coming from Redis. The method terminates when messageQueue channel closes. +func (c *RedisConsumer) consumer(ctx context.Context) { + for { + select { + case <-ctx.Done(): + return + case m := <-c.messageQueue: + if m.id == "" { + // id should never be empty, if it is then the channel is closed + return + } + + handler, ok := c.streams[m.streamID] + if !ok { + // we don't want to ack the message + // maybe someone else can claim and process it (worst case it expires) + c.pushError(fmt.Errorf("received message '%s' in stream '%s' that doesn't belong to us, skip", + m.id, m.streamID)) + continue + } + + err := func() (err error) { + // Ensure that handlers don't cause panic. + defer func() { + if r := recover(); r != nil { + c.pushError(fmt.Errorf("PANIC when processing message '%s' in stream '%s':\n%s", + m.id, m.streamID, debug.Stack())) + } + }() + + return handler.handle(ctx, m.id, m.values) + }() + if err != nil { + c.pushError(fmt.Errorf("failed to process message '%s' in stream '%s': %w", m.id, m.streamID, err)) + continue + } + + err = c.rdb.XAck(ctx, m.streamID, c.groupName, m.id).Err() + if err != nil { + c.pushError(fmt.Errorf("failed to acknowledge message '%s' in stream '%s': %w", m.id, m.streamID, err)) + continue + } + } + } +} + +func (c *RedisConsumer) removeStaleConsumers(ctx context.Context, maxAge time.Duration) { + for streamID := range c.streams { + // Fetch all consumers for this stream and group. + resConsumers, err := c.rdb.XInfoConsumers(ctx, streamID, c.groupName).Result() + if err != nil { + c.pushError(fmt.Errorf("failed to read consumers for stream '%s': %w", streamID, err)) + return + } + + // Delete old consumers, but only if they don't have pending messages. 
+ for _, resConsumer := range resConsumers { + age := time.Duration(resConsumer.Idle) * time.Millisecond + if resConsumer.Pending > 0 || age < maxAge { + continue + } + + err = c.rdb.XGroupDelConsumer(ctx, streamID, c.groupName, resConsumer.Name).Err() + if err != nil { + c.pushError(fmt.Errorf( + "failed to remove stale consumer '%s' from group '%s' (age '%s') for stream '%s': %w", + resConsumer.Name, c.groupName, age, streamID, err)) + continue + } + + c.pushInfo(fmt.Sprintf("removed stale consumer '%s' from group '%s' (age '%s') for stream '%s'", + resConsumer.Name, c.groupName, age, streamID)) + } + } +} + +func (c *RedisConsumer) pushError(err error) { + select { + case c.errorCh <- err: + default: + } +} + +func (c *RedisConsumer) pushInfo(s string) { + select { + case c.infoCh <- s: + default: + } +} + +func (c *RedisConsumer) Errors() <-chan error { return c.errorCh } +func (c *RedisConsumer) Infos() <-chan string { return c.infoCh } + +func (c *RedisConsumer) createGroupForAllStreams(ctx context.Context) error { + for streamID := range c.streams { + err := createGroup(ctx, c.rdb, streamID, c.groupName) + if err != nil { + return err + } + } + + return nil +} + +func createGroup(ctx context.Context, rdb redis.UniversalClient, streamID string, groupName string) error { + // Creates a new consumer group that starts receiving messages from now on. + // Existing messges in the queue are ignored (we don't want to overload a group with old messages) + // For more details of the XGROUPCREATE api see https://redis.io/commands/xgroup-create/ + err := rdb.XGroupCreateMkStream(ctx, streamID, groupName, "$").Err() + if err != nil && !strings.HasPrefix(err.Error(), "BUSYGROUP") { + return fmt.Errorf("failed to create consumer group '%s' for stream '%s': %w", groupName, streamID, err) + } + + return nil +} diff --git a/stream/redis_producer.go b/stream/redis_producer.go new file mode 100644 index 0000000000..066a851fcd --- /dev/null +++ b/stream/redis_producer.go @@ -0,0 +1,67 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package stream + +import ( + "context" + "fmt" + + "github.com/go-redis/redis/v8" +) + +type RedisProducer struct { + rdb redis.UniversalClient + // namespace defines the namespace of the stream keys - any stream key will be prefixed with it. + namespace string + // maxStreamLength defines the maximum number of entries in each stream (ring buffer). + maxStreamLength int64 + // approxMaxStreamLength specifies whether the maxStreamLength should be approximated. + // NOTE: enabling approximation of stream length can lead to performance improvements. + approxMaxStreamLength bool +} + +func NewRedisProducer(rdb redis.UniversalClient, namespace string, + maxStreamLength int64, approxMaxStreamLength bool) *RedisProducer { + return &RedisProducer{ + rdb: rdb, + namespace: namespace, + maxStreamLength: maxStreamLength, + approxMaxStreamLength: approxMaxStreamLength, + } +} + +// Send sends information to the Redis stream. 
+// Returns the message ID in case of success. +func (p *RedisProducer) Send(ctx context.Context, streamID string, payload map[string]interface{}) (string, error) { + // ensure we transpose streamID using the key namespace + transposedStreamID := transposeStreamID(p.namespace, streamID) + + // send message to stream - will create the stream if it doesn't exist yet + // NOTE: response is the message ID (See https://redis.io/commands/xadd/) + args := &redis.XAddArgs{ + Stream: transposedStreamID, + Values: payload, + MaxLen: p.maxStreamLength, + Approx: p.approxMaxStreamLength, + ID: "*", // let redis create message ID + } + msgID, err := p.rdb.XAdd(ctx, args).Result() + if err != nil { + return "", fmt.Errorf("failed to write to stream '%s' (redis stream '%s'). Error: %w", + streamID, transposedStreamID, err) + } + + return msgID, nil +} diff --git a/stream/stream.go b/stream/stream.go new file mode 100644 index 0000000000..2986b40363 --- /dev/null +++ b/stream/stream.go @@ -0,0 +1,76 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package stream + +import ( + "context" + "errors" + "fmt" + "time" +) + +var ( + ErrAlreadyStarted = errors.New("consumer already started") + + defaultConfig = ConsumerConfig{ + Concurrency: 2, + DefaultHandlerConfig: HandlerConfig{ + idleTimeout: 1 * time.Minute, + maxRetries: 2, + }, + } +) + +// ConsumerConfig defines the configuration of a consumer containing externally exposed values +// that can be configured using the available ConsumerOptions. +type ConsumerConfig struct { + // Concurrency specifies the number of worker go routines executing stream handlers. + Concurrency int + + // DefaultHandlerConfig is the default config used for stream handlers. + DefaultHandlerConfig HandlerConfig +} + +// HandlerConfig defines the configuration for a single stream handler containing externally exposed values +// that can be configured using the available HandlerOptions. +type HandlerConfig struct { + // idleTimeout specifies the maximum duration a message stays read but unacknowleged + // before it can be claimed by others. + idleTimeout time.Duration + + // maxRetries specifies the max number a stream message is retried. + maxRetries int +} + +// HandlerFunc defines the signature of a function handling stream messages. +type HandlerFunc func(ctx context.Context, messageID string, payload map[string]interface{}) error + +// handler defines a handler of a single stream. +type handler struct { + handle HandlerFunc + config HandlerConfig +} + +// message is used internally for passing stream messages via channels. +type message struct { + streamID string + id string + values map[string]interface{} +} + +// transposeStreamID transposes the provided streamID based on the namespace. 
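+// For example (illustrative values), namespace "gitness" and streamID "events" result in the key "gitness:events".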
+func transposeStreamID(namespace string, streamID string) string { + return fmt.Sprintf("%s:%s", namespace, streamID) +} diff --git a/types/authz.go b/types/authz.go new file mode 100644 index 0000000000..cb9298b009 --- /dev/null +++ b/types/authz.go @@ -0,0 +1,42 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package types + +import "github.com/harness/gitness/types/enum" + +// PermissionCheck represents a permission check. +type PermissionCheck struct { + Scope Scope + Resource Resource + Permission enum.Permission +} + +// Resource represents the resource of a permission check. +// Note: Keep the name empty in case access is requested for all resources of that type. +type Resource struct { + Type enum.ResourceType + Name string +} + +// Scope represents the scope of a permission check +// Notes: +// - In case the permission check is for resource REPO, keep repo empty (repo is resource, not scope) +// - In case the permission check is for resource SPACE, SpacePath is an ancestor of the space (space is +// resource, not scope) +// - Repo isn't use as of now (will be useful once we add access control for repo child resources, e.g. branches). +type Scope struct { + SpacePath string + Repo string +} diff --git a/types/check.go b/types/check.go new file mode 100644 index 0000000000..f011fa1440 --- /dev/null +++ b/types/check.go @@ -0,0 +1,73 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package types + +import ( + "encoding/json" + + "github.com/harness/gitness/types/enum" +) + +type Check struct { + ID int64 `json:"id"` + CreatedBy int64 `json:"-"` // clients will use "reported_by" + Created int64 `json:"created"` + Updated int64 `json:"updated"` + RepoID int64 `json:"-"` // status checks are always returned for a commit in a repository + CommitSHA string `json:"-"` // status checks are always returned for a commit in a repository + UID string `json:"uid"` + Status enum.CheckStatus `json:"status"` + Summary string `json:"summary"` + Link string `json:"link"` + Metadata json.RawMessage `json:"metadata"` + + Payload CheckPayload `json:"payload"` + ReportedBy PrincipalInfo `json:"reported_by"` +} + +type CheckPayload struct { + Version string `json:"version"` + Kind enum.CheckPayloadKind `json:"kind"` + Data json.RawMessage `json:"data"` +} + +// CheckListOptions holds check list query parameters. 
+type CheckListOptions struct { + Page int `json:"page"` + Size int `json:"size"` +} + +type ReqCheck struct { + ID int64 `json:"id"` + CreatedBy int64 `json:"-"` // clients will use "added_by" + Created int64 `json:"created"` + RepoID int64 `json:"-"` // required status checks are always returned for a single repository + BranchPattern string `json:"branch_pattern"` + CheckUID string `json:"check_uid"` + + AddedBy PrincipalInfo `json:"added_by"` +} + +type CheckPayloadText struct { + Details string `json:"details"` +} + +// CheckPayloadInternal is for internal use for more seamless integration for +// gitness CI status checks. +type CheckPayloadInternal struct { + Number int64 `json:"execution_number"` + RepoID int64 `json:"repo_id"` + PipelineID int64 `json:"pipeline_id"` +} diff --git a/types/check/common.go b/types/check/common.go new file mode 100644 index 0000000000..9b2db7b36b --- /dev/null +++ b/types/check/common.go @@ -0,0 +1,158 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package check + +import ( + "fmt" + "regexp" + "strings" +) + +const ( + minDisplayNameLength = 1 + maxDisplayNameLength = 256 + + minUIDLength = 1 + maxUIDLength = 100 + uidRegex = "^[a-zA-Z_][a-zA-Z0-9-_.]*$" + + minEmailLength = 1 + maxEmailLength = 250 + + maxDescriptionLength = 1024 +) + +var ( + // illegalRootSpaceUIDs is the list of space UIDs we are blocking for root spaces + // as they might cause issues with routing. + illegalRootSpaceUIDs = []string{"api", "git"} +) + +var ( + ErrDisplayNameLength = &ValidationError{ + fmt.Sprintf("DisplayName has to be between %d and %d in length.", minDisplayNameLength, maxDisplayNameLength), + } + + ErrDescriptionTooLong = &ValidationError{ + fmt.Sprintf("Description can be at most %d in length.", maxDescriptionLength), + } + + ErrUIDLength = &ValidationError{ + fmt.Sprintf("UID has to be between %d and %d in length.", + minUIDLength, maxUIDLength), + } + ErrUIDRegex = &ValidationError{ + "UID has to start with a letter (or _) and only contain the following characters [a-zA-Z0-9-_.].", + } + + ErrEmailLen = &ValidationError{ + fmt.Sprintf("Email address has to be within %d and %d characters", minEmailLength, maxEmailLength), + } + + ErrInvalidCharacters = &ValidationError{"Input contains invalid characters."} + + ErrIllegalRootSpaceUID = &ValidationError{ + fmt.Sprintf("The following names are not allowed for a root space: %v", illegalRootSpaceUIDs), + } +) + +// DisplayName checks the provided display name and returns an error if it isn't valid. +func DisplayName(displayName string) error { + l := len(displayName) + if l < minDisplayNameLength || l > maxDisplayNameLength { + return ErrDisplayNameLength + } + + return ForControlCharacters(displayName) +} + +// Description checks the provided description and returns an error if it isn't valid. 
+func Description(description string) error { + l := len(description) + if l > maxDescriptionLength { + return ErrDescriptionTooLong + } + + return ForControlCharacters(description) +} + +// ForControlCharacters ensures that there are no control characters in the provided string. +func ForControlCharacters(s string) error { + for _, r := range s { + if r < 32 || r == 127 { + return ErrInvalidCharacters + } + } + + return nil +} + +// UID checks the provided uid and returns an error if it isn't valid. +func UID(uid string) error { + l := len(uid) + if l < minUIDLength || l > maxUIDLength { + return ErrUIDLength + } + + if ok, _ := regexp.Match(uidRegex, []byte(uid)); !ok { + return ErrUIDRegex + } + + return nil +} + +// PrincipalUID is an abstraction of a validation method that verifies principal UIDs. +// NOTE: Enables support for different principal UID formats. +type PrincipalUID func(uid string) error + +// PrincipalUIDDefault performs the default Principal UID check. +func PrincipalUIDDefault(uid string) error { + return UID(uid) +} + +// PathUID is an abstraction of a validation method that returns true +// iff the UID is valid to be used in a resource path for repo/space. +// NOTE: Enables support for different path formats. +type PathUID func(uid string, isRoot bool) error + +// PathUIDDefault performs the default UID check and also blocks illegal root space UIDs. +func PathUIDDefault(uid string, isRoot bool) error { + if err := UID(uid); err != nil { + return err + } + + if isRoot { + uidLower := strings.ToLower(uid) + for _, p := range illegalRootSpaceUIDs { + if p == uidLower { + return ErrIllegalRootSpaceUID + } + } + } + + return nil +} + +// Email checks the provided email and returns an error if it isn't valid. +func Email(email string) error { + l := len(email) + if l < minEmailLength || l > maxEmailLength { + return ErrEmailLen + } + + // TODO: add better email validation. + + return nil +} diff --git a/types/check/error.go b/types/check/error.go new file mode 100644 index 0000000000..49dd13dec0 --- /dev/null +++ b/types/check/error.go @@ -0,0 +1,62 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package check + +import ( + "errors" + "fmt" +) + +var ( + ErrAny = &ValidationError{} +) + +// ValidationError is error returned for any validation errors. +// WARNING: This error will be printed to the user as is! 
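+//
+// NOTE: errors.Is(err, ErrAny) reports true for any *ValidationError (see the Is method below).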
+type ValidationError struct { + msg string +} + +func NewValidationError(msg string) *ValidationError { + return &ValidationError{ + msg: msg, + } +} + +func NewValidationErrorf(format string, args ...interface{}) *ValidationError { + return &ValidationError{ + msg: fmt.Sprintf(format, args...), + } +} + +func (e *ValidationError) Error() string { + return e.msg +} + +func (e *ValidationError) Is(target error) bool { + // If the caller is checking for any ValidationError, return true + if errors.Is(target, ErrAny) { + return true + } + + // ensure it's the correct type + err := &ValidationError{} + if !errors.As(target, &err) { + return false + } + + // only the same if the message is the same + return e.msg == err.msg +} diff --git a/types/check/password.go b/types/check/password.go new file mode 100644 index 0000000000..81f9267c8f --- /dev/null +++ b/types/check/password.go @@ -0,0 +1,44 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package check + +import ( + "fmt" +) + +const ( + minPasswordLength = 1 + maxPasswordLength = 128 +) + +var ( + // ErrPasswordLength is returned when the password + // is outside of the allowed length. + ErrPasswordLength = &ValidationError{ + fmt.Sprintf("Password has to be within %d and %d characters", minPasswordLength, maxPasswordLength), + } +) + +// Password returns true if the Password is valid. +// TODO: add proper password checks. +func Password(pw string) error { + // validate length + l := len(pw) + if l < minPasswordLength || l > maxPasswordLength { + return ErrPasswordLength + } + + return nil +} diff --git a/types/check/path.go b/types/check/path.go new file mode 100644 index 0000000000..0075e6b7ae --- /dev/null +++ b/types/check/path.go @@ -0,0 +1,88 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
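// A minimal, self-contained usage sketch of the validation errors defined above,
// assuming the package is imported as "github.com/harness/gitness/types/check"
// (the module path used elsewhere in this change). errors.Is with check.ErrAny
// matches any *ValidationError, which is what makes these messages safe to show
// to the user.
package main

import (
	"errors"
	"fmt"

	"github.com/harness/gitness/types/check"
)

func main() {
	err := check.UID("-starts-with-dash") // violates uidRegex -> ErrUIDRegex
	if errors.Is(err, check.ErrAny) {
		// some validation error occurred - its message is meant to be user facing
		fmt.Println("validation failed:", err)
	}
}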
+
+package check
+
+import (
+    "fmt"
+    "strings"
+
+    "github.com/harness/gitness/types"
+)
+
+const (
+    maxPathSegmentsForSpace = 9
+    maxPathSegments         = 10
+)
+
+var (
+    ErrPathEmpty = &ValidationError{
+        "Path can't be empty.",
+    }
+    ErrPathInvalidDepth = &ValidationError{
+        fmt.Sprintf("A path can have at most %d segments (%d for spaces).",
+            maxPathSegments, maxPathSegmentsForSpace),
+    }
+    ErrEmptyPathSegment = &ValidationError{
+        "Empty segments are not allowed.",
+    }
+    ErrPathCantBeginOrEndWithSeparator = &ValidationError{
+        fmt.Sprintf("Path can't start or end with the separator ('%s').", types.PathSeparator),
+    }
+)
+
+// Path checks the provided path and returns an error if it isn't valid.
+func Path(path string, isSpace bool, uidCheck PathUID) error {
+    if path == "" {
+        return ErrPathEmpty
+    }
+
+    // ensure path doesn't begin or end with /
+    if path[:1] == types.PathSeparator || path[len(path)-1:] == types.PathSeparator {
+        return ErrPathCantBeginOrEndWithSeparator
+    }
+
+    // ensure path is not too deep
+    if err := PathDepth(path, isSpace); err != nil {
+        return err
+    }
+
+    // ensure all segments of the path are valid uids
+    segments := strings.Split(path, types.PathSeparator)
+    for i, s := range segments {
+        if s == "" {
+            return ErrEmptyPathSegment
+        } else if err := uidCheck(s, i == 0); err != nil {
+            return fmt.Errorf("invalid segment '%s': %w", s, err)
+        }
+    }
+
+    return nil
+}
+
+// PathDepth checks the depth of the provided path.
+func PathDepth(path string, isSpace bool) error {
+    if IsPathTooDeep(path, isSpace) {
+        return ErrPathInvalidDepth
+    }
+
+    return nil
+}
+
+// IsPathTooDeep checks whether the provided path is too deep.
+// NOTE: A repository path can be one deeper than a space path (as otherwise the space would be useless).
+func IsPathTooDeep(path string, isSpace bool) bool {
+    l := strings.Count(path, types.PathSeparator) + 1
+    return (!isSpace && l > maxPathSegments) || (isSpace && l > maxPathSegmentsForSpace)
+}
diff --git a/types/check/service_account.go b/types/check/service_account.go
new file mode 100644
index 0000000000..0f4d416b45
--- /dev/null
+++ b/types/check/service_account.go
@@ -0,0 +1,43 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package check
+
+import (
+    "github.com/harness/gitness/types/enum"
+)
+
+var (
+    ErrServiceAccountParentTypeIsInvalid = &ValidationError{
+        "Provided parent type is invalid.",
+    }
+    ErrServiceAccountParentIDInvalid = &ValidationError{
+        "ParentID required - Global service accounts are not supported.",
+    }
+)
+
+// ServiceAccountParent verifies the remaining fields of a service account
+// that aren't inherited from principal.
+func ServiceAccountParent(parentType enum.ParentResourceType, parentID int64) error { + if parentType != enum.ParentResourceTypeRepo && parentType != enum.ParentResourceTypeSpace { + return ErrServiceAccountParentTypeIsInvalid + } + + // validate service account belongs to sth + if parentID <= 0 { + return ErrServiceAccountParentIDInvalid + } + + return nil +} diff --git a/types/check/token.go b/types/check/token.go new file mode 100644 index 0000000000..b409e66cbb --- /dev/null +++ b/types/check/token.go @@ -0,0 +1,50 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package check + +import ( + "time" +) + +const ( + minTokenLifeTime = 24 * time.Hour // 1 day + maxTokenLifeTime = 365 * 24 * time.Hour // 1 year +) + +var ( + ErrTokenLifeTimeOutOfBounds = &ValidationError{ + "The life time of a token has to be between 1 day and 365 days.", + } + ErrTokenLifeTimeRequired = &ValidationError{ + "The life time of a token is required.", + } +) + +// TokenLifetime returns true if the lifetime is valid for a token. +func TokenLifetime(lifetime *time.Duration, optional bool) error { + if lifetime == nil && !optional { + return ErrTokenLifeTimeRequired + } + + if lifetime == nil { + return nil + } + + if *lifetime < minTokenLifeTime || *lifetime > maxTokenLifeTime { + return ErrTokenLifeTimeOutOfBounds + } + + return nil +} diff --git a/types/check/wire.go b/types/check/wire.go new file mode 100644 index 0000000000..83f8ba1c65 --- /dev/null +++ b/types/check/wire.go @@ -0,0 +1,33 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package check + +import ( + "github.com/google/wire" +) + +// WireSet provides a wire set for this package. +var WireSet = wire.NewSet( + ProvidePrincipalUIDCheck, + ProvidePathUIDCheck, +) + +func ProvidePathUIDCheck() PathUID { + return PathUIDDefault +} + +func ProvidePrincipalUIDCheck() PrincipalUID { + return PrincipalUIDDefault +} diff --git a/types/code_comment.go b/types/code_comment.go new file mode 100644 index 0000000000..5ca429bae4 --- /dev/null +++ b/types/code_comment.go @@ -0,0 +1,34 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
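// A short sketch of the TokenLifetime rules above, assuming the import path
// "github.com/harness/gitness/types/check": a nil lifetime is only accepted
// when the caller marks it as optional.
package main

import (
	"fmt"
	"time"

	"github.com/harness/gitness/types/check"
)

func main() {
	week := 7 * 24 * time.Hour

	fmt.Println(check.TokenLifetime(&week, false)) // <nil>: between 1 day and 1 year
	fmt.Println(check.TokenLifetime(nil, true))    // <nil>: optional and not provided
	fmt.Println(check.TokenLifetime(nil, false))   // validation error: a lifetime is required
}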
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package types
+
+type CodeComment struct {
+    ID      int64 `db:"pullreq_activity_id"`
+    Version int64 `db:"pullreq_activity_version"`
+    Updated int64 `db:"pullreq_activity_updated"`
+
+    CodeCommentFields
+}
+
+type CodeCommentFields struct {
+    Outdated     bool   `db:"pullreq_activity_outdated" json:"outdated"`
+    MergeBaseSHA string `db:"pullreq_activity_code_comment_merge_base_sha" json:"merge_base_sha"`
+    SourceSHA    string `db:"pullreq_activity_code_comment_source_sha" json:"source_sha"`
+    Path         string `db:"pullreq_activity_code_comment_path" json:"path"`
+    LineNew      int    `db:"pullreq_activity_code_comment_line_new" json:"line_new"`
+    SpanNew      int    `db:"pullreq_activity_code_comment_span_new" json:"span_new"`
+    LineOld      int    `db:"pullreq_activity_code_comment_line_old" json:"line_old"`
+    SpanOld      int    `db:"pullreq_activity_code_comment_span_old" json:"span_old"`
+}
diff --git a/types/config.go b/types/config.go
new file mode 100644
index 0000000000..ef71da8f5f
--- /dev/null
+++ b/types/config.go
@@ -0,0 +1,249 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package types
+
+import (
+    "time"
+)
+
+// Config stores the system configuration.
+type Config struct {
+    // InstanceID specifies the ID of the gitness instance.
+    // NOTE: If the value is not provided, the hostname of the machine is used.
+    InstanceID string `envconfig:"GITNESS_INSTANCE_ID"`
+
+    Debug bool `envconfig:"GITNESS_DEBUG"`
+    Trace bool `envconfig:"GITNESS_TRACE"`
+
+    // GracefulShutdownTime defines the max time we wait when shutting down a server.
+    // 5min should be enough for most git clones to complete.
+    GracefulShutdownTime time.Duration `envconfig:"GITNESS_GRACEFUL_SHUTDOWN_TIME" default:"300s"`
+
+    UserSignupEnabled   bool `envconfig:"GITNESS_USER_SIGNUP_ENABLED" default:"true"`
+    NestedSpacesEnabled bool `envconfig:"GITNESS_NESTED_SPACES_ENABLED" default:"false"`
+
+    Profiler struct {
+        Type        string `envconfig:"GITNESS_PROFILER_TYPE"`
+        ServiceName string `envconfig:"GITNESS_PROFILER_SERVICE_NAME" default:"gitness"`
+    }
+
+    // URL defines the URLs via which the different parts of the service are reachable.
+    URL struct {
+        // Git defines the external URL via which the GIT API is reachable.
+        // NOTE: for routing to work properly, the request path & hostname reaching gitness
+        // have to satisfy at least one of the following two conditions:
+        // - Path ends with `/git`
+        // - Hostname matches Config.Server.HTTP.GitHost
+        // (this could be after proxy path / header rewrite).
+ Git string `envconfig:"GITNESS_URL_GIT" default:"http://localhost:3000/git"` + + // CIURL is the endpoint that can be used by running CI container builds to communicate + // with gitness (for example while performing a clone on a local repo). + // host.docker.internal allows a running container to talk to services exposed on the host + // (either running directly or via a port exposed in a docker container). + CIURL string `envconfig:"GITNESS_CI_URL_GIT" default:"http://host.docker.internal:3000/git"` + + // API defines the external URL via which the rest API is reachable. + // NOTE: for routing to work properly, the request path reaching gitness has to end with `/api` + // (this could be after proxy path rewrite). + API string `envconfig:"GITNESS_URL_API" default:"http://localhost:3000/api"` + + // APIInternal defines the internal URL via which the rest API is reachable. + // NOTE: for routing to work properly, the request path reaching gitness has to end with `/api` + // (this could be after proxy path rewrite). + APIInternal string `envconfig:"GITNESS_URL_API_INTERNAL" default:"http://localhost:3000/api"` + } + + // Git defines the git configuration parameters + Git struct { + DefaultBranch string `envconfig:"GITNESS_GIT_DEFAULTBRANCH" default:"main"` + } + + // Encrypter defines the parameters for the encrypter + Encrypter struct { + Secret string `envconfig:"GITNESS_ENCRYPTER_SECRET"` // key used for encryption + MixedContent bool `envconfig:"GITNESS_ENCRYPTER_MIXED_CONTENT"` + } + + // Server defines the server configuration parameters. + Server struct { + // HTTP defines the http configuration parameters + HTTP struct { + Bind string `envconfig:"GITNESS_HTTP_BIND" default:":3000"` + Proto string `envconfig:"GITNESS_HTTP_PROTO" default:"http"` + Host string `envconfig:"GITNESS_HTTP_HOST"` + // GitHost is the host used to identify git traffic (OPTIONAL). + GitHost string `envconfig:"GITNESS_HTTP_GIT_HOST" default:"git.localhost"` + } + + // Acme defines Acme configuration parameters. + Acme struct { + Enabled bool `envconfig:"GITNESS_ACME_ENABLED"` + Endpont string `envconfig:"GITNESS_ACME_ENDPOINT"` + Email bool `envconfig:"GITNESS_ACME_EMAIL"` + } + } + + // CI defines configuration related to build executions. + CI struct { + ParallelWorkers int `envconfig:"GITNESS_CI_PARALLEL_WORKERS" default:"2"` + // PluginsZipPath is a pointer to a zip containing all the plugins schemas. + // This could be a local path or an external location. + PluginsZipPath string `envconfig:"GITNESS_CI_PLUGINS_ZIP_PATH" default:"https://github.com/bradrydzewski/plugins/archive/refs/heads/master.zip"` + } + + // Database defines the database configuration parameters. + Database struct { + Driver string `envconfig:"GITNESS_DATABASE_DRIVER" default:"sqlite3"` + Datasource string `envconfig:"GITNESS_DATABASE_DATASOURCE" default:"database.sqlite3"` + } + + // Token defines token configuration parameters. + Token struct { + CookieName string `envconfig:"GITNESS_TOKEN_COOKIE_NAME" default:"token"` + Expire time.Duration `envconfig:"GITNESS_TOKEN_EXPIRE" default:"720h"` + } + + Logs struct { + // S3 provides optional storage option for logs. 
+ S3 struct { + Bucket string `envconfig:"GITNESS_LOGS_S3_BUCKET"` + Prefix string `envconfig:"GITNESS_LOGS_S3_PREFIX"` + Endpoint string `envconfig:"GITNESS_LOGS_S3_ENDPOINT"` + PathStyle bool `envconfig:"GITNESS_LOGS_S3_PATH_STYLE"` + } + } + + // Cors defines http cors parameters + Cors struct { + AllowedOrigins []string `envconfig:"GITNESS_CORS_ALLOWED_ORIGINS" default:"*"` + AllowedMethods []string `envconfig:"GITNESS_CORS_ALLOWED_METHODS" default:"GET,POST,PATCH,PUT,DELETE,OPTIONS"` + AllowedHeaders []string `envconfig:"GITNESS_CORS_ALLOWED_HEADERS" default:"Origin,Accept,Accept-Language,Authorization,Content-Type,Content-Language,X-Requested-With,X-Request-Id"` //nolint:lll // struct tags can't be multiline + ExposedHeaders []string `envconfig:"GITNESS_CORS_EXPOSED_HEADERS" default:"Link"` + AllowCredentials bool `envconfig:"GITNESS_CORS_ALLOW_CREDENTIALS" default:"true"` + MaxAge int `envconfig:"GITNESS_CORS_MAX_AGE" default:"300"` + } + + // Secure defines http security parameters. + Secure struct { + AllowedHosts []string `envconfig:"GITNESS_HTTP_ALLOWED_HOSTS"` + HostsProxyHeaders []string `envconfig:"GITNESS_HTTP_PROXY_HEADERS"` + SSLRedirect bool `envconfig:"GITNESS_HTTP_SSL_REDIRECT"` + SSLTemporaryRedirect bool `envconfig:"GITNESS_HTTP_SSL_TEMPORARY_REDIRECT"` + SSLHost string `envconfig:"GITNESS_HTTP_SSL_HOST"` + SSLProxyHeaders map[string]string `envconfig:"GITNESS_HTTP_SSL_PROXY_HEADERS"` + STSSeconds int64 `envconfig:"GITNESS_HTTP_STS_SECONDS"` + STSIncludeSubdomains bool `envconfig:"GITNESS_HTTP_STS_INCLUDE_SUBDOMAINS"` + STSPreload bool `envconfig:"GITNESS_HTTP_STS_PRELOAD"` + ForceSTSHeader bool `envconfig:"GITNESS_HTTP_STS_FORCE_HEADER"` + BrowserXSSFilter bool `envconfig:"GITNESS_HTTP_BROWSER_XSS_FILTER" default:"true"` + FrameDeny bool `envconfig:"GITNESS_HTTP_FRAME_DENY" default:"true"` + ContentTypeNosniff bool `envconfig:"GITNESS_HTTP_CONTENT_TYPE_NO_SNIFF"` + ContentSecurityPolicy string `envconfig:"GITNESS_HTTP_CONTENT_SECURITY_POLICY"` + ReferrerPolicy string `envconfig:"GITNESS_HTTP_REFERRER_POLICY"` + } + + Principal struct { + // System defines the principal information used to create the system service. + System struct { + UID string `envconfig:"GITNESS_PRINCIPAL_SYSTEM_UID" default:"gitness"` + DisplayName string `envconfig:"GITNESS_PRINCIPAL_SYSTEM_DISPLAY_NAME" default:"Gitness"` + Email string `envconfig:"GITNESS_PRINCIPAL_SYSTEM_EMAIL" default:"system@gitness.io"` + } + // Pipeline defines the principal information used to create the pipeline service. + Pipeline struct { + UID string `envconfig:"GITNESS_PRINCIPAL_PIPELINE_UID" default:"pipeline"` + DisplayName string `envconfig:"GITNESS_PRINCIPAL_PIPELINE_DISPLAY_NAME" default:"Gitness Pipeline"` + Email string `envconfig:"GITNESS_PRINCIPAL_PIPELINE_EMAIL" default:"pipeline@gitness.io"` + } + // Admin defines the principal information used to create the admin user. + // NOTE: The admin user is only auto-created in case a password is provided. 
+        Admin struct {
+            UID         string `envconfig:"GITNESS_PRINCIPAL_ADMIN_UID" default:"admin"`
+            DisplayName string `envconfig:"GITNESS_PRINCIPAL_ADMIN_DISPLAY_NAME" default:"Administrator"`
+            Email       string `envconfig:"GITNESS_PRINCIPAL_ADMIN_EMAIL" default:"admin@gitness.io"`
+            Password    string `envconfig:"GITNESS_PRINCIPAL_ADMIN_PASSWORD"` // No default password
+        }
+    }
+
+    Redis struct {
+        Endpoint           string `envconfig:"GITNESS_REDIS_ENDPOINT" default:"localhost:6379"`
+        MaxRetries         int    `envconfig:"GITNESS_REDIS_MAX_RETRIES" default:"3"`
+        MinIdleConnections int    `envconfig:"GITNESS_REDIS_MIN_IDLE_CONNECTIONS" default:"0"`
+        Password           string `envconfig:"GITNESS_REDIS_PASSWORD"`
+        SentinelMode       bool   `envconfig:"GITNESS_REDIS_USE_SENTINEL" default:"false"`
+        SentinelMaster     string `envconfig:"GITNESS_REDIS_SENTINEL_MASTER"`
+        SentinelEndpoint   string `envconfig:"GITNESS_REDIS_SENTINEL_ENDPOINT"`
+    }
+
+    Lock struct {
+        // Provider is the name of the distributed locking provider (e.g. redis, inmemory, file).
+        Provider      string        `envconfig:"GITNESS_LOCK_PROVIDER" default:"inmemory"`
+        Expiry        time.Duration `envconfig:"GITNESS_LOCK_EXPIRE" default:"8s"`
+        Tries         int           `envconfig:"GITNESS_LOCK_TRIES" default:"32"`
+        RetryDelay    time.Duration `envconfig:"GITNESS_LOCK_RETRY_DELAY" default:"250ms"`
+        DriftFactor   float64       `envconfig:"GITNESS_LOCK_DRIFT_FACTOR" default:"0.01"`
+        TimeoutFactor float64       `envconfig:"GITNESS_LOCK_TIMEOUT_FACTOR" default:"0.05"`
+        // AppNamespace is the service-wide prefix added to keys to avoid conflicts.
+        AppNamespace string `envconfig:"GITNESS_LOCK_APP_NAMESPACE" default:"gitness"`
+        // DefaultNamespace is used when a mutex doesn't specify a custom namespace for its keys.
+        DefaultNamespace string `envconfig:"GITNESS_LOCK_DEFAULT_NAMESPACE" default:"default"`
+    }
+
+    PubSub struct {
+        // Provider is the name of the pubsub provider (e.g. redis, inmemory).
+        Provider string `envconfig:"GITNESS_PUBSUB_PROVIDER" default:"inmemory"`
+        // AppNamespace is the service-wide prefix added to channels to avoid conflicts.
+        AppNamespace string `envconfig:"GITNESS_PUBSUB_APP_NAMESPACE" default:"gitness"`
+        // DefaultNamespace is used when a publisher or subscriber doesn't specify a custom namespace for its channels.
+        DefaultNamespace string        `envconfig:"GITNESS_PUBSUB_DEFAULT_NAMESPACE" default:"default"`
+        HealthInterval   time.Duration `envconfig:"GITNESS_PUBSUB_HEALTH_INTERVAL" default:"3s"`
+        SendTimeout      time.Duration `envconfig:"GITNESS_PUBSUB_SEND_TIMEOUT" default:"60s"`
+        ChannelSize      int           `envconfig:"GITNESS_PUBSUB_CHANNEL_SIZE" default:"100"`
+    }
+
+    BackgroundJobs struct {
+        // MaxRunning is the maximum number of jobs that can be running at once.
+        MaxRunning int `envconfig:"GITNESS_JOBS_MAX_RUNNING" default:"10"`
+
+        // PurgeFinishedOlderThan is the duration after which non-recurring,
+        // finished and failed jobs are purged from the DB.
+        PurgeFinishedOlderThan time.Duration `envconfig:"GITNESS_JOBS_PURGE_FINISHED_OLDER_THAN" default:"120h"`
+    }
+
+    Webhook struct {
+        // UserAgentIdentity specifies the identity used for the user agent header.
+        // IMPORTANT: do not include version.
+        UserAgentIdentity string `envconfig:"GITNESS_WEBHOOK_USER_AGENT_IDENTITY" default:"Gitness"`
+        // HeaderIdentity specifies the identity used for headers in webhook calls (e.g. X-Gitness-Trigger, ...).
+        // NOTE: If no value is provided, the UserAgentIdentity will be used.
+ HeaderIdentity string `envconfig:"GITNESS_WEBHOOK_HEADER_IDENTITY"` + Concurrency int `envconfig:"GITNESS_WEBHOOK_CONCURRENCY" default:"4"` + MaxRetries int `envconfig:"GITNESS_WEBHOOK_MAX_RETRIES" default:"3"` + AllowPrivateNetwork bool `envconfig:"GITNESS_WEBHOOK_ALLOW_PRIVATE_NETWORK" default:"false"` + AllowLoopback bool `envconfig:"GITNESS_WEBHOOK_ALLOW_LOOPBACK" default:"false"` + } + + Trigger struct { + Concurrency int `envconfig:"GITNESS_TRIGGER_CONCURRENCY" default:"4"` + MaxRetries int `envconfig:"GITNESS_TRIGGER_MAX_RETRIES" default:"3"` + } + + Metric struct { + Enabled bool `envconfig:"GITNESS_METRIC_ENABLED" default:"true"` + Endpoint string `envconfig:"GITNESS_METRIC_ENDPOINT" default:"https://stats.drone.ci/api/v1/gitness"` + Token string `envconfig:"GITNESS_METRIC_TOKEN"` + } +} diff --git a/types/config_test.go b/types/config_test.go new file mode 100644 index 0000000000..51e7fa36ea --- /dev/null +++ b/types/config_test.go @@ -0,0 +1,15 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package types diff --git a/types/connector.go b/types/connector.go new file mode 100644 index 0000000000..4325e32f38 --- /dev/null +++ b/types/connector.go @@ -0,0 +1,17 @@ +// Copyright 2023 Harness Inc. All rights reserved. +// Use of this source code is governed by the Polyform Free Trial License +// that can be found in the LICENSE.md file for this repository. + +package types + +type Connector struct { + ID int64 `db:"connector_id" json:"id"` + Description string `db:"connector_description" json:"description"` + SpaceID int64 `db:"connector_space_id" json:"space_id"` + UID string `db:"connector_uid" json:"uid"` + Type string `db:"connector_type" json:"type"` + Data string `db:"connector_data" json:"data"` + Created int64 `db:"connector_created" json:"created"` + Updated int64 `db:"connector_updated" json:"updated"` + Version int64 `db:"connector_version" json:"-"` +} diff --git a/types/enum/check.go b/types/enum/check.go new file mode 100644 index 0000000000..7185d346b5 --- /dev/null +++ b/types/enum/check.go @@ -0,0 +1,65 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package enum + +// CheckStatus defines status check status. 
+type CheckStatus string + +func (CheckStatus) Enum() []interface{} { return toInterfaceSlice(checkStatuses) } +func (s CheckStatus) Sanitize() (CheckStatus, bool) { return Sanitize(s, GetAllCheckStatuses) } +func GetAllCheckStatuses() ([]CheckStatus, CheckStatus) { return checkStatuses, "" } + +// CheckStatus enumeration. +const ( + CheckStatusPending CheckStatus = "pending" + CheckStatusRunning CheckStatus = "running" + CheckStatusSuccess CheckStatus = "success" + CheckStatusFailure CheckStatus = "failure" + CheckStatusError CheckStatus = "error" +) + +var checkStatuses = sortEnum([]CheckStatus{ + CheckStatusPending, + CheckStatusRunning, + CheckStatusSuccess, + CheckStatusFailure, + CheckStatusError, +}) + +// CheckPayloadKind defines status payload type. +type CheckPayloadKind string + +func (CheckPayloadKind) Enum() []interface{} { return toInterfaceSlice(checkPayloadTypes) } +func (s CheckPayloadKind) Sanitize() (CheckPayloadKind, bool) { + return Sanitize(s, GetAllCheckPayloadTypes) +} +func GetAllCheckPayloadTypes() ([]CheckPayloadKind, CheckPayloadKind) { + return checkPayloadTypes, CheckPayloadKindEmpty +} + +// CheckPayloadKind enumeration. +const ( + CheckPayloadKindEmpty CheckPayloadKind = "" + CheckPayloadKindRaw CheckPayloadKind = "raw" + CheckPayloadKindMarkdown CheckPayloadKind = "markdown" + CheckPayloadKindPipeline CheckPayloadKind = "pipeline" +) + +var checkPayloadTypes = sortEnum([]CheckPayloadKind{ + CheckPayloadKindEmpty, + CheckPayloadKindRaw, + CheckPayloadKindMarkdown, + CheckPayloadKindPipeline, +}) diff --git a/types/enum/ci_status.go b/types/enum/ci_status.go new file mode 100644 index 0000000000..6e5899c032 --- /dev/null +++ b/types/enum/ci_status.go @@ -0,0 +1,92 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Status types for CI. +package enum + +import ( + "strings" +) + +// CIStatus defines the different kinds of CI statuses for +// stages, steps and executions. +type CIStatus string + +const ( + CIStatusSkipped CIStatus = "skipped" + CIStatusBlocked CIStatus = "blocked" + CIStatusDeclined CIStatus = "declined" + CIStatusWaitingOnDeps CIStatus = "waiting_on_dependencies" + CIStatusPending CIStatus = "pending" + CIStatusRunning CIStatus = "running" + CIStatusSuccess CIStatus = "success" + CIStatusFailure CIStatus = "failure" + CIStatusKilled CIStatus = "killed" + CIStatusError CIStatus = "error" +) + +func (status CIStatus) ConvertToCheckStatus() CheckStatus { + if status == CIStatusPending || status == CIStatusWaitingOnDeps { + return CheckStatusPending + } + if status == CIStatusSuccess || status == CIStatusSkipped { + return CheckStatusSuccess + } + if status == CIStatusFailure { + return CheckStatusFailure + } + if status == CIStatusRunning { + return CheckStatusRunning + } + return CheckStatusError +} + +// ParseCIStatus converts the status from a string to typed enum. +// If the match is not exact, will just return default error status +// instead of explicitly returning not found error. 
+func ParseCIStatus(status string) CIStatus { + switch strings.ToLower(status) { + case "skipped", "blocked", "declined", "waiting_on_dependencies", "pending", "running", "success", "failure", "killed", "error": + return CIStatus(strings.ToLower(status)) + case "": // just in case status is not passed through + return CIStatusPending + default: + return CIStatusError + } +} + +// IsDone returns true if in a completed state. +func (status CIStatus) IsDone() bool { + switch status { + case CIStatusWaitingOnDeps, + CIStatusPending, + CIStatusRunning, + CIStatusBlocked: + return false + default: + return true + } +} + +// IsFailed returns true if in a failed state. +func (status CIStatus) IsFailed() bool { + switch status { + case CIStatusFailure, + CIStatusKilled, + CIStatusError: + return true + default: + return false + } +} diff --git a/types/enum/common.go b/types/enum/common.go new file mode 100644 index 0000000000..fe225082f7 --- /dev/null +++ b/types/enum/common.go @@ -0,0 +1,74 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package enum + +import ( + "golang.org/x/exp/constraints" + "golang.org/x/exp/slices" +) + +func Sanitize[E constraints.Ordered](element E, all func() ([]E, E)) (E, bool) { + allValues, defValue := all() + var empty E + if element == empty && defValue != empty { + return defValue, true + } + idx, exists := slices.BinarySearch(allValues, element) + if exists { + return allValues[idx], true + } + return defValue, false +} + +const ( + id = "id" + uid = "uid" + path = "path" + name = "name" + email = "email" + admin = "admin" + number = "number" + created = "created" + createdAt = "created_at" + createdBy = "created_by" + updated = "updated" + updatedAt = "updated_at" + updatedBy = "updated_by" + displayName = "display_name" + date = "date" + defaultString = "default" + undefined = "undefined" + system = "system" + comment = "comment" + code = "code" + asc = "asc" + ascending = "ascending" + desc = "desc" + descending = "descending" + value = "value" +) + +func toInterfaceSlice[T interface{}](vals []T) []interface{} { + res := make([]interface{}, len(vals)) + for i := range vals { + res[i] = vals[i] + } + return res +} + +func sortEnum[T constraints.Ordered](slice []T) []T { + slices.Sort(slice) + return slice +} diff --git a/types/enum/encoding.go b/types/enum/encoding.go new file mode 100644 index 0000000000..f7813ae73a --- /dev/null +++ b/types/enum/encoding.go @@ -0,0 +1,33 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
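// A minimal sketch of how a raw stage status string flows through ParseCIStatus
// and ConvertToCheckStatus above, assuming the import path
// "github.com/harness/gitness/types/enum".
package main

import (
	"fmt"

	"github.com/harness/gitness/types/enum"
)

func main() {
	status := enum.ParseCIStatus("RUNNING")    // case-insensitive -> CIStatusRunning
	fmt.Println(status.IsDone())               // false: execution still in progress
	fmt.Println(status.ConvertToCheckStatus()) // running

	unknown := enum.ParseCIStatus("exploded") // unrecognized -> CIStatusError
	fmt.Println(unknown.IsFailed())           // true
}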
+// See the License for the specific language governing permissions and +// limitations under the License. + +package enum + +// ContentEncodingType describes the encoding of content. +type ContentEncodingType string + +const ( + // ContentEncodingTypeUTF8 describes utf-8 encoded content. + ContentEncodingTypeUTF8 ContentEncodingType = "utf8" + + // ContentEncodingTypeBase64 describes base64 encoded content. + ContentEncodingTypeBase64 ContentEncodingType = "base64" +) + +func (ContentEncodingType) Enum() []interface{} { return toInterfaceSlice(contentEncodingTypes) } + +var contentEncodingTypes = sortEnum([]ContentEncodingType{ + ContentEncodingTypeUTF8, + ContentEncodingTypeBase64, +}) diff --git a/types/enum/git.go b/types/enum/git.go new file mode 100644 index 0000000000..a908980db7 --- /dev/null +++ b/types/enum/git.go @@ -0,0 +1,89 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package enum + +import "strings" + +// BranchSortOption specifies the available sort options for branches. +type BranchSortOption int + +const ( + BranchSortOptionDefault BranchSortOption = iota + BranchSortOptionName + BranchSortOptionDate +) + +// ParseBranchSortOption parses the branch sort option string +// and returns the equivalent enumeration. +func ParseBranchSortOption(s string) BranchSortOption { + switch strings.ToLower(s) { + case name: + return BranchSortOptionName + case date: + return BranchSortOptionDate + default: + return BranchSortOptionDefault + } +} + +// String returns a string representation of the branch sort option. +func (o BranchSortOption) String() string { + switch o { + case BranchSortOptionName: + return name + case BranchSortOptionDate: + return date + case BranchSortOptionDefault: + return defaultString + default: + return undefined + } +} + +// TagSortOption specifies the available sort options for tags. +type TagSortOption int + +const ( + TagSortOptionDefault TagSortOption = iota + TagSortOptionName + TagSortOptionDate +) + +// ParseTagSortOption parses the tag sort option string +// and returns the equivalent enumeration. +func ParseTagSortOption(s string) TagSortOption { + switch strings.ToLower(s) { + case name: + return TagSortOptionName + case date: + return TagSortOptionDate + default: + return TagSortOptionDefault + } +} + +// String returns a string representation of the tag sort option. +func (o TagSortOption) String() string { + switch o { + case TagSortOptionName: + return name + case TagSortOptionDate: + return date + case TagSortOptionDefault: + return defaultString + default: + return undefined + } +} diff --git a/types/enum/job.go b/types/enum/job.go new file mode 100644 index 0000000000..c91b9b93a0 --- /dev/null +++ b/types/enum/job.go @@ -0,0 +1,56 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
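// A brief sketch of the branch sort option round-trip above, assuming the import
// path "github.com/harness/gitness/types/enum": parsing is case-insensitive and
// unknown values fall back to the default option.
package main

import (
	"fmt"

	"github.com/harness/gitness/types/enum"
)

func main() {
	fmt.Println(enum.ParseBranchSortOption("Name").String())  // name
	fmt.Println(enum.ParseBranchSortOption("DATE").String())  // date
	fmt.Println(enum.ParseBranchSortOption("bogus").String()) // default
}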
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package enum + +// JobState represents state of a background job. +type JobState string + +// JobState enumeration. +const ( + JobStateScheduled JobState = "scheduled" + JobStateRunning JobState = "running" + JobStateFinished JobState = "finished" + JobStateFailed JobState = "failed" + JobStateCanceled JobState = "canceled" +) + +var jobStates = sortEnum([]JobState{ + JobStateScheduled, + JobStateRunning, + JobStateFinished, + JobStateFailed, + JobStateCanceled, +}) + +func (JobState) Enum() []interface{} { return toInterfaceSlice(jobStates) } +func (s JobState) Sanitize() (JobState, bool) { + return Sanitize(s, GetAllJobStates) +} +func GetAllJobStates() ([]JobState, JobState) { + return jobStates, "" +} + +// JobPriority represents priority of a background job. +type JobPriority int + +// JobPriority enumeration. +const ( + JobPriorityNormal JobPriority = 0 + JobPriorityElevated JobPriority = 1 +) + +func (s JobState) IsCompleted() bool { + return s == JobStateFinished || s == JobStateFailed || s == JobStateCanceled +} diff --git a/types/enum/membership.go b/types/enum/membership.go new file mode 100644 index 0000000000..ccca07799f --- /dev/null +++ b/types/enum/membership.go @@ -0,0 +1,113 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package enum + +import ( + "strings" +) + +// MembershipUserSort represents membership user sort order. +type MembershipUserSort string + +// MembershipUserSort enumeration. +const ( + MembershipUserSortName MembershipUserSort = name + MembershipUserSortCreated MembershipUserSort = created +) + +var membershipUserSorts = sortEnum([]MembershipUserSort{ + MembershipUserSortName, + MembershipUserSortCreated, +}) + +func (MembershipUserSort) Enum() []interface{} { return toInterfaceSlice(membershipUserSorts) } +func (s MembershipUserSort) Sanitize() (MembershipUserSort, bool) { + return Sanitize(s, GetAllMembershipUserSorts) +} +func GetAllMembershipUserSorts() ([]MembershipUserSort, MembershipUserSort) { + return membershipUserSorts, MembershipUserSortName +} + +// ParseMembershipUserSort parses the membership user sort attribute string +// and returns the equivalent enumeration. +func ParseMembershipUserSort(s string) MembershipUserSort { + switch strings.ToLower(s) { + case name: + return MembershipUserSortName + case created, createdAt: + return MembershipUserSortCreated + default: + return MembershipUserSortName + } +} + +// String returns the string representation of the attribute. 
+func (s MembershipUserSort) String() string { + switch s { + case MembershipUserSortName: + return name + case MembershipUserSortCreated: + return created + default: + return undefined + } +} + +// MembershipSpaceSort represents membership space sort order. +type MembershipSpaceSort string + +// MembershipSpaceSort enumeration. +const ( + MembershipSpaceSortUID MembershipSpaceSort = uid + MembershipSpaceSortCreated MembershipSpaceSort = created +) + +var membershipSpaceSorts = sortEnum([]MembershipSpaceSort{ + MembershipSpaceSortUID, + MembershipSpaceSortCreated, +}) + +func (MembershipSpaceSort) Enum() []interface{} { return toInterfaceSlice(membershipSpaceSorts) } +func (s MembershipSpaceSort) Sanitize() (MembershipSpaceSort, bool) { + return Sanitize(s, GetAllMembershipSpaceSorts) +} +func GetAllMembershipSpaceSorts() ([]MembershipSpaceSort, MembershipSpaceSort) { + return membershipSpaceSorts, MembershipSpaceSortUID +} + +// ParseMembershipSpaceSort parses the membership space sort attribute string +// and returns the equivalent enumeration. +func ParseMembershipSpaceSort(s string) MembershipSpaceSort { + switch strings.ToLower(s) { + case name: + return MembershipSpaceSortUID + case created, createdAt: + return MembershipSpaceSortCreated + default: + return MembershipSpaceSortUID + } +} + +// String returns the string representation of the attribute. +func (s MembershipSpaceSort) String() string { + switch s { + case MembershipSpaceSortUID: + return uid + case MembershipSpaceSortCreated: + return created + default: + return undefined + } +} diff --git a/types/enum/membership_role.go b/types/enum/membership_role.go new file mode 100644 index 0000000000..f2403e8c62 --- /dev/null +++ b/types/enum/membership_role.go @@ -0,0 +1,118 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package enum + +import "golang.org/x/exp/slices" + +// MembershipRole represents the different level of space memberships (permission set). 
+type MembershipRole string + +func (MembershipRole) Enum() []interface{} { return toInterfaceSlice(MembershipRoles) } +func (m MembershipRole) Sanitize() (MembershipRole, bool) { return Sanitize(m, GetAllMembershipRoles) } +func GetAllMembershipRoles() ([]MembershipRole, MembershipRole) { return MembershipRoles, "" } + +var MembershipRoles = sortEnum([]MembershipRole{ + MembershipRoleReader, + MembershipRoleExecutor, + MembershipRoleContributor, + MembershipRoleSpaceOwner, +}) + +var membershipRoleReaderPermissions = slices.Clip(slices.Insert([]Permission{}, 0, + PermissionRepoView, + PermissionSpaceView, + PermissionServiceAccountView, + PermissionPipelineView, + PermissionSecretView, + PermissionConnectorView, + PermissionTemplateView, +)) + +var membershipRoleExecutorPermissions = slices.Clip(slices.Insert(membershipRoleReaderPermissions, 0, + PermissionRepoReportCommitCheck, + PermissionPipelineExecute, + PermissionSecretAccess, + PermissionConnectorAccess, + PermissionTemplateAccess, +)) + +var membershipRoleContributorPermissions = slices.Clip(slices.Insert(membershipRoleReaderPermissions, 0, + PermissionRepoPush, +)) + +var membershipRoleSpaceOwnerPermissions = slices.Clip(slices.Insert(membershipRoleReaderPermissions, 0, + PermissionRepoEdit, + PermissionRepoDelete, + PermissionRepoPush, + PermissionRepoReportCommitCheck, + + PermissionSpaceEdit, + PermissionSpaceCreate, + PermissionSpaceDelete, + + PermissionServiceAccountCreate, + PermissionServiceAccountEdit, + PermissionServiceAccountDelete, + + PermissionPipelineEdit, + PermissionPipelineExecute, + PermissionPipelineDelete, + PermissionPipelineView, + + PermissionSecretAccess, + PermissionSecretDelete, + PermissionSecretEdit, + PermissionSecretView, + + PermissionConnectorAccess, + PermissionConnectorDelete, + PermissionConnectorEdit, + PermissionConnectorView, + + PermissionTemplateAccess, + PermissionTemplateDelete, + PermissionTemplateEdit, + PermissionTemplateView, +)) + +func init() { + slices.Sort(membershipRoleReaderPermissions) + slices.Sort(membershipRoleExecutorPermissions) + slices.Sort(membershipRoleContributorPermissions) + slices.Sort(membershipRoleSpaceOwnerPermissions) +} + +// Permissions returns the list of permissions for the role. +func (m MembershipRole) Permissions() []Permission { + switch m { + case MembershipRoleReader: + return membershipRoleReaderPermissions + case MembershipRoleExecutor: + return membershipRoleExecutorPermissions + case MembershipRoleContributor: + return membershipRoleContributorPermissions + case MembershipRoleSpaceOwner: + return membershipRoleSpaceOwnerPermissions + default: + return nil + } +} + +const ( + MembershipRoleReader MembershipRole = "reader" + MembershipRoleExecutor MembershipRole = "executor" + MembershipRoleContributor MembershipRole = "contributor" + MembershipRoleSpaceOwner MembershipRole = "space_owner" +) diff --git a/types/enum/order.go b/types/enum/order.go new file mode 100644 index 0000000000..4cf1ce8c5f --- /dev/null +++ b/types/enum/order.go @@ -0,0 +1,56 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
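// An illustrative sketch of consulting a role's permission set, assuming the
// import path "github.com/harness/gitness/types/enum". Permissions() returns a
// slice that is sorted in init() above, so a binary search suffices;
// hasPermission is a hypothetical helper, not an API of the package.
package main

import (
	"fmt"

	"golang.org/x/exp/slices"

	"github.com/harness/gitness/types/enum"
)

func hasPermission(role enum.MembershipRole, p enum.Permission) bool {
	_, found := slices.BinarySearch(role.Permissions(), p)
	return found
}

func main() {
	fmt.Println(hasPermission(enum.MembershipRoleReader, enum.PermissionRepoPush))      // false
	fmt.Println(hasPermission(enum.MembershipRoleContributor, enum.PermissionRepoPush)) // true
}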
+// See the License for the specific language governing permissions and +// limitations under the License. + +package enum + +import ( + "strings" +) + +// Order defines the sort order. +type Order int + +// Order enumeration. +const ( + OrderDefault Order = iota + OrderAsc + OrderDesc +) + +// String returns the Order as a string. +func (e Order) String() string { + switch e { + case OrderDesc: + return desc + case OrderAsc: + return asc + case OrderDefault: + return defaultString + default: + return undefined + } +} + +// ParseOrder parses the order string and returns +// an order enumeration. +func ParseOrder(s string) Order { + switch strings.ToLower(s) { + case asc, ascending: + return OrderAsc + case desc, descending: + return OrderDesc + default: + return OrderDefault + } +} diff --git a/types/enum/order_test.go b/types/enum/order_test.go new file mode 100644 index 0000000000..d1650af71c --- /dev/null +++ b/types/enum/order_test.go @@ -0,0 +1,44 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package enum + +import "testing" + +func TestParseOrder(t *testing.T) { + tests := []struct { + text string + want Order + }{ + {"asc", OrderAsc}, + {"Asc", OrderAsc}, + {"ASC", OrderAsc}, + {"ascending", OrderAsc}, + {"Ascending", OrderAsc}, + {"desc", OrderDesc}, + {"Desc", OrderDesc}, + {"DESC", OrderDesc}, + {"descending", OrderDesc}, + {"Descending", OrderDesc}, + {"", OrderDefault}, + {"invalid", OrderDefault}, + } + + for _, test := range tests { + got, want := ParseOrder(test.text), test.want + if got != want { + t.Errorf("Want order %q parsed as %q, got %q", test.text, want, got) + } + } +} diff --git a/types/enum/permission.go b/types/enum/permission.go new file mode 100644 index 0000000000..62b7715c5e --- /dev/null +++ b/types/enum/permission.go @@ -0,0 +1,126 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package enum + +// ResourceType represents the different types of resources that can be guarded with permissions. 
+type ResourceType string + +const ( + ResourceTypeSpace ResourceType = "SPACE" + ResourceTypeRepo ResourceType = "REPOSITORY" + ResourceTypeUser ResourceType = "USER" + ResourceTypeServiceAccount ResourceType = "SERVICEACCOUNT" + ResourceTypeService ResourceType = "SERVICE" + ResourceTypePipeline ResourceType = "PIPELINE" + ResourceTypeSecret ResourceType = "SECRET" + ResourceTypeConnector ResourceType = "CONNECTOR" + ResourceTypeTemplate ResourceType = "TEMPLATE" +) + +// Permission represents the different types of permissions a principal can have. +type Permission string + +const ( + /* + ----- SPACE ----- + */ + PermissionSpaceCreate Permission = "space_create" + PermissionSpaceView Permission = "space_view" + PermissionSpaceEdit Permission = "space_edit" + PermissionSpaceDelete Permission = "space_delete" +) + +const ( + /* + ----- REPOSITORY ----- + */ + PermissionRepoView Permission = "repo_view" + PermissionRepoEdit Permission = "repo_edit" + PermissionRepoDelete Permission = "repo_delete" + PermissionRepoPush Permission = "repo_push" + PermissionRepoReportCommitCheck Permission = "repo_reportCommitCheck" +) + +const ( + /* + ----- USER ----- + */ + PermissionUserCreate Permission = "user_create" + PermissionUserView Permission = "user_view" + PermissionUserEdit Permission = "user_edit" + PermissionUserDelete Permission = "user_delete" + PermissionUserEditAdmin Permission = "user_editAdmin" +) + +const ( + /* + ----- SERVICE ACCOUNT ----- + */ + PermissionServiceAccountCreate Permission = "serviceaccount_create" + PermissionServiceAccountView Permission = "serviceaccount_view" + PermissionServiceAccountEdit Permission = "serviceaccount_edit" + PermissionServiceAccountDelete Permission = "serviceaccount_delete" +) + +const ( + /* + ----- SERVICE ----- + */ + PermissionServiceCreate Permission = "service_create" + PermissionServiceView Permission = "service_view" + PermissionServiceEdit Permission = "service_edit" + PermissionServiceDelete Permission = "service_delete" + PermissionServiceEditAdmin Permission = "service_editAdmin" +) + +const ( + /* + ----- PIPELINE ----- + */ + PermissionPipelineView Permission = "pipeline_view" + PermissionPipelineEdit Permission = "pipeline_edit" + PermissionPipelineDelete Permission = "pipeline_delete" + PermissionPipelineExecute Permission = "pipeline_execute" +) + +const ( + /* + ----- SECRET ----- + */ + PermissionSecretView Permission = "secret_view" + PermissionSecretEdit Permission = "secret_edit" + PermissionSecretDelete Permission = "secret_delete" + PermissionSecretAccess Permission = "secret_access" +) + +const ( + /* + ----- CONNECTOR ----- + */ + PermissionConnectorView Permission = "connector_view" + PermissionConnectorEdit Permission = "connector_edit" + PermissionConnectorDelete Permission = "connector_delete" + PermissionConnectorAccess Permission = "connector_access" +) + +const ( + /* + ----- TEMPLATE ----- + */ + PermissionTemplateView Permission = "template_view" + PermissionTemplateEdit Permission = "template_edit" + PermissionTemplateDelete Permission = "template_delete" + PermissionTemplateAccess Permission = "template_access" +) diff --git a/types/enum/principal.go b/types/enum/principal.go new file mode 100644 index 0000000000..9b279ded19 --- /dev/null +++ b/types/enum/principal.go @@ -0,0 +1,37 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
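// An illustrative mapping from a guarded resource type to its "view" permission,
// using only the constants defined above; viewPermission is a hypothetical helper,
// not an API of the package, and the import path is assumed to be
// "github.com/harness/gitness/types/enum".
package main

import (
	"fmt"

	"github.com/harness/gitness/types/enum"
)

func viewPermission(rt enum.ResourceType) (enum.Permission, bool) {
	switch rt {
	case enum.ResourceTypeSpace:
		return enum.PermissionSpaceView, true
	case enum.ResourceTypeRepo:
		return enum.PermissionRepoView, true
	case enum.ResourceTypeUser:
		return enum.PermissionUserView, true
	case enum.ResourceTypeServiceAccount:
		return enum.PermissionServiceAccountView, true
	case enum.ResourceTypeService:
		return enum.PermissionServiceView, true
	case enum.ResourceTypePipeline:
		return enum.PermissionPipelineView, true
	case enum.ResourceTypeSecret:
		return enum.PermissionSecretView, true
	case enum.ResourceTypeConnector:
		return enum.PermissionConnectorView, true
	case enum.ResourceTypeTemplate:
		return enum.PermissionTemplateView, true
	default:
		return "", false
	}
}

func main() {
	p, ok := viewPermission(enum.ResourceTypeRepo)
	fmt.Println(p, ok) // repo_view true
}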
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package enum + +// PrincipalType defines the supported types of principals. +type PrincipalType string + +func (PrincipalType) Enum() []interface{} { return toInterfaceSlice(principalTypes) } +func (s PrincipalType) Sanitize() (PrincipalType, bool) { return Sanitize(s, GetAllPrincipalTypes) } +func GetAllPrincipalTypes() ([]PrincipalType, PrincipalType) { return principalTypes, "" } + +const ( + // PrincipalTypeUser represents a user. + PrincipalTypeUser PrincipalType = "user" + // PrincipalTypeServiceAccount represents a service account. + PrincipalTypeServiceAccount PrincipalType = "serviceaccount" + // PrincipalTypeService represents a service. + PrincipalTypeService PrincipalType = "service" +) + +var principalTypes = sortEnum([]PrincipalType{ + PrincipalTypeUser, + PrincipalTypeServiceAccount, + PrincipalTypeService, +}) diff --git a/types/enum/pullreq.go b/types/enum/pullreq.go new file mode 100644 index 0000000000..2169357d62 --- /dev/null +++ b/types/enum/pullreq.go @@ -0,0 +1,225 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package enum + +import ( + gitrpcenum "github.com/harness/gitness/gitrpc/enum" +) + +// PullReqState defines pull request state. +type PullReqState string + +func (PullReqState) Enum() []interface{} { return toInterfaceSlice(pullReqStates) } +func (s PullReqState) Sanitize() (PullReqState, bool) { return Sanitize(s, GetAllPullReqStates) } +func GetAllPullReqStates() ([]PullReqState, PullReqState) { return pullReqStates, "" } + +// PullReqState enumeration. +const ( + PullReqStateOpen PullReqState = "open" + PullReqStateMerged PullReqState = "merged" + PullReqStateClosed PullReqState = "closed" +) + +var pullReqStates = sortEnum([]PullReqState{ + PullReqStateOpen, + PullReqStateMerged, + PullReqStateClosed, +}) + +// PullReqSort defines pull request attribute that can be used for sorting. +type PullReqSort string + +func (PullReqSort) Enum() []interface{} { return toInterfaceSlice(pullReqSorts) } +func (s PullReqSort) Sanitize() (PullReqSort, bool) { return Sanitize(s, GetAllPullReqSorts) } +func GetAllPullReqSorts() ([]PullReqSort, PullReqSort) { return pullReqSorts, PullReqSortNumber } + +// PullReqSort enumeration. +const ( + PullReqSortNumber = "number" + PullReqSortCreated = "created" + PullReqSortEdited = "edited" + PullReqSortMerged = "merged" +) + +var pullReqSorts = sortEnum([]PullReqSort{ + PullReqSortNumber, + PullReqSortCreated, + PullReqSortEdited, + PullReqSortMerged, +}) + +// PullReqActivityType defines pull request activity message type. 
+// Essentially, the Type determines the structure of the pull request activity's Payload structure. +type PullReqActivityType string + +func (PullReqActivityType) Enum() []interface{} { return toInterfaceSlice(pullReqActivityTypes) } + +func (t PullReqActivityType) Sanitize() (PullReqActivityType, bool) { + return Sanitize(t, GetAllPullReqActivityTypes) +} + +func GetAllPullReqActivityTypes() ([]PullReqActivityType, PullReqActivityType) { + return pullReqActivityTypes, "" // No default value +} + +// PullReqActivityType enumeration. +const ( + PullReqActivityTypeComment PullReqActivityType = "comment" + PullReqActivityTypeCodeComment PullReqActivityType = "code-comment" + PullReqActivityTypeTitleChange PullReqActivityType = "title-change" + PullReqActivityTypeStateChange PullReqActivityType = "state-change" + PullReqActivityTypeReviewSubmit PullReqActivityType = "review-submit" + PullReqActivityTypeBranchUpdate PullReqActivityType = "branch-update" + PullReqActivityTypeBranchDelete PullReqActivityType = "branch-delete" + PullReqActivityTypeMerge PullReqActivityType = "merge" +) + +var pullReqActivityTypes = sortEnum([]PullReqActivityType{ + PullReqActivityTypeComment, + PullReqActivityTypeCodeComment, + PullReqActivityTypeTitleChange, + PullReqActivityTypeStateChange, + PullReqActivityTypeReviewSubmit, + PullReqActivityTypeBranchUpdate, + PullReqActivityTypeBranchDelete, + PullReqActivityTypeMerge, +}) + +// PullReqActivityKind defines kind of pull request activity system message. +// Kind defines the source of the pull request activity entry: +// Whether it's generated by the system, it's a user comment or a part of code review. +type PullReqActivityKind string + +func (PullReqActivityKind) Enum() []interface{} { return toInterfaceSlice(pullReqActivityKinds) } + +func (k PullReqActivityKind) Sanitize() (PullReqActivityKind, bool) { + return Sanitize(k, GetAllPullReqActivityKinds) +} + +func GetAllPullReqActivityKinds() ([]PullReqActivityKind, PullReqActivityKind) { + return pullReqActivityKinds, "" // No default value +} + +// PullReqActivityKind enumeration. +const ( + PullReqActivityKindSystem PullReqActivityKind = "system" + PullReqActivityKindComment PullReqActivityKind = "comment" + PullReqActivityKindChangeComment PullReqActivityKind = "change-comment" +) + +var pullReqActivityKinds = sortEnum([]PullReqActivityKind{ + PullReqActivityKindSystem, + PullReqActivityKindComment, + PullReqActivityKindChangeComment, +}) + +// PullReqCommentStatus defines status of a pull request comment. +type PullReqCommentStatus string + +func (PullReqCommentStatus) Enum() []interface{} { return toInterfaceSlice(pullReqCommentStatuses) } + +func (s PullReqCommentStatus) Sanitize() (PullReqCommentStatus, bool) { + return Sanitize(s, GetAllPullReqCommentStatuses) +} + +func GetAllPullReqCommentStatuses() ([]PullReqCommentStatus, PullReqCommentStatus) { + return pullReqCommentStatuses, "" // No default value +} + +// PullReqCommentStatus enumeration. +const ( + PullReqCommentStatusActive PullReqCommentStatus = "active" + PullReqCommentStatusResolved PullReqCommentStatus = "resolved" +) + +var pullReqCommentStatuses = sortEnum([]PullReqCommentStatus{ + PullReqCommentStatusActive, + PullReqCommentStatusResolved, +}) + +// PullReqReviewDecision defines state of a pull request review. 
+type PullReqReviewDecision string
+
+func (PullReqReviewDecision) Enum() []interface{} {
+	return toInterfaceSlice(pullReqReviewDecisions)
+}
+
+func (decision PullReqReviewDecision) Sanitize() (PullReqReviewDecision, bool) {
+	return Sanitize(decision, GetAllPullReqReviewDecisions)
+}
+
+func GetAllPullReqReviewDecisions() ([]PullReqReviewDecision, PullReqReviewDecision) {
+	return pullReqReviewDecisions, "" // No default value
+}
+
+// PullReqReviewDecision enumeration.
+const (
+	PullReqReviewDecisionPending PullReqReviewDecision = "pending"
+	PullReqReviewDecisionReviewed PullReqReviewDecision = "reviewed"
+	PullReqReviewDecisionApproved PullReqReviewDecision = "approved"
+	PullReqReviewDecisionChangeReq PullReqReviewDecision = "changereq"
+)
+
+var pullReqReviewDecisions = sortEnum([]PullReqReviewDecision{
+	PullReqReviewDecisionPending,
+	PullReqReviewDecisionReviewed,
+	PullReqReviewDecisionApproved,
+	PullReqReviewDecisionChangeReq,
+})
+
+// PullReqReviewerType defines type of a pull request reviewer.
+type PullReqReviewerType string
+
+func (PullReqReviewerType) Enum() []interface{} { return toInterfaceSlice(pullReqReviewerTypes) }
+
+func (reviewerType PullReqReviewerType) Sanitize() (PullReqReviewerType, bool) {
+	return Sanitize(reviewerType, GetAllPullReqReviewerTypes)
+}
+
+func GetAllPullReqReviewerTypes() ([]PullReqReviewerType, PullReqReviewerType) {
+	return pullReqReviewerTypes, "" // No default value
+}
+
+// PullReqReviewerType enumeration.
+const (
+	PullReqReviewerTypeRequested PullReqReviewerType = "requested"
+	PullReqReviewerTypeAssigned PullReqReviewerType = "assigned"
+	PullReqReviewerTypeSelfAssigned PullReqReviewerType = "self_assigned"
+)
+
+var pullReqReviewerTypes = sortEnum([]PullReqReviewerType{
+	PullReqReviewerTypeRequested,
+	PullReqReviewerTypeAssigned,
+	PullReqReviewerTypeSelfAssigned,
+})
+
+type MergeMethod gitrpcenum.MergeMethod
+
+func (MergeMethod) Enum() []interface{} { return toInterfaceSlice(gitrpcenum.MergeMethods) }
+func (m MergeMethod) Sanitize() (MergeMethod, bool) {
+	s, ok := gitrpcenum.MergeMethod(m).Sanitize()
+	return MergeMethod(s), ok
+}
+
+type MergeCheckStatus string
+
+const (
+	// MergeCheckStatusUnchecked merge status has not been checked.
+	MergeCheckStatusUnchecked MergeCheckStatus = "unchecked"
+	// MergeCheckStatusConflict the branch can't be merged into the target branch due to a potential conflict.
+	MergeCheckStatusConflict MergeCheckStatus = "conflict"
+	// MergeCheckStatusMergeable the branch can be merged cleanly into the target branch.
+	MergeCheckStatusMergeable MergeCheckStatus = "mergeable"
+)
diff --git a/types/enum/repo.go b/types/enum/repo.go
new file mode 100644
index 0000000000..33934342b8
--- /dev/null
+++ b/types/enum/repo.go
@@ -0,0 +1,66 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package enum
+
+import (
+	"strings"
+)
+
+// RepoAttr defines repo attributes that can be used for sorting and filtering.
+type RepoAttr int
+
+// Order enumeration.
+const ( + RepoAttrNone RepoAttr = iota + RepoAttrPath + RepoAttrUID + RepoAttrCreated + RepoAttrUpdated +) + +// ParseRepoAtrr parses the repo attribute string +// and returns the equivalent enumeration. +func ParseRepoAtrr(s string) RepoAttr { + switch strings.ToLower(s) { + case uid: + return RepoAttrUID + case path: + return RepoAttrPath + case created, createdAt: + return RepoAttrCreated + case updated, updatedAt: + return RepoAttrUpdated + default: + return RepoAttrNone + } +} + +// String returns the string representation of the attribute. +func (a RepoAttr) String() string { + switch a { + case RepoAttrPath: + return path + case RepoAttrUID: + return uid + case RepoAttrCreated: + return created + case RepoAttrUpdated: + return updated + case RepoAttrNone: + return "" + default: + return undefined + } +} diff --git a/types/enum/resource.go b/types/enum/resource.go new file mode 100644 index 0000000000..6620c8b36b --- /dev/null +++ b/types/enum/resource.go @@ -0,0 +1,34 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package enum + +// ParentResourceType defines the different types of parent resources. +type ParentResourceType string + +func (ParentResourceType) Enum() []interface{} { + return toInterfaceSlice(GetAllParentResourceTypes()) +} + +var ( + ParentResourceTypeSpace ParentResourceType = "space" + ParentResourceTypeRepo ParentResourceType = "repo" +) + +func GetAllParentResourceTypes() []ParentResourceType { + return []ParentResourceType{ + ParentResourceTypeSpace, + ParentResourceTypeRepo, + } +} diff --git a/types/enum/scm.go b/types/enum/scm.go new file mode 100644 index 0000000000..7fb753136e --- /dev/null +++ b/types/enum/scm.go @@ -0,0 +1,34 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package enum + +// ScmType defines the different SCM types supported for CI. +type ScmType string + +func (ScmType) Enum() []interface{} { return toInterfaceSlice(scmTypes) } + +var scmTypes = ([]ScmType{ + ScmTypeGitness, + ScmTypeGithub, + ScmTypeGitlab, + ScmTypeUnknown, +}) + +const ( + ScmTypeUnknown ScmType = "UNKNOWN" + ScmTypeGitness ScmType = "GITNESS" + ScmTypeGithub ScmType = "GITHUB" + ScmTypeGitlab ScmType = "GITLAB" +) diff --git a/types/enum/space.go b/types/enum/space.go new file mode 100644 index 0000000000..668116e0b6 --- /dev/null +++ b/types/enum/space.go @@ -0,0 +1,59 @@ +// Copyright 2023 Harness, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package enum + +import "strings" + +// SpaceAttr defines space attributes that can be used for sorting and filtering. +type SpaceAttr int + +// Order enumeration. +const ( + SpaceAttrNone SpaceAttr = iota + SpaceAttrUID + SpaceAttrCreated + SpaceAttrUpdated +) + +// ParseSpaceAttr parses the space attribute string +// and returns the equivalent enumeration. +func ParseSpaceAttr(s string) SpaceAttr { + switch strings.ToLower(s) { + case uid: + return SpaceAttrUID + case created, createdAt: + return SpaceAttrCreated + case updated, updatedAt: + return SpaceAttrUpdated + default: + return SpaceAttrNone + } +} + +// String returns the string representation of the attribute. +func (a SpaceAttr) String() string { + switch a { + case SpaceAttrUID: + return uid + case SpaceAttrCreated: + return created + case SpaceAttrUpdated: + return updated + case SpaceAttrNone: + return "" + default: + return undefined + } +} diff --git a/types/enum/sse.go b/types/enum/sse.go new file mode 100644 index 0000000000..ded73c23a1 --- /dev/null +++ b/types/enum/sse.go @@ -0,0 +1,31 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Enums for event types delivered to the event stream for the UI +package enum + +// SSEType defines the kind of server sent event +type SSEType string + +const ( + SSETypeExecutionUpdated = "execution_updated" + SSETypeExecutionRunning = "execution_running" + SSETypeExecutionCompleted = "execution_completed" + SSETypeExecutionCanceled = "execution_canceled" + + SSETypeRepositoryImportCompleted = "repository_import_completed" + SSETypeRepositoryExportCompleted = "repository_export_completed" + + SSETypePullrequesUpdated = "pullreq_updated" +) diff --git a/types/enum/token.go b/types/enum/token.go new file mode 100644 index 0000000000..f005218aff --- /dev/null +++ b/types/enum/token.go @@ -0,0 +1,29 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package enum
+
+// TokenType represents the type of the JWT token.
+type TokenType string
+
+const (
+	// TokenTypeSession is the token returned during user login or signup.
+	TokenTypeSession TokenType = "session"
+
+	// TokenTypePAT is a personal access token.
+	TokenTypePAT TokenType = "pat"
+
+	// TokenTypeSAT is a service account access token.
+	TokenTypeSAT TokenType = "sat"
+)
diff --git a/types/enum/trigger_actions.go b/types/enum/trigger_actions.go
new file mode 100644
index 0000000000..2ab492275a
--- /dev/null
+++ b/types/enum/trigger_actions.go
@@ -0,0 +1,60 @@
+package enum
+
+// TriggerAction defines the different actions triggers will fire on.
+type TriggerAction string
+
+// These are similar to enums defined in webhook enum but can diverge
+// as these are different entities.
+const (
+	// TriggerActionBranchCreated gets triggered when a branch gets created.
+	TriggerActionBranchCreated TriggerAction = "branch_created"
+	// TriggerActionBranchUpdated gets triggered when a branch gets updated.
+	TriggerActionBranchUpdated TriggerAction = "branch_updated"
+
+	// TriggerActionTagCreated gets triggered when a tag gets created.
+	TriggerActionTagCreated TriggerAction = "tag_created"
+	// TriggerActionTagUpdated gets triggered when a tag gets updated.
+	TriggerActionTagUpdated TriggerAction = "tag_updated"
+
+	// TriggerActionPullReqCreated gets triggered when a pull request gets created.
+	TriggerActionPullReqCreated TriggerAction = "pullreq_created"
+	// TriggerActionPullReqReopened gets triggered when a pull request gets reopened.
+	TriggerActionPullReqReopened TriggerAction = "pullreq_reopened"
+	// TriggerActionPullReqBranchUpdated gets triggered when a pull request source branch gets updated.
+	TriggerActionPullReqBranchUpdated TriggerAction = "pullreq_branch_updated"
+)
+
+func (TriggerAction) Enum() []interface{} { return toInterfaceSlice(triggerActions) }
+func (s TriggerAction) Sanitize() (TriggerAction, bool) { return Sanitize(s, GetAllTriggerActions) }
+func (t TriggerAction) GetTriggerEvent() TriggerEvent {
+	if t == TriggerActionPullReqCreated ||
+		t == TriggerActionPullReqBranchUpdated ||
+		t == TriggerActionPullReqReopened {
+		return TriggerEventPullRequest
+	} else if t == TriggerActionTagCreated || t == TriggerActionTagUpdated {
+		return TriggerEventTag
+	} else if t == "" {
+		return TriggerEventManual
+	}
+	return TriggerEventPush
+}
+
+func GetAllTriggerActions() ([]TriggerAction, TriggerAction) {
+	return triggerActions, "" // No default value
+}
+
+var triggerActions = sortEnum([]TriggerAction{
+	TriggerActionBranchCreated,
+	TriggerActionBranchUpdated,
+	TriggerActionTagCreated,
+	TriggerActionTagUpdated,
+	TriggerActionPullReqCreated,
+	TriggerActionPullReqReopened,
+	TriggerActionPullReqBranchUpdated,
+})
+
+// Trigger types
+const (
+	TriggerHook = "@hook"
+	TriggerCron = "@cron"
+)
diff --git a/types/enum/trigger_events.go b/types/enum/trigger_events.go
new file mode 100644
index 0000000000..1b921cec3c
--- /dev/null
+++ b/types/enum/trigger_events.go
@@ -0,0 +1,27 @@
+// Copyright 2023 Harness, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package enum + +// TriggerEvent defines the different kinds of events in triggers. +type TriggerEvent string + +// Hook event constants. +const ( + TriggerEventCron = "cron" + TriggerEventManual = "manual" + TriggerEventPush = "push" + TriggerEventPullRequest = "pull_request" + TriggerEventTag = "tag" +) diff --git a/types/enum/user.go b/types/enum/user.go new file mode 100644 index 0000000000..486ce2cc6c --- /dev/null +++ b/types/enum/user.go @@ -0,0 +1,53 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package enum + +import "strings" + +// UserAttr defines user attributes that can be +// used for sorting and filtering. +type UserAttr int + +// Order enumeration. +const ( + UserAttrNone UserAttr = iota + UserAttrUID + UserAttrName + UserAttrEmail + UserAttrAdmin + UserAttrCreated + UserAttrUpdated +) + +// ParseUserAttr parses the user attribute string +// and returns the equivalent enumeration. +func ParseUserAttr(s string) UserAttr { + switch strings.ToLower(s) { + case uid: + return UserAttrUID + case name: + return UserAttrName + case email: + return UserAttrEmail + case admin: + return UserAttrAdmin + case created, createdAt: + return UserAttrCreated + case updated, updatedAt: + return UserAttrUpdated + default: + return UserAttrNone + } +} diff --git a/types/enum/user_test.go b/types/enum/user_test.go new file mode 100644 index 0000000000..713d7111c1 --- /dev/null +++ b/types/enum/user_test.go @@ -0,0 +1,40 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
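
// Editor's note: illustrative usage sketch, not part of this patch. It shows how
// TriggerAction.GetTriggerEvent (types/enum/trigger_actions.go above) maps a
// concrete trigger action onto the coarser TriggerEvent used for pipeline
// trigger filtering, including the fallbacks to "manual" for an empty action
// and "push" for branch actions.

package enum_test

import (
	"fmt"

	"github.com/harness/gitness/types/enum"
)

func ExampleTriggerAction_GetTriggerEvent() {
	fmt.Println(enum.TriggerActionPullReqCreated.GetTriggerEvent()) // pull_request
	fmt.Println(enum.TriggerActionTagUpdated.GetTriggerEvent())     // tag
	fmt.Println(enum.TriggerActionBranchCreated.GetTriggerEvent())  // push
	fmt.Println(enum.TriggerAction("").GetTriggerEvent())           // manual
}
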
+ +package enum + +import "testing" + +func TestParseUserAttr(t *testing.T) { + tests := []struct { + text string + want UserAttr + }{ + {"uid", UserAttrUID}, + {"name", UserAttrName}, + {"email", UserAttrEmail}, + {"created", UserAttrCreated}, + {"updated", UserAttrUpdated}, + {"admin", UserAttrAdmin}, + {"", UserAttrNone}, + {"invalid", UserAttrNone}, + } + + for _, test := range tests { + got, want := ParseUserAttr(test.text), test.want + if got != want { + t.Errorf("Want user attribute %q parsed as %q, got %q", test.text, want, got) + } + } +} diff --git a/types/enum/webhook.go b/types/enum/webhook.go new file mode 100644 index 0000000000..43e30e7b25 --- /dev/null +++ b/types/enum/webhook.go @@ -0,0 +1,148 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package enum + +import "strings" + +// WebhookAttr defines webhook attributes that can be used for sorting and filtering. +type WebhookAttr int + +const ( + WebhookAttrNone WebhookAttr = iota + WebhookAttrID + WebhookAttrDisplayName + WebhookAttrCreated + WebhookAttrUpdated +) + +// ParseWebhookAttr parses the webhook attribute string +// and returns the equivalent enumeration. +func ParseWebhookAttr(s string) WebhookAttr { + switch strings.ToLower(s) { + case id: + return WebhookAttrID + case displayName: + return WebhookAttrDisplayName + case created, createdAt: + return WebhookAttrCreated + case updated, updatedAt: + return WebhookAttrUpdated + default: + return WebhookAttrNone + } +} + +// String returns the string representation of the attribute. +func (a WebhookAttr) String() string { + switch a { + case WebhookAttrID: + return id + case WebhookAttrDisplayName: + return displayName + case WebhookAttrCreated: + return created + case WebhookAttrUpdated: + return updated + case WebhookAttrNone: + return "" + default: + return undefined + } +} + +// WebhookParent defines different types of parents of a webhook. +type WebhookParent string + +func (WebhookParent) Enum() []interface{} { return toInterfaceSlice(webhookParents) } + +const ( + // WebhookParentRepo describes a repo as webhook owner. + WebhookParentRepo WebhookParent = "repo" + + // WebhookParentSpace describes a space as webhook owner. + WebhookParentSpace WebhookParent = "space" +) + +var webhookParents = sortEnum([]WebhookParent{ + WebhookParentRepo, + WebhookParentSpace, +}) + +// WebhookExecutionResult defines the different results of a webhook execution. +type WebhookExecutionResult string + +func (WebhookExecutionResult) Enum() []interface{} { return toInterfaceSlice(webhookExecutionResults) } + +const ( + // WebhookExecutionResultSuccess describes a webhook execution result that succeeded. + WebhookExecutionResultSuccess WebhookExecutionResult = "success" + + // WebhookExecutionResultRetriableError describes a webhook execution result that failed with a retriable error. 
+ WebhookExecutionResultRetriableError WebhookExecutionResult = "retriable_error" + + // WebhookExecutionResultFatalError describes a webhook execution result that failed with an unrecoverable error. + WebhookExecutionResultFatalError WebhookExecutionResult = "fatal_error" +) + +var webhookExecutionResults = sortEnum([]WebhookExecutionResult{ + WebhookExecutionResultSuccess, + WebhookExecutionResultRetriableError, + WebhookExecutionResultFatalError, +}) + +// WebhookTrigger defines the different types of webhook triggers available. +type WebhookTrigger string + +func (WebhookTrigger) Enum() []interface{} { return toInterfaceSlice(webhookTriggers) } +func (s WebhookTrigger) Sanitize() (WebhookTrigger, bool) { return Sanitize(s, GetAllWebhookTriggers) } + +func GetAllWebhookTriggers() ([]WebhookTrigger, WebhookTrigger) { + return webhookTriggers, "" // No default value +} + +const ( + // WebhookTriggerBranchCreated gets triggered when a branch gets created. + WebhookTriggerBranchCreated WebhookTrigger = "branch_created" + // WebhookTriggerBranchUpdated gets triggered when a branch gets updated. + WebhookTriggerBranchUpdated WebhookTrigger = "branch_updated" + // WebhookTriggerBranchDeleted gets triggered when a branch gets deleted. + WebhookTriggerBranchDeleted WebhookTrigger = "branch_deleted" + + // WebhookTriggerTagCreated gets triggered when a tag gets created. + WebhookTriggerTagCreated WebhookTrigger = "tag_created" + // WebhookTriggerTagUpdated gets triggered when a tag gets updated. + WebhookTriggerTagUpdated WebhookTrigger = "tag_updated" + // WebhookTriggerTagDeleted gets triggered when a tag gets deleted. + WebhookTriggerTagDeleted WebhookTrigger = "tag_deleted" + + // WebhookTriggerPullReqCreated gets triggered when a pull request gets created. + WebhookTriggerPullReqCreated WebhookTrigger = "pullreq_created" + // WebhookTriggerPullReqReopened gets triggered when a pull request gets reopened. + WebhookTriggerPullReqReopened WebhookTrigger = "pullreq_reopened" + // WebhookTriggerPullReqBranchUpdated gets triggered when a pull request source branch gets updated. + WebhookTriggerPullReqBranchUpdated WebhookTrigger = "pullreq_branch_updated" +) + +var webhookTriggers = sortEnum([]WebhookTrigger{ + WebhookTriggerBranchCreated, + WebhookTriggerBranchUpdated, + WebhookTriggerBranchDeleted, + WebhookTriggerTagCreated, + WebhookTriggerTagUpdated, + WebhookTriggerTagDeleted, + WebhookTriggerPullReqCreated, + WebhookTriggerPullReqReopened, + WebhookTriggerPullReqBranchUpdated, +}) diff --git a/types/execution.go b/types/execution.go new file mode 100644 index 0000000000..3a32587da8 --- /dev/null +++ b/types/execution.go @@ -0,0 +1,58 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package types + +import "github.com/harness/gitness/types/enum" + +// Execution represents an instance of a pipeline execution. 
+type Execution struct { + ID int64 `json:"-"` + PipelineID int64 `json:"pipeline_id"` + CreatedBy int64 `json:"created_by"` + RepoID int64 `json:"repo_id"` + Trigger string `json:"trigger,omitempty"` + Number int64 `json:"number"` + Parent int64 `json:"parent,omitempty"` + Status enum.CIStatus `json:"status"` + Error string `json:"error,omitempty"` + Event string `json:"event,omitempty"` + Action string `json:"action,omitempty"` + Link string `json:"link,omitempty"` + Timestamp int64 `json:"timestamp,omitempty"` + Title string `json:"title,omitempty"` + Message string `json:"message,omitempty"` + Before string `json:"before,omitempty"` + After string `json:"after,omitempty"` + Ref string `json:"ref,omitempty"` + Fork string `json:"source_repo,omitempty"` + Source string `json:"source,omitempty"` + Target string `json:"target,omitempty"` + Author string `json:"author_login,omitempty"` + AuthorName string `json:"author_name,omitempty"` + AuthorEmail string `json:"author_email,omitempty"` + AuthorAvatar string `json:"author_avatar,omitempty"` + Sender string `json:"sender,omitempty"` + Params map[string]string `json:"params,omitempty"` + Cron string `json:"cron,omitempty"` + Deploy string `json:"deploy_to,omitempty"` + DeployID int64 `json:"deploy_id,omitempty"` + Debug bool `json:"debug,omitempty"` + Started int64 `json:"started,omitempty"` + Finished int64 `json:"finished,omitempty"` + Created int64 `json:"created"` + Updated int64 `json:"updated"` + Version int64 `json:"-"` + Stages []*Stage `json:"stages,omitempty"` +} diff --git a/types/git.go b/types/git.go new file mode 100644 index 0000000000..58fa214fd3 --- /dev/null +++ b/types/git.go @@ -0,0 +1,88 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package types + +import ( + "time" + + "github.com/harness/gitness/types/enum" +) + +const NilSHA = "0000000000000000000000000000000000000000" + +// PaginationFilter stores pagination query parameters. +type PaginationFilter struct { + Page int `json:"page"` + Limit int `json:"limit"` +} + +// CommitFilter stores commit query parameters. +type CommitFilter struct { + PaginationFilter + After string `json:"after"` + Path string `json:"path"` + Since int64 `json:"since"` + Until int64 `json:"until"` + Committer string `json:"committer"` +} + +// BranchFilter stores branch query parameters. +type BranchFilter struct { + Query string `json:"query"` + Sort enum.BranchSortOption `json:"sort"` + Order enum.Order `json:"order"` + Page int `json:"page"` + Size int `json:"size"` +} + +// TagFilter stores commit tag query parameters. 
+type TagFilter struct { + Query string `json:"query"` + Sort enum.TagSortOption `json:"sort"` + Order enum.Order `json:"order"` + Page int `json:"page"` + Size int `json:"size"` +} + +type Commit struct { + SHA string `json:"sha"` + Title string `json:"title"` + Message string `json:"message"` + Author Signature `json:"author"` + Committer Signature `json:"committer"` +} + +type Signature struct { + Identity Identity `json:"identity"` + When time.Time `json:"when"` +} + +type Identity struct { + Name string `json:"name"` + Email string `json:"email"` +} + +type RenameDetails struct { + OldPath string `json:"old_path"` + NewPath string `json:"new_path"` + CommitShaBefore string `json:"commit_sha_before"` + CommitShaAfter string `json:"commit_sha_after"` +} + +type ListCommitResponse struct { + Commits []Commit `json:"commits"` + RenameDetails []RenameDetails `json:"rename_details"` + TotalCommits int `json:"total_commits,omitempty"` +} diff --git a/types/githook.go b/types/githook.go new file mode 100644 index 0000000000..6475f975fa --- /dev/null +++ b/types/githook.go @@ -0,0 +1,51 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package types + +import ( + "errors" +) + +// GithookPayload defines the GithookPayload the githook binary is initiated with when executing the git hooks. +type GithookPayload struct { + BaseURL string + RepoID int64 + PrincipalID int64 + RequestID string + Disabled bool +} + +func (p *GithookPayload) Validate() error { + if p == nil { + return errors.New("payload is empty") + } + + // skip further validation if githook is disabled + if p.Disabled { + return nil + } + + if p.BaseURL == "" { + return errors.New("payload doesn't contain a base url") + } + if p.PrincipalID <= 0 { + return errors.New("payload doesn't contain a principal id") + } + if p.RepoID <= 0 { + return errors.New("payload doesn't contain a repo id") + } + + return nil +} diff --git a/types/job.go b/types/job.go new file mode 100644 index 0000000000..ace7ab4ef4 --- /dev/null +++ b/types/job.go @@ -0,0 +1,56 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
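
// Editor's note: illustrative usage sketch, not part of this patch. It exercises
// GithookPayload.Validate (types/githook.go above): validation is skipped
// entirely when the hook is disabled, while an enabled hook must carry a base
// URL, a principal ID and a repo ID. The base URL value is a placeholder.

package types_test

import (
	"fmt"

	"github.com/harness/gitness/types"
)

func ExampleGithookPayload_Validate() {
	disabled := &types.GithookPayload{Disabled: true}
	fmt.Println(disabled.Validate()) // <nil>

	incomplete := &types.GithookPayload{BaseURL: "http://localhost:3000"}
	fmt.Println(incomplete.Validate()) // payload doesn't contain a principal id
}
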
+ +package types + +import "github.com/harness/gitness/types/enum" + +type Job struct { + UID string `db:"job_uid"` + Created int64 `db:"job_created"` + Updated int64 `db:"job_updated"` + Type string `db:"job_type"` + Priority enum.JobPriority `db:"job_priority"` + Data string `db:"job_data"` + Result string `db:"job_result"` + MaxDurationSeconds int `db:"job_max_duration_seconds"` + MaxRetries int `db:"job_max_retries"` + State enum.JobState `db:"job_state"` + Scheduled int64 `db:"job_scheduled"` + TotalExecutions int `db:"job_total_executions"` + RunBy string `db:"job_run_by"` + RunDeadline int64 `db:"job_run_deadline"` + RunProgress int `db:"job_run_progress"` + LastExecuted int64 `db:"job_last_executed"` + IsRecurring bool `db:"job_is_recurring"` + RecurringCron string `db:"job_recurring_cron"` + ConsecutiveFailures int `db:"job_consecutive_failures"` + LastFailureError string `db:"job_last_failure_error"` + GroupID string `db:"job_group_id"` +} + +type JobStateChange struct { + UID string `json:"uid"` + State enum.JobState `json:"state"` + Progress int `json:"progress"` + Result string `json:"result"` + Failure string `json:"failure"` +} + +type JobProgress struct { + State enum.JobState `json:"state"` + Progress int `json:"progress"` + Result string `json:"result,omitempty"` + Failure string `json:"failure,omitempty"` +} diff --git a/types/list_filters.go b/types/list_filters.go new file mode 100644 index 0000000000..f9a5392f8d --- /dev/null +++ b/types/list_filters.go @@ -0,0 +1,21 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package types + +// ListQueryFilter has pagination related info and a query param. +type ListQueryFilter struct { + Pagination + Query string `json:"query"` +} diff --git a/types/membership.go b/types/membership.go new file mode 100644 index 0000000000..2fc8598dc9 --- /dev/null +++ b/types/membership.go @@ -0,0 +1,64 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package types + +import ( + "github.com/harness/gitness/types/enum" +) + +// MembershipKey can be used as a key for finding a user's space membership info. +type MembershipKey struct { + SpaceID int64 + PrincipalID int64 +} + +// Membership represents a user's membership of a space. 
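
// Editor's note: illustrative usage sketch, not part of this patch. Because
// MembershipKey (types/membership.go above) is a small comparable struct, it can
// be used directly as a map key, e.g. when caching membership lookups. The IDs
// and timestamp below are arbitrary example values.

package types_test

import (
	"fmt"

	"github.com/harness/gitness/types"
)

func ExampleMembershipKey() {
	cache := map[types.MembershipKey]*types.Membership{}

	key := types.MembershipKey{SpaceID: 1, PrincipalID: 42}
	cache[key] = &types.Membership{MembershipKey: key, Created: 1695310000000}

	found, ok := cache[key]
	fmt.Println(ok, found.Created) // true 1695310000000
}
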
+type Membership struct { + MembershipKey `json:"-"` + + CreatedBy int64 `json:"-"` + Created int64 `json:"created"` + Updated int64 `json:"updated"` + + Role enum.MembershipRole `json:"role"` +} + +// MembershipUser adds user info to the Membership data. +type MembershipUser struct { + Membership + Principal PrincipalInfo `json:"principal"` + AddedBy PrincipalInfo `json:"added_by"` +} + +// MembershipUserFilter holds membership user query parameters. +type MembershipUserFilter struct { + ListQueryFilter + Sort enum.MembershipUserSort `json:"sort"` + Order enum.Order `json:"order"` +} + +// MembershipSpace adds space info to the Membership data. +type MembershipSpace struct { + Membership + Space Space `json:"space"` + AddedBy PrincipalInfo `json:"added_by"` +} + +// MembershipSpaceFilter holds membership space query parameters. +type MembershipSpaceFilter struct { + ListQueryFilter + Sort enum.MembershipSpaceSort `json:"sort"` + Order enum.Order `json:"order"` +} diff --git a/types/pagination.go b/types/pagination.go new file mode 100644 index 0000000000..a6b26b0554 --- /dev/null +++ b/types/pagination.go @@ -0,0 +1,21 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package types + +// Pagination stores pagination related params. +type Pagination struct { + Page int `json:"page"` + Size int `json:"size"` +} diff --git a/types/path.go b/types/path.go new file mode 100644 index 0000000000..bb8b3ca3e0 --- /dev/null +++ b/types/path.go @@ -0,0 +1,39 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package types + +const ( + PathSeparator = "/" +) + +// SpacePath represents a full path to a space. +type SpacePath struct { + Value string `json:"value"` + IsPrimary bool `json:"is_primary"` + SpaceID int64 `json:"space_id"` +} + +// SpacePathSegment represents a segment of a path to a space. +type SpacePathSegment struct { + // TODO: int64 ID doesn't match DB + ID int64 `json:"id"` + UID string `json:"uid"` + IsPrimary bool `json:"is_primary"` + SpaceID int64 `json:"space_id"` + ParentID int64 `json:"parent_id"` + CreatedBy int64 `json:"created_by"` + Created int64 `json:"created"` + Updated int64 `json:"updated"` +} diff --git a/types/pipeline.go b/types/pipeline.go new file mode 100644 index 0000000000..80c1de84a1 --- /dev/null +++ b/types/pipeline.go @@ -0,0 +1,31 @@ +// Copyright 2023 Harness, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package types + +type Pipeline struct { + ID int64 `db:"pipeline_id" json:"id"` + Description string `db:"pipeline_description" json:"description"` + UID string `db:"pipeline_uid" json:"uid"` + Disabled bool `db:"pipeline_disabled" json:"disabled"` + CreatedBy int64 `db:"pipeline_created_by" json:"created_by"` + Seq int64 `db:"pipeline_seq" json:"seq"` // last execution number for this pipeline + RepoID int64 `db:"pipeline_repo_id" json:"repo_id"` + DefaultBranch string `db:"pipeline_default_branch" json:"default_branch"` + ConfigPath string `db:"pipeline_config_path" json:"config_path"` + Created int64 `db:"pipeline_created" json:"created"` + Execution *Execution `db:"-" json:"execution,omitempty"` // information about the latest execution if available + Updated int64 `db:"pipeline_updated" json:"updated"` + Version int64 `db:"pipeline_version" json:"-"` +} diff --git a/types/plugin.go b/types/plugin.go new file mode 100644 index 0000000000..7956c60b74 --- /dev/null +++ b/types/plugin.go @@ -0,0 +1,50 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +package types + +// Plugin represents a Harness plugin. It has an associated template stored +// in the spec field. The spec is used by the UI to provide a smart visual +// editor for adding plugins to YAML schema. +type Plugin struct { + UID string `db:"plugin_uid" json:"uid"` + Description string `db:"plugin_description" json:"description"` + // Currently we only support step level plugins but more can be added in the future. + Type string `db:"plugin_type" json:"type"` + Version string `db:"plugin_version" json:"version"` + Logo string `db:"plugin_logo" json:"logo"` + // Spec is a YAML template to be used for the plugin. + Spec string `db:"plugin_spec" json:"spec"` +} + +// Matches checks whether two plugins are identical. +// We can use reflection here, this is just easier to add on to +// when needed. +func (plugin *Plugin) Matches(v *Plugin) bool { + if plugin.UID != v.UID { + return false + } + if plugin.Description != v.Description { + return false + } + if plugin.Spec != v.Spec { + return false + } + if plugin.Version != v.Version { + return false + } + if plugin.Logo != v.Logo { + return false + } + return true +} diff --git a/types/principal.go b/types/principal.go new file mode 100644 index 0000000000..cdcfe42de7 --- /dev/null +++ b/types/principal.go @@ -0,0 +1,73 @@ +// Copyright 2023 Harness, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package types defines common data structures. +package types + +import ( + "github.com/harness/gitness/types/enum" +) + +// Principal represents the identity of an acting entity (User, ServiceAccount, Service). +type Principal struct { + // TODO: int64 ID doesn't match DB + ID int64 `db:"principal_id" json:"-"` + UID string `db:"principal_uid" json:"uid"` + Email string `db:"principal_email" json:"email"` + Type enum.PrincipalType `db:"principal_type" json:"type"` + DisplayName string `db:"principal_display_name" json:"display_name"` + Admin bool `db:"principal_admin" json:"admin"` + + // Should be part of principal or not? + Blocked bool `db:"principal_blocked" json:"blocked"` + Salt string `db:"principal_salt" json:"-"` + + // Other info + Created int64 `db:"principal_created" json:"created"` + Updated int64 `db:"principal_updated" json:"updated"` +} + +func (p *Principal) ToPrincipalInfo() *PrincipalInfo { + return &PrincipalInfo{ + ID: p.ID, + UID: p.UID, + DisplayName: p.DisplayName, + Email: p.Email, + Type: p.Type, + Created: p.Created, + Updated: p.Updated, + } +} + +// PrincipalInfo is a compressed representation of a principal we return as part of non-principal APIs. +type PrincipalInfo struct { + ID int64 `json:"id"` + UID string `json:"uid"` + DisplayName string `json:"display_name"` + Email string `json:"email"` + Type enum.PrincipalType `json:"type"` + Created int64 `json:"created"` + Updated int64 `json:"updated"` +} + +func (p *PrincipalInfo) Identifier() int64 { + return p.ID +} + +type PrincipalFilter struct { + Page int `json:"page"` + Size int `json:"size"` + Query string `json:"query"` + Types []enum.PrincipalType `json:"types"` +} diff --git a/types/pullreq.go b/types/pullreq.go new file mode 100644 index 0000000000..040472dd91 --- /dev/null +++ b/types/pullreq.go @@ -0,0 +1,144 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package types + +import ( + "github.com/harness/gitness/types/enum" +) + +// PullReq represents a pull request. +type PullReq struct { + ID int64 `json:"-"` // not returned, it's an internal field + Version int64 `json:"-"` // not returned, it's an internal field + Number int64 `json:"number"` + + CreatedBy int64 `json:"-"` // not returned, because the author info is in the Author field + Created int64 `json:"created"` + Updated int64 `json:"-"` // not returned, it's updated by the server internally. Clients should use EditedAt. 
+ Edited int64 `json:"edited"` + + State enum.PullReqState `json:"state"` + IsDraft bool `json:"is_draft"` + + CommentCount int `json:"-"` // returned as "conversations" in the Stats + UnresolvedCount int `json:"-"` // returned as "unresolved_count" in the Stats + + Title string `json:"title"` + Description string `json:"description"` + + SourceRepoID int64 `json:"source_repo_id"` + SourceBranch string `json:"source_branch"` + SourceSHA string `json:"source_sha"` + TargetRepoID int64 `json:"target_repo_id"` + TargetBranch string `json:"target_branch"` + + ActivitySeq int64 `json:"-"` // not returned, because it's a server's internal field + + MergedBy *int64 `json:"-"` // not returned, because the merger info is in the Merger field + Merged *int64 `json:"merged"` + MergeMethod *enum.MergeMethod `json:"merge_method"` + + MergeCheckStatus enum.MergeCheckStatus `json:"merge_check_status"` + MergeTargetSHA *string `json:"merge_target_sha"` + MergeBaseSHA string `json:"merge_base_sha"` + MergeSHA *string `json:"merge_sha"` + MergeConflicts *string `json:"merge_conflicts,omitempty"` + + Author PrincipalInfo `json:"author"` + Merger *PrincipalInfo `json:"merger"` + Stats PullReqStats `json:"stats"` +} + +// DiffStats shows total number of commits and modified files. +type DiffStats struct { + Commits int `json:"commits,omitempty"` + FilesChanged int `json:"files_changed,omitempty"` +} + +// PullReqStats shows Diff statistics and number of conversations. +type PullReqStats struct { + DiffStats + Conversations int `json:"conversations,omitempty"` + UnresolvedCount int `json:"unresolved_count,omitempty"` +} + +// PullReqFilter stores pull request query parameters. +type PullReqFilter struct { + Page int `json:"page"` + Size int `json:"size"` + Query string `json:"query"` + CreatedBy int64 `json:"created_by"` + SourceRepoID int64 `json:"-"` // caller should use source_repo_ref + SourceRepoRef string `json:"source_repo_ref"` + SourceBranch string `json:"source_branch"` + TargetRepoID int64 `json:"-"` + TargetBranch string `json:"target_branch"` + States []enum.PullReqState `json:"state"` + Sort enum.PullReqSort `json:"sort"` + Order enum.Order `json:"order"` +} + +// PullReqReview holds pull request review. +type PullReqReview struct { + ID int64 `json:"id"` + + CreatedBy int64 `json:"created_by"` + Created int64 `json:"created"` + Updated int64 `json:"updated"` + + PullReqID int64 `json:"pullreq_id"` + + Decision enum.PullReqReviewDecision `json:"decision"` + SHA string `json:"sha"` +} + +// PullReqReviewer holds pull request reviewer. +type PullReqReviewer struct { + PullReqID int64 `json:"-"` + PrincipalID int64 `json:"-"` + + CreatedBy int64 `json:"-"` + Created int64 `json:"created"` + Updated int64 `json:"updated"` + + RepoID int64 `json:"-"` + Type enum.PullReqReviewerType `json:"type"` + LatestReviewID *int64 `json:"latest_review_id"` + + ReviewDecision enum.PullReqReviewDecision `json:"review_decision"` + SHA string `json:"sha"` + + Reviewer PrincipalInfo `json:"reviewer"` + AddedBy PrincipalInfo `json:"added_by"` +} + +// PullReqFileView represents a file reviewed entry for a given pr and principal. +// NOTE: keep api lightweight and don't return unnecessary extra data. 
+type PullReqFileView struct { + PullReqID int64 `json:"-"` + PrincipalID int64 `json:"-"` + + Path string `json:"path"` + SHA string `json:"sha"` + Obsolete bool `json:"obsolete"` + + Created int64 `json:"-"` + Updated int64 `json:"-"` +} + +type MergeResponse struct { + SHA string `json:"sha,omitempty"` + ConflictFiles []string `json:"conflict_files,omitempty"` +} diff --git a/types/pullreq_activity.go b/types/pullreq_activity.go new file mode 100644 index 0000000000..d8111c12c5 --- /dev/null +++ b/types/pullreq_activity.go @@ -0,0 +1,279 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package types + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + + "github.com/harness/gitness/types/enum" +) + +var ( + // jsonRawMessageNullBytes represents the byte array that's equivalent to a nil json.RawMessage. + jsonRawMessageNullBytes = []byte("null") + + // ErrNoPayload is returned in case the activity doesn't have any payload set. + ErrNoPayload = errors.New("activity has no payload") +) + +// PullReqActivity represents a pull request activity. +type PullReqActivity struct { + ID int64 `json:"id"` + Version int64 `json:"-"` // not returned, it's an internal field + + CreatedBy int64 `json:"-"` // not returned, because the author info is in the Author field + Created int64 `json:"created"` + Updated int64 `json:"updated"` // we need updated to determine the latest version reliably. 
+ Edited int64 `json:"edited"` + Deleted *int64 `json:"deleted,omitempty"` + + ParentID *int64 `json:"parent_id"` + RepoID int64 `json:"repo_id"` + PullReqID int64 `json:"pullreq_id"` + + Order int64 `json:"order"` + SubOrder int64 `json:"sub_order"` + ReplySeq int64 `json:"-"` // not returned, because it's a server's internal field + + Type enum.PullReqActivityType `json:"type"` + Kind enum.PullReqActivityKind `json:"kind"` + + Text string `json:"text"` + PayloadRaw json.RawMessage `json:"payload"` + Metadata map[string]interface{} `json:"metadata"` + + ResolvedBy *int64 `json:"-"` // not returned, because the resolver info is in the Resolver field + Resolved *int64 `json:"resolved,omitempty"` + + Author PrincipalInfo `json:"author"` + Resolver *PrincipalInfo `json:"resolver,omitempty"` + + CodeComment *CodeCommentFields `json:"code_comment,omitempty"` +} + +func (a *PullReqActivity) IsValidCodeComment() bool { + return a.Type == enum.PullReqActivityTypeCodeComment && + a.Kind == enum.PullReqActivityKindChangeComment && + a.CodeComment != nil +} + +func (a *PullReqActivity) AsCodeComment() *CodeComment { + if !a.IsValidCodeComment() { + return &CodeComment{} + } + return &CodeComment{ + ID: a.ID, + Version: a.Version, + Updated: a.Updated, + CodeCommentFields: CodeCommentFields{ + Outdated: a.CodeComment.Outdated, + MergeBaseSHA: a.CodeComment.MergeBaseSHA, + SourceSHA: a.CodeComment.SourceSHA, + Path: a.CodeComment.Path, + LineNew: a.CodeComment.LineNew, + SpanNew: a.CodeComment.SpanNew, + LineOld: a.CodeComment.LineOld, + SpanOld: a.CodeComment.SpanOld, + }, + } +} + +func (a *PullReqActivity) IsReplyable() bool { + return (a.Type == enum.PullReqActivityTypeComment || a.Type == enum.PullReqActivityTypeCodeComment) && + a.SubOrder == 0 +} + +func (a *PullReqActivity) IsReply() bool { + return a.SubOrder > 0 +} + +// IsBlocking returns true if the pull request activity (comment/code-comment) is blocking the pull request merge. +func (a *PullReqActivity) IsBlocking() bool { + return a.SubOrder == 0 && a.Resolved == nil && a.Deleted == nil && a.Kind != enum.PullReqActivityKindSystem +} + +// SetPayload sets the payload and verifies it's of correct type for the activity. +func (a *PullReqActivity) SetPayload(payload PullReqActivityPayload) error { + if payload == nil { + a.PayloadRaw = json.RawMessage(nil) + return nil + } + + if payload.ActivityType() != a.Type { + return fmt.Errorf("wrong payload type %T for activity %s, payload is for %s", + payload, a.Type, payload.ActivityType()) + } + + var err error + if a.PayloadRaw, err = json.Marshal(payload); err != nil { + return fmt.Errorf("failed to marshal payload: %w", err) + } + + return nil +} + +// GetPayload returns the payload of the activity. +// An error is returned in case there's an issue retrieving the payload from its raw value. +// NOTE: To ensure rawValue gets changed always use SetPayload() with the updated payload. 
+func (a *PullReqActivity) GetPayload() (PullReqActivityPayload, error) { + // jsonMessage could also contain "null" - we still want to return ErrNoPayload in that case + if a.PayloadRaw == nil || + bytes.Equal(a.PayloadRaw, jsonRawMessageNullBytes) { + return nil, ErrNoPayload + } + + payload, err := newPayloadForActivity(a.Type) + if err != nil { + return nil, fmt.Errorf("failed to create new payload: %w", err) + } + + if err = json.Unmarshal(a.PayloadRaw, payload); err != nil { + return nil, fmt.Errorf("failed to unmarshal payload: %w", err) + } + + return payload, nil +} + +// PullReqActivityFilter stores pull request activity query parameters. +type PullReqActivityFilter struct { + After int64 `json:"after"` + Before int64 `json:"before"` + Limit int `json:"limit"` + + Types []enum.PullReqActivityType `json:"type"` + Kinds []enum.PullReqActivityKind `json:"kind"` +} + +// PullReqActivityPayload is an interface used to identify PR activity payload types. +// The approach is inspired by what protobuf is doing for oneof. +type PullReqActivityPayload interface { + // ActivityType returns the pr activity type the payload is meant for. + // NOTE: this allows us to do easy payload type verification without any kind of reflection. + ActivityType() enum.PullReqActivityType +} + +// activityPayloadFactoryMethod is an alias for a function that creates a new PullReqActivityPayload. +// NOTE: this is used to create new instances for activities on the fly (to avoid reflection) +// NOTE: we could add new() to PullReqActivityPayload interface, but it shouldn't be the payloads' responsibility. +type activityPayloadFactoryMethod func() PullReqActivityPayload + +// allPullReqActivityPayloads is a map that contains the payload factory methods for all activity types with payload. +var allPullReqActivityPayloads = func( + factoryMethods []activityPayloadFactoryMethod, +) map[enum.PullReqActivityType]activityPayloadFactoryMethod { + payloadMap := make(map[enum.PullReqActivityType]activityPayloadFactoryMethod) + for _, factoryMethod := range factoryMethods { + payloadMap[factoryMethod().ActivityType()] = factoryMethod + } + return payloadMap +}([]activityPayloadFactoryMethod{ + func() PullReqActivityPayload { return PullRequestActivityPayloadComment{} }, + func() PullReqActivityPayload { return &PullRequestActivityPayloadCodeComment{} }, + func() PullReqActivityPayload { return &PullRequestActivityPayloadMerge{} }, + func() PullReqActivityPayload { return &PullRequestActivityPayloadStateChange{} }, + func() PullReqActivityPayload { return &PullRequestActivityPayloadTitleChange{} }, + func() PullReqActivityPayload { return &PullRequestActivityPayloadReviewSubmit{} }, + func() PullReqActivityPayload { return &PullRequestActivityPayloadBranchUpdate{} }, + func() PullReqActivityPayload { return &PullRequestActivityPayloadBranchDelete{} }, +}) + +// newPayloadForActivity returns a new payload instance for the requested activity type. 
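
// Editor's note: illustrative usage sketch, not part of this patch. It shows the
// intended round trip through SetPayload and GetPayload defined above: SetPayload
// rejects a payload whose ActivityType doesn't match the activity's Type, and
// GetPayload reconstructs the concrete payload type from the raw JSON via the
// factory map. The title strings are placeholder values.

package types_test

import (
	"fmt"

	"github.com/harness/gitness/types"
	"github.com/harness/gitness/types/enum"
)

func ExamplePullReqActivity_payload() {
	act := &types.PullReqActivity{Type: enum.PullReqActivityTypeTitleChange}

	// Setting a payload of the matching type marshals it into PayloadRaw.
	err := act.SetPayload(&types.PullRequestActivityPayloadTitleChange{Old: "WIP", New: "Add webhooks"})
	fmt.Println(err) // <nil>

	// Setting a payload of the wrong type is rejected; the stored payload is untouched.
	err = act.SetPayload(&types.PullRequestActivityPayloadBranchDelete{SHA: "deadbeef"})
	fmt.Println(err != nil) // true

	// GetPayload re-creates the concrete payload type from the stored JSON.
	payload, _ := act.GetPayload()
	titleChange := payload.(*types.PullRequestActivityPayloadTitleChange)
	fmt.Println(titleChange.New) // Add webhooks
}
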
+func newPayloadForActivity(t enum.PullReqActivityType) (PullReqActivityPayload, error) { + payloadFactoryMethod, ok := allPullReqActivityPayloads[t] + if !ok { + return nil, fmt.Errorf("pr activity type '%s' doesn't have a payload", t) + } + + return payloadFactoryMethod(), nil +} + +type PullRequestActivityPayloadComment struct{} + +func (a PullRequestActivityPayloadComment) ActivityType() enum.PullReqActivityType { + return enum.PullReqActivityTypeComment +} + +type PullRequestActivityPayloadCodeComment struct { + Title string `json:"title"` + Lines []string `json:"lines"` + LineStartNew bool `json:"line_start_new"` + LineEndNew bool `json:"line_end_new"` +} + +func (a *PullRequestActivityPayloadCodeComment) ActivityType() enum.PullReqActivityType { + return enum.PullReqActivityTypeCodeComment +} + +type PullRequestActivityPayloadMerge struct { + MergeMethod enum.MergeMethod `json:"merge_method"` + MergeSHA string `json:"merge_sha"` + TargetSHA string `json:"target_sha"` + SourceSHA string `json:"source_sha"` +} + +func (a *PullRequestActivityPayloadMerge) ActivityType() enum.PullReqActivityType { + return enum.PullReqActivityTypeMerge +} + +type PullRequestActivityPayloadStateChange struct { + Old enum.PullReqState `json:"old"` + New enum.PullReqState `json:"new"` + OldDraft bool `json:"old_draft"` + NewDraft bool `json:"new_draft"` + Message string `json:"message,omitempty"` +} + +func (a *PullRequestActivityPayloadStateChange) ActivityType() enum.PullReqActivityType { + return enum.PullReqActivityTypeStateChange +} + +type PullRequestActivityPayloadTitleChange struct { + Old string `json:"old"` + New string `json:"new"` +} + +func (a *PullRequestActivityPayloadTitleChange) ActivityType() enum.PullReqActivityType { + return enum.PullReqActivityTypeTitleChange +} + +type PullRequestActivityPayloadReviewSubmit struct { + CommitSHA string `json:"commit_sha"` + Message string `json:"message,omitempty"` + Decision enum.PullReqReviewDecision `json:"decision"` +} + +func (a *PullRequestActivityPayloadReviewSubmit) ActivityType() enum.PullReqActivityType { + return enum.PullReqActivityTypeReviewSubmit +} + +type PullRequestActivityPayloadBranchUpdate struct { + Old string `json:"old"` + New string `json:"new"` +} + +func (a *PullRequestActivityPayloadBranchUpdate) ActivityType() enum.PullReqActivityType { + return enum.PullReqActivityTypeBranchUpdate +} + +type PullRequestActivityPayloadBranchDelete struct { + SHA string `json:"sha"` +} + +func (a *PullRequestActivityPayloadBranchDelete) ActivityType() enum.PullReqActivityType { + return enum.PullReqActivityTypeBranchDelete +} diff --git a/types/repo.go b/types/repo.go new file mode 100644 index 0000000000..5379ac03b6 --- /dev/null +++ b/types/repo.go @@ -0,0 +1,70 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package types + +import ( + "github.com/harness/gitness/types/enum" +) + +// Repository represents a code repository. 
+type Repository struct { + // TODO: int64 ID doesn't match DB + ID int64 `json:"id"` + Version int64 `json:"-"` + ParentID int64 `json:"parent_id"` + UID string `json:"uid"` + Path string `json:"path"` + Description string `json:"description"` + IsPublic bool `json:"is_public"` + CreatedBy int64 `json:"created_by"` + Created int64 `json:"created"` + Updated int64 `json:"updated"` + + GitUID string `json:"-"` + DefaultBranch string `json:"default_branch"` + ForkID int64 `json:"fork_id"` + PullReqSeq int64 `json:"-"` + + NumForks int `json:"num_forks"` + NumPulls int `json:"num_pulls"` + NumClosedPulls int `json:"num_closed_pulls"` + NumOpenPulls int `json:"num_open_pulls"` + NumMergedPulls int `json:"num_merged_pulls"` + + Importing bool `json:"importing"` + + // git urls + GitURL string `json:"git_url"` +} + +func (r Repository) GetGitUID() string { + return r.GitUID +} + +// RepoFilter stores repo query parameters. +type RepoFilter struct { + Page int `json:"page"` + Size int `json:"size"` + Query string `json:"query"` + Sort enum.RepoAttr `json:"sort"` + Order enum.Order `json:"order"` +} + +// RepositoryGitInfo holds git info for a repository. +type RepositoryGitInfo struct { + ID int64 + ParentID int64 + GitUID string +} diff --git a/types/secret.go b/types/secret.go new file mode 100644 index 0000000000..23c36cd30a --- /dev/null +++ b/types/secret.go @@ -0,0 +1,30 @@ +// Copyright 2023 Harness Inc. All rights reserved. +// Use of this source code is governed by the Polyform Free Trial License +// that can be found in the LICENSE.md file for this repository. + +package types + +type Secret struct { + ID int64 `db:"secret_id" json:"id"` + Description string `db:"secret_description" json:"description"` + SpaceID int64 `db:"secret_space_id" json:"space_id"` + CreatedBy int64 `db:"secret_created_by" json:"created_by"` + UID string `db:"secret_uid" json:"uid"` + Data string `db:"secret_data" json:"-"` + Created int64 `db:"secret_created" json:"created"` + Updated int64 `db:"secret_updated" json:"updated"` + Version int64 `db:"secret_version" json:"-"` +} + +// Copy makes a copy of the secret without the value. +func (s *Secret) CopyWithoutData() *Secret { + return &Secret{ + ID: s.ID, + Description: s.Description, + UID: s.UID, + SpaceID: s.SpaceID, + Created: s.Created, + Updated: s.Updated, + Version: s.Version, + } +} diff --git a/types/service.go b/types/service.go new file mode 100644 index 0000000000..54c2445a96 --- /dev/null +++ b/types/service.go @@ -0,0 +1,53 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package types defines common data structures. +package types + +import "github.com/harness/gitness/types/enum" + +type ( + // Service is a principal representing a different internal service that runs alongside gitness. 
+ Service struct { + // Fields from Principal + ID int64 `db:"principal_id" json:"-"` + UID string `db:"principal_uid" json:"uid"` + Email string `db:"principal_email" json:"email"` + DisplayName string `db:"principal_display_name" json:"display_name"` + Admin bool `db:"principal_admin" json:"admin"` + Blocked bool `db:"principal_blocked" json:"blocked"` + Salt string `db:"principal_salt" json:"-"` + Created int64 `db:"principal_created" json:"created"` + Updated int64 `db:"principal_updated" json:"updated"` + } +) + +func (s *Service) ToPrincipal() *Principal { + return &Principal{ + ID: s.ID, + UID: s.UID, + Email: s.Email, + Type: enum.PrincipalTypeService, + DisplayName: s.DisplayName, + Admin: s.Admin, + Blocked: s.Blocked, + Salt: s.Salt, + Created: s.Created, + Updated: s.Updated, + } +} + +func (s *Service) ToPrincipalInfo() *PrincipalInfo { + return s.ToPrincipal().ToPrincipalInfo() +} diff --git a/types/service_account.go b/types/service_account.go new file mode 100644 index 0000000000..3a4dabe3be --- /dev/null +++ b/types/service_account.go @@ -0,0 +1,65 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package types defines common data structures. +package types + +import "github.com/harness/gitness/types/enum" + +type ( + // ServiceAccount is a principal representing a service account. + ServiceAccount struct { + // Fields from Principal (without admin, as it's never an admin) + ID int64 `db:"principal_id" json:"-"` + UID string `db:"principal_uid" json:"uid"` + Email string `db:"principal_email" json:"email"` + DisplayName string `db:"principal_display_name" json:"display_name"` + Admin bool `db:"principal_admin" json:"admin"` + Blocked bool `db:"principal_blocked" json:"blocked"` + Salt string `db:"principal_salt" json:"-"` + Created int64 `db:"principal_created" json:"created"` + Updated int64 `db:"principal_updated" json:"updated"` + + // ServiceAccount specific fields + ParentType enum.ParentResourceType `db:"principal_sa_parent_type" json:"parent_type"` + ParentID int64 `db:"principal_sa_parent_id" json:"parent_id"` + } + + // ServiceAccountInput store details used to + // create or update a service account. + ServiceAccountInput struct { + DisplayName *string `json:"display_name"` + ParentType *enum.ParentResourceType `json:"parent_type"` + ParentID *int64 `json:"parent_id"` + } +) + +func (s *ServiceAccount) ToPrincipal() *Principal { + return &Principal{ + ID: s.ID, + UID: s.UID, + Email: s.Email, + Type: enum.PrincipalTypeServiceAccount, + DisplayName: s.DisplayName, + Admin: s.Admin, + Blocked: s.Blocked, + Salt: s.Salt, + Created: s.Created, + Updated: s.Updated, + } +} + +func (s *ServiceAccount) ToPrincipalInfo() *PrincipalInfo { + return s.ToPrincipal().ToPrincipalInfo() +} diff --git a/types/space.go b/types/space.go new file mode 100644 index 0000000000..bcd5c621ba --- /dev/null +++ b/types/space.go @@ -0,0 +1,53 @@ +// Copyright 2023 Harness, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package types + +import ( + "github.com/harness/gitness/types/enum" +) + +/* +Space represents a space. +There isn't a one-size-fits-all hierarchical data structure for DBs, +so for now we are using a mix of materialized paths and adjacency list. +Every space stores its parent, and a space's path is stored in a separate table. +PRO: Quick lookup of children, quick lookup based on fqdn (APIs) +CON: Changing a space uid requires updating the paths of the space and all of its descendants. + +Interesting reads: +https://stackoverflow.com/questions/4048151/what-are-the-options-for-storing-hierarchical-data-in-a-relational-database +https://www.slideshare.net/billkarwin/models-for-hierarchical-data +*/ +type Space struct { + ID int64 `json:"id"` + Version int64 `json:"-"` + ParentID int64 `json:"parent_id"` + Path string `json:"path"` + UID string `json:"uid"` + Description string `json:"description"` + IsPublic bool `json:"is_public"` + CreatedBy int64 `json:"created_by"` + Created int64 `json:"created"` + Updated int64 `json:"updated"` +} + +// SpaceFilter stores space query parameters. +type SpaceFilter struct { + Page int `json:"page"` + Size int `json:"size"` + Query string `json:"query"` + Sort enum.SpaceAttr `json:"sort"` + Order enum.Order `json:"order"` +} diff --git a/types/stage.go b/types/stage.go new file mode 100644 index 0000000000..55e61471a5 --- /dev/null +++ b/types/stage.go @@ -0,0 +1,48 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License.
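To make the materialized-path trade-off described above concrete: a space's path is its parent's path joined with its own uid, which is why renaming a uid touches the stored paths of the space and of everything below it. A minimal sketch (spacePath is an illustrative helper and the ParentID == 0 convention for top-level spaces is an assumption, not an API of this patch):

package example

import "github.com/harness/gitness/types"

// spacePath sketches how a space's materialized path relates to its parent:
// the parent's path joined with the space's own uid (top-level spaces use the
// uid alone). Renaming a uid therefore invalidates the stored path of the
// space and of every descendant.
func spacePath(parentPath string, space *types.Space) string {
	if space.ParentID == 0 {
		return space.UID
	}
	return parentPath + "/" + space.UID
}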
+ +package types + +import "github.com/harness/gitness/types/enum" + +type Stage struct { + ID int64 `json:"-"` + ExecutionID int64 `json:"execution_id"` + RepoID int64 `json:"repo_id"` + Number int64 `json:"number"` + Name string `json:"name"` + Kind string `json:"kind,omitempty"` + Type string `json:"type,omitempty"` + Status enum.CIStatus `json:"status"` + Error string `json:"error,omitempty"` + ErrIgnore bool `json:"errignore,omitempty"` + ExitCode int `json:"exit_code"` + Machine string `json:"machine,omitempty"` + OS string `json:"os,omitempty"` + Arch string `json:"arch,omitempty"` + Variant string `json:"variant,omitempty"` + Kernel string `json:"kernel,omitempty"` + Limit int `json:"limit,omitempty"` + LimitRepo int `json:"throttle,omitempty"` + Started int64 `json:"started,omitempty"` + Stopped int64 `json:"stopped,omitempty"` + Created int64 `json:"-"` + Updated int64 `json:"-"` + Version int64 `json:"-"` + OnSuccess bool `json:"on_success"` + OnFailure bool `json:"on_failure"` + DependsOn []string `json:"depends_on,omitempty"` + Labels map[string]string `json:"labels,omitempty"` + Steps []*Step `json:"steps,omitempty"` +} diff --git a/types/step.go b/types/step.go new file mode 100644 index 0000000000..fb345ec681 --- /dev/null +++ b/types/step.go @@ -0,0 +1,50 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package types + +import ( + "encoding/json" + "fmt" + + "github.com/harness/gitness/types/enum" +) + +type Step struct { + ID int64 `json:"-"` + StageID int64 `json:"-"` + Number int64 `json:"number"` + Name string `json:"name"` + Status enum.CIStatus `json:"status"` + Error string `json:"error,omitempty"` + ErrIgnore bool `json:"errignore,omitempty"` + ExitCode int `json:"exit_code"` + Started int64 `json:"started,omitempty"` + Stopped int64 `json:"stopped,omitempty"` + Version int64 `json:"-" db:"step_version"` + DependsOn []string `json:"depends_on,omitempty"` + Image string `json:"image,omitempty"` + Detached bool `json:"detached"` + Schema string `json:"schema,omitempty"` +} + +// Pretty print a step +func (s Step) String() string { + // Convert the Step struct to JSON + jsonStr, err := json.MarshalIndent(s, "", " ") + if err != nil { + return fmt.Sprintf("Error converting to JSON: %v", err) + } + return string(jsonStr) +} diff --git a/types/stream.go b/types/stream.go new file mode 100644 index 0000000000..069ba99bf8 --- /dev/null +++ b/types/stream.go @@ -0,0 +1,19 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package types + +type Stream[T any] interface { + Next() (T, error) +} diff --git a/types/template.go b/types/template.go new file mode 100644 index 0000000000..1e8df640fc --- /dev/null +++ b/types/template.go @@ -0,0 +1,16 @@ +// Copyright 2023 Harness Inc. All rights reserved. +// Use of this source code is governed by the Polyform Free Trial License +// that can be found in the LICENSE.md file for this repository. + +package types + +type Template struct { + ID int64 `db:"template_id" json:"id"` + Description string `db:"template_description" json:"description"` + SpaceID int64 `db:"template_space_id" json:"space_id"` + UID string `db:"template_uid" json:"uid"` + Data string `db:"template_data" json:"data"` + Created int64 `db:"template_created" json:"created"` + Updated int64 `db:"template_updated" json:"updated"` + Version int64 `db:"template_version" json:"-"` +} diff --git a/types/token.go b/types/token.go new file mode 100644 index 0000000000..6d0f1841dc --- /dev/null +++ b/types/token.go @@ -0,0 +1,39 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package types + +import ( + "github.com/harness/gitness/types/enum" +) + +// Represents server side infos stored for tokens we distribute. +type Token struct { + // TODO: int64 ID doesn't match DB + ID int64 `db:"token_id" json:"-"` + PrincipalID int64 `db:"token_principal_id" json:"principal_id"` + Type enum.TokenType `db:"token_type" json:"type"` + UID string `db:"token_uid" json:"uid"` + // ExpiresAt is an optional unix time that if specified restricts the validity of a token. + ExpiresAt *int64 `db:"token_expires_at" json:"expires_at,omitempty"` + // IssuedAt is the unix time at which the token was issued. + IssuedAt int64 `db:"token_issued_at" json:"issued_at"` + CreatedBy int64 `db:"token_created_by" json:"created_by"` +} + +// TokenResponse is returned as part of token creation for PAT / SAT / User Session. +type TokenResponse struct { + AccessToken string `json:"access_token"` + Token Token `json:"token"` +} diff --git a/types/trigger.go b/types/trigger.go new file mode 100644 index 0000000000..51f49cf35a --- /dev/null +++ b/types/trigger.go @@ -0,0 +1,23 @@ +// Copyright 2023 Harness Inc. All rights reserved. +// Use of this source code is governed by the Polyform Free Trial License +// that can be found in the LICENSE.md file for this repository. 
+ +package types + +import "github.com/harness/gitness/types/enum" + +type Trigger struct { + ID int64 `json:"id"` + Description string `json:"description"` + Type string `json:"trigger_type"` + PipelineID int64 `json:"pipeline_id"` + Secret string `json:"-"` + RepoID int64 `json:"repo_id"` + CreatedBy int64 `json:"created_by"` + Disabled bool `json:"disabled"` + Actions []enum.TriggerAction `json:"actions"` + UID string `json:"uid"` + Created int64 `json:"created"` + Updated int64 `json:"updated"` + Version int64 `json:"-"` +} diff --git a/types/types_test.go b/types/types_test.go new file mode 100644 index 0000000000..51e7fa36ea --- /dev/null +++ b/types/types_test.go @@ -0,0 +1,15 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package types diff --git a/types/user.go b/types/user.go new file mode 100644 index 0000000000..77172ce192 --- /dev/null +++ b/types/user.go @@ -0,0 +1,76 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package types defines common data structures. +package types + +import ( + "github.com/harness/gitness/types/enum" +) + +type ( + // User is a principal representing an end user. + User struct { + // Fields from Principal + ID int64 `db:"principal_id" json:"-"` + UID string `db:"principal_uid" json:"uid"` + Email string `db:"principal_email" json:"email"` + DisplayName string `db:"principal_display_name" json:"display_name"` + Admin bool `db:"principal_admin" json:"admin"` + Blocked bool `db:"principal_blocked" json:"blocked"` + Salt string `db:"principal_salt" json:"-"` + Created int64 `db:"principal_created" json:"created"` + Updated int64 `db:"principal_updated" json:"updated"` + + // User specific fields + Password string `db:"principal_user_password" json:"-"` + } + + // UserInput store user account details used to + // create or update a user. + UserInput struct { + Email *string `json:"email"` + Password *string `json:"password"` + Name *string `json:"name"` + Admin *bool `json:"admin"` + } + + // UserFilter stores user query parameters. 
+ UserFilter struct { + Page int `json:"page"` + Size int `json:"size"` + Sort enum.UserAttr `json:"sort"` + Order enum.Order `json:"order"` + Admin bool `json:"admin"` + } +) + +func (u *User) ToPrincipal() *Principal { + return &Principal{ + ID: u.ID, + UID: u.UID, + Email: u.Email, + Type: enum.PrincipalTypeUser, + DisplayName: u.DisplayName, + Admin: u.Admin, + Blocked: u.Blocked, + Salt: u.Salt, + Created: u.Created, + Updated: u.Updated, + } +} + +func (u *User) ToPrincipalInfo() *PrincipalInfo { + return u.ToPrincipal().ToPrincipalInfo() +} diff --git a/types/webhook.go b/types/webhook.go new file mode 100644 index 0000000000..761251a5b2 --- /dev/null +++ b/types/webhook.go @@ -0,0 +1,106 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package types + +import ( + "encoding/json" + + "github.com/harness/gitness/types/enum" +) + +// Webhook represents a webhook. +type Webhook struct { + ID int64 `json:"id"` + Version int64 `json:"version"` + ParentID int64 `json:"parent_id"` + ParentType enum.WebhookParent `json:"parent_type"` + CreatedBy int64 `json:"created_by"` + Created int64 `json:"created"` + Updated int64 `json:"updated"` + Internal bool `json:"-"` + + DisplayName string `json:"display_name"` + Description string `json:"description"` + URL string `json:"url"` + Secret string `json:"-"` + Enabled bool `json:"enabled"` + Insecure bool `json:"insecure"` + Triggers []enum.WebhookTrigger `json:"triggers"` + LatestExecutionResult *enum.WebhookExecutionResult `json:"latest_execution_result,omitempty"` +} + +// MarshalJSON overrides the default json marshaling for `Webhook` allowing us to inject the `HasSecret` field. +// NOTE: This is required as we don't expose the `Secret` field and thus the caller wouldn't know whether +// the webhook contains a secret or not. +// NOTE: This is used as an alternative to adding a `HasSecret` field to Webhook itself, which would +// require us to keep `HasSecret` in sync with the `Secret` field, while `HasSecret` is not used internally at all. +func (w *Webhook) MarshalJSON() ([]byte, error) { + // WebhookAlias allows us to embed the original Webhook object (avoiding redefining all fields) + // while avoiding an infinite loop of marshaling. + type WebhookAlias Webhook + return json.Marshal(&struct { + *WebhookAlias + HasSecret bool `json:"has_secret"` + }{ + WebhookAlias: (*WebhookAlias)(w), + HasSecret: w != nil && w.Secret != "", + }) +} + +// WebhookExecution represents a single execution of a webhook.
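The alias-embedding trick used by Webhook.MarshalJSON above is a general way to add derived JSON fields without recursing into the custom marshaler. A minimal, self-contained sketch using a hypothetical note type (not part of this patch):

package example

import "encoding/json"

// note is a hypothetical type used only to demonstrate the pattern.
type note struct {
	Text   string `json:"text"`
	Secret string `json:"-"`
}

func (n *note) MarshalJSON() ([]byte, error) {
	// NoteAlias has the same fields as note but none of its methods,
	// so marshaling it does not invoke this MarshalJSON again.
	type NoteAlias note
	return json.Marshal(&struct {
		*NoteAlias
		HasSecret bool `json:"has_secret"`
	}{
		NoteAlias: (*NoteAlias)(n),
		HasSecret: n.Secret != "",
	})
}

Marshaling &note{Text: "hi", Secret: "s"} produces {"text":"hi","has_secret":true}; the secret itself is never emitted.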
+type WebhookExecution struct { + ID int64 `json:"id"` + RetriggerOf *int64 `json:"retrigger_of,omitempty"` + Retriggerable bool `json:"retriggerable"` + Created int64 `json:"created"` + WebhookID int64 `json:"webhook_id"` + TriggerType enum.WebhookTrigger `json:"trigger_type"` + TriggerID string `json:"-"` + Result enum.WebhookExecutionResult `json:"result"` + Duration int64 `json:"duration"` + Error string `json:"error,omitempty"` + Request WebhookExecutionRequest `json:"request"` + Response WebhookExecutionResponse `json:"response"` +} + +// WebhookExecutionRequest represents the request of a webhook execution. +type WebhookExecutionRequest struct { + URL string `json:"url"` + Headers string `json:"headers"` + Body string `json:"body"` +} + +// WebhookExecutionResponse represents the response of a webhook execution. +type WebhookExecutionResponse struct { + StatusCode int `json:"status_code"` + Status string `json:"status"` + Headers string `json:"headers"` + Body string `json:"body"` +} + +// WebhookFilter stores Webhook query parameters for listing. +type WebhookFilter struct { + Query string `json:"query"` + Page int `json:"page"` + Size int `json:"size"` + Sort enum.WebhookAttr `json:"sort"` + Order enum.Order `json:"order"` +} + +// WebhookExecutionFilter stores WebhookExecution query parameters for listing. +type WebhookExecutionFilter struct { + Page int `json:"page"` + Size int `json:"size"` +} diff --git a/version/version.go b/version/version.go new file mode 100644 index 0000000000..8634cad3f8 --- /dev/null +++ b/version/version.go @@ -0,0 +1,62 @@ +// Copyright 2023 Harness, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package version provides the version number. +package version + +import ( + "strconv" + + "github.com/coreos/go-semver/semver" +) + +var ( + // GitRepository is the git repository that was compiled. + GitRepository string + // GitCommit is the git commit that was compiled. + GitCommit string +) + +var ( + // major is for an API incompatible changes. + major string + // minor is for functionality in a backwards-compatible manner. + minor string + // patch is for backwards-compatible bug fixes. + patch string + // pre indicates prerelease. + pre = "" + // dev indicates development branch. Releases will be empty string. + dev string + + // Version is the specification version that the package types support. + Version = semver.Version{ + Major: parseVersionNumber(major), + Minor: parseVersionNumber(minor), + Patch: parseVersionNumber(patch), + PreRelease: semver.PreRelease(pre), + Metadata: dev, + } +) + +func parseVersionNumber(versionNum string) int64 { + if versionNum == "" { + return 0 + } + i, err := strconv.ParseInt(versionNum, 10, 64) + if err != nil { + panic(err) + } + return i +} diff --git a/version/version_test.go b/version/version_test.go new file mode 100644 index 0000000000..9ef952fa06 --- /dev/null +++ b/version/version_test.go @@ -0,0 +1,23 @@ +// Copyright 2023 Harness, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package version + +import "testing" + +func TestVersion(t *testing.T) { + if got, want := Version.String(), "1.0.0"; got != want { + t.Errorf("Want version %s, got %s", want, got) + } +} diff --git a/web/.eslintignore b/web/.eslintignore new file mode 100644 index 0000000000..eb5fcac6b4 --- /dev/null +++ b/web/.eslintignore @@ -0,0 +1,6 @@ +src/services +jest.config.js +scripts/ +node_modules +dist +*.js \ No newline at end of file diff --git a/web/.eslintrc.yml b/web/.eslintrc.yml new file mode 100644 index 0000000000..b9fa077351 --- /dev/null +++ b/web/.eslintrc.yml @@ -0,0 +1,126 @@ +--- +parser: '@typescript-eslint/parser' +parserOptions: + ecmaVersion: 2020 + sourceType: module + ecmaFeatures: + jsx: true + impliedStrict: true + project: ./tsconfig-eslint.json +plugins: + - react + - '@typescript-eslint/eslint-plugin' + - react-hooks + - jest + - import +env: + browser: true + node: true + shared-node-browser: true + es6: true + jest: true +globals: + __DEV__: readonly +extends: + - eslint:recommended + - plugin:react/recommended + - plugin:@typescript-eslint/recommended + - plugin:import/errors + - plugin:import/typescript + - prettier +settings: + react: + version: detect + import/resolver: + typescript: + alwaysTryTypes: true +rules: + '@typescript-eslint/ban-types': + - error + - extendDefaults: true + types: + '{}': false + + # custom rules + no-document-body-snapshot: 2 + duplicate-data-tooltip-id: 'warn' + jest-no-mock: + - 2 + - module: + react-router-dom: 'react-router-dom should not be mocked. 
Wrap the component inside TestWrapper instead' + + # built-in + no-console: 2 + semi: 0 + no-await-in-loop: 2 + no-shadow: 0 + + # react hooks + react-hooks/rules-of-hooks: 2 + react-hooks/exhaustive-deps: 1 + + # react + react/prop-types: 0 + react/display-name: 1 + + #typescript + '@typescript-eslint/no-use-before-define': 0 + '@typescript-eslint/explicit-function-return-type': 0 + no-unused-vars: 0 + '@typescript-eslint/no-unused-vars': + - 2 + - vars: all + args: after-used + ignoreRestSiblings: true + argsIgnorePattern: ^_ + '@typescript-eslint/member-delimiter-style': 0 + '@typescript-eslint/no-shadow': 2 + '@typescript-eslint/no-extra-semi': 0 + '@typescript-eslint/explicit-module-boundary-types': 0 + + #import + import/order: + - error + - groups: + - builtin + - external + - internal + - - parent + - sibling + pathGroups: + - pattern: '*.scss' + group: index + position: after + patternOptions: + matchBase: true + import/no-useless-path-segments: 2 + + no-restricted-imports: + - error + - patterns: + - lodash.* + paths: + - lodash + +overrides: + - files: + - '**/*.test.ts' + - '**/*.test.tsx' + rules: + '@typescript-eslint/no-magic-numbers': 0 + '@typescript-eslint/no-non-null-assertion': 'off' + '@typescript-eslint/no-non-null-asserted-optional-chain': 0 + '@typescript-eslint/no-explicit-any': 0 + no-await-in-loop: 0 + jest/consistent-test-it: + - 2 + - fn: test + withinDescribe: test + jest/expect-expect: 2 + jest/no-disabled-tests: 2 + jest/no-commented-out-tests: 2 + - files: + - services.tsx + rules: + '@typescript-eslint/explicit-function-return-type': 0 + '@typescript-eslint/no-explicit-any': 0 diff --git a/web/.prettierrc.yml b/web/.prettierrc.yml new file mode 100644 index 0000000000..065611beea --- /dev/null +++ b/web/.prettierrc.yml @@ -0,0 +1,10 @@ +--- +printWidth: 120 +tabWidth: 2 +useTabs: false +semi: false +singleQuote: true +trailingComma: none +bracketSpacing: true +bracketSameLine: true +arrowParens: avoid diff --git a/web/.vscode/extensions.json b/web/.vscode/extensions.json new file mode 100644 index 0000000000..d7df89c9cd --- /dev/null +++ b/web/.vscode/extensions.json @@ -0,0 +1,3 @@ +{ + "recommendations": ["esbenp.prettier-vscode", "dbaeumer.vscode-eslint"] +} diff --git a/web/.vscode/settings.json b/web/.vscode/settings.json new file mode 100644 index 0000000000..59a141f605 --- /dev/null +++ b/web/.vscode/settings.json @@ -0,0 +1,27 @@ +{ + "search.exclude": { + "**/node_modules": true, + "npm-debug.log*": true, + "**/static": true, + "dist/": true, + "yarn-error.*": true, + "**/yarn.lock": true + }, + "editor.formatOnSave": true, + "editor.tabSize": 2, + "editor.insertSpaces": true, + "editor.detectIndentation": false, + "editor.defaultFormatter": "esbenp.prettier-vscode", + "[typescriptreact]": { + "editor.defaultFormatter": "esbenp.prettier-vscode" + }, + "[javascript]": { + "editor.defaultFormatter": "esbenp.prettier-vscode" + }, + "eslint.options": { + "rulePaths": ["./scripts/eslint-rules"] + }, + "[json]": { + "editor.defaultFormatter": "esbenp.prettier-vscode" + } +} diff --git a/web/config/moduleFederation.config.js b/web/config/moduleFederation.config.js new file mode 100644 index 0000000000..9198733642 --- /dev/null +++ b/web/config/moduleFederation.config.js @@ -0,0 +1,49 @@ +const packageJSON = require('../package.json') +const { pick, omit, mapValues } = require('lodash') + +/** + * These packages must be stricly shared with exact versions + */ +const ExactSharedPackages = [ + 'react-dom', + 'react', + 'react-router-dom', + 
'@blueprintjs/core', + '@blueprintjs/select', + '@blueprintjs/datetime', + 'restful-react' +] + +/** + * @type {import('webpack').ModuleFederationPluginOptions} + */ +module.exports = { + name: 'codeRemote', + filename: 'remoteEntry.js', + exposes: { + './App': './src/App.tsx', + './Repositories': './src/pages/RepositoriesListing/RepositoriesListing.tsx', + './Repository': './src/pages/Repository/Repository.tsx', + './FileEdit': './src/pages/RepositoryFileEdit/RepositoryFileEdit.tsx', + './Commits': './src/pages/RepositoryCommits/RepositoryCommits.tsx', + './Commit': './src/pages/RepositoryCommit/RepositoryCommit.tsx', + './Branches': './src/pages/RepositoryBranches/RepositoryBranches.tsx', + './PullRequests': './src/pages/PullRequests/PullRequests.tsx', + './Tags': './src/pages/RepositoryTags/RepositoryTags.tsx', + './PullRequest': './src/pages/PullRequest/PullRequest.tsx', + './Compare': './src/pages/Compare/Compare.tsx', + './Settings': './src/pages/RepositorySettings/RepositorySettings.tsx', + './Webhooks': './src/pages/Webhooks/Webhooks.tsx', + './WebhookNew': './src/pages/WebhookNew/WebhookNew.tsx', + './Search': './src/pages/Search/Search.tsx', + './WebhookDetails': './src/pages/WebhookDetails/WebhookDetails.tsx', + './NewRepoModalButton': './src/components/NewRepoModalButton/NewRepoModalButton.tsx' + }, + shared: { + formik: packageJSON.dependencies['formik'], + ...mapValues(pick(packageJSON.dependencies, ExactSharedPackages), version => ({ + singleton: true, + requiredVersion: version + })) + } +} diff --git a/web/config/webpack.common.js b/web/config/webpack.common.js new file mode 100644 index 0000000000..ea8799bd3f --- /dev/null +++ b/web/config/webpack.common.js @@ -0,0 +1,240 @@ +const path = require('path') + +const webpack = require('webpack') +const { + container: { ModuleFederationPlugin }, + DefinePlugin +} = require('webpack') +const MiniCssExtractPlugin = require('mini-css-extract-plugin') +const TsconfigPathsPlugin = require('tsconfig-paths-webpack-plugin') +const GenerateStringTypesPlugin = require('../scripts/webpack/GenerateStringTypesPlugin').GenerateStringTypesPlugin +const { RetryChunkLoadPlugin } = require('webpack-retry-chunk-load-plugin') +const MonacoWebpackPlugin = require('monaco-editor-webpack-plugin') +const moduleFederationConfig = require('./moduleFederation.config') +const CONTEXT = process.cwd() +const DEV = process.env.NODE_ENV === 'development' + +module.exports = { + target: 'web', + context: CONTEXT, + stats: { + modules: false, + children: false + }, + entry: { + [moduleFederationConfig.name]: './src/public-path' + }, + output: { + publicPath: 'auto', + filename: DEV ? 'static/[name].js' : 'static/[name].[contenthash:6].js', + chunkFilename: DEV ? 'static/[name].[id].js' : 'static/[name].[id].[contenthash:6].js', + pathinfo: false + }, + module: { + rules: [ + { + test: /\.m?js$/, + include: /node_modules/, + type: 'javascript/auto' + }, + { + test: /\.(j|t)sx?$/, + exclude: /node_modules/, + use: [ + { + loader: 'ts-loader', + options: { + transpileOnly: true + } + } + ] + }, + { + test: /\.module\.scss$/, + exclude: /node_modules/, + use: [ + MiniCssExtractPlugin.loader, + { + loader: 'css-loader', + options: { + importLoaders: 1, + modules: { + mode: 'local', + localIdentName: DEV ? 
'[name]_[local]_[hash:base64:6]' : '[hash:base64:6]', + exportLocalsConvention: 'camelCaseOnly' + } + } + }, + { + loader: 'sass-loader', + options: { + sassOptions: { + includePaths: [path.join(CONTEXT, 'src')] + }, + sourceMap: false, + implementation: require('sass') + } + } + ] + }, + { + test: /(?/scripts/jest/setup-file.js'], + collectCoverageFrom: [ + 'src/**/*.{ts,tsx}', + '!src/index.tsx', + '!src/App.tsx', + '!src/bootstrap.tsx', + '!src/framework/strings/**', + '!src/services/**', + '!src/**/*.d.ts', + '!src/**/*.test.{ts,tsx}', + '!src/**/*.stories.{ts,tsx}', + '!src/**/__test__/**', + '!src/**/__tests__/**', + '!src/utils/test/**', + '!src/AppUtils.ts' + ], + coverageReporters: ['lcov', 'json-summary'], + transform: { + '^.+\\.tsx?$': 'ts-jest', + '^.+\\.js$': 'ts-jest', + '^.+\\.ya?ml$': '/scripts/jest/yaml-transform.js', + '^.+\\.gql$': '/scripts/jest/gql-loader.js' + }, + moduleDirectories: ['node_modules', 'src'], + testMatch: ['**/?(*.)+(spec|test).[jt]s?(x)'], + moduleNameMapper: { + '\\.s?css$': 'identity-obj-proxy', + 'monaco-editor': '/node_modules/react-monaco-editor', + '\\.(jpg|jpeg|png|gif|svg|eot|otf|webp|ttf|woff|woff2|mp4|webm|wav|mp3|m4a|aac|oga)$': + '/scripts/jest/file-mock.js' + }, + coverageThreshold: { + global: { + statements: 60, + branches: 40, + functions: 40, + lines: 60 + } + }, + transformIgnorePatterns: ['node_modules/(?!(date-fns|lodash-es)/)'], + testPathIgnorePatterns: ['/dist'] +} diff --git a/web/jest.coverage.config.js b/web/jest.coverage.config.js new file mode 100644 index 0000000000..1e8cbf44db --- /dev/null +++ b/web/jest.coverage.config.js @@ -0,0 +1,9 @@ +process.env.TZ = 'GMT' + +const config = require('./jest.config') +const { omit } = require('lodash') + +module.exports = { + ...omit(config, ['coverageThreshold', 'coverageReporters']), + coverageReporters: ['text-summary', 'json-summary'] +} diff --git a/web/package.json b/web/package.json new file mode 100644 index 0000000000..580cd716c6 --- /dev/null +++ b/web/package.json @@ -0,0 +1,159 @@ +{ + "name": "codeui", + "description": "Harness Code UI", + "version": "0.0.1", + "author": "Harness Inc", + "license": "Harness Inc", + "private": true, + "homepage": "http://app.harness.io/", + "repository": { + "type": "git", + "url": "https://github.com/harness/gitness.git" + }, + "bugs": { + "url": "https://github.com/harness/gitness/issues" + }, + "keywords": [], + "scripts": { + "webpack": "NODE_ENV=development webpack serve --config config/webpack.dev.js", + "typed-scss": "typed-scss-modules src --watch", + "dev": "run-p webpack typed-scss", + "test": "jest src --silent", + "test:watch": "jest --watch", + "build": "rm -rf dist && webpack --config config/webpack.prod.js", + "lint": "eslint --rulesdir ./scripts/eslint-rules --ext .ts --ext .tsx src", + "prettier": "prettier --check \"./src/**/*.{ts,tsx,css,scss}\"", + "coverage": "npm test --coverage", + "typecheck": "tsc", + "check:all": "npm-run-all typecheck lint prettier test", + "clean": "rm -rf dist && rm -rf node_modules/.cache", + "fmt": "prettier --write \"./src/**/*.{ts,tsx,css,scss}\"", + "services": "restful-react import --config restful-react.config.js code", + "postservices": "prettier --write src/services/**/*.tsx", + "strings": "npm-run-all strings:*", + "strings:genTypes": "node scripts/strings/generateTypesCli.mjs" + }, + "dependencies": { + "@blueprintjs/core": "3.26.1", + "@blueprintjs/datetime": "3.13.0", + "@blueprintjs/select": "3.12.3", + "@codemirror/commands": "^6.2.3", + "@codemirror/lang-markdown": "^6.1.1", + 
"@codemirror/language-data": "^6.3.0", + "@codemirror/state": "^6.2.0", + "@codemirror/view": "^6.9.6", + "@harnessio/design-system": "^1.6.0-beta.1", + "@harnessio/icons": "^2.0.0-beta.2", + "@harnessio/uicore": "^4.0.0-beta.1", + "@types/react-monaco-editor": "^0.16.0", + "@uiw/codemirror-extensions-color": "^4.19.9", + "@uiw/codemirror-extensions-hyper-link": "^4.19.9", + "@uiw/codemirror-themes-all": "^4.19.9", + "@uiw/react-markdown-preview": "^4.1.12", + "anser": "^2.1.1", + "classnames": "^2.2.6", + "clipboard-copy": "^3.1.0", + "diff2html": "3.4.22", + "formik": "2.2.9", + "highlight.js": "^11.8.0", + "iconoir-react": "^6.11.0", + "immer": "^9.0.6", + "lang-map": "^0.4.0", + "lodash-es": "^4.17.15", + "marked": "^4.0.12", + "masonry-layout": "^4.2.2", + "moment": "^2.25.3", + "monaco-editor": "^0.40.0", + "monaco-editor-webpack-plugin": "^7.1.0", + "monaco-yaml": "^4.0.4", + "qs": "^6.9.4", + "react": "^17.0.2", + "react-complex-tree": "^1.1.11", + "react-dom": "^17.0.2", + "react-draggable": "^4.4.2", + "react-hotkeys-hook": "^4.4.1", + "react-intersection-observer": "^9.4.1", + "react-jsx-match": "^1.1.5", + "react-keywords": "^0.0.5", + "react-monaco-editor": "^0.54.0", + "react-pdf": "^7.1.2", + "react-resize-detector": "^7.1.2", + "react-router-dom": "^5.2.1", + "react-split-pane": "^0.1.92", + "react-table": "^7.1.0", + "react-timeago": "^4.4.0", + "rehype-external-links": "^2.0.1", + "rehype-video": "^1.2.2", + "restful-react": "15.6.0", + "webpack-retry-chunk-load-plugin": "^3.1.0", + "yaml": "^1.10.0", + "yup": "^0.29.1" + }, + "devDependencies": { + "@testing-library/jest-dom": "^5.12.0", + "@testing-library/react": "^10.0.3", + "@testing-library/react-hooks": "5", + "@types/classnames": "^2.2.10", + "@types/jest": "^26.0.15", + "@types/lodash-es": "^4.17.3", + "@types/masonry-layout": "^4.2.1", + "@types/mustache": "^4.0.1", + "@types/node": "^16.4.10", + "@types/path-to-regexp": "^1.7.0", + "@types/qs": "^6.9.4", + "@types/react": "^17.0.3", + "@types/react-dom": "^17.0.3", + "@types/react-router-dom": "^5.2.1", + "@types/react-table": "^7.0.18", + "@types/react-timeago": "^4.1.1", + "@types/testing-library__react-hooks": "^3.2.0", + "@types/yup": "^0.29.0", + "@typescript-eslint/eslint-plugin": "^5.33.1", + "@typescript-eslint/parser": "^5.33.1", + "case": "^1.6.3", + "css-loader": "^6.3.0", + "dotenv": "^10.0.0", + "eslint": "^7.27.0", + "eslint-config-prettier": "^8.3.0", + "eslint-import-resolver-typescript": "^2.4.0", + "eslint-plugin-import": "^2.23.3", + "eslint-plugin-jest": "^24.3.6", + "eslint-plugin-react": "^7.23.2", + "eslint-plugin-react-hooks": "^4.2.0", + "fast-json-stable-stringify": "^2.1.0", + "file-loader": "^6.2.0", + "fork-ts-checker-webpack-plugin": "^6.2.1", + "glob": "^7.1.6", + "html-webpack-plugin": "^5.3.1", + "identity-obj-proxy": "^3.0.0", + "jest": "^26.2.0", + "js-yaml": "^4.1.0", + "lodash": "^4.17.21", + "mini-css-extract-plugin": "^2.4.2", + "mustache": "^4.0.1", + "npm-run-all": "^4.1.5", + "path-to-regexp": "^6.1.0", + "prettier": "^2.3.2", + "raw-loader": "^4.0.2", + "react-test-renderer": "^17.0.2", + "sass": "^1.32.8", + "sass-loader": "^12.1.0", + "style-loader": "^3.3.0", + "ts-jest": "^26.5.5", + "ts-loader": "^9.2.6", + "tsconfig-paths-webpack-plugin": "^3.5.1", + "typed-scss-modules": "^7.1.4", + "typescript": "^4.7.4", + "url-loader": "^4.1.1", + "webpack": "^5.58.0", + "webpack-cli": "^5.1.4", + "webpack-dev-server": "^4.15.1", + "yaml-loader": "^0.6.0" + }, + "resolutions": { + "canvas": "link:./node_modules/.cache/null" + 
}, + "engines": { + "node": ">=14.16.0" + } +} diff --git a/web/restful-react.config.js b/web/restful-react.config.js new file mode 100644 index 0000000000..6462fb7b3f --- /dev/null +++ b/web/restful-react.config.js @@ -0,0 +1,32 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Please match the config key to the directory under services. + * This is required for the transform to work + */ +const customGenerator = require('./scripts/swagger-custom-generator.js') + +module.exports = { + code: { + output: 'src/services/code/index.tsx', + file: 'src/services/code/swagger.yaml', + customImport: `import { getConfig } from "../config";`, + customProps: { + base: `{getConfig("code/api/v1")}` + } + } +} diff --git a/web/scripts/clean-css-types.js b/web/scripts/clean-css-types.js new file mode 100644 index 0000000000..4ababbe3f0 --- /dev/null +++ b/web/scripts/clean-css-types.js @@ -0,0 +1,55 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* eslint-disable @typescript-eslint/no-var-requires, no-console */ + +/** + * Since all the ".css.d.ts" files are generated automatically from webpack, developers might not be + * aware of their existence. + * + * Example: "MyAwesomeComponent.css" file will have a "MyAwesomeComponent.css.d.ts" file + * + * When a ".css" file is deleted, the corresponding ".css.d.ts" file must be deleted too, but this is + * hard to do in a fast-paced development environment. + * + * The rationale here is that, since these files are generated automatically, they must be cleaned + * automatically too. + * + * How do we do it?
+ * We glob for all the ".css.d.ts" files and check if each has a corresponding ".css" file + * If it doesn't, we delete that ".css.d.ts" file + */ + +const fs = require('fs'); +const glob = require('glob'); + +const files = glob.sync('src/**/*.css.d.ts'); +console.log(`Found ${files.length} '.css.d.ts' files`); + +let i = 0; + +files.forEach(file => { + // for every '.css' there will be a corresponding '.css.d.ts' file and vice versa + const cssFile = file.replace('.d.ts', ''); + + if (!fs.existsSync(cssFile)) { + console.log(`Deleting "${file}" because corresponding "${cssFile}" does not exist`); + fs.unlinkSync(file); + i++; + } +}); + +console.log(`Deleted total of ${i} '.css.d.ts' files`); diff --git a/web/scripts/eslint-rules/duplicate-data-tooltip-id.js b/web/scripts/eslint-rules/duplicate-data-tooltip-id.js new file mode 100644 index 0000000000..70f7b9c721 --- /dev/null +++ b/web/scripts/eslint-rules/duplicate-data-tooltip-id.js @@ -0,0 +1,43 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +const { get } = require('lodash') +const toolTipValuesMap = {} +module.exports = { + meta: { + docs: { + description: `Give warning for duplicate tooltip ids` + } + }, + + create: function (context) { + return { + JSXAttribute(node) { + if (get(node, 'name.name') === 'data-tooltip-id' && get(node, 'value.type') === 'Literal') { + if (toolTipValuesMap[get(node, 'value.value')]) { + return context.report({ + node, + message: 'Duplicate tooltip id' + }) + } else { + toolTipValuesMap[get(node, 'value.value')] = true + } + } + return null + } + } + } +} diff --git a/web/scripts/eslint-rules/jest-no-mock.js b/web/scripts/eslint-rules/jest-no-mock.js new file mode 100644 index 0000000000..3d003f77c6 --- /dev/null +++ b/web/scripts/eslint-rules/jest-no-mock.js @@ -0,0 +1,59 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +const { get } = require('lodash') + +module.exports = { + meta: { + schema: [ + { + type: 'object', + properties: { + module: { + type: 'object' + } + }, + additionalProperties: false + } + ], + docs: { + description: `Restrict some properties from being mocked in jest` + } + }, + + create: function (context) { + return { + CallExpression(node) { + const moduleList = context.options[0].module + if ( + get(node, 'callee.type') === 'MemberExpression' && + get(node, 'callee.object.type') === 'Identifier' && + get(node, 'callee.object.name') === 'jest' && + get(node, 'callee.property.name') === 'mock' && + get(node, 'arguments[0].type') === 'Literal' && + moduleList.hasOwnProperty(get(node, 'arguments[0].value')) + ) { + const errorMessage = moduleList[get(node, 'arguments[0].value')] + return context.report({ + node, + message: errorMessage + }) + } + return null + } + } + } +} diff --git a/web/scripts/eslint-rules/no-document-body-snapshot.js b/web/scripts/eslint-rules/no-document-body-snapshot.js new file mode 100644 index 0000000000..6b24e14f97 --- /dev/null +++ b/web/scripts/eslint-rules/no-document-body-snapshot.js @@ -0,0 +1,44 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +const { get } = require('lodash') + +module.exports = { + meta: { + docs: { + description: `Give warning for statements 'expect(document.body).toMatchSnapshot()'` + } + }, + + create: function (context) { + return { + CallExpression(node) { + if ( + get(node, 'callee.object.callee.name') === 'expect' && + get(node, 'callee.object.arguments[0].object.name') === 'document' && + get(node, 'callee.object.arguments[0].property.name') === 'body' && + get(node, 'callee.property.name') === 'toMatchSnapshot' + ) { + return context.report({ + node, + message: 'document.body match snapshot not allowed' + }) + } + return null + } + } + } +} diff --git a/web/scripts/jest/file-mock.js b/web/scripts/jest/file-mock.js new file mode 100644 index 0000000000..97fd848f65 --- /dev/null +++ b/web/scripts/jest/file-mock.js @@ -0,0 +1,17 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +module.exports = 'test-file-stub' diff --git a/web/scripts/jest/gql-loader.js b/web/scripts/jest/gql-loader.js new file mode 100644 index 0000000000..51ea195aa5 --- /dev/null +++ b/web/scripts/jest/gql-loader.js @@ -0,0 +1,26 @@ +/* + * Copyright 2023 Harness, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +module.exports = { + process(src) { + return ( + 'module.exports = ' + + JSON.stringify(src) + .replace(/\u2028/g, '\\u2028') + .replace(/\u2029/g, '\\u2029') + ) + } +} diff --git a/web/scripts/jest/setup-file.js b/web/scripts/jest/setup-file.js new file mode 100644 index 0000000000..55edb159a3 --- /dev/null +++ b/web/scripts/jest/setup-file.js @@ -0,0 +1,59 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import '@testing-library/jest-dom' +import { setAutoFreeze, enableMapSet } from 'immer' +import { noop } from 'lodash-es' + +// set up Immer +setAutoFreeze(false) +enableMapSet() + +process.env.TZ = 'UTC' + +document.createRange = () => ({ + setStart: () => {}, + setEnd: () => {}, + commonAncestorContainer: { + nodeName: 'BODY', + ownerDocument: document, + }, +}) +window.HTMLElement.prototype.scrollIntoView = jest.fn() +window.scrollTo = jest.fn() + +window.fetch = jest.fn((url, options) => { + fail(`A fetch is being made to url '${url}' with options: +${JSON.stringify(options, null, 2)} +Please mock this call.`) + throw new Error() +}) + +Object.defineProperty(window, 'matchMedia', { + writable: true, + value: jest.fn().mockImplementation((query) => ({ + matches: false, + media: query, + onchange: null, + addListener: jest.fn(), // Deprecated + removeListener: jest.fn(), // Deprecated + addEventListener: jest.fn(), + removeEventListener: jest.fn(), + dispatchEvent: jest.fn(), + })), +}) + +jest.mock('react-timeago', () => () => 'dummy date') diff --git a/web/scripts/jest/yaml-transform.js b/web/scripts/jest/yaml-transform.js new file mode 100644 index 0000000000..78c5170bce --- /dev/null +++ b/web/scripts/jest/yaml-transform.js @@ -0,0 +1,25 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +const yaml = require('yaml') + +module.exports = { + process(src) { + const json = yaml.parse(src) + + return { code: `module.exports = ${JSON.stringify(json)}` } + } +} diff --git a/web/scripts/strings/generateTypes.cjs b/web/scripts/strings/generateTypes.cjs new file mode 100644 index 0000000000..aff1297c9e --- /dev/null +++ b/web/scripts/strings/generateTypes.cjs @@ -0,0 +1,79 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +const path = require('path') +const fs = require('fs') + +const yaml = require('yaml') +const _ = require('lodash') +const glob = require('glob') + +const runPrettier = require('../utils/runPrettier.cjs') + +function flattenKeys(data, parentPath = []) { + const keys = [] + + _.keys(data).forEach(key => { + const value = data[key] + const newPath = [...parentPath, key] + + if (Array.isArray(value)) { + throw new TypeError(`Array is not supported in strings.yaml\nPath: "${newPath.join('.')}"`) + } + + if (_.isPlainObject(data[key])) { + keys.push(...flattenKeys(data[key], [...parentPath, key])) + } else { + keys.push([...parentPath, key].join('.')) + } + }) + + keys.sort() + + return keys +} + +async function generateTypes() { + const i18nContent = await fs.promises.readFile(path.resolve(process.cwd(), `src/i18n/strings.en.yaml`), 'utf8') + + const allData = [ + { + moduleRef: null, + keys: flattenKeys(yaml.parse(i18nContent)) + } + ] + + let content = ` +/** + * This file is auto-generated. Please do not modify this file manually. + * Use the command \`yarn strings\` to regenerate this file. + */ +export interface StringsMap {` + + allData + .flatMap(({ keys }) => keys) + .forEach(key => { + content += `\n '${key}': string` + }) + + content += `\n}` + + content = await runPrettier(content, 'typescript') + + await fs.promises.writeFile(path.resolve(process.cwd(), 'src/framework/strings/stringTypes.ts'), content, 'utf8') +} + +module.exports = generateTypes diff --git a/web/scripts/strings/generateTypesCli.mjs b/web/scripts/strings/generateTypesCli.mjs new file mode 100644 index 0000000000..6999296141 --- /dev/null +++ b/web/scripts/strings/generateTypesCli.mjs @@ -0,0 +1,21 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import generateTypes from './generateTypes.cjs' + +await generateTypes() + +console.log('✅ Generated type for string files succesfully!') diff --git a/web/scripts/swagger-custom-generator.js b/web/scripts/swagger-custom-generator.js new file mode 100644 index 0000000000..c5db474ad5 --- /dev/null +++ b/web/scripts/swagger-custom-generator.js @@ -0,0 +1,37 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +const { camel } = require("case"); + +module.exports = ({ componentName, verb, route, description, genericsTypes, paramsInPath, paramsTypes }, basePath) => { + const propsType = type => + `${type}UsingFetchProps<${genericsTypes}>${paramsInPath.length ? ` & {${paramsTypes}}` : ""}`; + + if (verb === "get") { + return `${description}export const ${camel(componentName)}Promise = (${ + paramsInPath.length ? `{${paramsInPath.join(", ")}, ...props}` : "props" + }: ${propsType( + "Get", + )}, signal?: RequestInit["signal"]) => getUsingFetch<${genericsTypes}>(${basePath}, \`${route}\`, props, signal);\n\n` + } + else { + return `${description}export const ${camel(componentName)}Promise = (${ + paramsInPath.length ? `{${paramsInPath.join(", ")}, ...props}` : "props" + }: ${propsType( + "Mutate", + )}, signal?: RequestInit["signal"]) => mutateUsingFetch<${genericsTypes}>("${verb.toUpperCase()}", ${basePath}, \`${route}\`, props, signal);\n\n`; + } +} diff --git a/web/scripts/swagger-transform.js b/web/scripts/swagger-transform.js new file mode 100644 index 0000000000..8857ea0b54 --- /dev/null +++ b/web/scripts/swagger-transform.js @@ -0,0 +1,61 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
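// Illustrative sketch of the custom generator's output above for a single GET operation.
// The operation name, route, and generic types are made up; `basePath` stands for whatever
// expression the generator receives as its second argument.
// For componentName = 'listRepos', verb = 'get', route = '/api/v1/repos' and
// genericsTypes = 'TypesRepository[], unknown, void, void', it emits roughly:

export const listReposPromise = (
  props: GetUsingFetchProps<TypesRepository[], unknown, void, void>,
  signal?: RequestInit['signal']
) => getUsingFetch<TypesRepository[], unknown, void, void>(basePath, `/api/v1/repos`, props, signal)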
+ */ + +const fs = require('fs') +const path = require('path') +const _ = require('lodash') +const yaml = require('js-yaml') +const stringify = require('fast-json-stable-stringify') + +module.exports = inputSchema => { + const argv = process.argv.slice(2) + const config = argv[0] + + if (config) { + const overridesFile = path.join('src/services', config, 'overrides.yaml') + const transformFile = path.join('src/services', config, 'transform.js') + + let paths = inputSchema.paths + + if (fs.existsSync(overridesFile)) { + const data = fs.readFileSync(overridesFile, 'utf8') + const { allowpaths, operationIdOverrides } = yaml.safeLoad(data) + + if (!allowpaths.includes('*')) { + paths = _.pick(paths, ...allowpaths) + } + + _.forIn(operationIdOverrides, (value, key) => { + const [path, method] = key.split('.') + + if (path && method && _.has(paths, path) && _.has(paths[path], method)) { + _.set(paths, [path, method, 'operationId'], value) + } + }) + } + + inputSchema.paths = paths + + if (fs.existsSync(transformFile)) { + const transform = require(path.resolve(process.cwd(), transformFile)) + + inputSchema = transform(inputSchema) + } + } + + // stringify and parse json to get a stable object + return JSON.parse(stringify(inputSchema)) +} diff --git a/web/scripts/utils/runPrettier.cjs b/web/scripts/utils/runPrettier.cjs new file mode 100644 index 0000000000..93d817e61f --- /dev/null +++ b/web/scripts/utils/runPrettier.cjs @@ -0,0 +1,33 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +const fs = require('fs') +const path = require('path') + +const prettier = require('prettier') + +/** + * Run prettier on given content using the specified parser + * @param content {String} + * @param parser {String} + */ +async function runPrettier(content, parser) { + const prettierConfig = await prettier.resolveConfig(process.cwd()) + + return prettier.format(content, { ...prettierConfig, parser }) +} + +module.exports = runPrettier diff --git a/web/scripts/webpack/GenerateStringTypesPlugin.js b/web/scripts/webpack/GenerateStringTypesPlugin.js new file mode 100644 index 0000000000..0e7d540fa6 --- /dev/null +++ b/web/scripts/webpack/GenerateStringTypesPlugin.js @@ -0,0 +1,34 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
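// Illustrative sketch of the per-service overrides.yaml that the swagger transform above
// understands (no such file is included in this hunk; the paths and names are made up):
//
//   allowpaths:
//     - '*'                                   # keep every path, or list specific ones
//   operationIdOverrides:
//     /api/v1/repos/{repo_ref}.get: getRepo   # "<path>.<method>: <new operationId>"
//
// When allowpaths contains anything other than '*', inputSchema.paths is reduced to the
// listed paths; each override then rewrites paths[path][method].operationId before code
// generation runs.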
+ */ + +const generateStringTypes = require('../strings/generateTypes.cjs') + +class GenerateStringTypesPlugin { + apply(compiler) { + compiler.hooks.emit.tapAsync('GenerateStringTypesPlugin', (compilation, callback) => { + try { + generateStringTypes().then( + () => callback(), + e => callback(e) + ) + } catch (e) { + callback(e) + } + }) + } +} + +module.exports.GenerateStringTypesPlugin = GenerateStringTypesPlugin diff --git a/web/src/App.module.scss b/web/src/App.module.scss new file mode 100644 index 0000000000..164fe3b1df --- /dev/null +++ b/web/src/App.module.scss @@ -0,0 +1,39 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +@import 'src/utils/vars'; + +.main { + @include vars; + + &.fullPage { + --font-family: Inter, sans-serif; + --font-family-mono: Roboto Mono, monospace; + + height: var(--page-height); + } + + :global { + div[data-testid='page-body'] > div[data-testid='page-error'] { + height: 70vh !important; + } + + .PageBody--pageBody { + min-height: calc(var(--page-height) - var(--page-header-height, 64px)); + overflow: visible; + } + } +} diff --git a/web/src/App.module.scss.d.ts b/web/src/App.module.scss.d.ts new file mode 100644 index 0000000000..c4e4524b97 --- /dev/null +++ b/web/src/App.module.scss.d.ts @@ -0,0 +1,20 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* eslint-disable */ +// This is an auto-generated file +export declare const fullPage: string +export declare const main: string diff --git a/web/src/App.tsx b/web/src/App.tsx new file mode 100644 index 0000000000..8e40ba5af5 --- /dev/null +++ b/web/src/App.tsx @@ -0,0 +1,118 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
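// Illustrative sketch of how the webpack plugin above is presumably registered; the webpack
// config itself is not part of this hunk, so the file location and surrounding options are
// assumptions.
const { GenerateStringTypesPlugin } = require('./scripts/webpack/GenerateStringTypesPlugin')

module.exports = {
  // ...entry/output/module configuration...
  plugins: [
    // regenerates src/framework/strings/stringTypes.ts on every emit
    new GenerateStringTypesPlugin()
  ]
}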
+ */ + +import React, { useEffect, useState, useCallback, useMemo } from 'react' +import { RestfulProvider } from 'restful-react' +import { IconoirProvider } from 'iconoir-react' +import cx from 'classnames' +import { Container } from '@harnessio/uicore' +import { FocusStyleManager } from '@blueprintjs/core' +import AppErrorBoundary from 'framework/AppErrorBoundary/AppErrorBoundary' +import { AppContextProvider, defaultCurrentUser } from 'AppContext' +import type { AppProps } from 'AppProps' +import { buildResfulReactRequestOptions, handle401 } from 'AppUtils' +import { RouteDestinations } from 'RouteDestinations' +import { routes as _routes } from 'RouteDefinitions' +import { getConfig } from 'services/config' +import { ModalProvider } from 'hooks/useModalHook' +import { languageLoader } from './framework/strings/languageLoader' +import type { LanguageRecord } from './framework/strings/languageLoader' +import { StringsContextProvider } from './framework/strings/StringsContextProvider' +import 'highlight.js/styles/github.css' +import 'diff2html/bundles/css/diff2html.min.css' +import css from './App.module.scss' + +FocusStyleManager.onlyShowFocusOnTabs() + +const App: React.FC = React.memo(function App({ + standalone = false, + space = '', + routes = _routes, + lang = 'en', + on401 = handle401, + children, + hooks, + currentUserProfileURL = '' +}: AppProps) { + const [strings, setStrings] = useState() + const getRequestOptions = useCallback( + (): Partial => buildResfulReactRequestOptions(hooks?.useGetToken?.() || ''), + [hooks] + ) + const routingId = useMemo(() => (standalone ? '' : space.split('/').shift() || ''), [standalone, space]) + const queryParams = useMemo(() => (!standalone ? { routingId } : {}), [standalone, routingId]) + + useEffect(() => { + languageLoader(lang).then(setStrings) + }, [lang, setStrings]) + + const Wrapper: React.FC<{ fullPage: boolean }> = useCallback( + props => { + return strings ? ( + + + + { + if (!response.ok && response.status === 401) { + on401() + } + }}> + + + {props.children ? props.children : } + + + + + + + ) : null + }, + [strings] // eslint-disable-line react-hooks/exhaustive-deps + ) + + useEffect(() => { + AppWrapper = function _AppWrapper({ children: _children }) { + return {_children} + } + }, [Wrapper]) + + return {children} +}) + +export let AppWrapper: React.FC = () => +export default App diff --git a/web/src/AppContext.tsx b/web/src/AppContext.tsx new file mode 100644 index 0000000000..a373f9a712 --- /dev/null +++ b/web/src/AppContext.tsx @@ -0,0 +1,83 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import React, { useState, useContext, useEffect, useMemo } from 'react' +import { matchPath } from 'react-router-dom' +import { noop } from 'lodash-es' +import { useGet } from 'restful-react' +import type { AppProps } from 'AppProps' +import { routes } from 'RouteDefinitions' +import type { TypesUser } from 'services/code' + +interface AppContextProps extends AppProps { + setAppContext: (value: Partial) => void +} + +export const defaultCurrentUser: Required = { + admin: false, + blocked: false, + created: 0, + updated: 0, + display_name: '', + email: '', + uid: '' +} + +const AppContext = React.createContext({ + standalone: true, + setAppContext: noop, + routes, + hooks: {}, + currentUser: defaultCurrentUser, + currentUserProfileURL: '', + routingId: '' +}) + +export const AppContextProvider: React.FC<{ value: AppProps }> = React.memo(function AppContextProvider({ + value: initialValue, + children +}) { + const lazy = useMemo( + () => initialValue.standalone && !!matchPath(location.pathname, { path: '/(signin|register)' }), + [initialValue.standalone] + ) + const { data: currentUser = defaultCurrentUser } = useGet({ + path: '/api/v1/user', + lazy + }) + const [appStates, setAppStates] = useState(initialValue) + + useEffect(() => { + if (initialValue.space && initialValue.space !== appStates.space) { + setAppStates({ ...appStates, ...initialValue }) + } + }, [initialValue, appStates]) + + return ( + , + setAppContext: props => { + setAppStates({ ...appStates, ...props }) + } + }}> + {children} + + ) +}) + +export const useAppContext: () => AppContextProps = () => useContext(AppContext) diff --git a/web/src/AppProps.ts b/web/src/AppProps.ts new file mode 100644 index 0000000000..9c43128072 --- /dev/null +++ b/web/src/AppProps.ts @@ -0,0 +1,71 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import type React from 'react' +import type { CODERoutes } from 'RouteDefinitions' +import type { TypesUser } from 'services/code' +import type { LangLocale } from './framework/strings/languageLoader' + +/** + * AppProps defines an interface for host (parent) and + * child (micro-frontend) apps to talk to each other. It allows behaviors + * of the child app to be customized from the parent app. + * + * Areas of customization: + * - API token + * - Active user + * - Active locale (i18n) + * - Global error handling (like 401) + * - etc... + * + * Under standalone mode, the micro-frontend app uses default + * implementation of the interface in AppUtils.ts. + * + * This interface is published to allow parent to do type checking. + */ +export interface AppProps { + /** Flag to tell if App is mounted as a standalone app */ + standalone: boolean + + /** Harness routingId */ + routingId: string + + /** App children. 
When provided, children is a remote view which will be mounted under App contexts */ + children?: React.ReactNode + + /** Active space when app is embedded */ + space?: string + + /** Routing utlis (used to generate app specific URLs) */ + routes: CODERoutes + + /** Language to use in the app, default is 'en' */ + lang?: LangLocale + + /** 401 handler. Used in parent app to override 401 handling from child app */ + on401?: () => void + + /** React Hooks that Harness Platform passes down. Note: Pass only hooks that your app need */ + hooks: Partial<{ + useGetToken: Unknown + usePermissionTranslate: Unknown + useGenerateToken: Unknown + }> + + currentUser: Required + + currentUserProfileURL: string +} diff --git a/web/src/AppUtils.ts b/web/src/AppUtils.ts new file mode 100644 index 0000000000..eeae4f0af2 --- /dev/null +++ b/web/src/AppUtils.ts @@ -0,0 +1,53 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { routes } from 'RouteDefinitions' + +/** + * Handle 401 error from API. + * + * This function is called to handle 401 (unauthorized) API calls under standalone mode. + * In embedded mode, the parent app is responsible to pass its handler down. + * + * Mostly, the implementation of this function is just a redirection to signin page. + */ +export function handle401() { + const signinUrl = window.location.origin + routes.toSignIn() + + if (window.location.href !== signinUrl) { + window.location.href = signinUrl + } +} + +/** + * Build Restful React Request Options. + * + * This function is an extension to configure HTTP headers before passing to Restful + * React to make an API call. Customizations to fulfill the micro-frontend backend + * service happen here. + * + * @param token API token + * @returns Resful React RequestInit object. + */ +export function buildResfulReactRequestOptions(token?: string): Partial { + const headers: RequestInit['headers'] = {} + + if (token?.length) { + headers.Authorization = `Bearer ${token}` + } + + return { headers } +} diff --git a/web/src/RouteDefinitions.ts b/web/src/RouteDefinitions.ts new file mode 100644 index 0000000000..3cba950bb0 --- /dev/null +++ b/web/src/RouteDefinitions.ts @@ -0,0 +1,155 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
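// Quick illustration of the request-option helper above (token value made up):
buildResfulReactRequestOptions('abc123')
// -> { headers: { Authorization: 'Bearer abc123' } }

buildResfulReactRequestOptions()
// -> { headers: {} }  (no Authorization header is attached without a token)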
+ */ + +export interface CODEProps { + space?: string + repoName?: string + repoPath?: string + gitRef?: string + resourcePath?: string + commitRef?: string + branch?: string + tags?: string + diffRefs?: string + pullRequestId?: string + pullRequestSection?: string + webhookId?: string + pipeline?: string + execution?: string + commitSHA?: string + secret?: string +} + +export interface CODEQueryProps { + query?: string +} + +export const pathProps: Readonly, 'repoPath' | 'branch' | 'tags'>> = { + space: ':space*', + repoName: ':repoName', + gitRef: ':gitRef*', + resourcePath: ':resourcePath*', + commitRef: ':commitRef*', + diffRefs: ':diffRefs*', + pullRequestId: ':pullRequestId', + pullRequestSection: ':pullRequestSection', + webhookId: ':webhookId', + pipeline: ':pipeline', + execution: ':execution', + commitSHA: ':commitSHA', + secret: ':secret' +} + +export interface CODERoutes { + toSignIn: () => string + toRegister: () => string + + toCODEHome: () => string + + toCODESpaceAccessControl: (args: Required>) => string + toCODESpaceSettings: (args: Required>) => string + toCODEPipelines: (args: Required>) => string + toCODEPipelineEdit: (args: Required>) => string + toCODEPipelineSettings: (args: Required>) => string + toCODESecrets: (args: Required>) => string + + toCODEGlobalSettings: () => string + toCODEUsers: () => string + toCODEUserProfile: () => string + toCODEUserChangePassword: () => string + + toCODERepositories: (args: Required>) => string + toCODERepository: (args: RequiredField, 'repoPath'>) => string + toCODEFileEdit: (args: Required>) => string + toCODECommits: (args: Required>) => string + toCODECommit: (args: Required>) => string + toCODEPullRequests: (args: Required>) => string + toCODEPullRequest: ( + args: RequiredField< + Pick, + 'repoPath' | 'pullRequestId' + > + ) => string + toCODECompare: (args: Required>) => string + toCODEBranches: (args: Required>) => string + toCODETags: (args: Required>) => string + toCODEWebhooks: (args: Required>) => string + toCODEWebhookNew: (args: Required>) => string + toCODEWebhookDetails: (args: Required>) => string + toCODESettings: (args: Required>) => string + toCODESearch: (args: Required>) => string + toCODEExecutions: (args: Required>) => string + toCODEExecution: (args: Required>) => string + toCODESecret: (args: Required>) => string +} + +/** + * NOTE: NEVER IMPORT AND USE THIS ROUTES EXPORT DIRECTLY IN CODE. + * + * routes is used to created URLs in standalone version. Instead, use + * the `routes` from AppContext which is mapped to this export in standalone + * version or Harness Platform routes which is passed from Harness Platform UI. 
+ * + * Correct usage: const { routes } = useAppContext() + */ +export const routes: CODERoutes = { + toSignIn: (): string => '/signin', + toRegister: (): string => '/register', + + toCODEHome: () => `/`, + + toCODESpaceAccessControl: ({ space }) => `/access-control/${space}`, + toCODESpaceSettings: ({ space }) => `/settings/${space}`, + toCODEPipelines: ({ repoPath }) => `/${repoPath}/pipelines`, + toCODEPipelineEdit: ({ repoPath, pipeline }) => `/${repoPath}/pipelines/${pipeline}/edit`, + toCODEPipelineSettings: ({ repoPath, pipeline }) => `/${repoPath}/pipelines/${pipeline}/triggers`, + toCODESecrets: ({ space }) => `/secrets/${space}`, + + toCODEGlobalSettings: () => '/settings', + toCODEUsers: () => '/users', + toCODEUserProfile: () => '/profile', + toCODEUserChangePassword: () => '/change-password', + + toCODERepositories: ({ space }) => `/spaces/${space}`, + toCODERepository: ({ repoPath, gitRef, resourcePath }) => + `/${repoPath}${gitRef ? '/files/' + gitRef : ''}${resourcePath ? '/~/' + resourcePath : ''}`, + toCODEFileEdit: ({ + repoPath, + gitRef, + resourcePath + }: RequiredField, 'repoPath' | 'gitRef'>) => + `/${repoPath}/edit/${gitRef}/~/${resourcePath || ''}`, + + toCODECommits: ({ repoPath, commitRef }) => `/${repoPath}/commits/${commitRef}`, + toCODECommit: ({ repoPath, commitRef }) => `/${repoPath}/commit/${commitRef}`, + toCODEPullRequests: ({ repoPath }) => `/${repoPath}/pulls`, + toCODEPullRequest: ({ repoPath, pullRequestId, pullRequestSection, commitSHA }) => + `/${repoPath}/pulls/${pullRequestId}${pullRequestSection ? '/' + pullRequestSection : ''}${ + commitSHA ? '/' + commitSHA : '' + }`, + toCODECompare: ({ repoPath, diffRefs }) => `/${repoPath}/pulls/compare/${diffRefs}`, + toCODEBranches: ({ repoPath }) => `/${repoPath}/branches`, + toCODETags: ({ repoPath }) => `/${repoPath}/tags`, + toCODESettings: ({ repoPath }) => `/${repoPath}/settings`, + toCODESearch: ({ repoPath }) => `/${repoPath}/search`, + toCODEWebhooks: ({ repoPath }) => `/${repoPath}/webhooks`, + toCODEWebhookNew: ({ repoPath }) => `/${repoPath}/webhooks/new`, + toCODEWebhookDetails: ({ repoPath, webhookId }) => `/${repoPath}/webhook/${webhookId}`, + + toCODEExecutions: ({ repoPath, pipeline }) => `/${repoPath}/pipelines/${pipeline}`, + toCODEExecution: ({ repoPath, pipeline, execution }) => `/${repoPath}/pipelines/${pipeline}/execution/${execution}`, + toCODESecret: ({ space, secret }) => `/secrets/${space}/secret/${secret}` +} diff --git a/web/src/RouteDestinations.tsx b/web/src/RouteDestinations.tsx new file mode 100644 index 0000000000..143d8084eb --- /dev/null +++ b/web/src/RouteDestinations.tsx @@ -0,0 +1,333 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
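// Hypothetical usage, following the "correct usage" note above (the repo path is made up;
// useHistory comes from 'react-router-dom', useAppContext from 'AppContext'):
const { routes } = useAppContext()
const history = useHistory()

history.push(routes.toCODEPullRequests({ repoPath: 'my-space/my-repo' }))
// standalone mode -> '/my-space/my-repo/pulls'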
+ */ + +import React from 'react' +import { Route, Switch, BrowserRouter } from 'react-router-dom' +import { SignIn } from 'pages/SignIn/SignIn' +import { SignUp } from 'pages/SignUp/SignUp' +import Repository from 'pages/Repository/Repository' +import { routes, pathProps } from 'RouteDefinitions' +import RepositoriesListing from 'pages/RepositoriesListing/RepositoriesListing' +import PipelineList from 'pages/PipelineList/PipelineList' +import SecretList from 'pages/SecretList/SecretList' +import { LayoutWithSideNav, LayoutWithoutSideNav } from 'layouts/layout' +import RepositoryFileEdit from 'pages/RepositoryFileEdit/RepositoryFileEdit' +import RepositoryCommits from 'pages/RepositoryCommits/RepositoryCommits' +import RepositoryCommit from 'pages/RepositoryCommit/RepositoryCommit' +import RepositoryBranches from 'pages/RepositoryBranches/RepositoryBranches' +import RepositoryTags from 'pages/RepositoryTags/RepositoryTags' +import Compare from 'pages/Compare/Compare' +import PullRequest from 'pages/PullRequest/PullRequest' +import PullRequests from 'pages/PullRequests/PullRequests' +import WebhookNew from 'pages/WebhookNew/WebhookNew' +import WebhookDetails from 'pages/WebhookDetails/WebhookDetails' +import Webhooks from 'pages/Webhooks/Webhooks' +import RepositorySettings from 'pages/RepositorySettings/RepositorySettings' +import UsersListing from 'pages/UsersListing/UsersListing' +import Home from 'pages/Home/Home' +import UserProfile from 'pages/UserProfile/UserProfile' +import ChangePassword from 'pages/ChangePassword/ChangePassword' +import SpaceAccessControl from 'pages/SpaceAccessControl/SpaceAccessControl' +import SpaceSettings from 'pages/SpaceSettings/SpaceSettings' +import { useStrings } from 'framework/strings' +import ExecutionList from 'pages/ExecutionList/ExecutionList' +import Execution from 'pages/Execution/Execution' +import Secret from 'pages/Secret/Secret' +import Search from 'pages/Search/Search' +import AddUpdatePipeline from 'pages/AddUpdatePipeline/AddUpdatePipeline' +import { useAppContext } from 'AppContext' +import PipelineSettings from 'components/PipelineSettings/PipelineSettings' + +export const RouteDestinations: React.FC = React.memo(function RouteDestinations() { + const { getString } = useStrings() + const repoPath = `${pathProps.space}/${pathProps.repoName}` + const { standalone } = useAppContext() + + return ( + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + {standalone && ( + + + + + + )} + + {standalone && ( + + + + + + )} + + {standalone && ( + + + + + + )} + + {standalone && ( + + + + + + )} + + {standalone && ( + + + + + + )} + + {standalone && ( + + + + + + )} + + {standalone && ( + + + + + + )} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ) +}) diff --git a/web/src/bootstrap.scss b/web/src/bootstrap.scss new file mode 100644 index 0000000000..1d28b5644e --- /dev/null +++ b/web/src/bootstrap.scss @@ -0,0 +1,35 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
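// A sketch of how a single path is plausibly registered in RouteDestinations.tsx above,
// inferred from the imports and route helpers; not the verbatim source:
<Route path={routes.toCODERepository({ repoPath, gitRef: pathProps.gitRef, resourcePath: pathProps.resourcePath })}>
  <LayoutWithSideNav>
    <Repository />
  </LayoutWithSideNav>
</Route>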
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + NOTE: This file is loaded only in standalone mode. In embedded mode, + the shared styles are loaded by Harness Platform already. +*/ +@import '~normalize.css'; +@import '~@blueprintjs/core/lib/css/blueprint.css'; +@import '~@blueprintjs/datetime/lib/css/blueprint-datetime.css'; +@import '~@harnessio/design-system/dist/style.css'; +@import '~@harnessio/uicore/dist/index.css'; + +html, +body, +#react-root { + margin: 0; + padding: 0; + width: 100%; + height: 100%; + background-color: var(--white); +} diff --git a/web/src/bootstrap.scss.d.ts b/web/src/bootstrap.scss.d.ts new file mode 100644 index 0000000000..12fd6994af --- /dev/null +++ b/web/src/bootstrap.scss.d.ts @@ -0,0 +1,19 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* eslint-disable */ +// This is an auto-generated file +export declare const reactRoot: string diff --git a/web/src/bootstrap.tsx b/web/src/bootstrap.tsx new file mode 100644 index 0000000000..f410cedf29 --- /dev/null +++ b/web/src/bootstrap.tsx @@ -0,0 +1,43 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import React from 'react' +import ReactDOM from 'react-dom' +import { noop } from 'lodash-es' +import { routes } from 'RouteDefinitions' +import { defaultCurrentUser } from 'AppContext' +import App from './App' +import './bootstrap.scss' + +// This flag is used in services/config.ts to customize API path when app is run +// in multiple modes (standalone vs. embedded). +// Also being used in when generating proper URLs inside the app. +// In standalone mode, we don't need `code/` prefix in API URIs. +window.STRIP_CODE_PREFIX = true + +ReactDOM.render( + , + document.getElementById('react-root') +) diff --git a/web/src/components/AuthLayout/AuthLayout.module.scss b/web/src/components/AuthLayout/AuthLayout.module.scss new file mode 100644 index 0000000000..29abff93bb --- /dev/null +++ b/web/src/components/AuthLayout/AuthLayout.module.scss @@ -0,0 +1,98 @@ +/* + * Copyright 2023 Harness, Inc. 
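// The standalone entry point above mounts the App with its defaults; a plausible shape of
// the ReactDOM.render call, inferred from the imports and the required AppProps (a sketch,
// not the verbatim source):
ReactDOM.render(
  <App
    standalone
    routingId=""
    routes={routes}
    hooks={{}}
    currentUser={defaultCurrentUser}
    currentUserProfileURL=""
  />,
  document.getElementById('react-root')
)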
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +.layout { + // Signup card widths + --min-width: 950px; + --max-width: 35%; + + display: grid; + grid-template-columns: 60% 40%; + grid-template-rows: 100vh; + + .cardColumn { + overflow-y: auto; + overflow-x: hidden; + } + + .card { + padding: 250px 125px; + overflow-x: hidden !important; + // width: var(--min-width); + margin: 0 auto; + height: 100%; + + .cardChildren { + height: 100%; + } + } + + .imageColumn { + overflow-y: hidden; + + background: var(--primary-10); + .image { + object-fit: contain; + height: 90%; + width: 90%; + } + .subtractContainer { + display: flex; + justify-content: flex-end; + margin-bottom: var(--spacing-xxxlarge); + margin-right: var(--spacing-xxxlarge); + } + .subtractImage { + float: right; + } + .harnessImage { + float: left; + } + .imageContainer { + padding: 10% 25%; + } + .gitnessContainer { + padding: 6% 22%; + } + + .overlayImage { + position: absolute; + top: 0; + left: 0; + z-index: 1; + } + } +} + +@media (max-width: 1280px) { + .layout { + display: flex; + justify-content: center; + width: 100vw; + } + + .imageColumn { + display: none; + } +} + +// Firefox has a bug where padding-bottom is ignored when overflow occurs +// https://bugzilla.mozilla.org/show_bug.cgi?id=748518 +@-moz-document url-prefix() { + .disclaimer { + margin-bottom: 85px !important; + } +} diff --git a/web/src/components/AuthLayout/AuthLayout.module.scss.d.ts b/web/src/components/AuthLayout/AuthLayout.module.scss.d.ts new file mode 100644 index 0000000000..94acecd768 --- /dev/null +++ b/web/src/components/AuthLayout/AuthLayout.module.scss.d.ts @@ -0,0 +1,31 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/* eslint-disable */ +// This is an auto-generated file +export declare const card: string +export declare const cardChildren: string +export declare const cardColumn: string +export declare const disclaimer: string +export declare const gitnessContainer: string +export declare const harnessImage: string +export declare const image: string +export declare const imageColumn: string +export declare const imageContainer: string +export declare const layout: string +export declare const overlayImage: string +export declare const subtractContainer: string +export declare const subtractImage: string diff --git a/web/src/components/AuthLayout/AuthLayout.tsx b/web/src/components/AuthLayout/AuthLayout.tsx new file mode 100644 index 0000000000..86f268e4af --- /dev/null +++ b/web/src/components/AuthLayout/AuthLayout.tsx @@ -0,0 +1,46 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import React from 'react' +import { Container, Layout } from '@harnessio/uicore' +import cx from 'classnames' +import { Images } from 'images' +import css from './AuthLayout.module.scss' + +const AuthLayout: React.FC> = props => { + return ( +
+
+ + + + + + + + + +
+
+
+ {props.children} +
+
+
+ ) +} + +export default AuthLayout diff --git a/web/src/components/BranchTagSelect/BranchTagSelect.module.scss b/web/src/components/BranchTagSelect/BranchTagSelect.module.scss new file mode 100644 index 0000000000..43a8fde1de --- /dev/null +++ b/web/src/components/BranchTagSelect/BranchTagSelect.module.scss @@ -0,0 +1,119 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +html[class=''] { + .button:focus { + --border: 1px solid var(--primary-7) !important; + } +} + +.branchContainer { + :global(> .bp3-button-text) { + display: flex !important; + > .branchSpan { + display: flex !important; + align-items: center !important; + padding-right: var(--spacing-tiny); + } + } +} + +.button { + --border: 1px solid var(--grey-200) !important; + --background-color-active: var(--white) !important; + --box-shadow: none !important; + white-space: nowrap !important; + + &:active, + &:hover, + &[class*='bp3-active'] { + --border: 1px solid var(--primary-7) !important; + } + + .prefix { + color: var(--grey-450) !important; + font-weight: normal; + padding-right: var(--spacing-xsmall); + } + + :global { + .bp3-button-text { + max-width: 250px; + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; + } + } +} + +.main { + .input { + margin-bottom: 0 !important; + + span[data-icon], + span[icon] { + margin-top: 10px !important; + } + } + + .tabContainer { + margin-top: 0 !important; + + [role='tablist'] { + background-color: var(--white) !important; + padding-left: 0; + padding-right: 0; + border-bottom: 1px solid var(--grey-200); + } + + [role='tabpanel'] { + margin-top: 0; + } + + &.branchesOnly { + [role='tablist'] { + display: none; + } + } + } +} + +.popover { + > div[class*='popover-arrow'] { + display: none; + } + + .listContainer { + min-height: 50px; + max-height: 200px; + overflow-y: auto; + + :global { + a.bp3-menu-item:hover { + background: var(--primary-1) !important; + color: var(--grey-1000) !important; + } + + .bp3-menu-item { + max-width: 600px; + } + } + } + + .newBtnText { + font-weight: normal; + } +} diff --git a/web/src/components/BranchTagSelect/BranchTagSelect.module.scss.d.ts b/web/src/components/BranchTagSelect/BranchTagSelect.module.scss.d.ts new file mode 100644 index 0000000000..9dd8e110f0 --- /dev/null +++ b/web/src/components/BranchTagSelect/BranchTagSelect.module.scss.d.ts @@ -0,0 +1,29 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/* eslint-disable */ +// This is an auto-generated file +export declare const branchContainer: string +export declare const branchesOnly: string +export declare const branchSpan: string +export declare const button: string +export declare const input: string +export declare const listContainer: string +export declare const main: string +export declare const newBtnText: string +export declare const popover: string +export declare const prefix: string +export declare const tabContainer: string diff --git a/web/src/components/BranchTagSelect/BranchTagSelect.tsx b/web/src/components/BranchTagSelect/BranchTagSelect.tsx new file mode 100644 index 0000000000..7607d81c67 --- /dev/null +++ b/web/src/components/BranchTagSelect/BranchTagSelect.tsx @@ -0,0 +1,323 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import React, { useEffect, useMemo, useRef, useState } from 'react' +import { Classes, Icon as BPIcon, Menu, MenuItem, PopoverPosition } from '@blueprintjs/core' +import { Button, ButtonProps, Container, Layout, ButtonVariation, TextInput, Tabs, Text } from '@harnessio/uicore' +import { FontVariation } from '@harnessio/design-system' +import { Link } from 'react-router-dom' +import cx from 'classnames' +import { useGet } from 'restful-react' +import { noop } from 'lodash-es' +import { String, useStrings } from 'framework/strings' +import { getErrorMessage, LIST_FETCHING_LIMIT } from 'utils/Utils' +import { useAppContext } from 'AppContext' +import { CodeIcon, GitInfoProps, GitRefType, isRefATag, REFS_TAGS_PREFIX } from 'utils/GitUtils' +import Branches from '../../icons/Branches.svg' +import css from './BranchTagSelect.module.scss' + +export interface BranchTagSelectProps extends Omit, Pick { + gitRef: string + onSelect: (ref: string, type: GitRefType) => void + onCreateBranch?: (newBranch?: string) => void + disableBranchCreation?: boolean + disableViewAllBranches?: boolean + forBranchesOnly?: boolean + labelPrefix?: string + placeHolder?: string + popoverClassname?: string +} + +export const BranchTagSelect: React.FC = ({ + repoMetadata, + gitRef, + onSelect, + onCreateBranch = noop, + disableBranchCreation, + disableViewAllBranches, + forBranchesOnly, + labelPrefix, + placeHolder, + className, + popoverClassname, + ...props +}) => { + const [query, onQuery] = useState('') + const [gitRefType, setGitRefType] = useState(isRefATag(gitRef) ? GitRefType.TAG : GitRefType.BRANCH) + const text = gitRef.replace(REFS_TAGS_PREFIX, '') + + return ( + + ) +} diff --git a/web/src/components/Changes/CommitRangeDropdown/CommitRangeDropdown.tsx b/web/src/components/Changes/CommitRangeDropdown/CommitRangeDropdown.tsx new file mode 100644 index 0000000000..77ac10bf16 --- /dev/null +++ b/web/src/components/Changes/CommitRangeDropdown/CommitRangeDropdown.tsx @@ -0,0 +1,168 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import React, { useEffect, useMemo } from 'react' +import { Divider, PopoverInteractionKind, Position } from '@blueprintjs/core' +import { Checkbox, Container, FlexExpander, Layout, Popover, Text } from '@harnessio/uicore' +import { Color, FontVariation } from '@harnessio/design-system' +import ReactTimeago from 'react-timeago' + +import { useStrings } from 'framework/strings' +import type { TypesCommit } from 'services/code' + +import css from '../Changes.module.scss' + +type CommitRangeDropdownProps = { + allCommits: TypesCommit[] + selectedCommits: string[] + setSelectedCommits: React.Dispatch> +} + +const sortSelectedCommits = (selectedCommits: string[], sortedCommits: string[]) => { + return selectedCommits.sort((commitA, commitB) => { + const commitAIdx = sortedCommits.indexOf(commitA) + const commitBIdx = sortedCommits.indexOf(commitB) + + return commitBIdx - commitAIdx + }) +} + +function getBiggerSubarray(array: Array, index: number) { + if (index >= 0 && index < array.length) { + const subarray1 = array.slice(0, index) + const subarray2 = array.slice(index + 1) + + return subarray1.length > subarray2.length ? subarray1 : subarray2 + } else { + return [] + } +} + +const getCommitRange = (selectedCommits: string[], allCommitsSHA: string[]) => { + const sortedCommits = sortSelectedCommits(selectedCommits, allCommitsSHA) + const selectedCommitRange = allCommitsSHA + .slice(allCommitsSHA.indexOf(sortedCommits[sortedCommits.length - 1]), allCommitsSHA.indexOf(sortedCommits[0]) + 1) + .reverse() + + return selectedCommitRange +} + +const CommitRangeDropdown: React.FC = ({ + allCommits, + selectedCommits, + setSelectedCommits +}) => { + const { getString } = useStrings() + const allCommitsSHA = useMemo(() => allCommits.map(commit => commit.sha as string), [allCommits]) + + useEffect(() => { + if (selectedCommits.length && allCommitsSHA.length) { + setSelectedCommits(prevVal => getCommitRange(prevVal, allCommitsSHA)) + } + }, [allCommitsSHA, setSelectedCommits, selectedCommits.length]) + + const handleCheckboxClick = ( + event: React.MouseEvent, + selectedCommitSHA: string + ) => { + setSelectedCommits(current => { + // handle single commit clicked (either no shift held, or no commit selected yet) + if (!event.shiftKey || current.length == 0) { + return [selectedCommitSHA] + } + + // handle already selected commit clicked + if (current.includes(selectedCommitSHA)) { + const sortedCommits = sortSelectedCommits(current, allCommitsSHA) + const subArray = getBiggerSubarray(sortedCommits, sortedCommits.indexOf(selectedCommitSHA)) + + return subArray + } + + // clicked commit is outside of current range - extend it! + const extendedArray = getCommitRange([...current, selectedCommitSHA], allCommitsSHA) + + // NOTE: this CAN contain all commits - we let it through for consistent user experience. 
+ // This way, the user sees selected exactly what they clicked on (+ we don't have to handle single commit pr differently) + return extendedArray + }) + } + + const areAllCommitsSelected = !selectedCommits.length + + return ( + setSelectedCommits(selectedCommits)} + content={ + + {getString('allCommits')}} + checked={areAllCommitsSelected} + onClick={() => setSelectedCommits([])} + margin={{ bottom: 'small' }} + /> + + + {allCommits?.map((prCommit, index) => { + const isSelected = selectedCommits.includes(prCommit.sha || '') + + return ( + handleCheckboxClick(e, prCommit.sha as string)}> + handleCheckboxClick(e, prCommit.sha as string)} /> + + {`${allCommits.length - index} ${prCommit.title}`} + + + + + + + ) + })} + + + + {getString('selectRange')} + + + }> + + { + areAllCommitsSelected + ? getString('allCommits') + : `${selectedCommits.length} ${selectedCommits.length > 1 ? getString('commits') : getString('commit')}` + } + + + ) +} + +export default CommitRangeDropdown diff --git a/web/src/components/Changes/DiffViewConfiguration.tsx b/web/src/components/Changes/DiffViewConfiguration.tsx new file mode 100644 index 0000000000..9842fc4b57 --- /dev/null +++ b/web/src/components/Changes/DiffViewConfiguration.tsx @@ -0,0 +1,101 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import React from 'react' +import { Container, Layout, Text, FlexExpander } from '@harnessio/uicore' +import { FontVariation } from '@harnessio/design-system' +import { ButtonGroup, Button as BButton, Classes } from '@blueprintjs/core' +import cx from 'classnames' +import { useStrings } from 'framework/strings' +import { ButtonRoleProps } from 'utils/Utils' +import { ViewStyle } from 'components/DiffViewer/DiffViewerUtils' + +interface DiffViewConfigurationProps { + viewStyle: ViewStyle + lineBreaks: boolean + setViewStyle: (val: ViewStyle) => void + setLineBreaks: (val: boolean) => void +} + +export const DiffViewConfiguration: React.FC = ({ + viewStyle, + setViewStyle, + lineBreaks, + setLineBreaks +}) => { + const { getString } = useStrings() + + return ( + + + + + {getString('pr.diffView')} + + + { + setViewStyle(ViewStyle.SIDE_BY_SIDE) + window.scroll({ top: 0 }) + }}> + {getString('pr.split')} + + { + setViewStyle(ViewStyle.LINE_BY_LINE) + window.scroll({ top: 0 }) + }}> + {getString('pr.unified')} + + + + + + + {getString('lineBreaks')} + + + setLineBreaks(true)}> + {getString('on')} + + setLineBreaks(false)}> + {getString('off')} + + + + + +
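// Worked example of the range-selection helpers above (SHAs abbreviated and made up,
// assuming the API lists PR commits newest-first):
const allCommitsSHA = ['c5', 'c4', 'c3', 'c2', 'c1']

// Shift-clicking c4 and then c2 expands the selection to the full contiguous range:
getCommitRange(['c4', 'c2'], allCommitsSHA)
// -> ['c2', 'c3', 'c4']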
+ } + tooltipProps={{ interactionKind: 'click' }} + iconProps={{ size: 14, padding: { right: 3 } }} + rightIconProps={{ size: 13, padding: { left: 0 } }} + padding={{ left: 'small' }} + {...ButtonRoleProps} + /> + ) +} diff --git a/web/src/components/Changes/ReviewSplitButton/ReviewSplitButton.tsx b/web/src/components/Changes/ReviewSplitButton/ReviewSplitButton.tsx new file mode 100644 index 0000000000..600f690aa5 --- /dev/null +++ b/web/src/components/Changes/ReviewSplitButton/ReviewSplitButton.tsx @@ -0,0 +1,129 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { ButtonVariation, Container, SplitButton, useToaster, Text, Layout } from '@harnessio/uicore' +import { Icon, IconName } from '@harnessio/icons' +import { Color, FontVariation } from '@harnessio/design-system' +import { Menu, PopoverPosition } from '@blueprintjs/core' +import cx from 'classnames' +import { useMutate } from 'restful-react' +import React, { useCallback, useMemo } from 'react' +import { useStrings } from 'framework/strings' +import type { EnumPullReqReviewDecision, TypesPullReq } from 'services/code' +import type { GitInfoProps } from 'utils/GitUtils' +import { getErrorMessage } from 'utils/Utils' +import css from '../Changes.module.scss' + +interface PrReviewOption { + method: EnumPullReqReviewDecision | 'reject' + title: string + disabled?: boolean + icon: IconName + color: Color +} + +interface ReviewSplitButtonProps extends Pick { + shouldHide: boolean + pullRequestMetadata?: TypesPullReq + refreshPr: () => void + disabled?: boolean + refetchReviewers?: () => void +} +const ReviewSplitButton = (props: ReviewSplitButtonProps) => { + const { refetchReviewers, pullRequestMetadata, repoMetadata, shouldHide, refreshPr, disabled } = props + const { getString } = useStrings() + const { showError, showSuccess } = useToaster() + const prDecisionOptions: PrReviewOption[] = useMemo( + () => [ + { + method: 'approved', + title: getString('approve'), + icon: 'tick-circle' as IconName, + color: Color.GREEN_700 + }, + { + method: 'changereq', + title: getString('requestChanges'), + icon: 'error' as IconName, + color: Color.ORANGE_700 + } + ], + [getString] + ) + + const { mutate, loading } = useMutate({ + verb: 'POST', + path: `/api/v1/repos/${repoMetadata.path}/+/pullreq/${pullRequestMetadata?.number}/reviews` + }) + const submitReview = useCallback( + (decision: PrReviewOption) => { + mutate({ decision: decision.method, commit_sha: pullRequestMetadata?.source_sha }) + .then(() => { + showSuccess(getString(decision.method === 'approved' ? 
'pr.reviewSubmitted' : 'pr.requestSubmitted')) + refreshPr?.() + refetchReviewers?.() + }) + .catch(exception => showError(getErrorMessage(exception))) + }, + [mutate, showError, showSuccess, getString, refreshPr, pullRequestMetadata?.source_sha, refetchReviewers] + ) + return ( + + { + submitReview(prDecisionOptions[0]) + }}> + + + + {prDecisionOptions[1].title} + + + } + onClick={() => { + submitReview(prDecisionOptions[1]) + }} + /> + + + ) +} + +export default ReviewSplitButton diff --git a/web/src/components/CloneButtonTooltip/CloneButtonTooltip.module.scss b/web/src/components/CloneButtonTooltip/CloneButtonTooltip.module.scss new file mode 100644 index 0000000000..f8cf9cadb1 --- /dev/null +++ b/web/src/components/CloneButtonTooltip/CloneButtonTooltip.module.scss @@ -0,0 +1,48 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +.container { + .label { + font-size: 13px !important; + color: var(--black) !important; + } + + .layout { + height: 24px; + display: inline-flex; + justify-content: center; + align-items: center; + border: 1px solid var(--grey-200); + background-color: var(--grey-50) !important; + border-radius: 4px; + padding-left: var(--spacing-small) !important; + max-width: 300px; + .url { + width: 250px; + white-space: nowrap !important; + overflow: hidden; + text-overflow: ellipsis; + font-size: 13px !important; + } + + button#cloneCopyButton { + --button-height: 24px !important; + border-radius: 0 !important; + border-left: 1px solid var(--grey-200) !important; + margin-left: var(--spacing-small) !important; + } + } +} diff --git a/web/src/components/CloneButtonTooltip/CloneButtonTooltip.module.scss.d.ts b/web/src/components/CloneButtonTooltip/CloneButtonTooltip.module.scss.d.ts new file mode 100644 index 0000000000..2e46dea965 --- /dev/null +++ b/web/src/components/CloneButtonTooltip/CloneButtonTooltip.module.scss.d.ts @@ -0,0 +1,23 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
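// For reference, the "Approve" action above reduces to a single REST call; a hedged sketch
// of the equivalent request (repo path, PR number, SHA and auth handling are made up;
// restful-react supplies the real base configuration):
const repoPath = 'my-space/my-repo' // hypothetical repoMetadata.path
const prNumber = 42                 // hypothetical pullRequestMetadata.number

await fetch(`/api/v1/repos/${repoPath}/+/pullreq/${prNumber}/reviews`, {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ decision: 'approved', commit_sha: 'abc123def' })
})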
+ */ + +/* eslint-disable */ +// This is an auto-generated file +export declare const cloneCopyButton: string +export declare const container: string +export declare const label: string +export declare const layout: string +export declare const url: string diff --git a/web/src/components/CloneButtonTooltip/CloneButtonTooltip.tsx b/web/src/components/CloneButtonTooltip/CloneButtonTooltip.tsx new file mode 100644 index 0000000000..5f86abcc65 --- /dev/null +++ b/web/src/components/CloneButtonTooltip/CloneButtonTooltip.tsx @@ -0,0 +1,64 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import React, { useState } from 'react' +import { Button, ButtonVariation, Container, Layout, Text } from '@harnessio/uicore' +import { Color, FontVariation } from '@harnessio/design-system' +import { useStrings } from 'framework/strings' +import { CopyButton } from 'components/CopyButton/CopyButton' +import { CodeIcon } from 'utils/GitUtils' +import CloneCredentialDialog from 'components/CloneCredentialDialog/CloneCredentialDialog' +import css from './CloneButtonTooltip.module.scss' + +interface CloneButtonTooltipProps { + httpsURL: string +} + +export function CloneButtonTooltip({ httpsURL }: CloneButtonTooltipProps) { + const { getString } = useStrings() + const [flag, setFlag] = useState(false) + + return ( + + + {getString('cloneHTTPS')} + + {getString('generateCloneText')} + + + + + {httpsURL} + + + + + + + + + ) +} diff --git a/web/src/components/CloneCredentialDialog/CloneCredentialDialog.module.scss b/web/src/components/CloneCredentialDialog/CloneCredentialDialog.module.scss new file mode 100644 index 0000000000..36c20c1021 --- /dev/null +++ b/web/src/components/CloneCredentialDialog/CloneCredentialDialog.module.scss @@ -0,0 +1,41 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +.layout { + height: 33px; + display: inline-flex; + justify-content: center; + align-items: center; + border: 1px solid var(--grey-200); + background-color: var(--grey-50) !important; + border-radius: 4px; + padding-left: var(--spacing-small) !important; + max-width: 100%; + .url { + // width: 80%; + white-space: nowrap !important; + overflow: hidden; + text-overflow: ellipsis; + font-size: 13px !important; + } + + button#cloneCopyButton { + --button-height: 24px !important; + border-radius: 0 !important; + border-left: 1px solid var(--grey-200) !important; + margin-left: var(--spacing-small) !important; + } +} diff --git a/web/src/components/CloneCredentialDialog/CloneCredentialDialog.module.scss.d.ts b/web/src/components/CloneCredentialDialog/CloneCredentialDialog.module.scss.d.ts new file mode 100644 index 0000000000..7a96c29bf0 --- /dev/null +++ b/web/src/components/CloneCredentialDialog/CloneCredentialDialog.module.scss.d.ts @@ -0,0 +1,21 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* eslint-disable */ +// This is an auto-generated file +export declare const cloneCopyButton: string +export declare const layout: string +export declare const url: string diff --git a/web/src/components/CloneCredentialDialog/CloneCredentialDialog.tsx b/web/src/components/CloneCredentialDialog/CloneCredentialDialog.tsx new file mode 100644 index 0000000000..857c132af3 --- /dev/null +++ b/web/src/components/CloneCredentialDialog/CloneCredentialDialog.tsx @@ -0,0 +1,123 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import React, { useCallback, useEffect, useState } from 'react' +import { Button, ButtonVariation, Container, Dialog, FlexExpander, Layout, Text, useToaster } from '@harnessio/uicore' +import { FontVariation } from '@harnessio/design-system' +import { useMutate } from 'restful-react' +import { useHistory } from 'react-router-dom' +import { useStrings } from 'framework/strings' +import { CopyButton } from 'components/CopyButton/CopyButton' +import { CodeIcon } from 'utils/GitUtils' +import { useAppContext } from 'AppContext' +import { generateAlphaNumericHash } from 'utils/Utils' +import css from './CloneCredentialDialog.module.scss' + +interface CloneCredentialDialogProps { + setFlag: (val: boolean) => void + flag: boolean +} + +const CloneCredentialDialog = (props: CloneCredentialDialogProps) => { + const { setFlag, flag } = props + const history = useHistory() + const { getString } = useStrings() + const { hooks, currentUser, currentUserProfileURL, standalone, routes } = useAppContext() + const [token, setToken] = useState('') + const { showError } = useToaster() + const hash = generateAlphaNumericHash(6) + const { mutate } = useMutate({ path: '/api/v1/user/tokens', verb: 'POST' }) + const genToken = useCallback( + async (_props: { uid: string }) => { + const res = await mutate({ uid: _props.uid }) + try { + setToken(res?.access_token) + } catch { + showError(res?.data?.message || res?.message) + } + return res + }, + [mutate, showError] + ) + const tokenData = standalone ? false : hooks?.useGenerateToken?.(hash, currentUser?.uid, flag) + + useEffect(() => { + if (tokenData) { + if (tokenData && tokenData?.status !== 400) { + setToken(tokenData?.data) + } else if (tokenData?.status === 400 && flag) { + showError(tokenData?.data?.message || tokenData?.message) + } + } else if (!tokenData && standalone && flag) { + genToken({ uid: `code_token_${hash}` }) + } + }, [flag, tokenData, showError]) // eslint-disable-line react-hooks/exhaustive-deps + return ( + { + setFlag(false) + }} + title={ + + {getString('getMyCloneTitle')} + + } + style={{ width: 490, maxHeight: '95vh', overflow: 'auto' }}> + + + {getString('userName')} + + + + {currentUser.display_name} + + + + + + {getString('passwordApi')} + + + + + {token} + + + + + + {getString('cloneText')} + + + ) +} + +export default CloneCredentialDialog diff --git a/web/src/components/CodeCommentSecondarySaveButton/CodeCommentSecondarySaveButton.tsx b/web/src/components/CodeCommentSecondarySaveButton/CodeCommentSecondarySaveButton.tsx new file mode 100644 index 0000000000..284b7c212d --- /dev/null +++ b/web/src/components/CodeCommentSecondarySaveButton/CodeCommentSecondarySaveButton.tsx @@ -0,0 +1,90 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import React, { useMemo, useState } from 'react' +import { useMutate } from 'restful-react' +import { useToaster, Button, ButtonVariation, ButtonSize, ButtonProps, useIsMounted } from '@harnessio/uicore' +import { useStrings } from 'framework/strings' +import type { GitInfoProps } from 'utils/GitUtils' +import type { TypesPullReqActivity } from 'services/code' +import { useEmitCodeCommentStatus } from 'hooks/useEmitCodeCommentStatus' +import { CodeCommentState, getErrorMessage } from 'utils/Utils' +import type { CommentItem } from '../CommentBox/CommentBox' + +interface CodeCommentSecondarySaveButtonProps + extends Pick, + ButtonProps { + commentItems: CommentItem[] +} + +export const CodeCommentSecondarySaveButton: React.FC = ({ + repoMetadata, + pullRequestMetadata, + commentItems, + onClick, + ...props +}) => { + const { getString } = useStrings() + const isMounted = useIsMounted() + const { showError } = useToaster() + const path = useMemo( + () => `/api/v1/repos/${repoMetadata.path}/+/pullreq/${pullRequestMetadata?.number}/comments`, + [repoMetadata.path, pullRequestMetadata?.number] + ) + const { mutate: updateCodeCommentStatus } = useMutate({ verb: 'PUT', path: ({ id }) => `${path}/${id}/status` }) + const [resolved, setResolved] = useState(commentItems[0]?.payload?.resolved ? true : false) + const emitCodeCommentStatus = useEmitCodeCommentStatus({ + id: commentItems[0]?.payload?.id, + onMatch: status => { + if (isMounted.current) { + setResolved(status === CodeCommentState.RESOLVED) + } + } + }) + + return ( + + ) +} diff --git a/web/src/components/CodeCommentStatusSelect/CodeCommentStatusSelect.module.scss b/web/src/components/CodeCommentStatusSelect/CodeCommentStatusSelect.module.scss new file mode 100644 index 0000000000..4b91c5df81 --- /dev/null +++ b/web/src/components/CodeCommentStatusSelect/CodeCommentStatusSelect.module.scss @@ -0,0 +1,31 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +.select { + width: 110px !important; + cursor: pointer; + + :global { + .bp3-menu { + width: 100px !important; + min-width: 103px !important; + + .Select--menuItem { + width: 100px !important; + } + } + } +} diff --git a/web/src/components/CodeCommentStatusSelect/CodeCommentStatusSelect.module.scss.d.ts b/web/src/components/CodeCommentStatusSelect/CodeCommentStatusSelect.module.scss.d.ts new file mode 100644 index 0000000000..8f73f4c0e4 --- /dev/null +++ b/web/src/components/CodeCommentStatusSelect/CodeCommentStatusSelect.module.scss.d.ts @@ -0,0 +1,19 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/* eslint-disable */
+// This is an auto-generated file
+export declare const select: string
diff --git a/web/src/components/CodeCommentStatusSelect/CodeCommentStatusSelect.tsx b/web/src/components/CodeCommentStatusSelect/CodeCommentStatusSelect.tsx
new file mode 100644
index 0000000000..d51b897803
--- /dev/null
+++ b/web/src/components/CodeCommentStatusSelect/CodeCommentStatusSelect.tsx
@@ -0,0 +1,106 @@
+/*
+ * Copyright 2023 Harness, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import React, { useMemo, useState } from 'react'
+import { useMutate } from 'restful-react'
+import { useToaster, Select } from '@harnessio/uicore'
+import { useStrings } from 'framework/strings'
+import type { GitInfoProps } from 'utils/GitUtils'
+import type { TypesPullReqActivity } from 'services/code'
+import { CodeCommentState, getErrorMessage } from 'utils/Utils'
+import { useEmitCodeCommentStatus } from 'hooks/useEmitCodeCommentStatus'
+import type { CommentItem } from '../CommentBox/CommentBox'
+import css from './CodeCommentStatusSelect.module.scss'
+
+interface CodeCommentStatusSelectProps extends Pick {
+  commentItems: CommentItem[]
+  onCommentUpdate: () => void
+
+  refetchActivities?: () => void
+}
+
+export const CodeCommentStatusSelect: React.FC = ({
+  repoMetadata,
+  pullRequestMetadata,
+  commentItems,
+  onCommentUpdate,
+  refetchActivities
+}) => {
+  const { getString } = useStrings()
+  const { showError } = useToaster()
+  const path = useMemo(
+    () => `/api/v1/repos/${repoMetadata.path}/+/pullreq/${pullRequestMetadata?.number}/comments`,
+    [repoMetadata.path, pullRequestMetadata?.number]
+  )
+  const { mutate: updateCodeCommentStatus } = useMutate({ verb: 'PUT', path: ({ id }) => `${path}/${id}/status` })
+  const codeCommentStatusItems = useMemo(
+    () => [
+      {
+        label: getString('active'),
+        value: CodeCommentState.ACTIVE
+      },
+      {
+        label: getString('resolved'),
+        value: CodeCommentState.RESOLVED
+      }
+    ],
+    [getString]
+  )
+  const [codeCommentStatus, setCodeCommentStatus] = useState(
+    commentItems[0]?.payload?.resolved ? codeCommentStatusItems[1] : codeCommentStatusItems[0]
+  )
+  const emitCodeCommentStatus = useEmitCodeCommentStatus({
+    id: commentItems[0]?.payload?.id,
+    onMatch: status => {
+      setCodeCommentStatus(status === CodeCommentState.ACTIVE ?
codeCommentStatusItems[0] : codeCommentStatusItems[1]) + } + }) + + return ( + { + setActivityFilter(newState) + refetchActivities() + }} + /> + + + { + if (dateOrderSort === orderSortDate.ASC) { + setDateOrderSort(orderSortDate.DESC) + } else { + setDateOrderSort(orderSortDate.ASC) + } + }}> + {dateOrderSort === orderSortDate.ASC ? getString('ascending') : getString('descending')} + + + + {dateOrderSort != orderSortDate.DESC ? null : + + {newCommentBox} + + } + + {renderedActivityBlocks} + + {dateOrderSort != orderSortDate.ASC ? null : + + {newCommentBox} + + } + + + + + + + + + + + + ) +} + +export enum PRCommentFilterType { + SHOW_EVERYTHING = 'showEverything', + ALL_COMMENTS = 'allComments', + MY_COMMENTS = 'myComments', + RESOLVED_COMMENTS = 'resolvedComments', + UNRESOLVED_COMMENTS = 'unresolvedComments' +} + +function useActivityFilters() { + const { getString } = useStrings() + + return useMemo( + () => [ + { + label: getString('showEverything'), + value: PRCommentFilterType.SHOW_EVERYTHING + }, + { + label: getString('allComments'), + value: PRCommentFilterType.ALL_COMMENTS + }, + { + label: getString('myComments'), + value: PRCommentFilterType.MY_COMMENTS + }, + { + label: getString('unrsolvedComment'), + value: PRCommentFilterType.UNRESOLVED_COMMENTS + }, + { + label: getString('resolvedComments'), + value: PRCommentFilterType.RESOLVED_COMMENTS + } + ], + [getString] + ) +} diff --git a/web/src/pages/PullRequest/Conversation/DescriptionBox.tsx b/web/src/pages/PullRequest/Conversation/DescriptionBox.tsx new file mode 100644 index 0000000000..c0cb30df93 --- /dev/null +++ b/web/src/pages/PullRequest/Conversation/DescriptionBox.tsx @@ -0,0 +1,122 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import React, { useEffect, useState } from 'react' +import { Container, useToaster } from '@harnessio/uicore' +import cx from 'classnames' +import { useMutate } from 'restful-react' +import { MarkdownViewer } from 'components/MarkdownViewer/MarkdownViewer' +import { useStrings } from 'framework/strings' +import type { OpenapiUpdatePullReqRequest } from 'services/code' +import { OptionsMenuButton } from 'components/OptionsMenuButton/OptionsMenuButton' +import { MarkdownEditorWithPreview } from 'components/MarkdownEditorWithPreview/MarkdownEditorWithPreview' +import { NavigationCheck } from 'components/NavigationCheck/NavigationCheck' +import { getErrorMessage } from 'utils/Utils' +import type { ConversationProps } from './Conversation' +import css from './Conversation.module.scss' + +interface DescriptionBoxProps extends Omit { + onCancelEditDescription: () => void +} + +export const DescriptionBox: React.FC = ({ + repoMetadata, + pullRequestMetadata, + onCommentUpdate: refreshPullRequestMetadata, + onCancelEditDescription +}) => { + const [edit, setEdit] = useState(false) + const [dirty, setDirty] = useState(false) + const [originalContent, setOriginalContent] = useState(pullRequestMetadata.description as string) + const [content, setContent] = useState(originalContent) + const { getString } = useStrings() + const { showError } = useToaster() + const { mutate } = useMutate({ + verb: 'PATCH', + path: `/api/v1/repos/${repoMetadata.path}/+/pullreq/${pullRequestMetadata.number}` + }) + + useEffect(() => { + setEdit(!pullRequestMetadata?.description?.length) + + if (pullRequestMetadata?.description) { + setContent(pullRequestMetadata?.description) + } + }, [pullRequestMetadata?.description, pullRequestMetadata?.description?.length]) + + return ( + + + {(edit && ( + { + const payload: OpenapiUpdatePullReqRequest = { + title: pullRequestMetadata.title, + description: value || '' + } + mutate(payload) + .then(() => { + setContent(value) + setOriginalContent(value) + setEdit(false) + refreshPullRequestMetadata() + }) + .catch(exception => showError(getErrorMessage(exception), 0, getString('pr.failedToUpdate'))) + }} + onCancel={() => { + setContent(originalContent) + setEdit(false) + onCancelEditDescription() + }} + setDirty={setDirty} + i18n={{ + placeHolder: getString('pr.enterDesc'), + tabEdit: getString('write'), + tabPreview: getString('preview'), + save: getString('save'), + cancel: getString('cancel') + }} + editorHeight="400px" + autoFocusAndPosition={true} + /> + )) || ( + + + + setEdit(true) + } + ]} + /> + + + )} + + + + ) +} diff --git a/web/src/pages/PullRequest/Conversation/PullRequestActionsBox/PullRequestActionsBox.module.scss b/web/src/pages/PullRequest/Conversation/PullRequestActionsBox/PullRequestActionsBox.module.scss new file mode 100644 index 0000000000..cffc1c9d8f --- /dev/null +++ b/web/src/pages/PullRequest/Conversation/PullRequestActionsBox/PullRequestActionsBox.module.scss @@ -0,0 +1,155 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +.main { + --bar-height: 60px; + + background-color: var(--green-50) !important; + margin: -24px -24px 0 !important; + position: sticky; + top: 0; + z-index: 4; + + &.merged { + border-color: transparent !important; + background: #e8e8ff !important; + } + + &.error { + background-color: var(--red-50) !important; + } + + &.closed { + background-color: var(--grey-100) !important; + } + + &.draft { + background-color: var(--orange-100) !important; + } + + &.unchecked { + background-color: var(--orange-50) !important; // Note: No UICore color variable for this background + } + + .layout { + height: var(--bar-height); + padding: 0 var(--spacing-xlarge) !important; + + .secondaryButton, + [class*='Button--variation-tertiary'] { + --box-shadow: 0px 0px 1px rgba(40, 41, 61, 0.04), 0px 2px 4px rgba(96, 97, 112, 0.16) !important; + } + } + + .btn { + background-color: var(--green-800) !important; + color: var(--white) !important; + } + + .heading { + font-weight: 600 !important; + font-size: 16px !important; + line-height: 24px !important; + color: var(--grey-700) !important; + } + + .sub { + font-weight: 600 !important; + font-size: 13px !important; + line-height: 20px !important; + color: var(--green-800) !important; + + &.closed { + color: var(--grey-600) !important; + } + + &.merged { + color: var(--purple-700) !important; + } + + &.draft { + color: var(--orange-900) !important; + } + + &.unmergeable { + color: var(--red-500) !important; + } + + &.unchecked { + color: #c05809 !important; // Note: No UICore color variable for this text + } + } +} + +.popover { + transform: translateY(5px) !important; + + .menuItem { + strong { + display: inline-block; + margin-left: 10px; + } + + p { + font-size: 13px; + padding-left: 27px; + line-height: 16px; + margin: 5px 0; + max-width: 320px; + white-space: break-spaces !important; + } + } + .menuReviewItem { + strong { + display: inline-block; + margin-left: 10px; + } + + p { + font-size: 13px; + padding-left: 2px; + line-height: 16px; + margin: 0px 1px; + max-width: 320px; + } + } +} + +.btnWrapper { + &.hasError button { + --background-color: var(--grey-50) !important; + --background-color-hover: var(--white) !important; + --background-color-active: var(--grey-100) !important; + } + + &.disabled { + pointer-events: none; + opacity: 0.5; + } + + a, + button { + --background-color: var(--green-800) !important; + --background-color-hover: var(--green-900) !important; + --background-color-active: var(--green-900) !important; + } +} + +.mergeContainer { + border-radius: 4px; + background: #e8e8ff !important; + padding: 2px 5px !important; +} diff --git a/web/src/pages/PullRequest/Conversation/PullRequestActionsBox/PullRequestActionsBox.module.scss.d.ts b/web/src/pages/PullRequest/Conversation/PullRequestActionsBox/PullRequestActionsBox.module.scss.d.ts new file mode 100644 index 0000000000..8662fea10a --- /dev/null +++ b/web/src/pages/PullRequest/Conversation/PullRequestActionsBox/PullRequestActionsBox.module.scss.d.ts @@ -0,0 +1,37 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/* eslint-disable */
+// This is an auto-generated file
+export declare const btn: string
+export declare const btnWrapper: string
+export declare const closed: string
+export declare const disabled: string
+export declare const draft: string
+export declare const error: string
+export declare const hasError: string
+export declare const heading: string
+export declare const layout: string
+export declare const main: string
+export declare const menuItem: string
+export declare const menuReviewItem: string
+export declare const mergeContainer: string
+export declare const merged: string
+export declare const popover: string
+export declare const secondaryButton: string
+export declare const sub: string
+export declare const unchecked: string
+export declare const unmergeable: string
diff --git a/web/src/pages/PullRequest/Conversation/PullRequestActionsBox/PullRequestActionsBox.tsx b/web/src/pages/PullRequest/Conversation/PullRequestActionsBox/PullRequestActionsBox.tsx
new file mode 100644
index 0000000000..1fdf7641e4
--- /dev/null
+++ b/web/src/pages/PullRequest/Conversation/PullRequestActionsBox/PullRequestActionsBox.tsx
@@ -0,0 +1,386 @@
+/*
+ * Copyright 2023 Harness, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +import React, { useMemo, useState } from 'react' +import { + Button, + ButtonVariation, + Container, + FlexExpander, + Layout, + SplitButton, + StringSubstitute, + Text, + useToaster +} from '@harnessio/uicore' +import { Icon } from '@harnessio/icons' +import { Color } from '@harnessio/design-system' +import { useMutate } from 'restful-react' +import { Case, Else, Match, Render, Truthy } from 'react-jsx-match' +import { Menu, PopoverPosition, Icon as BIcon } from '@blueprintjs/core' +import cx from 'classnames' +import ReactTimeago from 'react-timeago' +import type { + EnumMergeMethod, + EnumPullReqState, + OpenapiMergePullReq, + OpenapiStatePullReqRequest, + TypesPullReq +} from 'services/code' +import { useStrings } from 'framework/strings' +import { CodeIcon, GitInfoProps, PullRequestFilterOption, PullRequestState } from 'utils/GitUtils' +import { useGetSpaceParam } from 'hooks/useGetSpaceParam' +import { useAppContext } from 'AppContext' +import { Images } from 'images' +import { getErrorMessage, MergeCheckStatus, permissionProps } from 'utils/Utils' +import { UserPreference, useUserPreference } from 'hooks/useUserPreference' +import ReviewSplitButton from 'components/Changes/ReviewSplitButton/ReviewSplitButton' +import css from './PullRequestActionsBox.module.scss' + +interface PullRequestActionsBoxProps extends Pick { + onPRStateChanged: () => void + refetchReviewers: () => void +} + +interface PRMergeOption { + method: EnumMergeMethod | 'close' + title: string + desc: string + disabled?: boolean +} + +interface PRDraftOption { + method: 'close' | 'open' + title: string + desc: string + disabled?: boolean +} + +export const PullRequestActionsBox: React.FC = ({ + repoMetadata, + pullRequestMetadata, + onPRStateChanged, + refetchReviewers +}) => { + const { getString } = useStrings() + const { showError } = useToaster() + const { currentUser } = useAppContext() + const { hooks, standalone } = useAppContext() + const space = useGetSpaceParam() + const { mutate: mergePR, loading } = useMutate({ + verb: 'POST', + path: `/api/v1/repos/${repoMetadata.path}/+/pullreq/${pullRequestMetadata.number}/merge` + }) + const { mutate: updatePRState, loading: loadingState } = useMutate({ + verb: 'POST', + path: `/api/v1/repos/${repoMetadata.path}/+/pullreq/${pullRequestMetadata.number}/state` + }) + const mergeable = useMemo( + () => pullRequestMetadata.merge_check_status === MergeCheckStatus.MERGEABLE, + [pullRequestMetadata] + ) + const isClosed = pullRequestMetadata.state === PullRequestState.CLOSED + const isOpen = pullRequestMetadata.state === PullRequestState.OPEN + const isConflict = pullRequestMetadata.merge_check_status === MergeCheckStatus.CONFLICT + const unchecked = useMemo( + () => pullRequestMetadata.merge_check_status === MergeCheckStatus.UNCHECKED && !isClosed, + [pullRequestMetadata, isClosed] + ) + const isDraft = pullRequestMetadata.is_draft + const mergeOptions: PRMergeOption[] = [ + { + method: 'squash', + title: getString('pr.mergeOptions.squashAndMerge'), + desc: getString('pr.mergeOptions.squashAndMergeDesc'), + disabled: mergeable === false + }, + { + method: 'merge', + title: getString('pr.mergeOptions.createMergeCommit'), + desc: getString('pr.mergeOptions.createMergeCommitDesc'), + disabled: mergeable === false + }, + { + method: 'rebase', + title: getString('pr.mergeOptions.rebaseAndMerge'), + desc: getString('pr.mergeOptions.rebaseAndMergeDesc'), + disabled: mergeable === false + }, + { + method: 'close', + title: getString('pr.mergeOptions.close'), + desc: 
getString('pr.mergeOptions.closeDesc') + } + ] + const draftOptions: PRDraftOption[] = [ + { + method: 'open', + title: getString('pr.draftOpenForReview.title'), + desc: getString('pr.draftOpenForReview.desc') + }, + { + method: 'close', + title: getString('pr.mergeOptions.close'), + desc: getString('pr.mergeOptions.closeDesc') + } + ] + + const [mergeOption, setMergeOption, resetMergeOption] = useUserPreference( + UserPreference.PULL_REQUEST_MERGE_STRATEGY, + mergeOptions[1], + option => option.method !== 'close' + ) + const [draftOption, setDraftOption] = useState(draftOptions[0]) + const permPushResult = hooks?.usePermissionTranslate?.( + { + resource: { + resourceType: 'CODE_REPOSITORY' + }, + permissions: ['code_repo_push'] + }, + [space] + ) + const isActiveUserPROwner = useMemo(() => { + return ( + !!currentUser?.uid && !!pullRequestMetadata?.author?.uid && currentUser?.uid === pullRequestMetadata?.author?.uid + ) + }, [currentUser, pullRequestMetadata]) + + if (pullRequestMetadata.state === PullRequestFilterOption.MERGED) { + return + } + + return ( + + + + + {(unchecked && ) || ( + + )} + + {getString( + isDraft + ? 'prState.draftHeading' + : isClosed + ? 'pr.prClosed' + : unchecked + ? 'pr.checkingToMerge' + : mergeable === false && isOpen + ? 'pr.cantBeMerged' + : 'pr.branchHasNoConflicts' + )} + + + + + + + + + { + if (draftOption.method === 'open') { + updatePRState({ is_draft: false, state: 'open' }) + .then(onPRStateChanged) + .catch(exception => showError(getErrorMessage(exception))) + } else { + updatePRState({ state: 'closed' }) + .then(onPRStateChanged) + .catch(exception => showError(getErrorMessage(exception))) + } + }}> + {draftOptions.map(option => { + return ( + + + {option.title} +

{option.desc}

+ + } + onClick={() => setDraftOption(option)} + /> + ) + })} +
+
+ + + + + +
+ {tagArr.length !== 0 ? ( + <> + ) : ( + + {getString('noneYet')} + + )} */} +
+
+ + ) +} + +export default PullRequestSideBar diff --git a/web/src/pages/PullRequest/Conversation/SystemComment.tsx b/web/src/pages/PullRequest/Conversation/SystemComment.tsx new file mode 100644 index 0000000000..422360f059 --- /dev/null +++ b/web/src/pages/PullRequest/Conversation/SystemComment.tsx @@ -0,0 +1,308 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import React from 'react' +import { Avatar, Container, Layout, StringSubstitute, Text } from '@harnessio/uicore' +import { Icon, IconName } from '@harnessio/icons' +import { Color, FontVariation } from '@harnessio/design-system' +import ReactTimeago from 'react-timeago' +import { Render } from 'react-jsx-match' +import { CodeIcon, GitInfoProps } from 'utils/GitUtils' +import { MarkdownViewer } from 'components/MarkdownViewer/MarkdownViewer' +import { useStrings } from 'framework/strings' +import type { TypesPullReqActivity } from 'services/code' +import type { CommentItem } from 'components/CommentBox/CommentBox' +import { formatDate, formatTime, PullRequestSection } from 'utils/Utils' +import { CommentType } from 'components/DiffViewer/DiffViewerUtils' +import { useAppContext } from 'AppContext' +import { CommitActions } from 'components/CommitActions/CommitActions' +import { PipeSeparator } from 'components/PipeSeparator/PipeSeparator' +import css from './Conversation.module.scss' + +interface SystemCommentProps extends Pick { + commentItems: CommentItem[] + repoMetadataPath?: string +} + +export const SystemComment: React.FC = ({ + pullRequestMetadata, + commentItems, + repoMetadataPath +}) => { + const { getString } = useStrings() + const payload = commentItems[0].payload + const type = payload?.type + const { routes } = useAppContext() + + switch (type) { + case CommentType.MERGE: { + return ( + + + + + + + + + {pullRequestMetadata.merger?.display_name}, + source: {pullRequestMetadata.source_branch}, + target: {pullRequestMetadata.target_branch}, + time: ( + + + + + ) + }} + /> + + + + ) + } + + case CommentType.REVIEW_SUBMIT: { + return ( + + + + + + + {payload?.author?.display_name}, + state: {(payload?.payload as Unknown)?.decision}, + time: ( + + + + + ) + }} + /> + + + + ) + } + + case CommentType.BRANCH_UPDATE: { + return ( + + + + + + {payload?.author?.display_name} + + ), + commit: ( + + + + ) + }} + /> + + + + + + + + ) + } + + case CommentType.BRANCH_DELETE: { + return ( + + + + + + {payload?.author?.display_name} + + ), + commit: ( + + + + ) + }} + /> + + + + + + + + ) + } + + case CommentType.STATE_CHANGE: { + const openFromDraft = + (payload?.payload as Unknown)?.old_draft === true && (payload?.payload as Unknown)?.new_draft === false + + return ( + + + + + {payload?.author?.display_name}, + old: {(payload?.payload as Unknown)?.old}, + new: {(payload?.payload as Unknown)?.new} + }} + /> + + + + + + + + ) + } + + case CommentType.TITLE_CHANGE: { + return ( + + + + + {payload?.author?.display_name}, + old: ( + + {(payload?.payload as Unknown)?.old} + + ), + 
new: {(payload?.payload as Unknown)?.new} + }} + /> + + + + + + + + 1}> + + index > 0) + .map( + item => + `|${item.author}|${(item.payload?.payload as Unknown)?.old}|${ + (item.payload?.payload as Unknown)?.new + }|${formatDate(item.edited)} ${formatTime(item.edited)}|` + ) + ) + .join('\n')} + /> + + + + ) + } + + default: { + // eslint-disable-next-line no-console + console.warn('Unable to render system type activity', commentItems) + return ( + + + {type} + + ) + } + } +} + +const generateReviewDecisionIcon = ( + reviewDecision: string +): { + name: IconName + color: string | undefined + size: number | undefined + icon: IconName + iconProps?: { color?: Color } +} => { + let icon: IconName = 'dot' + let color: Color | undefined = undefined + let size: number | undefined = undefined + + switch (reviewDecision) { + case 'changereq': + icon = 'main-issue-filled' + color = Color.ORANGE_700 + size = 18 + break + case 'approved': + icon = 'execution-success' + size = 18 + color = Color.GREEN_700 + break + } + const name = icon + return { name, color, size, icon, ...(color ? { iconProps: { color } } : undefined) } +} diff --git a/web/src/pages/PullRequest/PullRequest.module.scss b/web/src/pages/PullRequest/PullRequest.module.scss new file mode 100644 index 0000000000..b5bfc9d40c --- /dev/null +++ b/web/src/pages/PullRequest/PullRequest.module.scss @@ -0,0 +1,88 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+.main {
+  min-height: var(--page-height);
+
+  > div[class*='PageHeader'] {
+    border-bottom: none !important;
+  }
+}
+
+.prNumber {
+  font-size: 20px;
+  font-weight: 400;
+  color: var(--grey-500);
+  padding-left: var(--spacing-small);
+}
+
+.prTitle {
+  align-items: center;
+
+  .btn > span {
+    background: var(--grey-100);
+    border: 1px solid var(--grey-200);
+    border-radius: 4px;
+  }
+
+  .input {
+    margin-top: 0 !important;
+    margin-bottom: 0 !important;
+
+    input {
+      width: 800px;
+      font-weight: 600;
+      padding: 0 var(--spacing-small) !important;
+      line-height: 22px !important;
+    }
+  }
+
+  .titleText {
+    flex-shrink: 0;
+    max-width: calc(100vw - 500px);
+  }
+}
+
+.changes {
+  padding: 0 var(--spacing-xlarge) var(--spacing-xlarge) !important;
+}
+
+.checksCount {
+  border: 1px solid var(--grey-100);
+  background-color: var(--grey-50) !important;
+  display: inline-flex;
+  margin-left: 8px !important;
+  border-radius: 4px;
+  padding: 2px 5px 0 !important;
+
+  .checksCountLayout {
+    display: inline-flex;
+    height: 18px;
+    align-items: center;
+    justify-content: center;
+
+    > span {
+      padding: 0 !important;
+      margin: 0 !important;
+      align-self: flex-start;
+      line-height: 16px !important;
+
+      &:last-of-type {
+        padding-left: 5px !important;
+      }
+    }
+  }
+}
diff --git a/web/src/pages/PullRequest/PullRequest.module.scss.d.ts b/web/src/pages/PullRequest/PullRequest.module.scss.d.ts
new file mode 100644
index 0000000000..0151b32e7a
--- /dev/null
+++ b/web/src/pages/PullRequest/PullRequest.module.scss.d.ts
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2023 Harness, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/* eslint-disable */
+// This is an auto-generated file
+export declare const btn: string
+export declare const changes: string
+export declare const checksCount: string
+export declare const checksCountLayout: string
+export declare const input: string
+export declare const main: string
+export declare const prNumber: string
+export declare const prTitle: string
+export declare const titleText: string
diff --git a/web/src/pages/PullRequest/PullRequest.tsx b/web/src/pages/PullRequest/PullRequest.tsx
new file mode 100644
index 0000000000..3a8ca52e99
--- /dev/null
+++ b/web/src/pages/PullRequest/PullRequest.tsx
@@ -0,0 +1,355 @@
+/*
+ * Copyright 2023 Harness, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +import React, { useCallback, useEffect, useMemo, useState } from 'react' +import { Container, Layout, PageBody, Tabs, Text } from '@harnessio/uicore' +import { FontVariation } from '@harnessio/design-system' +import { useGet, useMutate } from 'restful-react' +import { Render } from 'react-jsx-match' +import { useHistory } from 'react-router-dom' +import { compact, isEqual } from 'lodash-es' +import { useAppContext } from 'AppContext' +import { useGetRepositoryMetadata } from 'hooks/useGetRepositoryMetadata' +import { useStrings } from 'framework/strings' +import { RepositoryPageHeader } from 'components/RepositoryPageHeader/RepositoryPageHeader' +import { voidFn, getErrorMessage, PullRequestSection, MergeCheckStatus } from 'utils/Utils' +import { CodeIcon } from 'utils/GitUtils' +import type { TypesPullReq, TypesPullReqStats, TypesRepository } from 'services/code' +import { LoadingSpinner } from 'components/LoadingSpinner/LoadingSpinner' +import { TabTitleWithCount, tabContainerCSS } from 'components/TabTitleWithCount/TabTitleWithCount' +import { usePRChecksDecision } from 'hooks/usePRChecksDecision' +import { ExecutionStatus } from 'components/ExecutionStatus/ExecutionStatus' +import useSpaceSSE from 'hooks/useSpaceSSE' +import { useGetSpaceParam } from 'hooks/useGetSpaceParam' +import { PullRequestMetaLine } from './PullRequestMetaLine' +import { Conversation } from './Conversation/Conversation' +import { Checks } from './Checks/Checks' +import { Changes } from '../../components/Changes/Changes' +import { PullRequestCommits } from './PullRequestCommits/PullRequestCommits' +import { PullRequestTitle } from './PullRequestTitle' +import css from './PullRequest.module.scss' + +const SSE_EVENTS = ['pullreq_updated'] + +export default function PullRequest() { + const history = useHistory() + const { getString } = useStrings() + const { routes, standalone } = useAppContext() + const space = useGetSpaceParam() + const { + repoMetadata, + error, + loading, + refetch, + pullRequestId, + pullRequestSection = PullRequestSection.CONVERSATION, + commitSHA + } = useGetRepositoryMetadata() + const path = useMemo( + () => `/api/v1/repos/${repoMetadata?.path}/+/pullreq/${pullRequestId}`, + [repoMetadata?.path, pullRequestId] + ) + const { + data: pullRequestData, + error: prError, + loading: prLoading, + refetch: refetchPullRequest + } = useGet({ + path, + lazy: !repoMetadata + }) + + const eventHandler = useCallback((data : TypesPullReq)=> { + // ensure this update belongs to the PR we are showing right now - to avoid unnecessary reloads + if (!data || !repoMetadata || + data.target_repo_id !== repoMetadata.id || + String(data.number) !== pullRequestId + ) { + return + } + // NOTE: we refresh as events don't contain all pr stats yet (can be optimized) + refetchPullRequest() + }, [pullRequestId, repoMetadata, refetchPullRequest]) + useSpaceSSE({ + space, + events: SSE_EVENTS, + onEvent: eventHandler + }) + + const [prData, setPrData] = useState() + const prChecksDecisionResult = usePRChecksDecision({ + repoMetadata, + pullRequestMetadata: prData + }) + const showSpinner = useMemo(() => { + return loading || (prLoading && !prData) + }, [loading, prLoading, prData]) + const [showEditDescription, setShowEditDescription] = useState(false) + + const [prStats, setPRStats] = useState() + useMemo(() => { + setPRStats(oldPRStats => { + if (isEqual(oldPRStats, prData?.stats)) { + return oldPRStats + } + + return prData?.stats + }) + }, [prData, setPRStats]) + + const onAddDescriptionClick = useCallback(() 
=> { + setShowEditDescription(true) + history.replace( + routes.toCODEPullRequest({ + repoPath: repoMetadata?.path as string, + pullRequestId, + pullRequestSection: PullRequestSection.CONVERSATION + }) + ) + }, [history, routes, repoMetadata?.path, pullRequestId]) + const recheckPath = useMemo( + () => `/api/v1/repos/${repoMetadata?.path}/+/pullreq/${pullRequestId}/recheck`, + [repoMetadata?.path, pullRequestId] + ) + const { mutate: recheckPR, loading: loadingRecheckPR } = useMutate({ + verb: 'POST', + path: recheckPath + }) + + // prData holds the latest good PR data to make sure page is not broken + // when polling fails + useEffect( + function setPrDataIfNotSet() { + if (!pullRequestData || (prData && isEqual(prData, pullRequestData))) { + return + } + + // recheck pr (merge-check, ...) in case it's unavailable + // Approximation of identifying target branch update: + // 1. branch got updated before page was loaded (status is unchecked and prData is empty) + // NOTE: This doesn't guarantee the status is UNCHECKED due to target branch update and can cause duplicate + // PR merge checks being run on PR creation or source branch update. + // 2. branch got updated while we are on the page (same source_sha but status changed to UNCHECKED) + // NOTE: This doesn't cover the case in which the status changed back to UNCHECKED before the PR is refetched. + // In that case, the user will have to re-open the PR - better than us spamming the backend with rechecks. + // This is a TEMPORARY SOLUTION and will most likely change in the future (more so on backend side) + if ( + pullRequestData.state == 'open' && + pullRequestData.merge_check_status == MergeCheckStatus.UNCHECKED && + // case 1: + (!prData || + // case 2: + (prData?.merge_check_status != MergeCheckStatus.UNCHECKED && + prData?.source_sha == pullRequestData.source_sha)) && + !loadingRecheckPR + ) { + // best effort attempt to recheck PR - fail silently + recheckPR({}) + } + + setPrData(pullRequestData) + }, + [pullRequestData, loadingRecheckPR, recheckPR, setPrData] // eslint-disable-line react-hooks/exhaustive-deps + ) + + useEffect(() => { + const fn = () => { + if (repoMetadata) { + refetchPullRequest().then(() => { + interval = window.setTimeout(fn, PR_POLLING_INTERVAL) + }) + } + } + let interval = window.setTimeout(fn, PR_POLLING_INTERVAL) + + return () => window.clearTimeout(interval) + }, [repoMetadata, refetchPullRequest, path]) + + const activeTab = useMemo( + () => + Object.values(PullRequestSection).find(value => value === pullRequestSection) + ? pullRequestSection + : PullRequestSection.CONVERSATION, + [pullRequestSection] + ) + + return ( + + + ) : ( + '' + ) + } + dataTooltipId="repositoryPullRequests" + extraBreadcrumbLinks={ + repoMetadata && [ + { + label: getString('pullRequests'), + url: routes.toCODEPullRequests({ repoPath: repoMetadata.path as string }) + } + ] + } + /> + + + + + <> + + + { + history.replace( + routes.toCODEPullRequest({ + repoPath: repoMetadata?.path as string, + pullRequestId, + pullRequestSection: tabId !== PullRequestSection.CONVERSATION ? 
(tabId as string) : undefined + }) + ) + }} + tabList={[ + { + id: PullRequestSection.CONVERSATION, + title: ( + + ), + panel: ( + { + setShowEditDescription(false) + refetchPullRequest() + }} + prStats={prStats} + showEditDescription={showEditDescription} + onCancelEditDescription={() => setShowEditDescription(false)} + /> + ) + }, + { + id: PullRequestSection.COMMITS, + title: ( + + ), + panel: ( + + ) + }, + { + id: PullRequestSection.FILES_CHANGED, + title: ( + + ), + panel: ( + + + + ) + }, + { + id: PullRequestSection.CHECKS, + title: ( + + + + + + {prChecksDecisionResult?.count[prChecksDecisionResult?.overallStatus]} + + + + ) : null + } + count={prChecksDecisionResult?.count?.failure || 0} + padding={{ left: 'medium' }} + /> + ), + panel: ( + + ) + } + ]} + /> + + + + + + ) +} + +const PR_POLLING_INTERVAL = 20000 diff --git a/web/src/pages/PullRequest/PullRequestCommits/PullRequestCommits.module.scss b/web/src/pages/PullRequest/PullRequestCommits/PullRequestCommits.module.scss new file mode 100644 index 0000000000..edfc786e4c --- /dev/null +++ b/web/src/pages/PullRequest/PullRequestCommits/PullRequestCommits.module.scss @@ -0,0 +1,18 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +.main { +} diff --git a/web/src/pages/PullRequest/PullRequestCommits/PullRequestCommits.tsx b/web/src/pages/PullRequest/PullRequestCommits/PullRequestCommits.tsx new file mode 100644 index 0000000000..cf40048c87 --- /dev/null +++ b/web/src/pages/PullRequest/PullRequestCommits/PullRequestCommits.tsx @@ -0,0 +1,61 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import React from 'react' +import { useGet } from 'restful-react' +import type { TypesCommit } from 'services/code' +import type { GitInfoProps } from 'utils/GitUtils' +import { voidFn, LIST_FETCHING_LIMIT } from 'utils/Utils' +import { usePageIndex } from 'hooks/usePageIndex' +import { useStrings } from 'framework/strings' +import { ResourceListingPagination } from 'components/ResourceListingPagination/ResourceListingPagination' +import { CommitsView } from 'components/CommitsView/CommitsView' +import { PullRequestTabContentWrapper } from '../PullRequestTabContentWrapper' + +export const PullRequestCommits: React.FC> = ({ + repoMetadata, + pullRequestMetadata, +}) => { + const limit = LIST_FETCHING_LIMIT + const [page, setPage] = usePageIndex() + const { getString } = useStrings() + const { data, error, loading, refetch, response } = useGet<{ + commits: TypesCommit[] + }>({ + path: `/api/v1/repos/${repoMetadata?.path}/+/commits`, + queryParams: { + limit, + page, + git_ref: pullRequestMetadata.source_sha, + after: pullRequestMetadata.merge_base_sha + }, + lazy: !repoMetadata + }) + + return ( + + + + + + ) +} diff --git a/web/src/pages/PullRequest/PullRequestMetaLine.module.scss b/web/src/pages/PullRequest/PullRequestMetaLine.module.scss new file mode 100644 index 0000000000..d6e94309fb --- /dev/null +++ b/web/src/pages/PullRequest/PullRequestMetaLine.module.scss @@ -0,0 +1,65 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +.main { + background-color: var(--white) !important; + + .metaline { + font-size: 13px !important; + font-weight: 500 !important; + line-height: 20px !important; + color: var(--grey-500) !important; + + &.time { + color: var(--grey-400) !important; + } + + strong { + color: var(--grey-700) !important; + } + } + + .layout { + align-items: center; + } +} + +.copyContainer { + background-color: var(--primary-1) !important; + color: var(--primary-7) !important; + border-radius: 4px !important; + --button-height: unset !important; + --text-color: unset !important; + padding: unset !important ; + --padding-right: 2px !important; + padding-left: 8px !important; + min-width: unset !important; +} + +.linkText { + color: var(--primary-7) !important; +} + +.link { + background-color: var(--primary-1) !important; + color: var(--primary-7) !important; + border-radius: 4px; + line-height: 20px; + font-size: 13px; + font-weight: 600; + padding: 2px 6px !important; + width: fit-content !important; +} diff --git a/web/src/pages/PullRequest/PullRequestMetaLine.module.scss.d.ts b/web/src/pages/PullRequest/PullRequestMetaLine.module.scss.d.ts new file mode 100644 index 0000000000..df654a3c71 --- /dev/null +++ b/web/src/pages/PullRequest/PullRequestMetaLine.module.scss.d.ts @@ -0,0 +1,25 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* eslint-disable */ +// This is an auto-generated file +export declare const copyContainer: string +export declare const layout: string +export declare const link: string +export declare const linkText: string +export declare const main: string +export declare const metaline: string +export declare const time: string diff --git a/web/src/pages/PullRequest/PullRequestMetaLine.tsx b/web/src/pages/PullRequest/PullRequestMetaLine.tsx new file mode 100644 index 0000000000..c3c594d5ae --- /dev/null +++ b/web/src/pages/PullRequest/PullRequestMetaLine.tsx @@ -0,0 +1,77 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import React from 'react' +import { Container, Text, Layout, StringSubstitute } from '@harnessio/uicore' +import cx from 'classnames' +import ReactTimeago from 'react-timeago' +import type { GitInfoProps } from 'utils/GitUtils' +import { useAppContext } from 'AppContext' +import { useStrings } from 'framework/strings' +import type { TypesPullReq } from 'services/code' +import { PullRequestStateLabel } from 'components/PullRequestStateLabel/PullRequestStateLabel' +import { PipeSeparator } from 'components/PipeSeparator/PipeSeparator' +import { GitRefLink } from 'components/GitRefLink/GitRefLink' +import css from './PullRequestMetaLine.module.scss' + +export const PullRequestMetaLine: React.FC> = ({ + repoMetadata, + target_branch, + source_branch, + author, + edited, + state, + is_draft, + stats +}) => { + const { getString } = useStrings() + const { routes } = useAppContext() + const vars = { + user: {author?.display_name}, + commits: {stats?.commits}, + commitsCount: stats?.commits, + target: ( + + ), + source: ( + + ) + } + + return ( + + + + + + + + + + + + + + ) +} diff --git a/web/src/pages/PullRequest/PullRequestMetadataInfo.module.scss.d.ts b/web/src/pages/PullRequest/PullRequestMetadataInfo.module.scss.d.ts new file mode 100644 index 0000000000..8ff3b47363 --- /dev/null +++ b/web/src/pages/PullRequest/PullRequestMetadataInfo.module.scss.d.ts @@ -0,0 +1,25 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* eslint-disable */ +// this is an auto-generated file +declare const styles: { + readonly main: string + readonly state: string + readonly metaline: string + readonly time: string +} +export default styles diff --git a/web/src/pages/PullRequest/PullRequestTabContentWrapper.tsx b/web/src/pages/PullRequest/PullRequestTabContentWrapper.tsx new file mode 100644 index 0000000000..a7e58193e9 --- /dev/null +++ b/web/src/pages/PullRequest/PullRequestTabContentWrapper.tsx @@ -0,0 +1,43 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import React from 'react' +import { Container, PageError } from '@harnessio/uicore' +import { getErrorMessage } from 'utils/Utils' +import { LoadingSpinner } from 'components/LoadingSpinner/LoadingSpinner' + +interface PullRequestTabContentWrapperProps { + className?: string + loading?: boolean + error?: Unknown + onRetry: () => void +} + +export const PullRequestTabContentWrapper: React.FC = ({ + className, + loading, + error, + onRetry, + children +}) => { + return ( + + + {error && } + {!error && children} + + ) +} diff --git a/web/src/pages/PullRequest/PullRequestTitle.tsx b/web/src/pages/PullRequest/PullRequestTitle.tsx new file mode 100644 index 0000000000..557268a9ce --- /dev/null +++ b/web/src/pages/PullRequest/PullRequestTitle.tsx @@ -0,0 +1,140 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import React, { useCallback, useEffect, useState } from 'react' +import { Container, Text, Layout, Button, ButtonVariation, ButtonSize, TextInput, useToaster } from '@harnessio/uicore' +import { FontVariation } from '@harnessio/design-system' +import { useMutate } from 'restful-react' +import { Match, Truthy, Else } from 'react-jsx-match' +import { useStrings } from 'framework/strings' +import { ButtonRoleProps, getErrorMessage } from 'utils/Utils' +import type { GitInfoProps } from 'utils/GitUtils' +import type { TypesPullReq } from 'services/code' +import { PipeSeparator } from 'components/PipeSeparator/PipeSeparator' +import css from './PullRequest.module.scss' + +interface PullRequestTitleProps extends TypesPullReq, Pick { + onSaveDone?: (newTitle: string) => Promise + onAddDescriptionClick: () => void +} + +export const PullRequestTitle: React.FC = ({ + repoMetadata, + title, + number, + description, + onAddDescriptionClick +}) => { + const [original, setOriginal] = useState(title) + const [val, setVal] = useState(title) + const [edit, setEdit] = useState(false) + const { getString } = useStrings() + const { showError } = useToaster() + const { mutate } = useMutate({ + verb: 'PATCH', + path: `/api/v1/repos/${repoMetadata.path}/+/pullreq/${number}` + }) + const submitChange = useCallback(() => { + mutate({ + title: val, + description + }) + .then(() => { + setEdit(false) + setOriginal(val) + }) + .catch(exception => showError(getErrorMessage(exception), 0)) + }, [description, val, mutate, showError]) + + useEffect(() => { + setOriginal(title) + + // make sure to update editor if it's not open + if (!edit) { + setVal(title) + } + }, [title, edit]) + + return ( + + + + + + event.target.select()} + onInput={event => setVal(event.currentTarget.value)} + autoFocus + onKeyDown={event => { + switch (event.key) { + case 'Enter': + submitChange() + break + case 'Escape': // does not work, maybe TextInput cancels ESC? + setEdit(false) + break + } + }} + /> + + + + + README, + LICENSE: LICENSE, + GITIGNORE: .gitignore + }} + /> + + + + + + {getString('firstTimeTitle')} + + {getString('cloneHTTPS')} + + + + + {repoMetadata.git_url} + + + + + + + + + { + history.push(standalone ? routes.toCODEUserProfile() : currentUserProfileURL) + }}> + here + + ) + }} + /> + + + + + + + + + + + ) +} diff --git a/web/src/pages/Repository/Repository.module.scss b/web/src/pages/Repository/Repository.module.scss new file mode 100644 index 0000000000..7e8df8dcc7 --- /dev/null +++ b/web/src/pages/Repository/Repository.module.scss @@ -0,0 +1,72 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+.main {
+  min-height: var(--page-height);
+  background-color: var(--white) !important;
+
+  &.withFileViewer {
+    &,
+    > div:first-of-type {
+      min-height: var(--page-height) !important;
+    }
+
+    > div:first-of-type {
+      display: flex;
+      flex-direction: column;
+    }
+  }
+}
+
+.bannerContainer {
+  padding: var(--spacing-small) var(--spacing-xsmall) !important;
+  background-color: var(--red-50) !important;
+  position: sticky;
+  top: 0;
+  z-index: 2;
+  margin: var(--spacing-small) var(--spacing-xlarge) !important;
+  border-radius: 5px;
+}
+
+.layout {
+  height: 33px;
+  display: inline-flex;
+  justify-content: center;
+  align-items: center;
+  border: 1px solid var(--grey-200);
+  background-color: var(--grey-50) !important;
+  border-radius: 4px;
+  padding-left: var(--spacing-small) !important;
+  max-width: 100%;
+  .url {
+    // width: 80%;
+    white-space: nowrap !important;
+    overflow: hidden;
+    text-overflow: ellipsis;
+    font-size: 13px !important;
+  }
+
+  button#cloneCopyButton {
+    --button-height: 24px !important;
+    border-radius: 0 !important;
+    border-left: 1px solid var(--grey-200) !important;
+    margin-left: var(--spacing-small) !important;
+  }
+}
+
+.text {
+  font-size: 16px !important;
+}
diff --git a/web/src/pages/Repository/Repository.module.scss.d.ts b/web/src/pages/Repository/Repository.module.scss.d.ts
new file mode 100644
index 0000000000..11fa3e197e
--- /dev/null
+++ b/web/src/pages/Repository/Repository.module.scss.d.ts
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2023 Harness, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/* eslint-disable */
+// This is an auto-generated file
+export declare const bannerContainer: string
+export declare const cloneCopyButton: string
+export declare const layout: string
+export declare const main: string
+export declare const text: string
+export declare const url: string
+export declare const withFileViewer: string
diff --git a/web/src/pages/Repository/Repository.tsx b/web/src/pages/Repository/Repository.tsx
new file mode 100644
index 0000000000..a8e8ba5b26
--- /dev/null
+++ b/web/src/pages/Repository/Repository.tsx
@@ -0,0 +1,107 @@
+/*
+ * Copyright 2023 Harness, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +import React, { useEffect, useState } from 'react' +import { Container, Layout, PageBody, StringSubstitute, Text } from '@harnessio/uicore' +import { Falsy, Match, Truthy } from 'react-jsx-match' +import cx from 'classnames' +import { useGetResourceContent } from 'hooks/useGetResourceContent' +import { voidFn, getErrorMessage } from 'utils/Utils' +import { useGetRepositoryMetadata } from 'hooks/useGetRepositoryMetadata' +import { LoadingSpinner } from 'components/LoadingSpinner/LoadingSpinner' +import { useStrings } from 'framework/strings' +import type { OpenapiGetContentOutput, TypesRepository } from 'services/code' +import { Images } from 'images' +import { RepositoryContent } from './RepositoryContent/RepositoryContent' +import { RepositoryHeader } from './RepositoryHeader/RepositoryHeader' +import { ContentHeader } from './RepositoryContent/ContentHeader/ContentHeader' +import { EmptyRepositoryInfo } from './EmptyRepositoryInfo' +import css from './Repository.module.scss' + +export default function Repository() { + const { gitRef, resourcePath, repoMetadata, error, loading, refetch, commitRef } = useGetRepositoryMetadata() + const { + data: resourceContent, + error: resourceError, + loading: resourceLoading, + isRepositoryEmpty + } = useGetResourceContent({ repoMetadata, gitRef, resourcePath, includeCommit: true }) + const [fileNotExist, setFileNotExist] = useState(false) + const { getString } = useStrings() + + useEffect(() => setFileNotExist(resourceError?.status === 404), [resourceError]) + + return ( + + + + + + + + + + + + + + fileNotExist === true, + message: getString('error404Text'), + image: Images.error404 + }}> + + + + + + + {!!repoMetadata && ( + <> + + + {!!resourceContent && ( + + )} + + {isRepositoryEmpty && } + + )} + + + + + ) +} diff --git a/web/src/pages/Repository/RepositoryContent/ContentHeader/ContentHeader.module.scss b/web/src/pages/Repository/RepositoryContent/ContentHeader/ContentHeader.module.scss new file mode 100644 index 0000000000..1a43307e5b --- /dev/null +++ b/web/src/pages/Repository/RepositoryContent/ContentHeader/ContentHeader.module.scss @@ -0,0 +1,214 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +.main { + padding: var(--spacing-large) var(--spacing-xlarge) 0 var(--spacing-xlarge) !important; + position: relative; + + div[class*='TextInput'] { + margin-bottom: 0 !important; + margin-left: 0 !important; + + input { + background: transparent !important; + border: none; + } + } + + > div { + align-items: center; + padding-bottom: var(--spacing-large) !important; + // border-bottom: 1px solid var(--grey-100); + } + + .btnColorFix > span[data-icon] { + svg[fill='none'] g > path { + fill: none !important; + } + } + + .refRoot, + .rootSlash { + align-self: center; + } + + :global { + .bp3-breadcrumb, + .bp3-breadcrumb-current, + .bp3-breadcrumbs-collapsed { + white-space: nowrap !important; + font-size: 13px; + font-weight: var(--typography-weight, normal); + font-family: var(--font-family); + color: var(--grey-900); + line-height: var(--typography-line-height, var(--line-height)); + } + + .bp3-breadcrumbs > li::after { + background: none; + content: '/'; + color: var(--grey-900); + background: none; + text-align: center; + height: 100%; + } + + .bp3-breadcrumbs-collapsed { + background: var(--grey-100); + } + } + + .searchBox { + position: absolute; + right: 16px; + top: -50px; + z-index: 2; + padding-bottom: 0 !important; + margin: 0; + cursor: pointer; + + input, + input:focus { + border: 1px solid var(--ai-purple-600) !important; + pointer-events: none; + user-select: none; + } + + input { + width: 350px !important; + } + + svg path { + fill: var(--ai-purple-600) !important; + } + + img { + position: absolute; + top: 5px; + right: 6px; + } + } +} + +.searchModal { + --modal-width: min(970px, 80vw); + --input-width: calc(var(--modal-width) - 154px); + + width: var(--modal-width); + padding: var(--spacing-medium) var(--spacing-xxlarge) var(--spacing-xlarge); + + > span:first-of-type { + display: none; + } + + .layout { + width: 100%; + + .searchContainer { + position: relative; + + span[icon] { + display: none; + } + + .searchIcon { + position: absolute; + left: 12px; + top: 11px; + z-index: 1; + } + + img { + position: absolute; + right: 14px; + top: 6px; + } + + input { + padding-left: 35px !important; + font-size: 14px; + font-weight: 500; + line-height: 19px; + letter-spacing: 0.23749999701976776px; + border-color: var(--ai-purple-600); + color: var(--grey-500); + width: var(--input-width) !important; + } + } + + button { + --button-height: 40px !important; + } + } + + .sectionHeader { + font-size: 13px !important; + font-weight: 500 !important; + letter-spacing: 0.23749999701976776px; + color: var(--grey-500) !important; + text-transform: uppercase; + } + + .sampleQuery { + height: 44px; + background-color: var(--grey-50); + color: var(--grey-500) !important; + border-radius: 4px; + padding-left: 32px; + position: relative; + + font-size: 14px !important; + font-weight: 500 !important; + line-height: 19px !important; + letter-spacing: 0.23749999701976776px; + display: flex; + align-items: center; + + background-image: 
url('data:image/svg+xml,%3Csvg%20fill%3D%22none%22%20viewBox%3D%220%200%2017%2017%22%20xmlns%3D%22http%3A%2F%2Fwww.w3.org%2F2000%2Fsvg%22%3E%3Cpath%20d%3D%22m9.91699%208.50004h3.54171c.1878%200%20.368.07463.5008.20747.1329.13283.2075.313.2075.50086v2.47913c0%20.1879-.0746.3681-.2075.5009-.1328.1328-.313.2075-.5008.2075h-2.8334c-.1878%200-.368-.0747-.5008-.2075-.13288-.1328-.20751-.313-.20751-.5009zm0%200c0-1.77083.70831-2.83333%202.83331-3.89583m-9.91664%203.89583h3.54167c.18786%200%20.36802.07463.50086.20747.13284.13283.20747.313.20747.50086v2.47913c0%20.1879-.07463.3681-.20747.5009s-.313.2075-.50086.2075h-2.83334c-.18786%200-.36803-.0747-.50087-.2075-.13283-.1328-.20746-.313-.20746-.5009zm0%200c0-1.77083.70833-2.83333%202.83333-3.89583%22%20stroke%3D%22%23dad0f6%22%20stroke-linecap%3D%22round%22%2F%3E%3Cg%20fill%3D%22%23dad0f6%22%3E%3Cpath%20d%3D%22m10%209h4v3h-4z%22%2F%3E%3Cpath%20d%3D%22m3%209h4v3h-4z%22%2F%3E%3C%2Fg%3E%3C%2Fsvg%3E'); + background-repeat: no-repeat; + background-size: 16px; + background-position: left 12px top 7px; + + &:hover, + &.selected { + background-color: var(--grey-100); + } + + &.selected svg { + visibility: visible; + } + + svg { + position: absolute; + top: 14px; + right: 15px; + color: var(--grey-300); + visibility: hidden; + } + } +} + +.backdrop { + background-color: rgb(16 22 26 / 25%); +} + +.portal { + :global { + .bp3-dialog-container.bp3-overlay-content { + align-items: flex-start !important; + } + } +} diff --git a/web/src/pages/Repository/RepositoryContent/ContentHeader/ContentHeader.module.scss.d.ts b/web/src/pages/Repository/RepositoryContent/ContentHeader/ContentHeader.module.scss.d.ts new file mode 100644 index 0000000000..0dbff10a68 --- /dev/null +++ b/web/src/pages/Repository/RepositoryContent/ContentHeader/ContentHeader.module.scss.d.ts @@ -0,0 +1,32 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* eslint-disable */ +// This is an auto-generated file +export declare const backdrop: string +export declare const btnColorFix: string +export declare const layout: string +export declare const main: string +export declare const portal: string +export declare const refRoot: string +export declare const rootSlash: string +export declare const sampleQuery: string +export declare const searchBox: string +export declare const searchContainer: string +export declare const searchIcon: string +export declare const searchModal: string +export declare const sectionHeader: string +export declare const selected: string diff --git a/web/src/pages/Repository/RepositoryContent/ContentHeader/ContentHeader.tsx b/web/src/pages/Repository/RepositoryContent/ContentHeader/ContentHeader.tsx new file mode 100644 index 0000000000..90ba292ef9 --- /dev/null +++ b/web/src/pages/Repository/RepositoryContent/ContentHeader/ContentHeader.tsx @@ -0,0 +1,308 @@ +/* + * Copyright 2023 Harness, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import React, { useCallback, useMemo, useState } from 'react' +import { noop } from 'lodash-es' +import { + Container, + Layout, + Button, + ButtonSize, + FlexExpander, + ButtonVariation, + Text, + Utils, + Dialog +} from '@harnessio/uicore' +import cx from 'classnames' +import { Icon } from '@harnessio/icons' +import { useHotkeys } from 'react-hotkeys-hook' +import { LongArrowDownLeft, Search } from 'iconoir-react' +import { Color } from '@harnessio/design-system' +import { Breadcrumbs, IBreadcrumbProps } from '@blueprintjs/core' +import { Link, useHistory } from 'react-router-dom' +import { useStrings } from 'framework/strings' +import { useAppContext } from 'AppContext' +import { CloneButtonTooltip } from 'components/CloneButtonTooltip/CloneButtonTooltip' +import { CodeIcon, GitInfoProps, isDir, isRefATag } from 'utils/GitUtils' +import { BranchTagSelect } from 'components/BranchTagSelect/BranchTagSelect' +import { useCreateBranchModal } from 'components/CreateBranchModal/CreateBranchModal' +import { useGetSpaceParam } from 'hooks/useGetSpaceParam' +import { ButtonRoleProps, permissionProps } from 'utils/Utils' +import { SearchInputWithSpinner } from 'components/SearchInputWithSpinner/SearchInputWithSpinner' +import svg from './search-background.svg' +import css from './ContentHeader.module.scss' + +export function ContentHeader({ + repoMetadata, + gitRef = repoMetadata.default_branch as string, + resourcePath, + resourceContent +}: Pick) { + const { getString } = useStrings() + const { routes, standalone, hooks } = useAppContext() + const history = useHistory() + const _isDir = isDir(resourceContent) + const space = useGetSpaceParam() + const [showSearchModal, setShowSearchModal] = useState(false) + const [searchSampleQueryIndex, setSearchSampleQueryIndex] = useState(0) + const [search, setSearch] = useState('') + const performSearch = useCallback( + (q: string) => { + history.push({ + pathname: routes.toCODESearch({ + repoPath: repoMetadata.path as string + }), + search: `q=${q}` + }) + }, + [history, repoMetadata.path, routes] + ) + const onSearch = useCallback(() => { + if (search?.trim()) { + performSearch(search) + } else if (searchSampleQueryIndex > 0 && searchSampleQueryIndex <= searchSampleQueries.length) { + performSearch(searchSampleQueries[searchSampleQueryIndex - 1]) + } + }, [performSearch, search, searchSampleQueryIndex]) + + useHotkeys( + 'ctrl+k', + () => { + if (!showSearchModal) { + setShowSearchModal(true) + } + }, + [showSearchModal] + ) + + const permPushResult = hooks?.usePermissionTranslate?.( + { + resource: { + resourceType: 'CODE_REPOSITORY' + }, + permissions: ['code_repo_push'] + }, + [space] + ) + const openCreateNewBranchModal = useCreateBranchModal({ + repoMetadata, + onSuccess: branchInfo => { + history.push( + routes.toCODERepository({ + repoPath: repoMetadata.path as string, + gitRef: branchInfo.name + }) + ) + }, + suggestedSourceBranch: gitRef, + showSuccessMessage: true + }) + const 
breadcrumbs = useMemo(() => { + return resourcePath.split('/').map((_path, index, paths) => { + const pathAtIndex = paths.slice(0, index + 1).join('/') + const href = routes.toCODERepository({ + repoPath: repoMetadata.path as string, + gitRef, + resourcePath: pathAtIndex + }) + + return { href, text: _path } + }) + }, [resourcePath, gitRef, repoMetadata.path, routes]) + + return ( + + + { + history.push( + routes.toCODERepository({ + repoPath: repoMetadata.path as string, + gitRef: ref, + resourcePath + }) + ) + }} + onCreateBranch={openCreateNewBranchModal} + /> + + + + + + + / + + { + return ( + + {text} + + ) + }} + /> + + + + {_isDir && ( + <> + + + ) : ( + <> + + {getString('repoDelete.deleteConfirm2', { + repo: repoMetadata?.uid + })} + + { + setDeleteConfirmString(e.currentTarget.value) + }} + /> + + + )} + + ) + }, [showConfirmPage, deleteConfirmString, loading, repoMetadata]) + + return { + openModal, + hideModal + } +} + +export default useDeleteRepoModal diff --git a/web/src/pages/RepositorySettings/GeneralSettingsContent/GeneralSettingsContent.tsx b/web/src/pages/RepositorySettings/GeneralSettingsContent/GeneralSettingsContent.tsx new file mode 100644 index 0000000000..8d0a8f1a52 --- /dev/null +++ b/web/src/pages/RepositorySettings/GeneralSettingsContent/GeneralSettingsContent.tsx @@ -0,0 +1,189 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import React, { useState } from 'react' +import { + Container, + Layout, + Text, + Button, + ButtonVariation, + Formik, + useToaster, + ButtonSize, + TextInput +} from '@harnessio/uicore' +import { Color, Intent } from '@harnessio/design-system' +import { useMutate } from 'restful-react' +import { ACCESS_MODES, permissionProps, voidFn } from 'utils/Utils' +import { useStrings } from 'framework/strings' +import type { TypesRepository } from 'services/code' +import { useAppContext } from 'AppContext' +import { useGetSpaceParam } from 'hooks/useGetSpaceParam' +import useDeleteRepoModal from './DeleteRepoModal/DeleteRepoModal' +import css from '../RepositorySettings.module.scss' + +interface GeneralSettingsProps { + repoMetadata: TypesRepository | undefined + refetch: () => void +} + +const GeneralSettingsContent = (props: GeneralSettingsProps) => { + const { repoMetadata, refetch } = props + const { openModal: openDeleteRepoModal } = useDeleteRepoModal() + + const [editDesc, setEditDesc] = useState(ACCESS_MODES.VIEW) + const { showError, showSuccess } = useToaster() + + const space = useGetSpaceParam() + const { standalone } = useAppContext() + const { hooks } = useAppContext() + const { getString } = useStrings() + const { mutate } = useMutate({ + verb: 'PATCH', + path: `/api/v1/repos/${repoMetadata?.path}/+/` + }) + + const permEditResult = hooks?.usePermissionTranslate?.( + { + resource: { + resourceType: 'CODE_REPOSITORY' + }, + permissions: ['code_repo_edit'] + }, + [space] + ) + const permDeleteResult = hooks?.usePermissionTranslate?.( + { + resource: { + resourceType: 'CODE_REPOSITORY' + }, + permissions: ['code_repo_delete'] + }, + [space] + ) + + return ( + + {formik => { + return ( + + + + + + {getString('repositoryName')} + + + + + {repoMetadata?.uid} + + + + + + + {getString('description')} + + + + {editDesc === ACCESS_MODES.EDIT ? ( + + { + formik.setFieldValue('desc', (evt.currentTarget as HTMLInputElement)?.value) + }} + value={formik.values.desc || repoMetadata?.description} + name="desc" + /> + + + + + + ) + }} + + ) +} + +export default GeneralSettingsContent diff --git a/web/src/pages/RepositorySettings/RepositorySettings.module.scss b/web/src/pages/RepositorySettings/RepositorySettings.module.scss new file mode 100644 index 0000000000..d745a404cb --- /dev/null +++ b/web/src/pages/RepositorySettings/RepositorySettings.module.scss @@ -0,0 +1,104 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+.main {
+  min-height: var(--page-height);
+  background-color: var(--primary-bg) !important;
+  width: 100%;
+  margin: var(--spacing-small);
+  :global {
+    .bp3-tab {
+      width: 184px !important;
+      height: 34px;
+    }
+
+    .bp3-tab-panel {
+      width: 100%;
+      height: 500px;
+    }
+
+    .bp3-tab {
+      margin-top: 20px;
+    }
+
+    .bp3-tabs.bp3-vertical > .bp3-tab-list .bp3-tab[aria-selected='true'] {
+      background-color: var(--primary-2);
+      -webkit-box-shadow: none;
+      box-shadow: none;
+    }
+
+    // .bp3-tab-indicator-wrapper {
+    //   height: unset !important;
+    // }
+
+    // .bp3-tab:not([aria-disabled='true']):active {
+    //   color: var(--primary-7) !important;
+    //   background: #cdf4fe !important;
+    //   padding: 5px;
+    // }
+  }
+}
+
+.webhooksContent {
+  width: 100%;
+}
+
+.btn {
+  margin-top: 5px;
+}
+
+.webhookHeader {
+  padding-left: 0 !important;
+  margin-left: 0 !important;
+}
+
+.contentContainer {
+  margin: 20px !important;
+}
+
+.generalContainer {
+  width: 100%;
+  background: var(--grey-0) !important;
+  box-shadow: 0px 0px 1px rgba(40, 41, 61, 0.08), 0px 0.5px 2px rgba(96, 97, 112, 0.16);
+  border-radius: 4px;
+}
+
+.label {
+  width: 30%;
+  padding-right: var(--spacing-medium);
+}
+
+.content {
+  width: 70%;
+}
+
+.deleteContainer {
+  display: flex;
+  justify-content: space-between;
+}
+
+.saveBtn {
+  margin-left: var(--spacing-medium) !important;
+}
+
+.textContainer {
+  width: 80%;
+}
+
+.buttonContainer {
+  width: 20%;
+  padding-top: var(--spacing-xsmall) !important;
+}
diff --git a/web/src/pages/RepositorySettings/RepositorySettings.module.scss.d.ts b/web/src/pages/RepositorySettings/RepositorySettings.module.scss.d.ts
new file mode 100644
index 0000000000..49a731550e
--- /dev/null
+++ b/web/src/pages/RepositorySettings/RepositorySettings.module.scss.d.ts
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2023 Harness, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/* eslint-disable */
+// This is an auto-generated file
+export declare const btn: string
+export declare const buttonContainer: string
+export declare const content: string
+export declare const contentContainer: string
+export declare const deleteContainer: string
+export declare const generalContainer: string
+export declare const label: string
+export declare const main: string
+export declare const saveBtn: string
+export declare const textContainer: string
+export declare const webhookHeader: string
+export declare const webhooksContent: string
diff --git a/web/src/pages/RepositorySettings/RepositorySettings.tsx b/web/src/pages/RepositorySettings/RepositorySettings.tsx
new file mode 100644
index 0000000000..648dd3b543
--- /dev/null
+++ b/web/src/pages/RepositorySettings/RepositorySettings.tsx
@@ -0,0 +1,70 @@
+/*
+ * Copyright 2023 Harness, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import React from 'react' + +import { PageBody, Container, Tabs } from '@harnessio/uicore' +import { useGetRepositoryMetadata } from 'hooks/useGetRepositoryMetadata' +import { useStrings } from 'framework/strings' + +import { RepositoryPageHeader } from 'components/RepositoryPageHeader/RepositoryPageHeader' +import { getErrorMessage, voidFn } from 'utils/Utils' +import { LoadingSpinner } from 'components/LoadingSpinner/LoadingSpinner' +import GeneralSettingsContent from './GeneralSettingsContent/GeneralSettingsContent' +import css from './RepositorySettings.module.scss' + +enum SettingsTab { + webhooks = 'webhooks', + general = 'general' +} +export default function RepositorySettings() { + const { repoMetadata, error, loading, refetch } = useGetRepositoryMetadata() + + const [activeTab, setActiveTab] = React.useState(SettingsTab.general) + + const { getString } = useStrings() + return ( + + + + + {repoMetadata && ( + + setActiveTab(id)} + tabList={[ + { + id: SettingsTab.general, + title: getString('general'), + panel: , + iconProps: { name: 'cog' } + } + ]}> + + )} + + + ) +} diff --git a/web/src/pages/RepositorySettings/RepossitorySettingsContent/RepositorySettingsContent.tsx b/web/src/pages/RepositorySettings/RepossitorySettingsContent/RepositorySettingsContent.tsx new file mode 100644 index 0000000000..2ea3c9defb --- /dev/null +++ b/web/src/pages/RepositorySettings/RepossitorySettingsContent/RepositorySettingsContent.tsx @@ -0,0 +1,25 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// import React from 'react' +// import type { GitInfoProps } from 'utils/GitUtils' + +// export function RepositorySettingsContent({ repoMetadata }: Pick) { +// if (repoMetadata) { +// return +// } +// return null +// } diff --git a/web/src/pages/RepositorySettings/SettingsContent.tsx b/web/src/pages/RepositorySettings/SettingsContent.tsx new file mode 100644 index 0000000000..21803673d5 --- /dev/null +++ b/web/src/pages/RepositorySettings/SettingsContent.tsx @@ -0,0 +1,78 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import React, { useMemo } from 'react' +import { orderBy } from 'lodash-es' +import { Container, TableV2 as Table, Text, Layout, Button, ButtonVariation } from '@harnessio/uicore' +import { Icon } from '@harnessio/icons' +import { Color } from '@harnessio/design-system' +import type { CellProps, Column } from 'react-table' +import { useStrings } from 'framework/strings' +import type { GitInfoProps } from 'utils/GitUtils' +import css from './RepositorySettings.module.scss' + +interface Hook { + url: string +} +interface SettingsContentProps extends Pick { + hooks: Hook[] +} + +export function SettingsContent({ hooks }: SettingsContentProps) { + const { getString } = useStrings() + const columns: Column[] = useMemo( + () => [ + { + id: 'url', + width: '85%', + Cell: ({ row }: CellProps) => { + return ( + + + + {row.original.url} + + ({getString('webhookListingContent')}) + + ) + } + }, + { + id: 'actions', + width: '15%', + Cell: () => { + return ( + + + + + ) + } + } + ], + [getString] + ) + + return ( + + hideHeaders columns={columns} data={orderBy(hooks)} /> + + ) +} diff --git a/web/src/pages/RepositorySettings/mockWebhooks.json b/web/src/pages/RepositorySettings/mockWebhooks.json new file mode 100644 index 0000000000..efbd3e6548 --- /dev/null +++ b/web/src/pages/RepositorySettings/mockWebhooks.json @@ -0,0 +1,14 @@ +[ + { + "id": 1, + "url": "https://some.url/path", + "verifySsl": true, + "events": [ + "eventA", + "eventB" + ], + "Created": 1667799167640, + "Updated": 1667799167640, + "CreatedBy": 42 + } +] \ No newline at end of file diff --git a/web/src/pages/RepositoryTags/RepositoryTags.module.scss b/web/src/pages/RepositoryTags/RepositoryTags.module.scss new file mode 100644 index 0000000000..3085b05ff3 --- /dev/null +++ b/web/src/pages/RepositoryTags/RepositoryTags.module.scss @@ -0,0 +1,20 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +.main { + min-height: var(--page-height); + background-color: var(--primary-bg) !important; +} diff --git a/web/src/pages/RepositoryTags/RepositoryTags.module.scss.d.ts b/web/src/pages/RepositoryTags/RepositoryTags.module.scss.d.ts new file mode 100644 index 0000000000..e0cc3bd1ac --- /dev/null +++ b/web/src/pages/RepositoryTags/RepositoryTags.module.scss.d.ts @@ -0,0 +1,19 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/* eslint-disable */ +// This is an auto-generated file +export declare const main: string diff --git a/web/src/pages/RepositoryTags/RepositoryTags.tsx b/web/src/pages/RepositoryTags/RepositoryTags.tsx new file mode 100644 index 0000000000..435e07b5cc --- /dev/null +++ b/web/src/pages/RepositoryTags/RepositoryTags.tsx @@ -0,0 +1,41 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import React from 'react' +import { Container, PageBody } from '@harnessio/uicore' +import { useGetRepositoryMetadata } from 'hooks/useGetRepositoryMetadata' +import { useStrings } from 'framework/strings' +import { RepositoryPageHeader } from 'components/RepositoryPageHeader/RepositoryPageHeader' +import { LoadingSpinner } from 'components/LoadingSpinner/LoadingSpinner' +import { voidFn, getErrorMessage } from 'utils/Utils' +import { RepositoryTagsContent } from './RepositoryTagsContent/RepositoryTagsContent' +import css from './RepositoryTags.module.scss' + +export default function RepositoryTags() { + const { getString } = useStrings() + const { repoMetadata, error, loading, refetch } = useGetRepositoryMetadata() + + return ( + + + + + + {repoMetadata ? : null} + + + ) +} diff --git a/web/src/pages/RepositoryTags/RepositoryTagsContent/RepositoryTagsContent.module.scss b/web/src/pages/RepositoryTags/RepositoryTagsContent/RepositoryTagsContent.module.scss new file mode 100644 index 0000000000..06663d8c6c --- /dev/null +++ b/web/src/pages/RepositoryTags/RepositoryTagsContent/RepositoryTagsContent.module.scss @@ -0,0 +1,23 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +.resourceContent { + background-color: var(--primary-bg); + + .noData > div { + height: calc(100vh - var(--page-header-height, 64px) - 120px) !important; + } +} diff --git a/web/src/pages/RepositoryTags/RepositoryTagsContent/RepositoryTagsContent.module.scss.d.ts b/web/src/pages/RepositoryTags/RepositoryTagsContent/RepositoryTagsContent.module.scss.d.ts new file mode 100644 index 0000000000..b77c2cab3c --- /dev/null +++ b/web/src/pages/RepositoryTags/RepositoryTagsContent/RepositoryTagsContent.module.scss.d.ts @@ -0,0 +1,20 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* eslint-disable */ +// This is an auto-generated file +export declare const noData: string +export declare const resourceContent: string diff --git a/web/src/pages/RepositoryTags/RepositoryTagsContent/RepositoryTagsContent.tsx b/web/src/pages/RepositoryTags/RepositoryTagsContent/RepositoryTagsContent.tsx new file mode 100644 index 0000000000..896b9b6cd6 --- /dev/null +++ b/web/src/pages/RepositoryTags/RepositoryTagsContent/RepositoryTagsContent.tsx @@ -0,0 +1,139 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import React, { useEffect, useState } from 'react' +import { Container } from '@harnessio/uicore' +import { useGet } from 'restful-react' +import { useHistory } from 'react-router-dom' +import type { RepoCommitTag } from 'services/code' +import { usePageIndex } from 'hooks/usePageIndex' +import { LIST_FETCHING_LIMIT, permissionProps, PageBrowserProps } from 'utils/Utils' +import { useQueryParams } from 'hooks/useQueryParams' +import { useUpdateQueryParams } from 'hooks/useUpdateQueryParams' +import { useAppContext } from 'AppContext' +import type { GitInfoProps } from 'utils/GitUtils' +import { ResourceListingPagination } from 'components/ResourceListingPagination/ResourceListingPagination' +import { useShowRequestError } from 'hooks/useShowRequestError' +import { useGetSpaceParam } from 'hooks/useGetSpaceParam' +import { useStrings } from 'framework/strings' +import { NoResultCard } from 'components/NoResultCard/NoResultCard' +import { useCreateTagModal } from 'components/CreateTagModal/CreateTagModal' +import { RepositoryTagsContentHeader } from '../RepositoryTagsContentHeader/RepositoryTagsContentHeader' +import { TagsContent } from '../TagsContent/TagsContent' +import css from './RepositoryTagsContent.module.scss' + +export function RepositoryTagsContent({ repoMetadata }: Pick) { + const { getString } = useStrings() + const { routes } = useAppContext() + const history = useHistory() + const [searchTerm, setSearchTerm] = useState('') + const openModal = useCreateTagModal({ + repoMetadata, + onSuccess: () => { + refetch() + }, + showSuccessMessage: true + }) + const { updateQueryParams } = useUpdateQueryParams() + + const pageBrowser = useQueryParams() + const pageInit = pageBrowser.page ? 
parseInt(pageBrowser.page) : 1 + const [page, setPage] = usePageIndex(pageInit) + const { + data: branches, + response, + error, + loading, + refetch + } = useGet({ + path: `/api/v1/repos/${repoMetadata.path}/+/tags`, + queryParams: { + limit: LIST_FETCHING_LIMIT, + page, + sort: 'date', + order: 'desc', + include_commit: true, + query: searchTerm, + debounce: 500 + } + }) + + useEffect(() => { + if (page > 1) { + updateQueryParams({ page: page.toString() }) + } + }, [setPage]) // eslint-disable-line react-hooks/exhaustive-deps + + useShowRequestError(error) + const space = useGetSpaceParam() + + const { standalone } = useAppContext() + const { hooks } = useAppContext() + const permPushResult = hooks?.usePermissionTranslate?.( + { + resource: { + resourceType: 'CODE_REPOSITORY' + }, + permissions: ['code_repo_push'] + }, + [space] + ) + + return ( + + { + setPage(1) + history.push( + routes.toCODECommits({ + repoPath: repoMetadata.path as string, + commitRef: gitRef + }) + ) + }} + onSearchTermChanged={value => { + setSearchTerm(value) + setPage(1) + }} + onNewBranchCreated={refetch} + /> + + {!!branches?.length && ( + + )} + + !!branches && branches.length === 0} + forSearch={!!searchTerm} + message={getString('tagEmpty')} + onButtonClick={() => { + openModal() + }} + /> + + + + ) +} diff --git a/web/src/pages/RepositoryTags/RepositoryTagsContentHeader/RepositoryTagsContentHeader.module.scss b/web/src/pages/RepositoryTags/RepositoryTagsContentHeader/RepositoryTagsContentHeader.module.scss new file mode 100644 index 0000000000..8cd071e957 --- /dev/null +++ b/web/src/pages/RepositoryTags/RepositoryTagsContentHeader/RepositoryTagsContentHeader.module.scss @@ -0,0 +1,36 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +.main { + div[class*='TextInput'] { + margin-bottom: 0 !important; + margin-left: 0 !important; + + // input { + // background: transparent !important; + // border: none; + // } + } + + > div { + align-items: center; + padding-bottom: var(--spacing-xlarge) !important; + } +} + +.branchDropdown { + background-color: var(--white); +} diff --git a/web/src/pages/RepositoryTags/RepositoryTagsContentHeader/RepositoryTagsContentHeader.module.scss.d.ts b/web/src/pages/RepositoryTags/RepositoryTagsContentHeader/RepositoryTagsContentHeader.module.scss.d.ts new file mode 100644 index 0000000000..8864cab8a7 --- /dev/null +++ b/web/src/pages/RepositoryTags/RepositoryTagsContentHeader/RepositoryTagsContentHeader.module.scss.d.ts @@ -0,0 +1,20 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* eslint-disable */ +// This is an auto-generated file +export declare const branchDropdown: string +export declare const main: string diff --git a/web/src/pages/RepositoryTags/RepositoryTagsContentHeader/RepositoryTagsContentHeader.tsx b/web/src/pages/RepositoryTags/RepositoryTagsContentHeader/RepositoryTagsContentHeader.tsx new file mode 100644 index 0000000000..20c527e86f --- /dev/null +++ b/web/src/pages/RepositoryTags/RepositoryTagsContentHeader/RepositoryTagsContentHeader.tsx @@ -0,0 +1,66 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import React, { useState } from 'react' +import { Container, Layout, FlexExpander, ButtonVariation } from '@harnessio/uicore' +import { useStrings } from 'framework/strings' +import { GitBranchType, CodeIcon, GitInfoProps } from 'utils/GitUtils' +import { SearchInputWithSpinner } from 'components/SearchInputWithSpinner/SearchInputWithSpinner' +import { CreateTagModalButton } from 'components/CreateTagModal/CreateTagModal' +import css from './RepositoryTagsContentHeader.module.scss' + +interface RepositoryTagsContentHeaderProps extends Pick { + loading?: boolean + activeBranchType?: GitBranchType + onBranchTypeSwitched: (branchType: GitBranchType) => void + onSearchTermChanged: (searchTerm: string) => void + onNewBranchCreated: () => void +} + +export function RepositoryTagsContentHeader({ + onSearchTermChanged, + repoMetadata, + onNewBranchCreated, + loading +}: RepositoryTagsContentHeaderProps) { + const { getString } = useStrings() + const [searchTerm, setSearchTerm] = useState('') + + return ( + + + { + setSearchTerm(value) + onSearchTermChanged(value) + }} + /> + + + + + ) +} diff --git a/web/src/pages/RepositoryTags/TagsContent/TagsContent.module.scss b/web/src/pages/RepositoryTags/TagsContent/TagsContent.module.scss new file mode 100644 index 0000000000..4c8c3f5d3c --- /dev/null +++ b/web/src/pages/RepositoryTags/TagsContent/TagsContent.module.scss @@ -0,0 +1,80 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+.container {
+  .table {
+    background-color: var(--white) !important;
+
+    [class*='TableV2--header'] {
+      box-shadow: 0px 0px 1px rgb(40 41 61 / 8%), 0px 0.5px 2px rgb(96 97 112 / 16%);
+      border-bottom: 1px solid var(--grey-100);
+
+      [class*='variation-table-headers'] {
+        text-transform: none;
+        color: var(--grey-400);
+        font-weight: 500;
+        font-size: 13px;
+      }
+    }
+
+    .row {
+      justify-content: center;
+      padding-top: 0;
+      padding-bottom: 0px;
+      height: 40px;
+      box-shadow: 0px 0px 1px rgba(40, 41, 61, 0.08), 0px 0.5px 2px rgba(96, 97, 112, 0.16);
+      overflow: hidden;
+      border-radius: 0;
+      margin-bottom: 0;
+
+      &.odd {
+        background-color: var(--grey-50);
+      }
+
+      &:hover {
+        background-color: #0092e40a !important;
+      }
+
+      .rowText {
+        font-size: 13px;
+        font-weight: 400;
+
+        &.defaultBranch .commitLink {
+          font-weight: 600;
+        }
+
+        .spacer {
+          display: inline-block;
+          width: 6px;
+        }
+      }
+
+      .commitLink {
+        font-weight: 400;
+        font-size: 13px;
+        color: var(--black);
+
+        &:hover {
+          color: var(--primary-8);
+        }
+      }
+    }
+  }
+}
+
+.popover {
+  padding: var(--spacing-small);
+}
diff --git a/web/src/pages/RepositoryTags/TagsContent/TagsContent.module.scss.d.ts b/web/src/pages/RepositoryTags/TagsContent/TagsContent.module.scss.d.ts
new file mode 100644
index 0000000000..0010c02e45
--- /dev/null
+++ b/web/src/pages/RepositoryTags/TagsContent/TagsContent.module.scss.d.ts
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2023 Harness, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/* eslint-disable */
+// This is an auto-generated file
+export declare const commitLink: string
+export declare const container: string
+export declare const defaultBranch: string
+export declare const odd: string
+export declare const popover: string
+export declare const row: string
+export declare const rowText: string
+export declare const spacer: string
+export declare const table: string
diff --git a/web/src/pages/RepositoryTags/TagsContent/TagsContent.tsx b/web/src/pages/RepositoryTags/TagsContent/TagsContent.tsx
new file mode 100644
index 0000000000..cd9cb77f22
--- /dev/null
+++ b/web/src/pages/RepositoryTags/TagsContent/TagsContent.tsx
@@ -0,0 +1,246 @@
+/*
+ * Copyright 2023 Harness, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +import React, { useMemo } from 'react' +import { Container, TableV2 as Table, Text, Avatar, useToaster, StringSubstitute } from '@harnessio/uicore' +import { Color, Intent } from '@harnessio/design-system' +import type { CellProps, Column } from 'react-table' +import { Link, useHistory } from 'react-router-dom' +import cx from 'classnames' +import Keywords from 'react-keywords' +import { useMutate } from 'restful-react' +import { noop } from 'lodash-es' +import { String, useStrings } from 'framework/strings' +import { useAppContext } from 'AppContext' + +import type { RepoBranch, RepoCommitTag, TypesRepository } from 'services/code' +import { formatDate, getErrorMessage, voidFn } from 'utils/Utils' +import { useConfirmAction } from 'hooks/useConfirmAction' +import { OptionsMenuButton } from 'components/OptionsMenuButton/OptionsMenuButton' +import { useCreateBranchModal } from 'components/CreateBranchModal/CreateBranchModal' +import { CommitActions } from 'components/CommitActions/CommitActions' +import { CodeIcon, REFS_TAGS_PREFIX } from 'utils/GitUtils' +import css from './TagsContent.module.scss' + +interface TagsContentProps { + searchTerm?: string + repoMetadata: TypesRepository + branches: RepoBranch[] + onDeleteSuccess: () => void +} + +export function TagsContent({ repoMetadata, searchTerm = '', branches, onDeleteSuccess }: TagsContentProps) { + const { routes } = useAppContext() + const history = useHistory() + const { getString } = useStrings() + + const onSuccess = voidFn(noop) + + const columns: Column[] = useMemo( + () => [ + { + Header: getString('tag'), + width: '20%', + Cell: ({ row }: CellProps) => { + return ( + + + {row.original?.name} + + + ) + } + }, + { + Header: getString('description'), + width: '35%', + Cell: ({ row }: CellProps) => { + return ( + + {row.original?.message} + + ) + } + }, + { + Header: getString('commit'), + Id: 'commit', + width: '15%', + Cell: ({ row }: CellProps) => { + return ( + + ) + } + }, + + { + Header: getString('tagger'), + width: '15%', + Cell: ({ row }: CellProps) => { + return ( + + {row.original.tagger?.identity?.name ? ( + + ) : ( + '' + )} + + {row.original.tagger?.identity?.name || ''} + + ) + } + }, + { + Header: getString('creationDate'), + width: '200px', + Cell: ({ row }: CellProps) => { + return row.original.tagger?.when ? 
( + + + {formatDate(row.original.tagger?.when as string)} + + ) : ( + '' + ) + } + }, + { + id: 'action', + width: '30px', + Cell: ({ row }: CellProps) => { + const { mutate: deleteBranch } = useMutate({ + verb: 'DELETE', + path: `/api/v1/repos/${repoMetadata.path}/+/tags/${row.original.name}` + }) + const { showSuccess, showError } = useToaster() + const confirmDeleteTag = useConfirmAction({ + title: getString('deleteTag'), + confirmText: getString('confirmDelete'), + intent: Intent.DANGER, + message: , + action: async () => { + deleteBranch({}) + .then(() => { + showSuccess( + , + 5000 + ) + onDeleteSuccess() + }) + .catch(error => { + showError(getErrorMessage(error), 0, 'failedToDeleteTag') + }) + } + }) + const openModal = useCreateBranchModal({ + repoMetadata, + onSuccess, + showSuccessMessage: true, + suggestedSourceBranch: row.original.name, + showBranchTag: false, + refIsATag: true + }) + + return ( + { + openModal() + } + }, + { + text: getString('viewFiles'), + iconName: CodeIcon.FileLight, + iconSize: 16, + hasIcon: true, + onClick: () => { + history.push( + routes.toCODERepository({ + repoPath: repoMetadata.path as string, + gitRef: `${REFS_TAGS_PREFIX}${row.original?.name}` + }) + ) + } + }, + '-', + { + text: getString('deleteTag'), + iconName: CodeIcon.Delete, + iconSize: 16, + hasIcon: true, + isDanger: true, + onClick: confirmDeleteTag + } + ]} + isDark + /> + ) + } + } + ], + [ + // eslint-disable-line react-hooks/exhaustive-deps + getString, + routes, + searchTerm, + history, + onDeleteSuccess, + repoMetadata, + onSuccess + ] // eslint-disable-line react-hooks/exhaustive-deps + ) + + return ( + + + className={css.table} + columns={columns} + data={branches || []} + getRowClassName={row => cx(css.row, (row.index + 1) % 2 ? css.odd : '')} + /> + + ) +} diff --git a/web/src/pages/Search/Search.module.scss b/web/src/pages/Search/Search.module.scss new file mode 100644 index 0000000000..1b2d9f335a --- /dev/null +++ b/web/src/pages/Search/Search.module.scss @@ -0,0 +1,245 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +@import 'src/utils/utils'; + +.main { + --header-height: 128px; + --border-color: var(--grey-100); + + min-height: var(--page-height); + background-color: var(--primary-bg) !important; + + .pageHeader { + flex-direction: column; + height: var(--header-height); + align-items: normal !important; + justify-content: flex-start !important; + padding-top: 0 !important; + + [class*='breadcrumbs'] > [class*='module_header'] { + padding-top: 13px !important; + } + + .searchBox { + display: flex; + flex-direction: row; + + > div { + flex-grow: 1; + + div { + width: 100%; + } + } + + input { + width: calc(100% - 8px) !important; + border: 1px solid var(--ai-purple-600) !important; + } + + svg path { + fill: var(--ai-purple-600) !important; + } + } + + & + div { + --page-header-height: var(--header-height) !important; + } + } + + .split { + > div:first-of-type { + background-color: #fbfcfd; + overflow: auto; + } + + > div:last-of-type { + background-color: var(--white); + overflow: scroll; + } + + .searchResult { + padding: var(--spacing-medium) var(--spacing-large) var(--spacing-large) var(--spacing-xlarge); + + .resultTitle { + text-transform: uppercase; + + font-size: 10px; + font-weight: 600; + color: var(--grey-400); + } + + .result { + padding: var(--spacing-medium); + border: 1px solid rgba(243, 243, 250, 1); + border-radius: 5px; + background-color: var(--white); + + &.selected { + border-color: rgba(217, 218, 229, 1); + background-color: rgba(246, 241, 255, 1); + box-shadow: 0px 0.5px 2px 0px rgba(96, 97, 112, 0.16), 0px 0px 1px 0px rgba(40, 41, 61, 0.08); + } + + &:hover:not(.selected) { + border-color: rgba(217, 218, 229, 1); + background-color: rgba(246, 241, 255, 0.5); + } + + .layout { + align-items: baseline; + } + + .texts { + flex-grow: 1; + } + + .filename { + font-size: 13px; + font-weight: 600; + color: rgba(79, 81, 98, 1); + } + + .path { + font-size: 11px; + font-weight: 500; + color: rgba(146, 147, 171, 1); + } + + .aiLabel { + background: var(--ai-purple-100); + color: var(--ai-purple-600); + text-transform: uppercase; + font-size: 8px; + font-weight: 800; + text-align: center; + padding: 3px 6px; + border-radius: 2px; + white-space: nowrap; + } + } + } + + :global { + .Resizer.vertical { + width: 13px; + background-color: var(--border-color); + opacity: 1; + + &:active, + &:focus, + &:hover { + background-color: var(--primary-6); + border-color: transparent !important; + } + } + } + + .preview { + height: 100%; + position: relative; + + &.noResult { + > * { + visibility: hidden; + } + } + + .filePath { + height: 45px; + border-bottom: 1px solid var(--border-color); + display: flex; + align-items: center; + padding: 0 var(--spacing-medium); + + > div:first-of-type { + flex-grow: 1; + width: calc(100% - 150px); + } + + button { + white-space: nowrap; + } + + .pathText { + align-self: center; + color: var(--grey-500); + } + + :global { + .bp3-breadcrumb, + .bp3-breadcrumb-current, + .bp3-breadcrumbs-collapsed { + white-space: nowrap !important; + font-size: 13px; + font-weight: 400; + color: var(--grey-500); + } + + .bp3-breadcrumbs > li::after { + background: none; + content: '/'; + color: var(--grey-500); + background: none; + text-align: center; + height: 100%; + } + + .bp3-breadcrumbs-collapsed { + background: var(--grey-100); + } + } + } + + .fileContent { + flex-grow: 1; + height: calc(100% - 45px); + overflow: auto; + + :global { + .cm-editor { + border: none; + + .cm-scroller { + padding: 0; + + .cm-line { + &, + * { + @include mono-font; + } + } + } + + 
.cm-gutters { + border-right: none; + + .cm-gutterElement { + padding-left: 30px; + padding-right: 6px; + } + } + } + } + } + + .highlightLineNumber { + background-color: var(--ai-purple-100); + } + } + } +} diff --git a/web/src/pages/Search/Search.module.scss.d.ts b/web/src/pages/Search/Search.module.scss.d.ts new file mode 100644 index 0000000000..20a628a1af --- /dev/null +++ b/web/src/pages/Search/Search.module.scss.d.ts @@ -0,0 +1,37 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* eslint-disable */ +// This is an auto-generated file +export declare const aiLabel: string +export declare const fileContent: string +export declare const filename: string +export declare const filePath: string +export declare const highlightLineNumber: string +export declare const layout: string +export declare const main: string +export declare const noResult: string +export declare const pageHeader: string +export declare const path: string +export declare const pathText: string +export declare const preview: string +export declare const result: string +export declare const resultTitle: string +export declare const searchBox: string +export declare const searchResult: string +export declare const selected: string +export declare const split: string +export declare const texts: string diff --git a/web/src/pages/Search/Search.tsx b/web/src/pages/Search/Search.tsx new file mode 100644 index 0000000000..581c79e494 --- /dev/null +++ b/web/src/pages/Search/Search.tsx @@ -0,0 +1,359 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import React, { useCallback, useEffect, useMemo, useRef, useState } from 'react' +import { + Button, + ButtonSize, + ButtonVariation, + Container, + Layout, + PageBody, + stringSubstitute, + Text, + useToaster +} from '@harnessio/uicore' +import cx from 'classnames' +import { lineNumbers, ViewUpdate } from '@codemirror/view' +import { Breadcrumbs, IBreadcrumbProps } from '@blueprintjs/core' +import { Link, useHistory, useLocation } from 'react-router-dom' +import { EditorView } from '@codemirror/view' +import { Match, Truthy, Falsy } from 'react-jsx-match' +import { Icon } from '@harnessio/icons' +import { useMutate } from 'restful-react' +import { Editor } from 'components/Editor/Editor' +import { useGetRepositoryMetadata } from 'hooks/useGetRepositoryMetadata' +import { useStrings } from 'framework/strings' +import { RepositoryPageHeader } from 'components/RepositoryPageHeader/RepositoryPageHeader' +import { Split } from 'components/Split/Split' +import { CodeIcon, decodeGitContent } from 'utils/GitUtils' +import { LoadingSpinner } from 'components/LoadingSpinner/LoadingSpinner' +import { useQueryParams } from 'hooks/useQueryParams' +import { useAppContext } from 'AppContext' +import { SearchInputWithSpinner } from 'components/SearchInputWithSpinner/SearchInputWithSpinner' +import { voidFn, getErrorMessage, ButtonRoleProps } from 'utils/Utils' +import type { RepoFileContent } from 'services/code' +import { useShowRequestError } from 'hooks/useShowRequestError' +import { NoResultCard } from 'components/NoResultCard/NoResultCard' +import { useGetResourceContent } from 'hooks/useGetResourceContent' +import { addClassToLinesExtension } from 'utils/codemirror/addClassToLinesExtension' +import css from './Search.module.scss' + +export default function Search() { + const { showError } = useToaster() + const history = useHistory() + const location = useLocation() + const highlightedLines = useRef([]) + const [highlightlLineNumbersExtension, updateHighlightlLineNumbers] = useMemo( + () => addClassToLinesExtension([], css.highlightLineNumber), + [] + ) + const extensions = useMemo(() => { + return [ + lineNumbers({ + formatNumber: (lineNo: number) => lineNo.toString() + }), + highlightlLineNumbersExtension + ] + }, [highlightlLineNumbersExtension]) + const viewRef = useRef() + const { getString } = useStrings() + const { routes } = useAppContext() + const { q } = useQueryParams<{ q: string }>() + const [searchTerm, setSearchTerm] = useState(q || '') + const { repoMetadata, error, loading, refetch } = useGetRepositoryMetadata() + const [resourcePath, setResourcePath] = useState('') + const [filename, setFileName] = useState('') + const gitRef = useMemo(() => repoMetadata?.default_branch || '', [repoMetadata]) + const breadcrumbs = useMemo(() => { + return repoMetadata?.path + ? 
resourcePath.split('/').map((_path, index, paths) => { + const pathAtIndex = paths.slice(0, index + 1).join('/') + const href = routes.toCODERepository({ + repoPath: repoMetadata.path as string, + gitRef, + resourcePath: pathAtIndex + }) + + return { href, text: _path } + }) + : [] + }, [resourcePath, repoMetadata, gitRef, routes]) + const onSelectResult = useCallback( + (fileName: string, filePath: string, _content: string, _highlightedLines: number[]) => { + updateHighlightlLineNumbers(_highlightedLines, viewRef.current) + highlightedLines.current = _highlightedLines + setFileName(fileName) + setResourcePath(filePath) + }, + [updateHighlightlLineNumbers] + ) + const { + data: resourceContent, + error: resourceError = null, + loading: resourceLoading + } = useGetResourceContent({ repoMetadata, gitRef, resourcePath, includeCommit: false, lazy: !resourcePath }) + const fileContent: string = useMemo( + () => + resourceContent?.path === resourcePath + ? decodeGitContent((resourceContent?.content as RepoFileContent)?.data) + : resourceError + ? getString('failedToFetchFileContent') + : '', + + [resourceContent?.content, resourceContent?.path, resourcePath, resourceError, getString] + ) + + // eslint-disable-next-line react-hooks/exhaustive-deps + const onViewUpdate = useCallback(({ view, docChanged }: ViewUpdate) => { + const firstLine = (highlightedLines.current || [])[0] + + if (docChanged && firstLine > 0 && view.state.doc.lines >= firstLine) { + view.dispatch({ + effects: EditorView.scrollIntoView(view.state.doc.line(firstLine).from, { y: 'start', yMargin: 18 * 2 }) + }) + } + }, []) + const [loadingSearch, setLoadingSearch] = useState(false) + const { mutate: sendSearch } = useMutate({ + verb: 'POST', + path: `/api/v1/repos/${repoMetadata?.path}/+/semantic/search` + }) + const [searchResult, setSearchResult] = useState([]) + const performSearch = useCallback(() => { + setLoadingSearch(true) + history.replace({ pathname: location.pathname, search: `q=${searchTerm}` }) + + sendSearch({ query: searchTerm }) + .then(response => { + setSearchResult(response) + }) + .catch(exception => { + showError(getErrorMessage(exception), 0) + }) + .finally(() => { + setLoadingSearch(false) + }) + }, [searchTerm, history, location, sendSearch, showError]) + + useEffect(() => { + if (q && repoMetadata?.path) { + performSearch() + } + }, [repoMetadata?.path]) // eslint-disable-line react-hooks/exhaustive-deps + + useEffect(() => { + if (fileContent && fileContent !== viewRef?.current?.state.doc.toString()) { + viewRef?.current?.dispatch({ + changes: { from: 0, to: viewRef?.current?.state.doc.length, insert: fileContent } + }) + } + }, [fileContent]) + + useShowRequestError(resourceError) + + return ( + + + + + + + + + + {getString('privacyPolicy')} , + terms: {getString('termsOfUse')} + }} + /> + + + + + ) +} diff --git a/web/src/pages/SignUp/SignUp.module.scss b/web/src/pages/SignUp/SignUp.module.scss new file mode 100644 index 0000000000..b03f62775a --- /dev/null +++ b/web/src/pages/SignUp/SignUp.module.scss @@ -0,0 +1,129 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +.root { + box-shadow: rgba(0, 0, 0, 0) 0px 0px 0px 0px, rgba(0, 0, 0, 0) 0px 0px 0px 0px, rgba(0, 0, 0, 0.1) 0px 1px 3px 0px, + rgba(0, 0, 0, 0.06) 0px 1px 2px 0px; + box-sizing: border-box; + background: rgb(255, 255, 255); + border-radius: 6px; + display: block; + padding: 32px 40px; + width: 420px; + position: relative; + + h2 { + font-weight: 400; + font-size: 20px; + text-align: center; + margin-bottom: 25px; + margin-top: 40px; + } + + label { + font-weight: 500; + margin-bottom: 10px; + margin-top: 20px; + display: none; + } + + .input { + display: block; + padding: 12px 12px; + width: 100%; + box-shadow: none; + border-radius: 6px; + } + + .submit { + background: #0f172a; + box-shadow: rgb(0 0 0 / 0%) 0px 0px 0px 0px, rgb(0 0 0 / 0%) 0px 0px 0px 0px, rgb(0 0 0 / 20%) 0px 1px 2px 0px; + justify-content: center; + padding: 12px 12px; + width: 100%; + border-radius: 6px; + font-size: 14px; + + &:hover { + background: #1e293b; + } + + &:active { + background: #1e40af; + } + } +} + +.field { + margin: 15px 0px; +} + +.actions { + margin-top: 20px; + text-align: center; + + a, + a:visited { + color: #0060e0; + } +} + +.logo { + position: absolute; + top: -40px; + height: 80px; + width: 80px; + left: calc(50% - 40px); + transform: rotate(45deg); + display: flex; + align-items: center; + justify-content: center; + + border-color: transparent #e2e8f0 #e2e8f0 transparent; + border-style: solid; + border-width: 1px; + border-radius: 100%; + background: #f7fafc; + + img { + width: 60px; + height: 60px; + transform: rotate(-45deg) translateX(1px); + } +} + +.signUpContainer { + height: 100%; + padding: 4% 0px !important; +} diff --git a/web/src/pages/SignUp/SignUp.module.scss.d.ts b/web/src/pages/SignUp/SignUp.module.scss.d.ts new file mode 100644 index 0000000000..36106104d1 --- /dev/null +++ b/web/src/pages/SignUp/SignUp.module.scss.d.ts @@ -0,0 +1,25 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +/* eslint-disable */ +// This is an auto-generated file +export declare const actions: string +export declare const field: string +export declare const input: string +export declare const logo: string +export declare const root: string +export declare const signUpContainer: string +export declare const submit: string diff --git a/web/src/pages/SignUp/SignUp.tsx b/web/src/pages/SignUp/SignUp.tsx new file mode 100644 index 0000000000..2268f048eb --- /dev/null +++ b/web/src/pages/SignUp/SignUp.tsx @@ -0,0 +1,147 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import React, { useCallback } from 'react' +import { + Button, + Container, + FormInput, + Formik, + FormikForm, + Layout, + StringSubstitute, + Text, + useToaster +} from '@harnessio/uicore' +import { Color } from '@harnessio/design-system' +import * as Yup from 'yup' +import { Link } from 'react-router-dom' +import { useStrings } from 'framework/strings' +import AuthLayout from 'components/AuthLayout/AuthLayout' +import { useAppContext } from 'AppContext' +import { getErrorMessage, type RegisterForm } from 'utils/Utils' +import { useOnRegister } from 'services/code' +import css from './SignUp.module.scss' + +export const SignUp: React.FC = () => { + const { routes } = useAppContext() + const { getString } = useStrings() + const { showError, showSuccess } = useToaster() + + const { mutate } = useOnRegister({ + queryParams: { + include_cookie: true + } + }) + const onRegister = useCallback( + (data: RegisterForm) => { + mutate( + { + display_name: data.username, + email: data.email, + uid: data.username, + password: data.password + }, + { + headers: { Authorization: '' } + } + ) + .then(() => { + showSuccess(getString('userCreated')) + window.location.replace(window.location.origin + routes.toCODEHome()) + }) + .catch(error => { + showError(getErrorMessage(error)) + }) + }, + [mutate, showSuccess, showError, getString, routes] + ) + + const handleSubmit = (data: RegisterForm): void => { + if (data.username && data.password) { + onRegister(data) + } + } + return ( + + + + {getString('signUp')} + + + + + initialValues={{ username: '', email: '', password: '', confirmPassword: '' }} + formName="loginPageForm" + validationSchema={Yup.object().shape({ + username: Yup.string().required(getString('userNameRequired')), + email: Yup.string().email().required(getString('emailRequired')), + password: Yup.string().min(6, getString('minPassLimit')).required(getString('passwordRequired')), + confirmPassword: Yup.string() + .required(getString('confirmPassRequired')) + .oneOf([Yup.ref('password')], getString('matchPassword')) + })} + onSubmit={handleSubmit}> + + + + + + + + + + + + + + + {getString('privacyPolicy')} , + terms: {getString('termsOfUse')} + }} + /> + + + + {getString('alreadyHaveAccount')} + {getString('signIn')} + + + + ) +} diff --git a/web/src/pages/SpaceAccessControl/AddNewMember/AddNewMember.tsx b/web/src/pages/SpaceAccessControl/AddNewMember/AddNewMember.tsx new file 
mode 100644 index 0000000000..cde362d1ab --- /dev/null +++ b/web/src/pages/SpaceAccessControl/AddNewMember/AddNewMember.tsx @@ -0,0 +1,166 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import React, { useMemo, useState } from 'react' +import { Button, ButtonVariation, Dialog, FormikForm, FormInput, SelectOption, useToaster } from '@harnessio/uicore' +import { Formik } from 'formik' +import * as Yup from 'yup' +import { useGet } from 'restful-react' +import { useStrings } from 'framework/strings' +import { useGetSpaceParam } from 'hooks/useGetSpaceParam' +import { + MembershipAddRequestBody, + TypesMembershipUser, + TypesPrincipalInfo, + useMembershipAdd, + useMembershipUpdate +} from 'services/code' +import { getErrorMessage, LIST_FETCHING_LIMIT } from 'utils/Utils' +import { useModalHook } from 'hooks/useModalHook' +import { roleStringKeyMap } from '../SpaceAccessControl' + +const roles = ['reader', 'executor', 'contributor', 'space_owner'] as const + +const useAddNewMember = ({ onClose }: { onClose: () => void }) => { + const [isEditFlow, setIsEditFlow] = useState(false) + const [membershipDetails, setMembershipDetails] = useState() + const [searchTerm, setSearchTerm] = useState('') + + const space = useGetSpaceParam() + const { getString } = useStrings() + const { showError, showSuccess } = useToaster() + + const { mutate: addMembership } = useMembershipAdd({ space_ref: space }) + const { mutate: updateMembership } = useMembershipUpdate({ + space_ref: space, + user_uid: membershipDetails?.principal?.uid || '' + }) + + const { data: users, loading: fetchingUsers } = useGet({ + path: `/api/v1/principals`, + queryParams: { + query: searchTerm, + page: 1, + limit: LIST_FETCHING_LIMIT, + type: 'user' + }, + debounce: 500 + }) + + const roleOptions: SelectOption[] = useMemo( + () => + roles.map(role => ({ + value: role, + label: getString(roleStringKeyMap[role]) + })), + [] // eslint-disable-line react-hooks/exhaustive-deps + ) + + const userOptions: SelectOption[] = useMemo( + () => + users?.map(user => ({ + value: user.uid as string, + label: (user.display_name || user.email) as string + })) || [], + [users] + ) + const [selectUser, setSelectUser] = useState() + + const handleClose = () => { + setSearchTerm('') + hideModal() + } + + const [openModal, hideModal] = useModalHook(() => { + return ( + + + initialValues={{ + user_uid: membershipDetails?.principal?.uid || '', + role: membershipDetails?.role || 'reader' + }} + validationSchema={Yup.object().shape({ + user_uid: Yup.string().required(getString('validation.uidRequired')) + })} + onSubmit={async values => { + try { + if (isEditFlow) { + await updateMembership({ role: values.role }) + showSuccess(getString('spaceMemberships.memberUpdated')) + } else { + await addMembership(values) + showSuccess(getString('spaceMemberships.memberAdded')) + } + + handleClose() + onClose() + } catch (error) { + showError(getErrorMessage(error)) + } + }}> + + setSelectUser(item)} + /> + + + ) + }, 
[isEditFlow, membershipDetails, userOptions, selectUser]) + + return { + openModal: (isEditing?: boolean, memberInfo?: TypesPrincipalInfo) => { + openModal() + setIsEditFlow(Boolean(isEditing)) + setMembershipDetails(memberInfo) + }, + hideModal + } +} + +export default useAddNewMember diff --git a/web/src/pages/SpaceAccessControl/SpaceAccessControl.module.scss b/web/src/pages/SpaceAccessControl/SpaceAccessControl.module.scss new file mode 100644 index 0000000000..d745229684 --- /dev/null +++ b/web/src/pages/SpaceAccessControl/SpaceAccessControl.module.scss @@ -0,0 +1,28 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +.mainCtn { + height: var(--page-height); + background-color: var(--primary-bg) !important; + + .roleBadge { + padding: var(--spacing-xsmall) 6px; + border-radius: 4px; + border: 1px solid var(--grey-200); + background: var(--grey-50); + width: max-content; + } +} diff --git a/web/src/pages/SpaceAccessControl/SpaceAccessControl.module.scss.d.ts b/web/src/pages/SpaceAccessControl/SpaceAccessControl.module.scss.d.ts new file mode 100644 index 0000000000..b3fe1e10bd --- /dev/null +++ b/web/src/pages/SpaceAccessControl/SpaceAccessControl.module.scss.d.ts @@ -0,0 +1,20 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* eslint-disable */ +// This is an auto-generated file +export declare const mainCtn: string +export declare const roleBadge: string diff --git a/web/src/pages/SpaceAccessControl/SpaceAccessControl.tsx b/web/src/pages/SpaceAccessControl/SpaceAccessControl.tsx new file mode 100644 index 0000000000..84ee4925a2 --- /dev/null +++ b/web/src/pages/SpaceAccessControl/SpaceAccessControl.tsx @@ -0,0 +1,161 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import React, { useMemo } from 'react' +import { Avatar, Button, ButtonVariation, Container, Layout, Page, TableV2, Text, useToaster } from '@harnessio/uicore' +import { Color, FontVariation } from '@harnessio/design-system' +import type { CellProps, Column } from 'react-table' + +import { StringKeys, useStrings } from 'framework/strings' +import { useConfirmAct } from 'hooks/useConfirmAction' +import { useGetSpaceParam } from 'hooks/useGetSpaceParam' +import { EnumMembershipRole, TypesMembershipUser, useMembershipDelete, useMembershipList } from 'services/code' +import { getErrorMessage } from 'utils/Utils' +import { LoadingSpinner } from 'components/LoadingSpinner/LoadingSpinner' +import { OptionsMenuButton } from 'components/OptionsMenuButton/OptionsMenuButton' + +import useAddNewMember from './AddNewMember/AddNewMember' + +import css from './SpaceAccessControl.module.scss' + +export const roleStringKeyMap: Record = { + contributor: 'contributor', + executor: 'executor', + reader: 'reader', + space_owner: 'owner' +} + +const SpaceAccessControl = () => { + const { getString } = useStrings() + const { showError, showSuccess } = useToaster() + const space = useGetSpaceParam() + + const { data, refetch, loading } = useMembershipList({ + space_ref: space + }) + + const { openModal } = useAddNewMember({ onClose: refetch }) + + const { mutate: deleteMembership } = useMembershipDelete({ + space_ref: space + }) + + const onConfirmAct = useConfirmAct() + const handleRemoveMember = async (userId: string) => + await onConfirmAct({ + action: async () => { + try { + await deleteMembership(userId) + refetch() + showSuccess(getString('spaceMemberships.removeMembershipToast')) + } catch (error) { + showError(getErrorMessage(error)) + } + }, + message: getString('spaceMemberships.removeMembershipMsg'), + intent: 'danger', + title: getString('spaceMemberships.removeMember') + }) + + const columns = useMemo( + () => + [ + { + Header: getString('user'), + width: '30%', + Cell: ({ row }: CellProps) => ( + + + + {row.original.principal?.display_name} + + + ) + }, + { + Header: getString('role'), + width: '40%', + Cell: ({ row }: CellProps) => { + const stringKey = row.original.role ? roleStringKeyMap[row.original.role] : undefined + + return ( + + {stringKey ? getString(stringKey) : row.original.role} + + ) + } + }, + { + Header: getString('email'), + width: '25%', + Cell: ({ row }: CellProps) => ( + + {row.original.principal?.email} + + ) + }, + { + accessor: 'action', + width: '5%', + Cell: ({ row }: CellProps) => { + return ( + handleRemoveMember(row.original.principal?.uid as string) + }, + { + text: getString('spaceMemberships.changeRole'), + onClick: () => openModal(true, row.original) + } + ]} + /> + ) + } + } + ] as Column[], + [] // eslint-disable-line react-hooks/exhaustive-deps + ) + + return ( + + + + + + + + ) : ( + <> + + {getString('spaceSetting.deleteConfirm2', { + space + })} + + { + setDeleteConfirmString(e.currentTarget.value) + }} + /> + + + )} + + ) + }, [showConfirmPage, deleteConfirmString, loading]) + + return { + openModal, + hideModal + } +} + +export default useDeleteSpaceModal diff --git a/web/src/pages/SpaceSettings/ExportForm/ExportForm.tsx b/web/src/pages/SpaceSettings/ExportForm/ExportForm.tsx new file mode 100644 index 0000000000..83b99ca7db --- /dev/null +++ b/web/src/pages/SpaceSettings/ExportForm/ExportForm.tsx @@ -0,0 +1,348 @@ +/* + * Copyright 2023 Harness, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import React, { useState } from 'react' +import { Intent } from '@blueprintjs/core' +import * as yup from 'yup' +import { useGet } from 'restful-react' +import { FontVariation } from '@harnessio/design-system' + +import { Color } from '@harnessio/design-system' +import { + Button, + Container, + Label, + Layout, + FlexExpander, + Formik, + FormikForm, + FormInput, + Text, + ButtonSize, + ButtonVariation +} from '@harnessio/uicore' +import { Icon } from '@harnessio/icons' +import type { TypesRepository } from 'services/code' + +import { useStrings } from 'framework/strings' +import type { ExportFormDataExtended } from 'utils/GitUtils' +import Upgrade from '../../../icons/Upgrade.svg' +import css from '../SpaceSettings.module.scss' + +interface ExportFormProps { + handleSubmit: (data: ExportFormDataExtended) => void + loading: boolean + // eslint-disable-next-line @typescript-eslint/no-explicit-any + hideModal: any + step: number + setStep: React.Dispatch> + space: string +} + +const ExportForm = (props: ExportFormProps) => { + const { handleSubmit, loading, hideModal, step, setStep, space } = props + const { getString } = useStrings() + const [auth, setAuth] = useState(false) + const formInitialValues: ExportFormDataExtended = { + accountId: '', + token: '', + organization: '', + name: '', + repoCount: 0 + } + + const validationSchemaStepOne = yup.object().shape({ + accountId: yup.string().trim().required(getString('exportSpace.accIdRequired')), + token: yup.string().trim().required(getString('exportSpace.accesstokenReq')) + }) + + const validationSchemaStepTwo = yup.object().shape({ + organization: yup.string().trim().required(getString('importSpace.orgRequired')), + name: yup.string().trim().required(getString('importSpace.spaceNameRequired')) + }) + const { data: repositories } = useGet({ + path: `/api/v1/spaces/${space}/+/repos` + }) + + return ( + + {formik => { + const handleValidationClick = async () => { + try { + if (step === 0) { + await validationSchemaStepOne.validate(formik.values, { abortEarly: false }) + setStep(1) + } else if (step === 1) { + await validationSchemaStepTwo.validate(formik.values, { abortEarly: false }) + setStep(2) + } // eslint-disable-next-line @typescript-eslint/no-explicit-any + } catch (err: any) { + formik.setErrors( + // eslint-disable-next-line @typescript-eslint/no-explicit-any + err.inner.reduce((acc: { [x: string]: any }, current: { path: string | number; message: string }) => { + acc[current.path] = current.message + return acc + }, {}) + ) + } + } + return ( + + + {step === 0 ? ( + <> + + + {formik.errors.accountId ? ( + + {formik.errors.accountId} + + ) : null} + + {formik.errors.token ? ( + + {formik.errors.token} + + ) : null} + + + ) : null} + {step === 1 ? ( + <> + + + {formik.errors.organization ? ( + + {formik.errors.organization} + + ) : null} + + + {formik.errors.name ? 
( + + {formik.errors.name} + + ) : null} + + + + + + + + + { + setAuth(!auth) + }} + disabled + padding={{ right: 'small' }} + className={css.checkbox} + /> + + { + setAuth(!auth) + }} + /> + + + + + + ) : null} + + {step === 2 && ( + <> + + + {formik.errors.organization ? ( + + {formik.errors.organization} + + ) : null} + + {formik.errors.name ? ( + + {formik.errors.name} + + ) : null} + + + {getString('exportSpace.repoToConvert', { length: repositories?.length })} + + + + + )} + +
+ + + {step === 0 && ( + + + +
+
+ + ) + }} + +
+ + + ) +} diff --git a/web/src/pages/UserProfile/EditableTextField.tsx b/web/src/pages/UserProfile/EditableTextField.tsx new file mode 100644 index 0000000000..5d68d0d3af --- /dev/null +++ b/web/src/pages/UserProfile/EditableTextField.tsx @@ -0,0 +1,78 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import React, { useState } from 'react' +import { Button, ButtonSize, ButtonVariation, Container, Layout, Text, TextInput } from '@harnessio/uicore' +import { Color, FontVariation } from '@harnessio/design-system' +import { useStrings } from 'framework/strings' +import css from './UserProfile.module.scss' + +enum ACCESS_MODES { + VIEW, + EDIT +} + +const EditableTextField = ({ onSave, value }: { value: string; onSave: (text: string) => void }) => { + const { getString } = useStrings() + const [viewMode, setViewMode] = useState(ACCESS_MODES.VIEW) + const [text, setText] = useState(value) + + return ( + + {viewMode === ACCESS_MODES.EDIT ? ( + + setText((e.target as HTMLInputElement).value)} + wrapperClassName={css.textInput} + /> + + + + + + + + } + position={Position.RIGHT} + interactionKind="click"> + + + + + + + {row.original.display_name} + + {!!row.original.triggers?.length && ( + + ({formatTriggers(row.original?.triggers).join(', ')}) + + )} + {!row.original.triggers?.length && ( + + {getString('webhookAllEventsSelected')} + + )} + + +
+ ) + } + }, + { + id: 'executionStatus', + width: '15px', + Cell: ({ row }: CellProps) => { + return ( + + ) + } + }, + { + id: 'action', + width: '60px', + Cell: ({ row }: CellProps) => { + const { mutate: deleteWebhook } = useMutate({ + verb: 'DELETE', + path: `/api/v1/repos/${repoMetadata?.path}/+/webhooks/${row.original.id}` + }) + const confirmDelete = useConfirmAct() + + return ( + + { + history.push( + routes.toCODEWebhookDetails({ + repoPath: repoMetadata?.path as string, + webhookId: String(row.original?.id) + }) + ) + } + }, + { + hasIcon: true, + iconName: 'main-trash', + text: getString('delete'), + onClick: async () => { + confirmDelete({ + message: getString('confirmDeleteWebhook'), + action: async () => { + deleteWebhook({}) + .then(() => { + showSuccess(getString('webhookDeleted'), 5000) + setPage(1) + refetchWebhooks() + }) + .catch(exception => { + showError(getErrorMessage(exception), 0, 'failedToDeleteWebhook') + }) + } + }) + } + } + ]} + /> + + ) + } + } + ], + [history, getString, refetchWebhooks, repoMetadata?.path, routes, setPage, showError, showSuccess] + ) + + return ( + + + + + + {repoMetadata && ( + + { + setSearchTerm(value) + setPage(1) + }} + /> + + {!!webhooks?.length && ( + <> + + className={css.table} + hideHeaders + columns={columns} + data={webhooks} + getRowClassName={() => css.row} + onRowClick={row => { + history.push( + routes.toCODEWebhookDetails({ + repoPath: repoMetadata.path as string, + webhookId: String(row.id) + }) + ) + }} + /> + + + + )} + + webhooks?.length === 0} + forSearch={!!searchTerm} + message={getString('webhookEmpty')} + buttonText={getString('newWebhook')} + onButtonClick={() => + history.push( + routes.toCODEWebhookNew({ + repoPath: repoMetadata?.path as string + }) + ) + } + /> + + + )} + + + ) +} + +const generateLastExecutionStateIcon = ( + webhook: OpenapiWebhookType +): { icon: IconName; iconProps?: { color?: Color } } => { + let icon: IconName = 'dot' + let color: Color | undefined = undefined + + switch (webhook.latest_execution_result) { + case 'fatal_error': + icon = 'danger-icon' + break + case 'retriable_error': + icon = 'solid-error' + break + case 'success': + icon = 'success-tick' + break + default: + color = Color.GREY_250 + } + + return { icon, ...(color ? { iconProps: { color } } : undefined) } +} diff --git a/web/src/pages/Webhooks/WebhooksHeader/WebhooksHeader.module.scss b/web/src/pages/Webhooks/WebhooksHeader/WebhooksHeader.module.scss new file mode 100644 index 0000000000..d3ce9495c4 --- /dev/null +++ b/web/src/pages/Webhooks/WebhooksHeader/WebhooksHeader.module.scss @@ -0,0 +1,41 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +.main { + padding-bottom: 0 !important; + + div[class*='TextInput'] { + margin-bottom: 0 !important; + margin-left: 0 !important; + } + + > div { + align-items: center; + } + + .input { + margin-bottom: 0 !important; + + span[data-icon], + span[icon] { + margin-top: 10px !important; + } + } +} + +.branchDropdown { + background-color: var(--white); +} diff --git a/web/src/pages/Webhooks/WebhooksHeader/WebhooksHeader.module.scss.d.ts b/web/src/pages/Webhooks/WebhooksHeader/WebhooksHeader.module.scss.d.ts new file mode 100644 index 0000000000..ab6a17504f --- /dev/null +++ b/web/src/pages/Webhooks/WebhooksHeader/WebhooksHeader.module.scss.d.ts @@ -0,0 +1,21 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* eslint-disable */ +// This is an auto-generated file +export declare const branchDropdown: string +export declare const input: string +export declare const main: string diff --git a/web/src/pages/Webhooks/WebhooksHeader/WebhooksHeader.tsx b/web/src/pages/Webhooks/WebhooksHeader/WebhooksHeader.tsx new file mode 100644 index 0000000000..048fd71b14 --- /dev/null +++ b/web/src/pages/Webhooks/WebhooksHeader/WebhooksHeader.tsx @@ -0,0 +1,59 @@ +/* + * Copyright 2023 Harness, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import { useHistory } from 'react-router-dom' +import React, { useState } from 'react' +import { Container, Layout, FlexExpander, ButtonVariation, Button } from '@harnessio/uicore' +import { useStrings } from 'framework/strings' +import { CodeIcon, GitInfoProps } from 'utils/GitUtils' +import { useAppContext } from 'AppContext' +import { SearchInputWithSpinner } from 'components/SearchInputWithSpinner/SearchInputWithSpinner' +import css from './WebhooksHeader.module.scss' + +interface WebhooksHeaderProps extends Pick { + loading?: boolean + onSearchTermChanged: (searchTerm: string) => void +} + +export function WebhooksHeader({ repoMetadata, loading, onSearchTermChanged }: WebhooksHeaderProps) { + const history = useHistory() + const [searchTerm, setSearchTerm] = useState('') + const { routes } = useAppContext() + const { getString } = useStrings() + + return ( + + + { + setSearchTerm(value) + onSearchTermChanged(value) + }} + /> + + - ) -} diff --git a/web/src/components/Changes/CommitRangeDropdown/CommitRangeDropdown.tsx b/web/src/components/Changes/CommitRangeDropdown/CommitRangeDropdown.tsx deleted file mode 100644 index 77ac10bf16..0000000000 --- a/web/src/components/Changes/CommitRangeDropdown/CommitRangeDropdown.tsx +++ /dev/null @@ -1,168 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import React, { useEffect, useMemo } from 'react' -import { Divider, PopoverInteractionKind, Position } from '@blueprintjs/core' -import { Checkbox, Container, FlexExpander, Layout, Popover, Text } from '@harnessio/uicore' -import { Color, FontVariation } from '@harnessio/design-system' -import ReactTimeago from 'react-timeago' - -import { useStrings } from 'framework/strings' -import type { TypesCommit } from 'services/code' - -import css from '../Changes.module.scss' - -type CommitRangeDropdownProps = { - allCommits: TypesCommit[] - selectedCommits: string[] - setSelectedCommits: React.Dispatch> -} - -const sortSelectedCommits = (selectedCommits: string[], sortedCommits: string[]) => { - return selectedCommits.sort((commitA, commitB) => { - const commitAIdx = sortedCommits.indexOf(commitA) - const commitBIdx = sortedCommits.indexOf(commitB) - - return commitBIdx - commitAIdx - }) -} - -function getBiggerSubarray(array: Array, index: number) { - if (index >= 0 && index < array.length) { - const subarray1 = array.slice(0, index) - const subarray2 = array.slice(index + 1) - - return subarray1.length > subarray2.length ? 
subarray1 : subarray2 - } else { - return [] - } -} - -const getCommitRange = (selectedCommits: string[], allCommitsSHA: string[]) => { - const sortedCommits = sortSelectedCommits(selectedCommits, allCommitsSHA) - const selectedCommitRange = allCommitsSHA - .slice(allCommitsSHA.indexOf(sortedCommits[sortedCommits.length - 1]), allCommitsSHA.indexOf(sortedCommits[0]) + 1) - .reverse() - - return selectedCommitRange -} - -const CommitRangeDropdown: React.FC = ({ - allCommits, - selectedCommits, - setSelectedCommits -}) => { - const { getString } = useStrings() - const allCommitsSHA = useMemo(() => allCommits.map(commit => commit.sha as string), [allCommits]) - - useEffect(() => { - if (selectedCommits.length && allCommitsSHA.length) { - setSelectedCommits(prevVal => getCommitRange(prevVal, allCommitsSHA)) - } - }, [allCommitsSHA, setSelectedCommits, selectedCommits.length]) - - const handleCheckboxClick = ( - event: React.MouseEvent, - selectedCommitSHA: string - ) => { - setSelectedCommits(current => { - // handle single commit clicked (either no shift held, or no commit selected yet) - if (!event.shiftKey || current.length == 0) { - return [selectedCommitSHA] - } - - // handle already selected commit clicked - if (current.includes(selectedCommitSHA)) { - const sortedCommits = sortSelectedCommits(current, allCommitsSHA) - const subArray = getBiggerSubarray(sortedCommits, sortedCommits.indexOf(selectedCommitSHA)) - - return subArray - } - - // clicked commit is outside of current range - extend it! - const extendedArray = getCommitRange([...current, selectedCommitSHA], allCommitsSHA) - - // NOTE: this CAN contain all commits - we let it through for consistent user experience. - // This way, the user sees selected exactly what they clicked on (+ we don't have to handle single commit pr differently) - return extendedArray - }) - } - - const areAllCommitsSelected = !selectedCommits.length - - return ( - setSelectedCommits(selectedCommits)} - content={ - - {getString('allCommits')}} - checked={areAllCommitsSelected} - onClick={() => setSelectedCommits([])} - margin={{ bottom: 'small' }} - /> - - - {allCommits?.map((prCommit, index) => { - const isSelected = selectedCommits.includes(prCommit.sha || '') - - return ( - handleCheckboxClick(e, prCommit.sha as string)}> - handleCheckboxClick(e, prCommit.sha as string)} /> - - {`${allCommits.length - index} ${prCommit.title}`} - - - - - - - ) - })} - - - - {getString('selectRange')} - - - }> - - { - areAllCommitsSelected - ? getString('allCommits') - : `${selectedCommits.length} ${selectedCommits.length > 1 ? getString('commits') : getString('commit')}` - } - - - ) -} - -export default CommitRangeDropdown diff --git a/web/src/components/Changes/DiffViewConfiguration.tsx b/web/src/components/Changes/DiffViewConfiguration.tsx deleted file mode 100644 index 9842fc4b57..0000000000 --- a/web/src/components/Changes/DiffViewConfiguration.tsx +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import React from 'react' -import { Container, Layout, Text, FlexExpander } from '@harnessio/uicore' -import { FontVariation } from '@harnessio/design-system' -import { ButtonGroup, Button as BButton, Classes } from '@blueprintjs/core' -import cx from 'classnames' -import { useStrings } from 'framework/strings' -import { ButtonRoleProps } from 'utils/Utils' -import { ViewStyle } from 'components/DiffViewer/DiffViewerUtils' - -interface DiffViewConfigurationProps { - viewStyle: ViewStyle - lineBreaks: boolean - setViewStyle: (val: ViewStyle) => void - setLineBreaks: (val: boolean) => void -} - -export const DiffViewConfiguration: React.FC = ({ - viewStyle, - setViewStyle, - lineBreaks, - setLineBreaks -}) => { - const { getString } = useStrings() - - return ( - - - - - {getString('pr.diffView')} - - - { - setViewStyle(ViewStyle.SIDE_BY_SIDE) - window.scroll({ top: 0 }) - }}> - {getString('pr.split')} - - { - setViewStyle(ViewStyle.LINE_BY_LINE) - window.scroll({ top: 0 }) - }}> - {getString('pr.unified')} - - - - - - - {getString('lineBreaks')} - - - setLineBreaks(true)}> - {getString('on')} - - setLineBreaks(false)}> - {getString('off')} - - - - - - - } - tooltipProps={{ interactionKind: 'click' }} - iconProps={{ size: 14, padding: { right: 3 } }} - rightIconProps={{ size: 13, padding: { left: 0 } }} - padding={{ left: 'small' }} - {...ButtonRoleProps} - /> - ) -} diff --git a/web/src/components/Changes/ReviewSplitButton/ReviewSplitButton.tsx b/web/src/components/Changes/ReviewSplitButton/ReviewSplitButton.tsx deleted file mode 100644 index 600f690aa5..0000000000 --- a/web/src/components/Changes/ReviewSplitButton/ReviewSplitButton.tsx +++ /dev/null @@ -1,129 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import { ButtonVariation, Container, SplitButton, useToaster, Text, Layout } from '@harnessio/uicore' -import { Icon, IconName } from '@harnessio/icons' -import { Color, FontVariation } from '@harnessio/design-system' -import { Menu, PopoverPosition } from '@blueprintjs/core' -import cx from 'classnames' -import { useMutate } from 'restful-react' -import React, { useCallback, useMemo } from 'react' -import { useStrings } from 'framework/strings' -import type { EnumPullReqReviewDecision, TypesPullReq } from 'services/code' -import type { GitInfoProps } from 'utils/GitUtils' -import { getErrorMessage } from 'utils/Utils' -import css from '../Changes.module.scss' - -interface PrReviewOption { - method: EnumPullReqReviewDecision | 'reject' - title: string - disabled?: boolean - icon: IconName - color: Color -} - -interface ReviewSplitButtonProps extends Pick { - shouldHide: boolean - pullRequestMetadata?: TypesPullReq - refreshPr: () => void - disabled?: boolean - refetchReviewers?: () => void -} -const ReviewSplitButton = (props: ReviewSplitButtonProps) => { - const { refetchReviewers, pullRequestMetadata, repoMetadata, shouldHide, refreshPr, disabled } = props - const { getString } = useStrings() - const { showError, showSuccess } = useToaster() - const prDecisionOptions: PrReviewOption[] = useMemo( - () => [ - { - method: 'approved', - title: getString('approve'), - icon: 'tick-circle' as IconName, - color: Color.GREEN_700 - }, - { - method: 'changereq', - title: getString('requestChanges'), - icon: 'error' as IconName, - color: Color.ORANGE_700 - } - ], - [getString] - ) - - const { mutate, loading } = useMutate({ - verb: 'POST', - path: `/api/v1/repos/${repoMetadata.path}/+/pullreq/${pullRequestMetadata?.number}/reviews` - }) - const submitReview = useCallback( - (decision: PrReviewOption) => { - mutate({ decision: decision.method, commit_sha: pullRequestMetadata?.source_sha }) - .then(() => { - showSuccess(getString(decision.method === 'approved' ? 'pr.reviewSubmitted' : 'pr.requestSubmitted')) - refreshPr?.() - refetchReviewers?.() - }) - .catch(exception => showError(getErrorMessage(exception))) - }, - [mutate, showError, showSuccess, getString, refreshPr, pullRequestMetadata?.source_sha, refetchReviewers] - ) - return ( - - { - submitReview(prDecisionOptions[0]) - }}> - - - - {prDecisionOptions[1].title} - - - } - onClick={() => { - submitReview(prDecisionOptions[1]) - }} - /> - - - ) -} - -export default ReviewSplitButton diff --git a/web/src/components/CloneButtonTooltip/CloneButtonTooltip.module.scss b/web/src/components/CloneButtonTooltip/CloneButtonTooltip.module.scss deleted file mode 100644 index f8cf9cadb1..0000000000 --- a/web/src/components/CloneButtonTooltip/CloneButtonTooltip.module.scss +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -.container { - .label { - font-size: 13px !important; - color: var(--black) !important; - } - - .layout { - height: 24px; - display: inline-flex; - justify-content: center; - align-items: center; - border: 1px solid var(--grey-200); - background-color: var(--grey-50) !important; - border-radius: 4px; - padding-left: var(--spacing-small) !important; - max-width: 300px; - .url { - width: 250px; - white-space: nowrap !important; - overflow: hidden; - text-overflow: ellipsis; - font-size: 13px !important; - } - - button#cloneCopyButton { - --button-height: 24px !important; - border-radius: 0 !important; - border-left: 1px solid var(--grey-200) !important; - margin-left: var(--spacing-small) !important; - } - } -} diff --git a/web/src/components/CloneButtonTooltip/CloneButtonTooltip.module.scss.d.ts b/web/src/components/CloneButtonTooltip/CloneButtonTooltip.module.scss.d.ts deleted file mode 100644 index 2e46dea965..0000000000 --- a/web/src/components/CloneButtonTooltip/CloneButtonTooltip.module.scss.d.ts +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* eslint-disable */ -// This is an auto-generated file -export declare const cloneCopyButton: string -export declare const container: string -export declare const label: string -export declare const layout: string -export declare const url: string diff --git a/web/src/components/CloneButtonTooltip/CloneButtonTooltip.tsx b/web/src/components/CloneButtonTooltip/CloneButtonTooltip.tsx deleted file mode 100644 index 5f86abcc65..0000000000 --- a/web/src/components/CloneButtonTooltip/CloneButtonTooltip.tsx +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import React, { useState } from 'react' -import { Button, ButtonVariation, Container, Layout, Text } from '@harnessio/uicore' -import { Color, FontVariation } from '@harnessio/design-system' -import { useStrings } from 'framework/strings' -import { CopyButton } from 'components/CopyButton/CopyButton' -import { CodeIcon } from 'utils/GitUtils' -import CloneCredentialDialog from 'components/CloneCredentialDialog/CloneCredentialDialog' -import css from './CloneButtonTooltip.module.scss' - -interface CloneButtonTooltipProps { - httpsURL: string -} - -export function CloneButtonTooltip({ httpsURL }: CloneButtonTooltipProps) { - const { getString } = useStrings() - const [flag, setFlag] = useState(false) - - return ( - - - {getString('cloneHTTPS')} - - {getString('generateCloneText')} - - - - - {httpsURL} - - - - - - - - - ) -} diff --git a/web/src/components/CloneCredentialDialog/CloneCredentialDialog.module.scss b/web/src/components/CloneCredentialDialog/CloneCredentialDialog.module.scss deleted file mode 100644 index 36c20c1021..0000000000 --- a/web/src/components/CloneCredentialDialog/CloneCredentialDialog.module.scss +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -.layout { - height: 33px; - display: inline-flex; - justify-content: center; - align-items: center; - border: 1px solid var(--grey-200); - background-color: var(--grey-50) !important; - border-radius: 4px; - padding-left: var(--spacing-small) !important; - max-width: 100%; - .url { - // width: 80%; - white-space: nowrap !important; - overflow: hidden; - text-overflow: ellipsis; - font-size: 13px !important; - } - - button#cloneCopyButton { - --button-height: 24px !important; - border-radius: 0 !important; - border-left: 1px solid var(--grey-200) !important; - margin-left: var(--spacing-small) !important; - } -} diff --git a/web/src/components/CloneCredentialDialog/CloneCredentialDialog.module.scss.d.ts b/web/src/components/CloneCredentialDialog/CloneCredentialDialog.module.scss.d.ts deleted file mode 100644 index 7a96c29bf0..0000000000 --- a/web/src/components/CloneCredentialDialog/CloneCredentialDialog.module.scss.d.ts +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -/* eslint-disable */ -// This is an auto-generated file -export declare const cloneCopyButton: string -export declare const layout: string -export declare const url: string diff --git a/web/src/components/CloneCredentialDialog/CloneCredentialDialog.tsx b/web/src/components/CloneCredentialDialog/CloneCredentialDialog.tsx deleted file mode 100644 index 857c132af3..0000000000 --- a/web/src/components/CloneCredentialDialog/CloneCredentialDialog.tsx +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import React, { useCallback, useEffect, useState } from 'react' -import { Button, ButtonVariation, Container, Dialog, FlexExpander, Layout, Text, useToaster } from '@harnessio/uicore' -import { FontVariation } from '@harnessio/design-system' -import { useMutate } from 'restful-react' -import { useHistory } from 'react-router-dom' -import { useStrings } from 'framework/strings' -import { CopyButton } from 'components/CopyButton/CopyButton' -import { CodeIcon } from 'utils/GitUtils' -import { useAppContext } from 'AppContext' -import { generateAlphaNumericHash } from 'utils/Utils' -import css from './CloneCredentialDialog.module.scss' - -interface CloneCredentialDialogProps { - setFlag: (val: boolean) => void - flag: boolean -} - -const CloneCredentialDialog = (props: CloneCredentialDialogProps) => { - const { setFlag, flag } = props - const history = useHistory() - const { getString } = useStrings() - const { hooks, currentUser, currentUserProfileURL, standalone, routes } = useAppContext() - const [token, setToken] = useState('') - const { showError } = useToaster() - const hash = generateAlphaNumericHash(6) - const { mutate } = useMutate({ path: '/api/v1/user/tokens', verb: 'POST' }) - const genToken = useCallback( - async (_props: { uid: string }) => { - const res = await mutate({ uid: _props.uid }) - try { - setToken(res?.access_token) - } catch { - showError(res?.data?.message || res?.message) - } - return res - }, - [mutate, showError] - ) - const tokenData = standalone ? 
false : hooks?.useGenerateToken?.(hash, currentUser?.uid, flag) - - useEffect(() => { - if (tokenData) { - if (tokenData && tokenData?.status !== 400) { - setToken(tokenData?.data) - } else if (tokenData?.status === 400 && flag) { - showError(tokenData?.data?.message || tokenData?.message) - } - } else if (!tokenData && standalone && flag) { - genToken({ uid: `code_token_${hash}` }) - } - }, [flag, tokenData, showError]) // eslint-disable-line react-hooks/exhaustive-deps - return ( - { - setFlag(false) - }} - title={ - - {getString('getMyCloneTitle')} - - } - style={{ width: 490, maxHeight: '95vh', overflow: 'auto' }}> - - - {getString('userName')} - - - - {currentUser.display_name} - - - - - - {getString('passwordApi')} - - - - - {token} - - - - - - {getString('cloneText')} - - - ) -} - -export default CloneCredentialDialog diff --git a/web/src/components/CodeCommentSecondarySaveButton/CodeCommentSecondarySaveButton.tsx b/web/src/components/CodeCommentSecondarySaveButton/CodeCommentSecondarySaveButton.tsx deleted file mode 100644 index 284b7c212d..0000000000 --- a/web/src/components/CodeCommentSecondarySaveButton/CodeCommentSecondarySaveButton.tsx +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import React, { useMemo, useState } from 'react' -import { useMutate } from 'restful-react' -import { useToaster, Button, ButtonVariation, ButtonSize, ButtonProps, useIsMounted } from '@harnessio/uicore' -import { useStrings } from 'framework/strings' -import type { GitInfoProps } from 'utils/GitUtils' -import type { TypesPullReqActivity } from 'services/code' -import { useEmitCodeCommentStatus } from 'hooks/useEmitCodeCommentStatus' -import { CodeCommentState, getErrorMessage } from 'utils/Utils' -import type { CommentItem } from '../CommentBox/CommentBox' - -interface CodeCommentSecondarySaveButtonProps - extends Pick, - ButtonProps { - commentItems: CommentItem[] -} - -export const CodeCommentSecondarySaveButton: React.FC = ({ - repoMetadata, - pullRequestMetadata, - commentItems, - onClick, - ...props -}) => { - const { getString } = useStrings() - const isMounted = useIsMounted() - const { showError } = useToaster() - const path = useMemo( - () => `/api/v1/repos/${repoMetadata.path}/+/pullreq/${pullRequestMetadata?.number}/comments`, - [repoMetadata.path, pullRequestMetadata?.number] - ) - const { mutate: updateCodeCommentStatus } = useMutate({ verb: 'PUT', path: ({ id }) => `${path}/${id}/status` }) - const [resolved, setResolved] = useState(commentItems[0]?.payload?.resolved ? 
true : false) - const emitCodeCommentStatus = useEmitCodeCommentStatus({ - id: commentItems[0]?.payload?.id, - onMatch: status => { - if (isMounted.current) { - setResolved(status === CodeCommentState.RESOLVED) - } - } - }) - - return ( - - ) -} diff --git a/web/src/components/CodeCommentStatusSelect/CodeCommentStatusSelect.module.scss b/web/src/components/CodeCommentStatusSelect/CodeCommentStatusSelect.module.scss deleted file mode 100644 index 4b91c5df81..0000000000 --- a/web/src/components/CodeCommentStatusSelect/CodeCommentStatusSelect.module.scss +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -.select { - width: 110px !important; - cursor: pointer; - - :global { - .bp3-menu { - width: 100px !important; - min-width: 103px !important; - - .Select--menuItem { - width: 100px !important; - } - } - } -} diff --git a/web/src/components/CodeCommentStatusSelect/CodeCommentStatusSelect.module.scss.d.ts b/web/src/components/CodeCommentStatusSelect/CodeCommentStatusSelect.module.scss.d.ts deleted file mode 100644 index 8f73f4c0e4..0000000000 --- a/web/src/components/CodeCommentStatusSelect/CodeCommentStatusSelect.module.scss.d.ts +++ /dev/null @@ -1,19 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* eslint-disable */ -// This is an auto-generated file -export declare const select: string diff --git a/web/src/components/CodeCommentStatusSelect/CodeCommentStatusSelect.tsx b/web/src/components/CodeCommentStatusSelect/CodeCommentStatusSelect.tsx deleted file mode 100644 index d51b897803..0000000000 --- a/web/src/components/CodeCommentStatusSelect/CodeCommentStatusSelect.tsx +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import React, { useMemo, useState } from 'react' -import { useMutate } from 'restful-react' -import { useToaster, Select } from '@harnessio/uicore' -import { useStrings } from 'framework/strings' -import type { GitInfoProps } from 'utils/GitUtils' -import type { TypesPullReqActivity } from 'services/code' -import { CodeCommentState, getErrorMessage } from 'utils/Utils' -import { useEmitCodeCommentStatus } from 'hooks/useEmitCodeCommentStatus' -import type { CommentItem } from '../CommentBox/CommentBox' -import css from './CodeCommentStatusSelect.module.scss' - -interface CodeCommentStatusSelectProps extends Pick { - commentItems: CommentItem[] - onCommentUpdate: () => void - - refetchActivities?: () => void -} - -export const CodeCommentStatusSelect: React.FC = ({ - repoMetadata, - pullRequestMetadata, - commentItems, - onCommentUpdate, - refetchActivities -}) => { - const { getString } = useStrings() - const { showError } = useToaster() - const path = useMemo( - () => `/api/v1/repos/${repoMetadata.path}/+/pullreq/${pullRequestMetadata?.number}/comments`, - [repoMetadata.path, pullRequestMetadata?.number] - ) - const { mutate: updateCodeCommentStatus } = useMutate({ verb: 'PUT', path: ({ id }) => `${path}/${id}/status` }) - const codeCommentStatusItems = useMemo( - () => [ - { - label: getString('active'), - value: CodeCommentState.ACTIVE - }, - { - label: getString('resolved'), - value: CodeCommentState.RESOLVED - } - ], - [getString] - ) - const [codeCommentStatus, setCodeCommentStatus] = useState( - commentItems[0]?.payload?.resolved ? codeCommentStatusItems[1] : codeCommentStatusItems[0] - ) - const emitCodeCommentStatus = useEmitCodeCommentStatus({ - id: commentItems[0]?.payload?.id, - onMatch: status => { - setCodeCommentStatus(status === CodeCommentState.ACTIVE ? codeCommentStatusItems[0] : codeCommentStatusItems[1]) - } - }) - - return ( - { - setActivityFilter(newState) - refetchActivities() - }} - /> - - - { - if (dateOrderSort === orderSortDate.ASC) { - setDateOrderSort(orderSortDate.DESC) - } else { - setDateOrderSort(orderSortDate.ASC) - } - }}> - {dateOrderSort === orderSortDate.ASC ? getString('ascending') : getString('descending')} - - - - {dateOrderSort != orderSortDate.DESC ? null : - - {newCommentBox} - - } - - {renderedActivityBlocks} - - {dateOrderSort != orderSortDate.ASC ? null : - - {newCommentBox} - - } - - - - - - - - - - - - ) -} - -export enum PRCommentFilterType { - SHOW_EVERYTHING = 'showEverything', - ALL_COMMENTS = 'allComments', - MY_COMMENTS = 'myComments', - RESOLVED_COMMENTS = 'resolvedComments', - UNRESOLVED_COMMENTS = 'unresolvedComments' -} - -function useActivityFilters() { - const { getString } = useStrings() - - return useMemo( - () => [ - { - label: getString('showEverything'), - value: PRCommentFilterType.SHOW_EVERYTHING - }, - { - label: getString('allComments'), - value: PRCommentFilterType.ALL_COMMENTS - }, - { - label: getString('myComments'), - value: PRCommentFilterType.MY_COMMENTS - }, - { - label: getString('unrsolvedComment'), - value: PRCommentFilterType.UNRESOLVED_COMMENTS - }, - { - label: getString('resolvedComments'), - value: PRCommentFilterType.RESOLVED_COMMENTS - } - ], - [getString] - ) -} diff --git a/web/src/pages/PullRequest/Conversation/DescriptionBox.tsx b/web/src/pages/PullRequest/Conversation/DescriptionBox.tsx deleted file mode 100644 index c0cb30df93..0000000000 --- a/web/src/pages/PullRequest/Conversation/DescriptionBox.tsx +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. 
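CodeCommentSecondarySaveButton and CodeCommentStatusSelect above share the same round trip: PUT the new state to the comment's status endpoint, then broadcast it so every rendering of that comment (diff view, conversation tab) stays in sync. A minimal sketch of that flow using a plain fetch call instead of restful-react's useMutate; the { status } request body is an assumption rather than something visible in the diff.

type CodeCommentState = 'active' | 'resolved'

// Hypothetical standalone helper; the real code wires this through useMutate() and
// useEmitCodeCommentStatus(). The { status } body shape is assumed.
async function updateCodeCommentStatus(
  repoPath: string,
  pullReqNumber: number,
  commentId: number,
  status: CodeCommentState,
  onBroadcast: (id: number, status: CodeCommentState) => void
): Promise<void> {
  const res = await fetch(
    `/api/v1/repos/${repoPath}/+/pullreq/${pullReqNumber}/comments/${commentId}/status`,
    {
      method: 'PUT',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ status })
    }
  )
  if (!res.ok) {
    throw new Error(`failed to update comment status: ${res.status}`)
  }
  // Fan the result out so other copies of the comment flip their local resolved state too.
  onBroadcast(commentId, status)
}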
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import React, { useEffect, useState } from 'react' -import { Container, useToaster } from '@harnessio/uicore' -import cx from 'classnames' -import { useMutate } from 'restful-react' -import { MarkdownViewer } from 'components/MarkdownViewer/MarkdownViewer' -import { useStrings } from 'framework/strings' -import type { OpenapiUpdatePullReqRequest } from 'services/code' -import { OptionsMenuButton } from 'components/OptionsMenuButton/OptionsMenuButton' -import { MarkdownEditorWithPreview } from 'components/MarkdownEditorWithPreview/MarkdownEditorWithPreview' -import { NavigationCheck } from 'components/NavigationCheck/NavigationCheck' -import { getErrorMessage } from 'utils/Utils' -import type { ConversationProps } from './Conversation' -import css from './Conversation.module.scss' - -interface DescriptionBoxProps extends Omit { - onCancelEditDescription: () => void -} - -export const DescriptionBox: React.FC = ({ - repoMetadata, - pullRequestMetadata, - onCommentUpdate: refreshPullRequestMetadata, - onCancelEditDescription -}) => { - const [edit, setEdit] = useState(false) - const [dirty, setDirty] = useState(false) - const [originalContent, setOriginalContent] = useState(pullRequestMetadata.description as string) - const [content, setContent] = useState(originalContent) - const { getString } = useStrings() - const { showError } = useToaster() - const { mutate } = useMutate({ - verb: 'PATCH', - path: `/api/v1/repos/${repoMetadata.path}/+/pullreq/${pullRequestMetadata.number}` - }) - - useEffect(() => { - setEdit(!pullRequestMetadata?.description?.length) - - if (pullRequestMetadata?.description) { - setContent(pullRequestMetadata?.description) - } - }, [pullRequestMetadata?.description, pullRequestMetadata?.description?.length]) - - return ( - - - {(edit && ( - { - const payload: OpenapiUpdatePullReqRequest = { - title: pullRequestMetadata.title, - description: value || '' - } - mutate(payload) - .then(() => { - setContent(value) - setOriginalContent(value) - setEdit(false) - refreshPullRequestMetadata() - }) - .catch(exception => showError(getErrorMessage(exception), 0, getString('pr.failedToUpdate'))) - }} - onCancel={() => { - setContent(originalContent) - setEdit(false) - onCancelEditDescription() - }} - setDirty={setDirty} - i18n={{ - placeHolder: getString('pr.enterDesc'), - tabEdit: getString('write'), - tabPreview: getString('preview'), - save: getString('save'), - cancel: getString('cancel') - }} - editorHeight="400px" - autoFocusAndPosition={true} - /> - )) || ( - - - - setEdit(true) - } - ]} - /> - - - )} - - - - ) -} diff --git a/web/src/pages/PullRequest/Conversation/PullRequestActionsBox/PullRequestActionsBox.module.scss b/web/src/pages/PullRequest/Conversation/PullRequestActionsBox/PullRequestActionsBox.module.scss deleted file mode 100644 index cffc1c9d8f..0000000000 --- a/web/src/pages/PullRequest/Conversation/PullRequestActionsBox/PullRequestActionsBox.module.scss +++ /dev/null @@ -1,155 +0,0 @@ -/* - * 
Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -.main { - --bar-height: 60px; - - background-color: var(--green-50) !important; - margin: -24px -24px 0 !important; - position: sticky; - top: 0; - z-index: 4; - - &.merged { - border-color: transparent !important; - background: #e8e8ff !important; - } - - &.error { - background-color: var(--red-50) !important; - } - - &.closed { - background-color: var(--grey-100) !important; - } - - &.draft { - background-color: var(--orange-100) !important; - } - - &.unchecked { - background-color: var(--orange-50) !important; // Note: No UICore color variable for this background - } - - .layout { - height: var(--bar-height); - padding: 0 var(--spacing-xlarge) !important; - - .secondaryButton, - [class*='Button--variation-tertiary'] { - --box-shadow: 0px 0px 1px rgba(40, 41, 61, 0.04), 0px 2px 4px rgba(96, 97, 112, 0.16) !important; - } - } - - .btn { - background-color: var(--green-800) !important; - color: var(--white) !important; - } - - .heading { - font-weight: 600 !important; - font-size: 16px !important; - line-height: 24px !important; - color: var(--grey-700) !important; - } - - .sub { - font-weight: 600 !important; - font-size: 13px !important; - line-height: 20px !important; - color: var(--green-800) !important; - - &.closed { - color: var(--grey-600) !important; - } - - &.merged { - color: var(--purple-700) !important; - } - - &.draft { - color: var(--orange-900) !important; - } - - &.unmergeable { - color: var(--red-500) !important; - } - - &.unchecked { - color: #c05809 !important; // Note: No UICore color variable for this text - } - } -} - -.popover { - transform: translateY(5px) !important; - - .menuItem { - strong { - display: inline-block; - margin-left: 10px; - } - - p { - font-size: 13px; - padding-left: 27px; - line-height: 16px; - margin: 5px 0; - max-width: 320px; - white-space: break-spaces !important; - } - } - .menuReviewItem { - strong { - display: inline-block; - margin-left: 10px; - } - - p { - font-size: 13px; - padding-left: 2px; - line-height: 16px; - margin: 0px 1px; - max-width: 320px; - } - } -} - -.btnWrapper { - &.hasError button { - --background-color: var(--grey-50) !important; - --background-color-hover: var(--white) !important; - --background-color-active: var(--grey-100) !important; - } - - &.disabled { - pointer-events: none; - opacity: 0.5; - } - - a, - button { - --background-color: var(--green-800) !important; - --background-color-hover: var(--green-900) !important; - --background-color-active: var(--green-900) !important; - } -} - -.mergeContainer { - border-radius: 4px; - background: #e8e8ff !important; - padding: 2px 5px !important; -} diff --git a/web/src/pages/PullRequest/Conversation/PullRequestActionsBox/PullRequestActionsBox.module.scss.d.ts b/web/src/pages/PullRequest/Conversation/PullRequestActionsBox/PullRequestActionsBox.module.scss.d.ts deleted file mode 100644 index 8662fea10a..0000000000 --- 
a/web/src/pages/PullRequest/Conversation/PullRequestActionsBox/PullRequestActionsBox.module.scss.d.ts +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* eslint-disable */ -// This is an auto-generated file -export declare const btn: string -export declare const btnWrapper: string -export declare const closed: string -export declare const disabled: string -export declare const draft: string -export declare const error: string -export declare const hasError: string -export declare const heading: string -export declare const layout: string -export declare const main: string -export declare const menuItem: string -export declare const menuReviewItem: string -export declare const mergeContainer: string -export declare const merged: string -export declare const popover: string -export declare const secondaryButton: string -export declare const sub: string -export declare const unchecked: string -export declare const unmergeable: string diff --git a/web/src/pages/PullRequest/Conversation/PullRequestActionsBox/PullRequestActionsBox.tsx b/web/src/pages/PullRequest/Conversation/PullRequestActionsBox/PullRequestActionsBox.tsx deleted file mode 100644 index 1fdf7641e4..0000000000 --- a/web/src/pages/PullRequest/Conversation/PullRequestActionsBox/PullRequestActionsBox.tsx +++ /dev/null @@ -1,386 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import React, { useMemo, useState } from 'react' -import { - Button, - ButtonVariation, - Container, - FlexExpander, - Layout, - SplitButton, - StringSubstitute, - Text, - useToaster -} from '@harnessio/uicore' -import { Icon } from '@harnessio/icons' -import { Color } from '@harnessio/design-system' -import { useMutate } from 'restful-react' -import { Case, Else, Match, Render, Truthy } from 'react-jsx-match' -import { Menu, PopoverPosition, Icon as BIcon } from '@blueprintjs/core' -import cx from 'classnames' -import ReactTimeago from 'react-timeago' -import type { - EnumMergeMethod, - EnumPullReqState, - OpenapiMergePullReq, - OpenapiStatePullReqRequest, - TypesPullReq -} from 'services/code' -import { useStrings } from 'framework/strings' -import { CodeIcon, GitInfoProps, PullRequestFilterOption, PullRequestState } from 'utils/GitUtils' -import { useGetSpaceParam } from 'hooks/useGetSpaceParam' -import { useAppContext } from 'AppContext' -import { Images } from 'images' -import { getErrorMessage, MergeCheckStatus, permissionProps } from 'utils/Utils' -import { UserPreference, useUserPreference } from 'hooks/useUserPreference' -import ReviewSplitButton from 'components/Changes/ReviewSplitButton/ReviewSplitButton' -import css from './PullRequestActionsBox.module.scss' - -interface PullRequestActionsBoxProps extends Pick { - onPRStateChanged: () => void - refetchReviewers: () => void -} - -interface PRMergeOption { - method: EnumMergeMethod | 'close' - title: string - desc: string - disabled?: boolean -} - -interface PRDraftOption { - method: 'close' | 'open' - title: string - desc: string - disabled?: boolean -} - -export const PullRequestActionsBox: React.FC = ({ - repoMetadata, - pullRequestMetadata, - onPRStateChanged, - refetchReviewers -}) => { - const { getString } = useStrings() - const { showError } = useToaster() - const { currentUser } = useAppContext() - const { hooks, standalone } = useAppContext() - const space = useGetSpaceParam() - const { mutate: mergePR, loading } = useMutate({ - verb: 'POST', - path: `/api/v1/repos/${repoMetadata.path}/+/pullreq/${pullRequestMetadata.number}/merge` - }) - const { mutate: updatePRState, loading: loadingState } = useMutate({ - verb: 'POST', - path: `/api/v1/repos/${repoMetadata.path}/+/pullreq/${pullRequestMetadata.number}/state` - }) - const mergeable = useMemo( - () => pullRequestMetadata.merge_check_status === MergeCheckStatus.MERGEABLE, - [pullRequestMetadata] - ) - const isClosed = pullRequestMetadata.state === PullRequestState.CLOSED - const isOpen = pullRequestMetadata.state === PullRequestState.OPEN - const isConflict = pullRequestMetadata.merge_check_status === MergeCheckStatus.CONFLICT - const unchecked = useMemo( - () => pullRequestMetadata.merge_check_status === MergeCheckStatus.UNCHECKED && !isClosed, - [pullRequestMetadata, isClosed] - ) - const isDraft = pullRequestMetadata.is_draft - const mergeOptions: PRMergeOption[] = [ - { - method: 'squash', - title: getString('pr.mergeOptions.squashAndMerge'), - desc: getString('pr.mergeOptions.squashAndMergeDesc'), - disabled: mergeable === false - }, - { - method: 'merge', - title: getString('pr.mergeOptions.createMergeCommit'), - desc: getString('pr.mergeOptions.createMergeCommitDesc'), - disabled: mergeable === false - }, - { - method: 'rebase', - title: getString('pr.mergeOptions.rebaseAndMerge'), - desc: getString('pr.mergeOptions.rebaseAndMergeDesc'), - disabled: mergeable === false - }, - { - method: 'close', - title: getString('pr.mergeOptions.close'), - desc: 
getString('pr.mergeOptions.closeDesc') - } - ] - const draftOptions: PRDraftOption[] = [ - { - method: 'open', - title: getString('pr.draftOpenForReview.title'), - desc: getString('pr.draftOpenForReview.desc') - }, - { - method: 'close', - title: getString('pr.mergeOptions.close'), - desc: getString('pr.mergeOptions.closeDesc') - } - ] - - const [mergeOption, setMergeOption, resetMergeOption] = useUserPreference( - UserPreference.PULL_REQUEST_MERGE_STRATEGY, - mergeOptions[1], - option => option.method !== 'close' - ) - const [draftOption, setDraftOption] = useState(draftOptions[0]) - const permPushResult = hooks?.usePermissionTranslate?.( - { - resource: { - resourceType: 'CODE_REPOSITORY' - }, - permissions: ['code_repo_push'] - }, - [space] - ) - const isActiveUserPROwner = useMemo(() => { - return ( - !!currentUser?.uid && !!pullRequestMetadata?.author?.uid && currentUser?.uid === pullRequestMetadata?.author?.uid - ) - }, [currentUser, pullRequestMetadata]) - - if (pullRequestMetadata.state === PullRequestFilterOption.MERGED) { - return - } - - return ( - - - - - {(unchecked && ) || ( - - )} - - {getString( - isDraft - ? 'prState.draftHeading' - : isClosed - ? 'pr.prClosed' - : unchecked - ? 'pr.checkingToMerge' - : mergeable === false && isOpen - ? 'pr.cantBeMerged' - : 'pr.branchHasNoConflicts' - )} - - - - - - - - - { - if (draftOption.method === 'open') { - updatePRState({ is_draft: false, state: 'open' }) - .then(onPRStateChanged) - .catch(exception => showError(getErrorMessage(exception))) - } else { - updatePRState({ state: 'closed' }) - .then(onPRStateChanged) - .catch(exception => showError(getErrorMessage(exception))) - } - }}> - {draftOptions.map(option => { - return ( - - - {option.title} -

{option.desc}

- - } - onClick={() => setDraftOption(option)} - /> - ) - })} -
-
- - - - - -
- {tagArr.length !== 0 ? ( - <> - ) : ( - - {getString('noneYet')} - - )} */} -
-
- - ) -} - -export default PullRequestSideBar diff --git a/web/src/pages/PullRequest/Conversation/SystemComment.tsx b/web/src/pages/PullRequest/Conversation/SystemComment.tsx deleted file mode 100644 index 422360f059..0000000000 --- a/web/src/pages/PullRequest/Conversation/SystemComment.tsx +++ /dev/null @@ -1,308 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import React from 'react' -import { Avatar, Container, Layout, StringSubstitute, Text } from '@harnessio/uicore' -import { Icon, IconName } from '@harnessio/icons' -import { Color, FontVariation } from '@harnessio/design-system' -import ReactTimeago from 'react-timeago' -import { Render } from 'react-jsx-match' -import { CodeIcon, GitInfoProps } from 'utils/GitUtils' -import { MarkdownViewer } from 'components/MarkdownViewer/MarkdownViewer' -import { useStrings } from 'framework/strings' -import type { TypesPullReqActivity } from 'services/code' -import type { CommentItem } from 'components/CommentBox/CommentBox' -import { formatDate, formatTime, PullRequestSection } from 'utils/Utils' -import { CommentType } from 'components/DiffViewer/DiffViewerUtils' -import { useAppContext } from 'AppContext' -import { CommitActions } from 'components/CommitActions/CommitActions' -import { PipeSeparator } from 'components/PipeSeparator/PipeSeparator' -import css from './Conversation.module.scss' - -interface SystemCommentProps extends Pick { - commentItems: CommentItem[] - repoMetadataPath?: string -} - -export const SystemComment: React.FC = ({ - pullRequestMetadata, - commentItems, - repoMetadataPath -}) => { - const { getString } = useStrings() - const payload = commentItems[0].payload - const type = payload?.type - const { routes } = useAppContext() - - switch (type) { - case CommentType.MERGE: { - return ( - - - - - - - - - {pullRequestMetadata.merger?.display_name}, - source: {pullRequestMetadata.source_branch}, - target: {pullRequestMetadata.target_branch}, - time: ( - - - - - ) - }} - /> - - - - ) - } - - case CommentType.REVIEW_SUBMIT: { - return ( - - - - - - - {payload?.author?.display_name}, - state: {(payload?.payload as Unknown)?.decision}, - time: ( - - - - - ) - }} - /> - - - - ) - } - - case CommentType.BRANCH_UPDATE: { - return ( - - - - - - {payload?.author?.display_name} - - ), - commit: ( - - - - ) - }} - /> - - - - - - - - ) - } - - case CommentType.BRANCH_DELETE: { - return ( - - - - - - {payload?.author?.display_name} - - ), - commit: ( - - - - ) - }} - /> - - - - - - - - ) - } - - case CommentType.STATE_CHANGE: { - const openFromDraft = - (payload?.payload as Unknown)?.old_draft === true && (payload?.payload as Unknown)?.new_draft === false - - return ( - - - - - {payload?.author?.display_name}, - old: {(payload?.payload as Unknown)?.old}, - new: {(payload?.payload as Unknown)?.new} - }} - /> - - - - - - - - ) - } - - case CommentType.TITLE_CHANGE: { - return ( - - - - - {payload?.author?.display_name}, - old: ( - - {(payload?.payload as Unknown)?.old} - - ), - 
new: {(payload?.payload as Unknown)?.new} - }} - /> - - - - - - - - 1}> - - index > 0) - .map( - item => - `|${item.author}|${(item.payload?.payload as Unknown)?.old}|${ - (item.payload?.payload as Unknown)?.new - }|${formatDate(item.edited)} ${formatTime(item.edited)}|` - ) - ) - .join('\n')} - /> - - - - ) - } - - default: { - // eslint-disable-next-line no-console - console.warn('Unable to render system type activity', commentItems) - return ( - - - {type} - - ) - } - } -} - -const generateReviewDecisionIcon = ( - reviewDecision: string -): { - name: IconName - color: string | undefined - size: number | undefined - icon: IconName - iconProps?: { color?: Color } -} => { - let icon: IconName = 'dot' - let color: Color | undefined = undefined - let size: number | undefined = undefined - - switch (reviewDecision) { - case 'changereq': - icon = 'main-issue-filled' - color = Color.ORANGE_700 - size = 18 - break - case 'approved': - icon = 'execution-success' - size = 18 - color = Color.GREEN_700 - break - } - const name = icon - return { name, color, size, icon, ...(color ? { iconProps: { color } } : undefined) } -} diff --git a/web/src/pages/PullRequest/PullRequest.module.scss b/web/src/pages/PullRequest/PullRequest.module.scss deleted file mode 100644 index b5bfc9d40c..0000000000 --- a/web/src/pages/PullRequest/PullRequest.module.scss +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
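The TITLE_CHANGE branch of SystemComment above folds every edit after the most recent one into a small markdown table that is handed to MarkdownViewer. A compact sketch of that idea; the column headers and date formatting here are chosen for illustration rather than copied from the source strings.

interface TitleChange {
  author: string
  oldTitle: string
  newTitle: string
  editedAt: number // epoch millis, as passed to formatDate/formatTime in the original
}

// Build the markdown table shown inside the collapsed "previous changes" section.
function buildTitleChangeTable(changes: TitleChange[]): string {
  const header = '|Author|Old title|New title|Edited|'
  const divider = '|---|---|---|---|'
  const rows = changes.map(
    c => `|${c.author}|${c.oldTitle}|${c.newTitle}|${new Date(c.editedAt).toLocaleString()}|`
  )
  return [header, divider, ...rows].join('\n')
}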
- */ - -.main { - min-height: var(--page-height); - - > div[class*='PageHeader'] { - border-bottom: none !important; - } -} - -.prNumber { - font-size: 20px; - font-weight: 400; - color: var(--grey-500); - padding-left: var(--spacing-small); -} - -.prTitle { - align-items: center; - - .btn > span { - background: var(--grey-100); - border: 1px solid var(--grey-200); - border-radius: 4px; - } - - .input { - margin-top: 0 !important; - margin-bottom: 0 !important; - - input { - width: 800px; - font-weight: 600; - padding: 0 var(--spacing-small) !important; - line-height: 22px !important; - } - } - - .titleText { - flex-shrink: 0; - max-width: calc(100vw - 500px); - } -} - -.changes { - padding: 0 var(--spacing-xlarge) var(--spacing-xlarge) !important; -} - -.checksCount { - border: 1px solid var(--grey-100); - background-color: var(--grey-50) !important; - display: inline-flex; - margin-left: 8px !important; - border-radius: 4px; - padding: 2px 5px 0 !important; - - .checksCountLayout { - display: inline-flex; - height: 18px; - align-items: center; - justify-content: center; - - > span { - padding: 0 !important; - margin: 0 !important; - align-self: flex-start; - line-height: 16px !important; - - &:last-of-type { - padding-left: 5px !important; - } - } - } -} diff --git a/web/src/pages/PullRequest/PullRequest.module.scss.d.ts b/web/src/pages/PullRequest/PullRequest.module.scss.d.ts deleted file mode 100644 index 0151b32e7a..0000000000 --- a/web/src/pages/PullRequest/PullRequest.module.scss.d.ts +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* eslint-disable */ -// This is an auto-generated file -export declare const btn: string -export declare const changes: string -export declare const checksCount: string -export declare const checksCountLayout: string -export declare const input: string -export declare const main: string -export declare const prNumber: string -export declare const prTitle: string -export declare const titleText: string diff --git a/web/src/pages/PullRequest/PullRequest.tsx b/web/src/pages/PullRequest/PullRequest.tsx deleted file mode 100644 index 3a8ca52e99..0000000000 --- a/web/src/pages/PullRequest/PullRequest.tsx +++ /dev/null @@ -1,355 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import React, { useCallback, useEffect, useMemo, useState } from 'react' -import { Container, Layout, PageBody, Tabs, Text } from '@harnessio/uicore' -import { FontVariation } from '@harnessio/design-system' -import { useGet, useMutate } from 'restful-react' -import { Render } from 'react-jsx-match' -import { useHistory } from 'react-router-dom' -import { compact, isEqual } from 'lodash-es' -import { useAppContext } from 'AppContext' -import { useGetRepositoryMetadata } from 'hooks/useGetRepositoryMetadata' -import { useStrings } from 'framework/strings' -import { RepositoryPageHeader } from 'components/RepositoryPageHeader/RepositoryPageHeader' -import { voidFn, getErrorMessage, PullRequestSection, MergeCheckStatus } from 'utils/Utils' -import { CodeIcon } from 'utils/GitUtils' -import type { TypesPullReq, TypesPullReqStats, TypesRepository } from 'services/code' -import { LoadingSpinner } from 'components/LoadingSpinner/LoadingSpinner' -import { TabTitleWithCount, tabContainerCSS } from 'components/TabTitleWithCount/TabTitleWithCount' -import { usePRChecksDecision } from 'hooks/usePRChecksDecision' -import { ExecutionStatus } from 'components/ExecutionStatus/ExecutionStatus' -import useSpaceSSE from 'hooks/useSpaceSSE' -import { useGetSpaceParam } from 'hooks/useGetSpaceParam' -import { PullRequestMetaLine } from './PullRequestMetaLine' -import { Conversation } from './Conversation/Conversation' -import { Checks } from './Checks/Checks' -import { Changes } from '../../components/Changes/Changes' -import { PullRequestCommits } from './PullRequestCommits/PullRequestCommits' -import { PullRequestTitle } from './PullRequestTitle' -import css from './PullRequest.module.scss' - -const SSE_EVENTS = ['pullreq_updated'] - -export default function PullRequest() { - const history = useHistory() - const { getString } = useStrings() - const { routes, standalone } = useAppContext() - const space = useGetSpaceParam() - const { - repoMetadata, - error, - loading, - refetch, - pullRequestId, - pullRequestSection = PullRequestSection.CONVERSATION, - commitSHA - } = useGetRepositoryMetadata() - const path = useMemo( - () => `/api/v1/repos/${repoMetadata?.path}/+/pullreq/${pullRequestId}`, - [repoMetadata?.path, pullRequestId] - ) - const { - data: pullRequestData, - error: prError, - loading: prLoading, - refetch: refetchPullRequest - } = useGet({ - path, - lazy: !repoMetadata - }) - - const eventHandler = useCallback((data : TypesPullReq)=> { - // ensure this update belongs to the PR we are showing right now - to avoid unnecessary reloads - if (!data || !repoMetadata || - data.target_repo_id !== repoMetadata.id || - String(data.number) !== pullRequestId - ) { - return - } - // NOTE: we refresh as events don't contain all pr stats yet (can be optimized) - refetchPullRequest() - }, [pullRequestId, repoMetadata, refetchPullRequest]) - useSpaceSSE({ - space, - events: SSE_EVENTS, - onEvent: eventHandler - }) - - const [prData, setPrData] = useState() - const prChecksDecisionResult = usePRChecksDecision({ - repoMetadata, - pullRequestMetadata: prData - }) - const showSpinner = useMemo(() => { - return loading || (prLoading && !prData) - }, [loading, prLoading, prData]) - const [showEditDescription, setShowEditDescription] = useState(false) - - const [prStats, setPRStats] = useState() - useMemo(() => { - setPRStats(oldPRStats => { - if (isEqual(oldPRStats, prData?.stats)) { - return oldPRStats - } - - return prData?.stats - }) - }, [prData, setPRStats]) - - const onAddDescriptionClick = useCallback(() 
=> { - setShowEditDescription(true) - history.replace( - routes.toCODEPullRequest({ - repoPath: repoMetadata?.path as string, - pullRequestId, - pullRequestSection: PullRequestSection.CONVERSATION - }) - ) - }, [history, routes, repoMetadata?.path, pullRequestId]) - const recheckPath = useMemo( - () => `/api/v1/repos/${repoMetadata?.path}/+/pullreq/${pullRequestId}/recheck`, - [repoMetadata?.path, pullRequestId] - ) - const { mutate: recheckPR, loading: loadingRecheckPR } = useMutate({ - verb: 'POST', - path: recheckPath - }) - - // prData holds the latest good PR data to make sure page is not broken - // when polling fails - useEffect( - function setPrDataIfNotSet() { - if (!pullRequestData || (prData && isEqual(prData, pullRequestData))) { - return - } - - // recheck pr (merge-check, ...) in case it's unavailable - // Approximation of identifying target branch update: - // 1. branch got updated before page was loaded (status is unchecked and prData is empty) - // NOTE: This doesn't guarantee the status is UNCHECKED due to target branch update and can cause duplicate - // PR merge checks being run on PR creation or source branch update. - // 2. branch got updated while we are on the page (same source_sha but status changed to UNCHECKED) - // NOTE: This doesn't cover the case in which the status changed back to UNCHECKED before the PR is refetched. - // In that case, the user will have to re-open the PR - better than us spamming the backend with rechecks. - // This is a TEMPORARY SOLUTION and will most likely change in the future (more so on backend side) - if ( - pullRequestData.state == 'open' && - pullRequestData.merge_check_status == MergeCheckStatus.UNCHECKED && - // case 1: - (!prData || - // case 2: - (prData?.merge_check_status != MergeCheckStatus.UNCHECKED && - prData?.source_sha == pullRequestData.source_sha)) && - !loadingRecheckPR - ) { - // best effort attempt to recheck PR - fail silently - recheckPR({}) - } - - setPrData(pullRequestData) - }, - [pullRequestData, loadingRecheckPR, recheckPR, setPrData] // eslint-disable-line react-hooks/exhaustive-deps - ) - - useEffect(() => { - const fn = () => { - if (repoMetadata) { - refetchPullRequest().then(() => { - interval = window.setTimeout(fn, PR_POLLING_INTERVAL) - }) - } - } - let interval = window.setTimeout(fn, PR_POLLING_INTERVAL) - - return () => window.clearTimeout(interval) - }, [repoMetadata, refetchPullRequest, path]) - - const activeTab = useMemo( - () => - Object.values(PullRequestSection).find(value => value === pullRequestSection) - ? pullRequestSection - : PullRequestSection.CONVERSATION, - [pullRequestSection] - ) - - return ( - - - ) : ( - '' - ) - } - dataTooltipId="repositoryPullRequests" - extraBreadcrumbLinks={ - repoMetadata && [ - { - label: getString('pullRequests'), - url: routes.toCODEPullRequests({ repoPath: repoMetadata.path as string }) - } - ] - } - /> - - - - - <> - - - { - history.replace( - routes.toCODEPullRequest({ - repoPath: repoMetadata?.path as string, - pullRequestId, - pullRequestSection: tabId !== PullRequestSection.CONVERSATION ? 
(tabId as string) : undefined - }) - ) - }} - tabList={[ - { - id: PullRequestSection.CONVERSATION, - title: ( - - ), - panel: ( - { - setShowEditDescription(false) - refetchPullRequest() - }} - prStats={prStats} - showEditDescription={showEditDescription} - onCancelEditDescription={() => setShowEditDescription(false)} - /> - ) - }, - { - id: PullRequestSection.COMMITS, - title: ( - - ), - panel: ( - - ) - }, - { - id: PullRequestSection.FILES_CHANGED, - title: ( - - ), - panel: ( - - - - ) - }, - { - id: PullRequestSection.CHECKS, - title: ( - - - - - - {prChecksDecisionResult?.count[prChecksDecisionResult?.overallStatus]} - - - - ) : null - } - count={prChecksDecisionResult?.count?.failure || 0} - padding={{ left: 'medium' }} - /> - ), - panel: ( - - ) - } - ]} - /> - - - - - - ) -} - -const PR_POLLING_INTERVAL = 20000 diff --git a/web/src/pages/PullRequest/PullRequestCommits/PullRequestCommits.module.scss b/web/src/pages/PullRequest/PullRequestCommits/PullRequestCommits.module.scss deleted file mode 100644 index edfc786e4c..0000000000 --- a/web/src/pages/PullRequest/PullRequestCommits/PullRequestCommits.module.scss +++ /dev/null @@ -1,18 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -.main { -} diff --git a/web/src/pages/PullRequest/PullRequestCommits/PullRequestCommits.tsx b/web/src/pages/PullRequest/PullRequestCommits/PullRequestCommits.tsx deleted file mode 100644 index cf40048c87..0000000000 --- a/web/src/pages/PullRequest/PullRequestCommits/PullRequestCommits.tsx +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
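PullRequest.tsx above polls the pull request on a fixed interval, but only re-arms the timer after the previous refetch has resolved, so slow responses never stack up, and the pending timeout is cleared on unmount. The same loop in isolation; intervalMs mirrors the 20-second PR_POLLING_INTERVAL from the diff, and the refetch parameter stands in for refetchPullRequest.

// Self-rescheduling poll: the next tick is scheduled only once the current refetch resolves.
function startPolling(refetch: () => Promise<unknown>, intervalMs = 20000): () => void {
  let timer: number

  const tick = () => {
    refetch().then(() => {
      timer = window.setTimeout(tick, intervalMs)
    })
  }
  timer = window.setTimeout(tick, intervalMs)

  // Cleanup mirrors the useEffect teardown: cancel whichever timeout is currently pending.
  return () => window.clearTimeout(timer)
}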
- */ - -import React from 'react' -import { useGet } from 'restful-react' -import type { TypesCommit } from 'services/code' -import type { GitInfoProps } from 'utils/GitUtils' -import { voidFn, LIST_FETCHING_LIMIT } from 'utils/Utils' -import { usePageIndex } from 'hooks/usePageIndex' -import { useStrings } from 'framework/strings' -import { ResourceListingPagination } from 'components/ResourceListingPagination/ResourceListingPagination' -import { CommitsView } from 'components/CommitsView/CommitsView' -import { PullRequestTabContentWrapper } from '../PullRequestTabContentWrapper' - -export const PullRequestCommits: React.FC> = ({ - repoMetadata, - pullRequestMetadata, -}) => { - const limit = LIST_FETCHING_LIMIT - const [page, setPage] = usePageIndex() - const { getString } = useStrings() - const { data, error, loading, refetch, response } = useGet<{ - commits: TypesCommit[] - }>({ - path: `/api/v1/repos/${repoMetadata?.path}/+/commits`, - queryParams: { - limit, - page, - git_ref: pullRequestMetadata.source_sha, - after: pullRequestMetadata.merge_base_sha - }, - lazy: !repoMetadata - }) - - return ( - - - - - - ) -} diff --git a/web/src/pages/PullRequest/PullRequestMetaLine.module.scss b/web/src/pages/PullRequest/PullRequestMetaLine.module.scss deleted file mode 100644 index d6e94309fb..0000000000 --- a/web/src/pages/PullRequest/PullRequestMetaLine.module.scss +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -.main { - background-color: var(--white) !important; - - .metaline { - font-size: 13px !important; - font-weight: 500 !important; - line-height: 20px !important; - color: var(--grey-500) !important; - - &.time { - color: var(--grey-400) !important; - } - - strong { - color: var(--grey-700) !important; - } - } - - .layout { - align-items: center; - } -} - -.copyContainer { - background-color: var(--primary-1) !important; - color: var(--primary-7) !important; - border-radius: 4px !important; - --button-height: unset !important; - --text-color: unset !important; - padding: unset !important ; - --padding-right: 2px !important; - padding-left: 8px !important; - min-width: unset !important; -} - -.linkText { - color: var(--primary-7) !important; -} - -.link { - background-color: var(--primary-1) !important; - color: var(--primary-7) !important; - border-radius: 4px; - line-height: 20px; - font-size: 13px; - font-weight: 600; - padding: 2px 6px !important; - width: fit-content !important; -} diff --git a/web/src/pages/PullRequest/PullRequestMetaLine.module.scss.d.ts b/web/src/pages/PullRequest/PullRequestMetaLine.module.scss.d.ts deleted file mode 100644 index df654a3c71..0000000000 --- a/web/src/pages/PullRequest/PullRequestMetaLine.module.scss.d.ts +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
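The Commits tab above lists commits reachable from the PR's source_sha but not from merge_base_sha, paged via limit and page query parameters. A small helper showing how that request is parameterised; the default limit here is arbitrary, since the real value comes from LIST_FETCHING_LIMIT.

// Compose the commits listing URL used by the pull-request Commits tab.
function pullReqCommitsUrl(
  repoPath: string,
  sourceSha: string,
  mergeBaseSha: string,
  page = 1,
  limit = 20
): string {
  const params = new URLSearchParams({
    limit: String(limit),
    page: String(page),
    git_ref: sourceSha, // tip of the PR's source branch
    after: mergeBaseSha // exclude history already reachable from the target branch
  })
  return `/api/v1/repos/${repoPath}/+/commits?${params.toString()}`
}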
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* eslint-disable */ -// This is an auto-generated file -export declare const copyContainer: string -export declare const layout: string -export declare const link: string -export declare const linkText: string -export declare const main: string -export declare const metaline: string -export declare const time: string diff --git a/web/src/pages/PullRequest/PullRequestMetaLine.tsx b/web/src/pages/PullRequest/PullRequestMetaLine.tsx deleted file mode 100644 index c3c594d5ae..0000000000 --- a/web/src/pages/PullRequest/PullRequestMetaLine.tsx +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import React from 'react' -import { Container, Text, Layout, StringSubstitute } from '@harnessio/uicore' -import cx from 'classnames' -import ReactTimeago from 'react-timeago' -import type { GitInfoProps } from 'utils/GitUtils' -import { useAppContext } from 'AppContext' -import { useStrings } from 'framework/strings' -import type { TypesPullReq } from 'services/code' -import { PullRequestStateLabel } from 'components/PullRequestStateLabel/PullRequestStateLabel' -import { PipeSeparator } from 'components/PipeSeparator/PipeSeparator' -import { GitRefLink } from 'components/GitRefLink/GitRefLink' -import css from './PullRequestMetaLine.module.scss' - -export const PullRequestMetaLine: React.FC> = ({ - repoMetadata, - target_branch, - source_branch, - author, - edited, - state, - is_draft, - stats -}) => { - const { getString } = useStrings() - const { routes } = useAppContext() - const vars = { - user: {author?.display_name}, - commits: {stats?.commits}, - commitsCount: stats?.commits, - target: ( - - ), - source: ( - - ) - } - - return ( - - - - - - - - - - - - - - ) -} diff --git a/web/src/pages/PullRequest/PullRequestMetadataInfo.module.scss.d.ts b/web/src/pages/PullRequest/PullRequestMetadataInfo.module.scss.d.ts deleted file mode 100644 index 8ff3b47363..0000000000 --- a/web/src/pages/PullRequest/PullRequestMetadataInfo.module.scss.d.ts +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* eslint-disable */ -// this is an auto-generated file -declare const styles: { - readonly main: string - readonly state: string - readonly metaline: string - readonly time: string -} -export default styles diff --git a/web/src/pages/PullRequest/PullRequestTabContentWrapper.tsx b/web/src/pages/PullRequest/PullRequestTabContentWrapper.tsx deleted file mode 100644 index a7e58193e9..0000000000 --- a/web/src/pages/PullRequest/PullRequestTabContentWrapper.tsx +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import React from 'react' -import { Container, PageError } from '@harnessio/uicore' -import { getErrorMessage } from 'utils/Utils' -import { LoadingSpinner } from 'components/LoadingSpinner/LoadingSpinner' - -interface PullRequestTabContentWrapperProps { - className?: string - loading?: boolean - error?: Unknown - onRetry: () => void -} - -export const PullRequestTabContentWrapper: React.FC = ({ - className, - loading, - error, - onRetry, - children -}) => { - return ( - - - {error && } - {!error && children} - - ) -} diff --git a/web/src/pages/PullRequest/PullRequestTitle.tsx b/web/src/pages/PullRequest/PullRequestTitle.tsx deleted file mode 100644 index 557268a9ce..0000000000 --- a/web/src/pages/PullRequest/PullRequestTitle.tsx +++ /dev/null @@ -1,140 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import React, { useCallback, useEffect, useState } from 'react' -import { Container, Text, Layout, Button, ButtonVariation, ButtonSize, TextInput, useToaster } from '@harnessio/uicore' -import { FontVariation } from '@harnessio/design-system' -import { useMutate } from 'restful-react' -import { Match, Truthy, Else } from 'react-jsx-match' -import { useStrings } from 'framework/strings' -import { ButtonRoleProps, getErrorMessage } from 'utils/Utils' -import type { GitInfoProps } from 'utils/GitUtils' -import type { TypesPullReq } from 'services/code' -import { PipeSeparator } from 'components/PipeSeparator/PipeSeparator' -import css from './PullRequest.module.scss' - -interface PullRequestTitleProps extends TypesPullReq, Pick { - onSaveDone?: (newTitle: string) => Promise - onAddDescriptionClick: () => void -} - -export const PullRequestTitle: React.FC = ({ - repoMetadata, - title, - number, - description, - onAddDescriptionClick -}) => { - const [original, setOriginal] = useState(title) - const [val, setVal] = useState(title) - const [edit, setEdit] = useState(false) - const { getString } = useStrings() - const { showError } = useToaster() - const { mutate } = useMutate({ - verb: 'PATCH', - path: `/api/v1/repos/${repoMetadata.path}/+/pullreq/${number}` - }) - const submitChange = useCallback(() => { - mutate({ - title: val, - description - }) - .then(() => { - setEdit(false) - setOriginal(val) - }) - .catch(exception => showError(getErrorMessage(exception), 0)) - }, [description, val, mutate, showError]) - - useEffect(() => { - setOriginal(title) - - // make sure to update editor if it's not open - if (!edit) { - setVal(title) - } - }, [title, edit]) - - return ( - - - - - - event.target.select()} - onInput={event => setVal(event.currentTarget.value)} - autoFocus - onKeyDown={event => { - switch (event.key) { - case 'Enter': - submitChange() - break - case 'Escape': // does not work, maybe TextInput cancels ESC? - setEdit(false) - break - } - }} - /> - - - - - README, - LICENSE: LICENSE, - GITIGNORE: .gitignore - }} - /> - - - - - - {getString('firstTimeTitle')} - - {getString('cloneHTTPS')} - - - - - {repoMetadata.git_url} - - - - - - - - - { - history.push(standalone ? routes.toCODEUserProfile() : currentUserProfileURL) - }}> - here - - ) - }} - /> - - - - - - - - - - - ) -} diff --git a/web/src/pages/Repository/Repository.module.scss b/web/src/pages/Repository/Repository.module.scss deleted file mode 100644 index 7e8df8dcc7..0000000000 --- a/web/src/pages/Repository/Repository.module.scss +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
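Saving an edited title in PullRequestTitle above is a single PATCH of the pull request with the new title (the existing description is sent along unchanged), after which the component leaves edit mode. A hedged sketch of that call with a plain fetch in place of restful-react's useMutate, and error handling reduced to a thrown Error.

// Sketch of the title save round trip used by the inline title editor.
async function savePullReqTitle(
  repoPath: string,
  pullReqNumber: number,
  title: string,
  description: string
): Promise<void> {
  const res = await fetch(`/api/v1/repos/${repoPath}/+/pullreq/${pullReqNumber}`, {
    method: 'PATCH',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ title, description })
  })
  if (!res.ok) {
    throw new Error(`failed to update pull request title: ${res.status}`)
  }
}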
- */ - -.main { - min-height: var(--page-height); - background-color: var(--white) !important; - - &.withFileViewer { - &, - > div:first-of-type { - min-height: var(--page-height) !important; - } - - > div:first-of-type { - display: flex; - flex-direction: column; - } - } -} - -.bannerContainer { - padding: var(--spacing-small) var(--spacing-xsmall) !important; - background-color: var(--red-50) !important; - position: sticky; - top: 0; - z-index: 2; - margin: var(--spacing-small) var(--spacing-xlarge) !important; - border-radius: 5px; -} - -.layout { - height: 33px; - display: inline-flex; - justify-content: center; - align-items: center; - border: 1px solid var(--grey-200); - background-color: var(--grey-50) !important; - border-radius: 4px; - padding-left: var(--spacing-small) !important; - max-width: 100%; - .url { - // width: 80%; - white-space: nowrap !important; - overflow: hidden; - text-overflow: ellipsis; - font-size: 13px !important; - } - - button#cloneCopyButton { - --button-height: 24px !important; - border-radius: 0 !important; - border-left: 1px solid var(--grey-200) !important; - margin-left: var(--spacing-small) !important; - } -} - -.text { - font-size: 16px !important; -} diff --git a/web/src/pages/Repository/Repository.module.scss.d.ts b/web/src/pages/Repository/Repository.module.scss.d.ts deleted file mode 100644 index 11fa3e197e..0000000000 --- a/web/src/pages/Repository/Repository.module.scss.d.ts +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* eslint-disable */ -// This is an auto-generated file -export declare const bannerContainer: string -export declare const cloneCopyButton: string -export declare const layout: string -export declare const main: string -export declare const text: string -export declare const url: string -export declare const withFileViewer: string diff --git a/web/src/pages/Repository/Repository.tsx b/web/src/pages/Repository/Repository.tsx deleted file mode 100644 index a8e8ba5b26..0000000000 --- a/web/src/pages/Repository/Repository.tsx +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import React, { useEffect, useState } from 'react' -import { Container, Layout, PageBody, StringSubstitute, Text } from '@harnessio/uicore' -import { Falsy, Match, Truthy } from 'react-jsx-match' -import cx from 'classnames' -import { useGetResourceContent } from 'hooks/useGetResourceContent' -import { voidFn, getErrorMessage } from 'utils/Utils' -import { useGetRepositoryMetadata } from 'hooks/useGetRepositoryMetadata' -import { LoadingSpinner } from 'components/LoadingSpinner/LoadingSpinner' -import { useStrings } from 'framework/strings' -import type { OpenapiGetContentOutput, TypesRepository } from 'services/code' -import { Images } from 'images' -import { RepositoryContent } from './RepositoryContent/RepositoryContent' -import { RepositoryHeader } from './RepositoryHeader/RepositoryHeader' -import { ContentHeader } from './RepositoryContent/ContentHeader/ContentHeader' -import { EmptyRepositoryInfo } from './EmptyRepositoryInfo' -import css from './Repository.module.scss' - -export default function Repository() { - const { gitRef, resourcePath, repoMetadata, error, loading, refetch, commitRef } = useGetRepositoryMetadata() - const { - data: resourceContent, - error: resourceError, - loading: resourceLoading, - isRepositoryEmpty - } = useGetResourceContent({ repoMetadata, gitRef, resourcePath, includeCommit: true }) - const [fileNotExist, setFileNotExist] = useState(false) - const { getString } = useStrings() - - useEffect(() => setFileNotExist(resourceError?.status === 404), [resourceError]) - - return ( - - - - - - - - - - - - - - fileNotExist === true, - message: getString('error404Text'), - image: Images.error404 - }}> - - - - - - - {!!repoMetadata && ( - <> - - - {!!resourceContent && ( - - )} - - {isRepositoryEmpty && } - - )} - - - - - ) -} diff --git a/web/src/pages/Repository/RepositoryContent/ContentHeader/ContentHeader.module.scss b/web/src/pages/Repository/RepositoryContent/ContentHeader/ContentHeader.module.scss deleted file mode 100644 index 1a43307e5b..0000000000 --- a/web/src/pages/Repository/RepositoryContent/ContentHeader/ContentHeader.module.scss +++ /dev/null @@ -1,214 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
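// The deleted Repository page hunk above treats an HTTP 404 from the
// resource-content call as "file does not exist" (rendering the error404 card)
// while other fetch errors go through the normal page error path. A condensed
// sketch of that derivation, assuming the same `resourceError` shape
// ({ status?: number }); the helper name `useFileNotFound` is illustrative.
import { useEffect, useState } from 'react'

export function useFileNotFound(resourceError?: { status?: number }) {
  const [fileNotExist, setFileNotExist] = useState(false)

  useEffect(() => {
    // Only a 404 flips the flag; any other error leaves it off.
    setFileNotExist(resourceError?.status === 404)
  }, [resourceError])

  return fileNotExist
}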
- */ - -.main { - padding: var(--spacing-large) var(--spacing-xlarge) 0 var(--spacing-xlarge) !important; - position: relative; - - div[class*='TextInput'] { - margin-bottom: 0 !important; - margin-left: 0 !important; - - input { - background: transparent !important; - border: none; - } - } - - > div { - align-items: center; - padding-bottom: var(--spacing-large) !important; - // border-bottom: 1px solid var(--grey-100); - } - - .btnColorFix > span[data-icon] { - svg[fill='none'] g > path { - fill: none !important; - } - } - - .refRoot, - .rootSlash { - align-self: center; - } - - :global { - .bp3-breadcrumb, - .bp3-breadcrumb-current, - .bp3-breadcrumbs-collapsed { - white-space: nowrap !important; - font-size: 13px; - font-weight: var(--typography-weight, normal); - font-family: var(--font-family); - color: var(--grey-900); - line-height: var(--typography-line-height, var(--line-height)); - } - - .bp3-breadcrumbs > li::after { - background: none; - content: '/'; - color: var(--grey-900); - background: none; - text-align: center; - height: 100%; - } - - .bp3-breadcrumbs-collapsed { - background: var(--grey-100); - } - } - - .searchBox { - position: absolute; - right: 16px; - top: -50px; - z-index: 2; - padding-bottom: 0 !important; - margin: 0; - cursor: pointer; - - input, - input:focus { - border: 1px solid var(--ai-purple-600) !important; - pointer-events: none; - user-select: none; - } - - input { - width: 350px !important; - } - - svg path { - fill: var(--ai-purple-600) !important; - } - - img { - position: absolute; - top: 5px; - right: 6px; - } - } -} - -.searchModal { - --modal-width: min(970px, 80vw); - --input-width: calc(var(--modal-width) - 154px); - - width: var(--modal-width); - padding: var(--spacing-medium) var(--spacing-xxlarge) var(--spacing-xlarge); - - > span:first-of-type { - display: none; - } - - .layout { - width: 100%; - - .searchContainer { - position: relative; - - span[icon] { - display: none; - } - - .searchIcon { - position: absolute; - left: 12px; - top: 11px; - z-index: 1; - } - - img { - position: absolute; - right: 14px; - top: 6px; - } - - input { - padding-left: 35px !important; - font-size: 14px; - font-weight: 500; - line-height: 19px; - letter-spacing: 0.23749999701976776px; - border-color: var(--ai-purple-600); - color: var(--grey-500); - width: var(--input-width) !important; - } - } - - button { - --button-height: 40px !important; - } - } - - .sectionHeader { - font-size: 13px !important; - font-weight: 500 !important; - letter-spacing: 0.23749999701976776px; - color: var(--grey-500) !important; - text-transform: uppercase; - } - - .sampleQuery { - height: 44px; - background-color: var(--grey-50); - color: var(--grey-500) !important; - border-radius: 4px; - padding-left: 32px; - position: relative; - - font-size: 14px !important; - font-weight: 500 !important; - line-height: 19px !important; - letter-spacing: 0.23749999701976776px; - display: flex; - align-items: center; - - background-image: 
url('data:image/svg+xml,%3Csvg%20fill%3D%22none%22%20viewBox%3D%220%200%2017%2017%22%20xmlns%3D%22http%3A%2F%2Fwww.w3.org%2F2000%2Fsvg%22%3E%3Cpath%20d%3D%22m9.91699%208.50004h3.54171c.1878%200%20.368.07463.5008.20747.1329.13283.2075.313.2075.50086v2.47913c0%20.1879-.0746.3681-.2075.5009-.1328.1328-.313.2075-.5008.2075h-2.8334c-.1878%200-.368-.0747-.5008-.2075-.13288-.1328-.20751-.313-.20751-.5009zm0%200c0-1.77083.70831-2.83333%202.83331-3.89583m-9.91664%203.89583h3.54167c.18786%200%20.36802.07463.50086.20747.13284.13283.20747.313.20747.50086v2.47913c0%20.1879-.07463.3681-.20747.5009s-.313.2075-.50086.2075h-2.83334c-.18786%200-.36803-.0747-.50087-.2075-.13283-.1328-.20746-.313-.20746-.5009zm0%200c0-1.77083.70833-2.83333%202.83333-3.89583%22%20stroke%3D%22%23dad0f6%22%20stroke-linecap%3D%22round%22%2F%3E%3Cg%20fill%3D%22%23dad0f6%22%3E%3Cpath%20d%3D%22m10%209h4v3h-4z%22%2F%3E%3Cpath%20d%3D%22m3%209h4v3h-4z%22%2F%3E%3C%2Fg%3E%3C%2Fsvg%3E'); - background-repeat: no-repeat; - background-size: 16px; - background-position: left 12px top 7px; - - &:hover, - &.selected { - background-color: var(--grey-100); - } - - &.selected svg { - visibility: visible; - } - - svg { - position: absolute; - top: 14px; - right: 15px; - color: var(--grey-300); - visibility: hidden; - } - } -} - -.backdrop { - background-color: rgb(16 22 26 / 25%); -} - -.portal { - :global { - .bp3-dialog-container.bp3-overlay-content { - align-items: flex-start !important; - } - } -} diff --git a/web/src/pages/Repository/RepositoryContent/ContentHeader/ContentHeader.module.scss.d.ts b/web/src/pages/Repository/RepositoryContent/ContentHeader/ContentHeader.module.scss.d.ts deleted file mode 100644 index 0dbff10a68..0000000000 --- a/web/src/pages/Repository/RepositoryContent/ContentHeader/ContentHeader.module.scss.d.ts +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* eslint-disable */ -// This is an auto-generated file -export declare const backdrop: string -export declare const btnColorFix: string -export declare const layout: string -export declare const main: string -export declare const portal: string -export declare const refRoot: string -export declare const rootSlash: string -export declare const sampleQuery: string -export declare const searchBox: string -export declare const searchContainer: string -export declare const searchIcon: string -export declare const searchModal: string -export declare const sectionHeader: string -export declare const selected: string diff --git a/web/src/pages/Repository/RepositoryContent/ContentHeader/ContentHeader.tsx b/web/src/pages/Repository/RepositoryContent/ContentHeader/ContentHeader.tsx deleted file mode 100644 index 90ba292ef9..0000000000 --- a/web/src/pages/Repository/RepositoryContent/ContentHeader/ContentHeader.tsx +++ /dev/null @@ -1,308 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import React, { useCallback, useMemo, useState } from 'react' -import { noop } from 'lodash-es' -import { - Container, - Layout, - Button, - ButtonSize, - FlexExpander, - ButtonVariation, - Text, - Utils, - Dialog -} from '@harnessio/uicore' -import cx from 'classnames' -import { Icon } from '@harnessio/icons' -import { useHotkeys } from 'react-hotkeys-hook' -import { LongArrowDownLeft, Search } from 'iconoir-react' -import { Color } from '@harnessio/design-system' -import { Breadcrumbs, IBreadcrumbProps } from '@blueprintjs/core' -import { Link, useHistory } from 'react-router-dom' -import { useStrings } from 'framework/strings' -import { useAppContext } from 'AppContext' -import { CloneButtonTooltip } from 'components/CloneButtonTooltip/CloneButtonTooltip' -import { CodeIcon, GitInfoProps, isDir, isRefATag } from 'utils/GitUtils' -import { BranchTagSelect } from 'components/BranchTagSelect/BranchTagSelect' -import { useCreateBranchModal } from 'components/CreateBranchModal/CreateBranchModal' -import { useGetSpaceParam } from 'hooks/useGetSpaceParam' -import { ButtonRoleProps, permissionProps } from 'utils/Utils' -import { SearchInputWithSpinner } from 'components/SearchInputWithSpinner/SearchInputWithSpinner' -import svg from './search-background.svg' -import css from './ContentHeader.module.scss' - -export function ContentHeader({ - repoMetadata, - gitRef = repoMetadata.default_branch as string, - resourcePath, - resourceContent -}: Pick) { - const { getString } = useStrings() - const { routes, standalone, hooks } = useAppContext() - const history = useHistory() - const _isDir = isDir(resourceContent) - const space = useGetSpaceParam() - const [showSearchModal, setShowSearchModal] = useState(false) - const [searchSampleQueryIndex, setSearchSampleQueryIndex] = useState(0) - const [search, setSearch] = useState('') - const performSearch = useCallback( - (q: string) => { - history.push({ - pathname: routes.toCODESearch({ - repoPath: repoMetadata.path as string - }), - search: `q=${q}` - }) - }, - [history, repoMetadata.path, routes] - ) - const onSearch = useCallback(() => { - if (search?.trim()) { - performSearch(search) - } else if (searchSampleQueryIndex > 0 && searchSampleQueryIndex <= searchSampleQueries.length) { - performSearch(searchSampleQueries[searchSampleQueryIndex - 1]) - } - }, [performSearch, search, searchSampleQueryIndex]) - - useHotkeys( - 'ctrl+k', - () => { - if (!showSearchModal) { - setShowSearchModal(true) - } - }, - [showSearchModal] - ) - - const permPushResult = hooks?.usePermissionTranslate?.( - { - resource: { - resourceType: 'CODE_REPOSITORY' - }, - permissions: ['code_repo_push'] - }, - [space] - ) - const openCreateNewBranchModal = useCreateBranchModal({ - repoMetadata, - onSuccess: branchInfo => { - history.push( - routes.toCODERepository({ - repoPath: repoMetadata.path as string, - gitRef: branchInfo.name - }) - ) - }, - suggestedSourceBranch: gitRef, - showSuccessMessage: true - }) - const 
breadcrumbs = useMemo(() => { - return resourcePath.split('/').map((_path, index, paths) => { - const pathAtIndex = paths.slice(0, index + 1).join('/') - const href = routes.toCODERepository({ - repoPath: repoMetadata.path as string, - gitRef, - resourcePath: pathAtIndex - }) - - return { href, text: _path } - }) - }, [resourcePath, gitRef, repoMetadata.path, routes]) - - return ( - - - { - history.push( - routes.toCODERepository({ - repoPath: repoMetadata.path as string, - gitRef: ref, - resourcePath - }) - ) - }} - onCreateBranch={openCreateNewBranchModal} - /> - - - - - - - / - - { - return ( - - {text} - - ) - }} - /> - - - - {_isDir && ( - <> - - - ) : ( - <> - - {getString('repoDelete.deleteConfirm2', { - repo: repoMetadata?.uid - })} - - { - setDeleteConfirmString(e.currentTarget.value) - }} - /> - - - )} - - ) - }, [showConfirmPage, deleteConfirmString, loading, repoMetadata]) - - return { - openModal, - hideModal - } -} - -export default useDeleteRepoModal diff --git a/web/src/pages/RepositorySettings/GeneralSettingsContent/GeneralSettingsContent.tsx b/web/src/pages/RepositorySettings/GeneralSettingsContent/GeneralSettingsContent.tsx deleted file mode 100644 index 8d0a8f1a52..0000000000 --- a/web/src/pages/RepositorySettings/GeneralSettingsContent/GeneralSettingsContent.tsx +++ /dev/null @@ -1,189 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
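// The deleted DeleteRepoModal hunk above asks the user to type the repository
// name before deletion: the `repoDelete.deleteConfirm2` string interpolates
// `repoMetadata?.uid`, and a TextInput feeds `deleteConfirmString`. A minimal
// sketch of the confirmation gate this implies; the exact enable/disable wiring
// was lost in extraction, so "typed value must equal the repo uid" is an
// assumption here, and plain HTML elements stand in for the uicore components.
import React, { useState } from 'react'

interface DeleteConfirmProps {
  repoUid: string
  onConfirm: () => void
}

export function DeleteConfirmInput({ repoUid, onConfirm }: DeleteConfirmProps) {
  const [typed, setTyped] = useState('')
  const matches = typed === repoUid // assumption: delete only unlocks on an exact match

  return (
    <div>
      <input value={typed} onChange={e => setTyped(e.currentTarget.value)} placeholder={repoUid} />
      <button disabled={!matches} onClick={onConfirm}>
        Delete
      </button>
    </div>
  )
}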
- */ - -import React, { useState } from 'react' -import { - Container, - Layout, - Text, - Button, - ButtonVariation, - Formik, - useToaster, - ButtonSize, - TextInput -} from '@harnessio/uicore' -import { Color, Intent } from '@harnessio/design-system' -import { useMutate } from 'restful-react' -import { ACCESS_MODES, permissionProps, voidFn } from 'utils/Utils' -import { useStrings } from 'framework/strings' -import type { TypesRepository } from 'services/code' -import { useAppContext } from 'AppContext' -import { useGetSpaceParam } from 'hooks/useGetSpaceParam' -import useDeleteRepoModal from './DeleteRepoModal/DeleteRepoModal' -import css from '../RepositorySettings.module.scss' - -interface GeneralSettingsProps { - repoMetadata: TypesRepository | undefined - refetch: () => void -} - -const GeneralSettingsContent = (props: GeneralSettingsProps) => { - const { repoMetadata, refetch } = props - const { openModal: openDeleteRepoModal } = useDeleteRepoModal() - - const [editDesc, setEditDesc] = useState(ACCESS_MODES.VIEW) - const { showError, showSuccess } = useToaster() - - const space = useGetSpaceParam() - const { standalone } = useAppContext() - const { hooks } = useAppContext() - const { getString } = useStrings() - const { mutate } = useMutate({ - verb: 'PATCH', - path: `/api/v1/repos/${repoMetadata?.path}/+/` - }) - - const permEditResult = hooks?.usePermissionTranslate?.( - { - resource: { - resourceType: 'CODE_REPOSITORY' - }, - permissions: ['code_repo_edit'] - }, - [space] - ) - const permDeleteResult = hooks?.usePermissionTranslate?.( - { - resource: { - resourceType: 'CODE_REPOSITORY' - }, - permissions: ['code_repo_delete'] - }, - [space] - ) - - return ( - - {formik => { - return ( - - - - - - {getString('repositoryName')} - - - - - {repoMetadata?.uid} - - - - - - - {getString('description')} - - - - {editDesc === ACCESS_MODES.EDIT ? ( - - { - formik.setFieldValue('desc', (evt.currentTarget as HTMLInputElement)?.value) - }} - value={formik.values.desc || repoMetadata?.description} - name="desc" - /> - - - - - - ) - }} - - ) -} - -export default GeneralSettingsContent diff --git a/web/src/pages/RepositorySettings/RepositorySettings.module.scss b/web/src/pages/RepositorySettings/RepositorySettings.module.scss deleted file mode 100644 index d745a404cb..0000000000 --- a/web/src/pages/RepositorySettings/RepositorySettings.module.scss +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -.main { - min-height: var(--page-height); - background-color: var(--primary-bg) !important; - width: 100%; - margin: var(--spacing-small); - :global { - .bp3-tab { - width: 184px !important; - height: 34px; - } - - .bp3-tab-panel { - width: 100%; - height: 500px; - } - - .bp3-tab { - margin-top: 20px; - } - - .bp3-tabs.bp3-vertical > .bp3-tab-list .bp3-tab[aria-selected='true'] { - background-color: var(--primary-2); - -webkit-box-shadow: none; - box-shadow: none; - } - - // .bp3-tab-indicator-wrapper { - // height: unset !important; - // } - - // .bp3-tab:not([aria-disabled='true']):active { - // color: var(--primary-7) !important; - // background: #cdf4fe !important; - // padding: 5px; - // } - } -} - -.webhooksContent { - width: 100%; -} - -.btn { - margin-top: 5px; -} - -.webhookHeader { - padding-left: 0 !important; - margin-left: 0 !important; -} - -.contentContainer { - margin: 20px !important; -} - -.generalContainer { - width: 100%; - background: var(--grey-0) !important; - box-shadow: 0px 0px 1px rgba(40, 41, 61, 0.08), 0px 0.5px 2px rgba(96, 97, 112, 0.16); - border-radius: 4px; -} - -.label { - width: 30%; - padding-right: var(--spacing-medium); -} - -.content { - width: 70%; -} - -.deleteContainer { - display: flex; - justify-content: space-between; -} - -.saveBtn { - margin-left: var(--spacing-medium) !important; -} - -.textContainer { - width: 80%; -} - -.buttonContainer { - width: 20%; - padding-top: var(--spacing-xsmall) !important; -} diff --git a/web/src/pages/RepositorySettings/RepositorySettings.module.scss.d.ts b/web/src/pages/RepositorySettings/RepositorySettings.module.scss.d.ts deleted file mode 100644 index 49a731550e..0000000000 --- a/web/src/pages/RepositorySettings/RepositorySettings.module.scss.d.ts +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* eslint-disable */ -// This is an auto-generated file -export declare const btn: string -export declare const buttonContainer: string -export declare const content: string -export declare const contentContainer: string -export declare const deleteContainer: string -export declare const generalContainer: string -export declare const label: string -export declare const main: string -export declare const saveBtn: string -export declare const textContainer: string -export declare const webhookHeader: string -export declare const webhooksContent: string diff --git a/web/src/pages/RepositorySettings/RepositorySettings.tsx b/web/src/pages/RepositorySettings/RepositorySettings.tsx deleted file mode 100644 index 648dd3b543..0000000000 --- a/web/src/pages/RepositorySettings/RepositorySettings.tsx +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import React from 'react' - -import { PageBody, Container, Tabs } from '@harnessio/uicore' -import { useGetRepositoryMetadata } from 'hooks/useGetRepositoryMetadata' -import { useStrings } from 'framework/strings' - -import { RepositoryPageHeader } from 'components/RepositoryPageHeader/RepositoryPageHeader' -import { getErrorMessage, voidFn } from 'utils/Utils' -import { LoadingSpinner } from 'components/LoadingSpinner/LoadingSpinner' -import GeneralSettingsContent from './GeneralSettingsContent/GeneralSettingsContent' -import css from './RepositorySettings.module.scss' - -enum SettingsTab { - webhooks = 'webhooks', - general = 'general' -} -export default function RepositorySettings() { - const { repoMetadata, error, loading, refetch } = useGetRepositoryMetadata() - - const [activeTab, setActiveTab] = React.useState(SettingsTab.general) - - const { getString } = useStrings() - return ( - - - - - {repoMetadata && ( - - setActiveTab(id)} - tabList={[ - { - id: SettingsTab.general, - title: getString('general'), - panel: , - iconProps: { name: 'cog' } - } - ]}> - - )} - - - ) -} diff --git a/web/src/pages/RepositorySettings/RepossitorySettingsContent/RepositorySettingsContent.tsx b/web/src/pages/RepositorySettings/RepossitorySettingsContent/RepositorySettingsContent.tsx deleted file mode 100644 index 2ea3c9defb..0000000000 --- a/web/src/pages/RepositorySettings/RepossitorySettingsContent/RepositorySettingsContent.tsx +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -// import React from 'react' -// import type { GitInfoProps } from 'utils/GitUtils' - -// export function RepositorySettingsContent({ repoMetadata }: Pick) { -// if (repoMetadata) { -// return -// } -// return null -// } diff --git a/web/src/pages/RepositorySettings/SettingsContent.tsx b/web/src/pages/RepositorySettings/SettingsContent.tsx deleted file mode 100644 index 21803673d5..0000000000 --- a/web/src/pages/RepositorySettings/SettingsContent.tsx +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import React, { useMemo } from 'react' -import { orderBy } from 'lodash-es' -import { Container, TableV2 as Table, Text, Layout, Button, ButtonVariation } from '@harnessio/uicore' -import { Icon } from '@harnessio/icons' -import { Color } from '@harnessio/design-system' -import type { CellProps, Column } from 'react-table' -import { useStrings } from 'framework/strings' -import type { GitInfoProps } from 'utils/GitUtils' -import css from './RepositorySettings.module.scss' - -interface Hook { - url: string -} -interface SettingsContentProps extends Pick { - hooks: Hook[] -} - -export function SettingsContent({ hooks }: SettingsContentProps) { - const { getString } = useStrings() - const columns: Column[] = useMemo( - () => [ - { - id: 'url', - width: '85%', - Cell: ({ row }: CellProps) => { - return ( - - - - {row.original.url} - - ({getString('webhookListingContent')}) - - ) - } - }, - { - id: 'actions', - width: '15%', - Cell: () => { - return ( - - - - - ) - } - } - ], - [getString] - ) - - return ( - - hideHeaders columns={columns} data={orderBy(hooks)} /> - - ) -} diff --git a/web/src/pages/RepositorySettings/mockWebhooks.json b/web/src/pages/RepositorySettings/mockWebhooks.json deleted file mode 100644 index efbd3e6548..0000000000 --- a/web/src/pages/RepositorySettings/mockWebhooks.json +++ /dev/null @@ -1,14 +0,0 @@ -[ - { - "id": 1, - "url": "https://some.url/path", - "verifySsl": true, - "events": [ - "eventA", - "eventB" - ], - "Created": 1667799167640, - "Updated": 1667799167640, - "CreatedBy": 42 - } -] \ No newline at end of file diff --git a/web/src/pages/RepositoryTags/RepositoryTags.module.scss b/web/src/pages/RepositoryTags/RepositoryTags.module.scss deleted file mode 100644 index 3085b05ff3..0000000000 --- a/web/src/pages/RepositoryTags/RepositoryTags.module.scss +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -.main { - min-height: var(--page-height); - background-color: var(--primary-bg) !important; -} diff --git a/web/src/pages/RepositoryTags/RepositoryTags.module.scss.d.ts b/web/src/pages/RepositoryTags/RepositoryTags.module.scss.d.ts deleted file mode 100644 index e0cc3bd1ac..0000000000 --- a/web/src/pages/RepositoryTags/RepositoryTags.module.scss.d.ts +++ /dev/null @@ -1,19 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
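// The deleted SettingsContent hunk above renders its mock webhook list through
// a memoized react-table column array handed to uicore's TableV2. A condensed
// sketch of that column shape, assuming the same Hook type ({ url: string });
// the cell bodies are simplified stand-ins for the stripped JSX, and the typing
// mirrors the original's `Column[]` declaration.
import React, { useMemo } from 'react'
import type { CellProps, Column } from 'react-table'

interface Hook {
  url: string
}

export function useWebhookColumns(): Column<Hook>[] {
  return useMemo(
    () => [
      {
        id: 'url',
        width: '85%',
        // Shows the webhook URL; the original wraps this in uicore Layout/Text components.
        Cell: ({ row }: CellProps<Hook>) => <span>{row.original.url}</span>
      },
      {
        id: 'actions',
        width: '15%',
        // Placeholder for the action buttons rendered in the original cell.
        Cell: () => <button type="button">Edit</button>
      }
    ],
    []
  )
}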
- */ - -/* eslint-disable */ -// This is an auto-generated file -export declare const main: string diff --git a/web/src/pages/RepositoryTags/RepositoryTags.tsx b/web/src/pages/RepositoryTags/RepositoryTags.tsx deleted file mode 100644 index 435e07b5cc..0000000000 --- a/web/src/pages/RepositoryTags/RepositoryTags.tsx +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import React from 'react' -import { Container, PageBody } from '@harnessio/uicore' -import { useGetRepositoryMetadata } from 'hooks/useGetRepositoryMetadata' -import { useStrings } from 'framework/strings' -import { RepositoryPageHeader } from 'components/RepositoryPageHeader/RepositoryPageHeader' -import { LoadingSpinner } from 'components/LoadingSpinner/LoadingSpinner' -import { voidFn, getErrorMessage } from 'utils/Utils' -import { RepositoryTagsContent } from './RepositoryTagsContent/RepositoryTagsContent' -import css from './RepositoryTags.module.scss' - -export default function RepositoryTags() { - const { getString } = useStrings() - const { repoMetadata, error, loading, refetch } = useGetRepositoryMetadata() - - return ( - - - - - - {repoMetadata ? : null} - - - ) -} diff --git a/web/src/pages/RepositoryTags/RepositoryTagsContent/RepositoryTagsContent.module.scss b/web/src/pages/RepositoryTags/RepositoryTagsContent/RepositoryTagsContent.module.scss deleted file mode 100644 index 06663d8c6c..0000000000 --- a/web/src/pages/RepositoryTags/RepositoryTagsContent/RepositoryTagsContent.module.scss +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -.resourceContent { - background-color: var(--primary-bg); - - .noData > div { - height: calc(100vh - var(--page-header-height, 64px) - 120px) !important; - } -} diff --git a/web/src/pages/RepositoryTags/RepositoryTagsContent/RepositoryTagsContent.module.scss.d.ts b/web/src/pages/RepositoryTags/RepositoryTagsContent/RepositoryTagsContent.module.scss.d.ts deleted file mode 100644 index b77c2cab3c..0000000000 --- a/web/src/pages/RepositoryTags/RepositoryTagsContent/RepositoryTagsContent.module.scss.d.ts +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* eslint-disable */ -// This is an auto-generated file -export declare const noData: string -export declare const resourceContent: string diff --git a/web/src/pages/RepositoryTags/RepositoryTagsContent/RepositoryTagsContent.tsx b/web/src/pages/RepositoryTags/RepositoryTagsContent/RepositoryTagsContent.tsx deleted file mode 100644 index 896b9b6cd6..0000000000 --- a/web/src/pages/RepositoryTags/RepositoryTagsContent/RepositoryTagsContent.tsx +++ /dev/null @@ -1,139 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import React, { useEffect, useState } from 'react' -import { Container } from '@harnessio/uicore' -import { useGet } from 'restful-react' -import { useHistory } from 'react-router-dom' -import type { RepoCommitTag } from 'services/code' -import { usePageIndex } from 'hooks/usePageIndex' -import { LIST_FETCHING_LIMIT, permissionProps, PageBrowserProps } from 'utils/Utils' -import { useQueryParams } from 'hooks/useQueryParams' -import { useUpdateQueryParams } from 'hooks/useUpdateQueryParams' -import { useAppContext } from 'AppContext' -import type { GitInfoProps } from 'utils/GitUtils' -import { ResourceListingPagination } from 'components/ResourceListingPagination/ResourceListingPagination' -import { useShowRequestError } from 'hooks/useShowRequestError' -import { useGetSpaceParam } from 'hooks/useGetSpaceParam' -import { useStrings } from 'framework/strings' -import { NoResultCard } from 'components/NoResultCard/NoResultCard' -import { useCreateTagModal } from 'components/CreateTagModal/CreateTagModal' -import { RepositoryTagsContentHeader } from '../RepositoryTagsContentHeader/RepositoryTagsContentHeader' -import { TagsContent } from '../TagsContent/TagsContent' -import css from './RepositoryTagsContent.module.scss' - -export function RepositoryTagsContent({ repoMetadata }: Pick) { - const { getString } = useStrings() - const { routes } = useAppContext() - const history = useHistory() - const [searchTerm, setSearchTerm] = useState('') - const openModal = useCreateTagModal({ - repoMetadata, - onSuccess: () => { - refetch() - }, - showSuccessMessage: true - }) - const { updateQueryParams } = useUpdateQueryParams() - - const pageBrowser = useQueryParams() - const pageInit = pageBrowser.page ? 
parseInt(pageBrowser.page) : 1 - const [page, setPage] = usePageIndex(pageInit) - const { - data: branches, - response, - error, - loading, - refetch - } = useGet({ - path: `/api/v1/repos/${repoMetadata.path}/+/tags`, - queryParams: { - limit: LIST_FETCHING_LIMIT, - page, - sort: 'date', - order: 'desc', - include_commit: true, - query: searchTerm, - debounce: 500 - } - }) - - useEffect(() => { - if (page > 1) { - updateQueryParams({ page: page.toString() }) - } - }, [setPage]) // eslint-disable-line react-hooks/exhaustive-deps - - useShowRequestError(error) - const space = useGetSpaceParam() - - const { standalone } = useAppContext() - const { hooks } = useAppContext() - const permPushResult = hooks?.usePermissionTranslate?.( - { - resource: { - resourceType: 'CODE_REPOSITORY' - }, - permissions: ['code_repo_push'] - }, - [space] - ) - - return ( - - { - setPage(1) - history.push( - routes.toCODECommits({ - repoPath: repoMetadata.path as string, - commitRef: gitRef - }) - ) - }} - onSearchTermChanged={value => { - setSearchTerm(value) - setPage(1) - }} - onNewBranchCreated={refetch} - /> - - {!!branches?.length && ( - - )} - - !!branches && branches.length === 0} - forSearch={!!searchTerm} - message={getString('tagEmpty')} - onButtonClick={() => { - openModal() - }} - /> - - - - ) -} diff --git a/web/src/pages/RepositoryTags/RepositoryTagsContentHeader/RepositoryTagsContentHeader.module.scss b/web/src/pages/RepositoryTags/RepositoryTagsContentHeader/RepositoryTagsContentHeader.module.scss deleted file mode 100644 index 8cd071e957..0000000000 --- a/web/src/pages/RepositoryTags/RepositoryTagsContentHeader/RepositoryTagsContentHeader.module.scss +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -.main { - div[class*='TextInput'] { - margin-bottom: 0 !important; - margin-left: 0 !important; - - // input { - // background: transparent !important; - // border: none; - // } - } - - > div { - align-items: center; - padding-bottom: var(--spacing-xlarge) !important; - } -} - -.branchDropdown { - background-color: var(--white); -} diff --git a/web/src/pages/RepositoryTags/RepositoryTagsContentHeader/RepositoryTagsContentHeader.module.scss.d.ts b/web/src/pages/RepositoryTags/RepositoryTagsContentHeader/RepositoryTagsContentHeader.module.scss.d.ts deleted file mode 100644 index 8864cab8a7..0000000000 --- a/web/src/pages/RepositoryTags/RepositoryTagsContentHeader/RepositoryTagsContentHeader.module.scss.d.ts +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
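// The deleted RepositoryTagsContent hunk above pages through tags with
// restful-react's useGet against the /+/tags endpoint, using the query
// parameters shown in the diff. A minimal sketch of that call; the wrapper
// name `useTagList`, the hard-coded limit, and the `lazy` guard are
// illustrative additions, not part of the original source.
import { useGet } from 'restful-react'
import type { RepoCommitTag } from 'services/code'

export function useTagList(repoPath: string, page: number, searchTerm: string) {
  return useGet<RepoCommitTag[]>({
    path: `/api/v1/repos/${repoPath}/+/tags`,
    queryParams: {
      limit: 20, // the original uses LIST_FETCHING_LIMIT from utils/Utils
      page,
      sort: 'date',
      order: 'desc',
      include_commit: true,
      query: searchTerm,
      debounce: 500 // kept alongside the query parameters, as in the original hunk
    },
    lazy: !repoPath // assumption: skip the request until a repo path is known
  })
}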
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* eslint-disable */ -// This is an auto-generated file -export declare const branchDropdown: string -export declare const main: string diff --git a/web/src/pages/RepositoryTags/RepositoryTagsContentHeader/RepositoryTagsContentHeader.tsx b/web/src/pages/RepositoryTags/RepositoryTagsContentHeader/RepositoryTagsContentHeader.tsx deleted file mode 100644 index 20c527e86f..0000000000 --- a/web/src/pages/RepositoryTags/RepositoryTagsContentHeader/RepositoryTagsContentHeader.tsx +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import React, { useState } from 'react' -import { Container, Layout, FlexExpander, ButtonVariation } from '@harnessio/uicore' -import { useStrings } from 'framework/strings' -import { GitBranchType, CodeIcon, GitInfoProps } from 'utils/GitUtils' -import { SearchInputWithSpinner } from 'components/SearchInputWithSpinner/SearchInputWithSpinner' -import { CreateTagModalButton } from 'components/CreateTagModal/CreateTagModal' -import css from './RepositoryTagsContentHeader.module.scss' - -interface RepositoryTagsContentHeaderProps extends Pick { - loading?: boolean - activeBranchType?: GitBranchType - onBranchTypeSwitched: (branchType: GitBranchType) => void - onSearchTermChanged: (searchTerm: string) => void - onNewBranchCreated: () => void -} - -export function RepositoryTagsContentHeader({ - onSearchTermChanged, - repoMetadata, - onNewBranchCreated, - loading -}: RepositoryTagsContentHeaderProps) { - const { getString } = useStrings() - const [searchTerm, setSearchTerm] = useState('') - - return ( - - - { - setSearchTerm(value) - onSearchTermChanged(value) - }} - /> - - - - - ) -} diff --git a/web/src/pages/RepositoryTags/TagsContent/TagsContent.module.scss b/web/src/pages/RepositoryTags/TagsContent/TagsContent.module.scss deleted file mode 100644 index 4c8c3f5d3c..0000000000 --- a/web/src/pages/RepositoryTags/TagsContent/TagsContent.module.scss +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -.container { - .table { - background-color: var(--white) !important; - - [class*='TableV2--header'] { - box-shadow: 0px 0px 1px rgb(40 41 61 / 8%), 0px 0.5px 2px rgb(96 97 112 / 16%); - border-bottom: 1px solid var(--grey-100); - - [class*='variation-table-headers'] { - text-transform: none; - color: var(--grey-400); - font-weight: 500; - font-size: 13px; - } - } - - .row { - justify-content: center; - padding-top: 0; - padding-bottom: 0px; - height: 40px; - box-shadow: 0px 0px 1px rgba(40, 41, 61, 0.08), 0px 0.5px 2px rgba(96, 97, 112, 0.16); - overflow: hidden; - border-radius: 0; - margin-bottom: 0; - - &.odd { - background-color: var(--grey-50); - } - - &:hover { - background-color: #0092e40a !important; - } - - .rowText { - font-size: 13px; - font-weight: 400; - - &.defaultBranch .commitLink { - font-weight: 600; - } - - .spacer { - display: inline-block; - width: 6px; - } - } - - .commitLink { - font-weight: 400; - font-size: 13px; - color: var(--black); - - &:hover { - color: var(--primary-8); - } - } - } - } -} - -.popover { - padding: var(--spacing-small); -} diff --git a/web/src/pages/RepositoryTags/TagsContent/TagsContent.module.scss.d.ts b/web/src/pages/RepositoryTags/TagsContent/TagsContent.module.scss.d.ts deleted file mode 100644 index 0010c02e45..0000000000 --- a/web/src/pages/RepositoryTags/TagsContent/TagsContent.module.scss.d.ts +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* eslint-disable */ -// This is an auto-generated file -export declare const commitLink: string -export declare const container: string -export declare const defaultBranch: string -export declare const odd: string -export declare const popover: string -export declare const row: string -export declare const rowText: string -export declare const spacer: string -export declare const table: string diff --git a/web/src/pages/RepositoryTags/TagsContent/TagsContent.tsx b/web/src/pages/RepositoryTags/TagsContent/TagsContent.tsx deleted file mode 100644 index cd9cb77f22..0000000000 --- a/web/src/pages/RepositoryTags/TagsContent/TagsContent.tsx +++ /dev/null @@ -1,246 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import React, { useMemo } from 'react' -import { Container, TableV2 as Table, Text, Avatar, useToaster, StringSubstitute } from '@harnessio/uicore' -import { Color, Intent } from '@harnessio/design-system' -import type { CellProps, Column } from 'react-table' -import { Link, useHistory } from 'react-router-dom' -import cx from 'classnames' -import Keywords from 'react-keywords' -import { useMutate } from 'restful-react' -import { noop } from 'lodash-es' -import { String, useStrings } from 'framework/strings' -import { useAppContext } from 'AppContext' - -import type { RepoBranch, RepoCommitTag, TypesRepository } from 'services/code' -import { formatDate, getErrorMessage, voidFn } from 'utils/Utils' -import { useConfirmAction } from 'hooks/useConfirmAction' -import { OptionsMenuButton } from 'components/OptionsMenuButton/OptionsMenuButton' -import { useCreateBranchModal } from 'components/CreateBranchModal/CreateBranchModal' -import { CommitActions } from 'components/CommitActions/CommitActions' -import { CodeIcon, REFS_TAGS_PREFIX } from 'utils/GitUtils' -import css from './TagsContent.module.scss' - -interface TagsContentProps { - searchTerm?: string - repoMetadata: TypesRepository - branches: RepoBranch[] - onDeleteSuccess: () => void -} - -export function TagsContent({ repoMetadata, searchTerm = '', branches, onDeleteSuccess }: TagsContentProps) { - const { routes } = useAppContext() - const history = useHistory() - const { getString } = useStrings() - - const onSuccess = voidFn(noop) - - const columns: Column[] = useMemo( - () => [ - { - Header: getString('tag'), - width: '20%', - Cell: ({ row }: CellProps) => { - return ( - - - {row.original?.name} - - - ) - } - }, - { - Header: getString('description'), - width: '35%', - Cell: ({ row }: CellProps) => { - return ( - - {row.original?.message} - - ) - } - }, - { - Header: getString('commit'), - Id: 'commit', - width: '15%', - Cell: ({ row }: CellProps) => { - return ( - - ) - } - }, - - { - Header: getString('tagger'), - width: '15%', - Cell: ({ row }: CellProps) => { - return ( - - {row.original.tagger?.identity?.name ? ( - - ) : ( - '' - )} - - {row.original.tagger?.identity?.name || ''} - - ) - } - }, - { - Header: getString('creationDate'), - width: '200px', - Cell: ({ row }: CellProps) => { - return row.original.tagger?.when ? 
( - - - {formatDate(row.original.tagger?.when as string)} - - ) : ( - '' - ) - } - }, - { - id: 'action', - width: '30px', - Cell: ({ row }: CellProps) => { - const { mutate: deleteBranch } = useMutate({ - verb: 'DELETE', - path: `/api/v1/repos/${repoMetadata.path}/+/tags/${row.original.name}` - }) - const { showSuccess, showError } = useToaster() - const confirmDeleteTag = useConfirmAction({ - title: getString('deleteTag'), - confirmText: getString('confirmDelete'), - intent: Intent.DANGER, - message: , - action: async () => { - deleteBranch({}) - .then(() => { - showSuccess( - , - 5000 - ) - onDeleteSuccess() - }) - .catch(error => { - showError(getErrorMessage(error), 0, 'failedToDeleteTag') - }) - } - }) - const openModal = useCreateBranchModal({ - repoMetadata, - onSuccess, - showSuccessMessage: true, - suggestedSourceBranch: row.original.name, - showBranchTag: false, - refIsATag: true - }) - - return ( - { - openModal() - } - }, - { - text: getString('viewFiles'), - iconName: CodeIcon.FileLight, - iconSize: 16, - hasIcon: true, - onClick: () => { - history.push( - routes.toCODERepository({ - repoPath: repoMetadata.path as string, - gitRef: `${REFS_TAGS_PREFIX}${row.original?.name}` - }) - ) - } - }, - '-', - { - text: getString('deleteTag'), - iconName: CodeIcon.Delete, - iconSize: 16, - hasIcon: true, - isDanger: true, - onClick: confirmDeleteTag - } - ]} - isDark - /> - ) - } - } - ], - [ - // eslint-disable-line react-hooks/exhaustive-deps - getString, - routes, - searchTerm, - history, - onDeleteSuccess, - repoMetadata, - onSuccess - ] // eslint-disable-line react-hooks/exhaustive-deps - ) - - return ( - - - className={css.table} - columns={columns} - data={branches || []} - getRowClassName={row => cx(css.row, (row.index + 1) % 2 ? css.odd : '')} - /> - - ) -} diff --git a/web/src/pages/Search/Search.module.scss b/web/src/pages/Search/Search.module.scss deleted file mode 100644 index 1b2d9f335a..0000000000 --- a/web/src/pages/Search/Search.module.scss +++ /dev/null @@ -1,245 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
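// The deleted TagsContent hunk above removes a tag with a DELETE call against
// /+/tags/<name>, wrapped in useConfirmAction so the user confirms first. A
// minimal sketch of the delete call itself, assuming restful-react's useMutate;
// the original's toast and refetch handling is left to the caller.
import { useMutate } from 'restful-react'

export function useDeleteTag(repoPath: string, tagName: string) {
  const { mutate: deleteTag } = useMutate({
    verb: 'DELETE',
    path: `/api/v1/repos/${repoPath}/+/tags/${tagName}`
  })

  // The original sends an empty body and reacts via .then/.catch
  // (success toast, list refetch, error toast).
  return () => deleteTag({})
}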
- */ - -@import 'src/utils/utils'; - -.main { - --header-height: 128px; - --border-color: var(--grey-100); - - min-height: var(--page-height); - background-color: var(--primary-bg) !important; - - .pageHeader { - flex-direction: column; - height: var(--header-height); - align-items: normal !important; - justify-content: flex-start !important; - padding-top: 0 !important; - - [class*='breadcrumbs'] > [class*='module_header'] { - padding-top: 13px !important; - } - - .searchBox { - display: flex; - flex-direction: row; - - > div { - flex-grow: 1; - - div { - width: 100%; - } - } - - input { - width: calc(100% - 8px) !important; - border: 1px solid var(--ai-purple-600) !important; - } - - svg path { - fill: var(--ai-purple-600) !important; - } - } - - & + div { - --page-header-height: var(--header-height) !important; - } - } - - .split { - > div:first-of-type { - background-color: #fbfcfd; - overflow: auto; - } - - > div:last-of-type { - background-color: var(--white); - overflow: scroll; - } - - .searchResult { - padding: var(--spacing-medium) var(--spacing-large) var(--spacing-large) var(--spacing-xlarge); - - .resultTitle { - text-transform: uppercase; - - font-size: 10px; - font-weight: 600; - color: var(--grey-400); - } - - .result { - padding: var(--spacing-medium); - border: 1px solid rgba(243, 243, 250, 1); - border-radius: 5px; - background-color: var(--white); - - &.selected { - border-color: rgba(217, 218, 229, 1); - background-color: rgba(246, 241, 255, 1); - box-shadow: 0px 0.5px 2px 0px rgba(96, 97, 112, 0.16), 0px 0px 1px 0px rgba(40, 41, 61, 0.08); - } - - &:hover:not(.selected) { - border-color: rgba(217, 218, 229, 1); - background-color: rgba(246, 241, 255, 0.5); - } - - .layout { - align-items: baseline; - } - - .texts { - flex-grow: 1; - } - - .filename { - font-size: 13px; - font-weight: 600; - color: rgba(79, 81, 98, 1); - } - - .path { - font-size: 11px; - font-weight: 500; - color: rgba(146, 147, 171, 1); - } - - .aiLabel { - background: var(--ai-purple-100); - color: var(--ai-purple-600); - text-transform: uppercase; - font-size: 8px; - font-weight: 800; - text-align: center; - padding: 3px 6px; - border-radius: 2px; - white-space: nowrap; - } - } - } - - :global { - .Resizer.vertical { - width: 13px; - background-color: var(--border-color); - opacity: 1; - - &:active, - &:focus, - &:hover { - background-color: var(--primary-6); - border-color: transparent !important; - } - } - } - - .preview { - height: 100%; - position: relative; - - &.noResult { - > * { - visibility: hidden; - } - } - - .filePath { - height: 45px; - border-bottom: 1px solid var(--border-color); - display: flex; - align-items: center; - padding: 0 var(--spacing-medium); - - > div:first-of-type { - flex-grow: 1; - width: calc(100% - 150px); - } - - button { - white-space: nowrap; - } - - .pathText { - align-self: center; - color: var(--grey-500); - } - - :global { - .bp3-breadcrumb, - .bp3-breadcrumb-current, - .bp3-breadcrumbs-collapsed { - white-space: nowrap !important; - font-size: 13px; - font-weight: 400; - color: var(--grey-500); - } - - .bp3-breadcrumbs > li::after { - background: none; - content: '/'; - color: var(--grey-500); - background: none; - text-align: center; - height: 100%; - } - - .bp3-breadcrumbs-collapsed { - background: var(--grey-100); - } - } - } - - .fileContent { - flex-grow: 1; - height: calc(100% - 45px); - overflow: auto; - - :global { - .cm-editor { - border: none; - - .cm-scroller { - padding: 0; - - .cm-line { - &, - * { - @include mono-font; - } - } - } - - 
.cm-gutters { - border-right: none; - - .cm-gutterElement { - padding-left: 30px; - padding-right: 6px; - } - } - } - } - } - - .highlightLineNumber { - background-color: var(--ai-purple-100); - } - } - } -} diff --git a/web/src/pages/Search/Search.module.scss.d.ts b/web/src/pages/Search/Search.module.scss.d.ts deleted file mode 100644 index 20a628a1af..0000000000 --- a/web/src/pages/Search/Search.module.scss.d.ts +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* eslint-disable */ -// This is an auto-generated file -export declare const aiLabel: string -export declare const fileContent: string -export declare const filename: string -export declare const filePath: string -export declare const highlightLineNumber: string -export declare const layout: string -export declare const main: string -export declare const noResult: string -export declare const pageHeader: string -export declare const path: string -export declare const pathText: string -export declare const preview: string -export declare const result: string -export declare const resultTitle: string -export declare const searchBox: string -export declare const searchResult: string -export declare const selected: string -export declare const split: string -export declare const texts: string diff --git a/web/src/pages/Search/Search.tsx b/web/src/pages/Search/Search.tsx deleted file mode 100644 index 581c79e494..0000000000 --- a/web/src/pages/Search/Search.tsx +++ /dev/null @@ -1,359 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import React, { useCallback, useEffect, useMemo, useRef, useState } from 'react' -import { - Button, - ButtonSize, - ButtonVariation, - Container, - Layout, - PageBody, - stringSubstitute, - Text, - useToaster -} from '@harnessio/uicore' -import cx from 'classnames' -import { lineNumbers, ViewUpdate } from '@codemirror/view' -import { Breadcrumbs, IBreadcrumbProps } from '@blueprintjs/core' -import { Link, useHistory, useLocation } from 'react-router-dom' -import { EditorView } from '@codemirror/view' -import { Match, Truthy, Falsy } from 'react-jsx-match' -import { Icon } from '@harnessio/icons' -import { useMutate } from 'restful-react' -import { Editor } from 'components/Editor/Editor' -import { useGetRepositoryMetadata } from 'hooks/useGetRepositoryMetadata' -import { useStrings } from 'framework/strings' -import { RepositoryPageHeader } from 'components/RepositoryPageHeader/RepositoryPageHeader' -import { Split } from 'components/Split/Split' -import { CodeIcon, decodeGitContent } from 'utils/GitUtils' -import { LoadingSpinner } from 'components/LoadingSpinner/LoadingSpinner' -import { useQueryParams } from 'hooks/useQueryParams' -import { useAppContext } from 'AppContext' -import { SearchInputWithSpinner } from 'components/SearchInputWithSpinner/SearchInputWithSpinner' -import { voidFn, getErrorMessage, ButtonRoleProps } from 'utils/Utils' -import type { RepoFileContent } from 'services/code' -import { useShowRequestError } from 'hooks/useShowRequestError' -import { NoResultCard } from 'components/NoResultCard/NoResultCard' -import { useGetResourceContent } from 'hooks/useGetResourceContent' -import { addClassToLinesExtension } from 'utils/codemirror/addClassToLinesExtension' -import css from './Search.module.scss' - -export default function Search() { - const { showError } = useToaster() - const history = useHistory() - const location = useLocation() - const highlightedLines = useRef([]) - const [highlightlLineNumbersExtension, updateHighlightlLineNumbers] = useMemo( - () => addClassToLinesExtension([], css.highlightLineNumber), - [] - ) - const extensions = useMemo(() => { - return [ - lineNumbers({ - formatNumber: (lineNo: number) => lineNo.toString() - }), - highlightlLineNumbersExtension - ] - }, [highlightlLineNumbersExtension]) - const viewRef = useRef() - const { getString } = useStrings() - const { routes } = useAppContext() - const { q } = useQueryParams<{ q: string }>() - const [searchTerm, setSearchTerm] = useState(q || '') - const { repoMetadata, error, loading, refetch } = useGetRepositoryMetadata() - const [resourcePath, setResourcePath] = useState('') - const [filename, setFileName] = useState('') - const gitRef = useMemo(() => repoMetadata?.default_branch || '', [repoMetadata]) - const breadcrumbs = useMemo(() => { - return repoMetadata?.path - ? 
resourcePath.split('/').map((_path, index, paths) => { - const pathAtIndex = paths.slice(0, index + 1).join('/') - const href = routes.toCODERepository({ - repoPath: repoMetadata.path as string, - gitRef, - resourcePath: pathAtIndex - }) - - return { href, text: _path } - }) - : [] - }, [resourcePath, repoMetadata, gitRef, routes]) - const onSelectResult = useCallback( - (fileName: string, filePath: string, _content: string, _highlightedLines: number[]) => { - updateHighlightlLineNumbers(_highlightedLines, viewRef.current) - highlightedLines.current = _highlightedLines - setFileName(fileName) - setResourcePath(filePath) - }, - [updateHighlightlLineNumbers] - ) - const { - data: resourceContent, - error: resourceError = null, - loading: resourceLoading - } = useGetResourceContent({ repoMetadata, gitRef, resourcePath, includeCommit: false, lazy: !resourcePath }) - const fileContent: string = useMemo( - () => - resourceContent?.path === resourcePath - ? decodeGitContent((resourceContent?.content as RepoFileContent)?.data) - : resourceError - ? getString('failedToFetchFileContent') - : '', - - [resourceContent?.content, resourceContent?.path, resourcePath, resourceError, getString] - ) - - // eslint-disable-next-line react-hooks/exhaustive-deps - const onViewUpdate = useCallback(({ view, docChanged }: ViewUpdate) => { - const firstLine = (highlightedLines.current || [])[0] - - if (docChanged && firstLine > 0 && view.state.doc.lines >= firstLine) { - view.dispatch({ - effects: EditorView.scrollIntoView(view.state.doc.line(firstLine).from, { y: 'start', yMargin: 18 * 2 }) - }) - } - }, []) - const [loadingSearch, setLoadingSearch] = useState(false) - const { mutate: sendSearch } = useMutate({ - verb: 'POST', - path: `/api/v1/repos/${repoMetadata?.path}/+/semantic/search` - }) - const [searchResult, setSearchResult] = useState([]) - const performSearch = useCallback(() => { - setLoadingSearch(true) - history.replace({ pathname: location.pathname, search: `q=${searchTerm}` }) - - sendSearch({ query: searchTerm }) - .then(response => { - setSearchResult(response) - }) - .catch(exception => { - showError(getErrorMessage(exception), 0) - }) - .finally(() => { - setLoadingSearch(false) - }) - }, [searchTerm, history, location, sendSearch, showError]) - - useEffect(() => { - if (q && repoMetadata?.path) { - performSearch() - } - }, [repoMetadata?.path]) // eslint-disable-line react-hooks/exhaustive-deps - - useEffect(() => { - if (fileContent && fileContent !== viewRef?.current?.state.doc.toString()) { - viewRef?.current?.dispatch({ - changes: { from: 0, to: viewRef?.current?.state.doc.length, insert: fileContent } - }) - } - }, [fileContent]) - - useShowRequestError(resourceError) - - return ( - - - - - - - - - - {getString('privacyPolicy')} , - terms: {getString('termsOfUse')} - }} - /> - - - - - ) -} diff --git a/web/src/pages/SignUp/SignUp.module.scss b/web/src/pages/SignUp/SignUp.module.scss deleted file mode 100644 index b03f62775a..0000000000 --- a/web/src/pages/SignUp/SignUp.module.scss +++ /dev/null @@ -1,129 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -.root { - /* box-shadow: 0 1px 3px 0 rgba(0, 0, 0, 0.1),0 1px 2px 0 rgba(0, 0, 0, 0.06); */ - box-shadow: 0 12px 18px 2px rgb(34 0 51 / 4%), 0 6px 22px 4px rgb(7 48 114 / 12%), 0 6px 10px -4px rgb(14 13 26 / 12%); - box-shadow: rgba(0, 0, 0, 0) 0px 0px 0px 0px, rgba(0, 0, 0, 0) 0px 0px 0px 0px, rgba(0, 0, 0, 0.1) 0px 1px 3px 0px, - rgba(0, 0, 0, 0.06) 0px 1px 2px 0px; - box-sizing: border-box; - background: rgb(255, 255, 255); - border-radius: 16px; - border-radius: 6px; - display: block; - padding: 32px 40px; - width: 420px; - position: relative; - - h2 { - font-weight: 400; - font-size: 20px; - text-align: center; - margin-bottom: 25px; - margin-top: 40px; - } - - /* box-sizing: border-box; - background: #FFFFFF; - border-radius: 8px; - border: 1px solid #F1F5F9; - display: block; - margin: 80px auto; - padding: 32px 40px; - width: 420px; */ - - label { - display: block; - font-weight: 500; - margin-bottom: 10px; - margin-top: 20px; - display: none; - } - - .input { - display: block; - padding: 12px 12px; - width: 100%; - box-shadow: none; - border-radius: 6px; - } - - .submit { - background: #0f172a; - box-shadow: rgb(0 0 0 / 0%) 0px 0px 0px 0px, rgb(0 0 0 / 0%) 0px 0px 0px 0px, rgb(0 0 0 / 20%) 0px 1px 2px 0px; - justify-content: center; - padding: 12px 12px; - width: 100%; - border-radius: 6px; - font-size: 14px; - - &:hover { - background: #1e293b; - } - - &:active { - background: #1e40af; - } - } -} - -.field { - margin: 15px 0px; -} - -.actions { - margin-top: 20px; - text-align: center; - - a, - a:visited { - color: #0060e0; - } -} - -.logo { - position: absolute; - /* top: -50px; - left: 161px; */ - top: -40px; - left: 175px; - height: 80px; - width: 80px; - left: calc(50% - 40px); - transform: rotate(45deg); - display: flex; - align-items: center; - justify-content: center; - - border-color: transparent #e2e8f0 #e2e8f0 transparent; - border-style: solid; - border-width: 1px; - border-radius: 100%; - background: #f7fafc; - - img { - width: 100px; - height: 100px; - width: 60px; - height: 60px; - transform: rotate(-45deg) translateX(1px); - } -} - -.signUpContainer { - height: 100%; - padding: 4% 0px !important; -} diff --git a/web/src/pages/SignUp/SignUp.module.scss.d.ts b/web/src/pages/SignUp/SignUp.module.scss.d.ts deleted file mode 100644 index 36106104d1..0000000000 --- a/web/src/pages/SignUp/SignUp.module.scss.d.ts +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -/* eslint-disable */ -// This is an auto-generated file -export declare const actions: string -export declare const field: string -export declare const input: string -export declare const logo: string -export declare const root: string -export declare const signUpContainer: string -export declare const submit: string diff --git a/web/src/pages/SignUp/SignUp.tsx b/web/src/pages/SignUp/SignUp.tsx deleted file mode 100644 index 2268f048eb..0000000000 --- a/web/src/pages/SignUp/SignUp.tsx +++ /dev/null @@ -1,147 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import React, { useCallback } from 'react' -import { - Button, - Container, - FormInput, - Formik, - FormikForm, - Layout, - StringSubstitute, - Text, - useToaster -} from '@harnessio/uicore' -import { Color } from '@harnessio/design-system' -import * as Yup from 'yup' -import { Link } from 'react-router-dom' -import { useStrings } from 'framework/strings' -import AuthLayout from 'components/AuthLayout/AuthLayout' -import { useAppContext } from 'AppContext' -import { getErrorMessage, type RegisterForm } from 'utils/Utils' -import { useOnRegister } from 'services/code' -import css from './SignUp.module.scss' - -export const SignUp: React.FC = () => { - const { routes } = useAppContext() - const { getString } = useStrings() - const { showError, showSuccess } = useToaster() - - const { mutate } = useOnRegister({ - queryParams: { - include_cookie: true - } - }) - const onRegister = useCallback( - (data: RegisterForm) => { - mutate( - { - display_name: data.username, - email: data.email, - uid: data.username, - password: data.password - }, - { - headers: { Authorization: '' } - } - ) - .then(() => { - showSuccess(getString('userCreated')) - window.location.replace(window.location.origin + routes.toCODEHome()) - }) - .catch(error => { - showError(getErrorMessage(error)) - }) - }, - [mutate, showSuccess, showError, getString, routes] - ) - - const handleSubmit = (data: RegisterForm): void => { - if (data.username && data.password) { - onRegister(data) - } - } - return ( - - - - {getString('signUp')} - - - - - initialValues={{ username: '', email: '', password: '', confirmPassword: '' }} - formName="loginPageForm" - validationSchema={Yup.object().shape({ - username: Yup.string().required(getString('userNameRequired')), - email: Yup.string().email().required(getString('emailRequired')), - password: Yup.string().min(6, getString('minPassLimit')).required(getString('passwordRequired')), - confirmPassword: Yup.string() - .required(getString('confirmPassRequired')) - .oneOf([Yup.ref('password')], getString('matchPassword')) - })} - onSubmit={handleSubmit}> - - - - - - - - - - - - - - - {getString('privacyPolicy')} , - terms: {getString('termsOfUse')} - }} - /> - - - - {getString('alreadyHaveAccount')} - {getString('signIn')} - - - - ) -} diff --git a/web/src/pages/SpaceAccessControl/AddNewMember/AddNewMember.tsx b/web/src/pages/SpaceAccessControl/AddNewMember/AddNewMember.tsx deleted 
file mode 100644 index cde362d1ab..0000000000 --- a/web/src/pages/SpaceAccessControl/AddNewMember/AddNewMember.tsx +++ /dev/null @@ -1,166 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import React, { useMemo, useState } from 'react' -import { Button, ButtonVariation, Dialog, FormikForm, FormInput, SelectOption, useToaster } from '@harnessio/uicore' -import { Formik } from 'formik' -import * as Yup from 'yup' -import { useGet } from 'restful-react' -import { useStrings } from 'framework/strings' -import { useGetSpaceParam } from 'hooks/useGetSpaceParam' -import { - MembershipAddRequestBody, - TypesMembershipUser, - TypesPrincipalInfo, - useMembershipAdd, - useMembershipUpdate -} from 'services/code' -import { getErrorMessage, LIST_FETCHING_LIMIT } from 'utils/Utils' -import { useModalHook } from 'hooks/useModalHook' -import { roleStringKeyMap } from '../SpaceAccessControl' - -const roles = ['reader', 'executor', 'contributor', 'space_owner'] as const - -const useAddNewMember = ({ onClose }: { onClose: () => void }) => { - const [isEditFlow, setIsEditFlow] = useState(false) - const [membershipDetails, setMembershipDetails] = useState() - const [searchTerm, setSearchTerm] = useState('') - - const space = useGetSpaceParam() - const { getString } = useStrings() - const { showError, showSuccess } = useToaster() - - const { mutate: addMembership } = useMembershipAdd({ space_ref: space }) - const { mutate: updateMembership } = useMembershipUpdate({ - space_ref: space, - user_uid: membershipDetails?.principal?.uid || '' - }) - - const { data: users, loading: fetchingUsers } = useGet({ - path: `/api/v1/principals`, - queryParams: { - query: searchTerm, - page: 1, - limit: LIST_FETCHING_LIMIT, - type: 'user' - }, - debounce: 500 - }) - - const roleOptions: SelectOption[] = useMemo( - () => - roles.map(role => ({ - value: role, - label: getString(roleStringKeyMap[role]) - })), - [] // eslint-disable-line react-hooks/exhaustive-deps - ) - - const userOptions: SelectOption[] = useMemo( - () => - users?.map(user => ({ - value: user.uid as string, - label: (user.display_name || user.email) as string - })) || [], - [users] - ) - const [selectUser, setSelectUser] = useState() - - const handleClose = () => { - setSearchTerm('') - hideModal() - } - - const [openModal, hideModal] = useModalHook(() => { - return ( - - - initialValues={{ - user_uid: membershipDetails?.principal?.uid || '', - role: membershipDetails?.role || 'reader' - }} - validationSchema={Yup.object().shape({ - user_uid: Yup.string().required(getString('validation.uidRequired')) - })} - onSubmit={async values => { - try { - if (isEditFlow) { - await updateMembership({ role: values.role }) - showSuccess(getString('spaceMemberships.memberUpdated')) - } else { - await addMembership(values) - showSuccess(getString('spaceMemberships.memberAdded')) - } - - handleClose() - onClose() - } catch (error) { - showError(getErrorMessage(error)) - } - }}> - - setSelectUser(item)} - /> - - - ) - 
}, [isEditFlow, membershipDetails, userOptions, selectUser]) - - return { - openModal: (isEditing?: boolean, memberInfo?: TypesPrincipalInfo) => { - openModal() - setIsEditFlow(Boolean(isEditing)) - setMembershipDetails(memberInfo) - }, - hideModal - } -} - -export default useAddNewMember diff --git a/web/src/pages/SpaceAccessControl/SpaceAccessControl.module.scss b/web/src/pages/SpaceAccessControl/SpaceAccessControl.module.scss deleted file mode 100644 index d745229684..0000000000 --- a/web/src/pages/SpaceAccessControl/SpaceAccessControl.module.scss +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -.mainCtn { - height: var(--page-height); - background-color: var(--primary-bg) !important; - - .roleBadge { - padding: var(--spacing-xsmall) 6px; - border-radius: 4px; - border: 1px solid var(--grey-200); - background: var(--grey-50); - width: max-content; - } -} diff --git a/web/src/pages/SpaceAccessControl/SpaceAccessControl.module.scss.d.ts b/web/src/pages/SpaceAccessControl/SpaceAccessControl.module.scss.d.ts deleted file mode 100644 index b3fe1e10bd..0000000000 --- a/web/src/pages/SpaceAccessControl/SpaceAccessControl.module.scss.d.ts +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* eslint-disable */ -// This is an auto-generated file -export declare const mainCtn: string -export declare const roleBadge: string diff --git a/web/src/pages/SpaceAccessControl/SpaceAccessControl.tsx b/web/src/pages/SpaceAccessControl/SpaceAccessControl.tsx deleted file mode 100644 index 84ee4925a2..0000000000 --- a/web/src/pages/SpaceAccessControl/SpaceAccessControl.tsx +++ /dev/null @@ -1,161 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import React, { useMemo } from 'react' -import { Avatar, Button, ButtonVariation, Container, Layout, Page, TableV2, Text, useToaster } from '@harnessio/uicore' -import { Color, FontVariation } from '@harnessio/design-system' -import type { CellProps, Column } from 'react-table' - -import { StringKeys, useStrings } from 'framework/strings' -import { useConfirmAct } from 'hooks/useConfirmAction' -import { useGetSpaceParam } from 'hooks/useGetSpaceParam' -import { EnumMembershipRole, TypesMembershipUser, useMembershipDelete, useMembershipList } from 'services/code' -import { getErrorMessage } from 'utils/Utils' -import { LoadingSpinner } from 'components/LoadingSpinner/LoadingSpinner' -import { OptionsMenuButton } from 'components/OptionsMenuButton/OptionsMenuButton' - -import useAddNewMember from './AddNewMember/AddNewMember' - -import css from './SpaceAccessControl.module.scss' - -export const roleStringKeyMap: Record = { - contributor: 'contributor', - executor: 'executor', - reader: 'reader', - space_owner: 'owner' -} - -const SpaceAccessControl = () => { - const { getString } = useStrings() - const { showError, showSuccess } = useToaster() - const space = useGetSpaceParam() - - const { data, refetch, loading } = useMembershipList({ - space_ref: space - }) - - const { openModal } = useAddNewMember({ onClose: refetch }) - - const { mutate: deleteMembership } = useMembershipDelete({ - space_ref: space - }) - - const onConfirmAct = useConfirmAct() - const handleRemoveMember = async (userId: string) => - await onConfirmAct({ - action: async () => { - try { - await deleteMembership(userId) - refetch() - showSuccess(getString('spaceMemberships.removeMembershipToast')) - } catch (error) { - showError(getErrorMessage(error)) - } - }, - message: getString('spaceMemberships.removeMembershipMsg'), - intent: 'danger', - title: getString('spaceMemberships.removeMember') - }) - - const columns = useMemo( - () => - [ - { - Header: getString('user'), - width: '30%', - Cell: ({ row }: CellProps) => ( - - - - {row.original.principal?.display_name} - - - ) - }, - { - Header: getString('role'), - width: '40%', - Cell: ({ row }: CellProps) => { - const stringKey = row.original.role ? roleStringKeyMap[row.original.role] : undefined - - return ( - - {stringKey ? getString(stringKey) : row.original.role} - - ) - } - }, - { - Header: getString('email'), - width: '25%', - Cell: ({ row }: CellProps) => ( - - {row.original.principal?.email} - - ) - }, - { - accessor: 'action', - width: '5%', - Cell: ({ row }: CellProps) => { - return ( - handleRemoveMember(row.original.principal?.uid as string) - }, - { - text: getString('spaceMemberships.changeRole'), - onClick: () => openModal(true, row.original) - } - ]} - /> - ) - } - } - ] as Column[], - [] // eslint-disable-line react-hooks/exhaustive-deps - ) - - return ( - - - - - - - - ) : ( - <> - - {getString('spaceSetting.deleteConfirm2', { - space - })} - - { - setDeleteConfirmString(e.currentTarget.value) - }} - /> - - - )} - - ) - }, [showConfirmPage, deleteConfirmString, loading]) - - return { - openModal, - hideModal - } -} - -export default useDeleteSpaceModal diff --git a/web/src/pages/SpaceSettings/ExportForm/ExportForm.tsx b/web/src/pages/SpaceSettings/ExportForm/ExportForm.tsx deleted file mode 100644 index 83b99ca7db..0000000000 --- a/web/src/pages/SpaceSettings/ExportForm/ExportForm.tsx +++ /dev/null @@ -1,348 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import React, { useState } from 'react' -import { Intent } from '@blueprintjs/core' -import * as yup from 'yup' -import { useGet } from 'restful-react' -import { FontVariation } from '@harnessio/design-system' - -import { Color } from '@harnessio/design-system' -import { - Button, - Container, - Label, - Layout, - FlexExpander, - Formik, - FormikForm, - FormInput, - Text, - ButtonSize, - ButtonVariation -} from '@harnessio/uicore' -import { Icon } from '@harnessio/icons' -import type { TypesRepository } from 'services/code' - -import { useStrings } from 'framework/strings' -import type { ExportFormDataExtended } from 'utils/GitUtils' -import Upgrade from '../../../icons/Upgrade.svg' -import css from '../SpaceSettings.module.scss' - -interface ExportFormProps { - handleSubmit: (data: ExportFormDataExtended) => void - loading: boolean - // eslint-disable-next-line @typescript-eslint/no-explicit-any - hideModal: any - step: number - setStep: React.Dispatch> - space: string -} - -const ExportForm = (props: ExportFormProps) => { - const { handleSubmit, loading, hideModal, step, setStep, space } = props - const { getString } = useStrings() - const [auth, setAuth] = useState(false) - const formInitialValues: ExportFormDataExtended = { - accountId: '', - token: '', - organization: '', - name: '', - repoCount: 0 - } - - const validationSchemaStepOne = yup.object().shape({ - accountId: yup.string().trim().required(getString('exportSpace.accIdRequired')), - token: yup.string().trim().required(getString('exportSpace.accesstokenReq')) - }) - - const validationSchemaStepTwo = yup.object().shape({ - organization: yup.string().trim().required(getString('importSpace.orgRequired')), - name: yup.string().trim().required(getString('importSpace.spaceNameRequired')) - }) - const { data: repositories } = useGet({ - path: `/api/v1/spaces/${space}/+/repos` - }) - - return ( - - {formik => { - const handleValidationClick = async () => { - try { - if (step === 0) { - await validationSchemaStepOne.validate(formik.values, { abortEarly: false }) - setStep(1) - } else if (step === 1) { - await validationSchemaStepTwo.validate(formik.values, { abortEarly: false }) - setStep(2) - } // eslint-disable-next-line @typescript-eslint/no-explicit-any - } catch (err: any) { - formik.setErrors( - // eslint-disable-next-line @typescript-eslint/no-explicit-any - err.inner.reduce((acc: { [x: string]: any }, current: { path: string | number; message: string }) => { - acc[current.path] = current.message - return acc - }, {}) - ) - } - } - return ( - - - {step === 0 ? ( - <> - - - {formik.errors.accountId ? ( - - {formik.errors.accountId} - - ) : null} - - {formik.errors.token ? ( - - {formik.errors.token} - - ) : null} - - - ) : null} - {step === 1 ? ( - <> - - - {formik.errors.organization ? ( - - {formik.errors.organization} - - ) : null} - - - {formik.errors.name ? 
( - - {formik.errors.name} - - ) : null} - - - - - - - - - { - setAuth(!auth) - }} - disabled - padding={{ right: 'small' }} - className={css.checkbox} - /> - - { - setAuth(!auth) - }} - /> - - - - - - ) : null} - - {step === 2 && ( - <> - - - {formik.errors.organization ? ( - - {formik.errors.organization} - - ) : null} - - {formik.errors.name ? ( - - {formik.errors.name} - - ) : null} - - - {getString('exportSpace.repoToConvert', { length: repositories?.length })} - - - - - )} - -
- - - {step === 0 && ( - - - -
-
- - ) - }} - -
- - - ) -} diff --git a/web/src/pages/UserProfile/EditableTextField.tsx b/web/src/pages/UserProfile/EditableTextField.tsx deleted file mode 100644 index 5d68d0d3af..0000000000 --- a/web/src/pages/UserProfile/EditableTextField.tsx +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import React, { useState } from 'react' -import { Button, ButtonSize, ButtonVariation, Container, Layout, Text, TextInput } from '@harnessio/uicore' -import { Color, FontVariation } from '@harnessio/design-system' -import { useStrings } from 'framework/strings' -import css from './UserProfile.module.scss' - -enum ACCESS_MODES { - VIEW, - EDIT -} - -const EditableTextField = ({ onSave, value }: { value: string; onSave: (text: string) => void }) => { - const { getString } = useStrings() - const [viewMode, setViewMode] = useState(ACCESS_MODES.VIEW) - const [text, setText] = useState(value) - - return ( - - {viewMode === ACCESS_MODES.EDIT ? ( - - setText((e.target as HTMLInputElement).value)} - wrapperClassName={css.textInput} - /> - - - - - - - - } - position={Position.RIGHT} - interactionKind="click"> - - - - - - - {row.original.display_name} - - {!!row.original.triggers?.length && ( - - ({formatTriggers(row.original?.triggers).join(', ')}) - - )} - {!row.original.triggers?.length && ( - - {getString('webhookAllEventsSelected')} - - )} - - -
- ) - } - }, - { - id: 'executionStatus', - width: '15px', - Cell: ({ row }: CellProps) => { - return ( - - ) - } - }, - { - id: 'action', - width: '60px', - Cell: ({ row }: CellProps) => { - const { mutate: deleteWebhook } = useMutate({ - verb: 'DELETE', - path: `/api/v1/repos/${repoMetadata?.path}/+/webhooks/${row.original.id}` - }) - const confirmDelete = useConfirmAct() - - return ( - - { - history.push( - routes.toCODEWebhookDetails({ - repoPath: repoMetadata?.path as string, - webhookId: String(row.original?.id) - }) - ) - } - }, - { - hasIcon: true, - iconName: 'main-trash', - text: getString('delete'), - onClick: async () => { - confirmDelete({ - message: getString('confirmDeleteWebhook'), - action: async () => { - deleteWebhook({}) - .then(() => { - showSuccess(getString('webhookDeleted'), 5000) - setPage(1) - refetchWebhooks() - }) - .catch(exception => { - showError(getErrorMessage(exception), 0, 'failedToDeleteWebhook') - }) - } - }) - } - } - ]} - /> - - ) - } - } - ], - [history, getString, refetchWebhooks, repoMetadata?.path, routes, setPage, showError, showSuccess] - ) - - return ( - - - - - - {repoMetadata && ( - - { - setSearchTerm(value) - setPage(1) - }} - /> - - {!!webhooks?.length && ( - <> - - className={css.table} - hideHeaders - columns={columns} - data={webhooks} - getRowClassName={() => css.row} - onRowClick={row => { - history.push( - routes.toCODEWebhookDetails({ - repoPath: repoMetadata.path as string, - webhookId: String(row.id) - }) - ) - }} - /> - - - - )} - - webhooks?.length === 0} - forSearch={!!searchTerm} - message={getString('webhookEmpty')} - buttonText={getString('newWebhook')} - onButtonClick={() => - history.push( - routes.toCODEWebhookNew({ - repoPath: repoMetadata?.path as string - }) - ) - } - /> - - - )} - - - ) -} - -const generateLastExecutionStateIcon = ( - webhook: OpenapiWebhookType -): { icon: IconName; iconProps?: { color?: Color } } => { - let icon: IconName = 'dot' - let color: Color | undefined = undefined - - switch (webhook.latest_execution_result) { - case 'fatal_error': - icon = 'danger-icon' - break - case 'retriable_error': - icon = 'solid-error' - break - case 'success': - icon = 'success-tick' - break - default: - color = Color.GREY_250 - } - - return { icon, ...(color ? { iconProps: { color } } : undefined) } -} diff --git a/web/src/pages/Webhooks/WebhooksHeader/WebhooksHeader.module.scss b/web/src/pages/Webhooks/WebhooksHeader/WebhooksHeader.module.scss deleted file mode 100644 index d3ce9495c4..0000000000 --- a/web/src/pages/Webhooks/WebhooksHeader/WebhooksHeader.module.scss +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -.main { - padding-bottom: 0 !important; - - div[class*='TextInput'] { - margin-bottom: 0 !important; - margin-left: 0 !important; - } - - > div { - align-items: center; - } - - .input { - margin-bottom: 0 !important; - - span[data-icon], - span[icon] { - margin-top: 10px !important; - } - } -} - -.branchDropdown { - background-color: var(--white); -} diff --git a/web/src/pages/Webhooks/WebhooksHeader/WebhooksHeader.module.scss.d.ts b/web/src/pages/Webhooks/WebhooksHeader/WebhooksHeader.module.scss.d.ts deleted file mode 100644 index ab6a17504f..0000000000 --- a/web/src/pages/Webhooks/WebhooksHeader/WebhooksHeader.module.scss.d.ts +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* eslint-disable */ -// This is an auto-generated file -export declare const branchDropdown: string -export declare const input: string -export declare const main: string diff --git a/web/src/pages/Webhooks/WebhooksHeader/WebhooksHeader.tsx b/web/src/pages/Webhooks/WebhooksHeader/WebhooksHeader.tsx deleted file mode 100644 index 048fd71b14..0000000000 --- a/web/src/pages/Webhooks/WebhooksHeader/WebhooksHeader.tsx +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright 2023 Harness, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import { useHistory } from 'react-router-dom' -import React, { useState } from 'react' -import { Container, Layout, FlexExpander, ButtonVariation, Button } from '@harnessio/uicore' -import { useStrings } from 'framework/strings' -import { CodeIcon, GitInfoProps } from 'utils/GitUtils' -import { useAppContext } from 'AppContext' -import { SearchInputWithSpinner } from 'components/SearchInputWithSpinner/SearchInputWithSpinner' -import css from './WebhooksHeader.module.scss' - -interface WebhooksHeaderProps extends Pick { - loading?: boolean - onSearchTermChanged: (searchTerm: string) => void -} - -export function WebhooksHeader({ repoMetadata, loading, onSearchTermChanged }: WebhooksHeaderProps) { - const history = useHistory() - const [searchTerm, setSearchTerm] = useState('') - const { routes } = useAppContext() - const { getString } = useStrings() - - return ( - - - { - setSearchTerm(value) - onSearchTermChanged(value) - }} - /> - -