Remove sweeper #9677

Merged · 1 commit · May 4, 2022
56 changes: 0 additions & 56 deletions .werft/jobs/build/deploy-to-preview-environment.ts
@@ -83,8 +83,6 @@ export async function deployToPreviewEnvironment(werft: Werft, jobConfig: JobCon
| yq r - data['.dockerconfigjson'] \
| base64 -d)" | base64 -w 0`, { silent: true }).stdout.trim();

const sweeperImage = exec(`tar xfO /tmp/dev.tar.gz ./sweeper.txt`).stdout.trim();
@mads-hartmann (Contributor, Author) commented on May 2, 2022:
Because of this change, all jobs that run off main (which is the default due to job protection) are going to fail, as this file no longer exists for builds on this branch.

@mads-hartmann (Contributor, Author) added:
This can be worked around by running the werft job manually while instructing it to update the GitHub status, like so:

werft run github -aupdateGitHubStatus=gitpod-io/gitpod

This is what I did here: https://werft.gitpod-dev.com/job/gitpod-build-mads-remove-sweeper.11


const deploymentConfig: DeploymentConfig = {
version,
destname,
@@ -94,7 +92,6 @@ export async function deployToPreviewEnvironment(werft: Werft, jobConfig: JobCon
url,
analytics,
cleanSlateDeployment,
sweeperImage,
installEELicense,
imagePullAuth,
withPayment,
@@ -358,34 +355,6 @@ async function deployToDevWithInstaller(werft: Werft, jobConfig: JobConfig, depl
}
addAgentSmithToken(werft, deploymentConfig.namespace, installer.options.kubeconfigPath, tokenHash)

// TODO: Fix sweeper, it does not appear to be doing clean-up
werft.log('sweeper', 'installing Sweeper');
const sweeperVersion = deploymentConfig.sweeperImage.split(":")[1];
werft.log('sweeper', `Sweeper version: ${sweeperVersion}`);

// prepare args
const args = {
"period": "10m",
"timeout": "48h", // period of inactivity that triggers a removal
branch: jobConfig.repository.branch, // the branch to check for deletion
owner: jobConfig.repository.owner,
repo: jobConfig.repository.repo,
};
const argsStr = Object.entries(args).map(([k, v]) => `\"--${k}\", \"${v}\"`).join(", ");
const allArgsStr = `--set args="{${argsStr}}" --set githubToken.secret=github-sweeper-read-branches --set githubToken.key=token`;

// TODO: Implement sweeper logic for VMs in Harvester
if (!withVM) {
// copy GH token into namespace
exec(`kubectl --kubeconfig ${CORE_DEV_KUBECONFIG_PATH} --namespace werft get secret github-sweeper-read-branches -o yaml \
| yq w - metadata.namespace ${namespace} \
| yq d - metadata.uid \
| yq d - metadata.resourceVersion \
| yq d - metadata.creationTimestamp \
| kubectl --kubeconfig ${CORE_DEV_KUBECONFIG_PATH} apply -f -`);
exec(`/usr/local/bin/helm3 --kubeconfig ${CORE_DEV_KUBECONFIG_PATH} upgrade --install --set image.version=${sweeperVersion} --set command="werft run github -a namespace=${namespace} --remote-job-path .werft/wipe-devstaging.yaml github.com/gitpod-io/gitpod:main" ${allArgsStr} sweeper ./dev/charts/sweeper`);
}

werft.done(phases.DEPLOY);

async function cleanStateEnv(kubeconfig: string, shellOpts: ExecOptions) {
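
For context, once the template variables were filled in, the removed installer-path code above produced a helm invocation roughly like the following (reconstructed by hand from the removed lines, with quoting simplified; <sweeper-version> and the namespace/branch/owner/repo values are illustrative placeholders):

# Sketch: expanded form of the removed sweeper install; placeholder values are illustrative.
/usr/local/bin/helm3 --kubeconfig $CORE_DEV_KUBECONFIG_PATH upgrade --install \
  --set image.version=<sweeper-version> \
  --set command="werft run github -a namespace=staging-my-branch --remote-job-path .werft/wipe-devstaging.yaml github.com/gitpod-io/gitpod:main" \
  --set args="{\"--period\", \"10m\", \"--timeout\", \"48h\", \"--branch\", \"my-branch\", \"--owner\", \"gitpod-io\", \"--repo\", \"gitpod\"}" \
  --set githubToken.secret=github-sweeper-read-branches \
  --set githubToken.key=token \
  sweeper ./dev/charts/sweeper

That is, the sweeper polled every 10m and, once the branch had seen 48h of inactivity, ran the wipe-devstaging job to tear the preview environment down.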
@@ -561,30 +530,6 @@ async function deployToDevWithHelm(werft: Werft, jobConfig: JobConfig, deploymen

exec(`helm dependencies up`);
exec(`/usr/local/bin/helm3 --kubeconfig ${CORE_DEV_KUBECONFIG_PATH} upgrade --install --timeout 10m -f ../.werft/jobs/build/helm/${nodeAffinityValues[nodepoolIndex]} -f ../.werft/jobs/build/helm/values.dev.yaml ${flags} ${helmInstallName} .`);

werft.log('helm', 'installing Sweeper');
const sweeperVersion = deploymentConfig.sweeperImage.split(":")[1];
werft.log('helm', `Sweeper version: ${sweeperVersion}`);

// prepare args
const args = {
"period": "10m",
"timeout": "48h", // period of inactivity that triggers a removal
branch: jobConfig.repository.branch, // the branch to check for deletion
owner: jobConfig.repository.owner,
repo: jobConfig.repository.repo,
};
const argsStr = Object.entries(args).map(([k, v]) => `\"--${k}\", \"${v}\"`).join(", ");
const allArgsStr = `--set args="{${argsStr}}" --set githubToken.secret=github-sweeper-read-branches --set githubToken.key=token`;

// copy GH token into namespace
exec(`kubectl --kubeconfig ${CORE_DEV_KUBECONFIG_PATH} --namespace werft get secret github-sweeper-read-branches -o yaml \
| yq w - metadata.namespace ${namespace} \
| yq d - metadata.uid \
| yq d - metadata.resourceVersion \
| yq d - metadata.creationTimestamp \
| kubectl --kubeconfig ${CORE_DEV_KUBECONFIG_PATH} apply -f -`);
exec(`/usr/local/bin/helm3 --kubeconfig ${CORE_DEV_KUBECONFIG_PATH} upgrade --install --set image.version=${sweeperVersion} --set command="werft run github -a namespace=${namespace} --remote-job-path .werft/wipe-devstaging.yaml github.com/gitpod-io/gitpod:main" ${allArgsStr} sweeper ../dev/charts/sweeper`);
}

function addDeploymentFlags() {
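
A note on the secret-copy pipeline that appears in both removed blocks: it used yq v3 syntax (yq w to rewrite the namespace, yq d to drop namespace-bound metadata) so the secret could be re-applied in the preview namespace. A minimal equivalent in yq v4 syntax would look roughly like this (a sketch; the target namespace is an illustrative placeholder):

# Sketch: copy a secret across namespaces, stripping server-assigned metadata (yq v4 syntax).
kubectl --kubeconfig $CORE_DEV_KUBECONFIG_PATH --namespace werft get secret github-sweeper-read-branches -o yaml \
  | yq e '.metadata.namespace = "staging-my-branch" | del(.metadata.uid) | del(.metadata.resourceVersion) | del(.metadata.creationTimestamp)' - \
  | kubectl --kubeconfig $CORE_DEV_KUBECONFIG_PATH apply -f -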
@@ -684,7 +629,6 @@ interface DeploymentConfig {
url: string;
analytics?: string;
cleanSlateDeployment: boolean;
sweeperImage: string;
installEELicense: boolean;
imagePullAuth: string;
withPayment: boolean;
2 changes: 0 additions & 2 deletions .werft/wipe-devstaging.ts
@@ -31,8 +31,6 @@ async function devCleanup() {
await wipePreviewCluster(env(""))
}

// sweeper runs in the dev cluster so we need to delete the k3s cluster first and then delete self contained namespace

Tracing.initialize()
.then(() => {
werft = new Werft("wipe-devstaging")
5 changes: 0 additions & 5 deletions chart/templates/db-deny-all-allow-explicit-networkpolicy.yaml
@@ -52,9 +52,4 @@ spec:
matchLabels:
app: {{ template "gitpod.fullname" . }}
component: postman
{{ if eq .Values.installation.stage "devstaging" -}}
- podSelector:
matchLabels:
app: sweeper
{{- end -}}
{{- end -}}
3 changes: 0 additions & 3 deletions codecov.yml
@@ -98,9 +98,6 @@ flags:
dev-poolkeeper-app:
paths:
- dev/poolkeeper/
dev-sweeper-app:
paths:
- dev/sweeper/
dev-version-manifest-app:
paths:
- dev/version-manifest/
4 changes: 0 additions & 4 deletions dev/BUILD.yaml
@@ -4,11 +4,7 @@ packages:
deps:
- dev/image:docker
- dev/poolkeeper:docker
- dev/sweeper:docker
- :dev-utils
config:
commands:
- ["sh", "-c", "tail -n1 dev-sweeper--docker/imgnames.txt > sweeper.txt"]
- name: all-app
type: generic
deps:
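
For context, the removed command above was the producer side of a handoff consumed in the first file of this diff: the build wrote the freshly built sweeper image name into sweeper.txt inside the dev package tarball, and the deploy job read it back with

const sweeperImage = exec(`tar xfO /tmp/dev.tar.gz ./sweeper.txt`).stdout.trim();

Removing both ends deletes the handoff entirely, which is why jobs that run the main version of the deploy code against this branch fail: the tarball no longer contains sweeper.txt (see the review thread at the top of this diff).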
22 changes: 0 additions & 22 deletions dev/charts/sweeper/.helmignore

This file was deleted.

21 changes: 0 additions & 21 deletions dev/charts/sweeper/Chart.yaml

This file was deleted.

65 changes: 0 additions & 65 deletions dev/charts/sweeper/templates/_helpers.tpl

This file was deleted.

76 changes: 0 additions & 76 deletions dev/charts/sweeper/templates/deployment.yaml

This file was deleted.

37 changes: 0 additions & 37 deletions dev/charts/sweeper/values.yaml

This file was deleted.

23 changes: 0 additions & 23 deletions dev/sweeper/BUILD.yaml

This file was deleted.
