chore: Remove obsolete CI/CD and configuration files

This commit is contained in:
ruv
2026-02-28 14:35:45 -05:00
parent 696a72625f
commit 6e4cb0ad5b
3 changed files with 0 additions and 730 deletions

View File

@@ -1,347 +0,0 @@
---
# GitLab CI/CD Pipeline for WiFi-DensePose
# This pipeline provides an alternative to GitHub Actions for GitLab users
#
# Stage flow: validate -> test -> security -> build -> deploy-staging
# -> deploy-production -> monitor. Deploys are manual gates on the
# default branch / tags; monitoring and cleanup are allow_failure.

stages:
  - validate
  - test
  - security
  - build
  - deploy-staging
  - deploy-production
  - monitor

variables:
  DOCKER_DRIVER: overlay2
  DOCKER_TLS_CERTDIR: "/certs"
  REGISTRY: $CI_REGISTRY
  IMAGE_NAME: $CI_REGISTRY_IMAGE
  # Quoted so the version is a string, not the float 3.11
  PYTHON_VERSION: "3.11"
  KUBECONFIG: /tmp/kubeconfig

# Global before_script.
# NOTE(review): jobs that define their own before_script REPLACE this one,
# so IMAGE_TAG is only exported in jobs without a local before_script.
before_script:
  - echo "Pipeline started for $CI_COMMIT_REF_NAME"
  - export IMAGE_TAG=${CI_COMMIT_SHA:0:8}

# Code Quality and Validation
code-quality:
  stage: validate
  image: python:$PYTHON_VERSION
  before_script:
    - pip install --upgrade pip
    - pip install -r requirements.txt
    - pip install black flake8 mypy bandit safety
  script:
    - echo "Running code quality checks..."
    - black --check --diff src/ tests/
    - flake8 src/ tests/ --max-line-length=88 --extend-ignore=E203,W503
    - mypy src/ --ignore-missing-imports
    # Scanner failures are advisory here; reports are kept as artifacts.
    - bandit -r src/ -f json -o bandit-report.json || true
    - safety check --json --output safety-report.json || true
  artifacts:
    # bandit's JSON output is not JUnit XML, so it must not be declared as a
    # reports:junit artifact — keep the raw reports as plain artifacts instead.
    paths:
      - bandit-report.json
      - safety-report.json
    expire_in: 1 week
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH

# Unit Tests
unit-tests:
  stage: test
  image: python:$PYTHON_VERSION
  services:
    - postgres:15
    - redis:7
  variables:
    POSTGRES_DB: test_wifi_densepose
    POSTGRES_USER: postgres
    POSTGRES_PASSWORD: postgres
    DATABASE_URL: postgresql://postgres:postgres@postgres:5432/test_wifi_densepose
    REDIS_URL: redis://redis:6379/0
    ENVIRONMENT: test
  before_script:
    - pip install --upgrade pip
    - pip install -r requirements.txt
    - pip install pytest-cov pytest-xdist
  script:
    - echo "Running unit tests..."
    - pytest tests/unit/ -v --cov=src --cov-report=xml --cov-report=html --junitxml=junit.xml
  coverage: '/TOTAL.*\s+(\d+%)$/'
  artifacts:
    reports:
      junit: junit.xml
      coverage_report:
        coverage_format: cobertura
        path: coverage.xml
    paths:
      - htmlcov/
    expire_in: 1 week
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH

# Integration Tests
integration-tests:
  stage: test
  image: python:$PYTHON_VERSION
  services:
    - postgres:15
    - redis:7
  variables:
    POSTGRES_DB: test_wifi_densepose
    POSTGRES_USER: postgres
    POSTGRES_PASSWORD: postgres
    DATABASE_URL: postgresql://postgres:postgres@postgres:5432/test_wifi_densepose
    REDIS_URL: redis://redis:6379/0
    ENVIRONMENT: test
  before_script:
    - pip install --upgrade pip
    - pip install -r requirements.txt
    - pip install pytest
  script:
    - echo "Running integration tests..."
    - pytest tests/integration/ -v --junitxml=integration-junit.xml
  artifacts:
    reports:
      junit: integration-junit.xml
    expire_in: 1 week
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH

# Security Scanning
security-scan:
  stage: security
  image: python:$PYTHON_VERSION
  before_script:
    - pip install --upgrade pip
    - pip install -r requirements.txt
    - pip install bandit semgrep safety
  script:
    - echo "Running security scans..."
    - bandit -r src/ -f sarif -o bandit-results.sarif || true
    - semgrep --config=p/security-audit --config=p/secrets --config=p/python --sarif --output=semgrep.sarif src/ || true
    - safety check --json --output safety-report.json || true
  artifacts:
    # NOTE(review): GitLab's sast report historically expects its own JSON
    # schema; SARIF ingestion depends on GitLab version — confirm before
    # relying on these appearing in the Security dashboard.
    reports:
      sast:
        - bandit-results.sarif
        - semgrep.sarif
    paths:
      - safety-report.json
    expire_in: 1 week
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH

# Container Security Scan
container-security:
  stage: security
  image: docker:latest
  services:
    - docker:dind
  before_script:
    - docker info
    - echo $CI_REGISTRY_PASSWORD | docker login -u $CI_REGISTRY_USER --password-stdin $CI_REGISTRY
  script:
    - echo "Building and scanning container..."
    # NOTE(review): IMAGE_TAG relies on the global before_script, which this
    # job's local before_script overrides — confirm IMAGE_TAG is set here.
    - docker build -t $IMAGE_NAME:$IMAGE_TAG .
    - docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -v $PWD:/tmp/.cache/ aquasec/trivy:latest image --format sarif --output /tmp/.cache/trivy-results.sarif $IMAGE_NAME:$IMAGE_TAG || true
  artifacts:
    # NOTE(review): the container_scanning report expects GitLab's JSON
    # schema; trivy can emit it via its GitLab template — verify format.
    reports:
      container_scanning: trivy-results.sarif
    expire_in: 1 week
  rules:
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"

# Build and Push Docker Image
build-image:
  stage: build
  image: docker:latest
  services:
    - docker:dind
  before_script:
    - docker info
    - echo $CI_REGISTRY_PASSWORD | docker login -u $CI_REGISTRY_USER --password-stdin $CI_REGISTRY
  script:
    - echo "Building Docker image..."
    - docker build --target production -t $IMAGE_NAME:$IMAGE_TAG -t $IMAGE_NAME:latest .
    - docker push $IMAGE_NAME:$IMAGE_TAG
    - docker push $IMAGE_NAME:latest
    - echo "Image pushed: $IMAGE_NAME:$IMAGE_TAG"
  rules:
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
    - if: $CI_COMMIT_TAG

# Deploy to Staging
deploy-staging:
  stage: deploy-staging
  image: bitnami/kubectl:latest
  environment:
    name: staging
    url: https://staging.wifi-densepose.com
  before_script:
    - echo "$KUBE_CONFIG_STAGING" | base64 -d > $KUBECONFIG
    - kubectl config view
  script:
    - echo "Deploying to staging environment..."
    - kubectl set image deployment/wifi-densepose wifi-densepose=$IMAGE_NAME:$IMAGE_TAG -n wifi-densepose-staging
    - kubectl rollout status deployment/wifi-densepose -n wifi-densepose-staging --timeout=600s
    - kubectl get pods -n wifi-densepose-staging -l app=wifi-densepose
    - echo "Staging deployment completed"
  # NOTE(review): after_script exit status does not fail the job on modern
  # GitLab — move this health check into script if it must gate the deploy.
  after_script:
    - sleep 30
    - curl -f https://staging.wifi-densepose.com/health || exit 1
  rules:
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
      when: manual
      allow_failure: false

# Deploy to Production
deploy-production:
  stage: deploy-production
  image: bitnami/kubectl:latest
  environment:
    name: production
    url: https://wifi-densepose.com
  before_script:
    - echo "$KUBE_CONFIG_PRODUCTION" | base64 -d > $KUBECONFIG
    - kubectl config view
  script:
    - echo "Deploying to production environment..."
    # Backup current deployment
    - kubectl get deployment wifi-densepose -n wifi-densepose -o yaml > backup-deployment.yaml
    # Blue-Green Deployment
    - kubectl patch deployment wifi-densepose -n wifi-densepose -p '{"spec":{"template":{"metadata":{"labels":{"version":"green"}}}}}'
    - kubectl set image deployment/wifi-densepose wifi-densepose=$IMAGE_NAME:$IMAGE_TAG -n wifi-densepose
    - kubectl rollout status deployment/wifi-densepose -n wifi-densepose --timeout=600s
    - kubectl wait --for=condition=ready pod -l app=wifi-densepose,version=green -n wifi-densepose --timeout=300s
    # Switch traffic
    - kubectl patch service wifi-densepose-service -n wifi-densepose -p '{"spec":{"selector":{"version":"green"}}}'
    - echo "Production deployment completed"
  after_script:
    - sleep 30
    - curl -f https://wifi-densepose.com/health || exit 1
  artifacts:
    paths:
      - backup-deployment.yaml
    expire_in: 1 week
  rules:
    - if: $CI_COMMIT_TAG
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
      when: manual
      allow_failure: false

# Post-deployment Monitoring
monitor-deployment:
  stage: monitor
  image: curlimages/curl:latest
  script:
    - echo "Monitoring deployment health..."
    - |
      if [ "$CI_ENVIRONMENT_NAME" = "production" ]; then
        BASE_URL="https://wifi-densepose.com"
      else
        BASE_URL="https://staging.wifi-densepose.com"
      fi
    - |
      for i in $(seq 1 10); do
        echo "Health check $i/10"
        curl -f $BASE_URL/health || exit 1
        curl -f $BASE_URL/api/v1/status || exit 1
        sleep 30
      done
    - echo "Monitoring completed successfully"
  rules:
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
      when: on_success
    - if: $CI_COMMIT_TAG
      when: on_success
  allow_failure: true

# Rollback Job (Manual)
rollback:
  stage: deploy-production
  image: bitnami/kubectl:latest
  environment:
    name: production
    url: https://wifi-densepose.com
  before_script:
    - echo "$KUBE_CONFIG_PRODUCTION" | base64 -d > $KUBECONFIG
  script:
    - echo "Rolling back deployment..."
    - kubectl rollout undo deployment/wifi-densepose -n wifi-densepose
    - kubectl rollout status deployment/wifi-densepose -n wifi-densepose --timeout=600s
    - kubectl get pods -n wifi-densepose -l app=wifi-densepose
    - echo "Rollback completed"
  rules:
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
      when: manual
      allow_failure: false

# Cleanup old images
cleanup:
  stage: monitor
  image: docker:latest
  services:
    - docker:dind
  before_script:
    - echo $CI_REGISTRY_PASSWORD | docker login -u $CI_REGISTRY_USER --password-stdin $CI_REGISTRY
  script:
    - echo "Cleaning up old images..."
    - |
      # Keep only the last 10 images
      IMAGES_TO_DELETE=$(docker images $IMAGE_NAME --format "table {{.Tag}}" | tail -n +2 | tail -n +11)
      for tag in $IMAGES_TO_DELETE; do
        if [ "$tag" != "latest" ] && [ "$tag" != "$IMAGE_TAG" ]; then
          echo "Deleting image: $IMAGE_NAME:$tag"
          docker rmi $IMAGE_NAME:$tag || true
        fi
      done
  rules:
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
      when: on_success
  allow_failure: true

# Notification
notify-success:
  stage: monitor
  image: curlimages/curl:latest
  script:
    - |
      if [ -n "$SLACK_WEBHOOK_URL" ]; then
        curl -X POST -H 'Content-type: application/json' \
          --data "{\"text\":\"✅ Pipeline succeeded for $CI_PROJECT_NAME on $CI_COMMIT_REF_NAME\"}" \
          $SLACK_WEBHOOK_URL
      fi
  rules:
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
      when: on_success
  allow_failure: true

notify-failure:
  stage: monitor
  image: curlimages/curl:latest
  script:
    - |
      if [ -n "$SLACK_WEBHOOK_URL" ]; then
        curl -X POST -H 'Content-type: application/json' \
          --data "{\"text\":\"❌ Pipeline failed for $CI_PROJECT_NAME on $CI_COMMIT_REF_NAME\"}" \
          $SLACK_WEBHOOK_URL
      fi
  rules:
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
      when: on_failure
  allow_failure: true

# Include additional pipeline configurations
# NOTE(review): the License-Scanning template was removed in newer GitLab
# releases — verify availability on the target instance.
include:
  - template: Security/SAST.gitlab-ci.yml
  - template: Security/Container-Scanning.gitlab-ci.yml
  - template: Security/Dependency-Scanning.gitlab-ci.yml
  - template: Security/License-Scanning.gitlab-ci.yml

View File

@@ -1,271 +0,0 @@
# WiFi-DensePose Package Manifest
# This file specifies which files to include in the source distribution
#
# NOTE: setuptools processes these commands IN ORDER — a later exclude
# silently removes files added by an earlier include. Keep the broad
# exclude section after the includes, and re-include anything important
# afterwards (see the re-include section near the bottom).

# Include essential project files
include README.md
include LICENSE
include CHANGELOG.md
include pyproject.toml
include setup.py
include requirements.txt
include requirements-dev.txt
# Include configuration files
include *.cfg
include *.ini
include *.yaml
include *.yml
include *.toml
include .env.example
# Include documentation
recursive-include docs *
include docs/Makefile
include docs/make.bat
# Include source code
recursive-include src *.py
recursive-include src *.pyx
recursive-include src *.pxd
# Include configuration and data files
recursive-include src *.yaml
recursive-include src *.yml
recursive-include src *.json
recursive-include src *.toml
recursive-include src *.cfg
recursive-include src *.ini
# Include model files
recursive-include src/models *.pth
recursive-include src/models *.onnx
recursive-include src/models *.pt
recursive-include src/models *.pkl
recursive-include src/models *.joblib
# Include database migrations
recursive-include src/database/migrations *.py
recursive-include src/database/migrations *.sql
# Include templates and static files
recursive-include src/templates *.html
recursive-include src/templates *.jinja2
recursive-include src/static *.css
recursive-include src/static *.js
recursive-include src/static *.png
recursive-include src/static *.jpg
recursive-include src/static *.svg
recursive-include src/static *.ico
# Include test files
recursive-include tests *.py
recursive-include tests *.yaml
recursive-include tests *.yml
recursive-include tests *.json
# Include test data
recursive-include tests/data *
recursive-include tests/fixtures *
# Include scripts
recursive-include scripts *.py
recursive-include scripts *.sh
recursive-include scripts *.bat
recursive-include scripts *.ps1
# Include deployment files
include Dockerfile
include docker-compose.yml
include docker-compose.*.yml
recursive-include k8s *.yaml
recursive-include k8s *.yml
recursive-include terraform *.tf
recursive-include terraform *.tfvars
recursive-include ansible *.yml
recursive-include ansible *.yaml
# Include monitoring and logging configurations
recursive-include monitoring *.yml
recursive-include monitoring *.yaml
recursive-include monitoring *.json
recursive-include logging *.yml
recursive-include logging *.yaml
recursive-include logging *.json
# Include CI/CD configurations
include .github/workflows/*.yml
include .github/workflows/*.yaml
include .gitlab-ci.yml
include .travis.yml
include .circleci/config.yml
include azure-pipelines.yml
include Jenkinsfile
# Include development tools configuration
include .pre-commit-config.yaml
include .gitignore
include .gitattributes
include .editorconfig
include .flake8
include .isort.cfg
include .mypy.ini
include .bandit
include .safety-policy.json
# Include package metadata
include PKG-INFO
include *.egg-info/*
# Include version and build information
include VERSION
include BUILD_INFO
# Exclude unnecessary files
global-exclude *.pyc
global-exclude *.pyo
global-exclude *.pyd
global-exclude __pycache__
global-exclude .DS_Store
# Drop the .git directory itself, but NOT via "global-exclude .git*",
# which would also strip the explicitly included .gitignore/.gitattributes
# (directives are order-dependent, and that exclude ran after the includes).
prune .git
global-exclude *.so
global-exclude *.dylib
global-exclude *.dll
# Exclude development and temporary files
# NOTE(review): global-exclude matches file names; bare directory names
# (e.g. ".pytest_cache", "build") may not remove a directory's contents —
# consider "prune <dir>" for each if stray files show up in the sdist.
global-exclude .pytest_cache
global-exclude .mypy_cache
global-exclude .coverage
global-exclude htmlcov
global-exclude .tox
global-exclude .venv
global-exclude venv
global-exclude env
global-exclude .env
global-exclude node_modules
global-exclude npm-debug.log*
global-exclude yarn-debug.log*
global-exclude yarn-error.log*
# Exclude IDE files
global-exclude .vscode
global-exclude .idea
global-exclude *.swp
global-exclude *.swo
global-exclude *~
# Exclude build artifacts
global-exclude build
global-exclude dist
global-exclude *.egg-info
global-exclude .eggs
# Exclude log files
global-exclude *.log
global-exclude logs
# Exclude backup files
global-exclude *.bak
global-exclude *.backup
global-exclude *.orig
# Exclude OS-specific files
global-exclude Thumbs.db
global-exclude desktop.ini
# Exclude sensitive files
global-exclude .env.local
global-exclude .env.production
global-exclude secrets.yaml
global-exclude secrets.yml
global-exclude private_key*
global-exclude *.pem
global-exclude *.key
# Exclude large data files (should be downloaded separately)
global-exclude *.h5
global-exclude *.hdf5
global-exclude *.npz
global-exclude *.tar.gz
global-exclude *.zip
global-exclude *.rar
# Exclude compiled extensions
global-exclude *.c
global-exclude *.cpp
global-exclude *.o
global-exclude *.obj
# Include specific important files that might be excluded by global patterns
include src/models/README.md
include tests/data/README.md
include docs/assets/README.md
# Include license files in subdirectories
recursive-include * LICENSE*
recursive-include * COPYING*
# Include changelog and version files
recursive-include * CHANGELOG*
recursive-include * HISTORY*
recursive-include * NEWS*
recursive-include * VERSION*
# Include requirements files
include requirements*.txt
include constraints*.txt
include environment*.yml
include Pipfile
include Pipfile.lock
include poetry.lock
# Include makefile and build scripts
include Makefile
include makefile
include build.sh
include build.bat
include install.sh
include install.bat
# Include package configuration for different package managers
include setup.cfg
include tox.ini
include noxfile.py
include conftest.py
# Include security and compliance files
include SECURITY.md
include CODE_OF_CONDUCT.md
include CONTRIBUTING.md
include SUPPORT.md
# Include API documentation
recursive-include docs/api *.md
recursive-include docs/api *.rst
recursive-include docs/api *.yaml
recursive-include docs/api *.yml
recursive-include docs/api *.json
# Include example configurations
recursive-include examples *.py
recursive-include examples *.yaml
recursive-include examples *.yml
recursive-include examples *.json
recursive-include examples *.md
# Include schema files
recursive-include src/schemas *.json
recursive-include src/schemas *.yaml
recursive-include src/schemas *.yml
recursive-include src/schemas *.xsd
# Include localization files
recursive-include src/locales *.po
recursive-include src/locales *.pot
recursive-include src/locales *.mo
# Include font and asset files
recursive-include src/assets *.ttf
recursive-include src/assets *.otf
recursive-include src/assets *.woff
recursive-include src/assets *.woff2
recursive-include src/assets *.eot

View File

@@ -1,112 +0,0 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = src/database/migrations
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =
# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version number format
# Percent signs must be doubled: this file is read with an
# interpolation-enabled ConfigParser, so a bare %04d would raise an
# InterpolationSyntaxError (compare the escaped file_template above).
# NOTE(review): "version_num_format" is not a standard Alembic option —
# confirm it is consumed by project tooling, otherwise drop it.
version_num_format = %%04d
# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses
# os.pathsep. If this key is omitted entirely, it falls back to the legacy
# behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os
# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

# Local SQLite fallback; production deployments should override this
# (e.g. via env.py reading DATABASE_URL) rather than editing it here.
sqlalchemy.url = sqlite:///./data/wifi_densepose_fallback.db

[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME
# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = --fix REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
# fileConfig uses a raw (non-interpolating) parser, so these %()s
# logging placeholders are intentionally NOT doubled.
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S