Merge pull request 'CI: run pytest and coverage tests inside ephemeral VM in the "builder" VM (nested)' (#103) from ci-vm-for-pytest into master
continuous-integration/drone/push Build is passing

Reviewed-on: #103
Reviewed-by: houkime <houkime@protonmail.com>
pull/107/head
houkime 2024-03-18 12:07:54 +02:00
commit 962e8d5ca7
6 changed files with 17 additions and 25 deletions

View File

@@ -5,18 +5,11 @@ name: default
 steps:
 - name: Run Tests and Generate Coverage Report
   commands:
-  - kill $(ps aux | grep 'redis-server 127.0.0.1:6389' | awk '{print $2}') || true
-  - redis-server --bind 127.0.0.1 --port 6389 >/dev/null &
-  # We do not care about persistance on CI
-  - sleep 10
-  - redis-cli -h 127.0.0.1 -p 6389 config set stop-writes-on-bgsave-error no
-  - coverage run -m pytest -q
-  - coverage xml
+  - nix flake check -L
   - sonar-scanner -Dsonar.projectKey=SelfPrivacy-REST-API -Dsonar.sources=. -Dsonar.host.url=http://analyzer.lan:9000 -Dsonar.login="$SONARQUBE_TOKEN"
   environment:
     SONARQUBE_TOKEN:
       from_secret: SONARQUBE_TOKEN
-    USE_REDIS_PORT: 6389
 - name: Run Bandit Checks
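Note on the step above: the hand-rolled CI commands (spawning a throwaway redis-server on port 6389, waiting for it, then running coverage and pytest) collapse into a single nix flake check -L, which builds and runs every check defined in the repository's flake and streams build logs (-L). Per the PR title, pytest and Redis now run inside an ephemeral NixOS VM, so the USE_REDIS_PORT override for the host-side tests is no longer needed. A purely hypothetical sketch of how a test fixture might have consumed that variable before this change (fixture name, default port, and flags are illustrative assumptions, not the repository's code):

# Hypothetical sketch only: a pytest fixture reading the removed
# USE_REDIS_PORT variable to reach the CI-managed Redis instance.
import os

import pytest
import redis


@pytest.fixture
def redis_connection():
    # Fall back to the stock Redis port when the override is unset.
    port = int(os.environ.get("USE_REDIS_PORT", "6379"))
    return redis.Redis(host="127.0.0.1", port=port, decode_responses=True)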

View File

@@ -135,7 +135,6 @@
   services.redis.servers.sp-api = {
     enable = true;
     save = [ ];
-    port = 6379; # FIXME
     settings.notify-keyspace-events = "KEA";
   };
   environment.systemPackages = with pkgs; [
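Dropping port = 6379; # FIXME leaves the sp-api Redis instance at the NixOS module's default, which for named Redis servers means no TCP listener; clients then reach it over the instance's unix socket. A hedged sketch of a client connection under that assumption (the socket path follows the usual /run/redis-<name>/redis.sock convention and is not taken from this diff):

# Assumption: the sp-api Redis instance is reachable only via its unix socket
# at the conventional nixpkgs path for named servers.
import redis

r = redis.Redis(
    unix_socket_path="/run/redis-sp-api/redis.sock",
    decode_responses=True,
)
r.ping()  # raises redis.ConnectionError if the socket is not available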

View File

@@ -30,7 +30,7 @@ class RedisTokensRepository(AbstractTokensRepository):
     @staticmethod
     def token_key_for_device(device_name: str):
-        md5_hash = md5()
+        md5_hash = md5(usedforsecurity=False)
         md5_hash.update(bytes(device_name, "utf-8"))
         digest = md5_hash.hexdigest()
         return TOKENS_PREFIX + digest
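usedforsecurity=False (available on hashlib constructors since Python 3.9) declares that MD5 is only used here to derive a stable Redis key, not as a cryptographic primitive, which keeps FIPS-restricted builds working and stops scanners such as the Bandit step above from flagging it. A self-contained sketch of the derivation; the TOKENS_PREFIX value shown is an assumed placeholder, not necessarily the repository's:

# Standalone sketch of token_key_for_device(); TOKENS_PREFIX value is assumed.
from hashlib import md5

TOKENS_PREFIX = "token_repo:"  # placeholder prefix for illustration


def token_key_for_device(device_name: str) -> str:
    # Non-cryptographic use: MD5 only maps a device name to a fixed-length key.
    digest = md5(device_name.encode("utf-8"), usedforsecurity=False).hexdigest()
    return TOKENS_PREFIX + digest


print(token_key_for_device("my-laptop"))  # -> token_repo:<32 hex chars>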

View File

@@ -7,16 +7,16 @@ RECOVERY_KEY_VALIDATION_DATETIME = "selfprivacy_api.models.tokens.time.datetime"
 DEVICE_KEY_VALIDATION_DATETIME = RECOVERY_KEY_VALIDATION_DATETIME


-def ten_minutes_into_future_naive():
-    return datetime.now() + timedelta(minutes=10)
+def ten_hours_into_future_naive():
+    return datetime.now() + timedelta(hours=10)


-def ten_minutes_into_future_naive_utc():
-    return datetime.utcnow() + timedelta(minutes=10)
+def ten_hours_into_future_naive_utc():
+    return datetime.utcnow() + timedelta(hours=10)


-def ten_minutes_into_future():
-    return datetime.now(timezone.utc) + timedelta(minutes=10)
+def ten_hours_into_future():
+    return datetime.now(timezone.utc) + timedelta(hours=10)


 def ten_minutes_into_past_naive():
@@ -34,11 +34,11 @@ def ten_minutes_into_past():
 class NearFuture(datetime):
     @classmethod
     def now(cls, tz=None):
-        return datetime.now(tz) + timedelta(minutes=13)
+        return datetime.now(tz) + timedelta(hours=13)

     @classmethod
     def utcnow(cls):
-        return datetime.utcnow() + timedelta(minutes=13)
+        return datetime.utcnow() + timedelta(hours=13)


 def read_json(file_path):
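The helpers move from 10-minute to 10-hour offsets and NearFuture jumps 13 hours instead of 13 minutes, presumably so the expiry window comfortably exceeds any slowness of the nested CI VM (that reading is an inference, not stated in the diff). The pattern itself: a test creates a key that expires 10 hours out, then patches the datetime name inside the token model (the dotted path in RECOVERY_KEY_VALIDATION_DATETIME) with NearFuture, whose now()/utcnow() report a time 13 hours ahead, so the key is already expired without sleeping. A minimal, single-module sketch of that pattern (it patches its own module rather than selfprivacy_api.models.tokens.time, hence the _real_datetime handle):

# Self-contained demo of the NearFuture timewarp; in the real tests the patch
# target is RECOVERY_KEY_VALIDATION_DATETIME.
from datetime import datetime, timedelta
from unittest import mock

_real_datetime = datetime  # keep a handle so NearFuture does not recurse here


class NearFuture(datetime):
    @classmethod
    def now(cls, tz=None):
        return _real_datetime.now(tz) + timedelta(hours=13)


def is_expired(expires_at) -> bool:
    # Stand-in for the expiration check inside the token model.
    return datetime.now() >= expires_at


expires_at = datetime.now() + timedelta(hours=10)  # like ten_hours_into_future_naive()
assert not is_expired(expires_at)

with mock.patch(f"{__name__}.datetime", NearFuture):
    assert is_expired(expires_at)  # 13 "hours later", the 10-hour key has lapsed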

View File

@@ -14,9 +14,9 @@ from tests.common import (
 )

 # Graphql API's output should be timezone-naive
-from tests.common import ten_minutes_into_future_naive_utc as ten_minutes_into_future
-from tests.common import ten_minutes_into_future as ten_minutes_into_future_tz
-from tests.common import ten_minutes_into_past_naive_utc as ten_minutes_into_past
+from tests.common import ten_hours_into_future_naive_utc as ten_hours_into_future
+from tests.common import ten_hours_into_future as ten_hours_into_future_tz
+from tests.common import ten_minutes_into_past_naive_utc as ten_hours_into_past

 from tests.test_graphql.common import (
     assert_empty,
@@ -168,7 +168,7 @@ def test_graphql_generate_recovery_key(client, authorized_client):
 @pytest.mark.parametrize(
-    "expiration_date", [ten_minutes_into_future(), ten_minutes_into_future_tz()]
+    "expiration_date", [ten_hours_into_future(), ten_hours_into_future_tz()]
 )
 def test_graphql_generate_recovery_key_with_expiration_date(
     client, authorized_client, expiration_date: datetime
@@ -193,7 +193,7 @@ def test_graphql_generate_recovery_key_with_expiration_date(
 def test_graphql_use_recovery_key_after_expiration(client, authorized_client, mocker):
-    expiration_date = ten_minutes_into_future()
+    expiration_date = ten_hours_into_future()
     key = graphql_make_new_recovery_key(authorized_client, expires_at=expiration_date)

     # Timewarp to after it expires
@@ -219,7 +219,7 @@ def test_graphql_use_recovery_key_after_expiration(client, authorized_client, mocker):
 def test_graphql_generate_recovery_key_with_expiration_in_the_past(authorized_client):
-    expiration_date = ten_minutes_into_past()
+    expiration_date = ten_hours_into_past()
     response = request_make_new_recovery_key(
         authorized_client, expires_at=expiration_date
     )
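The "Graphql API's output should be timezone-naive" comment in the import hunk above explains the aliasing: the naive_utc helper is the default expiration input, while the _tz alias deliberately feeds a timezone-aware value so the parametrized test covers both input forms. A two-line illustration of the distinction those helpers encode:

from datetime import datetime, timedelta, timezone

naive = datetime.utcnow() + timedelta(hours=10)           # tzinfo is None
aware = datetime.now(timezone.utc) + timedelta(hours=10)  # tzinfo is UTC
print(naive.tzinfo, aware.tzinfo)  # -> None UTC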

View File

@@ -24,7 +24,7 @@ from selfprivacy_api.repositories.tokens.abstract_tokens_repository import (
     AbstractTokensRepository,
 )

-from tests.common import ten_minutes_into_past, ten_minutes_into_future
+from tests.common import ten_minutes_into_past, ten_hours_into_future

 ORIGINAL_DEVICE_NAMES = [