[Tests] Run slow tests separately
- Also run the faster ones in parallel
- The slowest use KiCad; not sure how safe it is to run it in parallel
- Changed the coverage collection mechanism to allow parallel execution (sketched below)
parent 24fa9b022c
commit 9532422984
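
Roughly, the resulting flow is the following (a sketch only; it assumes coverage.py's parallel mode enabled in the coverage config and the pytest-xdist plugin providing -n, both of which appear in the diff below):

python3-coverage erase
pytest-3 -v --durations=0 -m "not slow" -n 2 --test_dir output    # the ~90% faster tests, two workers
pytest-3 -v --durations=0 -m slow -x --test_dir output            # slow KiCad tests, serial, stop on first error
python3-coverage combine    # merge the per-process .coverage.* data files
python3-coverage report
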
@@ -8,6 +8,7 @@ omit = */kibot/docopt.py
*/kibot/mcpyrate/*
*/kibot/PyPDF2/*
*/kibot/svgutils/*
parallel = true

[report]
exclude_lines =

@@ -49,8 +49,14 @@ jobs:
python3-coverage erase
# Create the caches with macros
python3-coverage run -a src/kibot --help-outputs > /dev/null
pytest-3 --durations=0 --test_dir output
# Individual run for specific tests
# pytest-3 --log-cli-level debug -k "test_ibom_parse_fail" --test_dir output
# Run the 90% faster tests (under 3 s)
# Do it in parallel
pytest-3 -v --durations=0 -m "not slow" -n 2 --test_dir output
# Run the slowest at the end and exit on the first error
pytest-3 -v --durations=0 -m slow -x --test_dir output
python3-coverage combine
python3-coverage report
python3-coverage html -d output/htmlcov
- name: Store results

Makefile
@@ -38,6 +38,7 @@ lint: doc
test_tmp: lint
$(PY_COV) erase
$(PYTEST)
$(PY_COV) combine
$(PY_COV) report

test: lint
@@ -46,7 +47,9 @@ test: lint
rm -f example.kibot.yaml
rm -f tests/.local
$(PY_COV) erase
$(PYTEST) --test_dir output
$(PYTEST) -m "not slow" -n 2 --test_dir output
$(PYTEST) -m "slow" --test_dir output
$(PY_COV) combine
$(PY_COV) report
$(PY_COV) html
x-www-browser htmlcov/index.html
@@ -58,6 +61,7 @@ test1:
rm -f tests/.local
$(PY_COV) erase
$(PYTEST) --log-cli-level debug -k "test_bom_ok" --test_dir output
$(PY_COV) combine
$(PY_COV) report
$(PY_COV) html
#x-www-browser htmlcov/index.html
@@ -77,7 +81,8 @@ test_docker_local_1:
/bin/bash -c "flake8 . --count --statistics ; python3-coverage run -a src/kibot --help-outputs > /dev/null; pytest-3 --log-cli-level debug -k 'test_print_pcb_svg_simple_2' --test_dir output ; $(PY_COV) html; chown -R $(USER_ID):$(GROUP_ID) output/ tests/board_samples/ tests/.config/kiplot/plugins/__pycache__/ tests/test_plot/fake_pcbnew/__pycache__/ tests/.config/kibot/plugins/__pycache__/ .coverage htmlcov/"
#$(PY_COV) report
#x-www-browser htmlcov/index.html
rm .coverage
# The coverage used in the image is incompatible
$(PY_COV) erase

test_docker_local_1_ki6:
rm -rf output
@@ -86,7 +91,7 @@ test_docker_local_1_ki6:
# Run in the same directory to make the __pycache__ valid
# Also change the owner of the files to the current user (we run as root like in GitHub)
docker run --rm -v $(CWD):$(CWD) --workdir="$(CWD)" setsoft/kicad_auto_test:ki6 \
/bin/bash -c "flake8 . --count --statistics ; python3-coverage run -a src/kibot --help-outputs > /dev/null; pytest-3 --log-cli-level debug -k 'test_kicad_conf_user' --test_dir output ; $(PY_COV) html; chown -R $(USER_ID):$(GROUP_ID) output/ tests/board_samples/ tests/.config/kiplot/plugins/__pycache__/ tests/test_plot/fake_pcbnew/__pycache__/ tests/.config/kibot/plugins/__pycache__/ .coverage htmlcov/"
/bin/bash -c "flake8 . --count --statistics ; python3-coverage run -a src/kibot --help-outputs > /dev/null; pytest-3 --log-cli-level debug -k 'test_dep_pytool' --test_dir output ; $(PY_COV) html; chown -R $(USER_ID):$(GROUP_ID) output/ tests/board_samples/ tests/.config/kiplot/plugins/__pycache__/ tests/test_plot/fake_pcbnew/__pycache__/ tests/.config/kibot/plugins/__pycache__/ .coverage htmlcov/"
#$(PY_COV) report
#x-www-browser htmlcov/index.html
@@ -98,8 +103,9 @@ test_docker_local:
# Also change the owner of the files to the current user (we run as root like in GitHub)
docker run --rm -v $(CWD):$(CWD) --workdir="$(CWD)" setsoft/kicad_auto_test:latest \
/bin/bash -c "flake8 . --count --statistics ; python3-coverage run -a src/kibot --help-outputs ; pytest-3 --test_dir output ; $(PY_COV) html; chown -R $(USER_ID):$(GROUP_ID) output/ tests/board_samples/ .coverage htmlcov/"
$(PY_COV) report
x-www-browser htmlcov/index.html
# $(PY_COV) report
# x-www-browser htmlcov/index.html
$(PY_COV) erase

test_docker_local_ki6:
rm -rf output
@@ -109,6 +115,7 @@ test_docker_local_ki6:
# Also change the owner of the files to the current user (we run as root like in GitHub)
docker run --rm -v $(CWD):$(CWD) --workdir="$(CWD)" setsoft/kicad_auto_test:ki6 \
/bin/bash -c "flake8 . --count --statistics ; python3-coverage run -a src/kibot --help-outputs ; pytest-3 --test_dir output ; $(PY_COV) html; chown -R $(USER_ID):$(GROUP_ID) output/ tests/board_samples/ .coverage htmlcov/"
$(PY_COV) combine
$(PY_COV) report
x-www-browser htmlcov/index.html
@@ -124,7 +131,7 @@ docker_shell:

single_test:
rm -rf pp
-$(PY_COV) run -a src/kibot --help-list-outputs > /dev/null
-$(PY_COV) run src/kibot --help-list-outputs > /dev/null
-$(PYTEST) --log-cli-level debug -k "$(SINGLE_TEST)" --test_dir pp
@echo "********************" Output
@cat pp/*/output.txt

@@ -1,2 +1,5 @@
[pytest]
python_files = tests/test_plot/test_*.py
markers =
webtest: mark a test as a webtest.
slow: mark test as slow.

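Registering the markers lets tests be grouped and selected with -m expressions; for illustration (example invocations, not part of this commit):

pytest-3 -m slow --test_dir output               # only tests decorated with @pytest.mark.slow
pytest-3 -m "not slow" -n 2 --test_dir output    # everything else, on two xdist workers
pytest-3 -m "slow or webtest" --test_dir output  # marker expressions can be combined
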
@@ -9,6 +9,7 @@ import coverage
import yaml
import logging
import importlib
import pytest
import shutil
import subprocess
import sys
@@ -120,6 +121,7 @@ def test_dep_gs(test_dir, caplog, monkeypatch):
os.remove(os.path.join(ctx.output_dir, bin_dir, 'gs'))


@pytest.mark.slow
def test_dep_convert(test_dir, caplog, monkeypatch):
""" Check the convert_downloader """
# Create a context to get an output directory

@@ -9,10 +9,11 @@ For debug information use:
pytest-3 --log-cli-level debug

"""
import os
import re
import json
import logging
import os
import pytest
import re
from . import context
from utils.lzstring import LZString
from kibot.misc import (BOM_ERROR)
@@ -87,6 +88,7 @@ def test_ibom_all_ops(test_dir):
ctx.clean_up()


@pytest.mark.slow
def test_ibom_variant_1(test_dir):
prj = 'kibom-variante'
ctx = context.TestContext(test_dir, prj, 'ibom_variant_1', BOM_DIR)

@@ -31,6 +31,7 @@ import os
import re
import shutil
import logging
import pytest
import subprocess
import json
from . import context
@@ -742,6 +743,7 @@ def check_makefile(ctx, mkfile, prj, dbg, txt):
ctx.search_err(r'Wrong character in file name `(.*)/test_v5-top\$.svg')


@pytest.mark.slow
def test_makefile_1(test_dir):
prj = 'test_v5'
ctx = context.TestContext(test_dir, prj, 'makefile_1')
@@ -752,6 +754,7 @@ def test_makefile_1(test_dir):
ctx.clean_up()


@pytest.mark.slow
def test_makefile_2(test_dir):
prj = 'test_v5'
ctx = context.TestContext(test_dir, prj, 'makefile_1')
@@ -898,6 +901,7 @@ def test_cli_order(test_dir):
ctx.clean_up()


@pytest.mark.slow
def test_qr_lib_1(test_dir):
prj = 'qr_test/qr_test'
ctx = context.TestContext(test_dir, prj, 'qr_lib_1', POS_DIR)
@@ -959,6 +963,7 @@ def test_report_simple_1(test_dir):
ctx.clean_up(keep_project=True)


@pytest.mark.slow
def test_report_simple_2(test_dir):
prj = 'light_control'
ctx = context.TestContext(test_dir, prj, 'report_simple_2', POS_DIR)
@@ -1117,6 +1122,7 @@ def test_annotate_pcb_tbrl_small_grid(test_dir):
{'C1': '1u', 'C2': '2u', 'R1': '2', 'R2': '1', 'C3': '3u', 'C4': '4u', 'R3': '3', 'R4': '4'})


@pytest.mark.slow
def test_gencad_1(test_dir):
prj = 'gencad'
ctx = context.TestContext(test_dir, prj, 'gencad_1')
@@ -1133,6 +1139,7 @@ def test_gencad_1(test_dir):
ctx.clean_up()


@pytest.mark.slow
def test_quick_start_1(test_dir):
""" Very naive test to see if it doesn't crash """
prj = 'light_control'
@@ -1172,6 +1179,7 @@ def test_quick_start_1(test_dir):
ctx.expect_out_file(os.path.join('Browse', 'light_control-navigate.html'))


@pytest.mark.slow
def test_netlist_classic_1(test_dir):
prj = 'light_control'
dir_o = 'Export'
@@ -1180,6 +1188,7 @@ def test_netlist_classic_1(test_dir):
ctx.expect_out_file(os.path.join(dir_o, prj+'-netlist.net'))


@pytest.mark.slow
def test_netlist_ipc_1(test_dir):
prj = 'light_control'
dir_o = 'Export'

@@ -13,15 +13,17 @@ For debug information use:
pytest-3 --log-cli-level debug

"""
import os
import logging
import re
import json
import logging
import pytest
import os
import re
from subprocess import run, PIPE
from . import context
from kibot.misc import DRC_ERROR, ERC_ERROR, BOM_ERROR, CORRUPTED_PCB, CORRUPTED_SCH, EXIT_BAD_CONFIG


@pytest.mark.slow
def test_erc_1(test_dir):
prj = 'bom'
ctx = context.TestContext(test_dir, prj, 'erc', '')
@@ -31,6 +33,7 @@ def test_erc_1(test_dir):
ctx.clean_up()


@pytest.mark.slow
def test_erc_fail_1(test_dir):
""" Using an SCH with ERC errors """
prj = 'fail-erc'
@@ -49,6 +52,7 @@ def test_erc_fail_2(test_dir):
ctx.clean_up()


@pytest.mark.slow
def test_erc_warning_1(test_dir):
""" Using an SCH with ERC warnings """
prj = 'warning-project'
@@ -60,6 +64,7 @@ def test_erc_warning_1(test_dir):
ctx.clean_up()


@pytest.mark.slow
def test_erc_warning_2(test_dir):
""" Using an SCH with ERC warnings as errors """
prj = 'warning-project'
@@ -130,6 +135,7 @@ def test_drc_time_out(test_dir):
ctx.clean_up()


@pytest.mark.slow
def test_update_xml_1(test_dir):
prj = 'bom'
ctx = context.TestContext(test_dir, prj, 'update_xml', '')
@@ -149,6 +155,7 @@ def test_update_xml_1(test_dir):
ctx.clean_up()


@pytest.mark.slow
def test_update_xml_fail(test_dir):
""" Using a dummy SCH """
prj = '3Rs'

@@ -9,6 +9,7 @@ pytest-3 --log-cli-level debug

"""
import logging
import pytest
from . import context
PDF_DIR = 'Layers'
PDF_FILE = 'bom-F_Cu+F_SilkS.pdf'
@@ -16,6 +17,7 @@ PDF_FILE_B = 'PCB_Bot.pdf'
PDF_FILE_C = 'PCB_Bot_def.pdf'


@pytest.mark.slow
def test_print_pcb_simple(test_dir):
prj = 'bom'
ctx = context.TestContext(test_dir, prj, 'print_pcb', PDF_DIR)
@@ -25,6 +27,7 @@ def test_print_pcb_simple(test_dir):
ctx.clean_up()


@pytest.mark.slow
def test_print_pcb_svg_simple_1(test_dir):
prj = 'bom'
ctx = context.TestContext(test_dir, prj, 'print_pcb_svg')
@@ -36,6 +39,7 @@ def test_print_pcb_svg_simple_1(test_dir):
ctx.clean_up()


@pytest.mark.slow
def test_print_pcb_svg_simple_2(test_dir):
""" Check the portrait version is OK """
prj = 'bom_portrait'
@@ -48,6 +52,7 @@ def test_print_pcb_svg_simple_2(test_dir):
ctx.clean_up()


@pytest.mark.slow
def test_print_pcb_refill_1(test_dir):
prj = 'zone-refill'
ctx = context.TestContext(test_dir, prj, 'print_pcb_zone-refill')
@@ -57,6 +62,7 @@ def test_print_pcb_refill_1(test_dir):
ctx.clean_up()


@pytest.mark.slow
def test_print_pcb_refill_2(test_dir):
""" Using KiCad 6 colors """
if context.ki5():
@@ -69,6 +75,7 @@ def test_print_pcb_refill_2(test_dir):
ctx.clean_up()


@pytest.mark.slow
def test_print_variant_1(test_dir):
prj = 'kibom-variant_3_txt'
ctx = context.TestContext(test_dir, prj, 'print_pcb_variant_1')
@@ -81,6 +88,7 @@ def test_print_variant_1(test_dir):
ctx.clean_up(keep_project=True)


@pytest.mark.slow
def test_print_pcb_options(test_dir):
prj = 'bom'
ctx = context.TestContext(test_dir, prj, 'print_pcb_options', PDF_DIR)
@@ -91,6 +99,7 @@ def test_print_pcb_options(test_dir):
ctx.clean_up()


@pytest.mark.slow
def test_print_wrong_paste(test_dir):
prj = 'wrong_paste'
ctx = context.TestContext(test_dir, prj, 'wrong_paste', PDF_DIR)
@@ -101,6 +110,7 @@ def test_print_wrong_paste(test_dir):
ctx.clean_up()


@pytest.mark.slow
def test_pcb_print_simple_1(test_dir):
prj = 'light_control'
ctx = context.TestContext(test_dir, prj, 'pcb_print_2')

@@ -12,6 +12,7 @@ pytest-3 --log-cli-level debug
import os
import logging
import coverage
import pytest
from . import context
from kibot.misc import (PDF_SCH_PRINT, SVG_SCH_PRINT)
from kibot.kicad.v5_sch import Schematic, SchFileError, DrawPoligon, Pin
@@ -25,6 +26,7 @@ NI_DIR = 'no_inductor'
cov = coverage.Coverage()


@pytest.mark.slow
def test_print_sch_ok(test_dir):
prj = 'bom_no_xml' # bom has meta data, here we test no meta-data
ctx = context.TestContext(test_dir, prj, 'print_sch')
@@ -34,6 +36,7 @@ def test_print_sch_ok(test_dir):
ctx.clean_up()


@pytest.mark.slow
def test_print_sch_fail(test_dir):
prj = 'print_err'
ctx = context.TestContextSCH(test_dir, prj, 'print_sch')
@@ -42,6 +45,7 @@ def test_print_sch_fail(test_dir):
ctx.clean_up()


@pytest.mark.slow
def test_print_sch_svg_ok(test_dir):
prj = 'bom_no_xml' # bom has meta data, here we test no meta-data
ctx = context.TestContext(test_dir, prj, 'print_sch_svg')
@@ -51,6 +55,7 @@ def test_print_sch_svg_ok(test_dir):
ctx.clean_up()


@pytest.mark.slow
def test_print_sch_svg_fail(test_dir):
prj = 'print_err'
ctx = context.TestContext(test_dir, prj, 'print_sch_svg')
@@ -97,6 +102,7 @@ def test_sch_variant_ni_2(test_dir):
check_l1(ctx)


@pytest.mark.slow
def test_print_sch_variant_ni_1(test_dir):
""" Using a variant """
prj = 'test_v5_wks' # Is the most complete, contains every KiCad object I know
@@ -109,6 +115,7 @@ def test_print_sch_variant_ni_1(test_dir):
ctx.clean_up(keep_project=True)


@pytest.mark.slow
def test_print_sch_svg_variant_ni_1(test_dir):
""" SVG using a variant """
prj = 'test_v5' # Is the most complete, contains every KiCad object I know
@@ -121,6 +128,7 @@ def test_print_sch_svg_variant_ni_1(test_dir):
ctx.clean_up()


@pytest.mark.slow
def test_print_sch_variant_ni_2(test_dir):
""" Using a filter """
prj = 'test_v5' # Is the most complete, contains every KiCad object I know

@@ -9,6 +9,7 @@ pytest-3 --log-cli-level debug

"""
import os
import pytest
from glob import glob
from . import context
@@ -17,6 +18,7 @@ STEP_DIR = '3D'
# STEP_FILE = 'bom.step'


@pytest.mark.slow
def test_step_1(test_dir):
prj = 'bom'
ctx = context.TestContext(test_dir, prj, 'step_simple', STEP_DIR)
@@ -31,6 +33,7 @@ def test_step_1(test_dir):
ctx.clean_up()


@pytest.mark.slow
def test_step_2(test_dir):
prj = 'bom_fake_models'
yaml = 'step_simple_2'
@@ -44,6 +47,7 @@ def test_step_2(test_dir):
ctx.clean_up(keep_project=True)


@pytest.mark.slow
def test_step_3(test_dir):
prj = 'bom'
ctx = context.TestContext(test_dir, prj, 'step_simple_3', STEP_DIR)
@@ -53,6 +57,7 @@ def test_step_3(test_dir):
ctx.clean_up()


@pytest.mark.slow
def test_step_gl_env(test_dir):
prj = 'bom'
ctx = context.TestContext(test_dir, prj, 'step_gl_env', STEP_DIR)
@@ -67,6 +72,7 @@ def test_step_gl_env(test_dir):
ctx.clean_up()


@pytest.mark.slow
def test_step_variant_1(test_dir):
prj = 'kibom-variant_3'
ctx = context.TestContext(test_dir, prj, 'step_variant_1')
@@ -78,6 +84,7 @@ def test_step_variant_1(test_dir):
ctx.clean_up(keep_project=True)


@pytest.mark.slow
def test_render_3d_variant_1(test_dir):
# Text variables to ensure they are rendered.
# Traces

@@ -276,7 +276,7 @@ class TestContext(object):
f.write('Dummy file\n')

def do_run(self, cmd, ret_val=None, use_a_tty=False, chdir_out=False):
cmd_base = [COVERAGE_SCRIPT, 'run', '-a']
cmd_base = [COVERAGE_SCRIPT, 'run']
if chdir_out:
cwd = os.getcwd()
cmd_base.append('--rcfile='+os.path.join(cwd, '.coveragerc'))
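
The last hunk drops coverage's append mode (-a), presumably because with parallel = true each run writes its own uniquely suffixed data file instead of appending to a shared .coverage, and the files are merged at the end. A sketch of that behavior (the suffixes follow coverage.py's host.pid.random scheme; exact names will differ):

python3-coverage run src/kibot --help-list-outputs > /dev/null   # writes .coverage.<host>.<pid>.<random>
python3-coverage run src/kibot --help-outputs > /dev/null        # a second process, a second data file
python3-coverage combine                                         # merge .coverage.* into a single .coverage
python3-coverage report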