Commit d5c2361

Merge pull request #19 from telia-oss/add-tests

Added docs and tests. Tests running on travis.

2 parents: 2dad890 + cb8c665

128 files changed (+6410, -21 lines)


.coveragerc (+3 lines)

@@ -0,0 +1,3 @@
+[run]
+omit =
+    birgitta/recipetest/localtest/script_prepend.py
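
The new .coveragerc only tells coverage.py which files to leave out of measurement; it is picked up both by pytest-cov (as driven from the Makefile below) and by coverage.py's Python API. A minimal sketch, not part of this commit, of how the omit pattern behaves; the imported module is just a stand-in for whatever the tests exercise:

# Sketch (not from the repo): the [run] omit pattern in .coveragerc excludes
# matching files from measurement, assuming coverage.py is installed.
import coverage

cov = coverage.Coverage(config_file=".coveragerc")
cov.start()

import birgitta  # measured as usual; omitted files are skipped  # noqa: E402

cov.stop()
cov.save()
cov.report()  # script_prepend.py does not appear in this report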

.travis.yml (+30, -1 lines)

@@ -1,8 +1,37 @@
 dist: xenial
+group: edge
 language: python
 python: 3.6.6
+env:
+  # Hack to get JAVA_HOME and PATH correct even if `language` above is not Java
+  - PATH=$(echo "$PATH" | sed -e 's/:\/usr\/local\/lib\/jvm\/openjdk11\/bin//')
+  - JAVA_HOME=/usr/lib/jvm/java-1.8.0-openjdk-amd64
+cache:
+  directories:
+    - $HOME/.ivy2
+    - $HOME/spark
+    - $HOME/.cache/pip
+    - $HOME/.pip-cache
+    - $HOME/.sbt/launchers
+jdk:
+  - openjdk8 # oracle is no longer supported by travis, only openjdk
+sudo: false
+addons:
+  apt:
+    packages:
+      - axel
 install:
+  # Download spark 2.3.3
+  # - jdk_switcher use oraclejdk8 # cmd not found
+  - export JAVA_HOME=/usr/lib/jvm/java-1.8.0-openjdk-amd64 # Hack to get JAVA_HOME and PATH correct even if `language` above is not Java
+  - "[ -f spark ] || mkdir spark && cd spark && axel http://archive.apache.org/dist/spark/spark-2.3.3/spark-2.3.3-bin-hadoop2.7.tgz && cd .."
+  - "tar -xf ./spark/spark-2.3.3-bin-hadoop2.7.tgz"
+  - "export SPARK_HOME=`pwd`/spark-2.3.3-bin-hadoop2.7"
+  - echo "spark.yarn.jars=$SPARK_HOME/jars/*.jar" > $SPARK_HOME/conf/spark-defaults.conf
+  # Install Python deps.
   - pip install -r requirements_dev.txt
   - pip install -e .
 script:
-  - make test
+  - make test
+after_success:
+  - coveralls
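
The install steps download Spark 2.3.3, unpack it, and export SPARK_HOME so the PySpark-based tests can run locally on the Travis worker. A rough smoke check of such a setup is sketched below; it is not part of the repo, and the app name is illustrative:

# Hypothetical check (not from the repo): confirm the Spark unpacked by the
# Travis install step is usable from Python via pyspark.
import os

from pyspark.sql import SparkSession

assert os.environ.get("SPARK_HOME", "").endswith("spark-2.3.3-bin-hadoop2.7")

spark = (
    SparkSession.builder
    .master("local[*]")            # local mode, as on the Travis worker
    .appName("travis-smoke-test")  # illustrative name
    .getOrCreate()
)
print(spark.version)  # expected to report 2.3.3
spark.stop()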

Makefile (+15, -4 lines)

@@ -1,13 +1,18 @@
-COV_MIN = 100 # Gradually increase this as we add more tests
+COV_MIN = 85 # Gradually increase this as we add more tests
 TAG = latest

 SRC_DIR = $(shell pwd)
 DIST_DIR = $(SRC_DIR)/dist
 BIRGITTA_TESTS = $(SRC_DIR)/tests
+PROJECT_TESTS = $(SRC_DIR)/examples/organizations/newsltd

-package: clean
+package: clean json_fixtures
 	python setup.py sdist bdist_wheel

+clean_json_fixtures:
+	rm -rf "$(SRC_DIR)/examples/organizations/newsltd/projects/chronicle/tests/fixtures/generated_json/*"
+	rm -rf "$(SRC_DIR)/examples/organizations/newsltd/projects/tribune/tests/fixtures/generated_json/*"
+
 clean:
 	cd $(SRC_DIR)
 	find . -name '__pycache__' -exec rm -rf {} +
@@ -19,9 +24,14 @@ clean:
 	rm -rf htmlcov/
 	rm -rf spark-warehouse/
 	rm -rf .pytest_cache/
-	rm -rf .coverage*
+	rm -rf .coverage
+	rm -rf .coverage.*/
 	rm -rf tmp/*

+
+json_fixtures: clean_json_fixtures
+	python3 "$(SRC_DIR)/make_json_fixtures.py"
+
 configure:
 	pip install -r requirements.txt
 	pip install -r requirements_dev.txt
@@ -38,10 +48,11 @@ test:
 	# been in progress in another thread when fork() was called.
 	OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES \
 	pytest \
+	$(PROJECT_TESTS) $(BIRGITTA_TESTS) \
 	--cov=birgitta \
+	--cov=examples/organizations \
 	--cov-report html \
 	--cov-report term-missing \
 	--cov-fail-under $(COV_MIN) \
-	$(BIRGITTA_TESTS)

 .PHONY: build clean configure lint test
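
With these changes, `make test` runs both the example-project tests and the birgitta unit tests under a single coverage run and fails if combined coverage drops below COV_MIN (now 85). As an illustration only, the equivalent pytest-cov invocation driven from Python might look like the sketch below; the paths and threshold mirror the Makefile, but the script itself is not part of the repo:

# Hypothetical mirror of the Makefile's pytest call, assuming pytest and
# pytest-cov are installed (they come from requirements_dev.txt).
import sys

import pytest

COV_MIN = 85  # mirrors the lowered threshold in the Makefile

args = [
    "examples/organizations/newsltd",  # PROJECT_TESTS
    "tests",                           # BIRGITTA_TESTS
    "--cov=birgitta",
    "--cov=examples/organizations",
    "--cov-report=term-missing",
    f"--cov-fail-under={COV_MIN}",
]
sys.exit(pytest.main(args))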
