Skip to content

Commit

Permalink
progress
Browse files Browse the repository at this point in the history
  • Loading branch information
northdpole committed Apr 26, 2022
1 parent 90f118a commit 0ce5b86
Show file tree
Hide file tree
Showing 3 changed files with 127 additions and 39 deletions.
67 changes: 39 additions & 28 deletions application/cmd/cre_main.py
Original file line number Diff line number Diff line change
Expand Up @@ -225,7 +225,7 @@ def add_from_spreadsheet(spreadsheet_url: str, cache_loc: str, cre_loc: str) ->
import new mappings from <url>
export db to ../../cres/
"""
database = db_connect(path=cache_loc)
database, _, _ = db_connect(path=cache_loc)
spreadsheet = sheet_utils.readSpreadsheet(
url=spreadsheet_url, cres_loc=cre_loc, alias="new spreadsheet", validate=False
)
Expand All @@ -245,7 +245,7 @@ def add_from_disk(cache_loc: str, cre_loc: str) -> None:
import new mappings from <path>
export db to ../../cres/
"""
database = db_connect(path=cache_loc)
database, _, _ = db_connect(path=cache_loc)
for file in get_cre_files_from_disk(cre_loc):
with open(file, "rb") as standard:
parse_file(
Expand All @@ -264,7 +264,7 @@ def review_from_spreadsheet(cache: str, spreadsheet_url: str, share_with: str) -
create new spreadsheet of the new CRE landscape for review
"""
loc, cache = prepare_for_review(cache)
database = db_connect(path=cache)
database, _, _ = db_connect(path=cache)
spreadsheet = sheet_utils.readSpreadsheet(
url=spreadsheet_url, cres_loc=loc, alias="new spreadsheet", validate=False
)
Expand Down Expand Up @@ -293,7 +293,7 @@ def review_from_disk(cache: str, cre_file_loc: str, share_with: str) -> None:
create new spreadsheet of the new CRE landscape for review
"""
loc, cache = prepare_for_review(cache)
database = db_connect(path=cache)
database, _, _ = db_connect(path=cache)
for file in get_cre_files_from_disk(cre_file_loc):
with open(file, "rb") as standard:
parse_file(
Expand Down Expand Up @@ -358,31 +358,33 @@ def run(args: argparse.Namespace) -> None: # pragma: no cover
elif args.osib_out:
export_to_osib(file_loc=args.osib_out, cache=args.cache_file)
if args.zap_in:
zap_alerts_parser.parse_zap_alerts(db_connect(args.cache_file))
cache, _, _ = db_connect(args.cache_file)
zap_alerts_parser.parse_zap_alerts(cache)
if args.cheatsheets_in:
cheatsheets_parser.parse_cheatsheets(db_connect(args.cache_file))
cache, _, _ = db_connect(args.cache_file)
cheatsheets_parser.parse_cheatsheets(cache)
if args.github_tools_in:
for url in misc_tools_parser.tool_urls:
misc_tools_parser.parse_tool(
cache=db_connect(args.cache_file), tool_repo=url
)
cache, _, _ = db_connect(args.cache_file)
misc_tools_parser.parse_tool(cache=cache, tool_repo=url)
if args.owasp_proj_meta:
owasp_metadata_to_cre(args.owasp_proj_meta)

if args.compare_datasets:
compare_datasets(args.dataset1, args.dataset2)
d1, d2, ed1, ed2 = compare_datasets(args.dataset1, args.dataset2)
if len(d1) or len(d2) or len(ed1) or len(ed2):
exit(1)


def db_connect(path: str) -> Tuple[db.Node_collection, Any, Any]:
    """Connect to the CRE database at *path* via a fresh Flask app.

    Rebinds the module-level ``app`` global and pushes an application
    context so that ``db.Node_collection`` queries work outside a request.

    :param path: database URI / cache file path used to configure the app.
    :return: a 3-tuple ``(collection, app, app_context)`` — the app and the
        pushed context are returned so callers can later ``pop()`` the
        context or run app-bound setup such as ``sqla.create_all(app=app)``.
    """
    global app  # module-level Flask app, rebound on every connect
    conf = CMDConfig(db_uri=path)
    app = create_app(conf=conf)
    collection = db.Node_collection()
    app_context = app.app_context()
    # push so subsequent SQLAlchemy work has an active app context;
    # the caller owns the context and is responsible for popping it
    app_context.push()

    return (collection, app, app_context)


def create_spreadsheet(
Expand Down Expand Up @@ -416,7 +418,7 @@ def review_osib_from_file(file_loc: str, cache: str, cre_loc: str) -> None:
"""Given the location of an osib.yaml, parse osib, convert to cres and add to db
export db to yamls and spreadsheet for review"""
loc, cache = prepare_for_review(cache)
database = db_connect(path=cache)
database, _, _ = db_connect(path=cache)
ymls = odefs.read_osib_yaml(file_loc)
osibs = odefs.try_from_file(ymls)
for osib in osibs:
Expand All @@ -437,7 +439,7 @@ def review_osib_from_file(file_loc: str, cache: str, cre_loc: str) -> None:


def add_osib_from_file(file_loc: str, cache: str, cre_loc: str) -> None:
database = db_connect(path=cache)
database, _, _ = db_connect(path=cache)
ymls = odefs.read_osib_yaml(file_loc)
osibs = odefs.try_from_file(ymls)
for osib in osibs:
Expand All @@ -448,7 +450,8 @@ def add_osib_from_file(file_loc: str, cache: str, cre_loc: str) -> None:


def export_to_osib(file_loc: str, cache: str) -> None:
docs = db_connect(path=cache).export(file_loc, dry_run=True)
cache, _, _ = db_connect(path=cache)
docs = cache.export(file_loc, dry_run=True)
tree = odefs.cre2osib(docs)
with open(file_loc, "x"):
with open(file_loc, "w") as f:
Expand All @@ -464,26 +467,23 @@ def compare_datasets(db1: str, db2: str) -> List[Dict]:
ensure that both graphs have same number of nodes and edges and both graphs have the same data
"""

database1 = db_connect(path=db1)
database2 = db_connect(path=db2)

def make_hashtable(graph):
nodes = {}
edges = {}
for node in graph.nodes():
if node.startswith("CRE"):
nodes[graph.nodes[node]["external_id"]] = node
nodes[graph.nodes[node].get("external_id")] = node
elif node.startswith("Node"):
nodes[graph.nodes[node]["infosum"]] = node
nodes[graph.nodes[node].get("infosum")] = node
else:
logger.fatal("Graph seems corrupted")

for edge in graph.edges():
key = graph.nodes[edge[0]]["external_id"]
key = graph.nodes[edge[0]].get("external_id")
if edge[1].startswith("CRE"):
key = key + "-" + graph.nodes[edge[1]]["external_id"]
key = f"{key}-{graph.nodes[edge[1]].get('external_id')}"
else:
key = key + "-" + graph.nodes[edge[1]]["infosum"]
key = f"{key}-{graph.nodes[edge[1]].get('infosum')}"
edges[key] = edge
return nodes, edges

Expand Down Expand Up @@ -524,19 +524,30 @@ def edge_differences(edges1, edges2, db2):
}
return differences

database1, _, _ = db_connect(path=db1)
g1 = database1.graph.graph
g2 = database2.graph.graph
n1, e1 = make_hashtable(g1)

print("$" * 90)
database1.graph.print_graph()
print("$" * 90)
database1.graph._instance = None
database1.graph = None

database2, _, _ = db_connect(path=db2)
g2 = database2.graph.graph
print("$" * 90)
database2.graph.print_graph()
print("$" * 90)
input()
n2, e2 = make_hashtable(g2)

d1 = node_differences(n1, n2, db2)
d2 = node_differences(n2, n1, db1)

ed1 = edge_differences(e1, e2, db2)
ed2 = edge_differences(e2, e1, db1)
if len(d1) or len(d2) or len(ed1) or len(ed2):
exit(1)
# return [d1, d2, ed1, ed2] # TODO uncomment when this becomes a library method
return [d1, d2, ed1, ed2] # TODO uncomment when this becomes a library method


def owasp_metadata_to_cre(meta_file: str):
Expand Down
17 changes: 13 additions & 4 deletions application/database/db.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
from sqlalchemy import func
from sqlalchemy.sql.expression import desc # type: ignore
import uuid
from matplotlib import pyplot

from .. import sqla # type: ignore

Expand Down Expand Up @@ -140,6 +141,15 @@ class CRE_Graph:
graph: nx.Graph = None
__instance = None

def print_graph(self, png_path: str = None) -> dict:
    """Debug helper: dump the CRE graph as a dict of dicts.

    If *png_path* is provided, additionally render the graph with
    matplotlib and save the figure to that path.

    :param png_path: optional file path for the rendered PNG image.
    :return: the graph serialized via ``nx.to_dict_of_dicts``.
    """
    if png_path:
        nx.draw(self.graph, with_labels=True)
        pyplot.savefig(png_path)
        # NOTE(review): pyplot.show() blocks under interactive backends —
        # confirm this is intended for a debug-only code path
        pyplot.show()
    # returned regardless of whether a PNG was written
    return nx.to_dict_of_dicts(self.graph)

@classmethod
def instance(cls, session):
if cls.__instance is None:
Expand Down Expand Up @@ -219,12 +229,11 @@ def load_cre_graph(cls, session) -> nx.Graph:

class Node_collection:
graph: nx.Graph = None
session = sqla.session
session = None

def __init__(self, session=sqla.session) -> None:
    """Initialise the collection with the shared CRE graph and a DB session.

    :param session: SQLAlchemy session to use for queries; defaults to the
        global ``sqla.session``. NOTE: the default is evaluated once at
        import time, so it always refers to the session object bound when
        this module was first imported.
    """
    # NOTE(review): the graph singleton is always built from the global
    # sqla.session, not the injected one — confirm this is intended when
    # callers pass a different session
    self.graph = CRE_Graph.instance(sqla.session)
    self.session = session

def __get_external_links(self) -> List[Tuple[CRE, Node, str]]:
external_links: List[Tuple[CRE, Node, str]] = []
Expand Down
82 changes: 75 additions & 7 deletions application/tests/cre_main_test.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import copy
import logging
import os
import shutil
Expand Down Expand Up @@ -372,7 +373,7 @@ def test_add_from_spreadsheet(
self.tmpdirs.append(dir)
cache = tempfile.mkstemp(dir=dir, suffix=".sqlite")[1]

mocked_db_connect.return_value = self.collection
mocked_db_connect.return_value = self.collection, self.app, self.app_context
mocked_export.return_value = [
defs.CRE(name="c0"),
defs.Standard(name="s0", section="s1"),
Expand Down Expand Up @@ -415,7 +416,7 @@ def test_review_from_spreadsheet(
loc = tempfile.mkstemp(dir=dir)[1]
cache = tempfile.mkstemp(dir=dir)[1]
mocked_prepare_for_review.return_value = (loc, cache)
mocked_db_connect.return_value = self.collection
mocked_db_connect.return_value = self.collection, self.app, self.app_context

mocked_create_spreadsheet.return_value = "https://example.com/sheeet"
mocked_export.return_value = [
Expand Down Expand Up @@ -467,7 +468,7 @@ def test_review_from_disk(
loc = tempfile.mkstemp(dir=dir)[1]
cache = tempfile.mkstemp(dir=dir, suffix=".sqlite")[1]
mocked_prepare_for_review.return_value = (loc, cache)
mocked_db_connect.return_value = self.collection
mocked_db_connect.return_value = self.collection, self.app, self.app_context
mocked_get_standards_files_from_disk.return_value = [yml for i in range(0, 3)]
mocked_export.return_value = [
defs.CRE(name="c0"),
Expand Down Expand Up @@ -511,7 +512,7 @@ def test_add_from_disk(
yml = tempfile.mkstemp(dir=dir, suffix=".yaml")[1]
loc = tempfile.mkstemp(dir=dir)[1]
cache = tempfile.mkstemp(dir=dir, suffix=".sqlite")[1]
mocked_db_connect.return_value = self.collection
mocked_db_connect.return_value = self.collection, self.app, self.app_context
mocked_get_standards_files_from_disk.return_value = [yml for i in range(0, 3)]
mocked_export.return_value = [
defs.CRE(name="c0"),
Expand Down Expand Up @@ -557,7 +558,7 @@ def test_review_osib_from_file(
loc = tempfile.mkstemp(dir=dir)[1]
cach = tempfile.mkstemp(dir=dir)[1]
mocked_prepare_for_review.return_value = (loc, cach)
mocked_db_connect.return_value = self.collection
mocked_db_connect.return_value = self.collection, self.app, self.app_context
mocked_read_osib_yaml.return_value = [{"osib": "osib"}]
mocked_try_from_file.return_value = [
Osib_tree(aliases=[Osib_id("t1")]),
Expand Down Expand Up @@ -619,7 +620,7 @@ def test_add_osib_from_file(
osib_yaml = tempfile.mkstemp(dir=dir, suffix=".yaml")[1]
loc = tempfile.mkstemp(dir=dir)[1]
cache = tempfile.mkstemp(dir=dir, suffix=".sqlite")[1]
mocked_db_connect.return_value = self.collection
mocked_db_connect.return_value = self.collection, self.app, self.app_context
mocked_read_osib_yaml.return_value = [{"osib": "osib"}]
mocked_try_from_file.return_value = [
odefs.Osib_tree(aliases=[Osib_id("t1")]),
Expand Down Expand Up @@ -663,14 +664,81 @@ def test_export_to_osib(
# osib_yaml = tempfile.mkstemp(dir=dir,suffix=".yaml")[1]
loc = tempfile.mkstemp(dir=dir)[1]
cache = tempfile.mkstemp(dir=dir, suffix=".sqlite")[1]
mocked_db_connect.return_value = self.collection
mocked_db_connect.return_value = self.collection, self.app, self.app_context
mocked_cre2osib.return_value = odefs.Osib_tree(aliases=[Osib_id("t1")])
mocked_export.return_value = [defs.CRE(name="c0")]

main.export_to_osib(file_loc=f"{dir}/osib.yaml", cache=cache)
mocked_db_connect.assert_called_with(path=cache)
mocked_cre2osib.assert_called_with([defs.CRE(name="c0")])

def test_compare_datasets(self):
    """compare_datasets must report differences between two non-identical DBs.

    Builds one dataset with linked CREs/standards and a second, divergent
    dataset, then asserts the computed diff is non-empty.
    """
    _, t1 = tempfile.mkstemp()
    _, tdiff = tempfile.mkstemp()
    # NOTE(review): mkstemp returns temp *files*, not directories —
    # confirm self.tmpdirs teardown handles plain files
    self.tmpdirs.extend([t1, tdiff])

    c0 = defs.CRE(id="111-000", description="CREdesc", name="CREname")
    s456 = defs.Standard(
        subsection="4.5.6",
        section="FooStand",
        name="BarStand",
        hyperlink="https://example.com",
        tags=["a", "b", "c"],
    )
    c1 = defs.CRE(
        id="111-001",
        description="Groupdesc",
        name="GroupName",
        links=[defs.Link(document=s456)],
    )
    s_unlinked = defs.Standard(
        subsection="4.5.6",
        section="Unlinked",
        name="Unlinked",
        hyperlink="https://example.com",
    )

    # first dataset: two linked CREs, one linked and one unlinked standard
    connection_1, app1, context1 = main.db_connect(path=t1)
    sqla.create_all(app=app1)
    connection_1.graph.graph = db.CRE_Graph.load_cre_graph(connection_1.session)
    connection_1.add_cre(c0)
    connection_1.add_node(s_unlinked)
    connection_1.add_link(connection_1.add_cre(c1), connection_1.add_node(s456))

    # second dataset: deliberately divergent (extra CRE, missing standards)
    connection_diff, appdiff, contextdiff = main.db_connect(path=tdiff)
    connection_diff.graph.graph = db.CRE_Graph.load_cre_graph(
        connection_diff.session
    )
    sqla.create_all(app=appdiff)
    connection_diff.add_cre(c0)
    connection_diff.add_cre(defs.CRE(id="000-111", name="asdfa232332sdf"))

    # datasets differ, so the diff must not be four empty dicts
    self.assertNotEqual(main.compare_datasets(t1, tdiff), [{}, {}, {}, {}])

    contextdiff.pop()
    context1.pop()

# def test_prepare_for_Review(self):
# raise NotImplementedError

Expand Down

0 comments on commit 0ce5b86

Please sign in to comment.