DPDB #SAT v1
working output
TODO: maybe reformat the bag-labels
VaeterchenFrost committed Apr 6, 2020
1 parent 64a01d4 commit 6cc37ed
Showing 86 changed files with 10,039 additions and 23 deletions.
satvisualization_repo/satvisu/construct_dpdb_visu.py (71 changes: 48 additions & 23 deletions)
@@ -8,6 +8,7 @@
 """
 import psycopg2 as pg
 import itertools
+import json
 from more_itertools import locate
 
 from dijkstra import bidirectional_dijkstra as find_path, convert_to_adj
@@ -60,8 +61,17 @@ def read_problem(cursor, problem):
                    "public.problem WHERE id={}".format(problem))
     (name, ptype, num_bags, tree_width,
      setup_start_time, calc_start_time, end_time) = cursor.fetchone()
-    return (num_vars, num_clauses, model_count, name, ptype, num_bags, tree_width,
-            setup_start_time, calc_start_time, end_time)
+    return (
+        num_vars,
+        num_clauses,
+        model_count,
+        name,
+        ptype,
+        num_bags,
+        tree_width,
+        setup_start_time,
+        calc_start_time,
+        end_time)
 
 
 def read_clauses(cursor, problem):
@@ -133,33 +143,45 @@ def readTimeline(cursor, problem, edgearray):
     array of bag ids and, where solved, solution tables.
     """
-    result = list()
+    timeline = list()
     adj = convert_to_adj(edgearray)
     cursor.execute(
         "SELECT node FROM public.p{}_td_node_status".format(problem))
     order_solved = list(flatten(cursor.fetchall()))
     last = order_solved[-1]  # tour sol -> through result nodes along the edges
     startpath = find_path(adj, last, order_solved[0])
-    result = [[bag] for bag in startpath[1]]
-    # add the other bags in order_solved to the result
+    timeline = [[bag] for bag in startpath[1]]
+    # add the other bags in order_solved to the timeline
     last = order_solved[0]
     for bag in order_solved:
         path = find_path(adj, last, bag)
-        for intermed in path[1][1:]:
-            result.append([intermed])
+        for intermed in path[1][1:]:
+            timeline.append([intermed])
         # query column names
         cursor.execute(
             "SELECT column_name FROM INFORMATION_SCHEMA.COLUMNS "
             "WHERE TABLE_NAME = 'p{}_td_node_{}'".format(problem, bag))
         column_names = list(flatten(cursor.fetchall()))
         print('column_names', column_names)
         # get solutions
         cursor.execute(
             "SELECT * FROM public.p{}_td_node_{}".format(problem, bag))
         solution_raw = cursor.fetchall()
 
-        solution = [bag, []]
         print('solution_raw', solution_raw)
+        # check for nulled variables - assuming whole columns are nulled:
+        columns_notnull = [column_names[i] for i, x in
+                           enumerate(solution_raw[0]) if x is not None]
+        solution = [bag,
+                    [[columns_notnull,
+                      *list(map(lambda row: [int(v) for v in row if v is not None],
+                                solution_raw))],
+                     "sol bag " + str(bag),
+                     "sum: " + str(sum([li[-1] for li in solution_raw])),
+                     True]]
+        timeline.append(solution)
         last = bag
-    return result
+
+    return timeline
 
 
 def read_edgearray(cursor, problem):
@@ -188,8 +210,11 @@ def create_json(db, problem=1):
                       "edgearray": edgearray,
                       "labeldict": labeldict,
                       "numVars": num_vars}
-        print(treeDecJson)
-        print(readTimeline(cur, problem, edgearray))
+        timeline = readTimeline(cur, problem, edgearray)
+        return {"clausesJson": clausesJson,
+                "tdTimeline": timeline,
+                "treeDecJson": treeDecJson}
     except (Exception, pg.DatabaseError) as error:
         print(error)
@@ -220,16 +245,16 @@ def connect():
 
         # close the communication with the PostgreSQL
         cur.close()
-        create_json(conn, 5)
 
     except (Exception, pg.DatabaseError) as error:
         print(error)
 
     finally:
         if conn is not None:
             conn.close()
             print('Database connection closed.')
+    return conn
 
 
 if __name__ == "__main__":
-    connect()
+    db = connect()
+    resultjson = create_json(db, problem=5)
+    with open('dbjson.json', 'w') as outfile:
+        json.dump(resultjson, outfile, sort_keys=True, indent=2,
+                  ensure_ascii=False)
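
For orientation (not part of the commit): a single solved-bag entry that readTimeline appends to the timeline has the nested shape below. This is a hand-made sketch; the column names and values are invented, and the real contents depend on the p{problem}_td_node_{bag} table for that bag.

    # Hypothetical example of one timeline entry for bag 3, assuming the node
    # table had columns v1, v2 and a trailing count column (values invented):
    entry = [3,                           # bag id
             [[["v1", "v2", "n"],         # columns_notnull, used as header row
               [0, 1, 2],                 # one list per row of the node table
               [1, 1, 4]],
              "sol bag 3",                # caption built from the bag id
              "sum: 6",                   # sum over the last value of each raw row
              True]]                      # flag marking this as a solution table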
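Likewise, the dict that create_json now returns, and that the __main__ block dumps to dbjson.json, nests three sub-objects. The sketch below is restricted to the keys visible in this diff; treeDecJson may carry further keys above the shown context, and all values are placeholders, not real output.

    # Hypothetical layout of the dumped dbjson.json (placeholder values):
    resultjson = {
        "clausesJson": clausesJson,         # clause data, see read_clauses
        "tdTimeline": [[1], [2], entry],    # bag visits and solution tables
        "treeDecJson": {
            "edgearray": [(1, 2), (2, 3)],  # edges of the tree decomposition
            "labeldict": {},                # bag labels (see the commit TODO)
            "numVars": 4,
            # ...possibly further keys outside the visible diff context
        },
    }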
