fix a SSMat sqlite3 fatal error. promote decorator

parent f1477e03da
commit 442d46cdd9

SuperScriptDecorator/DecoratorBB.py        0    Normal file
SuperScriptDecorator/DecoratorCore.old.py  710  Normal file

SuperScriptDecorator/DecoratorCore.old.py
@@ -0,0 +1,710 @@
import sqlite3
import DecoratorConstValue as dcv
import json
import CustomConfig
import Progressbar

def run():
    exportDb = sqlite3.connect(CustomConfig.export_db)
    exportDb.text_factory = lambda x: x.decode(CustomConfig.database_encoding, errors="ignore")
    decorateDb = sqlite3.connect(CustomConfig.decorated_db)

    # init table
    print('Init decorate database...')
    initDecorateDb(decorateDb)
    decorateDb.commit()

    # decorate graph
    print('Generating gragh list...')
    graphList = []
    decorateGraph(exportDb, decorateDb, graphList)

    # decorate each graph
    print('Generating graph...')
    currentGraphBlockCell = {}
    Progressbar.initProgressbar(len(graphList))
    for i in graphList:
        currentGraphBlockCell.clear()
        buildBlock(exportDb, decorateDb, i, currentGraphBlockCell)
        graphPIO = buildCell(exportDb, decorateDb, i, currentGraphBlockCell)
        buildLink(exportDb, decorateDb, i, currentGraphBlockCell, graphPIO)

        Progressbar.stepProgressbar()
    Progressbar.finProgressbar()

    # export information
    print('Generating info...')
    buildInfo(exportDb, decorateDb)

    # give up all change of eexport.db (because no change)
    exportDb.close()
    decorateDb.commit()
    decorateDb.close()

def initDecorateDb(db):
    cur = db.cursor()
    cur.execute("CREATE TABLE graph([graph] INTEGER, [graph_name] TEXT, [width] INTEGER, [height] INTEGER, [index] INTEGER, [belong_to] TEXT);")
    cur.execute("CREATE TABLE info([target] INTEGER, [attach_bb] INTEGER, [is_setting] INTEGER, [name] TEXT, [field] TEXT, [data] TEXT);")

    cur.execute("CREATE TABLE block([belong_to_graph] INETGER, [thisobj] INTEGER, [name] TEXT, [assist_text] TEXT, [pin-ptarget] TEXT, [pin-pin] TEXT, [pin-pout] TEXT, [pin-bin] TEXT, [pin-bout] TEXT, [x] REAL, [y] REAL, [width] REAL, [height] REAL, [expandable] INTEGER);")
    cur.execute("CREATE TABLE cell([belong_to_graph] INETGER, [thisobj] INTEGER, [name] TEXT, [assist_text] TEXT, [x] REAL, [y] REAL, [type] INTEGER);")
    cur.execute("CREATE TABLE link([belong_to_graph] INETGER, [delay] INTEGER, [start_interface] INTEGER, [end_interface] INTEGER, [startobj] INTEGER, [endobj] INTEGER, [start_type] INTEGER, [end_type] INTEGER, [start_index] INTEGER, [end_index] INTEGER, [x1] REAL, [y1] REAL, [x2] REAL, [y2] REAL);")

def decorateGraph(exDb, deDb, graph):
    exCur = exDb.cursor()
    deCur = deDb.cursor()
    scriptMap = {}

    exCur.execute("SELECT [behavior], [index], [name] FROM script;")
    while True:
        lines = exCur.fetchone()
        if lines == None:
            break
        scriptMap[lines[0]] = (lines[1], lines[2])

    exCur.execute("SELECT [thisobj], [type], [name] FROM behavior WHERE [type] != 0;")
    while True:
        lines = exCur.fetchone()
        if lines == None:
            break

        # add into global graph list
        graph.append(lines[0])

        # width and height will be computed by following method and use update
        # statement to change it
        if lines[1] == 1:
            # script
            deCur.execute("INSERT INTO graph VALUES(?, ?, 0, 0, ?, ?)", (lines[0], lines[2], scriptMap[lines[0]][0], scriptMap[lines[0]][1]))
        else:
            # sub bb
            deCur.execute("INSERT INTO graph VALUES(?, ?, 0, 0, -1, '')", (lines[0], lines[2]))

def buildBlock(exDb, deDb, target, currentGraphBlockCell):
    exCur = exDb.cursor()
    deCur = deDb.cursor()

    # sort inner bb
    # use current graph input as the start point
    treeRoot = dcv.BBTreeNode(target, -1)
    processedBB = set()
    # layer start from 2, 0 is occupied for pLocal, 1 is occupied for pOper
    arrangedLayer = recursiveBuildBBTree(treeRoot, exCur, processedBB, 2, 0, target)

    # get no linked bb and place them. linked bb position will be computed
    # following
    # calc each bb's x postion, as for y, calc later
    # flat bb tree
    arrangedLayer+=1
    singleBB = set()
    bbResult = {}
    bb_layer_map = {}
    baseX = dcv.GRAPH_CONTENTOFFSET_X
    exCur.execute('SELECT [thisobj], [name], [type], [proto_name], [pin_count] FROM behavior WHERE parent == ?', (target,))
    for i in exCur.fetchall():
        pinSplit = i[4].split(',')
        bbCache = dcv.BBResult(i[1], i[3], pinSplit[1], pinSplit[2], pinSplit[3], pinSplit[4], (i[0] if i[2] != 0 else -1))
        bbCache.computSize()
        if i[0] not in processedBB:
            # single bb, process it
            singleBB.add(i[0])
            bbCache.x = baseX
            baseX += bbCache.width + dcv.GRAPH_BB_SPAN
            bb_layer_map[i[0]] = arrangedLayer

        bbResult[i[0]] = bbCache

    recursiveCalcBBX(treeRoot, dcv.GRAPH_CONTENTOFFSET_X, bbResult, bb_layer_map)

    # calc poper
    allBB = processedBB | singleBB
    processedOper = set()
    pluggedOper = {}
    occupiedLayerCountForSpecificBB = {}
    exCur.execute('SELECT [thisobj] FROM pOper WHERE [belong_to] == ?', (target,))
    newCur = exDb.cursor()
    newCur2 = exDb.cursor()
    for i in exCur.fetchall():
        if i[0] in processedOper:
            continue

        # check current bout, plugin into the first bb
        newCur.execute("SELECT [output_obj] FROM pLink WHERE ([input_obj] == ? AND [output_type] == ? AND [output_is_bb] == 1)", (i[0], dcv.dbPLinkInputOutputType.PIN))
        for j in newCur.fetchall():
            if j[0] in allBB:
                # can be plugin
                # try get tree
                if j[0] not in pluggedOper.keys():
                    pluggedOper[j[0]] = {}
                recursiveBuildOperTree(i[0], bb_layer_map, processedOper, occupiedLayerCountForSpecificBB, newCur2, 1, j[0], target, pluggedOper[j[0]])
                # exit for due to have found a proper host bb
                break

    # calc layer position
    layer_height = {}
    layer_y = {}
    layer_height[0] = 25
    layer_height[1] = 50
    for i in bb_layer_map.keys():
        curLayer = bb_layer_map[i]
        if curLayer not in layer_height.keys():
            layer_height[curLayer] = bbResult[i].height
        else:
            layer_height[curLayer] = max(layer_height.get(curLayer, 0), bbResult[i].height)
    layer_height[arrangedLayer] = layer_height.get(arrangedLayer, 0) # make sure misc bb height exist
    layer_height[2] = layer_height.get(2, 0) # make sure at least have a bb layer (when there are no bb in a map)

    # calc bb Y
    baseY = dcv.GRAPH_CONTENTOFFSET_Y
    for i in range(arrangedLayer + 1):
        baseY += layer_height[i] + dcv.GRAPH_LAYER_SPAN
        baseY += occupiedLayerCountForSpecificBB.get(i, 0) * dcv.GRAPH_SPAN_BB_POPER # add oper occipation
        layer_y[i] = baseY
    for i in bbResult.keys():
        cache = bbResult[i]
        layer = bb_layer_map[i]
        cache.y = layer_y[layer] - layer_height[layer]

    # calc oper position
    # flat oper tree
    operResult = {}
    exCur.execute('SELECT [thisobj], [op] FROM pOper WHERE [belong_to] == ?', (target,))
    homelessOperCurrentX = dcv.GRAPH_CONTENTOFFSET_X
    for i in exCur.fetchall():
        if i[0] not in processedOper:
            # homeless oper
            cache2 = dcv.OperResult(i[1])
            cache2.computSize()
            cache2.x = homelessOperCurrentX
            cache2.y = layer_y[1] - cache2.height
            homelessOperCurrentX += cache2.width + dcv.GRAPH_BB_SPAN
            operResult[i[0]] = cache2

    for i in pluggedOper.keys(): # plugged oper
        cache = bbResult[i]
        for j in pluggedOper[i]:
            jCache = pluggedOper[i][j]
            baseX = cache.x
            for q in jCache:
                exCur.execute("SELECT [op] FROM pOper WHERE [thisobj] == ?", (q,))
                cache2 = dcv.OperResult(exCur.fetchone()[0])
                cache2.computSize()
                cache2.x = baseX
                baseX += cache2.width + dcv.GRAPH_BB_SPAN
                cache2.y = cache.y - j * dcv.GRAPH_SPAN_BB_POPER
                operResult[q] = cache2

    # query bb pin's data
    listCache = []
    listItemCache = None
    for i in allBB:
        cache = bbResult[i]
        exCur.execute("SELECT [thisobj], [name], [type] FROM pTarget WHERE [belong_to] == ?;", (i,))
        temp = exCur.fetchone()
        if temp == None:
            cache.ptargetData = '{}'
        else:
            cache.ptargetData = json.dumps(dcv.PinInformation(temp[0], temp[1], temp[2]), cls = dcv.JsonCustomEncoder)

        listCache.clear()
        exCur.execute("SELECT [thisobj], [name] FROM bIn WHERE [belong_to] == ? ORDER BY [index];", (i,))
        for j in exCur.fetchall():
            listItemCache = dcv.PinInformation(j[0], j[1], '')
            listCache.append(listItemCache)
        cache.binData = json.dumps(listCache, cls = dcv.JsonCustomEncoder)

        listCache.clear()
        exCur.execute("SELECT [thisobj], [name] FROM bOut WHERE [belong_to] == ? ORDER BY [index];", (i,))
        for j in exCur.fetchall():
            listItemCache = dcv.PinInformation(j[0], j[1], '')
            listCache.append(listItemCache)
        cache.boutData = json.dumps(listCache, cls = dcv.JsonCustomEncoder)

        listCache.clear()
        exCur.execute("SELECT [thisobj], [name], [type] FROM pIn WHERE [belong_to] == ? ORDER BY [index];", (i,))
        for j in exCur.fetchall():
            listItemCache = dcv.PinInformation(j[0], j[1], j[2])
            listCache.append(listItemCache)
        cache.pinData = json.dumps(listCache, cls = dcv.JsonCustomEncoder)

        listCache.clear()
        exCur.execute("SELECT [thisobj], [name], [type] FROM pOut WHERE [belong_to] == ? ORDER BY [index];", (i,))
        for j in exCur.fetchall():
            listItemCache = dcv.PinInformation(j[0], j[1], j[2])
            listCache.append(listItemCache)
        cache.poutData = json.dumps(listCache, cls = dcv.JsonCustomEncoder)

    # query oper pin's data
    for i in operResult.keys():
        cache = operResult[i]

        listCache.clear()
        exCur.execute("SELECT [thisobj], [name], [type] FROM pIn WHERE [belong_to] == ? ORDER BY [index];", (i,))
        for j in exCur.fetchall():
            listItemCache = dcv.PinInformation(j[0], j[1], j[2])
            listCache.append(listItemCache)
        cache.pinData = json.dumps(listCache, cls = dcv.JsonCustomEncoder)

        listCache.clear()
        exCur.execute("SELECT [thisobj], [name], [type] FROM pOut WHERE [belong_to] == ? ORDER BY [index];", (i,))
        for j in exCur.fetchall():
            listItemCache = dcv.PinInformation(j[0], j[1], j[2])
            listCache.append(listItemCache)
        cache.poutData = json.dumps(listCache, cls = dcv.JsonCustomEncoder)

    # write to database and return
    for i in bbResult.keys():
        cache = bbResult[i]
        currentGraphBlockCell[i] = dcv.BlockCellItem(cache.x, cache.y, cache.width, cache.height)
        deCur.execute('INSERT INTO block VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)',
            (target, i, cache.name, cache.assistName, cache.ptargetData, cache.pinData, cache.poutData, cache.binData, cache.boutData, cache.x, cache.y, cache.width, cache.height, cache.expandable))
    for i in operResult.keys():
        cache = operResult[i]
        currentGraphBlockCell[i] = dcv.BlockCellItem(cache.x, cache.y, cache.width, cache.height)
        deCur.execute("INSERT INTO block VALUES (?, ?, ?, '', '{}', ?, ?, '[]', '[]', ?, ?, ?, ?, -1)",
            (target, i, cache.name, cache.pinData, cache.poutData, cache.x, cache.y, cache.width, cache.height))

def recursiveBuildBBTree(node, exCur, processedBB, layer, depth, graphId):
    realLinkedBB = set()
    # find links
    exCur.execute("SELECT [output_obj] FROM bLink WHERE ([input_obj] == ? AND [input_type] == ? AND [belong_to] = ?) ORDER BY [input_index] ASC;",
        (node.bb, (dcv.dbBLinkInputOutputType.INPUT if depth == 0 else dcv.dbBLinkInputOutputType.OUTPUT), graphId))
    for i in exCur.fetchall():
        if i[0] != graphId: # omit self
            realLinkedBB.add(i[0])

    if (len(realLinkedBB) == 0):
        return layer

    # ignore duplicated bb
    # calc need processed bb first
    # and register all gotten bb. for preventing infinity resursive func and
    # keep bb tree structure
    realLinkedBB = realLinkedBB - processedBB
    processedBB.update(realLinkedBB)

    # iterate each bb
    for i in realLinkedBB:
        # recursive execute this method
        newNode = dcv.BBTreeNode(i, layer)
        layer = recursiveBuildBBTree(newNode, exCur, processedBB, layer, depth + 1, graphId)
        # add new node into list and ++layer
        layer+=1
        node.nodes.append(newNode)

    # minus extra ++ due to for
    if (len(realLinkedBB) != 0):
        layer-=1

    return layer

def recursiveCalcBBX(node, baseX, resultList, layerMap):
    maxExpand = 0
    for i in node.nodes:
        layerMap[i.bb] = i.layer
        resultList[i.bb].x = baseX
        maxExpand = max(maxExpand, resultList[i.bb].width)

    for i in node.nodes:
        recursiveCalcBBX(i, baseX + maxExpand + dcv.GRAPH_BB_SPAN, resultList, layerMap)

def recursiveBuildOperTree(oper, bb_layer_map, processedOper, occupiedLayerMap, exCur, sublayer, bb, graphId, subLayerColumnMap):
    if oper in processedOper:
        return

    # for avoid fucking export parameter feature. check whether self is
    # current graph's memeber
    exCur.execute("SELECT [belong_to] FROM pOper WHERE [thisobj] == ?;", (oper,))
    if (exCur.fetchone()[0] != graphId):
        # fuck export param, exit
        return

    # make sure sub layer column map is ok
    if sublayer not in subLayerColumnMap.keys():
        subLayerColumnMap[sublayer] = []

    # register self
    # mark processed
    processedOper.add(oper)
    subLayerColumnMap[sublayer].append(oper)

    # record layer occupation
    layer = bb_layer_map[bb]
    occupiedLayerMap[layer] = max(occupiedLayerMap.get(layer, -1), sublayer)

    # iterate sub item
    exCur.execute("SELECT [input_obj] FROM pLink WHERE ([output_obj] == ? AND [input_type] == ? AND [input_is_bb] == 0) ORDER BY [output_index];", (oper, dcv.dbPLinkInputOutputType.POUT))
    res = []
    for i in exCur.fetchall():
        res.append(i[0])

    for i in res:
        recursiveBuildOperTree(i, bb_layer_map, processedOper, occupiedLayerMap, exCur, sublayer + 1, bb, graphId, subLayerColumnMap)

def buildCell(exDb, deDb, target, currentGraphBlockCell):
    exCur = exDb.cursor()
    deCur = deDb.cursor()
    # prepare block set
    blockSet = set()
    for i in currentGraphBlockCell.keys():
        blockSet.add(i)

    # find current graph's pio bio
    boutx = set()
    pouty = set()
    graphPIO = set()

    # bOut.x and pOut.y data is not confirmed, when graph size was confirmed,
    # update it
    exCur.execute("SELECT [thisobj], [name], [index] FROM bIn WHERE [belong_to] == ?", (target,))
    for i in exCur.fetchall():
        x = 0
        y = dcv.GRAPH_BOFFSET + i[2] * (dcv.BB_PBSIZE + dcv.GRAPH_BSPAN)
        currentGraphBlockCell[i[0]] = dcv.BlockCellItem(x, y, dcv.BB_PBSIZE, dcv.BB_PBSIZE)
        deCur.execute("INSERT INTO cell VALUES (?, ?, ?, '', ?, ?, ?)", (target, i[0], i[1], x, y, dcv.CellType.BIO))
    exCur.execute("SELECT [thisobj], [name], [index] FROM bOut WHERE [belong_to] == ?", (target,))
    for i in exCur.fetchall():
        x = 0
        y = dcv.GRAPH_BOFFSET + i[2] * (dcv.BB_PBSIZE + dcv.GRAPH_BSPAN)
        currentGraphBlockCell[i[0]] = dcv.BlockCellItem(x, y, dcv.BB_PBSIZE, dcv.BB_PBSIZE)
        deCur.execute("INSERT INTO cell VALUES (?, ?, ?, '', ?, ?, ?)", (target, i[0], i[1], x, y, dcv.CellType.BIO))
        boutx.add(i[0])

    exCur.execute("SELECT [thisobj], [name], [index], [type] FROM pIn WHERE [belong_to] == ?", (target,))
    for i in exCur.fetchall():
        x = dcv.GRAPH_POFFSET + i[2] * (dcv.BB_PBSIZE + dcv.GRAPH_PSPAN)
        y = 0
        currentGraphBlockCell[i[0]] = dcv.BlockCellItem(x, y, dcv.BB_PBSIZE, dcv.BB_PBSIZE)
        deCur.execute("INSERT INTO cell VALUES (?, ?, ?, ?, ?, ?, ?)", (target, i[0], i[1], i[3], x, y, dcv.CellType.PIO))
        graphPIO.add(i[0])
    exCur.execute("SELECT [thisobj], [name], [index], [type] FROM pOut WHERE [belong_to] == ?", (target,))
    for i in exCur.fetchall():
        x = dcv.GRAPH_POFFSET + i[2] * (dcv.BB_PBSIZE + dcv.GRAPH_PSPAN)
        y = 0
        currentGraphBlockCell[i[0]] = dcv.BlockCellItem(x, y, dcv.BB_PBSIZE, dcv.BB_PBSIZE)
        deCur.execute("INSERT INTO cell VALUES (?, ?, ?, ?, ?, ?, ?)", (target, i[0], i[1], i[3], x, y, dcv.CellType.PIO))
        graphPIO.add(i[0])
        pouty.add(i[0])
    exCur.execute("SELECT [thisobj], [name], [type] FROM pTarget WHERE [belong_to] == ?", (target,))
    cache = exCur.fetchone()
    if cache != None:
        currentGraphBlockCell[cache[0]] = dcv.BlockCellItem(0, 0, dcv.BB_PBSIZE, dcv.BB_PBSIZE)
        deCur.execute("INSERT INTO cell VALUES (?, ?, ?, ?, 0, 0, ?)", (target, i[0], i[1], i[2], dcv.CellType.PTARGET))
        graphPIO.add(cache[0])

    # query all plocal
    allLocal = set()
    localUsageCounter = {}
    exCur.execute("SELECT [thisobj], [name], [type] FROM pLocal WHERE [belong_to] == ?;", (target,))
    for i in exCur.fetchall():
        allLocal.add(i[0])
        localUsageCounter[i[0]] = dcv.LocalUsageItem(0, False, dcv.LocalUsageType.PLOCAL)

    # query all links(don't need to consider export pIO, due to it will not add
    # any shortcut)
    # !! the same if framework in pLink generator function !! SHARED
    createdShortcut = set()
    exCur.execute("SELECT * FROM pLink WHERE [belong_to] == ?", (target,))
    for i in exCur.fetchall():

        # analyse 5 chancee one by one
        if (i[7] == dcv.dbPLinkInputOutputType.PTARGET or i[7] == dcv.dbPLinkInputOutputType.PIN):
            if (i[3] == dcv.dbPLinkInputOutputType.PLOCAL):
                if i[2] in allLocal or i[2] in createdShortcut:
                    cache = localUsageCounter[i[2]]
                else:
                    cache = dcv.LocalUsageItem(0, True, dcv.LocalUsageType.PLOCAL)
                    localUsageCounter[i[2]] = cache
                    createdShortcut.add(i[2])

                cache.count += 1
                cache.lastUse = i[6]
                cache.lastDirection = 0
                cache.lastIndex = i[9]

            elif (i[3] == dcv.dbPLinkInputOutputType.PIN):
                if i[2] == target:
                    continue # ignore self pIn/pOut. it doesn't need any shortcut
                if i[2] not in blockSet:
                    if i[0] not in createdShortcut:
                        cache = dcv.LocalUsageItem(0, True, dcv.LocalUsageType.PIN)
                        localUsageCounter[i[0]] = cache
                        createdShortcut.add(i[0])
                    else:
                        cache = localUsageCounter[i[0]]

                    cache.count+=1
                    cache.lastUse = i[6]
                    cache.lastDirection = 0
                    cache.lastIndex = i[9]
            elif (i[3] == dcv.dbPLinkInputOutputType.PATTR): # for attribute using
                if i[2] not in createdShortcut:
                    cache = dcv.LocalUsageItem(0, True, dcv.LocalUsageType.PATTR)
                    localUsageCounter[i[2]] = cache
                    createdShortcut.add(i[2])
                else:
                    cache = localUsageCounter[i[2]]

                cache.count+=1
                cache.lastUse = i[6]
                cache.lastDirection = 0
                cache.lastIndex = i[9]
            else:
                if i[2] not in blockSet:
                    if i[0] not in createdShortcut:
                        cache = dcv.LocalUsageItem(0, True, dcv.LocalUsageType.POUT)
                        localUsageCounter[i[0]] = cache
                        createdShortcut.add(i[0])
                    else:
                        cache = localUsageCounter[i[0]]

                    cache.count+=1
                    cache.lastUse = i[6]
                    cache.lastDirection = 1
                    cache.lastIndex = i[9]
        else:
            if (i[7] == dcv.dbPLinkInputOutputType.PLOCAL):
                if i[6] in allLocal or i[6] in createdShortcut:
                    cache = localUsageCounter[i[6]]
                else:
                    cache = dcv.LocalUsageItem(0, True, dcv.LocalUsageType.PLOCAL)
                    localUsageCounter[i[6]] = cache
                    createdShortcut.add(i[6])

                cache.count += 1
                cache.lastUse = i[2]
                cache.lastDirection = 1
                cache.lastIndex = i[5]
            else:
                if i[6] == target:
                    continue # ignore self pIn/pOut. it doesn't need any shortcut
                if i[6] not in blockSet:
                    if i[1] not in createdShortcut:
                        cache = dcv.LocalUsageItem(0, True, dcv.LocalUsageType.POUT)
                        localUsageCounter[i[1]] = cache
                        createdShortcut.add(i[1])
                    else:
                        cache = localUsageCounter[i[1]]

                    cache.count += 1
                    cache.lastUse = i[2]
                    cache.lastDirection = 1
                    cache.lastIndex = i[5]

    # apply all cells
    defaultCellIndex = 0
    for i in localUsageCounter.keys():
        cache = localUsageCounter[i]
        # comput x,y
        if (cache.count == 1):
            # attachable
            attachTarget = currentGraphBlockCell[cache.lastUse]
            (x, y) = computCellPosition(attachTarget.x, attachTarget.y, attachTarget.h, cache.lastDirection, cache.lastIndex)

        else:
            # place it in default area
            y = dcv.GRAPH_CONTENTOFFSET_Y
            x = dcv.GRAPH_CONTENTOFFSET_X + defaultCellIndex * (dcv.CELL_WIDTH + dcv.GRAPH_BB_SPAN)
            defaultCellIndex += 1
        # get information
        if (cache.internal_type == dcv.LocalUsageType.PIN):
            tableName = 'pIn'
        elif (cache.internal_type == dcv.LocalUsageType.POUT):
            tableName = 'pOut'
        elif (cache.internal_type == dcv.LocalUsageType.PATTR):
            tableName = 'pAttr'
        else:
            tableName = 'pLocal'
        exCur.execute("SELECT [name], [type] FROM {} WHERE [thisobj] == ?".format(tableName), (i,))
        temp = exCur.fetchone()

        # submit to database and map
        currentGraphBlockCell[i] = dcv.BlockCellItem(x, y, dcv.CELL_WIDTH, dcv.CELL_HEIGHT)
        deCur.execute("INSERT INTO cell VALUES (?, ?, ?, ?, ?, ?, ?)",
            (target, i, temp[0], temp[1], x, y, (dcv.CellType.SHORTCUT if cache.isshortcut else dcv.CellType.PLOCAL)))

    # comput size and update database and currentGraphBlockCell
    graphX = 0
    graphY = 0
    for key, values in currentGraphBlockCell.items():
        graphX = max(graphX, values.x + values.w)
        graphY = max(graphY, values.y + values.h)
    graphX += dcv.GRAPH_POFFSET
    graphY += dcv.GRAPH_BOFFSET

    deCur.execute("UPDATE graph SET [width] = ?, [height] = ? WHERE [graph] == ?", (graphX, graphY, target))

    # update bOut.x and pOut.y data
    for i in boutx:
        deCur.execute("UPDATE cell SET [x] = ? WHERE ([thisobj] == ? AND [belong_to_graph] == ?)", (graphX - dcv.BB_PBSIZE, i, target))
        currentGraphBlockCell[i].x = graphX - dcv.BB_PBSIZE
    for i in pouty:
        deCur.execute("UPDATE cell SET [y] = ? WHERE ([thisobj] == ? AND [belong_to_graph] == ?)", (graphY - dcv.BB_PBSIZE, i, target))
        currentGraphBlockCell[i].y = graphY - dcv.BB_PBSIZE

    return graphPIO

def computCellPosition(baseX, baseY, height, direction, index):
    if (index == -1):
        return (baseX, baseY - dcv.GRAPH_SPAN_BB_PLOCAL)

    if (direction == 0):
        return (baseX + dcv.BB_POFFSET + index * (dcv.BB_PBSIZE + dcv.BB_PSPAN), baseY - dcv.GRAPH_SPAN_BB_PLOCAL)
    else:
        return (baseX + dcv.BB_POFFSET + index * (dcv.BB_PBSIZE + dcv.BB_PSPAN), baseY + height + dcv.GRAPH_SPAN_BB_PLOCAL)

def buildLink(exDb, deDb, target, currentGraphBlockCell, graphPIO):
    exCur = exDb.cursor()
    deCur = deDb.cursor()

    # prepare block set
    blockSet = set()
    for i in currentGraphBlockCell.keys():
        blockSet.add(i)

    # bLink
    exCur.execute("SELECT * FROM bLink WHERE [belong_to] == ?", (target,))
    for i in exCur.fetchall():
        if i[3] == target:
            (x1, y1) = computLinkBTerminal(i[0], 0, -1, currentGraphBlockCell)
            bStartObj = i[0]
            bStartType = 0
            bStartIndex = -1
        else:
            (x1, y1) = computLinkBTerminal(i[3], i[4], i[5], currentGraphBlockCell)
            bStartObj = i[3]
            bStartType = i[4]
            bStartIndex = i[5]
        if i[6] == target:
            (x2, y2) = computLinkBTerminal(i[1], 0, -1, currentGraphBlockCell)
            bEndObj = i[1]
            bEndType = 0
            bEndIndex = -1
        else:
            (x2, y2) = computLinkBTerminal(i[6], i[7], i[8], currentGraphBlockCell)
            bEndObj = i[6]
            bEndType = i[7]
            bEndIndex = i[8]

        deCur.execute("INSERT INTO link VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);",
            (target, i[2], i[0], i[1], bStartObj, bEndObj, bStartType, bEndType, bStartIndex, bEndIndex, x1, y1, x2, y2))

    # pLink
    # !! the same if framework in cell generator function !! SHARED
    exCur.execute("SELECT * FROM pLink WHERE [belong_to] == ?", (target,))
    for i in exCur.fetchall():
        # analyse 5 chancee one by one
        if (i[7] == dcv.dbPLinkInputOutputType.PTARGET or i[7] == dcv.dbPLinkInputOutputType.PIN):
            if (i[3] == dcv.dbPLinkInputOutputType.PLOCAL):
                (x1, y1) = computLinkPTerminal(i[0], 0, -1, currentGraphBlockCell)
                (x2, y2) = computLinkPTerminal(i[6], 0, i[9], currentGraphBlockCell)
                deCur.execute("INSERT INTO link VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);",
                    (target, -1, i[0], i[1], i[2], i[6], 0, 0, -1, i[9], x1, y1, x2, y2))

            elif (i[3] == dcv.dbPLinkInputOutputType.PIN):
                (x2, y2) = computLinkPTerminal(i[6], 0, i[9], currentGraphBlockCell)
                if i[2] == target:
                    (x1, y1) = computLinkPTerminal(i[0], 0, -1, currentGraphBlockCell)
                    deCur.execute("INSERT INTO link VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);",
                        (target, -1, i[0], i[1], i[0], i[6], 0, 0, -1, i[9], x1, y1, x2, y2))
                else:
                    (x1, y1) = computLinkPTerminal(i[2], 0, i[5], currentGraphBlockCell)
                    deCur.execute("INSERT INTO link VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);",
                        (target, -1, i[0], i[1], i[2], i[6], 0, 0, i[5], i[9], x1, y1, x2, y2))
            elif (i[3] == dcv.dbPLinkInputOutputType.PATTR):
                (x1, y1) = computLinkPTerminal(i[0], 0, -1, currentGraphBlockCell)
                (x2, y2) = computLinkPTerminal(i[6], 0, i[9], currentGraphBlockCell)
                deCur.execute("INSERT INTO link VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);",
                    (target, -1, i[0], i[1], i[2], i[6], 0, 0, -1, i[9], x1, y1, x2, y2))
            else:
                if i[2] in blockSet: # process protencial pOut(shortcut) (because plocal input/input_obj
                    # output/output_obj is same, so don't need add for them)
                    (x1, y1) = computLinkPTerminal(i[2], 1, i[5], currentGraphBlockCell)
                else:
                    (x1, y1) = computLinkPTerminal(i[0], 1, i[5], currentGraphBlockCell)
                (x2, y2) = computLinkPTerminal(i[6], 0, i[9], currentGraphBlockCell)
                deCur.execute("INSERT INTO link VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);",
                    (target, -1, i[0], i[1], i[2], i[6], 1, 0, i[5], i[9], x1, y1, x2, y2))

        else:
            if (i[7] == dcv.dbPLinkInputOutputType.PLOCAL):
                (x1, y1) = computLinkPTerminal(i[2], 1, i[5], currentGraphBlockCell)
                (x2, y2) = computLinkPTerminal(i[1], 0, -1, currentGraphBlockCell)
                deCur.execute("INSERT INTO link VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);",
                    (target, -1, i[0], i[1], i[2], i[6], 1, 0, i[5], -1, x1, y1, x2, y2))
            else:
                (x1, y1) = computLinkPTerminal(i[2], 1, i[5], currentGraphBlockCell)
                if i[6] == target:
                    (x2, y2) = computLinkPTerminal(i[1], 0, -1, currentGraphBlockCell)
                    deCur.execute("INSERT INTO link VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);",
                        (target, -1, i[0], i[1], i[2], i[1], 1, 0, i[5], -1, x1, y1, x2, y2))
                else:
                    (x2, y2) = computLinkPTerminal(i[6], 1, i[9], currentGraphBlockCell)
                    deCur.execute("INSERT INTO link VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);",
                        (target, -1, i[0], i[1], i[2], i[6], 1, 1, i[5], i[9], x1, y1, x2, y2))

    # eLink
    exCur.execute("SELECT * FROM eLink WHERE [belong_to] == ?", (target,))
    for i in exCur.fetchall():
        (x1, y1) = computLinkPTerminal(i[0], 0, -1, currentGraphBlockCell)
        (x2, y2) = computLinkPTerminal(i[1], 0 if i[2] == 1 else 1, i[3], currentGraphBlockCell)
        deCur.execute("INSERT INTO link VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);",
            (target, -2, i[0], i[0], target, i[1], 0, 0 if i[2] == 1 else 1, -1, i[3], x1, y1, x2, y2))

def computLinkBTerminal(obj, xtype, index, currentGraphBlockCell):
    # index = -1 mean no offset, it will connect to graph io
    cache = currentGraphBlockCell[obj]
    return (cache.x if xtype == 0 else cache.x + cache.w - dcv.BB_PBSIZE,
        cache.y if index == -1 else (cache.y + dcv.BB_BOFFSET + index * (dcv.BB_PBSIZE + dcv.BB_BSPAN)))

def computLinkPTerminal(obj, ytype, index, currentGraphBlockCell):
    # ytype is not database type. it have the same meaning of LinkBTerminal,
    # indicating the position. 0 is keep origin position(for pIn and pTarget),
    # 1 is consider height(for pOut)
    cache = currentGraphBlockCell[obj]
    return (cache.x if index == -1 else (cache.x + dcv.BB_POFFSET + index * (dcv.BB_PBSIZE + dcv.BB_PSPAN)),
        cache.y if ytype == 0 else (cache.y + cache.h - dcv.BB_PBSIZE))

def buildInfo(exDb, deDb):
    exInfoCur = exDb.cursor()
    exQueryCur = exDb.cursor()
    deCur = deDb.cursor()

    # declare tiny storage for convenient query
    tinyStorageKey = 0
    tinyStorageBB = -1
    tinyStorageSetting = 0
    tinyStorageName = ""

    # export local data (including proto bb internal data)
    exInfoCur.execute("SELECT * FROM pData;")
    for i in exInfoCur.fetchall():
        attachBB = -1
        isSetting = 0
        infoName = ""

        if i[2] == tinyStorageKey:
            attachBB = tinyStorageBB
            isSetting = tinyStorageSetting
            infotName = tinyStorageName
        else:
            # clear storage first
            tinyStorageBB = -1
            tinyStorageSetting = 0
            tinyStorageName = ""

            # query correspond pLocal
            exQueryCur.execute("SELECT [belong_to], [is_setting], [name] FROM pLocal WHERE [thisobj] = ?", (i[2], ))
            plocalCache = exQueryCur.fetchone()
            if plocalCache is not None:
                # add setting config
                tinyStorageSetting = isSetting = plocalCache[1]
                tinyStorageName = infoName = plocalCache[2]
                # query bb again
                exQueryCur.execute("SELECT [thisobj] FROM behavior WHERE ([thisobj] = ? AND [type] = 0)", (plocalCache[0], ))
                behaviorCache = exQueryCur.fetchone()
                if behaviorCache is not None:
                    tinyStorageBB = attachBB = behaviorCache[0]

        deCur.execute("INSERT INTO info VALUES (?, ?, ?, ?, ?, ?)", (i[2], attachBB, isSetting, infoName, i[0], i[1]))

@@ -1,710 +1,93 @@
import sqlite3, json, collections
import DecoratorConstValue as dcv
import CustomConfig, Progressbar


class CompositionIngredients(object):
    def __init__(self, name: str, export_id: int, env_id: int):
        self.m_CompositionName: str = name
        self.m_ExportIndex = export_id
        self.m_EnvIndex = env_id

def _InitDecoratedDb(db: sqlite3.Connection):
    cur = db.cursor()
    cur.execute("CREATE TABLE [compositions] ([id] INTEGER, [name] TEXT, [export_id] INTEGER, [env_id] INTEGER);")

    cur.execute("CREATE TABLE graph([graph_ckid] INTEGER, [graph_name] TEXT, [hierarchy_ckid] TEXT, [export_id] INTEGER);")
    #cur.execute("CREATE TABLE info([target] INTEGER, [attach_bb] INTEGER, [is_setting] INTEGER, [name] TEXT, [field] TEXT, [data] TEXT);")

    #cur.execute("CREATE TABLE block([belong_to_graph] INETGER, [thisobj] INTEGER, [name] TEXT, [assist_text] TEXT, [pin-ptarget] TEXT, [pin-pin] TEXT, [pin-pout] TEXT, [pin-bin] TEXT, [pin-bout] TEXT, [x] REAL, [y] REAL, [width] REAL, [height] REAL, [expandable] INTEGER);")
    #cur.execute("CREATE TABLE cell([belong_to_graph] INETGER, [thisobj] INTEGER, [name] TEXT, [assist_text] TEXT, [x] REAL, [y] REAL, [type] INTEGER);")
    #cur.execute("CREATE TABLE link([belong_to_graph] INETGER, [delay] INTEGER, [start_interface] INTEGER, [end_interface] INTEGER, [startobj] INTEGER, [endobj] INTEGER, [start_type] INTEGER, [end_type] INTEGER, [start_index] INTEGER, [end_index] INTEGER, [x1] REAL, [y1] REAL, [x2] REAL, [y2] REAL);")

    db.commit()
    cur.close()

def _GenerateCompositions(cfg: CustomConfig.CustomConfig) -> tuple[tuple[CompositionIngredients], tuple[str], tuple[str]]:
    compositions: list[CompositionIngredients] = []
    exportdb: collections.OrderedDict = collections.OrderedDict()
    envdb: collections.OrderedDict = collections.OrderedDict()

    for entry in cfg.m_InputEntries:
        # check 2 database
        export_id = exportdb.get(entry.m_ExportDb, None)
        if export_id is None:
            export_id = len(exportdb)
            exportdb[entry.m_ExportDb] = export_id

        env_id = envdb.get(entry.m_EnvDb, None)
        if env_id is None:
            env_id = len(envdb)
            envdb[entry.m_EnvDb] = env_id

        # create record
        compositions.append(CompositionIngredients(entry.m_Name, export_id, env_id))

    return (
        tuple(compositions),
        tuple(exportdb.values()),
        tuple(envdb.values())
    )

def _UploadComposition(db: sqlite3.Connection, compositions: list[CompositionIngredients]):
    cur = db.cursor()
    for idx, ingredient in enumerate(compositions):
        cur.execute("INSERT INTO [compositions] VALUES(?, ?, ?, ?)",
            (idx, ingredient.m_CompositionName, ingredient.m_ExportIndex, ingredient.m_EnvIndex)
        )

    db.commit()
    cur.close()

def Run(cfg: CustomConfig.CustomConfig):
    # establish target database
    print('Opening decorated database...')
    decorateDb: sqlite3.Connection = sqlite3.connect(cfg.m_DecoratedDb)

    # init table
    print('Initializing decorated database...')
    _InitDecoratedDb(decorateDb)
    decorateDb.commit()

    # we need know which database we need analyse first
    print('Generating compositions...')
    (compositions, exportdb, envdb) = _GenerateCompositions(cfg)
    _UploadComposition(decorateDb, compositions)
    print(f'Analysation done. {len(exportdb)} Export DB and {len(envdb)} Env DB.')

    # process export
    print('Generating graphs...')
    progressbar: Progressbar.Prograssbar = Progressbar.Prograssbar(len(exportdb))
    for expid, exp in enumerate(exportdb):
        pass
    progressbar.Finish()

    # process env
    print('Generating infos...')
    progressbar = Progressbar.Prograssbar(len(envdb))
    for envid, env in enumerate(envdb):
        pass
    progressbar.Finish()

    # close database
    print('Closing decorated database...')
    decorateDb.commit()
    decorateDb.close()
||||||
|
|
||||||
def initDecorateDb(db):
|
|
||||||
cur = db.cursor()
|
|
||||||
cur.execute("CREATE TABLE graph([graph] INTEGER, [graph_name] TEXT, [width] INTEGER, [height] INTEGER, [index] INTEGER, [belong_to] TEXT);")
|
|
||||||
cur.execute("CREATE TABLE info([target] INTEGER, [attach_bb] INTEGER, [is_setting] INTEGER, [name] TEXT, [field] TEXT, [data] TEXT);")
|
|
||||||
|
|
||||||
cur.execute("CREATE TABLE block([belong_to_graph] INETGER, [thisobj] INTEGER, [name] TEXT, [assist_text] TEXT, [pin-ptarget] TEXT, [pin-pin] TEXT, [pin-pout] TEXT, [pin-bin] TEXT, [pin-bout] TEXT, [x] REAL, [y] REAL, [width] REAL, [height] REAL, [expandable] INTEGER);")
|
|
||||||
cur.execute("CREATE TABLE cell([belong_to_graph] INETGER, [thisobj] INTEGER, [name] TEXT, [assist_text] TEXT, [x] REAL, [y] REAL, [type] INTEGER);")
|
|
||||||
cur.execute("CREATE TABLE link([belong_to_graph] INETGER, [delay] INTEGER, [start_interface] INTEGER, [end_interface] INTEGER, [startobj] INTEGER, [endobj] INTEGER, [start_type] INTEGER, [end_type] INTEGER, [start_index] INTEGER, [end_index] INTEGER, [x1] REAL, [y1] REAL, [x2] REAL, [y2] REAL);")
|
|
||||||
|
|
||||||
def decorateGraph(exDb, deDb, graph):
|
|
||||||
exCur = exDb.cursor()
|
|
||||||
deCur = deDb.cursor()
|
|
||||||
scriptMap = {}
|
|
||||||
|
|
||||||
exCur.execute("SELECT [behavior], [index], [name] FROM script;")
|
|
||||||
while True:
|
|
||||||
lines = exCur.fetchone()
|
|
||||||
if lines == None:
|
|
||||||
break
|
|
||||||
scriptMap[lines[0]] = (lines[1], lines[2])
|
|
||||||
|
|
||||||
exCur.execute("SELECT [thisobj], [type], [name] FROM behavior WHERE [type] != 0;")
|
|
||||||
while True:
|
|
||||||
lines = exCur.fetchone()
|
|
||||||
if lines == None:
|
|
||||||
break
|
|
||||||
|
|
||||||
# add into global graph list
|
|
||||||
graph.append(lines[0])
|
|
||||||
|
|
||||||
# width and height will be computed by following method and use update
|
|
||||||
# statement to change it
|
|
||||||
if lines[1] == 1:
|
|
||||||
# script
|
|
||||||
deCur.execute("INSERT INTO graph VALUES(?, ?, 0, 0, ?, ?)", (lines[0], lines[2], scriptMap[lines[0]][0], scriptMap[lines[0]][1]))
|
|
||||||
else:
|
|
||||||
# sub bb
|
|
||||||
deCur.execute("INSERT INTO graph VALUES(?, ?, 0, 0, -1, '')", (lines[0], lines[2]))
|
|
||||||
|
|
||||||
def buildBlock(exDb, deDb, target, currentGraphBlockCell):
|
|
||||||
exCur = exDb.cursor()
|
|
||||||
deCur = deDb.cursor()
|
|
||||||
|
|
||||||
# sort inner bb
|
|
||||||
# use current graph input as the start point
|
|
||||||
treeRoot = dcv.BBTreeNode(target, -1)
|
|
||||||
processedBB = set()
|
|
||||||
# layer start from 2, 0 is occupied for pLocal, 1 is occupied for pOper
|
|
||||||
arrangedLayer = recursiveBuildBBTree(treeRoot, exCur, processedBB, 2, 0, target)
|
|
||||||
|
|
||||||
# get no linked bb and place them. linked bb position will be computed
|
|
||||||
# following
|
|
||||||
# calc each bb's x postion, as for y, calc later
|
|
||||||
# flat bb tree
|
|
||||||
arrangedLayer+=1
|
|
||||||
singleBB = set()
|
|
||||||
bbResult = {}
|
|
||||||
bb_layer_map = {}
|
|
||||||
baseX = dcv.GRAPH_CONTENTOFFSET_X
|
|
||||||
exCur.execute('SELECT [thisobj], [name], [type], [proto_name], [pin_count] FROM behavior WHERE parent == ?', (target,))
|
|
||||||
for i in exCur.fetchall():
|
|
||||||
pinSplit = i[4].split(',')
|
|
||||||
bbCache = dcv.BBResult(i[1], i[3], pinSplit[1], pinSplit[2], pinSplit[3], pinSplit[4], (i[0] if i[2] != 0 else -1))
|
|
||||||
bbCache.computSize()
|
|
||||||
if i[0] not in processedBB:
|
|
||||||
# single bb, process it
|
|
||||||
singleBB.add(i[0])
|
|
||||||
bbCache.x = baseX
|
|
||||||
baseX += bbCache.width + dcv.GRAPH_BB_SPAN
|
|
||||||
bb_layer_map[i[0]] = arrangedLayer
|
|
||||||
|
|
||||||
bbResult[i[0]] = bbCache
|
|
||||||
|
|
||||||
recursiveCalcBBX(treeRoot, dcv.GRAPH_CONTENTOFFSET_X, bbResult, bb_layer_map)
|
|
||||||
|
|
||||||
# calc poper
|
|
||||||
allBB = processedBB | singleBB
|
|
||||||
processedOper = set()
|
|
||||||
pluggedOper = {}
|
|
||||||
occupiedLayerCountForSpecificBB = {}
|
|
||||||
exCur.execute('SELECT [thisobj] FROM pOper WHERE [belong_to] == ?', (target,))
|
|
||||||
newCur = exDb.cursor()
|
|
||||||
newCur2 = exDb.cursor()
|
|
||||||
for i in exCur.fetchall():
|
|
||||||
if i[0] in processedOper:
|
|
||||||
continue
|
|
||||||
|
|
||||||
# check current bout, plugin into the first bb
|
|
||||||
newCur.execute("SELECT [output_obj] FROM pLink WHERE ([input_obj] == ? AND [output_type] == ? AND [output_is_bb] == 1)", (i[0], dcv.dbPLinkInputOutputType.PIN))
|
|
||||||
for j in newCur.fetchall():
|
|
||||||
if j[0] in allBB:
|
|
||||||
# can be plugin
|
|
||||||
# try get tree
|
|
||||||
if j[0] not in pluggedOper.keys():
|
|
||||||
pluggedOper[j[0]] = {}
|
|
||||||
recursiveBuildOperTree(i[0], bb_layer_map, processedOper, occupiedLayerCountForSpecificBB, newCur2, 1, j[0], target, pluggedOper[j[0]])
|
|
||||||
# exit for due to have found a proper host bb
|
|
||||||
break
|
|
||||||
|
|
||||||
|
|
||||||
# calc layer position
|
|
||||||
layer_height = {}
|
|
||||||
layer_y = {}
|
|
||||||
layer_height[0] = 25
|
|
||||||
layer_height[1] = 50
|
|
||||||
for i in bb_layer_map.keys():
|
|
||||||
curLayer = bb_layer_map[i]
|
|
||||||
if curLayer not in layer_height.keys():
|
|
||||||
layer_height[curLayer] = bbResult[i].height
|
|
||||||
else:
|
|
||||||
layer_height[curLayer] = max(layer_height.get(curLayer, 0), bbResult[i].height)
|
|
||||||
layer_height[arrangedLayer] = layer_height.get(arrangedLayer, 0) # make sure misc bb height exist
|
|
||||||
layer_height[2] = layer_height.get(2, 0) # make sure at least have a bb layer (when there are no bb in a map)
|
|
||||||
|
|
||||||
# calc bb Y
|
|
||||||
baseY = dcv.GRAPH_CONTENTOFFSET_Y
|
|
||||||
for i in range(arrangedLayer + 1):
|
|
||||||
baseY += layer_height[i] + dcv.GRAPH_LAYER_SPAN
|
|
||||||
baseY += occupiedLayerCountForSpecificBB.get(i, 0) * dcv.GRAPH_SPAN_BB_POPER # add oper occipation
|
|
||||||
layer_y[i] = baseY
|
|
||||||
for i in bbResult.keys():
|
|
||||||
cache = bbResult[i]
|
|
||||||
layer = bb_layer_map[i]
|
|
||||||
cache.y = layer_y[layer] - layer_height[layer]
|
|
||||||
|
|
||||||
# calc oper position
|
|
||||||
# flat oper tree
|
|
||||||
operResult = {}
|
|
||||||
exCur.execute('SELECT [thisobj], [op] FROM pOper WHERE [belong_to] == ?', (target,))
|
|
||||||
homelessOperCurrentX = dcv.GRAPH_CONTENTOFFSET_X
|
|
||||||
for i in exCur.fetchall():
|
|
||||||
if i[0] not in processedOper:
|
|
||||||
# homeless oper
|
|
||||||
cache2 = dcv.OperResult(i[1])
|
|
||||||
cache2.computSize()
|
|
||||||
cache2.x = homelessOperCurrentX
|
|
||||||
cache2.y = layer_y[1] - cache2.height
|
|
||||||
homelessOperCurrentX += cache2.width + dcv.GRAPH_BB_SPAN
|
|
||||||
operResult[i[0]] = cache2
|
|
||||||
|
|
||||||
for i in pluggedOper.keys(): # plugged oper
|
|
||||||
cache = bbResult[i]
|
|
||||||
for j in pluggedOper[i]:
|
|
||||||
jCache = pluggedOper[i][j]
|
|
||||||
baseX = cache.x
|
|
||||||
for q in jCache:
|
|
||||||
exCur.execute("SELECT [op] FROM pOper WHERE [thisobj] == ?", (q,))
|
|
||||||
cache2 = dcv.OperResult(exCur.fetchone()[0])
|
|
||||||
cache2.computSize()
|
|
||||||
cache2.x = baseX
|
|
||||||
baseX += cache2.width + dcv.GRAPH_BB_SPAN
|
|
||||||
cache2.y = cache.y - j * dcv.GRAPH_SPAN_BB_POPER
|
|
||||||
operResult[q] = cache2
|
|
||||||
|
|
||||||
# query bb pin's data
|
|
||||||
listCache = []
|
|
||||||
listItemCache = None
|
|
||||||
for i in allBB:
|
|
||||||
cache = bbResult[i]
|
|
||||||
exCur.execute("SELECT [thisobj], [name], [type] FROM pTarget WHERE [belong_to] == ?;", (i,))
|
|
||||||
temp = exCur.fetchone()
|
|
||||||
if temp == None:
|
|
||||||
cache.ptargetData = '{}'
|
|
||||||
else:
|
|
||||||
cache.ptargetData = json.dumps(dcv.PinInformation(temp[0], temp[1], temp[2]), cls = dcv.JsonCustomEncoder)
|
|
||||||
|
|
||||||
listCache.clear()
|
|
||||||
exCur.execute("SELECT [thisobj], [name] FROM bIn WHERE [belong_to] == ? ORDER BY [index];", (i,))
|
|
||||||
for j in exCur.fetchall():
|
|
||||||
listItemCache = dcv.PinInformation(j[0], j[1], '')
|
|
||||||
listCache.append(listItemCache)
|
|
||||||
cache.binData = json.dumps(listCache, cls = dcv.JsonCustomEncoder)
|
|
||||||
|
|
||||||
listCache.clear()
|
|
||||||
exCur.execute("SELECT [thisobj], [name] FROM bOut WHERE [belong_to] == ? ORDER BY [index];", (i,))
|
|
||||||
for j in exCur.fetchall():
|
|
||||||
listItemCache = dcv.PinInformation(j[0], j[1], '')
|
|
||||||
listCache.append(listItemCache)
|
|
||||||
cache.boutData = json.dumps(listCache, cls = dcv.JsonCustomEncoder)
|
|
||||||
|
|
||||||
listCache.clear()
|
|
||||||
exCur.execute("SELECT [thisobj], [name], [type] FROM pIn WHERE [belong_to] == ? ORDER BY [index];", (i,))
|
|
||||||
for j in exCur.fetchall():
|
|
||||||
listItemCache = dcv.PinInformation(j[0], j[1], j[2])
|
|
||||||
listCache.append(listItemCache)
|
|
||||||
cache.pinData = json.dumps(listCache, cls = dcv.JsonCustomEncoder)
|
|
||||||
|
|
||||||
listCache.clear()
|
|
||||||
exCur.execute("SELECT [thisobj], [name], [type] FROM pOut WHERE [belong_to] == ? ORDER BY [index];", (i,))
|
|
||||||
for j in exCur.fetchall():
|
|
||||||
listItemCache = dcv.PinInformation(j[0], j[1], j[2])
|
|
||||||
listCache.append(listItemCache)
|
|
||||||
cache.poutData = json.dumps(listCache, cls = dcv.JsonCustomEncoder)
|
|
||||||
|
|
||||||
# query oper pin's data
|
|
||||||
for i in operResult.keys():
|
|
||||||
cache = operResult[i]
|
|
||||||
|
|
||||||
listCache.clear()
|
|
||||||
exCur.execute("SELECT [thisobj], [name], [type] FROM pIn WHERE [belong_to] == ? ORDER BY [index];", (i,))
|
|
||||||
for j in exCur.fetchall():
|
|
||||||
listItemCache = dcv.PinInformation(j[0], j[1], j[2])
|
|
||||||
listCache.append(listItemCache)
|
|
||||||
cache.pinData = json.dumps(listCache, cls = dcv.JsonCustomEncoder)
|
|
||||||
|
|
||||||
listCache.clear()
|
|
||||||
exCur.execute("SELECT [thisobj], [name], [type] FROM pOut WHERE [belong_to] == ? ORDER BY [index];", (i,))
|
|
||||||
for j in exCur.fetchall():
|
|
||||||
listItemCache = dcv.PinInformation(j[0], j[1], j[2])
|
|
||||||
listCache.append(listItemCache)
|
|
||||||
cache.poutData = json.dumps(listCache, cls = dcv.JsonCustomEncoder)
|
|
||||||
|
|
||||||
# write to database and return
|
|
||||||
for i in bbResult.keys():
|
|
||||||
cache = bbResult[i]
|
|
||||||
currentGraphBlockCell[i] = dcv.BlockCellItem(cache.x, cache.y, cache.width, cache.height)
|
|
||||||
deCur.execute('INSERT INTO block VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)',
|
|
||||||
(target, i, cache.name, cache.assistName, cache.ptargetData, cache.pinData, cache.poutData, cache.binData, cache.boutData, cache.x, cache.y, cache.width, cache.height, cache.expandable))
|
|
||||||
for i in operResult.keys():
|
|
||||||
cache = operResult[i]
|
|
||||||
currentGraphBlockCell[i] = dcv.BlockCellItem(cache.x, cache.y, cache.width, cache.height)
|
|
||||||
deCur.execute("INSERT INTO block VALUES (?, ?, ?, '', '{}', ?, ?, '[]', '[]', ?, ?, ?, ?, -1)",
|
|
||||||
(target, i, cache.name, cache.pinData, cache.poutData, cache.x, cache.y, cache.width, cache.height))
|
|
||||||
|
|
||||||
def recursiveBuildBBTree(node, exCur, processedBB, layer, depth, graphId):
|
|
||||||
realLinkedBB = set()
|
|
||||||
# find links
|
|
||||||
exCur.execute("SELECT [output_obj] FROM bLink WHERE ([input_obj] == ? AND [input_type] == ? AND [belong_to] = ?) ORDER BY [input_index] ASC;",
|
|
||||||
(node.bb, (dcv.dbBLinkInputOutputType.INPUT if depth == 0 else dcv.dbBLinkInputOutputType.OUTPUT), graphId))
|
|
||||||
for i in exCur.fetchall():
|
|
||||||
if i[0] != graphId: # omit self
|
|
||||||
realLinkedBB.add(i[0])
|
|
||||||
|
|
||||||
if (len(realLinkedBB) == 0):
|
|
||||||
return layer
|
|
||||||
|
|
||||||
# ignore duplicated bb
|
|
||||||
# calc need processed bb first
|
|
||||||
# and register all gotten bb. for preventing infinity resursive func and
|
|
||||||
# keep bb tree structure
|
|
||||||
realLinkedBB = realLinkedBB - processedBB
|
|
||||||
processedBB.update(realLinkedBB)
|
|
||||||
|
|
||||||
# iterate each bb
|
|
||||||
for i in realLinkedBB:
|
|
||||||
# recursive execute this method
|
|
||||||
newNode = dcv.BBTreeNode(i, layer)
|
|
||||||
layer = recursiveBuildBBTree(newNode, exCur, processedBB, layer, depth + 1, graphId)
|
|
||||||
# add new node into list and ++layer
|
|
||||||
layer+=1
|
|
||||||
node.nodes.append(newNode)
|
|
||||||
|
|
||||||
# minus extra ++ due to for
|
|
||||||
if (len(realLinkedBB) != 0):
|
|
||||||
layer-=1
|
|
||||||
|
|
||||||
return layer
|
|
||||||
|
|
||||||
def recursiveCalcBBX(node, baseX, resultList, layerMap):
|
|
||||||
maxExpand = 0
|
|
||||||
for i in node.nodes:
|
|
||||||
layerMap[i.bb] = i.layer
|
|
||||||
resultList[i.bb].x = baseX
|
|
||||||
maxExpand = max(maxExpand, resultList[i.bb].width)
|
|
||||||
|
|
||||||
for i in node.nodes:
|
|
||||||
recursiveCalcBBX(i, baseX + maxExpand + dcv.GRAPH_BB_SPAN, resultList, layerMap)
|
|
||||||
|
|
||||||
def recursiveBuildOperTree(oper, bb_layer_map, processedOper, occupiedLayerMap, exCur, sublayer, bb, graphId, subLayerColumnMap):
|
|
||||||
if oper in processedOper:
|
|
||||||
return
|
|
||||||
|
|
||||||
# for avoid fucking export parameter feature. check whether self is
|
|
||||||
# current graph's memeber
|
|
||||||
exCur.execute("SELECT [belong_to] FROM pOper WHERE [thisobj] == ?;", (oper,))
|
|
||||||
if (exCur.fetchone()[0] != graphId):
|
|
||||||
# fuck export param, exit
|
|
||||||
return
|
|
||||||
|
|
||||||
# make sure sub layer column map is ok
|
|
||||||
if sublayer not in subLayerColumnMap.keys():
|
|
||||||
subLayerColumnMap[sublayer] = []
|
|
||||||
|
|
||||||
# register self
|
|
||||||
# mark processed
|
|
||||||
processedOper.add(oper)
|
|
||||||
subLayerColumnMap[sublayer].append(oper)
|
|
||||||
|
|
||||||
# record layer occupation
|
|
||||||
layer = bb_layer_map[bb]
|
|
||||||
occupiedLayerMap[layer] = max(occupiedLayerMap.get(layer, -1), sublayer)
|
|
||||||
|
|
||||||
# iterate sub item
|
|
||||||
exCur.execute("SELECT [input_obj] FROM pLink WHERE ([output_obj] == ? AND [input_type] == ? AND [input_is_bb] == 0) ORDER BY [output_index];", (oper, dcv.dbPLinkInputOutputType.POUT))
|
|
||||||
res = []
|
|
||||||
for i in exCur.fetchall():
|
|
||||||
res.append(i[0])
|
|
||||||
|
|
||||||
for i in res:
|
|
||||||
recursiveBuildOperTree(i, bb_layer_map, processedOper, occupiedLayerMap, exCur, sublayer + 1, bb, graphId, subLayerColumnMap)
|
|
||||||
|
|
||||||
def buildCell(exDb, deDb, target, currentGraphBlockCell):
    exCur = exDb.cursor()
    deCur = deDb.cursor()
    # prepare block set
    blockSet = set()
    for i in currentGraphBlockCell.keys():
        blockSet.add(i)

    # find current graph's pIO / bIO
    boutx = set()
    pouty = set()
    graphPIO = set()

    # bOut.x and pOut.y are not confirmed yet; update them once the graph size is known
    exCur.execute("SELECT [thisobj], [name], [index] FROM bIn WHERE [belong_to] == ?", (target,))
    for i in exCur.fetchall():
        x = 0
        y = dcv.GRAPH_BOFFSET + i[2] * (dcv.BB_PBSIZE + dcv.GRAPH_BSPAN)
        currentGraphBlockCell[i[0]] = dcv.BlockCellItem(x, y, dcv.BB_PBSIZE, dcv.BB_PBSIZE)
        deCur.execute("INSERT INTO cell VALUES (?, ?, ?, '', ?, ?, ?)", (target, i[0], i[1], x, y, dcv.CellType.BIO))
    exCur.execute("SELECT [thisobj], [name], [index] FROM bOut WHERE [belong_to] == ?", (target,))
    for i in exCur.fetchall():
        x = 0
        y = dcv.GRAPH_BOFFSET + i[2] * (dcv.BB_PBSIZE + dcv.GRAPH_BSPAN)
        currentGraphBlockCell[i[0]] = dcv.BlockCellItem(x, y, dcv.BB_PBSIZE, dcv.BB_PBSIZE)
        deCur.execute("INSERT INTO cell VALUES (?, ?, ?, '', ?, ?, ?)", (target, i[0], i[1], x, y, dcv.CellType.BIO))
        boutx.add(i[0])

    exCur.execute("SELECT [thisobj], [name], [index], [type] FROM pIn WHERE [belong_to] == ?", (target,))
    for i in exCur.fetchall():
        x = dcv.GRAPH_POFFSET + i[2] * (dcv.BB_PBSIZE + dcv.GRAPH_PSPAN)
        y = 0
        currentGraphBlockCell[i[0]] = dcv.BlockCellItem(x, y, dcv.BB_PBSIZE, dcv.BB_PBSIZE)
        deCur.execute("INSERT INTO cell VALUES (?, ?, ?, ?, ?, ?, ?)", (target, i[0], i[1], i[3], x, y, dcv.CellType.PIO))
        graphPIO.add(i[0])
    exCur.execute("SELECT [thisobj], [name], [index], [type] FROM pOut WHERE [belong_to] == ?", (target,))
    for i in exCur.fetchall():
        x = dcv.GRAPH_POFFSET + i[2] * (dcv.BB_PBSIZE + dcv.GRAPH_PSPAN)
        y = 0
        currentGraphBlockCell[i[0]] = dcv.BlockCellItem(x, y, dcv.BB_PBSIZE, dcv.BB_PBSIZE)
        deCur.execute("INSERT INTO cell VALUES (?, ?, ?, ?, ?, ?, ?)", (target, i[0], i[1], i[3], x, y, dcv.CellType.PIO))
        graphPIO.add(i[0])
        pouty.add(i[0])
    exCur.execute("SELECT [thisobj], [name], [type] FROM pTarget WHERE [belong_to] == ?", (target,))
    cache = exCur.fetchone()
    if cache != None:
        currentGraphBlockCell[cache[0]] = dcv.BlockCellItem(0, 0, dcv.BB_PBSIZE, dcv.BB_PBSIZE)
        deCur.execute("INSERT INTO cell VALUES (?, ?, ?, ?, 0, 0, ?)", (target, cache[0], cache[1], cache[2], dcv.CellType.PTARGET))
        graphPIO.add(cache[0])

    # query all pLocal
    allLocal = set()
    localUsageCounter = {}
    exCur.execute("SELECT [thisobj], [name], [type] FROM pLocal WHERE [belong_to] == ?;", (target,))
    for i in exCur.fetchall():
        allLocal.add(i[0])
        localUsageCounter[i[0]] = dcv.LocalUsageItem(0, False, dcv.LocalUsageType.PLOCAL)

    # query all links (no need to consider exported pIO, because it will not add any shortcut)
    # !! the same if-framework as in the pLink generator function !! SHARED
    createdShortcut = set()
    exCur.execute("SELECT * FROM pLink WHERE [belong_to] == ?", (target,))
    for i in exCur.fetchall():

        # analyse the 5 cases one by one
        if (i[7] == dcv.dbPLinkInputOutputType.PTARGET or i[7] == dcv.dbPLinkInputOutputType.PIN):
            if (i[3] == dcv.dbPLinkInputOutputType.PLOCAL):
                if i[2] in allLocal or i[2] in createdShortcut:
                    cache = localUsageCounter[i[2]]
                else:
                    cache = dcv.LocalUsageItem(0, True, dcv.LocalUsageType.PLOCAL)
                    localUsageCounter[i[2]] = cache
                    createdShortcut.add(i[2])

                cache.count += 1
                cache.lastUse = i[6]
                cache.lastDirection = 0
                cache.lastIndex = i[9]

            elif (i[3] == dcv.dbPLinkInputOutputType.PIN):
                if i[2] == target:
                    continue  # ignore self pIn/pOut. it doesn't need any shortcut
                if i[2] not in blockSet:
                    if i[0] not in createdShortcut:
                        cache = dcv.LocalUsageItem(0, True, dcv.LocalUsageType.PIN)
                        localUsageCounter[i[0]] = cache
                        createdShortcut.add(i[0])
                    else:
                        cache = localUsageCounter[i[0]]

                    cache.count += 1
                    cache.lastUse = i[6]
                    cache.lastDirection = 0
                    cache.lastIndex = i[9]
            elif (i[3] == dcv.dbPLinkInputOutputType.PATTR):  # for attribute usage
                if i[2] not in createdShortcut:
                    cache = dcv.LocalUsageItem(0, True, dcv.LocalUsageType.PATTR)
                    localUsageCounter[i[2]] = cache
                    createdShortcut.add(i[2])
                else:
                    cache = localUsageCounter[i[2]]

                cache.count += 1
                cache.lastUse = i[6]
                cache.lastDirection = 0
                cache.lastIndex = i[9]
            else:
                if i[2] not in blockSet:
                    if i[0] not in createdShortcut:
                        cache = dcv.LocalUsageItem(0, True, dcv.LocalUsageType.POUT)
                        localUsageCounter[i[0]] = cache
                        createdShortcut.add(i[0])
                    else:
                        cache = localUsageCounter[i[0]]

                    cache.count += 1
                    cache.lastUse = i[6]
                    cache.lastDirection = 1
                    cache.lastIndex = i[9]
        else:
            if (i[7] == dcv.dbPLinkInputOutputType.PLOCAL):
                if i[6] in allLocal or i[6] in createdShortcut:
                    cache = localUsageCounter[i[6]]
                else:
                    cache = dcv.LocalUsageItem(0, True, dcv.LocalUsageType.PLOCAL)
                    localUsageCounter[i[6]] = cache
                    createdShortcut.add(i[6])

                cache.count += 1
                cache.lastUse = i[2]
                cache.lastDirection = 1
                cache.lastIndex = i[5]
            else:
                if i[6] == target:
                    continue  # ignore self pIn/pOut. it doesn't need any shortcut
                if i[6] not in blockSet:
                    if i[1] not in createdShortcut:
                        cache = dcv.LocalUsageItem(0, True, dcv.LocalUsageType.POUT)
                        localUsageCounter[i[1]] = cache
                        createdShortcut.add(i[1])
                    else:
                        cache = localUsageCounter[i[1]]

                    cache.count += 1
                    cache.lastUse = i[2]
                    cache.lastDirection = 1
                    cache.lastIndex = i[5]

    # apply all cells
    defaultCellIndex = 0
    for i in localUsageCounter.keys():
        cache = localUsageCounter[i]
        # compute x, y
        if (cache.count == 1):
            # attachable
            attachTarget = currentGraphBlockCell[cache.lastUse]
            (x, y) = computCellPosition(attachTarget.x, attachTarget.y, attachTarget.h, cache.lastDirection, cache.lastIndex)

        else:
            # place it in the default area
            y = dcv.GRAPH_CONTENTOFFSET_Y
            x = dcv.GRAPH_CONTENTOFFSET_X + defaultCellIndex * (dcv.CELL_WIDTH + dcv.GRAPH_BB_SPAN)
            defaultCellIndex += 1
        # get information
        if (cache.internal_type == dcv.LocalUsageType.PIN):
            tableName = 'pIn'
        elif (cache.internal_type == dcv.LocalUsageType.POUT):
            tableName = 'pOut'
        elif (cache.internal_type == dcv.LocalUsageType.PATTR):
            tableName = 'pAttr'
        else:
            tableName = 'pLocal'
        exCur.execute("SELECT [name], [type] FROM {} WHERE [thisobj] == ?".format(tableName), (i,))
        temp = exCur.fetchone()

        # submit to database and map
        currentGraphBlockCell[i] = dcv.BlockCellItem(x, y, dcv.CELL_WIDTH, dcv.CELL_HEIGHT)
        deCur.execute("INSERT INTO cell VALUES (?, ?, ?, ?, ?, ?, ?)",
                      (target, i, temp[0], temp[1], x, y, (dcv.CellType.SHORTCUT if cache.isshortcut else dcv.CellType.PLOCAL)))

    # compute size and update the database and currentGraphBlockCell
    graphX = 0
    graphY = 0
    for key, values in currentGraphBlockCell.items():
        graphX = max(graphX, values.x + values.w)
        graphY = max(graphY, values.y + values.h)
    graphX += dcv.GRAPH_POFFSET
    graphY += dcv.GRAPH_BOFFSET

    deCur.execute("UPDATE graph SET [width] = ?, [height] = ? WHERE [graph] == ?", (graphX, graphY, target))

    # update bOut.x and pOut.y data
    for i in boutx:
        deCur.execute("UPDATE cell SET [x] = ? WHERE ([thisobj] == ? AND [belong_to_graph] == ?)", (graphX - dcv.BB_PBSIZE, i, target))
        currentGraphBlockCell[i].x = graphX - dcv.BB_PBSIZE
    for i in pouty:
        deCur.execute("UPDATE cell SET [y] = ? WHERE ([thisobj] == ? AND [belong_to_graph] == ?)", (graphY - dcv.BB_PBSIZE, i, target))
        currentGraphBlockCell[i].y = graphY - dcv.BB_PBSIZE

    return graphPIO

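# Shape of currentGraphBlockCell when buildCell() finishes (hypothetical IDs and
# coordinates, for illustration only): every block, graph io and local cell is mapped
# to one BlockCellItem, and buildLink() later reads these entries to place terminals.
#
#   currentGraphBlockCell = {
#       2001: dcv.BlockCellItem(20.0, 35.0, 200.0, 80.0),                      # a building block
#       3001: dcv.BlockCellItem(0.0, 5.0, dcv.BB_PBSIZE, dcv.BB_PBSIZE),       # a graph bIn
#       4001: dcv.BlockCellItem(60.0, 20.0, dcv.CELL_WIDTH, dcv.CELL_HEIGHT),  # a pLocal cell
#   }
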
def computCellPosition(baseX, baseY, height, direction, index):
    if (index == -1):
        return (baseX, baseY - dcv.GRAPH_SPAN_BB_PLOCAL)

    if (direction == 0):
        return (baseX + dcv.BB_POFFSET + index * (dcv.BB_PBSIZE + dcv.BB_PSPAN), baseY - dcv.GRAPH_SPAN_BB_PLOCAL)
    else:
        return (baseX + dcv.BB_POFFSET + index * (dcv.BB_PBSIZE + dcv.BB_PSPAN), baseY + height + dcv.GRAPH_SPAN_BB_PLOCAL)

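# Worked example (hypothetical numbers; the real constants live in DecoratorConstValue):
# for a single-use cell attached to parameter index 2 on the input side (direction == 0)
# of a block located at (100.0, 50.0) with height 30.0:
#   computCellPosition(100.0, 50.0, 30.0, 0, 2)
#     -> (100.0 + dcv.BB_POFFSET + 2 * (dcv.BB_PBSIZE + dcv.BB_PSPAN),
#         50.0 - dcv.GRAPH_SPAN_BB_PLOCAL)
# i.e. the cell lines up with the block's third parameter slot and floats just above
# the block; with direction == 1 it would be placed just below the block instead.
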
def buildLink(exDb, deDb, target, currentGraphBlockCell, graphPIO):
    exCur = exDb.cursor()
    deCur = deDb.cursor()

    # prepare block set
    blockSet = set()
    for i in currentGraphBlockCell.keys():
        blockSet.add(i)

    # bLink
    exCur.execute("SELECT * FROM bLink WHERE [belong_to] == ?", (target,))
    for i in exCur.fetchall():
        if i[3] == target:
            (x1, y1) = computLinkBTerminal(i[0], 0, -1, currentGraphBlockCell)
            bStartObj = i[0]
            bStartType = 0
            bStartIndex = -1
        else:
            (x1, y1) = computLinkBTerminal(i[3], i[4], i[5], currentGraphBlockCell)
            bStartObj = i[3]
            bStartType = i[4]
            bStartIndex = i[5]
        if i[6] == target:
            (x2, y2) = computLinkBTerminal(i[1], 0, -1, currentGraphBlockCell)
            bEndObj = i[1]
            bEndType = 0
            bEndIndex = -1
        else:
            (x2, y2) = computLinkBTerminal(i[6], i[7], i[8], currentGraphBlockCell)
            bEndObj = i[6]
            bEndType = i[7]
            bEndIndex = i[8]

        deCur.execute("INSERT INTO link VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);",
                      (target, i[2], i[0], i[1], bStartObj, bEndObj, bStartType, bEndType, bStartIndex, bEndIndex, x1, y1, x2, y2))

    # pLink
    # !! the same if-framework as in the cell generator function !! SHARED
    exCur.execute("SELECT * FROM pLink WHERE [belong_to] == ?", (target,))
    for i in exCur.fetchall():
        # analyse the 5 cases one by one
        if (i[7] == dcv.dbPLinkInputOutputType.PTARGET or i[7] == dcv.dbPLinkInputOutputType.PIN):
            if (i[3] == dcv.dbPLinkInputOutputType.PLOCAL):
                (x1, y1) = computLinkPTerminal(i[0], 0, -1, currentGraphBlockCell)
                (x2, y2) = computLinkPTerminal(i[6], 0, i[9], currentGraphBlockCell)
                deCur.execute("INSERT INTO link VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);",
                              (target, -1, i[0], i[1], i[2], i[6], 0, 0, -1, i[9], x1, y1, x2, y2))

            elif (i[3] == dcv.dbPLinkInputOutputType.PIN):
                (x2, y2) = computLinkPTerminal(i[6], 0, i[9], currentGraphBlockCell)
                if i[2] == target:
                    (x1, y1) = computLinkPTerminal(i[0], 0, -1, currentGraphBlockCell)
                    deCur.execute("INSERT INTO link VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);",
                                  (target, -1, i[0], i[1], i[0], i[6], 0, 0, -1, i[9], x1, y1, x2, y2))
                else:
                    (x1, y1) = computLinkPTerminal(i[2], 0, i[5], currentGraphBlockCell)
                    deCur.execute("INSERT INTO link VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);",
                                  (target, -1, i[0], i[1], i[2], i[6], 0, 0, i[5], i[9], x1, y1, x2, y2))
            elif (i[3] == dcv.dbPLinkInputOutputType.PATTR):
                (x1, y1) = computLinkPTerminal(i[0], 0, -1, currentGraphBlockCell)
                (x2, y2) = computLinkPTerminal(i[6], 0, i[9], currentGraphBlockCell)
                deCur.execute("INSERT INTO link VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);",
                              (target, -1, i[0], i[1], i[2], i[6], 0, 0, -1, i[9], x1, y1, x2, y2))
            else:
                # process a potential pOut shortcut (for pLocal, input/input_obj and
                # output/output_obj are the same, so nothing needs to be added for them)
                if i[2] in blockSet:
                    (x1, y1) = computLinkPTerminal(i[2], 1, i[5], currentGraphBlockCell)
                else:
                    (x1, y1) = computLinkPTerminal(i[0], 1, i[5], currentGraphBlockCell)
                (x2, y2) = computLinkPTerminal(i[6], 0, i[9], currentGraphBlockCell)
                deCur.execute("INSERT INTO link VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);",
                              (target, -1, i[0], i[1], i[2], i[6], 1, 0, i[5], i[9], x1, y1, x2, y2))

        else:
            if (i[7] == dcv.dbPLinkInputOutputType.PLOCAL):
                (x1, y1) = computLinkPTerminal(i[2], 1, i[5], currentGraphBlockCell)
                (x2, y2) = computLinkPTerminal(i[1], 0, -1, currentGraphBlockCell)
                deCur.execute("INSERT INTO link VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);",
                              (target, -1, i[0], i[1], i[2], i[6], 1, 0, i[5], -1, x1, y1, x2, y2))
            else:
                (x1, y1) = computLinkPTerminal(i[2], 1, i[5], currentGraphBlockCell)
                if i[6] == target:
                    (x2, y2) = computLinkPTerminal(i[1], 0, -1, currentGraphBlockCell)
                    deCur.execute("INSERT INTO link VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);",
                                  (target, -1, i[0], i[1], i[2], i[1], 1, 0, i[5], -1, x1, y1, x2, y2))
                else:
                    (x2, y2) = computLinkPTerminal(i[6], 1, i[9], currentGraphBlockCell)
                    deCur.execute("INSERT INTO link VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);",
                                  (target, -1, i[0], i[1], i[2], i[6], 1, 1, i[5], i[9], x1, y1, x2, y2))

    # eLink
    exCur.execute("SELECT * FROM eLink WHERE [belong_to] == ?", (target,))
    for i in exCur.fetchall():
        (x1, y1) = computLinkPTerminal(i[0], 0, -1, currentGraphBlockCell)
        (x2, y2) = computLinkPTerminal(i[1], 0 if i[2] == 1 else 1, i[3], currentGraphBlockCell)
        deCur.execute("INSERT INTO link VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);",
                      (target, -2, i[0], i[0], target, i[1], 0, 0 if i[2] == 1 else 1, -1, i[3], x1, y1, x2, y2))

def computLinkBTerminal(obj, xtype, index, currentGraphBlockCell):
    # index == -1 means no offset; the link will connect to the graph io
    cache = currentGraphBlockCell[obj]
    return (cache.x if xtype == 0 else cache.x + cache.w - dcv.BB_PBSIZE,
            cache.y if index == -1 else (cache.y + dcv.BB_BOFFSET + index * (dcv.BB_PBSIZE + dcv.BB_BSPAN)))


def computLinkPTerminal(obj, ytype, index, currentGraphBlockCell):
    # ytype is not the database type. it has the same meaning as in computLinkBTerminal,
    # indicating the position: 0 keeps the original position (for pIn and pTarget),
    # 1 takes the height into account (for pOut)
    cache = currentGraphBlockCell[obj]
    return (cache.x if index == -1 else (cache.x + dcv.BB_POFFSET + index * (dcv.BB_PBSIZE + dcv.BB_PSPAN)),
            cache.y if ytype == 0 else (cache.y + cache.h - dcv.BB_PBSIZE))

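# Illustration (hypothetical cell, for orientation only): for an object whose
# BlockCellItem sits at (40.0, 20.0) with w = 200.0 and h = 80.0,
#   computLinkBTerminal(obj, 0, 1, ...)  -> left edge x = 40.0,
#                                           y = 20.0 + dcv.BB_BOFFSET + 1 * (dcv.BB_PBSIZE + dcv.BB_BSPAN)
#   computLinkBTerminal(obj, 1, -1, ...) -> right edge x = 40.0 + 200.0 - dcv.BB_PBSIZE, y = 20.0
#   computLinkPTerminal(obj, 1, 0, ...)  -> x = 40.0 + dcv.BB_POFFSET,
#                                           y at the bottom edge = 20.0 + 80.0 - dcv.BB_PBSIZE
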
def buildInfo(exDb, deDb):
    exInfoCur = exDb.cursor()
    exQueryCur = exDb.cursor()
    deCur = deDb.cursor()

    # declare a tiny storage for convenient queries
    tinyStorageKey = 0
    tinyStorageBB = -1
    tinyStorageSetting = 0
    tinyStorageName = ""

    # export local data (including proto bb internal data)
    exInfoCur.execute("SELECT * FROM pData;")
    for i in exInfoCur.fetchall():
        attachBB = -1
        isSetting = 0
        infoName = ""

        if i[2] == tinyStorageKey:
            attachBB = tinyStorageBB
            isSetting = tinyStorageSetting
            infoName = tinyStorageName
        else:
            # clear storage first
            tinyStorageBB = -1
            tinyStorageSetting = 0
            tinyStorageName = ""

            # query the corresponding pLocal
            exQueryCur.execute("SELECT [belong_to], [is_setting], [name] FROM pLocal WHERE [thisobj] = ?", (i[2], ))
            plocalCache = exQueryCur.fetchone()
            if plocalCache is not None:
                # add setting config
                tinyStorageSetting = isSetting = plocalCache[1]
                tinyStorageName = infoName = plocalCache[2]
                # query bb again
                exQueryCur.execute("SELECT [thisobj] FROM behavior WHERE ([thisobj] = ? AND [type] = 0)", (plocalCache[0], ))
                behaviorCache = exQueryCur.fetchone()
                if behaviorCache is not None:
                    tinyStorageBB = attachBB = behaviorCache[0]

        deCur.execute("INSERT INTO info VALUES (?, ?, ?, ?, ?, ?)", (i[2], attachBB, isSetting, infoName, i[0], i[1]))

109
SuperScriptDecorator/DecoratorData.py
Normal file

@ -0,0 +1,109 @@
import typing, collections, sqlite3

class Vector(object):
    def __init__(self, x: float, y: float):
        self.X: float = x
        self.Y: float = y
    def __add__(self, other):
        if not isinstance(other, Vector): return NotImplemented
        return Vector(self.X + other.X, self.Y + other.Y)
    def __sub__(self, other):
        if not isinstance(other, Vector): return NotImplemented
        return Vector(self.X - other.X, self.Y - other.Y)

    def __radd__(self, other):
        if not isinstance(other, Vector): return NotImplemented
        return Vector(self.X + other.X, self.Y + other.Y)
    def __rsub__(self, other):
        if not isinstance(other, Vector): return NotImplemented
        return Vector(other.X - self.X, other.Y - self.Y)

    def __iadd__(self, other):
        if not isinstance(other, Vector): return NotImplemented
        self.X += other.X
        self.Y += other.Y
        return self
    def __isub__(self, other):
        if not isinstance(other, Vector): return NotImplemented
        self.X -= other.X
        self.Y -= other.Y
        return self

    def __eq__(self, other) -> bool:
        if not isinstance(other, Vector): return NotImplemented
        return (self.X == other.X and self.Y == other.Y)
    def __ne__(self, other) -> bool:
        if not isinstance(other, Vector): return NotImplemented
        return (self.X != other.X or self.Y != other.Y)

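# Minimal usage sketch (illustration only, not part of the original file):
#   a = Vector(1.0, 2.0)
#   b = Vector(3.0, 4.0)
#   a + b == Vector(4.0, 6.0)    # True, comparison is component-wise
#   a += b                       # the in-place variants mutate a and return it
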
class ICanComputeSize(object):
    '''
    This class serves the TreeLayout class.

    All classes inheriting from this class gain the ability to calculate their own size
    and report it to TreeLayout. TreeLayout uses these data to distribute positions.

    The functions declared in this class have deliberately neutral names.
    The reason is that the vertical and horizontal directions of BB and Oper are opposite,
    so Leaves and Root are used to give them a uniform concept.
    '''
    def GetLeavesDirLength(self) -> float:
        '''
        Get the length along the direction where the leaves grow.
        '''
        raise NotImplementedError()
    def GetRootDirLength(self) -> float:
        '''
        Get the length along the direction where the tree is planted.
        '''
        raise NotImplementedError()

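# A minimal sketch (illustration only, not part of the commit) of how a node could
# satisfy ICanComputeSize, assuming the node already knows its own width and height:
#
#   class _ExampleNode(ICanComputeSize):
#       def __init__(self, w: float, h: float):
#           self.w = w
#           self.h = h
#       def GetLeavesDirLength(self) -> float:
#           return self.w    # extent along the direction in which its children stack
#       def GetRootDirLength(self) -> float:
#           return self.h    # extent along the direction pointing back to the root
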
TNode = typing.TypeVar('TNode')
class TreeLayout(typing.Generic[TNode]):
    pass

SqlTableProto_Behavior = collections.namedtuple('Behavior', 'thisobj, name, type, proto_name, proto_guid, flags, priority, version, pin_count, parent')

class BBDataPayload(object):
    def __init__(self, sql_data: tuple):
        v = SqlTableProto_Behavior._make(sql_data)

        self.m_CKID: int = v.thisobj
        self.m_Name: str = v.name
        self.m_Type: int = v.type
        self.m_ProtoName: str = v.proto_name
        self.m_ProtoGUID: str = v.proto_guid
        self.m_Flags: int = v.flags
        self.m_Priority: int = v.priority
        self.m_Version: int = v.version
        self.m_Parent: int = v.parent

        div_pin_count = v.pin_count.split(',')
        self.m_pTargetExisting: bool = (int(div_pin_count[0]) == 1)
        self.m_pInCount: int = int(div_pin_count[1])
        self.m_pOutCount: int = int(div_pin_count[2])
        self.m_bInCount: int = int(div_pin_count[3])
        self.m_bOutCount: int = int(div_pin_count[4])

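# Example (hypothetical row value): if v.pin_count is "1,2,1,1,2", the parsed fields are
#   m_pTargetExisting == True    (first field is "1")
#   m_pInCount  == 2
#   m_pOutCount == 1
#   m_bInCount  == 1
#   m_bOutCount == 2
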
class OperTreeNode(ICanComputeSize):
    def __init__(self):
        pass

class BBTreeNode(ICanComputeSize):
    def __init__(self):
        self.m_UpperOper: TreeLayout[OperTreeNode] = TreeLayout()
        self.m_LowerOper: TreeLayout[OperTreeNode] = TreeLayout()
        self.m_Upperval: collections.deque = collections.deque()
        self.m_LowerVal: collections.deque = collections.deque()
        self.m_Payload: BBDataPayload = None


class GraphWork(ICanComputeSize):
    def __init__(self):
        self.m_PassiveVal: collections.deque = collections.deque()
        self.m_PassiveOper: TreeLayout[OperTreeNode] = TreeLayout()
        self.m_ActiveBB: TreeLayout[BBTreeNode] = TreeLayout()
        self.m_PassiveBB: TreeLayout[BBTreeNode] = TreeLayout()

@ -46,6 +46,6 @@ class Prograssbar(object):
 percentage_bar * '#',
 (self.__PbarFullChar - percentage_bar) * '=',
 percentage_full * 100,
-self.__CurFileName
+self.__CurFileName if self.__CurFileName else ''
 ))
 sys.stdout.flush()

@ -38,10 +38,10 @@ if not cfg.Regulate():
 # if in debug mode, run directly
 # otherwise, run with a try wrapper.
 if cfg.m_DebugMode:
-    DecoratorCore.run(cfg)
+    DecoratorCore.Run(cfg)
 else:
     try:
-        DecoratorCore.run(cfg)
+        DecoratorCore.Run(cfg)
     except Exception as ex:
         print("!!! An error occurred. Please report the following error output and a reproduce file to the developer. !!!")
         logging.exception(ex)

@ -44,14 +44,21 @@ namespace SSMaterializer {
 ;
 }

-sqlite3_stmt* SSMaterializerDatabase::CreateStmt(const char* str_stmt) {
+sqlite3_stmt* SSMaterializerDatabase::GetStmt(const char* str_stmt) {
+// try to find it first
+auto probe = mStmtCache.find(reinterpret_cast<const uintptr_t>(str_stmt));
+if (probe != mStmtCache.end()) {
+return probe->second;
+}
+
+// not found. create one
 int result;
 sqlite3_stmt* stmt = NULL;
 result = sqlite3_prepare_v2(mDb, str_stmt, -1, &stmt, NULL);
 if (result != SQLITE_OK) return NULL;

 // append new one
-mStmtCache.push_back(stmt);
+mStmtCache.emplace(reinterpret_cast<const uintptr_t>(str_stmt), stmt);
 return stmt;
 }

@ -92,8 +99,8 @@ namespace SSMaterializer {

 //free all cached stmts and commit job
 for (auto it = mStmtCache.begin(); it != mStmtCache.end(); it++) {
-if (*it != NULL) {
-result = sqlite3_finalize(*it);
+if (it->second != NULL) {
+result = sqlite3_finalize(it->second);
 if (result != SQLITE_OK) goto fail;
 }
 }

@ -255,18 +262,12 @@ if (result != SQLITE_OK) { return FALSE; }

 #pragma endregion

-#define TryGetStmtCache(str_sql) static sqlite3_stmt* stmt = NULL; \
-if (stmt == NULL) { \
-stmt = CreateStmt(str_sql); \
-if (stmt == NULL) return; \
-}
-
 #pragma region document database

 void DocumentDatabase::write_script(DataStruct::dbdoc_script& data) {
 if (mDb == NULL) return;

-TryGetStmtCache("INSERT INTO [script] VALUES (?, ?, ?, ?)");
+sqlite3_stmt* stmt = GetStmt("INSERT INTO [script] VALUES (?, ?, ?, ?)");
 sqlite3_reset(stmt);

 sqlite3_bind_int(stmt, 1, data.thisobj);

@ -279,7 +280,7 @@ if (stmt == NULL) { \
 void DocumentDatabase::write_script_behavior(DataStruct::dbdoc_script_behavior& data) {
 if (mDb == NULL) return;

-TryGetStmtCache("INSERT INTO [script_behavior] VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)");
+sqlite3_stmt* stmt = GetStmt("INSERT INTO [script_behavior] VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)");
 sqlite3_reset(stmt);

 sqlite3_bind_int(stmt, 1, data.thisobj);
@ -299,7 +300,7 @@ if (stmt == NULL) { \
 void DocumentDatabase::write_script_pTarget(DataStruct::dbdoc_script_pTarget& data) {
 if (mDb == NULL) return;

-TryGetStmtCache("INSERT INTO [script_pTarget] VALUES (?, ?, ?, ?, ?, ?, ?)");
+sqlite3_stmt* stmt = GetStmt("INSERT INTO [script_pTarget] VALUES (?, ?, ?, ?, ?, ?, ?)");
 sqlite3_reset(stmt);

 sqlite3_bind_int(stmt, 1, data.thisobj);
@ -315,7 +316,7 @@ if (stmt == NULL) { \
 void DocumentDatabase::write_script_pIn(DataStruct::dbdoc_script_pIn& data) {
 if (mDb == NULL) return;

-TryGetStmtCache("INSERT INTO [script_pIn] VALUES (?, ?, ?, ?, ?, ?, ?, ?)");
+sqlite3_stmt* stmt = GetStmt("INSERT INTO [script_pIn] VALUES (?, ?, ?, ?, ?, ?, ?, ?)");
 sqlite3_reset(stmt);

 sqlite3_bind_int(stmt, 1, data.thisobj);
@ -332,7 +333,7 @@ if (stmt == NULL) { \
 void DocumentDatabase::write_script_pOut(DataStruct::dbdoc_script_pOut& data) {
 if (mDb == NULL) return;

-TryGetStmtCache("INSERT INTO [script_pOut] VALUES (?, ?, ?, ?, ?, ?)");
+sqlite3_stmt* stmt = GetStmt("INSERT INTO [script_pOut] VALUES (?, ?, ?, ?, ?, ?)");
 sqlite3_reset(stmt);

 sqlite3_bind_int(stmt, 1, data.thisobj);
@ -347,7 +348,7 @@ if (stmt == NULL) { \
 void DocumentDatabase::write_script_bIn(DataStruct::dbdoc_script_bIn& data) {
 if (mDb == NULL) return;

-TryGetStmtCache("INSERT INTO [script_bIn] VALUES (?, ?, ?, ?)");
+sqlite3_stmt* stmt = GetStmt("INSERT INTO [script_bIn] VALUES (?, ?, ?, ?)");
 sqlite3_reset(stmt);

 sqlite3_bind_int(stmt, 1, data.thisobj);
@ -360,7 +361,7 @@ if (stmt == NULL) { \
 void DocumentDatabase::write_script_bOut(DataStruct::dbdoc_script_bOut& data) {
 if (mDb == NULL) return;

-TryGetStmtCache("INSERT INTO [script_bOut] VALUES (?, ?, ?, ?)");
+sqlite3_stmt* stmt = GetStmt("INSERT INTO [script_bOut] VALUES (?, ?, ?, ?)");
 sqlite3_reset(stmt);

 sqlite3_bind_int(stmt, 1, data.thisobj);
@ -373,7 +374,7 @@ if (stmt == NULL) { \
 void DocumentDatabase::write_script_bLink(DataStruct::dbdoc_script_bLink& data) {
 if (mDb == NULL) return;

-TryGetStmtCache("INSERT INTO [script_bLink] VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)");
+sqlite3_stmt* stmt = GetStmt("INSERT INTO [script_bLink] VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)");
 sqlite3_reset(stmt);

 sqlite3_bind_int(stmt, 1, data.input);
@ -392,7 +393,7 @@ if (stmt == NULL) { \
 void DocumentDatabase::write_script_pLocal(DataStruct::dbdoc_script_pLocal& data) {
 if (mDb == NULL) return;

-TryGetStmtCache("INSERT INTO [script_pLocal] VALUES (?, ?, ?, ?, ?, ?)");
+sqlite3_stmt* stmt = GetStmt("INSERT INTO [script_pLocal] VALUES (?, ?, ?, ?, ?, ?)");
 sqlite3_reset(stmt);

 sqlite3_bind_int(stmt, 1, data.thisobj);
@ -407,7 +408,7 @@ if (stmt == NULL) { \
 void DocumentDatabase::write_script_pLink(DataStruct::dbdoc_script_pLink& data) {
 if (mDb == NULL) return;

-TryGetStmtCache("INSERT INTO [script_pLink] VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)");
+sqlite3_stmt* stmt = GetStmt("INSERT INTO [script_pLink] VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)");
 sqlite3_reset(stmt);

 sqlite3_bind_int(stmt, 1, data.input);
@ -427,7 +428,7 @@ if (stmt == NULL) { \
 void DocumentDatabase::write_script_pOper(DataStruct::dbdoc_script_pOper& data) {
 if (mDb == NULL) return;

-TryGetStmtCache("INSERT INTO [script_pOper] VALUES (?, ?, ?, ?)");
+sqlite3_stmt* stmt = GetStmt("INSERT INTO [script_pOper] VALUES (?, ?, ?, ?)");
 sqlite3_reset(stmt);

 sqlite3_bind_int(stmt, 1, data.thisobj);
@ -440,7 +441,7 @@ if (stmt == NULL) { \
 void DocumentDatabase::write_script_eLink(DataStruct::dbdoc_script_eLink& data) {
 if (mDb == NULL) return;

-TryGetStmtCache("INSERT INTO [script_eLink] VALUES (?, ?, ?, ?, ?)");
+sqlite3_stmt* stmt = GetStmt("INSERT INTO [script_eLink] VALUES (?, ?, ?, ?, ?)");
 sqlite3_reset(stmt);

 sqlite3_bind_int(stmt, 1, data.export_obj);
@ -455,7 +456,7 @@ if (stmt == NULL) { \
 // then check database validation
 if (mDb == NULL) return;

-TryGetStmtCache("INSERT INTO [script_pAttr] VALUES (?, ?, ?, ?)");
+sqlite3_stmt* stmt = GetStmt("INSERT INTO [script_pAttr] VALUES (?, ?, ?, ?)");
 sqlite3_reset(stmt);

 sqlite3_bind_int(stmt, 1, data.thisobj);
@ -470,7 +471,7 @@ if (stmt == NULL) { \
 void DocumentDatabase::write_msg(DataStruct::dbdoc_msg& data) {
 if (mDb == NULL) return;

-TryGetStmtCache("INSERT INTO [msg] VALUES (?, ?)");
+sqlite3_stmt* stmt = GetStmt("INSERT INTO [msg] VALUES (?, ?)");
 sqlite3_reset(stmt);

 sqlite3_bind_int(stmt, 1, data.index);
@ -481,7 +482,7 @@ if (stmt == NULL) { \
 void DocumentDatabase::write_obj(DataStruct::dbdoc_obj& data) {
 if (mDb == NULL) return;

-TryGetStmtCache("INSERT INTO [obj] VALUES (?, ?, ?, ?)");
+sqlite3_stmt* stmt = GetStmt("INSERT INTO [obj] VALUES (?, ?, ?, ?)");
 sqlite3_reset(stmt);

 sqlite3_bind_int(stmt, 1, data.id);
@ -494,7 +495,7 @@ if (stmt == NULL) { \
 void DocumentDatabase::write_data(DataStruct::dbdoc_data& data) {
 if (mDb == NULL) return;

-TryGetStmtCache("INSERT INTO [data] VALUES (?, ?, ?)");
+sqlite3_stmt* stmt = GetStmt("INSERT INTO [data] VALUES (?, ?, ?)");
 sqlite3_reset(stmt);

 sqlite3_bind_text(stmt, 1, data.field.c_str(), -1, SQLITE_TRANSIENT);
@ -510,7 +511,7 @@ if (stmt == NULL) { \
 void EnvironmentDatabase::write_op(DataStruct::dbenv_op& data) {
 if (mDb == NULL) return;

-TryGetStmtCache("INSERT INTO [op] VALUES (?, ?, ?, ?, ?, ?, ?)");
+sqlite3_stmt* stmt = GetStmt("INSERT INTO [op] VALUES (?, ?, ?, ?, ?, ?, ?)");
 sqlite3_reset(stmt);

 sqlite3_bind_int(stmt, 1, (int)data.funcPtr);
@ -526,7 +527,7 @@ if (stmt == NULL) { \
 void EnvironmentDatabase::write_param(DataStruct::dbenv_param& data) {
 if (mDb == NULL) return;

-TryGetStmtCache("INSERT INTO [param] VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)");
+sqlite3_stmt* stmt = GetStmt("INSERT INTO [param] VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)");
 sqlite3_reset(stmt);

 sqlite3_bind_int(stmt, 1, data.index);
@ -553,7 +554,7 @@ if (stmt == NULL) { \
 void EnvironmentDatabase::write_attr(DataStruct::dbenv_attr& data) {
 if (mDb == NULL) return;

-TryGetStmtCache("INSERT INTO [attr] VALUES (?, ?, ?, ?, ?, ?, ?, ?)");
+sqlite3_stmt* stmt = GetStmt("INSERT INTO [attr] VALUES (?, ?, ?, ?, ?, ?, ?, ?)");
 sqlite3_reset(stmt);

 sqlite3_bind_int(stmt, 1, data.index);
@ -570,7 +571,7 @@ if (stmt == NULL) { \
 void EnvironmentDatabase::write_plugin(DataStruct::dbenv_plugin& data) {
 if (mDb == NULL) return;

-TryGetStmtCache("INSERT INTO [plugin] VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)");
+sqlite3_stmt* stmt = GetStmt("INSERT INTO [plugin] VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)");
 sqlite3_reset(stmt);

 sqlite3_bind_int(stmt, 1, data.dll_index);
@ -592,7 +593,7 @@ if (stmt == NULL) { \
 if (mDb == NULL) return;

 #if !defined(VIRTOOLS_21)
-TryGetStmtCache("INSERT INTO [variable] VALUES (?, ?, ?, ?, ?, ?)");
+sqlite3_stmt* stmt = GetStmt("INSERT INTO [variable] VALUES (?, ?, ?, ?, ?, ?)");
 sqlite3_reset(stmt);

 sqlite3_bind_text(stmt, 1, data.name.c_str(), -1, SQLITE_TRANSIENT);
@ -607,7 +608,5 @@ if (stmt == NULL) { \

 #pragma endregion

-#undef TryGetStmtCache
-
 }
 }

@ -4,7 +4,7 @@
 #include "stdafx.h"
 #include "virtools_compatible.hpp"
 #include <string>
-#include <vector>
+#include <unordered_map>
 #include <set>

 namespace SSMaterializer {
@ -290,14 +290,14 @@ namespace SSMaterializer {
 virtual ~SSMaterializerDatabase();

 protected:
-sqlite3_stmt* CreateStmt(const char* stmt);
+sqlite3_stmt* GetStmt(const char* stmt);
 void FakeConstructor(const char* file);
 void FakeDeconstructor();
 virtual BOOL Init();
 virtual BOOL Finalize();

 sqlite3* mDb;
-std::vector<sqlite3_stmt*> mStmtCache;
+std::unordered_map<uintptr_t, sqlite3_stmt*> mStmtCache;
 };

 class DocumentDatabase : public SSMaterializerDatabase {

@ -1,2 +1,2 @@
 Ballance/vt2obj "example.nmo" "export.db" "env.db"
-"Ballance/vt2obj mirror" Gameplay.nmo export.db env.db
+"Ballance/vt2obj mirror" Gameplay.nmo export2.db env.db