Update doc. Split project, part 1
* Update doc. Still not the final version. * Split the original Viewer into independent Decorator and Viewer projects.
SuperScriptDecorator/CustomConfig.py (new file)
@@ -0,0 +1,8 @@
import locale

# encoding list
# https://docs.python.org/3/library/codecs.html#standard-encodings
database_encoding = locale.getpreferredencoding()
export_db = "import.txt"
decorated_db = "decorate.db"
debug_mode = False
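These settings are plain module-level globals consumed by DecoratorCore.run(). For reference, a minimal sketch (not part of this commit) of the decoding pattern that database_encoding drives; the SELECT is only a placeholder query to exercise the connection:

import sqlite3
import CustomConfig

# Open the exported database and decode TEXT columns with the configured
# encoding instead of assuming UTF-8; undecodable bytes are ignored.
db = sqlite3.connect(CustomConfig.export_db)
db.text_factory = lambda b: b.decode(CustomConfig.database_encoding, errors="ignore")

cur = db.cursor()
cur.execute("SELECT 'ok';")
print(cur.fetchone()[0])
db.close()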
SuperScriptDecorator/DecoratorConstValue.py (new file)
@@ -0,0 +1,129 @@
import json

FONT_SIZE = 12

GRAPH_POFFSET = 40  # distance from the graph body's p pins to the left border
GRAPH_BOFFSET = 40  # distance from the graph body's b pins to the top border
GRAPH_PSPAN = 20  # horizontal spacing between the graph body's p pins
GRAPH_BSPAN = 20  # vertical spacing between the graph body's b pins
GRAPH_LAYER_SPAN = 50  # spacing between bb layers in the graph
GRAPH_BB_SPAN = 25  # spacing between bbs in the graph (also used between opers and non-attachable locals)
GRAPH_SPAN_BB_POPER = 60  # vertical distance between an oper mounted on a bb and that bb, and between the oper layers mounted on the same bb
GRAPH_SPAN_BB_PLOCAL = 10  # vertical distance between a pLocal mounted on a bb and that bb
GRAPH_CONTENTOFFSET_X = 40  # distance from the graph content origin to the left border
GRAPH_CONTENTOFFSET_Y = 40  # distance from the graph content origin to the top border
BB_POFFSET = 20  # distance from a bb's p pins to its left border
BB_BOFFSET = 10  # distance from a bb's b pins to its top border
BB_PSPAN = 20  # horizontal spacing between a bb's p pins
BB_BSPAN = 20  # vertical spacing between a bb's b pins
BB_PBSIZE = 6  # side length of the square p/b pin symbol in a bb (also used for graph-level p/b pins)
CELL_WIDTH = 15
CELL_HEIGHT = 5


class dbPLinkInputOutputType(object):
    PIN = 0
    POUT = 1
    PLOCAL = 2
    PTARGET = 3
    PATTR = 4


class dbBLinkInputOutputType(object):
    INPUT = 0
    OUTPUT = 1


class CellType(object):
    PLOCAL = 0
    SHORTCUT = 1
    PIO = 2
    BIO = 3
    PTARGET = 4


class LocalUsageType(object):
    PIN = 0
    POUT = 1
    PLOCAL = 2
    PATTR = 3


class JsonCustomEncoder(json.JSONEncoder):
    def default(self, field):
        if isinstance(field, PinInformation):
            return {'id': field.id, 'name': field.name, 'type': field.type}
        else:
            return json.JSONEncoder.default(self, field)


class BlockCellItem(object):
    def __init__(self, x, y, w, h):
        self.x = x
        self.y = y
        self.w = w
        self.h = h


class BBTreeNode(object):
    def __init__(self, ckid, layer):
        self.bb = ckid
        self.layer = layer
        self.nodes = []


class BBResult(object):
    def __init__(self, name, assistName, pin, pout, bin, bout, expandable):
        self.name = name
        self.assistName = assistName
        self.ptargetData = None
        self.pin = int(pin)
        self.pinData = None
        self.pout = int(pout)
        self.poutData = None
        self.bin = int(bin)
        self.binData = None
        self.bout = int(bout)
        self.boutData = None
        self.x = 0.0
        self.y = 0.0
        self.width = 0.0
        self.height = 0.0
        self.expandable = expandable

    def computSize(self):
        wText = max(len(self.name), len(self.assistName)) * FONT_SIZE / 3 * 2
        hText = FONT_SIZE * 3

        wp = max(self.pin, self.pout) * (BB_PBSIZE + BB_PSPAN)
        hb = max(self.bin, self.bout) * (BB_PBSIZE + BB_BSPAN)

        self.width = 2 * BB_POFFSET + max(wp, wText)
        self.height = 2 * BB_BOFFSET + max(hb, hText)


class OperResult(object):
    def __init__(self, name):
        self.name = name
        self.x = 0.0
        self.y = 0.0
        self.pinData = None
        self.poutData = None
        self.height = 0.0
        self.width = 0.0

    def computSize(self):
        wText = len(self.name) * FONT_SIZE / 3 * 2
        hText = FONT_SIZE * 3

        wp = 2 * BB_POFFSET + 2 * (BB_PBSIZE + BB_PSPAN)
        hb = 2 * BB_BOFFSET + 0 * (BB_PBSIZE + BB_BSPAN)

        self.width = max(wp, wText)
        self.height = max(hb, hText)


class PinInformation(object):
    def __init__(self, id, name, type):
        self.id = id
        self.name = name
        self.type = type


class LocalUsageItem(object):
    def __init__(self, count, isshortcut, internal_type):
        self.count = count
        self.lastUse = -1
        self.lastDirection = 0  # 0 for pIn, 1 for pOut
        self.lastIndex = -1  # -1 for pTarget, otherwise the real index
        self.isshortcut = isshortcut
        self.internal_type = internal_type  # 0 pIn, 1 pOut, 2 pLocal; kept so the matching data table can be queried conveniently
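JsonCustomEncoder is what lets DecoratorCore dump PinInformation lists straight into the TEXT columns of the decorated database. A minimal usage sketch (not part of this commit; the pin ids, names and types below are invented for illustration):

import json
import DecoratorConstValue as dcv

# Serialize a pin list the same way DecoratorCore stores it.
pins = [dcv.PinInformation(101, 'In 0', 'float'), dcv.PinInformation(102, 'In 1', 'float')]
print(json.dumps(pins, cls=dcv.JsonCustomEncoder))
# -> [{"id": 101, "name": "In 0", "type": "float"}, {"id": 102, "name": "In 1", "type": "float"}]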
SuperScriptDecorator/DecoratorCore.py (new file)
@@ -0,0 +1,710 @@
import sqlite3
import DecoratorConstValue as dcv
import json
import CustomConfig
import Progressbar


def run():
    exportDb = sqlite3.connect(CustomConfig.export_db)
    exportDb.text_factory = lambda x: x.decode(CustomConfig.database_encoding, errors="ignore")
    decorateDb = sqlite3.connect(CustomConfig.decorated_db)

    # init tables
    print('Init decorate database...')
    initDecorateDb(decorateDb)
    decorateDb.commit()

    # decorate graph
    print('Generating graph list...')
    graphList = []
    decorateGraph(exportDb, decorateDb, graphList)

    # decorate each graph
    print('Generating graph...')
    currentGraphBlockCell = {}
    Progressbar.initProgressbar(len(graphList))
    for i in graphList:
        currentGraphBlockCell.clear()
        buildBlock(exportDb, decorateDb, i, currentGraphBlockCell)
        graphPIO = buildCell(exportDb, decorateDb, i, currentGraphBlockCell)
        buildLink(exportDb, decorateDb, i, currentGraphBlockCell, graphPIO)

        Progressbar.stepProgressbar()
    Progressbar.finProgressbar()

    # export information
    print('Generating info...')
    buildInfo(exportDb, decorateDb)

    # discard all changes to export.db (there are none)
    exportDb.close()
    decorateDb.commit()
    decorateDb.close()


def initDecorateDb(db):
    cur = db.cursor()
    cur.execute("CREATE TABLE graph([graph] INTEGER, [graph_name] TEXT, [width] INTEGER, [height] INTEGER, [index] INTEGER, [belong_to] TEXT);")
    cur.execute("CREATE TABLE info([target] INTEGER, [attach_bb] INTEGER, [is_setting] INTEGER, [name] TEXT, [field] TEXT, [data] TEXT);")

    cur.execute("CREATE TABLE block([belong_to_graph] INTEGER, [thisobj] INTEGER, [name] TEXT, [assist_text] TEXT, [pin-ptarget] TEXT, [pin-pin] TEXT, [pin-pout] TEXT, [pin-bin] TEXT, [pin-bout] TEXT, [x] REAL, [y] REAL, [width] REAL, [height] REAL, [expandable] INTEGER);")
    cur.execute("CREATE TABLE cell([belong_to_graph] INTEGER, [thisobj] INTEGER, [name] TEXT, [assist_text] TEXT, [x] REAL, [y] REAL, [type] INTEGER);")
    cur.execute("CREATE TABLE link([belong_to_graph] INTEGER, [delay] INTEGER, [start_interface] INTEGER, [end_interface] INTEGER, [startobj] INTEGER, [endobj] INTEGER, [start_type] INTEGER, [end_type] INTEGER, [start_index] INTEGER, [end_index] INTEGER, [x1] REAL, [y1] REAL, [x2] REAL, [y2] REAL);")


def decorateGraph(exDb, deDb, graph):
    exCur = exDb.cursor()
    deCur = deDb.cursor()
    scriptMap = {}

    exCur.execute("SELECT [behavior], [index], [name] FROM script;")
    while True:
        lines = exCur.fetchone()
        if lines == None:
            break
        scriptMap[lines[0]] = (lines[1], lines[2])

    exCur.execute("SELECT [thisobj], [type], [name] FROM behavior WHERE [type] != 0;")
    while True:
        lines = exCur.fetchone()
        if lines == None:
            break

        # add into global graph list
        graph.append(lines[0])

        # width and height will be computed by the following methods and written
        # back with an UPDATE statement
        if lines[1] == 1:
            # script
            deCur.execute("INSERT INTO graph VALUES(?, ?, 0, 0, ?, ?)", (lines[0], lines[2], scriptMap[lines[0]][0], scriptMap[lines[0]][1]))
        else:
            # sub bb
            deCur.execute("INSERT INTO graph VALUES(?, ?, 0, 0, -1, '')", (lines[0], lines[2]))


def buildBlock(exDb, deDb, target, currentGraphBlockCell):
    exCur = exDb.cursor()
    deCur = deDb.cursor()

    # sort inner bb
    # use the current graph's input as the start point
    treeRoot = dcv.BBTreeNode(target, -1)
    processedBB = set()
    # layer starts from 2; 0 is occupied by pLocal, 1 is occupied by pOper
    arrangedLayer = recursiveBuildBBTree(treeRoot, exCur, processedBB, 2, 0, target)

    # get the unlinked bb and place them; linked bb positions are computed below
    # calc each bb's x position; y is calculated later
    # flatten the bb tree
    arrangedLayer += 1
    singleBB = set()
    bbResult = {}
    bb_layer_map = {}
    baseX = dcv.GRAPH_CONTENTOFFSET_X
    exCur.execute('SELECT [thisobj], [name], [type], [proto_name], [pin_count] FROM behavior WHERE parent == ?', (target,))
    for i in exCur.fetchall():
        pinSplit = i[4].split(',')
        bbCache = dcv.BBResult(i[1], i[3], pinSplit[1], pinSplit[2], pinSplit[3], pinSplit[4], (i[0] if i[2] != 0 else -1))
        bbCache.computSize()
        if i[0] not in processedBB:
            # single bb, process it
            singleBB.add(i[0])
            bbCache.x = baseX
            baseX += bbCache.width + dcv.GRAPH_BB_SPAN
            bb_layer_map[i[0]] = arrangedLayer

        bbResult[i[0]] = bbCache

    recursiveCalcBBX(treeRoot, dcv.GRAPH_CONTENTOFFSET_X, bbResult, bb_layer_map)

    # calc poper
    allBB = processedBB | singleBB
    processedOper = set()
    pluggedOper = {}
    occupiedLayerCountForSpecificBB = {}
    exCur.execute('SELECT [thisobj] FROM pOper WHERE [belong_to] == ?', (target,))
    newCur = exDb.cursor()
    newCur2 = exDb.cursor()
    for i in exCur.fetchall():
        if i[0] in processedOper:
            continue

        # check the current bout and plug it into the first bb
        newCur.execute("SELECT [output_obj] FROM pLink WHERE ([input_obj] == ? AND [output_type] == ? AND [output_is_bb] == 1)", (i[0], dcv.dbPLinkInputOutputType.PIN))
        for j in newCur.fetchall():
            if j[0] in allBB:
                # can be plugged in
                # try to get the tree
                if j[0] not in pluggedOper.keys():
                    pluggedOper[j[0]] = {}
                recursiveBuildOperTree(i[0], bb_layer_map, processedOper, occupiedLayerCountForSpecificBB, newCur2, 1, j[0], target, pluggedOper[j[0]])
                # exit the loop because a proper host bb has been found
                break

    # calc layer position
    layer_height = {}
    layer_y = {}
    layer_height[0] = 25
    layer_height[1] = 50
    for i in bb_layer_map.keys():
        curLayer = bb_layer_map[i]
        if curLayer not in layer_height.keys():
            layer_height[curLayer] = bbResult[i].height
        else:
            layer_height[curLayer] = max(layer_height.get(curLayer, 0), bbResult[i].height)
    layer_height[arrangedLayer] = layer_height.get(arrangedLayer, 0)  # make sure the misc bb height exists
    layer_height[2] = layer_height.get(2, 0)  # make sure at least one bb layer exists (when a map has no bb)

    # calc bb Y
    baseY = dcv.GRAPH_CONTENTOFFSET_Y
    for i in range(arrangedLayer + 1):
        baseY += layer_height[i] + dcv.GRAPH_LAYER_SPAN
        baseY += occupiedLayerCountForSpecificBB.get(i, 0) * dcv.GRAPH_SPAN_BB_POPER  # add oper occupation
        layer_y[i] = baseY
    for i in bbResult.keys():
        cache = bbResult[i]
        layer = bb_layer_map[i]
        cache.y = layer_y[layer] - layer_height[layer]

    # calc oper position
    # flatten the oper tree
    operResult = {}
    exCur.execute('SELECT [thisobj], [op] FROM pOper WHERE [belong_to] == ?', (target,))
    homelessOperCurrentX = dcv.GRAPH_CONTENTOFFSET_X
    for i in exCur.fetchall():
        if i[0] not in processedOper:
            # homeless oper
            cache2 = dcv.OperResult(i[1])
            cache2.computSize()
            cache2.x = homelessOperCurrentX
            cache2.y = layer_y[1] - cache2.height
            homelessOperCurrentX += cache2.width + dcv.GRAPH_BB_SPAN
            operResult[i[0]] = cache2

    for i in pluggedOper.keys():  # plugged oper
        cache = bbResult[i]
        for j in pluggedOper[i]:
            jCache = pluggedOper[i][j]
            baseX = cache.x
            for q in jCache:
                exCur.execute("SELECT [op] FROM pOper WHERE [thisobj] == ?", (q,))
                cache2 = dcv.OperResult(exCur.fetchone()[0])
                cache2.computSize()
                cache2.x = baseX
                baseX += cache2.width + dcv.GRAPH_BB_SPAN
                cache2.y = cache.y - j * dcv.GRAPH_SPAN_BB_POPER
                operResult[q] = cache2

    # query each bb pin's data
    listCache = []
    listItemCache = None
    for i in allBB:
        cache = bbResult[i]
        exCur.execute("SELECT [thisobj], [name], [type] FROM pTarget WHERE [belong_to] == ?;", (i,))
        temp = exCur.fetchone()
        if temp == None:
            cache.ptargetData = '{}'
        else:
            cache.ptargetData = json.dumps(dcv.PinInformation(temp[0], temp[1], temp[2]), cls = dcv.JsonCustomEncoder)

        listCache.clear()
        exCur.execute("SELECT [thisobj], [name] FROM bIn WHERE [belong_to] == ? ORDER BY [index];", (i,))
        for j in exCur.fetchall():
            listItemCache = dcv.PinInformation(j[0], j[1], '')
            listCache.append(listItemCache)
        cache.binData = json.dumps(listCache, cls = dcv.JsonCustomEncoder)

        listCache.clear()
        exCur.execute("SELECT [thisobj], [name] FROM bOut WHERE [belong_to] == ? ORDER BY [index];", (i,))
        for j in exCur.fetchall():
            listItemCache = dcv.PinInformation(j[0], j[1], '')
            listCache.append(listItemCache)
        cache.boutData = json.dumps(listCache, cls = dcv.JsonCustomEncoder)

        listCache.clear()
        exCur.execute("SELECT [thisobj], [name], [type] FROM pIn WHERE [belong_to] == ? ORDER BY [index];", (i,))
        for j in exCur.fetchall():
            listItemCache = dcv.PinInformation(j[0], j[1], j[2])
            listCache.append(listItemCache)
        cache.pinData = json.dumps(listCache, cls = dcv.JsonCustomEncoder)

        listCache.clear()
        exCur.execute("SELECT [thisobj], [name], [type] FROM pOut WHERE [belong_to] == ? ORDER BY [index];", (i,))
        for j in exCur.fetchall():
            listItemCache = dcv.PinInformation(j[0], j[1], j[2])
            listCache.append(listItemCache)
        cache.poutData = json.dumps(listCache, cls = dcv.JsonCustomEncoder)

    # query each oper pin's data
    for i in operResult.keys():
        cache = operResult[i]

        listCache.clear()
        exCur.execute("SELECT [thisobj], [name], [type] FROM pIn WHERE [belong_to] == ? ORDER BY [index];", (i,))
        for j in exCur.fetchall():
            listItemCache = dcv.PinInformation(j[0], j[1], j[2])
            listCache.append(listItemCache)
        cache.pinData = json.dumps(listCache, cls = dcv.JsonCustomEncoder)

        listCache.clear()
        exCur.execute("SELECT [thisobj], [name], [type] FROM pOut WHERE [belong_to] == ? ORDER BY [index];", (i,))
        for j in exCur.fetchall():
            listItemCache = dcv.PinInformation(j[0], j[1], j[2])
            listCache.append(listItemCache)
        cache.poutData = json.dumps(listCache, cls = dcv.JsonCustomEncoder)

    # write to database and return
    for i in bbResult.keys():
        cache = bbResult[i]
        currentGraphBlockCell[i] = dcv.BlockCellItem(cache.x, cache.y, cache.width, cache.height)
        deCur.execute('INSERT INTO block VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)',
                      (target, i, cache.name, cache.assistName, cache.ptargetData, cache.pinData, cache.poutData, cache.binData, cache.boutData, cache.x, cache.y, cache.width, cache.height, cache.expandable))
    for i in operResult.keys():
        cache = operResult[i]
        currentGraphBlockCell[i] = dcv.BlockCellItem(cache.x, cache.y, cache.width, cache.height)
        deCur.execute("INSERT INTO block VALUES (?, ?, ?, '', '{}', ?, ?, '[]', '[]', ?, ?, ?, ?, -1)",
                      (target, i, cache.name, cache.pinData, cache.poutData, cache.x, cache.y, cache.width, cache.height))


def recursiveBuildBBTree(node, exCur, processedBB, layer, depth, graphId):
    realLinkedBB = set()
    # find links
    exCur.execute("SELECT [output_obj] FROM bLink WHERE ([input_obj] == ? AND [input_type] == ? AND [belong_to] = ?) ORDER BY [input_index] ASC;",
                  (node.bb, (dcv.dbBLinkInputOutputType.INPUT if depth == 0 else dcv.dbBLinkInputOutputType.OUTPUT), graphId))
    for i in exCur.fetchall():
        if i[0] != graphId:  # omit self
            realLinkedBB.add(i[0])

    if (len(realLinkedBB) == 0):
        return layer

    # ignore duplicated bb:
    # compute the bb that still need processing first, and register every bb found
    # here, to prevent infinite recursion and to keep the bb tree structure
    realLinkedBB = realLinkedBB - processedBB
    processedBB.update(realLinkedBB)

    # iterate each bb
    for i in realLinkedBB:
        # recursively execute this method
        newNode = dcv.BBTreeNode(i, layer)
        layer = recursiveBuildBBTree(newNode, exCur, processedBB, layer, depth + 1, graphId)
        # add the new node into the list and ++layer
        layer += 1
        node.nodes.append(newNode)

    # remove the extra ++ caused by the for loop
    if (len(realLinkedBB) != 0):
        layer -= 1

    return layer


def recursiveCalcBBX(node, baseX, resultList, layerMap):
    maxExpand = 0
    for i in node.nodes:
        layerMap[i.bb] = i.layer
        resultList[i.bb].x = baseX
        maxExpand = max(maxExpand, resultList[i.bb].width)

    for i in node.nodes:
        recursiveCalcBBX(i, baseX + maxExpand + dcv.GRAPH_BB_SPAN, resultList, layerMap)


def recursiveBuildOperTree(oper, bb_layer_map, processedOper, occupiedLayerMap, exCur, sublayer, bb, graphId, subLayerColumnMap):
    if oper in processedOper:
        return

    # to work around the export-parameter feature, check whether this oper is
    # a member of the current graph
    exCur.execute("SELECT [belong_to] FROM pOper WHERE [thisobj] == ?;", (oper,))
    if (exCur.fetchone()[0] != graphId):
        # exported parameter, skip it
        return

    # make sure the sub layer column map is ready
    if sublayer not in subLayerColumnMap.keys():
        subLayerColumnMap[sublayer] = []

    # register self
    # mark processed
    processedOper.add(oper)
    subLayerColumnMap[sublayer].append(oper)

    # record layer occupation
    layer = bb_layer_map[bb]
    occupiedLayerMap[layer] = max(occupiedLayerMap.get(layer, -1), sublayer)

    # iterate sub items
    exCur.execute("SELECT [input_obj] FROM pLink WHERE ([output_obj] == ? AND [input_type] == ? AND [input_is_bb] == 0) ORDER BY [output_index];", (oper, dcv.dbPLinkInputOutputType.POUT))
    res = []
    for i in exCur.fetchall():
        res.append(i[0])

    for i in res:
        recursiveBuildOperTree(i, bb_layer_map, processedOper, occupiedLayerMap, exCur, sublayer + 1, bb, graphId, subLayerColumnMap)


def buildCell(exDb, deDb, target, currentGraphBlockCell):
    exCur = exDb.cursor()
    deCur = deDb.cursor()
    # prepare block set
    blockSet = set()
    for i in currentGraphBlockCell.keys():
        blockSet.add(i)

    # find the current graph's pio and bio
    boutx = set()
    pouty = set()
    graphPIO = set()

    # bOut.x and pOut.y are not confirmed yet; they are updated once the graph
    # size is confirmed
    exCur.execute("SELECT [thisobj], [name], [index] FROM bIn WHERE [belong_to] == ?", (target,))
    for i in exCur.fetchall():
        x = 0
        y = dcv.GRAPH_BOFFSET + i[2] * (dcv.BB_PBSIZE + dcv.GRAPH_BSPAN)
        currentGraphBlockCell[i[0]] = dcv.BlockCellItem(x, y, dcv.BB_PBSIZE, dcv.BB_PBSIZE)
        deCur.execute("INSERT INTO cell VALUES (?, ?, ?, '', ?, ?, ?)", (target, i[0], i[1], x, y, dcv.CellType.BIO))
    exCur.execute("SELECT [thisobj], [name], [index] FROM bOut WHERE [belong_to] == ?", (target,))
    for i in exCur.fetchall():
        x = 0
        y = dcv.GRAPH_BOFFSET + i[2] * (dcv.BB_PBSIZE + dcv.GRAPH_BSPAN)
        currentGraphBlockCell[i[0]] = dcv.BlockCellItem(x, y, dcv.BB_PBSIZE, dcv.BB_PBSIZE)
        deCur.execute("INSERT INTO cell VALUES (?, ?, ?, '', ?, ?, ?)", (target, i[0], i[1], x, y, dcv.CellType.BIO))
        boutx.add(i[0])

    exCur.execute("SELECT [thisobj], [name], [index], [type] FROM pIn WHERE [belong_to] == ?", (target,))
    for i in exCur.fetchall():
        x = dcv.GRAPH_POFFSET + i[2] * (dcv.BB_PBSIZE + dcv.GRAPH_PSPAN)
        y = 0
        currentGraphBlockCell[i[0]] = dcv.BlockCellItem(x, y, dcv.BB_PBSIZE, dcv.BB_PBSIZE)
        deCur.execute("INSERT INTO cell VALUES (?, ?, ?, ?, ?, ?, ?)", (target, i[0], i[1], i[3], x, y, dcv.CellType.PIO))
        graphPIO.add(i[0])
    exCur.execute("SELECT [thisobj], [name], [index], [type] FROM pOut WHERE [belong_to] == ?", (target,))
    for i in exCur.fetchall():
        x = dcv.GRAPH_POFFSET + i[2] * (dcv.BB_PBSIZE + dcv.GRAPH_PSPAN)
        y = 0
        currentGraphBlockCell[i[0]] = dcv.BlockCellItem(x, y, dcv.BB_PBSIZE, dcv.BB_PBSIZE)
        deCur.execute("INSERT INTO cell VALUES (?, ?, ?, ?, ?, ?, ?)", (target, i[0], i[1], i[3], x, y, dcv.CellType.PIO))
        graphPIO.add(i[0])
        pouty.add(i[0])
    exCur.execute("SELECT [thisobj], [name], [type] FROM pTarget WHERE [belong_to] == ?", (target,))
    cache = exCur.fetchone()
    if cache != None:
        currentGraphBlockCell[cache[0]] = dcv.BlockCellItem(0, 0, dcv.BB_PBSIZE, dcv.BB_PBSIZE)
        deCur.execute("INSERT INTO cell VALUES (?, ?, ?, ?, 0, 0, ?)", (target, cache[0], cache[1], cache[2], dcv.CellType.PTARGET))
        graphPIO.add(cache[0])

    # query all plocal
    allLocal = set()
    localUsageCounter = {}
    exCur.execute("SELECT [thisobj], [name], [type] FROM pLocal WHERE [belong_to] == ?;", (target,))
    for i in exCur.fetchall():
        allLocal.add(i[0])
        localUsageCounter[i[0]] = dcv.LocalUsageItem(0, False, dcv.LocalUsageType.PLOCAL)

    # query all links (exported pIO doesn't need to be considered, because it
    # never adds a shortcut)
    # !! the same if-framework as in the pLink generator function !! SHARED
    createdShortcut = set()
    exCur.execute("SELECT * FROM pLink WHERE [belong_to] == ?", (target,))
    for i in exCur.fetchall():

        # analyse the 5 cases one by one
        if (i[7] == dcv.dbPLinkInputOutputType.PTARGET or i[7] == dcv.dbPLinkInputOutputType.PIN):
            if (i[3] == dcv.dbPLinkInputOutputType.PLOCAL):
                if i[2] in allLocal or i[2] in createdShortcut:
                    cache = localUsageCounter[i[2]]
                else:
                    cache = dcv.LocalUsageItem(0, True, dcv.LocalUsageType.PLOCAL)
                    localUsageCounter[i[2]] = cache
                    createdShortcut.add(i[2])

                cache.count += 1
                cache.lastUse = i[6]
                cache.lastDirection = 0
                cache.lastIndex = i[9]

            elif (i[3] == dcv.dbPLinkInputOutputType.PIN):
                if i[2] == target:
                    continue  # ignore self pIn/pOut; it doesn't need any shortcut
                if i[2] not in blockSet:
                    if i[0] not in createdShortcut:
                        cache = dcv.LocalUsageItem(0, True, dcv.LocalUsageType.PIN)
                        localUsageCounter[i[0]] = cache
                        createdShortcut.add(i[0])
                    else:
                        cache = localUsageCounter[i[0]]

                    cache.count += 1
                    cache.lastUse = i[6]
                    cache.lastDirection = 0
                    cache.lastIndex = i[9]
            elif (i[3] == dcv.dbPLinkInputOutputType.PATTR):  # for attribute usage
                if i[2] not in createdShortcut:
                    cache = dcv.LocalUsageItem(0, True, dcv.LocalUsageType.PATTR)
                    localUsageCounter[i[2]] = cache
                    createdShortcut.add(i[2])
                else:
                    cache = localUsageCounter[i[2]]

                cache.count += 1
                cache.lastUse = i[6]
                cache.lastDirection = 0
                cache.lastIndex = i[9]
            else:
                if i[2] not in blockSet:
                    if i[0] not in createdShortcut:
                        cache = dcv.LocalUsageItem(0, True, dcv.LocalUsageType.POUT)
                        localUsageCounter[i[0]] = cache
                        createdShortcut.add(i[0])
                    else:
                        cache = localUsageCounter[i[0]]

                    cache.count += 1
                    cache.lastUse = i[6]
                    cache.lastDirection = 1
                    cache.lastIndex = i[9]
        else:
            if (i[7] == dcv.dbPLinkInputOutputType.PLOCAL):
                if i[6] in allLocal or i[6] in createdShortcut:
                    cache = localUsageCounter[i[6]]
                else:
                    cache = dcv.LocalUsageItem(0, True, dcv.LocalUsageType.PLOCAL)
                    localUsageCounter[i[6]] = cache
                    createdShortcut.add(i[6])

                cache.count += 1
                cache.lastUse = i[2]
                cache.lastDirection = 1
                cache.lastIndex = i[5]
            else:
                if i[6] == target:
                    continue  # ignore self pIn/pOut; it doesn't need any shortcut
                if i[6] not in blockSet:
                    if i[1] not in createdShortcut:
                        cache = dcv.LocalUsageItem(0, True, dcv.LocalUsageType.POUT)
                        localUsageCounter[i[1]] = cache
                        createdShortcut.add(i[1])
                    else:
                        cache = localUsageCounter[i[1]]

                    cache.count += 1
                    cache.lastUse = i[2]
                    cache.lastDirection = 1
                    cache.lastIndex = i[5]

    # apply all cells
    defaultCellIndex = 0
    for i in localUsageCounter.keys():
        cache = localUsageCounter[i]
        # compute x,y
        if (cache.count == 1):
            # attachable
            attachTarget = currentGraphBlockCell[cache.lastUse]
            (x, y) = computCellPosition(attachTarget.x, attachTarget.y, attachTarget.h, cache.lastDirection, cache.lastIndex)

        else:
            # place it in the default area
            y = dcv.GRAPH_CONTENTOFFSET_Y
            x = dcv.GRAPH_CONTENTOFFSET_X + defaultCellIndex * (dcv.CELL_WIDTH + dcv.GRAPH_BB_SPAN)
            defaultCellIndex += 1
        # get information
        if (cache.internal_type == dcv.LocalUsageType.PIN):
            tableName = 'pIn'
        elif (cache.internal_type == dcv.LocalUsageType.POUT):
            tableName = 'pOut'
        elif (cache.internal_type == dcv.LocalUsageType.PATTR):
            tableName = 'pAttr'
        else:
            tableName = 'pLocal'
        exCur.execute("SELECT [name], [type] FROM {} WHERE [thisobj] == ?".format(tableName), (i,))
        temp = exCur.fetchone()

        # submit to database and map
        currentGraphBlockCell[i] = dcv.BlockCellItem(x, y, dcv.CELL_WIDTH, dcv.CELL_HEIGHT)
        deCur.execute("INSERT INTO cell VALUES (?, ?, ?, ?, ?, ?, ?)",
                      (target, i, temp[0], temp[1], x, y, (dcv.CellType.SHORTCUT if cache.isshortcut else dcv.CellType.PLOCAL)))

    # compute the graph size, then update the database and currentGraphBlockCell
    graphX = 0
    graphY = 0
    for key, values in currentGraphBlockCell.items():
        graphX = max(graphX, values.x + values.w)
        graphY = max(graphY, values.y + values.h)
    graphX += dcv.GRAPH_POFFSET
    graphY += dcv.GRAPH_BOFFSET

    deCur.execute("UPDATE graph SET [width] = ?, [height] = ? WHERE [graph] == ?", (graphX, graphY, target))

    # update bOut.x and pOut.y data
    for i in boutx:
        deCur.execute("UPDATE cell SET [x] = ? WHERE ([thisobj] == ? AND [belong_to_graph] == ?)", (graphX - dcv.BB_PBSIZE, i, target))
        currentGraphBlockCell[i].x = graphX - dcv.BB_PBSIZE
    for i in pouty:
        deCur.execute("UPDATE cell SET [y] = ? WHERE ([thisobj] == ? AND [belong_to_graph] == ?)", (graphY - dcv.BB_PBSIZE, i, target))
        currentGraphBlockCell[i].y = graphY - dcv.BB_PBSIZE

    return graphPIO


def computCellPosition(baseX, baseY, height, direction, index):
    if (index == -1):
        return (baseX, baseY - dcv.GRAPH_SPAN_BB_PLOCAL)

    if (direction == 0):
        return (baseX + dcv.BB_POFFSET + index * (dcv.BB_PBSIZE + dcv.BB_PSPAN), baseY - dcv.GRAPH_SPAN_BB_PLOCAL)
    else:
        return (baseX + dcv.BB_POFFSET + index * (dcv.BB_PBSIZE + dcv.BB_PSPAN), baseY + height + dcv.GRAPH_SPAN_BB_PLOCAL)


def buildLink(exDb, deDb, target, currentGraphBlockCell, graphPIO):
    exCur = exDb.cursor()
    deCur = deDb.cursor()

    # prepare block set
    blockSet = set()
    for i in currentGraphBlockCell.keys():
        blockSet.add(i)

    # bLink
    exCur.execute("SELECT * FROM bLink WHERE [belong_to] == ?", (target,))
    for i in exCur.fetchall():
        if i[3] == target:
            (x1, y1) = computLinkBTerminal(i[0], 0, -1, currentGraphBlockCell)
            bStartObj = i[0]
            bStartType = 0
            bStartIndex = -1
        else:
            (x1, y1) = computLinkBTerminal(i[3], i[4], i[5], currentGraphBlockCell)
            bStartObj = i[3]
            bStartType = i[4]
            bStartIndex = i[5]
        if i[6] == target:
            (x2, y2) = computLinkBTerminal(i[1], 0, -1, currentGraphBlockCell)
            bEndObj = i[1]
            bEndType = 0
            bEndIndex = -1
        else:
            (x2, y2) = computLinkBTerminal(i[6], i[7], i[8], currentGraphBlockCell)
            bEndObj = i[6]
            bEndType = i[7]
            bEndIndex = i[8]

        deCur.execute("INSERT INTO link VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);",
                      (target, i[2], i[0], i[1], bStartObj, bEndObj, bStartType, bEndType, bStartIndex, bEndIndex, x1, y1, x2, y2))

    # pLink
    # !! the same if-framework as in the cell generator function !! SHARED
    exCur.execute("SELECT * FROM pLink WHERE [belong_to] == ?", (target,))
    for i in exCur.fetchall():
        # analyse the 5 cases one by one
        if (i[7] == dcv.dbPLinkInputOutputType.PTARGET or i[7] == dcv.dbPLinkInputOutputType.PIN):
            if (i[3] == dcv.dbPLinkInputOutputType.PLOCAL):
                (x1, y1) = computLinkPTerminal(i[0], 0, -1, currentGraphBlockCell)
                (x2, y2) = computLinkPTerminal(i[6], 0, i[9], currentGraphBlockCell)
                deCur.execute("INSERT INTO link VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);",
                              (target, -1, i[0], i[1], i[2], i[6], 0, 0, -1, i[9], x1, y1, x2, y2))

            elif (i[3] == dcv.dbPLinkInputOutputType.PIN):
                (x2, y2) = computLinkPTerminal(i[6], 0, i[9], currentGraphBlockCell)
                if i[2] == target:
                    (x1, y1) = computLinkPTerminal(i[0], 0, -1, currentGraphBlockCell)
                    deCur.execute("INSERT INTO link VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);",
                                  (target, -1, i[0], i[1], i[0], i[6], 0, 0, -1, i[9], x1, y1, x2, y2))
                else:
                    (x1, y1) = computLinkPTerminal(i[2], 0, i[5], currentGraphBlockCell)
                    deCur.execute("INSERT INTO link VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);",
                                  (target, -1, i[0], i[1], i[2], i[6], 0, 0, i[5], i[9], x1, y1, x2, y2))
            elif (i[3] == dcv.dbPLinkInputOutputType.PATTR):
                (x1, y1) = computLinkPTerminal(i[0], 0, -1, currentGraphBlockCell)
                (x2, y2) = computLinkPTerminal(i[6], 0, i[9], currentGraphBlockCell)
                deCur.execute("INSERT INTO link VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);",
                              (target, -1, i[0], i[1], i[2], i[6], 0, 0, -1, i[9], x1, y1, x2, y2))
            else:
                if i[2] in blockSet:  # process a potential pOut shortcut (plocal input/input_obj and
                    # output/output_obj are the same, so nothing needs to be added for them)
                    (x1, y1) = computLinkPTerminal(i[2], 1, i[5], currentGraphBlockCell)
                else:
                    (x1, y1) = computLinkPTerminal(i[0], 1, i[5], currentGraphBlockCell)
                (x2, y2) = computLinkPTerminal(i[6], 0, i[9], currentGraphBlockCell)
                deCur.execute("INSERT INTO link VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);",
                              (target, -1, i[0], i[1], i[2], i[6], 1, 0, i[5], i[9], x1, y1, x2, y2))

        else:
            if (i[7] == dcv.dbPLinkInputOutputType.PLOCAL):
                (x1, y1) = computLinkPTerminal(i[2], 1, i[5], currentGraphBlockCell)
                (x2, y2) = computLinkPTerminal(i[1], 0, -1, currentGraphBlockCell)
                deCur.execute("INSERT INTO link VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);",
                              (target, -1, i[0], i[1], i[2], i[6], 1, 0, i[5], -1, x1, y1, x2, y2))
            else:
                (x1, y1) = computLinkPTerminal(i[2], 1, i[5], currentGraphBlockCell)
                if i[6] == target:
                    (x2, y2) = computLinkPTerminal(i[1], 0, -1, currentGraphBlockCell)
                    deCur.execute("INSERT INTO link VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);",
                                  (target, -1, i[0], i[1], i[2], i[1], 1, 0, i[5], -1, x1, y1, x2, y2))
                else:
                    (x2, y2) = computLinkPTerminal(i[6], 1, i[9], currentGraphBlockCell)
                    deCur.execute("INSERT INTO link VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);",
                                  (target, -1, i[0], i[1], i[2], i[6], 1, 1, i[5], i[9], x1, y1, x2, y2))

    # eLink
    exCur.execute("SELECT * FROM eLink WHERE [belong_to] == ?", (target,))
    for i in exCur.fetchall():
        (x1, y1) = computLinkPTerminal(i[0], 0, -1, currentGraphBlockCell)
        (x2, y2) = computLinkPTerminal(i[1], 0 if i[2] == 1 else 1, i[3], currentGraphBlockCell)
        deCur.execute("INSERT INTO link VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);",
                      (target, -2, i[0], i[0], target, i[1], 0, 0 if i[2] == 1 else 1, -1, i[3], x1, y1, x2, y2))


def computLinkBTerminal(obj, xtype, index, currentGraphBlockCell):
    # index == -1 means no offset; it connects to the graph io
    cache = currentGraphBlockCell[obj]
    return (cache.x if xtype == 0 else cache.x + cache.w - dcv.BB_PBSIZE,
            cache.y if index == -1 else (cache.y + dcv.BB_BOFFSET + index * (dcv.BB_PBSIZE + dcv.BB_BSPAN)))


def computLinkPTerminal(obj, ytype, index, currentGraphBlockCell):
    # ytype is not the database type; it has the same meaning as in LinkBTerminal,
    # indicating the position: 0 keeps the original position (for pIn and pTarget),
    # 1 takes the height into account (for pOut)
    cache = currentGraphBlockCell[obj]
    return (cache.x if index == -1 else (cache.x + dcv.BB_POFFSET + index * (dcv.BB_PBSIZE + dcv.BB_PSPAN)),
            cache.y if ytype == 0 else (cache.y + cache.h - dcv.BB_PBSIZE))


def buildInfo(exDb, deDb):
    exInfoCur = exDb.cursor()
    exQueryCur = exDb.cursor()
    deCur = deDb.cursor()

    # declare tiny storage for convenient querying
    tinyStorageKey = 0
    tinyStorageBB = -1
    tinyStorageSetting = 0
    tinyStorageName = ""

    # export local data (including proto bb internal data)
    exInfoCur.execute("SELECT * FROM pData;")
    for i in exInfoCur.fetchall():
        attachBB = -1
        isSetting = 0
        infoName = ""

        if i[2] == tinyStorageKey:
            attachBB = tinyStorageBB
            isSetting = tinyStorageSetting
            infoName = tinyStorageName
        else:
            # remember the new key and reset the cached values first
            tinyStorageKey = i[2]
            tinyStorageBB = -1
            tinyStorageSetting = 0
            tinyStorageName = ""

            # query the corresponding pLocal
            exQueryCur.execute("SELECT [belong_to], [is_setting], [name] FROM pLocal WHERE [thisobj] = ?", (i[2], ))
            plocalCache = exQueryCur.fetchone()
            if plocalCache is not None:
                # add setting config
                tinyStorageSetting = isSetting = plocalCache[1]
                tinyStorageName = infoName = plocalCache[2]
                # query bb again
                exQueryCur.execute("SELECT [thisobj] FROM behavior WHERE ([thisobj] = ? AND [type] = 0)", (plocalCache[0], ))
                behaviorCache = exQueryCur.fetchone()
                if behaviorCache is not None:
                    tinyStorageBB = attachBB = behaviorCache[0]

        deCur.execute("INSERT INTO info VALUES (?, ?, ?, ?, ?, ?)", (i[2], attachBB, isSetting, infoName, i[0], i[1]))
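For orientation, a sketch (not part of this commit) of how the decorated output might be consumed on the Viewer side after DecoratorCore.run() has finished; the column names follow the CREATE TABLE statements in initDecorateDb, but the Viewer's real queries may differ:

import sqlite3
import CustomConfig

# List every decorated graph with its computed canvas size and block count.
db = sqlite3.connect(CustomConfig.decorated_db)
cur = db.cursor()
cur.execute("SELECT [graph], [graph_name], [width], [height] FROM graph;")
for graph, name, width, height in cur.fetchall():
    cur2 = db.cursor()
    cur2.execute("SELECT COUNT(*) FROM block WHERE [belong_to_graph] == ?;", (graph,))
    print('{} ({}): {} x {}, {} blocks'.format(name, graph, width, height, cur2.fetchone()[0]))
db.close()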
SuperScriptDecorator/Progressbar.py (new file)
@@ -0,0 +1,29 @@
import sys

value_All = 0
value_Now = 0
progressbar_span = 2
progressbar_count = int(100 / progressbar_span)


def initProgressbar(all):
    global value_Now, value_All
    value_All = all
    value_Now = 0

    sys.stdout.write('[{}] 0%'.format(progressbar_count * '='))
    sys.stdout.flush()


def stepProgressbar():
    global value_Now, value_All
    value_Now += 1
    if (value_Now > value_All):
        value_Now = value_All

    percentage = int(value_Now / value_All * 100)
    percentage_bar = int(value_Now / value_All * progressbar_count)
    sys.stdout.write('\r[{}{}] {}%'.format(percentage_bar * '#', (progressbar_count - percentage_bar) * '=', percentage))
    sys.stdout.flush()


def finProgressbar():
    sys.stdout.write('\r[{}] 100%\n'.format(progressbar_count * '#'))
    sys.stdout.flush()
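Progressbar keeps module-level counters, so a caller only touches three functions, which is how DecoratorCore wraps its per-graph loop. A minimal driver (not part of this commit; the 20-step loop and the sleep only simulate real work):

import time
import Progressbar

# Drive the progress bar for a fake 20-step job.
Progressbar.initProgressbar(20)
for _ in range(20):
    time.sleep(0.05)
    Progressbar.stepProgressbar()
Progressbar.finProgressbar()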
SuperScriptDecorator/SuperScriptDecorator.py (new file)
@@ -0,0 +1,53 @@
import CustomConfig
import DecoratorCore
import os
import sys
import getopt
import logging

try:
    opts, args = getopt.getopt(sys.argv[1:], "hi:o:e:c:fd")
except getopt.GetoptError:
    print('Wrong arguments!')
    print('python SuperScriptDecorator.py -i <import.txt> -o <decorated.db> -c <codec_name> -d')
    sys.exit(1)
for opt, arg in opts:
    if opt == '-h':
        print('python SuperScriptDecorator.py -i <import.txt> -o <decorated.db> -c <codec_name> -d')
        sys.exit(0)
    elif opt == '-i':
        CustomConfig.export_db = arg
    elif opt == '-o':
        CustomConfig.decorated_db = arg
    elif opt == '-c':
        CustomConfig.database_encoding = arg
    elif opt == '-d':
        CustomConfig.debug_mode = True

print('Super Script Decorator')
print('Homepage: https://github.com/yyc12345/SuperScriptMaterializer')
print('Report bug: https://github.com/yyc12345/SuperScriptMaterializer/issues')
print('')

# process input and output
if not os.path.isfile(CustomConfig.export_db):
    print('No import.txt. Fail to generate. Exit app.')
    sys.exit(1)

# real db generator func
def dc_wrapper():
    pass

# generate db
if CustomConfig.debug_mode:
    DecoratorCore.run()
else:
    try:
        DecoratorCore.run()
    except Exception as ex:
        print("!!! An error occurred. Please report the following error output and a reproduction file to the developer. !!!")
        logging.exception(ex)
        sys.exit(1)

print('Decorated database generating done.')

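Because this front end only assigns CustomConfig fields and then calls DecoratorCore.run(), the decorator can also be driven from another script without the getopt layer. A hedged sketch (not part of this commit; the file names are placeholders for whatever the exporter actually produced):

import CustomConfig
import DecoratorCore

# Point the decorator at an exported database and run it directly.
CustomConfig.export_db = 'export.txt'        # placeholder path to the exporter output
CustomConfig.decorated_db = 'decorate.db'    # placeholder path for the decorated result
CustomConfig.database_encoding = 'utf-8'     # assumed encoding of the exported TEXT columns

DecoratorCore.run()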
SuperScriptDecorator/SuperScriptDecorator.pyproj (new file)
@@ -0,0 +1,50 @@
<Project DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003" ToolsVersion="4.0">
  <PropertyGroup>
    <Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
    <SchemaVersion>2.0</SchemaVersion>
    <ProjectGuid>6d751bf5-87d6-4123-94b3-34721938cf04</ProjectGuid>
    <ProjectHome>.</ProjectHome>
    <StartupFile>SuperScriptDecorator.py</StartupFile>
    <SearchPath>
    </SearchPath>
    <WorkingDirectory>.</WorkingDirectory>
    <OutputPath>.</OutputPath>
    <Name>SuperScriptDecorator</Name>
    <RootNamespace>SuperScriptDecorator</RootNamespace>
    <LaunchProvider>Standard Python launcher</LaunchProvider>
    <CommandLineArguments>-f -d</CommandLineArguments>
    <EnableNativeCodeDebugging>False</EnableNativeCodeDebugging>
  </PropertyGroup>
  <PropertyGroup Condition=" '$(Configuration)' == 'Debug' ">
    <DebugSymbols>true</DebugSymbols>
    <EnableUnmanagedDebugging>false</EnableUnmanagedDebugging>
  </PropertyGroup>
  <PropertyGroup Condition=" '$(Configuration)' == 'Release' ">
    <DebugSymbols>true</DebugSymbols>
    <EnableUnmanagedDebugging>false</EnableUnmanagedDebugging>
  </PropertyGroup>
  <ItemGroup>
    <Compile Include="CustomConfig.py">
      <SubType>Code</SubType>
    </Compile>
    <Compile Include="DecoratorConstValue.py">
      <SubType>Code</SubType>
    </Compile>
    <Compile Include="DecoratorCore.py">
      <SubType>Code</SubType>
    </Compile>
    <Compile Include="Progressbar.py">
      <SubType>Code</SubType>
    </Compile>
    <Compile Include="SuperScriptDecorator.py" />
  </ItemGroup>
  <Import Project="$(MSBuildExtensionsPath32)\Microsoft\VisualStudio\v$(VisualStudioVersion)\Python Tools\Microsoft.PythonTools.targets" />
  <!-- Uncomment the CoreCompile target to enable the Build command in
       Visual Studio and specify your pre- and post-build commands in
       the BeforeBuild and AfterBuild targets below. -->
  <!--<Target Name="CoreCompile" />-->
  <Target Name="BeforeBuild">
  </Target>
  <Target Name="AfterBuild">
  </Target>
</Project>