mirror of
https://git.yoctoproject.org/poky
synced 2026-05-09 05:29:32 +00:00
Update to bitbake 1.4.2 (latest stable branch release). This includes the caching speedups.
git-svn-id: https://svn.o-hand.com/repos/poky/trunk@371 311d38ba-8fff-0310-9ca6-ca027cbcb966
This commit is contained in:
+128
-142
@@ -24,14 +24,14 @@
|
||||
import sys, os, getopt, glob, copy, os.path, re, time
|
||||
sys.path.insert(0,os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
|
||||
import bb
|
||||
from bb import utils, data, parse, debug, event, fatal
|
||||
from bb import utils, data, parse, debug, event, fatal, cache
|
||||
from sets import Set
|
||||
import itertools, optparse
|
||||
|
||||
parsespin = itertools.cycle( r'|/-\\' )
|
||||
bbdebug = 0
|
||||
|
||||
__version__ = "1.3.3.2"
|
||||
__version__ = "1.4.3"
|
||||
|
||||
#============================================================================#
|
||||
# BBParsingStatus
|
||||
@@ -44,7 +44,6 @@ class BBParsingStatus:
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.cache_dirty = False
|
||||
self.providers = {}
|
||||
self.rproviders = {}
|
||||
self.packages = {}
|
||||
@@ -60,34 +59,35 @@ class BBParsingStatus:
|
||||
self.pkg_dp = {}
|
||||
self.pn_provides = {}
|
||||
self.all_depends = Set()
|
||||
self.build_all = {}
|
||||
self.rundeps = {}
|
||||
self.runrecs = {}
|
||||
self.stamp = {}
|
||||
|
||||
def handle_bb_data(self, file_name, bb_data, cached):
|
||||
def handle_bb_data(self, file_name, bb_cache, cached):
|
||||
"""
|
||||
We will fill the dictionaries with the stuff we
|
||||
need to build the tree faster
|
||||
"""
|
||||
if bb_data == None:
|
||||
return
|
||||
|
||||
if not cached:
|
||||
self.cache_dirty = True
|
||||
|
||||
pn = bb.data.getVar('PN', bb_data, True)
|
||||
pv = bb.data.getVar('PV', bb_data, True)
|
||||
pr = bb.data.getVar('PR', bb_data, True)
|
||||
dp = int(bb.data.getVar('DEFAULT_PREFERENCE', bb_data, True) or "0")
|
||||
provides = Set([pn] + (bb.data.getVar("PROVIDES", bb_data, 1) or "").split())
|
||||
depends = (bb.data.getVar("DEPENDS", bb_data, True) or "").split()
|
||||
packages = (bb.data.getVar('PACKAGES', bb_data, True) or "").split()
|
||||
packages_dynamic = (bb.data.getVar('PACKAGES_DYNAMIC', bb_data, True) or "").split()
|
||||
rprovides = (bb.data.getVar("RPROVIDES", bb_data, 1) or "").split()
|
||||
|
||||
pn = bb_cache.getVar('PN', file_name, True)
|
||||
pv = bb_cache.getVar('PV', file_name, True)
|
||||
pr = bb_cache.getVar('PR', file_name, True)
|
||||
dp = int(bb_cache.getVar('DEFAULT_PREFERENCE', file_name, True) or "0")
|
||||
provides = Set([pn] + (bb_cache.getVar("PROVIDES", file_name, True) or "").split())
|
||||
depends = (bb_cache.getVar("DEPENDS", file_name, True) or "").split()
|
||||
packages = (bb_cache.getVar('PACKAGES', file_name, True) or "").split()
|
||||
packages_dynamic = (bb_cache.getVar('PACKAGES_DYNAMIC', file_name, True) or "").split()
|
||||
rprovides = (bb_cache.getVar("RPROVIDES", file_name, True) or "").split()
|
||||
|
||||
# build PackageName to FileName lookup table
|
||||
if pn not in self.pkg_pn:
|
||||
self.pkg_pn[pn] = []
|
||||
self.pkg_pn[pn].append(file_name)
|
||||
|
||||
self.build_all[file_name] = int(bb_cache.getVar('BUILD_ALL_DEPS', file_name, True) or "0")
|
||||
self.stamp[file_name] = bb_cache.getVar('STAMP', file_name, True)
|
||||
|
||||
# build FileName to PackageName lookup table
|
||||
self.pkg_fn[file_name] = pn
|
||||
self.pkg_pvpr[file_name] = (pv,pr)
|
||||
@@ -114,7 +114,7 @@ class BBParsingStatus:
|
||||
if not package in self.packages:
|
||||
self.packages[package] = []
|
||||
self.packages[package].append(file_name)
|
||||
rprovides += (bb.data.getVar("RPROVIDES_%s" % package, bb_data, 1) or "").split()
|
||||
rprovides += (bb_cache.getVar("RPROVIDES_%s" % package, file_name, 1) or "").split()
|
||||
|
||||
for package in packages_dynamic:
|
||||
if not package in self.packages_dynamic:
|
||||
@@ -126,9 +126,32 @@ class BBParsingStatus:
|
||||
self.rproviders[rprovide] = []
|
||||
self.rproviders[rprovide].append(file_name)
|
||||
|
||||
# Build hash of runtime depends and recommends
|
||||
|
||||
def add_dep(deplist, deps):
|
||||
for dep in deps:
|
||||
if not dep in deplist:
|
||||
deplist[dep] = ""
|
||||
|
||||
if not file_name in self.rundeps:
|
||||
self.rundeps[file_name] = {}
|
||||
if not file_name in self.runrecs:
|
||||
self.runrecs[file_name] = {}
|
||||
|
||||
for package in packages + [pn]:
|
||||
if not package in self.rundeps[file_name]:
|
||||
self.rundeps[file_name][package] = {}
|
||||
if not package in self.runrecs[file_name]:
|
||||
self.runrecs[file_name][package] = {}
|
||||
|
||||
add_dep(self.rundeps[file_name][package], bb.utils.explode_deps(bb_cache.getVar('RDEPENDS', file_name, True) or ""))
|
||||
add_dep(self.runrecs[file_name][package], bb.utils.explode_deps(bb_cache.getVar('RRECOMMENDS', file_name, True) or ""))
|
||||
add_dep(self.rundeps[file_name][package], bb.utils.explode_deps(bb_cache.getVar("RDEPENDS_%s" % package, file_name, True) or ""))
|
||||
add_dep(self.runrecs[file_name][package], bb.utils.explode_deps(bb_cache.getVar("RRECOMMENDS_%s" % package, file_name, True) or ""))
|
||||
|
||||
# Collect files we may need for possible world-dep
|
||||
# calculations
|
||||
if not bb.data.getVar('BROKEN', bb_data, True) and not bb.data.getVar('EXCLUDE_FROM_WORLD', bb_data, True):
|
||||
if not bb_cache.getVar('BROKEN', file_name, True) and not bb_cache.getVar('EXCLUDE_FROM_WORLD', file_name, True):
|
||||
self.possible_world.append(file_name)
|
||||
|
||||
|
||||
@@ -166,7 +189,6 @@ class BBConfiguration( object ):
|
||||
def __init__( self, options ):
|
||||
for key, val in options.__dict__.items():
|
||||
setattr( self, key, val )
|
||||
self.data = data.init()
|
||||
|
||||
#============================================================================#
|
||||
# BBCooker
|
||||
@@ -190,8 +212,8 @@ class BBCooker:
|
||||
self.stats = BBStatistics()
|
||||
self.status = None
|
||||
|
||||
self.pkgdata = None
|
||||
self.cache = None
|
||||
self.bb_cache = None
|
||||
|
||||
def tryBuildPackage( self, fn, item, the_data ):
|
||||
"""Build one package"""
|
||||
@@ -226,10 +248,11 @@ class BBCooker:
|
||||
If build_depends is empty, we're dealing with a runtime depends
|
||||
"""
|
||||
|
||||
the_data = self.pkgdata[fn]
|
||||
the_data = self.bb_cache.loadDataFull(fn, self)
|
||||
|
||||
if not buildAllDeps:
|
||||
buildAllDeps = bb.data.getVar('BUILD_ALL_DEPS', the_data, True) or False
|
||||
# Only follow all (runtime) dependencies if doing a build
|
||||
if not buildAllDeps and self.configuration.cmd is "build":
|
||||
buildAllDeps = self.status.build_all[fn]
|
||||
|
||||
# Error on build time dependency loops
|
||||
if build_depends and build_depends.count(fn) > 1:
|
||||
@@ -402,12 +425,15 @@ class BBCooker:
|
||||
|
||||
print "%-30s %20s %20s" % (p, latest[0][0] + "-" + latest[0][1],
|
||||
prefstr)
|
||||
|
||||
|
||||
def showEnvironment( self ):
|
||||
"""Show the outer or per-package environment"""
|
||||
if self.configuration.buildfile:
|
||||
self.cb = None
|
||||
self.bb_cache = bb.cache.init(self)
|
||||
try:
|
||||
self.configuration.data, fromCache = self.load_bbfile( self.configuration.buildfile )
|
||||
self.configuration.data = self.bb_cache.loadDataFull(self.configuration.buildfile, self)
|
||||
except IOError, e:
|
||||
fatal("Unable to read %s: %s" % ( self.configuration.buildfile, e ))
|
||||
except Exception, e:
|
||||
@@ -457,11 +483,10 @@ class BBCooker:
|
||||
# look to see if one of them is already staged, or marked as preferred.
|
||||
# if so, bump it to the head of the queue
|
||||
for p in providers:
|
||||
the_data = self.pkgdata[p]
|
||||
pn = bb.data.getVar('PN', the_data, 1)
|
||||
pv = bb.data.getVar('PV', the_data, 1)
|
||||
pr = bb.data.getVar('PR', the_data, 1)
|
||||
stamp = '%s.do_populate_staging' % bb.data.getVar('STAMP', the_data, 1)
|
||||
pn = self.status.pkg_fn[p]
|
||||
pv, pr = self.status.pkg_pvpr[p]
|
||||
|
||||
stamp = '%s.do_populate_staging' % self.status.stamp[p]
|
||||
if os.path.exists(stamp):
|
||||
(newvers, fn) = preferred_versions[pn]
|
||||
if not fn in eligible:
|
||||
@@ -470,11 +495,11 @@ class BBCooker:
|
||||
oldver = "%s-%s" % (pv, pr)
|
||||
newver = '-'.join(newvers)
|
||||
if (newver != oldver):
|
||||
extra_chat = "; upgrading from %s to %s" % (oldver, newver)
|
||||
extra_chat = "%s (%s) already staged but upgrading to %s to satisfy %s" % (pn, oldver, newver, item)
|
||||
else:
|
||||
extra_chat = ""
|
||||
extra_chat = "Selecting already-staged %s (%s) to satisfy %s" % (pn, oldver, item)
|
||||
if self.configuration.verbose:
|
||||
bb.note("selecting already-staged %s to satisfy %s%s" % (pn, item, extra_chat))
|
||||
bb.note("%s" % extra_chat)
|
||||
eligible.remove(fn)
|
||||
eligible = [fn] + eligible
|
||||
discriminated = True
|
||||
@@ -656,20 +681,11 @@ class BBCooker:
|
||||
|
||||
rdepends = []
|
||||
self.rbuild_cache.append(item)
|
||||
the_data = self.pkgdata[fn]
|
||||
pn = self.status.pkg_fn[fn]
|
||||
|
||||
if (item == pn):
|
||||
rdepends += bb.utils.explode_deps(bb.data.getVar('RDEPENDS', the_data, True) or "")
|
||||
rdepends += bb.utils.explode_deps(bb.data.getVar('RRECOMMENDS', the_data, True) or "")
|
||||
rdepends += bb.utils.explode_deps(bb.data.getVar("RDEPENDS_%s" % pn, the_data, True) or "")
|
||||
rdepends += bb.utils.explode_deps(bb.data.getVar('RRECOMMENDS_%s' % pn, the_data, True) or "")
|
||||
else:
|
||||
packages = (bb.data.getVar('PACKAGES', the_data, 1).split() or "")
|
||||
for package in packages:
|
||||
if package == item:
|
||||
rdepends += bb.utils.explode_deps(bb.data.getVar("RDEPENDS_%s" % package, the_data, True) or "")
|
||||
rdepends += bb.utils.explode_deps(bb.data.getVar("RRECOMMENDS_%s" % package, the_data, True) or "")
|
||||
if fn in self.status.rundeps and item in self.status.rundeps[fn]:
|
||||
rdepends += self.status.rundeps[fn][item].keys()
|
||||
if fn in self.status.runrecs and item in self.status.runrecs[fn]:
|
||||
rdepends += self.status.runrecs[fn][item].keys()
|
||||
|
||||
bb.debug(2, "Additional runtime dependencies for %s are: %s" % (item, " ".join(rdepends)))
|
||||
|
||||
@@ -684,6 +700,9 @@ class BBCooker:
|
||||
all_depends = self.status.all_depends
|
||||
pn_provides = self.status.pn_provides
|
||||
|
||||
localdata = data.createCopy(self.configuration.data)
|
||||
bb.data.update_data(localdata)
|
||||
|
||||
def calc_bbfile_priority(filename):
|
||||
for (regex, pri) in self.status.bbfile_config_priorities:
|
||||
if regex.match(filename):
|
||||
@@ -691,17 +710,22 @@ class BBCooker:
|
||||
return 0
|
||||
|
||||
# Handle PREFERRED_PROVIDERS
|
||||
for p in (bb.data.getVar('PREFERRED_PROVIDERS', self.configuration.data, 1) or "").split():
|
||||
for p in (bb.data.getVar('PREFERRED_PROVIDERS', localdata, 1) or "").split():
|
||||
(providee, provider) = p.split(':')
|
||||
if providee in self.preferred and self.preferred[providee] != provider:
|
||||
bb.error("conflicting preferences for %s: both %s and %s specified" % (providee, provider, self.preferred[providee]))
|
||||
self.preferred[providee] = provider
|
||||
|
||||
# Calculate priorities for each file
|
||||
for p in self.pkgdata.keys():
|
||||
for p in self.status.pkg_fn.keys():
|
||||
self.status.bbfile_priority[p] = calc_bbfile_priority(p)
|
||||
|
||||
# Build package list for "bitbake world"
|
||||
def buildWorldTargetList(self):
|
||||
"""
|
||||
Build package list for "bitbake world"
|
||||
"""
|
||||
all_depends = self.status.all_depends
|
||||
pn_provides = self.status.pn_provides
|
||||
bb.debug(1, "collating packages for \"world\"")
|
||||
for f in self.status.possible_world:
|
||||
terminal = True
|
||||
@@ -724,9 +748,10 @@ class BBCooker:
|
||||
self.status.possible_world = None
|
||||
self.status.all_depends = None
|
||||
|
||||
def myProgressCallback( self, x, y, f, file_data, from_cache ):
|
||||
def myProgressCallback( self, x, y, f, bb_cache, from_cache ):
|
||||
# feed the status with new input
|
||||
self.status.handle_bb_data(f, file_data, from_cache)
|
||||
|
||||
self.status.handle_bb_data(f, bb_cache, from_cache)
|
||||
|
||||
if bbdebug > 0:
|
||||
return
|
||||
@@ -755,6 +780,13 @@ class BBCooker:
|
||||
def parseConfigurationFile( self, afile ):
|
||||
try:
|
||||
self.configuration.data = bb.parse.handle( afile, self.configuration.data )
|
||||
|
||||
# Add the handlers we inherited by INHERITS
|
||||
# FIXME: This assumes that we included at least one .inc file
|
||||
for var in bb.data.keys(self.configuration.data):
|
||||
if bb.data.getVarFlag(var, 'handler', self.configuration.data):
|
||||
bb.event.register(var,bb.data.getVar(var,self.configuration.data))
|
||||
|
||||
except IOError:
|
||||
bb.fatal( "Unable to open %s" % afile )
|
||||
except bb.parse.ParseError, details:
|
||||
@@ -786,6 +818,12 @@ class BBCooker:
|
||||
|
||||
|
||||
def cook( self, configuration, args ):
|
||||
"""
|
||||
We are building stuff here. We do the building
|
||||
from here. By default we try to execute task
|
||||
build.
|
||||
"""
|
||||
|
||||
self.configuration = configuration
|
||||
|
||||
if not self.configuration.cmd:
|
||||
@@ -801,6 +839,13 @@ class BBCooker:
|
||||
|
||||
self.parseConfigurationFile( os.path.join( "conf", "bitbake.conf" ) )
|
||||
|
||||
|
||||
#
|
||||
# Special updated configuration we use for firing events
|
||||
#
|
||||
self.configuration.event_data = bb.data.createCopy(self.configuration.data)
|
||||
bb.data.update_data(self.configuration.event_data)
|
||||
|
||||
if self.configuration.show_environment:
|
||||
self.showEnvironment()
|
||||
sys.exit( 0 )
|
||||
@@ -876,18 +921,18 @@ class BBCooker:
|
||||
print "Requested parsing .bb files only. Exiting."
|
||||
return
|
||||
|
||||
bb.data.update_data( self.configuration.data )
|
||||
self.buildDepgraph()
|
||||
|
||||
if self.configuration.show_versions:
|
||||
self.showVersions()
|
||||
sys.exit( 0 )
|
||||
if 'world' in pkgs_to_build:
|
||||
self.buildWorldTargetList()
|
||||
pkgs_to_build.remove('world')
|
||||
for t in self.status.world_target:
|
||||
pkgs_to_build.append(t)
|
||||
|
||||
bb.event.fire(bb.event.BuildStarted(buildname, pkgs_to_build, self.configuration.data))
|
||||
bb.event.fire(bb.event.BuildStarted(buildname, pkgs_to_build, self.configuration.event_data))
|
||||
|
||||
failures = 0
|
||||
for k in pkgs_to_build:
|
||||
@@ -905,7 +950,7 @@ class BBCooker:
|
||||
if self.configuration.abort:
|
||||
sys.exit(1)
|
||||
|
||||
bb.event.fire(bb.event.BuildCompleted(buildname, pkgs_to_build, self.configuration.data, failures))
|
||||
bb.event.fire(bb.event.BuildCompleted(buildname, pkgs_to_build, self.configuration.event_data, failures))
|
||||
|
||||
sys.exit( self.stats.show() )
|
||||
|
||||
@@ -932,77 +977,12 @@ class BBCooker:
|
||||
return []
|
||||
return finddata.readlines()
|
||||
|
||||
def deps_clean(self, d):
|
||||
depstr = data.getVar('__depends', d)
|
||||
if depstr:
|
||||
deps = depstr.split(" ")
|
||||
for dep in deps:
|
||||
(f,old_mtime_s) = dep.split("@")
|
||||
old_mtime = int(old_mtime_s)
|
||||
new_mtime = parse.cached_mtime(f)
|
||||
if (new_mtime > old_mtime):
|
||||
return False
|
||||
return True
|
||||
|
||||
def load_bbfile( self, bbfile ):
|
||||
"""Load and parse one .bb build file"""
|
||||
|
||||
if not self.cache in [None, '']:
|
||||
# get the times
|
||||
cache_mtime = data.init_db_mtime(self.cache, bbfile)
|
||||
file_mtime = parse.cached_mtime(bbfile)
|
||||
|
||||
if file_mtime > cache_mtime:
|
||||
#print " : '%s' dirty. reparsing..." % bbfile
|
||||
pass
|
||||
else:
|
||||
#print " : '%s' clean. loading from cache..." % bbfile
|
||||
cache_data = data.init_db( self.cache, bbfile, False )
|
||||
if self.deps_clean(cache_data):
|
||||
return cache_data, True
|
||||
|
||||
topdir = data.getVar('TOPDIR', self.configuration.data)
|
||||
if not topdir:
|
||||
topdir = os.path.abspath(os.getcwd())
|
||||
# set topdir to here
|
||||
data.setVar('TOPDIR', topdir, self.configuration)
|
||||
bbfile = os.path.abspath(bbfile)
|
||||
bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
|
||||
# expand tmpdir to include this topdir
|
||||
data.setVar('TMPDIR', data.getVar('TMPDIR', self.configuration.data, 1) or "", self.configuration.data)
|
||||
# set topdir to location of .bb file
|
||||
topdir = bbfile_loc
|
||||
#data.setVar('TOPDIR', topdir, cfg)
|
||||
# go there
|
||||
oldpath = os.path.abspath(os.getcwd())
|
||||
os.chdir(topdir)
|
||||
bb = data.init_db(self.cache,bbfile, True, self.configuration.data)
|
||||
try:
|
||||
parse.handle(bbfile, bb) # read .bb data
|
||||
if not self.cache in [None, '']:
|
||||
bb.commit(parse.cached_mtime(bbfile)) # write cache
|
||||
os.chdir(oldpath)
|
||||
return bb, False
|
||||
finally:
|
||||
os.chdir(oldpath)
|
||||
|
||||
def collect_bbfiles( self, progressCallback ):
|
||||
"""Collect all available .bb build files"""
|
||||
self.cb = progressCallback
|
||||
parsed, cached, skipped, masked = 0, 0, 0, 0
|
||||
self.cache = bb.data.getVar( "CACHE", self.configuration.data, 1 )
|
||||
self.pkgdata = data.pkgdata( not self.cache in [None, ''], self.cache, self.configuration.data )
|
||||
self.bb_cache = bb.cache.init(self)
|
||||
|
||||
if not self.cache in [None, '']:
|
||||
if self.cb is not None:
|
||||
print "NOTE: Using cache in '%s'" % self.cache
|
||||
try:
|
||||
os.stat( self.cache )
|
||||
except OSError:
|
||||
bb.mkdirhier( self.cache )
|
||||
else:
|
||||
if self.cb is not None:
|
||||
print "NOTE: Not using a cache. Set CACHE = <directory> to enable."
|
||||
files = (data.getVar( "BBFILES", self.configuration.data, 1 ) or "").split()
|
||||
data.setVar("BBFILES", " ".join(files), self.configuration.data)
|
||||
|
||||
@@ -1037,43 +1017,49 @@ class BBCooker:
|
||||
|
||||
# read a file's metadata
|
||||
try:
|
||||
bb_data, fromCache = self.load_bbfile(f)
|
||||
if fromCache: cached += 1
|
||||
fromCache, skip = self.bb_cache.loadData(f, self)
|
||||
if skip:
|
||||
skipped += 1
|
||||
#bb.note("Skipping %s" % f)
|
||||
self.bb_cache.skip(f)
|
||||
continue
|
||||
elif fromCache: cached += 1
|
||||
else: parsed += 1
|
||||
deps = None
|
||||
if bb_data is not None:
|
||||
# allow metadata files to add items to BBFILES
|
||||
#data.update_data(self.pkgdata[f])
|
||||
addbbfiles = data.getVar('BBFILES', bb_data) or None
|
||||
if addbbfiles:
|
||||
for aof in addbbfiles.split():
|
||||
if not files.count(aof):
|
||||
if not os.path.isabs(aof):
|
||||
aof = os.path.join(os.path.dirname(f),aof)
|
||||
files.append(aof)
|
||||
for var in bb_data.keys():
|
||||
if data.getVarFlag(var, "handler", bb_data) and data.getVar(var, bb_data):
|
||||
event.register(data.getVar(var, bb_data))
|
||||
self.pkgdata[f] = bb_data
|
||||
|
||||
# allow metadata files to add items to BBFILES
|
||||
#data.update_data(self.pkgdata[f])
|
||||
addbbfiles = self.bb_cache.getVar('BBFILES', f, False) or None
|
||||
if addbbfiles:
|
||||
for aof in addbbfiles.split():
|
||||
if not files.count(aof):
|
||||
if not os.path.isabs(aof):
|
||||
aof = os.path.join(os.path.dirname(f),aof)
|
||||
files.append(aof)
|
||||
|
||||
# now inform the caller
|
||||
if self.cb is not None:
|
||||
self.cb( i + 1, len( newfiles ), f, bb_data, fromCache )
|
||||
self.cb( i + 1, len( newfiles ), f, self.bb_cache, fromCache )
|
||||
|
||||
except IOError, e:
|
||||
self.bb_cache.remove(f)
|
||||
bb.error("opening %s: %s" % (f, e))
|
||||
pass
|
||||
except bb.parse.SkipPackage:
|
||||
skipped += 1
|
||||
pass
|
||||
except KeyboardInterrupt:
|
||||
self.bb_cache.sync()
|
||||
raise
|
||||
except Exception, e:
|
||||
self.bb_cache.remove(f)
|
||||
bb.error("%s while parsing %s" % (e, f))
|
||||
except:
|
||||
self.bb_cache.remove(f)
|
||||
raise
|
||||
|
||||
if self.cb is not None:
|
||||
print "\rNOTE: Parsing finished. %d cached, %d parsed, %d skipped, %d masked." % ( cached, parsed, skipped, masked ),
|
||||
|
||||
self.bb_cache.sync()
|
||||
|
||||
#============================================================================#
|
||||
# main
|
||||
#============================================================================#
|
||||
|
||||
Binary file not shown.
Reference in New Issue
Block a user