commit:     c78602c4fc3043be4c0c722f0b5886a372ad9382
Author:     Magnus Granberg <zorry <AT> gentoo <DOT> org>
AuthorDate: Sat Apr 18 20:26:35 2015 +0000
Commit:     Magnus Granberg <zorry <AT> gentoo <DOT> org>
CommitDate: Sat Apr 18 20:26:35 2015 +0000
URL:        https://gitweb.gentoo.org/proj/tinderbox-cluster.git/commit/?id=c78602c4

add code/files in tbc/pym for the backend python code

 tbc/pym/ConnectionManager.py |  22 ++
 tbc/pym/__init__.py          |   0
 tbc/pym/build_depgraph.py    |  65 ++++++
 tbc/pym/build_job.py         | 166 ++++++++++++++
 tbc/pym/build_log.py         | 336 ++++++++++++++++++++++++++++
 tbc/pym/buildquerydb.py      | 102 +++++++++
 tbc/pym/check_setup.py       |  74 +++++++
 tbc/pym/db_mapping.py        | 260 ++++++++++++++++++++++
 tbc/pym/depclean.py          |  53 +++++
 tbc/pym/flags.py             | 231 +++++++++++++++++++
 tbc/pym/jobs.py              |  86 ++++++++
 tbc/pym/old_cpv.py           |  97 ++++++++
 tbc/pym/package.py           | 355 ++++++++++++++++++++++++++++++
 tbc/pym/readconf.py          |  58 +++++
 tbc/pym/sqlquerys.py         | 512 +++++++++++++++++++++++++++++++++++++++++++
 tbc/pym/sync.py              |  73 ++++++
 tbc/pym/text.py              |  49 +++++
 tbc/pym/updatedb.py          | 135 ++++++++++++
 18 files changed, 2674 insertions(+)
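
For orientation, a minimal sketch (not part of the commit) of how the new modules
appear to fit together, following the pattern used in tbc/pym/build_log.py below;
the settings keys (sql_*, tbc_config, hostname) come from the added readconf and
sqlquerys code, and the log message string is only an example:

    from sqlalchemy.orm import sessionmaker
    from tbc.readconf import get_conf_settings
    from tbc.ConnectionManager import NewConnection
    from tbc.sqlquerys import get_config_id, add_tbc_logs

    # Read the tbc settings file (sql_backend, sql_host, sql_user, ...) and build an engine
    tbc_settings_dict = get_conf_settings().read_tbc_settings_all()
    Session = sessionmaker(bind=NewConnection(tbc_settings_dict))
    session = Session()

    # Look up this host's config row and write a log entry through the ORM helpers
    config_id = get_config_id(session, tbc_settings_dict['tbc_config'], tbc_settings_dict['hostname'])
    add_tbc_logs(session, "backend connected", "info", config_id)
    session.close()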

diff --git a/tbc/pym/ConnectionManager.py b/tbc/pym/ConnectionManager.py
new file mode 100644
index 0000000..40abfd5
--- /dev/null
+++ b/tbc/pym/ConnectionManager.py
@@ -0,0 +1,22 @@
+# Copyright 1998-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+from __future__ import print_function
+import logging
+
+def NewConnection(tbc_settings_dict):
+       backend=tbc_settings_dict['sql_backend']
+       host=tbc_settings_dict['sql_host']
+       user=tbc_settings_dict['sql_user']
+       password=tbc_settings_dict['sql_passwd']
+       database=tbc_settings_dict['sql_db']
+       if backend == 'mysql':
+               try:
+                       from sqlalchemy import create_engine
+               except ImportError:
+                       print("Please install a recent version of dev-python/sqlalchemy for Python")
+                       sys.exit(1)
+               #logging.basicConfig()
+               #logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
+               mysqldriver = 'mysql+mysqlconnector'
+               return create_engine(mysqldriver + '://' + user + ':' + password + '@' + host + '/' + database, pool_recycle=120)

diff --git a/tbc/pym/__init__.py b/tbc/pym/__init__.py
new file mode 100644
index 0000000..e69de29

diff --git a/tbc/pym/build_depgraph.py b/tbc/pym/build_depgraph.py
new file mode 100644
index 0000000..812e576
--- /dev/null
+++ b/tbc/pym/build_depgraph.py
@@ -0,0 +1,65 @@
+# Copyright 1998-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+from __future__ import print_function
+from _emerge.create_depgraph_params import create_depgraph_params
+from _emerge.depgraph import backtrack_depgraph
+import portage
+portage.proxy.lazyimport.lazyimport(globals(),
+       'tbc.actions:load_emerge_config',
+)
+from portage.exception import PackageSetNotFound
+
+from tbc.build_log import log_fail_queru
+
+def build_mydepgraph(settings, trees, mtimedb, myopts, myparams, myaction, myfiles, spinner, build_dict, session):
+       try:
+               success, mydepgraph, favorites = backtrack_depgraph(
+               settings, trees, myopts, myparams, myaction, myfiles, spinner)
+       except portage.exception.PackageSetNotFound as e:
+               root_config = trees[settings["ROOT"]]["root_config"]
+               display_missing_pkg_set(root_config, e.value)
+               build_dict['type_fail'] = "depgraph fail"
+               build_dict['check_fail'] = True
+       else:
+               if not success:
+                       repeat = True
+                       repeat_times = 0
+                       while repeat:
+                               if mydepgraph._dynamic_config._needed_p_mask_changes:
+                                       build_dict['type_fail'] = "Mask package or dep"
+                                       build_dict['check_fail'] = True
+                               elif mydepgraph._dynamic_config._needed_use_config_changes:
+                                       mydepgraph._display_autounmask()
+                                       build_dict['type_fail'] = "Need use change"
+                                       build_dict['check_fail'] = True
+                               elif mydepgraph._dynamic_config._slot_conflict_handler:
+                                       build_dict['type_fail'] = "Slot blocking"
+                                       build_dict['check_fail'] = True
+                               elif mydepgraph._dynamic_config._circular_deps_for_display:
+                                       build_dict['type_fail'] = "Circular Deps"
+                                       build_dict['check_fail'] = True
+                               elif mydepgraph._dynamic_config._unsolvable_blockers:
+                                       build_dict['type_fail'] = "Blocking packages"
+                                       build_dict['check_fail'] = True
+                               else:
+                                       build_dict['type_fail'] = "Dep calc fail"
+                                       build_dict['check_fail'] = True
+                               mydepgraph.display_problems()
+                               if repeat_times == 2:
+                                       repeat = False
+                                       log_fail_queru(session, build_dict, settings)
+                               else:
+                                       repeat_times = repeat_times + 1
+                                       settings, trees, mtimedb = load_emerge_config()
+                                       myparams = create_depgraph_params(myopts, myaction)
+                                       try:
+                                               success, mydepgraph, favorites = backtrack_depgraph(
+                                               settings, trees, myopts, myparams, myaction, myfiles, spinner)
+                                       except portage.exception.PackageSetNotFound as e:
+                                               root_config = trees[settings["ROOT"]]["root_config"]
+                                               display_missing_pkg_set(root_config, e.value)
+                                       if success:
+                                               repeat = False
+
+       return success, settings, trees, mtimedb, mydepgraph

diff --git a/tbc/pym/build_job.py b/tbc/pym/build_job.py
new file mode 100644
index 0000000..0167882
--- /dev/null
+++ b/tbc/pym/build_job.py
@@ -0,0 +1,166 @@
+# Copyright 1998-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+from __future__ import print_function
+import portage
+import os
+import re
+import sys
+import signal
+
+from portage import _encodings
+from portage import _unicode_decode
+from portage.versions import cpv_getkey
+from portage.dep import check_required_use
+from tbc.manifest import tbc_manifest
+from tbc.depclean import do_depclean
+from tbc.flags import tbc_use_flags
+from tbc.main import emerge_main
+from tbc.build_log import log_fail_queru
+from tbc.actions import load_emerge_config
+from tbc.sqlquerys import add_tbc_logs, get_packages_to_build, update_buildjobs_status, is_build_job_done
+
+class build_job_action(object):
+
+       def __init__(self, config_id, session):
+               self._config_id = config_id
+               self._session = session 
+
+       def make_build_list(self, build_dict, settings, portdb):
+               cp = build_dict['cp']
+               repo = build_dict['repo']
+               package = build_dict['package']
+               cpv = build_dict['cpv']
+               pkgdir = portdb.getRepositoryPath(repo) + "/" + cp
+               init_manifest =  tbc_manifest(settings, pkgdir)
+               build_use_flags_list = []
+               try:
+                       ebuild_version_checksum_tree = portage.checksum.sha256hash(pkgdir + "/" + package + "-" + build_dict['ebuild_version'] + ".ebuild")[0]
+               except:
+                       ebuild_version_checksum_tree = None
+               if ebuild_version_checksum_tree == build_dict['checksum']:
+                       manifest_error = init_manifest.check_file_in_manifest(portdb, cpv, build_use_flags_list, repo)
+                       if manifest_error is None:
+                               init_flags = tbc_use_flags(settings, portdb, cpv)
+                               build_use_flags_list = init_flags.comper_useflags(build_dict)
+                               log_msg = "build_use_flags_list %s" % (build_use_flags_list,)
+                               add_tbc_logs(self._session, log_msg, "info", self._config_id)
+                               manifest_error = init_manifest.check_file_in_manifest(portdb, cpv, build_use_flags_list, repo)
+                       if manifest_error is None:
+                               build_dict['check_fail'] = False
+                               build_cpv_dict = {}
+                               build_cpv_dict[cpv] = build_use_flags_list
+                               log_msg = "build_cpv_dict: %s" % (build_cpv_dict,)
+                               add_tbc_logs(self._session, log_msg, "info", self._config_id)
+                               return build_cpv_dict
+                       build_dict['type_fail'] = "Manifest error"
+                       build_dict['check_fail'] = True
+                       log_msg = "Manifest error: %s:%s" % (cpv, manifest_error)
+                       add_tbc_logs(self._session, log_msg, "info", self._config_id)
+               else:
+                       build_dict['type_fail'] = "Wrong ebuild checksum"
+                       build_dict['check_fail'] = True
+               if build_dict['check_fail'] is True:
+                               log_fail_queru(self._session, build_dict, settings)
+               return None
+
+       def build_procces(self, buildqueru_cpv_dict, build_dict, settings, portdb):
+               build_cpv_list = []
+               depclean_fail = True
+               for k, build_use_flags_list in buildqueru_cpv_dict.items():
+                       build_cpv_list.append("=" + k)
+                       if not build_use_flags_list == None:
+                               build_use_flags = ""
+                               for flags in build_use_flags_list:
+                                       build_use_flags = build_use_flags + flags + " "
+                               filetext = '=' + k + ' ' + build_use_flags
+                               log_msg = "filetext: %s" % filetext
+                               add_tbc_logs(self._session, log_msg, "info", self._config_id)
+                               with open("/etc/portage/package.use/99_autounmask", "a") as f:
+                                       f.write(filetext)
+                                       f.write('\n')
+                                       f.close
+               log_msg = "build_cpv_list: %s" % (build_cpv_list,)
+               add_tbc_logs(self._session, log_msg, "info", self._config_id)
+
+               # We remove the binary package if removebin is true
+               if build_dict['removebin']:
+                       package = build_dict['package']
+                       pv = package + "-" + build_dict['ebuild_version']
+                       binfile = settings['PKGDIR'] + "/" + build_dict['category'] + "/" + pv + ".tbz2"
+                       try:
+                               os.remove(binfile)
+                       except:
+                               log_msg = "Binary file was not removed or found: %s" % (binfile,)
+                               add_tbc_logs(self._session, log_msg, "info", self._config_id)
+
+               argscmd = []
+               for emerge_option in build_dict['emerge_options']:
+                       if emerge_option == '--depclean':
+                               pass
+                       elif emerge_option == '--nodepclean':
+                               pass
+                       elif emerge_option == '--nooneshot':
+                               pass
+                       else:
+                               if not emerge_option in argscmd:
+                                       argscmd.append(emerge_option)
+               for build_cpv in build_cpv_list:
+                       argscmd.append(build_cpv)
+               print("Emerge options: %s" % argscmd)
+               log_msg = "argscmd: %s" % (argscmd,)
+               add_tbc_logs(self._session, log_msg, "info", self._config_id)
+               
+               # Call main_emerge to build the package in build_cpv_list
+               print("Build: %s" % build_dict)
+               update_buildjobs_status(self._session, build_dict['build_job_id'], 'Building', self._config_id)
+               build_fail = emerge_main(argscmd, build_dict, self._session)
+               # Run depclean
+               if  '--depclean' in build_dict['emerge_options'] and not '--nodepclean' in build_dict['emerge_options']:
+                       depclean_fail = do_depclean()
+               try:
+                       os.remove("/etc/portage/package.use/99_autounmask")
+                       with open("/etc/portage/package.use/99_autounmask", "a") as f:
+                               f.close
+               except:
+                       pass
+
+               if is_build_job_done(self._session, build_dict['build_job_id']):
+                       update_buildjobs_status(self._session, build_dict['build_job_id'], 'Looked', self._config_id)
+                       log_msg = "build_job %s was not removed" % (build_dict['build_job_id'],)
+                       add_tbc_logs(self._session, log_msg, "info", self._config_id)
+                       print("query was not removed")
+                       build_dict['type_fail'] = "Query was not removed"
+                       build_dict['check_fail'] = True
+                       log_fail_queru(self._session, build_dict, settings)
+               if build_fail is True:
+                       build_dict['type_fail'] = "Emerge failed"
+                       build_dict['check_fail'] = True
+                       log_msg = "Emerge failed!"
+                       add_tbc_logs(self._session, log_msg, "info", self._config_id)
+                       return True
+               return False
+
+       def procces_build_jobs(self):
+               build_dict = {}
+               build_dict = get_packages_to_build(self._session, self._config_id)
+               if build_dict is None:
+                       return
+               print("build_dict: %s" % (build_dict,))
+               log_msg = "build_dict: %s" % (build_dict,)
+               add_tbc_logs(self._session, log_msg, "info", self._config_id)
+               if not build_dict['ebuild_id'] is None and build_dict['checksum'] is not None:
+                       settings, trees, mtimedb = load_emerge_config()
+                       portdb = trees[settings["ROOT"]]["porttree"].dbapi
+                       buildqueru_cpv_dict = self.make_build_list(build_dict, settings, portdb)
+                       log_msg = "buildqueru_cpv_dict: %s" % (buildqueru_cpv_dict,)
+                       add_tbc_logs(self._session, log_msg, "info", self._config_id)
+                       if buildqueru_cpv_dict is None:
+                               return
+                       fail_build_procces = self.build_procces(buildqueru_cpv_dict, build_dict, settings, portdb)
+                       return
+               if build_dict['emerge_options'] != [] and build_dict['ebuild_id'] is None:
+                       return
+               if not build_dict['ebuild_id'] is None and build_dict['emerge_options'] is None:
+                       pass
+                       # del_old_queue(self._session, build_dict['queue_id'])

diff --git a/tbc/pym/build_log.py b/tbc/pym/build_log.py
new file mode 100644
index 0000000..a7d5314
--- /dev/null
+++ b/tbc/pym/build_log.py
@@ -0,0 +1,336 @@
+# Copyright 1998-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+from __future__ import print_function
+import re
+import os
+import platform
+import hashlib
+
+from portage.versions import catpkgsplit, cpv_getversion
+import portage
+from portage.util import writemsg, \
+       writemsg_level, writemsg_stdout
+from portage import _encodings
+from portage import _unicode_encode
+
+from _emerge.main import parse_opts
+
+portage.proxy.lazyimport.lazyimport(globals(),
+       'tbc.actions:action_info,load_emerge_config',
+)
+
+from tbc.repoman_tbc import tbc_repoman
+from tbc.text import get_log_text_dict
+from tbc.package import tbc_package
+from tbc.readconf import get_conf_settings
+from tbc.flags import tbc_use_flags
+from tbc.ConnectionManager import NewConnection
+from tbc.sqlquerys import add_tbc_logs, get_config_id, get_ebuild_id_db, add_new_buildlog, \
+       get_package_info, get_build_job_id, get_use_id, get_config_info, get_hilight_info, get_error_info_list, \
+       add_e_info, get_fail_times, add_fail_times, update_fail_times, del_old_build_jobs, add_old_ebuild, \
+       update_buildjobs_status
+from sqlalchemy.orm import sessionmaker
+
+def get_build_dict_db(session, config_id, settings, pkg):
+       myportdb = portage.portdbapi(mysettings=settings)
+       cpvr_list = catpkgsplit(pkg.cpv, silent=1)
+       categories = cpvr_list[0]
+       package = cpvr_list[1]
+       repo = pkg.repo
+       ebuild_version = cpv_getversion(pkg.cpv)
+       log_msg = "Logging %s:%s" % (pkg.cpv, repo,)
+       add_tbc_logs(session, log_msg, "info", config_id)
+       PackageInfo = get_package_info(session, categories, package, repo)
+       build_dict = {}
+       build_dict['ebuild_version'] = ebuild_version
+       build_dict['package_id'] = PackageInfo.PackageId
+       build_dict['cpv'] = pkg.cpv
+       build_dict['categories'] = categories
+       build_dict['package'] = package
+       build_dict['config_id'] = config_id
+       init_useflags = tbc_use_flags(settings, myportdb, pkg.cpv)
+       iuse_flags_list, final_use_list = init_useflags.get_flags_pkg(pkg, settings)
+       iuse = []
+       for iuse_line in iuse_flags_list:
+               iuse.append(init_useflags.reduce_flag(iuse_line))
+       iuse_flags_list2 = list(set(iuse))
+       use_enable = final_use_list
+       use_disable = list(set(iuse_flags_list2).difference(set(use_enable)))
+       use_flagsDict = {}
+       for x in use_enable:
+               use_id = get_use_id(session, x)
+               use_flagsDict[use_id] = True
+       for x in use_disable:
+               use_id = get_use_id(session, x)
+               use_flagsDict[use_id] = False
+       if use_enable == [] and use_disable == []:
+               build_dict['build_useflags'] = None
+       else:
+               build_dict['build_useflags'] = use_flagsDict
+       pkgdir = myportdb.getRepositoryPath(repo) + "/" + categories + "/" + package
+       ebuild_version_checksum_tree = portage.checksum.sha256hash(pkgdir+ "/" + package + "-" + ebuild_version + ".ebuild")[0]
+       build_dict['checksum'] = ebuild_version_checksum_tree
+       ebuild_id_list, status = get_ebuild_id_db(session, build_dict['checksum'], build_dict['package_id'])
+       if status:
+               if ebuild_id_list is None:
+                       log_msg = "%s:%s Don't have any ebuild_id!" % (pkg.cpv, repo,)
+                       add_tbc_logs(session, log_msg, "info", config_id)
+                       update_manifest_sql(session, build_dict['package_id'], "0")
+                       init_package = tbc_package(session, settings, myportdb)
+                       init_package.update_package_db(build_dict['package_id'])
+                       ebuild_id_list, status = get_ebuild_id_db(session, build_dict['checksum'], build_dict['package_id'])
+                       if status and ebuild_id_list is None:
+                               log_msg = "%s:%s Don't have any ebuild_id!" % (pkg.cpv, repo,)
+                               add_tbc_logs(session, log_msg, "error", config_id)
+               else:
+                       old_ebuild_id_list = []
+                       for ebuild_id in ebuild_id_list:
+                               log_msg = "%s:%s:%s Dups of checksums" % (pkg.cpv, repo, ebuild_id,)
+                               add_tbc_logs(session, log_msg, "error", config_id)
+                               old_ebuild_id_list.append(ebuild_id)
+                       add_old_ebuild(session, old_ebuild_id_list)
+               return
+       build_dict['ebuild_id'] = ebuild_id_list
+
+       build_job_id = get_build_job_id(session, build_dict)
+       if build_job_id is None:
+               build_dict['build_job_id'] = None
+       else:
+               build_dict['build_job_id'] = build_job_id
+       return build_dict
+
+def search_buildlog(session, logfile_text_dict, max_text_lines):
+       log_search_list = get_hilight_info(session)
+       hilight_list = []
+       for index, text_line in logfile_text_dict.items():
+               for search_pattern in log_search_list:
+                       if re.search(search_pattern.HiLightSearch, text_line):
+                               hilight_tmp = {}
+                               hilight_tmp['startline'] = index - search_pattern.HiLightStart
+                               hilight_tmp['hilight'] = search_pattern.HiLightCssId
+                               if search_pattern.HiLightSearchEnd == "":
+                                       hilight_tmp['endline'] = index + search_pattern.HiLightEnd
+                                       if hilight_tmp['endline'] > max_text_lines:
+                                               hilight_tmp['endline'] = max_text_lines
+                               elif not search_pattern.HiLightSearchEnd == "" and (index + 1) >= max_text_lines:
+                                               hilight_tmp['endline'] = max_text_lines
+                               else:
+                                       i = index + 1
+                                       match = True
+                                       while match:
+                                               if i >= max_text_lines:
+                                                       match = False
+                                                       break
+                                               if re.search(search_pattern.HiLightSearchPattern, logfile_text_dict[i]) and re.search(search_pattern.HiLightSearchPattern, logfile_text_dict[i + 1]):
+                                                       for search_pattern2 in log_search_list:
+                                                               if re.search(search_pattern2.HiLightSearch, logfile_text_dict[i]):
+                                                                       match = False
+                                                       if match:
+                                                               i = i + 1
+                                               elif re.search(search_pattern.HiLightSearchPattern, logfile_text_dict[i]) and re.search(search_pattern.HiLightSearchEnd, logfile_text_dict[i + 1]):
+                                                       i = i + 1
+                                                       match = False
+                                               else:
+                                                       match = False
+                                       if i >= max_text_lines:
+                                               hilight_tmp['endline'] = max_text_lines
+                                       if re.search(search_pattern.HiLightSearchEnd, logfile_text_dict[i]):
+                                               hilight_tmp['endline'] = i
+                                       else:
+                                               hilight_tmp['endline'] = i - 1
+                               hilight_list.append(hilight_tmp)
+
+       new_hilight_dict = {}
+       for hilight_tmp in hilight_list:
+               add_new_hilight = True
+               add_new_hilight_middel = None
+               for k, v in sorted(new_hilight_dict.items()):
+                       if hilight_tmp['startline'] == hilight_tmp['endline']:
+                               if v['endline'] == hilight_tmp['startline'] or v['startline'] == hilight_tmp['startline']:
+                                       add_new_hilight = False
+                               if hilight_tmp['startline'] > v['startline'] and hilight_tmp['startline'] < v['endline']:
+                                       add_new_hilight = False
+                                       add_new_hilight_middel = k
+                       else:
+                               if v['endline'] == hilight_tmp['startline'] or v['startline'] == hilight_tmp['startline']:
+                                       add_new_hilight = False
+                               if hilight_tmp['startline'] > v['startline'] and hilight_tmp['startline'] < v['endline']:
+                                       add_new_hilight = False
+               if add_new_hilight is True:
+                       adict = {}
+                       adict['startline'] = hilight_tmp['startline']
+                       adict['hilight_css_id'] = hilight_tmp['hilight']
+                       adict['endline'] = hilight_tmp['endline']
+                       new_hilight_dict[hilight_tmp['startline']] = adict
+               if not add_new_hilight_middel is None:
+                       adict1 = {}
+                       adict2 = {}
+                       adict3 = {}
+                       adict1['startline'] = new_hilight_dict[add_new_hilight_middel]['startline']
+                       adict1['endline'] = hilight_tmp['startline'] -1
+                       adict1['hilight_css_id'] = new_hilight_dict[add_new_hilight_middel]['hilight']
+                       adict2['startline'] = hilight_tmp['startline']
+                       adict2['hilight_css_id'] = hilight_tmp['hilight']
+                       adict2['endline'] = hilight_tmp['endline']
+                       adict3['startline'] = hilight_tmp['endline'] + 1
+                       adict3['hilight_css_id'] = new_hilight_dict[add_new_hilight_middel]['hilight']
+                       adict3['endline'] = new_hilight_dict[add_new_hilight_middel]['endline']
+                       del new_hilight_dict[add_new_hilight_middel]
+                       new_hilight_dict[adict1['startline']] = adict1
+                       new_hilight_dict[adict2['startline']] = adict2
+                       new_hilight_dict[adict3['startline']] = adict3
+       return new_hilight_dict
+
+def get_buildlog_info(session, settings, pkg, build_dict):
+       myportdb = portage.portdbapi(mysettings=settings)
+       init_repoman = tbc_repoman(settings, myportdb)
+       logfile_text_dict, max_text_lines = get_log_text_dict(settings.get("PORTAGE_LOG_FILE"))
+       hilight_dict = search_buildlog(session, logfile_text_dict, max_text_lines)
+       error_log_list = []
+       qa_error_list = []
+       repoman_error_list = []
+       sum_build_log_list = []
+       error_info_list = get_error_info_list(session)
+       for k, v in sorted(hilight_dict.items()):
+               if v['startline'] == v['endline']:
+                       error_log_list.append(logfile_text_dict[k ])
+                       if v['hilight_css_id'] == "3" or v['hilight_css_id'] == "4": # qa = 3 and 4
+                               qa_error_list.append(logfile_text_dict[k])
+               else:
+                       i = k
+                       while i != (v['endline'] + 1):
+                               error_log_list.append(logfile_text_dict[i])
+                               if v['hilight_css_id'] == "3" or v['hilight_css_id'] == "4": # qa = 3 and 4
+                                       qa_error_list.append(logfile_text_dict[i])
+                               i = i +1
+
+       # Run repoman check_repoman()
+       repoman_error_list = init_repoman.check_repoman(build_dict['cpv'], pkg.repo)
+       if repoman_error_list != []:
+               sum_build_log_list.append("1") # repoman = 1
+       if qa_error_list != []:
+               sum_build_log_list.append("2") # qa = 2
+       error_search_line = "^ \\* ERROR: "
+       for error_log_line in error_log_list:
+               if re.search(error_search_line, error_log_line):
+                       for error_info in error_info_list:
+                               if re.search(error_info.ErrorSearch, error_log_line):
+                                       sum_build_log_list.append(error_info.ErrorId)
+       build_log_dict = {}
+       build_log_dict['repoman_error_list'] = repoman_error_list
+       build_log_dict['qa_error_list'] = qa_error_list
+       build_log_dict['error_log_list'] = error_log_list
+       build_log_dict['summary_error_list'] = sum_build_log_list
+       build_log_dict['hilight_dict'] = hilight_dict
+       return build_log_dict
+
+def get_emerge_info_id(settings, trees, session, config_id):
+       args = []
+       args.append("--info")
+       myaction, myopts, myfiles = parse_opts(args, silent=True)
+       status, emerge_info_list = action_info(settings, trees, myopts, myfiles)
+       emerge_info = ""
+       return "\n".join(emerge_info_list)
+
+def add_buildlog_main(settings, pkg, trees):
+       reader=get_conf_settings()
+       tbc_settings_dict=reader.read_tbc_settings_all()
+       config = tbc_settings_dict['tbc_config']
+       hostname =tbc_settings_dict['hostname']
+       host_config = hostname + "/" + config
+       Session = sessionmaker(bind=NewConnection(tbc_settings_dict))
+       session = Session()
+       config_id = get_config_id(session, config, hostname)
+       if pkg.type_name == "binary":
+               build_dict = None
+       else:
+               build_dict = get_build_dict_db(session, config_id, settings, pkg)
+       if build_dict is None:
+               log_msg = "Package %s:%s is NOT logged." % (pkg.cpv, pkg.repo,)
+               add_tbc_logs(session, log_msg, "info", config_id)
+               session.close()
+               return
+       build_log_dict = {}
+       build_log_dict = get_buildlog_info(session, settings, pkg, build_dict)
+       error_log_list = build_log_dict['error_log_list']
+       build_error = ""
+       log_hash = hashlib.sha256()
+       build_error = ""
+       if error_log_list != []:
+               for log_line in error_log_list:
+                       build_error = build_error + log_line
+                       log_hash.update(log_line.encode('utf-8'))
+       build_log_dict['build_error'] = build_error
+       build_log_dict['log_hash'] = log_hash.hexdigest()
+       build_log_dict['logfilename'] = settings.get("PORTAGE_LOG_FILE").split(host_config)[1]
+       log_msg = "Logfile name: %s" % (settings.get("PORTAGE_LOG_FILE"),)
+       add_tbc_logs(session, log_msg, "info", config_id)
+       build_log_dict['emerge_info'] = get_emerge_info_id(settings, trees, session, config_id)
+       log_id = add_new_buildlog(session, build_dict, build_log_dict)
+
+       if log_id is None:
+               log_msg = "Package %s:%s is NOT logged." % (pkg.cpv, pkg.repo,)
+               add_tbc_logs(session, log_msg, "info", config_id)
+       else:
+               os.chmod(settings.get("PORTAGE_LOG_FILE"), 0o664)
+               log_msg = "Package: %s:%s is logged." % (pkg.cpv, pkg.repo,)
+               add_tbc_logs(session, log_msg, "info", config_id)
+               print("\n>>> Logging %s:%s\n" % (pkg.cpv, pkg.repo,))
+       session.close()
+
+def log_fail_queru(session, build_dict, settings):
+       config_id = build_dict['config_id']
+       if get_fail_times(session, build_dict):
+               fail_querue_dict = {}
+               fail_querue_dict['build_job_id'] = build_dict['build_job_id']
+               fail_querue_dict['fail_type'] = build_dict['type_fail']
+               fail_querue_dict['fail_times'] = 1
+               add_fail_times(session, fail_querue_dict)
+               update_buildjobs_status(session, build_dict['build_job_id'], 'Waiting', config_id)
+       else:
+               build_log_dict = {}
+               error_log_list = []
+               qa_error_list = []
+               repoman_error_list = []
+               sum_build_log_list = []
+               sum_build_log_list.append("2")
+               error_log_list.append(build_dict['type_fail'])
+               build_log_dict['repoman_error_list'] = repoman_error_list
+               build_log_dict['qa_error_list'] = qa_error_list
+               build_log_dict['summary_error_list'] = sum_build_log_list
+               if build_dict['type_fail'] == 'merge fail':
+                       error_log_list = []
+                       for k, v in build_dict['failed_merge'].items():
+                               error_log_list.append(v['fail_msg'])
+               build_log_dict['error_log_list'] = error_log_list
+               build_error = ""
+               if error_log_list != []:
+                       for log_line in error_log_list:
+                               build_error = build_error + log_line
+               build_log_dict['build_error'] = build_error
+               summary_error = ""
+               if sum_build_log_list != []:
+                       for sum_log_line in sum_build_log_list:
+                               summary_error = summary_error + " " + sum_log_line
+               build_log_dict['log_hash'] = '0'
+               useflagsdict = {}
+               if build_dict['build_useflags'] == {}:
+                       for k, v in build_dict['build_useflags'].items():
+                               use_id = get_use_id(session, k)
+                               useflagsdict[use_id] = v
+                               build_dict['build_useflags'] = useflagsdict
+               else:
+                       build_dict['build_useflags'] = None                     
+               if settings.get("PORTAGE_LOG_FILE") is not None:
+                       ConfigInfo= get_config_info(session, config_id)
+                       host_config = ConfigInfo.Hostname +"/" + ConfigInfo.Config
+                       build_log_dict['logfilename'] = settings.get("PORTAGE_LOG_FILE").split(host_config)[1]
+                       os.chmod(settings.get("PORTAGE_LOG_FILE"), 0o664)
+               else:
+                       build_log_dict['logfilename'] = ""
+                       build_log_dict['hilight_dict'] = {}
+               settings2, trees, tmp = load_emerge_config()
+               build_log_dict['emerge_info'] = get_emerge_info_id(settings2, trees, session, config_id)
+               log_id = add_new_buildlog(session, build_dict, build_log_dict)
+               del_old_build_jobs(session, build_dict['build_job_id'])

diff --git a/tbc/pym/buildquerydb.py b/tbc/pym/buildquerydb.py
new file mode 100644
index 0000000..7fe7f82
--- /dev/null
+++ b/tbc/pym/buildquerydb.py
@@ -0,0 +1,102 @@
+# Copyright 1998-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+import sys
+import os
+
+# Get the options from the config file set in tbc.readconf
+from tbc.readconf import get_conf_settings
+reader=get_conf_settings()
+tbc_settings_dict=reader.read_tbc_settings_all()
+config_profile = tbc_settings_dict['tbc_config']
+
+from tbc.check_setup import check_make_conf
+from tbc.sync import git_pull
+from tbc.package import tbc_package
+import portage
+import multiprocessing
+
+def add_cpv_query_pool(mysettings, myportdb, config_id, cp, repo):
+       conn =0
+       init_package = tbc_package(mysettings, myportdb)
+       # FIXME: remove the check for tbc when in tree
+       if cp != "dev-python/tbc":
+               build_dict = {}
+               packageDict = {}
+               ebuild_id_list = []
+               # split the cp to categories and package
+               element = cp.split('/')
+               categories = element[0]
+               package = element[1]
+               log_msg = "C %s:%s" % (cp, repo,)
+               add_tbc_logs(conn, log_msg, "info", config_id)
+               pkgdir = myportdb.getRepositoryPath(repo) + "/" + cp
+               config_id_list = []
+               config_id_list.append(config_id)
+               config_cpv_listDict = init_package.config_match_ebuild(cp, config_id_list)
+               if config_cpv_listDict != {}:
+                       cpv = config_cpv_listDict[config_id]['cpv']
+                       packageDict[cpv] = init_package.get_packageDict(pkgdir, cpv, repo)
+                       build_dict['checksum'] = packageDict[cpv]['ebuild_version_checksum_tree']
+                       build_dict['package_id'] = get_package_id(conn, categories, package, repo)
+                       build_dict['ebuild_version'] = packageDict[cpv]['ebuild_version_tree']
+                       ebuild_id = get_ebuild_id_db_checksum(conn, build_dict)
+                       if ebuild_id is not None:
+                               ebuild_id_list.append(ebuild_id)
+                               init_package.add_new_ebuild_buildquery_db(ebuild_id_list, packageDict, config_cpv_listDict)
+               log_msg = "C %s:%s ... Done." % (cp, repo,)
+               add_tbc_logs(conn, log_msg, "info", config_id)
+       return
+
+def add_buildquery_main(config_id):
+       conn = 0
+       config_setup = get_config(conn, config_id)
+       log_msg = "Adding build jobs for: %s" % (config_setup,)
+       add_tbc_logs(conn, log_msg, "info", config_id)
+       check_make_conf()
+       log_msg = "Check configs done"
+       add_tbc_logs(conn, log_msg, "info", config_profile)
+       # Get default config from the configs table  and default_config=1
+       default_config_root = "/var/cache/tbc/" + tbc_settings_dict['tbc_gitreponame'] + "/" + config_setup + "/"
+       # Set config_root (PORTAGE_CONFIGROOT)  to default_config_root
+       mysettings = portage.config(config_root = default_config_root)
+       myportdb = portage.portdbapi(mysettings=mysettings)
+       init_package = tbc_package(mysettings, myportdb)
+       log_msg = "Setting default config to: %s" % (config_setup)
+       add_tbc_logs(conn, log_msg, "info", config_id)
+       # Use all except 2 cores when multiprocessing
+       pool_cores= multiprocessing.cpu_count()
+       if pool_cores >= 3:
+               use_pool_cores = pool_cores - 2
+       else:
+               use_pool_cores = 1
+       pool = multiprocessing.Pool(processes=use_pool_cores)
+
+       repo_trees_list = myportdb.porttrees
+       for repo_dir in repo_trees_list:
+               repo = myportdb.getRepositoryName(repo_dir)
+               repo_dir_list = []
+               repo_dir_list.append(repo_dir)
+               
+               # Get the package list from the repo
+               package_list_tree = myportdb.cp_all(trees=repo_dir_list)
+               for cp in sorted(package_list_tree):
+                       pool.apply_async(add_cpv_query_pool, (mysettings, myportdb, config_id, cp, repo,))
+       pool.close()
+       pool.join()
+       log_msg = "Adding build jobs for: %s ... Done." % (config_setup,)
+       add_tbc_logs(conn, log_msg, "info", config_profile)
+       return True
+
+def del_buildquery_main(config_id):
+       conn=0
+       config_setup = get_config(conn, config_id)
+       log_msg = "Removing build jobs for: %s" % (config_setup,)
+       add_tbc_logs(conn, log_msg, "info", config_id)
+       build_job_id_list = get_build_jobs_id_list_config(conn, config_id)
+       if build_job_id_list is not None:
+               for build_job_id in build_job_id_list:
+                       del_old_build_jobs(conn, build_job_id)
+       log_msg = "Removing build jobs for: %s ... Done." % (config_setup,)
+       add_tbc_logs(conn, log_msg, "info", config_id)
+       return True

diff --git a/tbc/pym/check_setup.py b/tbc/pym/check_setup.py
new file mode 100644
index 0000000..46d4391
--- /dev/null
+++ b/tbc/pym/check_setup.py
@@ -0,0 +1,74 @@
+# Copyright 1998-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+from __future__ import print_function
+import portage
+import os
+import errno
+
+from portage.exception import DigestException, FileNotFound, ParseError, PermissionDenied
+from tbc.text import get_file_text
+from tbc.sqlquerys import get_config_all_info, add_tbc_logs, get_configmetadata_info, get_setup_info
+
+def check_make_conf(session, config_id, tbc_settings_dict):
+       log_msg = "Checking configs for changes and errors"
+       add_tbc_logs(session, log_msg, "info", config_id)
+       configsDict = {}
+       for ConfigInfo in get_config_all_info(session):
+               attDict={}
+               # Set the config dir
+               SetupInfo = get_setup_info(session, ConfigInfo.ConfigId)
+               check_config_dir = "/var/cache/tbc/" + tbc_settings_dict['tbc_gitreponame'] + "/" + ConfigInfo.Hostname +"/" + SetupInfo.Setup + "/"
+               make_conf_file = check_config_dir + "etc/portage/make.conf"
+               ConfigsMetaDataInfo = get_configmetadata_info(session, ConfigInfo.ConfigId)
+               # Check if we can take a checksum on it.
+               # Check if we have some error in the file. (portage.util.getconfig)
+               # Check if we have an environment error with the config. (settings.validate)
+               try:
+                       make_conf_checksum_tree = portage.checksum.sha256hash(make_conf_file)[0]
+                       portage.util.getconfig(make_conf_file, tolerant=0, allow_sourcing=True, expand=True)
+                       mysettings = portage.config(config_root = check_config_dir)
+                       mysettings.validate()
+                       # With errors we update the db on the config and disable the config
+               except ParseError as e:
+                       ConfigsMetaDataInfo.ConfigErrorText = str(e)
+                       ConfigsMetaDataInfo.Active = False
+                       log_msg = "%s FAIL!" % (ConfigInfo.Hostname,)
+                       add_tbc_logs(session, log_msg, "info", config_id)
+                       session.commit()
+               else:
+                       ConfigsMetaDataInfo.Active = True
+                       log_msg = "%s PASS" % (ConfigInfo.Hostname,)
+                       add_tbc_logs(session, log_msg, "info", config_id)
+                       session.commit()
+               if make_conf_checksum_tree != ConfigsMetaDataInfo.Checksum:
+                       ConfigsMetaDataInfo.MakeConfText = get_file_text(make_conf_file)
+                       ConfigsMetaDataInfo.Checksum = make_conf_checksum_tree
+                       session.commit()
+       log_msg = "Checking configs for changes and errors ... Done"
+       add_tbc_logs(session, log_msg, "info", config_id)
+
+def check_make_conf_guest(session, config_id):
+       make_conf_file = "/etc/portage/make.conf"
+       # Check if we can open the file and close it
+       # Check if we have some error in the file (portage.util.getconfig)
+       # Check if we have an environment error with the config (settings.validate)
+       try:
+               make_conf_checksum_tree = portage.checksum.sha256hash(make_conf_file)[0]
+               portage.util.getconfig(make_conf_file, tolerant=0, allow_sourcing=True, expand=True)
+               mysettings = portage.config(config_root = "/")
+               mysettings.validate()
+               # With errors we return false
+       except Exception as e:
+               return False
+       ConfigsMetaDataInfo = get_configmetadata_info(session, config_id)
+       print('make_conf_checksum_tree', make_conf_checksum_tree)
+       print('make_conf_checksum_db', ConfigsMetaDataInfo.Checksum)
+       if make_conf_checksum_tree != ConfigsMetaDataInfo.Checksum:
+               return False
+       return True
+
+def check_configure_guest(session, config_id):
+       pass_make_conf = check_make_conf_guest(session, config_id)
+       print(pass_make_conf)
+       return pass_make_conf
\ No newline at end of file

diff --git a/tbc/pym/db_mapping.py b/tbc/pym/db_mapping.py
new file mode 100644
index 0000000..c215456
--- /dev/null
+++ b/tbc/pym/db_mapping.py
@@ -0,0 +1,260 @@
+# Copyright 1998-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+import datetime
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy import Column, Integer, String, Boolean, DateTime, Enum, Text, ForeignKey
+from sqlalchemy.orm import relationship, backref
+
+Base = declarative_base()
+
+class Keywords(Base):
+       KeywordId = Column('keyword_id', Integer, primary_key=True)
+       Keyword = Column('keyword', String)
+       __tablename__ = 'keywords'
+       
+class Setups(Base):
+       SetupId = Column('setup_id', Integer, primary_key=True)
+       Setup = Column('setup', String(100))
+       __tablename__ = 'setups'
+
+class Configs(Base):
+       ConfigId = Column('config_id', Integer, primary_key=True)
+       Hostname = Column('hostname', String(150))
+       SetupId = Column('setup_id', Integer, ForeignKey('setups.setup_id'))
+       Host = Column('default_config', Boolean, default=False)
+       __tablename__ = 'configs'
+       
+class Logs(Base):
+       LogId = Column('log_id', Integer, primary_key=True)
+       ConfigId = Column('config_id', Integer, ForeignKey('configs.config_id'))
+       LogType = Column('log_type', Enum('info','error','debug','qa','repoman'))
+       Msg = Column('msg', Text)
+       TimeStamp = Column('time_stamp', DateTime, nullable=False, default=datetime.datetime.utcnow)
+       __tablename__ = 'logs'
+
+class Jobs(Base):
+       JobId = Column('job_id', Integer, primary_key=True)
+       JobType = Column('job_type', Enum('updatedb', 'esync', 'gsync'))
+       Status = Column('status', Enum('Runing', 'Done', 'Waiting'))
+       User = Column('user', String(50))
+       ConfigId = Column('config_id', Integer, ForeignKey('configs.config_id'))
+       RunConfigId = Column('run_config_id', Integer, ForeignKey('configs.config_id'))
+       TimeStamp = Column('time_stamp', DateTime, nullable=False, default=datetime.datetime.utcnow)
+       __tablename__ = 'jobs'
+
+class ConfigsMetaData(Base):
+       Id = Column('id', Integer, primary_key=True)
+       ConfigId = Column('config_id', Integer, ForeignKey('configs.config_id'))
+       Profile = Column('profile', String(150))
+       KeywordId = Column('keyword_id', Integer, ForeignKey('keywords.keyword_id'))
+       MakeConfText = Column('make_conf_text', Text)
+       Checksum = Column('checksum', String(100))
+       ConfigSync = Column('configsync', Boolean, default=False)
+       Active = Column('active', Boolean, default=False)
+       ConfigErrorText = Column('config_error_text', Text)
+       Updateing = Column('updateing', Boolean, default=False)
+       Status = Column('status', Enum('Stopped', 'Runing', 'Waiting'))
+       Auto = Column('auto', Boolean, default=False)
+       GitWww = Column('git_www', String(200))
+       TimeStamp = Column('time_stamp', DateTime, nullable=False, default=datetime.datetime.utcnow)
+       __tablename__ = 'configs_metadata'
+
+class Categories(Base):
+       CategoryId = Column('category_id', Integer, primary_key=True)
+       Category = Column('category', String(150))
+       Active = Column('active', Boolean, default=True)
+       TimeStamp = Column('time_stamp', DateTime, nullable=False, default=datetime.datetime.utcnow)
+       __tablename__ = 'categories'
+
+class Repos(Base):
+       RepoId = Column('repo_id', Integer, primary_key=True)
+       Repo = Column('repo', String(100))
+       __tablename__ = 'repos'
+
+class Packages(Base):
+       PackageId = Column('package_id', Integer, primary_key=True)
+       CategoryId = Column('category_id', Integer, ForeignKey('categories.category_id'))
+       Package = Column('package',String(150))
+       RepoId = Column('repo_id', Integer, ForeignKey('repos.repo_id'))
+       Checksum = Column('checksum', String(100))
+       Active = Column('active', Boolean, default=False)
+       TimeStamp = Column('time_stamp', DateTime, nullable=False, default=datetime.datetime.utcnow)
+       __tablename__ = 'packages'
+
+class Emails(Base):
+       EmailId = Column('email_id', Integer, primary_key=True)
+       Email = Column('email', String(150))
+       __tablename__ = 'emails'
+
+class PackagesEmails(Base):
+       Id = Column('id', Integer, primary_key=True)
+       PackageId = Column('package_id', Integer, ForeignKey('packages.package_id'))
+       EmailId = Column('email_id', Integer, ForeignKey('emails.email_id'))
+       __tablename__ = 'packages_emails'
+
+class PackagesMetadata(Base):
+       Id = Column('id', Integer, primary_key=True)
+       PackageId = Column('package_id', Integer, ForeignKey('packages.package_id'))
+       Checksum = Column('checksum', String(100))
+       __tablename__ = 'packages_metadata'
+
+class Ebuilds(Base):
+       EbuildId = Column('ebuild_id', Integer, primary_key=True)
+       PackageId = Column('package_id', Integer, ForeignKey('packages.package_id'))
+       Version = Column('version', String(150))
+       Checksum = Column('checksum', String(100))
+       Active = Column('active', Boolean, default=False)
+       TimeStamp = Column('time_stamp', DateTime, nullable=False, default=datetime.datetime.utcnow)
+       __tablename__ = 'ebuilds'
+
+class EmergeOptions(Base):
+       EmergeOptionId = Column('eoption_id', Integer, primary_key=True)
+       EOption = Column('eoption', String(45))
+       __tablename__ = 'emerge_options'
+
+class ConfigsEmergeOptions(Base):
+       ConfigId = Column('config_id', Integer, ForeignKey('configs.config_id'), primary_key=True)
+       EOptionId = Column('eoption_id', Integer, ForeignKey('emerge_options.eoption_id'))
+       __tablename__ = 'configs_emerge_options'
+
+class BuildJobs(Base):
+       BuildJobId = Column('build_job_id', Integer, primary_key=True)
+       EbuildId = Column('ebuild_id', Integer, ForeignKey('ebuilds.ebuild_id'))
+       SetupId = Column('setup_id', Integer, ForeignKey('setups.setup_id'))
+       ConfigId = Column('config_id', Integer, ForeignKey('configs.config_id'))
+       Status = Column('status', Enum('Waiting','Building','Looked',))
+       BuildNow = Column('build_now', Boolean, default=False)
+       RemoveBin = Column('removebin', Boolean ,default=False)
+       TimeStamp = Column('time_stamp', DateTime, nullable=False, default=datetime.datetime.utcnow)
+       __tablename__ = 'build_jobs'
+
+class BuildJobsEmergeOptions(Base):
+       Id = Column('id', Integer, primary_key=True)
+       BuildJobId = Column('build_job_id', Integer, ForeignKey('build_jobs.build_job_id'))
+       EOption = Column('eoption_id', Integer, ForeignKey('emerge_options.eoption_id'))
+       __tablename__ = 'build_jobs_emerge_options'
+
+class BuildJobsRedo(Base):
+       Id = Column('id', Integer, primary_key=True)
+       BuildJobId = Column('build_job_id', Integer, ForeignKey('build_jobs.build_job_id'))
+       FailTimes = Column('fail_times', Integer)
+       FailType = Column('fail_type', String(50))
+       TimeStamp = Column('time_stamp', DateTime, nullable=False, default=datetime.datetime.utcnow)
+       __tablename__ = 'build_jobs_redo'
+
+class Uses(Base):
+       UseId = Column('use_id', Integer, primary_key=True)
+       Flag = Column('flag', String(150))
+       __tablename__ = 'uses'
+
+class BuildJobsUse(Base):
+       Id = Column('id', Integer, primary_key=True)
+       BuildJobId = Column('build_job_id', Integer, ForeignKey('build_jobs.build_job_id'))
+       UseId = Column('use_id', Integer, ForeignKey('uses.use_id'))
+       Status = Column('status', Boolean, default=False)
+       __tablename__ = 'build_jobs_use'
+
+class HiLightCss(Base):
+       HiLightCssId = Column('hilight_css_id', Integer, primary_key=True)
+       HiLightCssName = Column('hilight_css_name', String(30))
+       HiLightCssCollor = Column('hilight_css_collor', String(30))
+       __tablename__ = 'hilight_css'
+
+class HiLight(Base):
+       HiLightId = Column('hilight_id', Integer, primary_key=True)
+       HiLightSearch = Column('hilight_search', String(50))
+       HiLightSearchEnd = Column('hilight_search_end', String(50))
+       HiLightSearchPattern = Column('hilight_search_pattern', String(50))
+       HiLightCssId = Column('hilight_css_id', Integer, ForeignKey('hilight_css.hilight_css_id'))
+       HiLightStart = Column('hilight_start', Integer)
+       HiLightEnd = Column('hilight_end', Integer)
+       __tablename__ = 'hilight'
+
+class BuildLogs(Base):
+       BuildLogId = Column('build_log_id', Integer, primary_key=True)
+       EbuildId = Column('ebuild_id', Integer, ForeignKey('ebuilds.ebuild_id'))
+       Fail = Column('fail', Boolean, default=False)
+       SummeryText = Column('summery_text', Text)
+       LogHash = Column('log_hash', String(100))
+       BugId = Column('bug_id', Integer, default=0)
+       TimeStamp = Column('time_stamp', DateTime, nullable=False, default=datetime.datetime.utcnow)
+       __tablename__ = 'build_logs'
+
+class EmergeInfo(Base):
+       EInfoId = Column('einfo_id', Integer, primary_key=True)
+       EmergeInfoText = Column('emerge_info_text', Text)
+       __tablename__ = 'emerge_info'
+
+class BuildLogsConfig(Base):
+       LogId = Column('log_id', Integer, primary_key=True)
+       BuildLogId = Column('build_log_id', Integer, ForeignKey('build_logs.build_log_id'))
+       ConfigId = Column('config_id', Integer, ForeignKey('configs.config_id'))
+       EInfoId = Column('einfo_id', Integer, ForeignKey('emerge_info.einfo_id'))
+       LogName = Column('logname', String(450))
+       TimeStamp = Column('time_stamp', DateTime, nullable=False, default=datetime.datetime.utcnow)
+       __tablename__  = 'build_logs_config'
+
+class BuildLogsHiLight(Base):
+       BuildLogHiLightId = Column('id', Integer, primary_key=True)
+       LogId = Column('log_id', Integer, ForeignKey('build_logs_config.log_id'))
+       StartLine = Column('start_line', Integer)
+       EndLine = Column('end_line', Integer)
+       HiLightCssId = Column('hilight_css_id', Integer, ForeignKey('hilight_css.hilight_css_id'))
+       __tablename__ = 'build_logs_hilight'
+
+class BuildLogsEmergeOptions(Base):
+       Id = Column('id', Integer, primary_key=True)
+       BuildLogId = Column('build_log_id', Integer, ForeignKey('build_logs.build_log_id'))
+       EmergeOptionId = Column('eoption_id', Integer, ForeignKey('emerge_options.eoption_id'))
+       __tablename__ = 'build_logs_emerge_options'
+
+class BuildLogsUse(Base):
+       Id = Column('id', Integer, primary_key=True)
+       BuildLogId = Column('build_log_id', Integer, ForeignKey('build_logs.build_log_id'))
+       UseId = Column('use_id', Integer, ForeignKey('uses.use_id'))
+       Status = Column('status', Boolean, default=False)
+       __tablename__ = 'build_logs_use'
+
+class ErrorsInfo(Base):
+       ErrorId = Column('error_id', Integer, primary_key=True)
+       ErrorName = Column('error_name', String)
+       ErrorSearch = Column('error_search', String)
+       __tablename__ = 'errors_info'
+
+class BuildLogsErrors(Base):
+       BuildLogErrorId =  Column('id', Integer, primary_key=True)
+       BuildLogId = Column('build_log_id', Integer, ForeignKey('build_logs.build_log_id'))
+       ErrorId = Column('error_id', Integer, ForeignKey('errors_info.error_id'))
+       __tablename__ = 'build_logs_errors'
+
+class Restrictions(Base):
+       RestrictionId = Column('restriction_id', Integer, primary_key=True)
+       Restriction = Column('restriction', String(150))
+       __tablename__ = 'restrictions'
+
+class EbuildsRestrictions(Base):
+       Id =  Column('id', Integer, primary_key=True)
+       EbuildId = Column('ebuild_id', ForeignKey('ebuilds.ebuild_id'))
+       RestrictionId = Column('restriction_id', ForeignKey('restrictions.restriction_id'))
+       __tablename__ = 'ebuilds_restrictions'
+
+class EbuildsIUse(Base):
+       Id =  Column('id', Integer, primary_key=True)
+       EbuildId = Column('ebuild_id', ForeignKey('ebuilds.ebuild_id'))
+       UseId = Column('use_id', ForeignKey('uses.use_id'))
+       Status = Column('status', Boolean, default=False)
+       __tablename__= 'ebuilds_iuse'
+
+class EbuildsKeywords(Base):
+       Id =  Column('id', Integer, primary_key=True)
+       EbuildId = Column('ebuild_id', ForeignKey('ebuilds.ebuild_id'))
+       KeywordId = Column('keyword_id', ForeignKey('keywords.keyword_id'))
+       Status = Column('status', Enum('Stable','Unstable','Negative'))
+       __tablename__ = 'ebuilds_keywords'
+
+class EbuildsMetadata(Base):
+       Id =  Column('id', Integer, primary_key=True)
+       EbuildId = Column('ebuild_id', ForeignKey('ebuilds.ebuild_id'))
+       Revision = Column('revision', String(30))
+       __tablename__ = 'ebuilds_metadata'

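A minimal sketch of how the mapping classes above can be queried through an SQLAlchemy session. This is illustrative only and not part of the committed files; the sqlite in-memory engine is just a stand-in for the real database connection:

    from sqlalchemy import create_engine
    from sqlalchemy.orm import sessionmaker
    from tbc.db_mapping import Base, BuildLogs

    engine = create_engine('sqlite:///:memory:')  # any SQLAlchemy engine works; sqlite only for illustration
    Base.metadata.create_all(engine)              # create the tables declared in db_mapping.py
    session = sessionmaker(bind=engine)()
    # list all build logs that were marked as failed
    for build_log in session.query(BuildLogs).filter_by(Fail=True).all():
        print(build_log.BuildLogId, build_log.SummeryText)
    session.close()
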
diff --git a/tbc/pym/depclean.py b/tbc/pym/depclean.py
new file mode 100644
index 0000000..3154ac5
--- /dev/null
+++ b/tbc/pym/depclean.py
@@ -0,0 +1,53 @@
+# Copyright 1998-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+from __future__ import print_function
+import portage
+from portage._sets.base import InternalPackageSet
+from _emerge.main import parse_opts
+from tbc.actions import load_emerge_config, action_depclean, calc_depclean
+
+def do_depclean():
+       mysettings, mytrees, mtimedb = load_emerge_config()
+       myroot = mysettings["ROOT"]
+       root_config = mytrees[myroot]["root_config"]
+       # vardb is needed below to match the requested atoms against installed packages
+       vardb = mytrees[myroot]["vartree"].dbapi
+       psets = root_config.setconfig.psets
+       args_set = InternalPackageSet(allow_repo=True)
+       spinner=None
+       scheduler=None
+       tmpcmdline = []
+       tmpcmdline.append("--depclean")
+       tmpcmdline.append("--pretend")
+       print("depclean",tmpcmdline)
+       myaction, myopts, myfiles = parse_opts(tmpcmdline, silent=False)
+       if myfiles:
+               args_set.update(myfiles)
+               matched_packages = False
+               for x in args_set:
+                       if vardb.match(x):
+                               matched_packages = True
+               if not matched_packages:
+                       return 0
+
+       rval, cleanlist, ordered, req_pkg_count, unresolvable = 
calc_depclean(mysettings, mytrees, mtimedb["ldpath"], myopts, myaction, 
args_set, spinner)
+       print('rval, cleanlist, ordered, req_pkg_count, unresolvable', rval, 
cleanlist, ordered, req_pkg_count, unresolvable)
+       if unresolvable != []:
+               return True
+       if cleanlist != []:
+               conflict_package_list = []
+               for depclean_cpv in cleanlist:
+                       if portage.versions.cpv_getkey(depclean_cpv) in 
list(psets["system"]):
+                               conflict_package_list.append(depclean_cpv)
+                       if portage.versions.cpv_getkey(depclean_cpv) in 
list(psets['selected']):
+                               conflict_package_list.append(depclean_cpv)
+               print('conflict_package_list', conflict_package_list)
+               if conflict_package_list == []:
+                       tmpcmdline = []
+                       tmpcmdline.append("--depclean")
+                       myaction, myopts, myfiles = parse_opts(tmpcmdline, 
silent=False)
+                       rval = action_depclean(mysettings, mytrees, 
mtimedb["ldpath"], myopts, myaction, myfiles, spinner, scheduler=None)
+                       return True
+               else:
+                       print("conflicting packages: %s" % (conflict_package_list,))
+                       return True
+       return True

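A minimal usage sketch for the helper above, assuming a configured portage environment on the build host. do_depclean() first runs a pretend depclean and only performs the real depclean when no system or selected packages would be removed:

    from tbc.depclean import do_depclean

    do_depclean()
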
diff --git a/tbc/pym/flags.py b/tbc/pym/flags.py
new file mode 100644
index 0000000..eea48f3
--- /dev/null
+++ b/tbc/pym/flags.py
@@ -0,0 +1,231 @@
+#!/usr/bin/python
+#
+# Copyright 1998-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+# Originally from flags.py in the portage public API repo
+from __future__ import print_function
+from _emerge.main import parse_opts
+from _emerge.depgraph import backtrack_depgraph, depgraph, resume_depgraph
+from _emerge.create_depgraph_params import create_depgraph_params
+from _emerge.actions import load_emerge_config
+import portage
+import os
+
+class tbc_use_flags(object):
+       
+       def __init__(self, mysettings, myportdb, cpv):
+               self._mysettings = mysettings
+               self._myportdb = myportdb
+               self._cpv = cpv
+       
+       def get_iuse(self):
+               """Gets the current IUSE flags from the tree
+               To be used when a gentoolkit package object is not needed
+               @type cpv: string
+               @param cpv: cat/pkg-ver
+               @rtype list
+               @returns [] or the list of IUSE flags
+               """
+               return self._myportdb.aux_get(self._cpv, ["IUSE"])[0].split()
+               
+       def reduce_flag(self, flag):
+               """Absolute value function for a USE flag
+               @type flag: string
+               @param flag: the use flag to absolute.
+               @rtype: string
+               @return absolute USE flag
+               """
+               if flag[0] in ["+","-"]:
+                       return flag[1:]
+               else:
+                       return flag
+
+       def reduce_flags(self, the_list):
+               """Absolute value function for a USE flag list
+               @type the_list: list
+               @param the_list: the use flags to absolute.
+               @rtype: list
+               @return absolute USE flags
+               """
+               r=[]
+               for member in the_list:
+                       r.append(self.reduce_flag(member))
+               return r
+
+       def filter_flags(self, use, use_expand_hidden, usemasked, useforced):
+               """Filter function to remove hidden or otherwise not normally
+               visible USE flags from a list.
+               @type use: list
+               @param use: the USE flag list to be filtered.
+               @type use_expand_hidden: list
+               @param  use_expand_hidden: list of flags hidden.
+               @type usemasked: list
+               @param usemasked: list of masked USE flags.
+               @type useforced: list
+               @param useforced: the forced USE flags.
+               @rtype: list
+               @return the filtered USE flags.
+               """
+               # clean out some environment flags, since they will most 
probably
+               # be confusing for the user
+               for f in use_expand_hidden:
+                       f=f.lower() + "_"
+                       for x in use:
+                               if f in x:
+                                       use.remove(x)
+               # clean out any arch's
+               archlist = self._mysettings["PORTAGE_ARCHLIST"].split()
+               for a in use[:]:
+                       if a in archlist:
+                               use.remove(a)
+               # clean out any abi_ flag
+               for a in use[:]:
+                       if a.startswith("abi_"):
+                               use.remove(a)
+               # clean out any python_ flag
+               for a in use[:]:
+                       if a.startswith("python_"):
+                               use.remove(a)
+
+               # double check if any flags from usemasked or useforced are still there
+               masked = usemasked + useforced
+               for a in use[:]:
+                       if a in masked:
+                               use.remove(a)
+               return use
+
+       def get_all_cpv_use(self):
+               """Uses portage to determine final USE flags and settings for 
an emerge
+               @type cpv: string
+               @param cpv: eg cat/pkg-ver
+               @rtype: lists
+               @return  use, use_expand_hidden, usemask, useforce
+               """
+               use = None
+               self._mysettings.unlock()
+               try:
+                       self._mysettings.setcpv(self._cpv, use_cache=None, 
mydb=self._myportdb)
+                       use = self._mysettings['PORTAGE_USE'].split()
+                       use_expand_hidden = 
self._mysettings["USE_EXPAND_HIDDEN"].split()
+                       usemask = list(self._mysettings.usemask)
+                       useforce =  list(self._mysettings.useforce)
+               except KeyError:
+                       self._mysettings.reset()
+                       self._mysettings.lock()
+                       return [], [], [], []
+               # reset cpv filter
+               self._mysettings.reset()
+               self._mysettings.lock()
+               print(usemask)
+               return use, use_expand_hidden, usemask, useforce
+
+       def get_all_cpv_use_looked(self):
+               """Uses portage to determine final USE flags and settings for 
an emerge
+               @type cpv: string
+               @param cpv: eg cat/pkg-ver
+               @rtype: lists
+               @return  use, use_expand_hidden, usemask, useforce
+               """
+               # use = self._mysettings['PORTAGE_USE'].split()
+               use = os.environ['USE'].split()
+               use_expand_hidden = 
self._mysettings["USE_EXPAND_HIDDEN"].split()
+               usemask = list(self._mysettings.usemask)
+               useforce = list(self._mysettings.useforce)
+               return use, use_expand_hidden, usemask, useforce
+
+       def get_all_cpv_use_pkg(self, pkg, settings):
+               """Uses portage to determine final USE flags and settings for 
an emerge
+               @type cpv: string
+               @param cpv: eg cat/pkg-ver
+               @rtype: lists
+               @return  use, use_expand_hidden, usemask, useforce
+               """
+               # use = self._mysettings['PORTAGE_USE'].split()
+               use_list = list(pkg.use.enabled)
+               use_expand_hidden = settings["USE_EXPAND_HIDDEN"].split()
+               usemask = list(settings.usemask)
+               useforced = list(settings.useforce)
+               return use_list, use_expand_hidden, usemask, useforced
+
+       def get_flags(self):
+               """Retrieves all information needed to filter out hidden, 
masked, etc.
+               USE flags for a given package.
+
+               @type cpv: string
+               @param cpv: eg. cat/pkg-ver
+               @type final_setting: boolean
+               @param final_setting: used to also determine the final
+               environment USE flag settings and return them as well.
+               @rtype: list or list, list
+               @return IUSE or IUSE, final_flags
+               """
+               final_use, use_expand_hidden, usemasked, useforced = 
self.get_all_cpv_use()
+               iuse_flags = self.filter_flags(self.get_iuse(), 
use_expand_hidden, usemasked, useforced)
+               #flags = filter_flags(use_flags, use_expand_hidden, usemasked, 
useforced)
+               final_flags = self.filter_flags(final_use, use_expand_hidden, 
usemasked, useforced)
+               return iuse_flags, final_flags
+
+       def get_flags_looked(self):
+               """Retrieves all information needed to filter out hidden, 
masked, etc.
+               USE flags for a given package.
+
+               @type cpv: string
+               @param cpv: eg. cat/pkg-ver
+               @type final_setting: boolean
+               @param final_setting: used to also determine the final
+               environment USE flag settings and return them as well.
+               @rtype: list or list, list
+               @return IUSE or IUSE, final_flags
+               """
+               final_use, use_expand_hidden, usemasked, useforced = 
self.get_all_cpv_use_looked()
+               iuse_flags = self.filter_flags(self.get_iuse(), 
use_expand_hidden, usemasked, useforced)
+               #flags = filter_flags(use_flags, use_expand_hidden, usemasked, 
useforced)
+               final_flags = self.filter_flags(final_use, use_expand_hidden, 
usemasked, useforced)
+               return iuse_flags, final_flags
+
+       def get_flags_pkg(self, pkg, settings):
+               """Retrieves all information needed to filter out hidden, 
masked, etc.
+               USE flags for a given package.
+               @type cpv: string
+               @param cpv: eg. cat/pkg-ver
+               @type final_setting: boolean
+               @param final_setting: used to also determine the final
+               environment USE flag settings and return them as well.
+               @rtype: list or list, list
+               @return IUSE or IUSE, final_flags
+               """
+               final_use, use_expand_hidden, usemasked, useforced = 
self.get_all_cpv_use_pkg(pkg, settings)
+               iuse_flags = self.filter_flags(list(pkg.iuse.all), 
use_expand_hidden, usemasked, useforced)
+               #flags = filter_flags(use_flags, use_expand_hidden, usemasked, 
useforced)
+               final_flags = self.filter_flags(final_use, use_expand_hidden, 
usemasked, useforced)
+               return iuse_flags, final_flags
+
+       def comper_useflags(self, build_dict):
+               iuse_flags, use_enable = self.get_flags()
+               iuse = []
+               build_use_flags_dict = build_dict['build_useflags']
+               build_use_flags_list = []
+               if use_enable == []:
+                       if build_use_flags_dict is None:
+                               return None
+               for iuse_line in iuse_flags:
+                       iuse.append(self.reduce_flag(iuse_line))
+               iuse_flags_list = list(set(iuse))
+               use_disable = 
list(set(iuse_flags_list).difference(set(use_enable)))
+               use_flagsDict = {}
+               for x in use_enable:
+                       use_flagsDict[x] = True
+               for x in use_disable:
+                       use_flagsDict[x] = False
+               print("use_flagsDict", use_flagsDict)
+               for k, v in use_flagsDict.items():
+                       if build_use_flags_dict[k] != v:
+                               if build_use_flags_dict[k]:
+                                       build_use_flags_list.append(k)
+                               else:
+                                       build_use_flags_list.append("-" + k)
+               if build_use_flags_list == []:
+                       build_use_flags_list = None
+               print(build_use_flags_list)
+               return build_use_flags_list

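An illustrative sketch of driving tbc_use_flags; the cpv shown is a hypothetical example, and mysettings/myportdb are the usual portage config and portdbapi pair used throughout the backend:

    import portage
    from tbc.flags import tbc_use_flags

    mysettings = portage.config(config_root="/")
    myportdb = portage.portdbapi(mysettings=mysettings)
    init_useflags = tbc_use_flags(mysettings, myportdb, "app-editors/nano-6.4")  # hypothetical cpv
    iuse_flags, final_flags = init_useflags.get_flags()
    print(iuse_flags)   # filtered IUSE of the ebuild
    print(final_flags)  # USE flags that would be enabled for this config
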
diff --git a/tbc/pym/jobs.py b/tbc/pym/jobs.py
new file mode 100644
index 0000000..840932e
--- /dev/null
+++ b/tbc/pym/jobs.py
@@ -0,0 +1,86 @@
+# Copyright 1998-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+from __future__ import print_function
+from tbc.sync import sync_tree
+#from tbc.buildquerydb import add_buildquery_main, del_buildquery_main
+from tbc.updatedb import update_db_main
+from tbc.sqlquerys import get_config_id, add_tbc_logs, get_jobs, 
update_job_list
+
+def jobs_main(session, config_id):
+       JobsInfo = get_jobs(session, config_id)
+       if JobsInfo is None:
+               return
+       for JobInfo in JobsInfo:
+               job = JobInfo.JobType
+               run_config_id = JobInfo.RunConfigId
+               job_id = JobInfo.JobId
+               log_msg = "Job: %s Type: %s" % (job_id, job,)
+               add_tbc_logs(session, log_msg, "info", config_id)
+               if job == "addbuildquery":
+                       update_job_list(session, "Runing", job_id)
+                       log_msg = "Job %s is running." % (job_id,)
+                       add_tbc_logs(session, log_msg, "info", config_id)
+                       #result =  add_buildquery_main(run_config_id)
+                       #if result is True:
+                       #       update_job_list(session, "Done", job_id)
+                       #       log_msg = "Job %s is done.." % (job_id,)
+                       #       add_tbc_logs(session, log_msg, "info", 
config_id)
+                       #else:
+                       #       update_job_list(session, "Fail", job_id)
+                       #       log_msg = "Job %s did fail." % (job_id,)
+                       #       add_tbc_logs(session, log_msg, "info", 
config_id)
+               elif job == "delbuildquery":
+                       update_job_list(session, "Runing", job_id)
+                       log_msg = "Job %s is running." % (job_id,)
+                       add_tbc_logs(session, log_msg, "info", config_id)
+                       #result =  del_buildquery_main(config_id)
+                       #if result is True:
+                       #       update_job_list(session, "Done", job_id)
+                       #       log_msg = "Job %s is done.." % (job_id,)
+                       #       add_tbc_logs(session, log_msg, "info", 
config_id)
+                       #else:
+                       #       update_job_list(session, "Fail", job_id)
+                       #       log_msg = "Job %s did fail." % (job_id,)
+                       #       add_tbc_logs(session, log_msg, "info", 
config_id)
+               elif job == "gsync":
+                       update_job_list(session, "Runing", job_id)
+                       log_msg = "Job %s is running." % (job_id,)
+                       add_tbc_logs(session, log_msg, "info", config_id)
+                       result = True
+                       # result = git_pull(session)
+                       if result:
+                               update_job_list(session, "Done", job_id)
+                               log_msg = "Job %s is done." % (job_id,)
+                               add_tbc_logs(session, log_msg, "info", 
config_id)
+                       else:
+                               update_job_list(session, "Fail", job_id)
+                               log_msg = "Job %s failed." % (job_id,)
+                               add_tbc_logs(session, log_msg, "info", 
config_id)
+               elif job == "esync":
+                       update_job_list(session, "Runing", job_id)
+                       log_msg = "Job %s is running." % (job_id,)
+                       add_tbc_logs(session, log_msg, "info", config_id)
+                       result =  sync_tree(session)
+                       if result:
+                               update_job_list(session, "Done", job_id)
+                               log_msg = "Job %s is done." % (job_id,)
+                               add_tbc_logs(session, log_msg, "info", 
config_id)
+                       else:
+                               update_job_list(session, "Fail", job_id)
+                               log_msg = "Job %s failed." % (job_id,)
+                               add_tbc_logs(session, log_msg, "info", 
config_id)
+               elif job == "updatedb":
+                       update_job_list(session, "Runing", job_id)
+                       log_msg = "Job %s is running." % (job_id,)
+                       add_tbc_logs(session, log_msg, "info", config_id)
+                       result = update_db_main(session, config_id)
+                       if result:
+                               update_job_list(session, "Done", job_id)
+                               log_msg = "Job %s is done." % (job_id,)
+                               add_tbc_logs(session, log_msg, "info", 
config_id)
+                       else:
+                               update_job_list(session, "Fail", job_id)
+                               log_msg = "Job %s failed." % (job_id,)
+                               add_tbc_logs(session, log_msg, "info", 
config_id)
+       return

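A sketch of how jobs_main() is meant to be polled from the daemon loop; session is assumed to be an open SQLAlchemy session, and the setup and host names are hypothetical:

    from tbc.sqlquerys import get_config_id
    from tbc.jobs import jobs_main

    config_id = get_config_id(session, "default", "buildhost01")  # hypothetical setup/host
    jobs_main(session, config_id)  # run any jobs waiting for this config
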
diff --git a/tbc/pym/old_cpv.py b/tbc/pym/old_cpv.py
new file mode 100644
index 0000000..dbeba56
--- /dev/null
+++ b/tbc/pym/old_cpv.py
@@ -0,0 +1,97 @@
+# Copyright 1998-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+from __future__ import print_function
+import logging
+from tbc.readconf import get_conf_settings
+reader=get_conf_settings()
+tbc_settings_dict=reader.read_tbc_settings_all()
+# make a CM
+from tbc.ConnectionManager import connectionManager
+CM=connectionManager(tbc_settings_dict)
+# selectively import the pgsql/mysql queries
+if CM.getName()=='pgsql':
+       from tbc.pgsql_querys import *
+
+class tbc_old_cpv(object):
+       
+       def __init__(self, myportdb, mysettings):
+               self._mysettings = mysettings
+               self._myportdb = myportdb
+
+       def mark_old_ebuild_db(self, package_id):
+               conn=CM.getConnection()
+               # Get the ebuild list for cp
+               cp, repo = get_cp_repo_from_package_id(conn, package_id)
+               mytree = []
+               mytree.append(self._myportdb.getRepositoryPath(repo))
+               ebuild_list_tree = self._myportdb.cp_list(cp, use_cache=1, 
mytree=mytree)
+               # Get ebuild list on categories, package in the db
+               ebuild_list_db = cp_list_db(conn, package_id)
+               # Check if the ebuild is no longer in the tree
+               # and add it to the not-active list
+               old_ebuild_list = []
+               for ebuild_line in ebuild_list_db:
+                       cpv_db = cp + "-" + ebuild_line[0]
+                       if not cpv_db in ebuild_list_tree:
+                               old_ebuild_list.append(ebuild_line)
+                       # Mark ebuilds in the db that are no longer in the tree as not active
+                       if  old_ebuild_list != []:
+                               for old_ebuild in old_ebuild_list:
+                                       logging.info("O %s-%s", cp, old_ebuild[0])
+                                       add_old_ebuild(conn,package_id, 
old_ebuild_list)
+               # Check if we have not-active ebuilds older than 60 days
+               ebuild_old_list_db = cp_list_old_db(conn,package_id)
+               # Delete older ebuilds in the db
+               if ebuild_old_list_db != []:
+                       for del_ebuild_old in ebuild_old_list_db:
+                               logging.info("D %s-%s", cp, del_ebuild_old[1])
+                       del_old_ebuild(conn,ebuild_old_list_db)
+               CM.putConnection(conn)
+
+       def mark_old_package_db(self, package_id_list_tree):
+               conn=CM.getConnection()
+               # Get categories/package list from db
+               package_list_db = cp_all_db(conn)
+               old_package_id_list = []
+               # Check if the categories/package is no longer in the tree
+               # and add it to the not-active list
+               for package_line in package_list_db:
+                       if not package_line in package_id_list_tree:
+                               old_package_id_list.append(package_line)
+               # Mark categories/packages and ebuilds in the db that are no longer in the tree as not active
+               if old_package_id_list != []:
+                       mark_old_list = 
add_old_package(conn,old_package_id_list)
+                       if mark_old_list != []:
+                               for x in mark_old_list:
+                                       element = get_cp_from_package_id(conn,x)
+                                       logging.info("O %s", element[0])
+               # Check if we have not-active categories/packages older than 60 days
+               del_package_id_old_list = 
cp_all_old_db(conn,old_package_id_list)
+               # Delete older  categories/package and ebuilds in the db
+               if del_package_id_old_list != []:
+                       for i in del_package_id_old_list:
+                               element = get_cp_from_package_id(conn,i)
+                               logging.info("D %s", element)
+                       del_old_package(conn,del_package_id_old_list)
+               CM.putConnection(conn)
+               
+       def mark_old_categories_db(self):
+               conn=CM.getConnection()
+               # Get categories list from the tree and db
+               categories_list_tree = self._mysettings.categories
+               categories_list_db =get_categories_db(conn)
+               categories_old_list = []
+               # Check if the category is no longer in the tree
+               # and add it to the not-active list
+               for categories_line in categories_list_db:
+                       if not categories_line[0] in categories_list_tree:
+                               old_c = 
get_old_categories(conn,categories_line[0])
+                               if old_c is not None:
+                                       
categories_old_list.append(categories_line)
+               # Delete older  categories in the db
+               if categories_old_list != []:
+                       for real_old_categories in categories_old_list:
+                               del_old_categories(conn,real_old_categories)
+                               logging.info("D %s", real_old_categories)
+               CM.putConnection(conn)
\ No newline at end of file

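For orientation, a sketch of the intended call pattern for tbc_old_cpv (note that this module still goes through the older connectionManager interface); myportdb/mysettings are the usual portage objects and package_id is a hypothetical row id from the packages table:

    from tbc.old_cpv import tbc_old_cpv

    init_old_cpv = tbc_old_cpv(myportdb, mysettings)
    init_old_cpv.mark_old_ebuild_db(package_id)  # mark/remove ebuilds that left the tree
    init_old_cpv.mark_old_categories_db()        # same for whole categories
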
diff --git a/tbc/pym/package.py b/tbc/pym/package.py
new file mode 100644
index 0000000..af1bff5
--- /dev/null
+++ b/tbc/pym/package.py
@@ -0,0 +1,355 @@
+# Copyright 1998-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+from __future__ import print_function
+import portage
+from portage.xml.metadata import MetaDataXML
+from tbc.flags import tbc_use_flags
+from tbc.manifest import tbc_manifest
+from tbc.text import get_ebuild_cvs_revision
+from tbc.sqlquerys import add_tbc_logs, get_package_info, get_config_info, \
+       add_new_build_job, add_new_ebuild_sql, get_ebuild_id_list, 
add_old_ebuild, \
+       get_package_metadata_sql, update_package_metadata, update_manifest_sql, 
\
+       get_package_info_from_package_id, get_config_all_info, 
add_new_package_sql, \
+       get_ebuild_checksums, get_ebuild_id_db, get_configmetadata_info, 
get_setup_info
+from tbc.readconf import get_conf_settings
+
+class tbc_package(object):
+
+       def __init__(self, session, mysettings, myportdb, config_id, 
tbc_settings_dict):
+               self._session = session
+               self._mysettings = mysettings
+               self._myportdb = myportdb
+               self._config_id = config_id
+               self._tbc_settings_dict = tbc_settings_dict
+
+       def change_config(self, host_config):
+               # Change config_root to the per host/setup config dir (config/setup from the configs table)
+               my_new_setup = "/var/cache/tbc/" + 
self._tbc_settings_dict['tbc_gitreponame'] + "/" + host_config + "/"
+               mysettings_setup = portage.config(config_root = my_new_setup)
+               return mysettings_setup
+
+       def config_match_ebuild(self, cp, config_list):
+               config_cpv_listDict ={}
+               if config_list == []:
+                       return config_cpv_listDict
+               for config_id in config_list:
+                       ConfigInfo = get_config_info(self._session, config_id)
+                       ConfigsMetaData = 
get_configmetadata_info(self._session, config_id)
+                       if ConfigsMetaData.Auto and ConfigsMetaData.Active and 
ConfigsMetaData.Status != 'Stopped':
+                               SetupInfo = get_setup_info(self._session, 
config_id)
+                               mysettings_setup = 
self.change_config(ConfigInfo.Hostname + "/" + SetupInfo.Setup)
+                               myportdb_setup = 
portage.portdbapi(mysettings=mysettings_setup)
+
+                               # Get the latest cpv from portage with the 
config that we can build
+                               build_cpv = 
myportdb_setup.xmatch('bestmatch-visible', cp)
+
+                               # Check if could get cpv from portage and add 
it to the config_cpv_listDict.
+                               if build_cpv != "" and not ConfigInfo.SetupId 
in config_cpv_listDict:
+
+                                       # Get the iuse and use flags for that 
config/setup and cpv
+                                       init_useflags = 
tbc_use_flags(mysettings_setup, myportdb_setup, build_cpv)
+                                       iuse_flags_list, final_use_list = 
init_useflags.get_flags()
+                                       iuse_flags_list2 = []
+                                       for iuse_line in iuse_flags_list:
+                                               iuse_flags_list2.append( 
init_useflags.reduce_flag(iuse_line))
+
+                                       # Dict the needed info
+                                       attDict = {}
+                                       attDict['cpv'] = build_cpv
+                                       attDict['useflags'] = final_use_list
+                                       attDict['iuse'] = iuse_flags_list2
+                                       config_cpv_listDict[ConfigInfo.SetupId] 
= attDict
+
+                               # Clean some cache
+                               myportdb_setup.close_caches()
+                               
portage.portdbapi.portdbapi_instances.remove(myportdb_setup)
+               return config_cpv_listDict
+
+       def get_ebuild_metadata(self, cpv, repo):
+               # Get the auxdbkeys infos for the ebuild
+               try:
+                       ebuild_auxdb_list = self._myportdb.aux_get(cpv, 
portage.auxdbkeys, myrepo=repo)
+               except:
+                       ebuild_auxdb_list = None
+               else:
+                       for i in range(len(ebuild_auxdb_list)):
+                               if ebuild_auxdb_list[i] == '':
+                                       ebuild_auxdb_list[i] = ''
+                       return ebuild_auxdb_list
+
+       def get_packageDict(self, pkgdir, cpv, repo):
+
+               #Get categories, package and version from cpv
+               ebuild_version_tree = portage.versions.cpv_getversion(cpv)
+               element = portage.versions.cpv_getkey(cpv).split('/')
+               categories = element[0]
+               package = element[1]
+
+               # Make a checksum of the ebuild
+               try:
+                       ebuild_version_checksum_tree = 
portage.checksum.sha256hash(pkgdir + "/" + package + "-" + ebuild_version_tree 
+ ".ebuild")[0]
+               except:
+                       ebuild_version_checksum_tree = "0"
+                       log_msg = "QA: Can't checksum the ebuild file. %s on 
repo %s" % (cpv, repo,)
+                       add_tbc_logs(self._session, log_msg, "info", 
self._config_id)
+                       log_msg = "C %s:%s ... Fail." % (cpv, repo)
+                       add_tbc_logs(self._session, log_msg, "info", 
self._config_id)
+                       ebuild_version_cvs_revision_tree = '0'
+               else:
+                       ebuild_version_cvs_revision_tree = 
get_ebuild_cvs_revision(pkgdir + "/" + package + "-" + ebuild_version_tree + 
".ebuild")
+
+               # Get the ebuild metadata
+               ebuild_version_metadata_tree = self.get_ebuild_metadata(cpv, 
repo)
+               # If there was an error getting the metadata we add placeholder values to
+               # ebuild_version_metadata_tree and set ebuild_version_checksum_tree to 0
+               # so it can be updated the next time we update the db
+               if ebuild_version_metadata_tree  is None:
+                       log_msg = " QA: %s have broken metadata on repo %s" % 
(cpv, repo)
+                       add_tbc_logs(self._session, log_msg, "info", 
self._config_id)
+                       ebuild_version_metadata_tree = 
['','','','','','','','','','','','','','','','','','','','','','','','','']
+                       ebuild_version_checksum_tree = '0'
+
+               # add the ebuild info to the dict packages
+               PackageInfo = get_package_info(self._session, categories, 
package, repo)
+               attDict = {}
+               attDict['package_id'] = PackageInfo.PackageId
+               attDict['repo'] = repo
+               attDict['ebuild_version'] = ebuild_version_tree
+               attDict['checksum']= ebuild_version_checksum_tree
+               attDict['ebuild_version_metadata_tree'] = 
ebuild_version_metadata_tree
+               #attDict['ebuild_version_text_tree'] = 
ebuild_version_text_tree[0]
+               attDict['ebuild_version_revision_tree'] = 
ebuild_version_cvs_revision_tree
+               return attDict
+
+       def add_new_build_job_db(self, ebuild_id_list, packageDict, 
config_cpv_listDict):
+               # Get the needed info from packageDict and config_cpv_listDict 
and put that in buildqueue
+               # Only add it if ebuild_version in packageDict and 
config_cpv_listDict match
+               if config_cpv_listDict is not None:
+                       # Unpack config_cpv_listDict
+                       for setup_id, v in config_cpv_listDict.items():
+                               build_cpv = v['cpv']
+                               iuse_flags_list = list(set(v['iuse']))
+                               use_enable= v['useflags']
+                               use_disable = 
list(set(iuse_flags_list).difference(set(use_enable)))
+                               # Make a dict with enable and disable use flags 
for ebuildqueuedwithuses
+                               use_flagsDict = {}
+                               for x in use_enable:
+                                       use_flagsDict[x] = True
+                               for x in use_disable:
+                                       use_flagsDict[x] = False
+                               # Unpack packageDict
+                               i = 0
+                               for k, v in packageDict.items():
+                                       ebuild_id = ebuild_id_list[i]
+
+                                       # Compare and add the cpv to the build queue
+                                       if build_cpv == k:
+                                               
add_new_build_job(self._session, ebuild_id, setup_id, use_flagsDict, 
self._config_id)
+                                               # B = Build cpv use-flags config
+                                               # FIXME: log_msg needs a fix to log the use flags correctly.
+                                               log_msg = "B %s:%s USE: %s 
Setup: %s" % (k, v['repo'], use_flagsDict, setup_id,)
+                                               add_tbc_logs(self._session, 
log_msg, "info", self._config_id)
+                                       i = i +1
+
+       def get_package_metadataDict(self, pkgdir, package_id):
+               # Make package_metadataDict
+               attDict = {}
+               package_metadataDict = {}
+               md_email_list = []
+               # changelog_checksum_tree = portage.checksum.sha256hash(pkgdir 
+ "/ChangeLog")
+               # changelog_text_tree = get_file_text(pkgdir + "/ChangeLog")
+               herd = None
+               pkg_md = MetaDataXML(pkgdir + "/metadata.xml", herd)
+               #metadata_xml_text_tree = get_file_text(pkgdir + 
"/metadata.xml")
+               # attDict['changelog_checksum'] =  changelog_checksum_tree[0]
+               # attDict['changelog_text'] =  changelog_text_tree
+               tmp_herds = pkg_md.herds()
+               if tmp_herds != ():
+                       attDict['metadata_xml_herds'] = tmp_herds[0]
+                       md_email_list.append(attDict['metadata_xml_herds'] + 
'@gentoo.org')
+               for maint in pkg_md.maintainers():
+                       md_email_list.append(maint.email)
+               if md_email_list != []:
+                       attDict['metadata_xml_email'] = md_email_list
+               else:
+                       log_msg = "Metadata file %s missing Email" % (pkgdir + 
"/metadata.xml")
+                       add_tbc_logs(self._session, log_msg, "qa", 
self._config_id)
+                       attDict['metadata_xml_email'] = False
+               attDict['metadata_xml_checksum'] =  
portage.checksum.sha256hash(pkgdir + "/metadata.xml")[0]
+               #attDict['metadata_xml_text'] =  metadata_xml_text_tree
+               package_metadataDict[package_id] = attDict
+               return package_metadataDict
+
+       def add_package(self, packageDict, package_metadataDict, package_id, 
new_ebuild_id_list, old_ebuild_id_list, manifest_checksum_tree):
+               # Use packageDict to update the db
+               ebuild_id_list = add_new_ebuild_sql(self._session, packageDict)
+               
+               # Mark old ebuilds as not active
+               for ebuild_id in ebuild_id_list:
+                       new_ebuild_id_list.append(ebuild_id)
+               for ebuild_id in get_ebuild_id_list(self._session, package_id):
+                       if not ebuild_id in new_ebuild_id_list:
+                               if not ebuild_id in old_ebuild_id_list:
+                                       old_ebuild_id_list.append(ebuild_id)
+               if not old_ebuild_id_list == []:
+                       add_old_ebuild(self._session, old_ebuild_id_list)
+               PackagesMetadataInfo = get_package_metadata_sql(self._session, 
package_id)
+               if PackagesMetadataInfo:
+                       package_metadata_checksum_sql = 
PackagesMetadataInfo.Checksum
+               else:
+                       package_metadata_checksum_sql = None
+               if package_metadata_checksum_sql is None or 
package_metadata_checksum_sql != 
package_metadataDict[package_id]['metadata_xml_checksum']:
+                       update_package_metadata(self._session, 
package_metadataDict)
+
+               # update the cp manifest checksum
+               update_manifest_sql(self._session, package_id, 
manifest_checksum_tree)
+
+               # Get the best cpv for the configs and add it to 
config_cpv_listDict
+               PackageInfo, CategoryInfo, RepoInfo = 
get_package_info_from_package_id(self._session, package_id)
+               cp = CategoryInfo.Category + '/' + PackageInfo.Package
+               config_all_info  = get_config_all_info(self._session)
+               config_list = []
+               for config in get_config_all_info(self._session):
+                       if config.Host is False:
+                               config_list.append(config.ConfigId)
+               config_cpv_listDict = self.config_match_ebuild(cp, config_list)
+
+               # Add the ebuild to the build jobs table if needed
+               self.add_new_build_job_db(ebuild_id_list, packageDict, 
config_cpv_listDict)
+
+       def add_new_package_db(self, cp, repo):
+               # Add the new category/package and its ebuilds to the packages and ebuilds tables
+               # C = Checking
+               # N = New Package
+               log_msg = "C %s:%s" % (cp, repo)
+               add_tbc_logs(self._session, log_msg, "info", self._config_id)
+               log_msg = "N %s:%s" % (cp, repo)
+               add_tbc_logs(self._session, log_msg, "info", self._config_id)
+               repodir = self._myportdb.getRepositoryPath(repo)
+               pkgdir = repodir + "/" + cp # Get RepoDIR + cp
+
+               # Get the cp manifest file checksum.
+               try:
+                       manifest_checksum_tree = 
portage.checksum.sha256hash(pkgdir + "/Manifest")[0]
+               except:
+                       manifest_checksum_tree = "0"
+                       log_msg = "QA: Can't checksum the Manifest file. 
:%s:%s" % (cp, repo,)
+                       add_tbc_logs(self._session, log_msg, "info", 
self._config_id)
+                       log_msg = "C %s:%s ... Fail." % (cp, repo)
+                       add_tbc_logs(self._session, log_msg, "info", 
self._config_id)
+                       return None
+               package_id = add_new_package_sql(self._session, cp, repo)
+               
+               package_metadataDict = self.get_package_metadataDict(pkgdir, 
package_id)
+               # Get the ebuild list for cp
+               mytree = []
+               mytree.append(repodir)
+               ebuild_list_tree = self._myportdb.cp_list(cp, use_cache=1, 
mytree=mytree)
+               if ebuild_list_tree == []:
+                       log_msg = "QA: Can't get the ebuilds list. %s:%s" % 
(cp, repo,)
+                       add_tbc_logs(self._session, log_msg, "info", 
self._config_id)
+                       log_msg = "C %s:%s ... Fail." % (cp, repo)
+                       add_tbc_logs(self._session, log_msg, "info", 
self._config_id)
+                       return None
+
+               # Make the needed packageDict with ebuild infos so we can add 
it later to the db.
+               packageDict ={}
+               new_ebuild_id_list = []
+               old_ebuild_id_list = []
+               for cpv in sorted(ebuild_list_tree):
+                       packageDict[cpv] = self.get_packageDict(pkgdir, cpv, 
repo)
+
+               self.add_package(packageDict, package_metadataDict, package_id, 
new_ebuild_id_list, old_ebuild_id_list, manifest_checksum_tree)
+               log_msg = "C %s:%s ... Done." % (cp, repo)
+               add_tbc_logs(self._session, log_msg, "info", self._config_id)
+
+       def update_package_db(self, package_id):
+               # Update the categories and package with new info
+               # C = Checking
+               PackageInfo, CategoryInfo, RepoInfo = 
get_package_info_from_package_id(self._session, package_id)
+               cp = CategoryInfo.Category + '/' + PackageInfo.Package
+               repo = RepoInfo.Repo
+               log_msg = "C %s:%s" % (cp, repo)
+               add_tbc_logs(self._session, log_msg, "info", self._config_id)
+               repodir = self._myportdb.getRepositoryPath(repo)
+               pkgdir = repodir + "/" + cp # Get RepoDIR + cp
+
+               # Get the cp manifest file checksum
+               try:
+                       manifest_checksum_tree = 
portage.checksum.sha256hash(pkgdir + "/Manifest")[0]
+               except:
+                       manifest_checksum_tree = "0"
+                       log_msg = "QA: Can't checksum the Manifest file. %s:%s" 
% (cp, repo,)
+                       add_tbc_logs(self._session, log_msg, "info", 
self._config_id)
+                       log_msg = "C %s:%s ... Fail." % (cp, repo)
+                       add_tbc_logs(self._session, log_msg, "info", 
self._config_id)
+                       return None
+
+               # If we do NOT have the same checksum in the db, update the package
+               if manifest_checksum_tree != PackageInfo.Checksum:
+
+                       # U = Update
+                       log_msg = "U %s:%s" % (cp, repo)
+                       add_tbc_logs(self._session, log_msg, "info", 
self._config_id)
+
+                       # Get the ebuild list for cp
+                       mytree = []
+                       mytree.append(repodir)
+                       ebuild_list_tree = self._myportdb.cp_list(cp, 
use_cache=1, mytree=mytree)
+                       if ebuild_list_tree == []:
+                               log_msg = "QA: Can't get the ebuilds list. 
%s:%s" % (cp, repo,)
+                               add_tbc_logs(self._session, log_msg, "info", 
self._config_id)
+                               log_msg = "C %s:%s ... Fail." % (cp, repo)
+                               add_tbc_logs(self._session, log_msg, "info", 
self._config_id)
+                               return None
+                       packageDict ={}
+                       new_ebuild_id_list = []
+                       old_ebuild_id_list = []
+                       for cpv in sorted(ebuild_list_tree):
+
+                               # split out ebuild version
+                               ebuild_version_tree = 
portage.versions.cpv_getversion(cpv)
+                               
+                               # Get packageDict for cpv
+                               packageDict[cpv] = self.get_packageDict(pkgdir, 
cpv, repo)
+
+                               # Get the checksum of the ebuild in tree and db
+                               ebuild_version_checksum_tree = 
packageDict[cpv]['checksum']
+                               checksums_db, fail= 
get_ebuild_checksums(self._session, package_id, ebuild_version_tree)
+                               # check if we have dupes of the checksum from db
+                               if checksums_db is None:
+                                       ebuild_version_manifest_checksum_db = 
None
+                               elif fail:
+                                       dupe_ebuild_id_list = []
+                                       for checksum in checksums_db:
+                                               ebuilds_id , status = 
get_ebuild_id_db(self._session, checksum, package_id)
+                                               for ebuild_id in ebuilds_id:
+                                                       log_msg = "U %s:%s:%s 
Duplicate checksums" % (cpv, repo, ebuild_id,)
+                                                       
add_tbc_logs(self._session, log_msg, "error", self._config_id)
+                                                       
dupe_ebuild_id_list.append(ebuild_id)
+                                       add_old_ebuild(self._session, 
dupe_ebuild_id_list)
+                                       ebuild_version_manifest_checksum_db = 
None
+                               else:
+                                       ebuild_version_manifest_checksum_db = 
checksums_db
+
+                               # Check if the checksum has changed
+                               if ebuild_version_manifest_checksum_db is None:
+                                       # N = New ebuild
+                                       log_msg = "N %s:%s" % (cpv, repo,)
+                                       add_tbc_logs(self._session, log_msg, 
"info", self._config_id)
+                               elif  ebuild_version_checksum_tree != 
ebuild_version_manifest_checksum_db:
+                                       # U = Updated ebuild
+                                       log_msg = "U %s:%s" % (cpv, repo,)
+                                       add_tbc_logs(self._session, log_msg, 
"info", self._config_id)
+                               else:
+                                       # Remove cpv from packageDict and add the ebuild to the new ebuilds list
+                                       del packageDict[cpv]
+                                       ebuild_id , status = 
get_ebuild_id_db(self._session, ebuild_version_checksum_tree, package_id)
+                                       new_ebuild_id_list.append(ebuild_id)
+                       package_metadataDict = 
self.get_package_metadataDict(pkgdir, package_id)
+                       self.add_package(packageDict, package_metadataDict, 
package_id, new_ebuild_id_list, old_ebuild_id_list, manifest_checksum_tree)
+
+               log_msg = "C %s:%s ... Done." % (cp, repo)
+               add_tbc_logs(self._session, log_msg, "info", self._config_id)

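A hedged sketch of how tbc_package is driven by the update code; session, mysettings, myportdb, config_id and tbc_settings_dict are assumed to be set up as in the rest of the backend, and the cp/repo values are hypothetical:

    from tbc.package import tbc_package

    init_package = tbc_package(session, mysettings, myportdb, config_id, tbc_settings_dict)
    init_package.add_new_package_db("app-editors/nano", "gentoo")  # hypothetical cp and repo
    init_package.update_package_db(package_id)  # refresh a package that is already in the db
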
diff --git a/tbc/pym/readconf.py b/tbc/pym/readconf.py
new file mode 100644
index 0000000..db1056e
--- /dev/null
+++ b/tbc/pym/readconf.py
@@ -0,0 +1,58 @@
+# Copyright 1998-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+import os
+import sys
+import re
+from socket import getfqdn
+
+class get_conf_settings(object):
+# open the /etc/tbc/tbc.conf file and get the needed
+# settings for tbc
+       def __init__(self):
+               self.configfile = "/etc/tbc/tbc.conf"
+
+       def read_tbc_settings_all(self):
+       # It will return a dict with options from the configfile
+               try:
+                       open_conffile = open(self.configfile, 'r')
+               except:
+                       sys.exit("Failed to open config file: " + self.configfile)
+               textlines = open_conffile.readlines()
+               for line in textlines:
+                       element = line.split('=')
+                       if element[0] == 'SQLBACKEND':          # Database backend
+                               get_sql_backend = element[1]
+                       if element[0] == 'SQLDB':                       # 
Database
+                               get_sql_db = element[1]
+                       if element[0] == 'SQLHOST':                     # Host
+                               get_sql_host = element[1]
+                       if element[0] == 'SQLUSER':                     # User
+                               get_sql_user = element[1]
+                       if element[0] == 'SQLPASSWD':           # Password
+                               get_sql_passwd = element[1]
+                       # Buildhost root (dir for host/setup on host)
+                       if element[0] == 'ZOBCSGITREPONAME':
+                               get_tbc_gitreponame = element[1]
+                       # Buildhost setup (host/setup on guest)
+                       if element[0] == 'ZOBCSCONFIG':
+                               get_tbc_config = element[1]
+                       # if element[0] == 'LOGFILE':
+                       #       get_tbc_logfile = element[1]
+               open_conffile.close()
+
+               tbc_settings_dict = {}
+               tbc_settings_dict['sql_backend'] = get_sql_backend.rstrip('\n')
+               tbc_settings_dict['sql_db'] = get_sql_db.rstrip('\n')
+               tbc_settings_dict['sql_host'] = get_sql_host.rstrip('\n')
+               tbc_settings_dict['sql_user'] = get_sql_user.rstrip('\n')
+               tbc_settings_dict['sql_passwd'] = get_sql_passwd.rstrip('\n')
+               tbc_settings_dict['tbc_gitreponame'] = 
get_tbc_gitreponame.rstrip('\n')
+               tbc_settings_dict['tbc_config'] = get_tbc_config.rstrip('\n')
+               tbc_settings_dict['hostname'] = getfqdn()
+               # tbc_settings_dict['tbc_logfile'] = 
get_tbc_logfile.rstrip('\n')
+               return tbc_settings_dict
+
+def read_config_settings():
+       reader = get_conf_settings()
+       return reader.read_tbc_settings_all()

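A small sketch of reading the settings; the dict keys shown are the ones populated above:

    from tbc.readconf import read_config_settings

    tbc_settings = read_config_settings()
    print(tbc_settings['sql_host'], tbc_settings['sql_db'])
    print(tbc_settings['hostname'])  # fully qualified host name from getfqdn()
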
diff --git a/tbc/pym/sqlquerys.py b/tbc/pym/sqlquerys.py
new file mode 100644
index 0000000..623f0b0
--- /dev/null
+++ b/tbc/pym/sqlquerys.py
@@ -0,0 +1,512 @@
+# Copyright 1998-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+from __future__ import print_function
+import datetime
+from tbc.db_mapping import Configs, Logs, ConfigsMetaData, Jobs, BuildJobs, 
Packages, Ebuilds, Repos, Categories, \
+       Uses, ConfigsEmergeOptions, EmergeOptions, HiLight, BuildLogs, 
BuildLogsConfig, BuildJobsUse, BuildJobsRedo, \
+       HiLightCss, BuildLogsHiLight, BuildLogsEmergeOptions, BuildLogsErrors, 
ErrorsInfo, EmergeInfo, BuildLogsUse, \
+       BuildJobsEmergeOptions, EbuildsMetadata, EbuildsIUse, Restrictions, 
EbuildsRestrictions, EbuildsKeywords, \
+        Keywords, PackagesMetadata, Emails, PackagesEmails, Setups
+from sqlalchemy.orm.exc import NoResultFound, MultipleResultsFound
+from sqlalchemy import and_, or_
+
+# Guest Functions
+def get_config_id(session, setup, host):
+       SetupInfo = session.query(Setups).filter_by(Setup = setup).one()
+       ConfigInfo = session.query(Configs).filter_by(SetupId = 
SetupInfo.SetupId).filter_by(Hostname = host).one()
+       return ConfigInfo.ConfigId
+
+def add_tbc_logs(session, log_msg, log_type, config_id):
+       Add_Log = Logs(ConfigId = config_id, LogType = log_type, Msg = log_msg)
+       session.add(Add_Log)
+       session.commit()
+
+def update_deamon_status(session, status, config_id):
+       ConfigInfo = session.query(ConfigsMetaData).filter_by(ConfigId = 
config_id).one()
+       ConfigInfo.Status = status
+       session.commit()
+
+def get_jobs(session, config_id):
+       JobsInfo = session.query(Jobs).filter_by(Status = 
'Waiting').filter_by(ConfigId = config_id).order_by(Jobs.JobId).all()
+       if JobsInfo == []:
+               return None
+       return JobsInfo
+
+def update_job_list(session, status, job_id):
+       JobInfo = session.query(Jobs).filter_by(JobId = job_id).one()
+       JobInfo.Status = status
+       session.commit()
+
+def get_config_all_info(session):
+       return session.query(Configs).all()
+
+def get_config_info(session, config_id):
+       ConfigInfo = session.query(Configs).filter_by(ConfigId = 
config_id).one()
+       return ConfigInfo
+
+def get_setup_info(session, config_id):
+       ConfigInfo = get_config_info(session, config_id)
+       SetupInfo = session.query(Setups).filter_by(SetupId = 
ConfigInfo.SetupId).one()
+       return SetupInfo
+
+def update_buildjobs_status(session, build_job_id, status, config_id):
+       BuildJobsInfo = session.query(BuildJobs).filter_by(BuildJobId = 
build_job_id).one()
+       BuildJobsInfo.Status = status
+       BuildJobsInfo.ConfigId = config_id
+       session.commit()
+
+def get_configmetadata_info(session, config_id):
+       return session.query(ConfigsMetaData).filter_by(ConfigId = 
config_id).one()
+
+def is_build_job_done(session, build_job_id):
+       try:
+               BuildJobsInfo = session.query(BuildJobs).filter_by(BuildJobId = 
build_job_id).one()
+       except NoResultFound as e:
+               return False
+       return True
+
+def get_packages_to_build(session, config_id):
+       SetupInfo = get_setup_info(session, config_id)
+       BuildJobsTmp = session.query(BuildJobs).filter(BuildJobs.SetupId==SetupInfo.SetupId). \
+                               order_by(BuildJobs.BuildJobId)
+       # Nothing to do if the setup has no waiting build jobs at all.
+       if session.query(BuildJobs).filter_by(SetupId = SetupInfo.SetupId).filter_by(BuildNow = True).filter_by(Status = 'Waiting').all() == [] and session.query(BuildJobs).filter_by(SetupId = SetupInfo.SetupId).filter_by(Status = 'Waiting').all() == []:
+               return None
+       # Take the oldest waiting job, preferring jobs flagged BuildNow.
+       if BuildJobsTmp.filter_by(BuildNow = True).first() is not None:
+               BuildJobsInfo = session.query(BuildJobs).filter_by(SetupId = SetupInfo.SetupId).filter_by(BuildNow = True). \
+                       filter_by(Status = 'Waiting').order_by(BuildJobs.BuildJobId).first()
+       else:
+               BuildJobsInfo = session.query(BuildJobs).filter_by(SetupId = SetupInfo.SetupId).filter_by(Status = 'Waiting').\
+                       order_by(BuildJobs.BuildJobId).first()
+       # Mark the job as taken and collect everything the guest needs to emerge it.
+       update_buildjobs_status(session, BuildJobsInfo.BuildJobId, 'Looked', config_id)
+       EbuildsInfo = session.query(Ebuilds).filter_by(EbuildId = BuildJobsInfo.EbuildId).one()
+       PackagesInfo, CategoriesInfo = session.query(Packages, Categories).filter(Packages.PackageId==EbuildsInfo.PackageId).filter(Packages.CategoryId==Categories.CategoryId).one()
+       ReposInfo = session.query(Repos).filter_by(RepoId = PackagesInfo.RepoId).one()
+       uses = {}
+       for BuildJobsUseInfo, UsesInfo in session.query(BuildJobsUse, Uses).filter(BuildJobsUse.BuildJobId==BuildJobsInfo.BuildJobId).filter(BuildJobsUse.UseId==Uses.UseId).all():
+               uses[UsesInfo.Flag] = BuildJobsUseInfo.Status
+       if uses == {}:
+               uses = None
+       emerge_options_list = []
+       for ConfigsEmergeOptionsInfo, EmergeOptionsInfo in session.query(ConfigsEmergeOptions, EmergeOptions). \
+                       filter(ConfigsEmergeOptions.ConfigId==config_id). \
+                       filter(ConfigsEmergeOptions.EOptionId==EmergeOptions.EmergeOptionId).all():
+               emerge_options_list.append(EmergeOptionsInfo.EOption)
+       build_dict = {}
+       build_dict['config_id'] = config_id
+       build_dict['setup_id'] = BuildJobsInfo.SetupId
+       build_dict['build_job_id'] = BuildJobsInfo.BuildJobId
+       build_dict['ebuild_id'] = EbuildsInfo.EbuildId
+       build_dict['package_id'] = EbuildsInfo.PackageId
+       build_dict['package'] = PackagesInfo.Package
+       build_dict['category'] = CategoriesInfo.Category
+       build_dict['repo'] = ReposInfo.Repo
+       build_dict['removebin'] = BuildJobsInfo.RemoveBin
+       build_dict['ebuild_version'] = EbuildsInfo.Version
+       build_dict['checksum'] = EbuildsInfo.Checksum
+       build_dict['cp'] = CategoriesInfo.Category + '/' + PackagesInfo.Package
+       build_dict['cpv'] = build_dict['cp'] + '-' + EbuildsInfo.Version
+       build_dict['build_useflags'] = uses
+       build_dict['emerge_options'] = emerge_options_list
+       return build_dict
+
+def get_category_info(session, category):
+       try:
+               CategoryInfo = session.query(Categories).filter_by(Category = category).filter_by(Active = True).one()
+       except NoResultFound as e:
+               return False
+       return CategoryInfo
+
+def get_repo_info(session, repo):
+       try:
+               RepoInfo = session.query(Repos).filter_by(Repo = repo).one()
+       except NoResultFound as e:
+               return False
+       return RepoInfo
+
+def get_package_info(session, category, package, repo):
+       CategoryInfo = get_category_info(session, category)
+       RepoInfo = get_repo_info(session, repo)
+       try:
+               PackageInfo = session.query(Packages).filter_by(CategoryId = CategoryInfo.CategoryId). \
+                       filter_by(Package = package).filter_by(RepoId = RepoInfo.RepoId).filter_by(Active = True).one()
+       except NoResultFound as e:
+               return False
+       return PackageInfo
+
+def get_ebuild_info(session, build_dict):
+       EbuildInfo = session.query(Ebuilds).filter_by(Version = build_dict['ebuild_version']).filter_by(Checksum = build_dict['checksum']).\
+               filter_by(PackageId = build_dict['package_id']).filter_by(Active = True)
+       if EbuildInfo.all() == []:
+               return None, True
+       try:
+               EbuildInfo2 = EbuildInfo.one()
+       except (MultipleResultsFound) as e:
+               return EbuildInfo.all(), True
+       return EbuildInfo2, False
+
+def get_build_job_id(session, build_dict):
+       BuildJobsIdInfo = session.query(BuildJobs.BuildJobId).filter_by(EbuildId = build_dict['ebuild_id']).filter_by(ConfigId = build_dict['config_id']).all()
+       if BuildJobsIdInfo == []:
+               return None
+       for build_job_id in BuildJobsIdInfo:
+               BuildJobsUseInfo = session.query(BuildJobsUse).filter_by(BuildJobId = build_job_id.BuildJobId).all()
+               useflagsdict = {}
+               if BuildJobsUseInfo == []:
+                       useflagsdict = None
+               else:
+                       for x in BuildJobsUseInfo:
+                               useflagsdict[x.UseId] = x.Status
+               if useflagsdict == build_dict['build_useflags']:
+                       return build_job_id.BuildJobId
+       return None
+
+def get_use_id(session, use_flag):
+       try:
+               UseIdInfo = session.query(Uses).filter_by(Flag = use_flag).one()
+       except NoResultFound as e:
+               return None
+       return UseIdInfo.UseId
+
+def get_hilight_info(session):
+       return session.query(HiLight).all()
+
+def get_error_info_list(session):
+       return session.query(ErrorsInfo).all()
+
+def add_e_info(session, emerge_info):
+       AddEmergeInfo = EmergeInfo(EmergeInfoText = emerge_info)
+       session.add(AddEmergeInfo)
+       session.flush()
+       EmergeInfoId = AddEmergeInfo.EInfoId
+       session.commit()
+       return EmergeInfoId
+
+def del_old_build_jobs(session, build_job_id):
+       session.query(BuildJobsUse).filter(BuildJobsUse.BuildJobId == build_job_id).delete()
+       session.query(BuildJobsRedo).filter(BuildJobsRedo.BuildJobId == build_job_id).delete()
+       session.query(BuildJobsEmergeOptions).filter(BuildJobsEmergeOptions.BuildJobId == build_job_id).delete()
+       session.query(BuildJobs).filter(BuildJobs.BuildJobId == build_job_id).delete()
+       session.commit()
+
+def add_new_buildlog(session, build_dict, build_log_dict):
+       build_log_id_list = session.query(BuildLogs.BuildLogId).filter_by(EbuildId = build_dict['ebuild_id']).all()
+
+       def add_new_hilight(log_id, build_log_dict):
+               for k, hilight_tmp in sorted(build_log_dict['hilight_dict'].items()):
+                       NewHiLight = BuildLogsHiLight(LogId = log_id, StartLine = hilight_tmp['startline'], EndLine = hilight_tmp['endline'], HiLightCssId = hilight_tmp['hilight_css_id'])
+                       session.add(NewHiLight)
+                       session.commit()
+
+       def build_log_id_match(build_log_id_list, build_dict, build_log_dict):
+               # Look for an existing build log with the same log hash and USE flags.
+               for build_log_id in build_log_id_list:
+                       log_hash = session.query(BuildLogs.LogHash).filter_by(BuildLogId = build_log_id[0]).one()
+                       use_list = session.query(BuildLogsUse).filter_by(BuildLogId = build_log_id[0]).all()
+                       useflagsdict = {}
+                       if use_list == []:
+                               useflagsdict = None
+                       else:
+                               for use in use_list:
+                                       useflagsdict[use.UseId] = use.Status
+                       if log_hash[0] == build_log_dict['log_hash'] and build_dict['build_useflags'] == useflagsdict:
+                               # Skip it if this config already has the build log attached.
+                               if session.query(BuildLogsConfig).filter(BuildLogsConfig.ConfigId.in_([build_dict['config_id']])).filter_by(BuildLogId = build_log_id[0]).first():
+                                       return None, True
+                               e_info_id = add_e_info(session, build_log_dict['emerge_info'])
+                               NewBuildLogConfig = BuildLogsConfig(BuildLogId = build_log_id[0], ConfigId = build_dict['config_id'], LogName = build_log_dict['logfilename'], EInfoId = e_info_id)
+                               session.add(NewBuildLogConfig)
+                               session.commit()
+                               return build_log_id[0], True
+               return None, False
+
+       def build_log_id_no_match(build_dict, build_log_dict):
+               # Add a new build log with its errors, emerge info and hilights.
+               if build_log_dict['summary_error_list'] == []:
+                       NewBuildLog = BuildLogs(EbuildId = build_dict['ebuild_id'], Fail = False, SummeryText = build_log_dict['build_error'], LogHash = build_log_dict['log_hash'])
+               else:
+                       NewBuildLog = BuildLogs(EbuildId = build_dict['ebuild_id'], Fail = True, SummeryText = build_log_dict['build_error'], LogHash = build_log_dict['log_hash'])
+               session.add(NewBuildLog)
+               session.flush()
+               build_log_id = NewBuildLog.BuildLogId
+               session.commit()
+               if build_log_dict['summary_error_list'] != []:
+                       for error in build_log_dict['summary_error_list']:
+                               NewError = BuildLogsErrors(BuildLogId = build_log_id, ErrorId = error)
+                               session.add(NewError)
+                               session.commit()
+               e_info_id = add_e_info(session, build_log_dict['emerge_info'])
+               NewBuildLogConfig = BuildLogsConfig(BuildLogId = build_log_id, ConfigId = build_dict['config_id'], LogName = build_log_dict['logfilename'], EInfoId = e_info_id)
+               session.add(NewBuildLogConfig)
+               session.flush()
+               log_id = NewBuildLogConfig.LogId
+               session.commit()
+               add_new_hilight(log_id, build_log_dict)
+               if build_dict['build_useflags'] is not None:
+                       for use_id, status in build_dict['build_useflags'].items():
+                               NewBuildLogUse = BuildLogsUse(BuildLogId = build_log_id, UseId = use_id, Status = status)
+                               session.add(NewBuildLogUse)
+                               session.flush()
+                       session.commit()
+               return build_log_id
+
+       if build_dict['build_job_id'] is None and build_log_id_list == []:
+               build_log_id = build_log_id_no_match(build_dict, build_log_dict)
+               return build_log_id
+       elif build_dict['build_job_id'] is None and build_log_id_list != []:
+               build_log_id, match = build_log_id_match(build_log_id_list, build_dict, build_log_dict)
+               if not match:
+                       build_log_id = build_log_id_no_match(build_dict, build_log_dict)
+               return build_log_id
+       elif build_dict['build_job_id'] is not None and build_log_id_list != []:
+               build_log_id, match = build_log_id_match(build_log_id_list, build_dict, build_log_dict)
+               if not match:
+                       build_log_id = build_log_id_no_match(build_dict, build_log_dict)
+                       del_old_build_jobs(session, build_dict['build_job_id'])
+               return build_log_id
+       elif build_dict['build_job_id'] is not None and build_log_id_list == []:
+               build_log_id = build_log_id_no_match(build_dict, build_log_dict)
+               del_old_build_jobs(session, build_dict['build_job_id'])
+               return build_log_id
+
+def update_fail_times(session, FailInfo):
+       NewBuildJobs = session.query(BuildJobs).filter_by(BuildJobId = FailInfo.BuildJobId).one()
+       NewBuildJobs.TimeStamp = datetime.datetime.utcnow()
+       session.commit()
+
+def get_fail_times(session, build_dict):
+       try:
+               FailInfo = session.query(BuildJobsRedo).filter_by(BuildJobId = build_dict['build_job_id']).filter_by(FailType = build_dict['type_fail']).one()
+       except NoResultFound as e:
+               return False
+       return True
+
+def add_fail_times(session, fail_querue_dict):
+       print(fail_querue_dict)
+       NewBuildJobsRedo = BuildJobsRedo(BuildJobId = fail_querue_dict['build_job_id'], FailType = fail_querue_dict['fail_type'], FailTimes = fail_querue_dict['fail_times'])
+       session.add(NewBuildJobsRedo)
+       session.commit()
+
+# Host Functions
+def update_repo_db(session, repo_list):
+       for repo in repo_list:
+               if not get_repo_info(session, repo):
+                       session.add(Repos(Repo = repo))
+                       session.commit()
+
+def update_categories_db(session, category):
+       if not get_category_info(session, category):
+               session.add(Categories(Category = category))
+               session.commit()
+
+def get_keyword_id(session, keyword):
+       try:
+               KeywordsInfo = session.query(Keywords).filter_by(Keyword = keyword).one()
+       except NoResultFound as e:
+               return None
+       return KeywordsInfo.KeywordId
+
+def add_new_ebuild_metadata_sql(session, ebuild_id, keywords, restrictions, iuse_list):
+       for restriction in restrictions:
+               # Strip USE conditional markers like "!flag?" down to the bare restriction.
+               if restriction[0] in ["!"]:
+                       restriction = restriction[1:]
+               if restriction[-1] in ["?"]:
+                       restriction = restriction[:-1]
+               if restriction != '(' and restriction != ')':
+                       try:
+                               RestrictionInfo = session.query(Restrictions).filter_by(Restriction = restriction).one()
+                       except NoResultFound as e:
+                               session.add(Restrictions(Restriction = restriction))
+                               session.commit()
+                               RestrictionInfo = session.query(Restrictions).filter_by(Restriction = restriction).one()
+                       session.add(EbuildsRestrictions(EbuildId = ebuild_id, RestrictionId = RestrictionInfo.RestrictionId))
+                       session.commit()
+       for iuse in iuse_list:
+               status = False
+               if iuse[0] in ["+"]:
+                       iuse = iuse[1:]
+                       status = True
+               elif iuse[0] in ["-"]:
+                       iuse = iuse[1:]
+               use_id = get_use_id(session, iuse)
+               if use_id is None:
+                       session.add(Uses(Flag = iuse))
+                       session.commit()
+                       use_id = get_use_id(session, iuse)
+               session.add(EbuildsIUse(EbuildId = ebuild_id, UseId = use_id, Status = status))
+               session.commit()
+       for keyword in keywords:
+               status = 'Stable'
+               if keyword[0] in ["~"]:
+                       keyword = keyword[1:]
+                       status = 'Unstable'
+               elif keyword[0] in ["-"]:
+                       keyword = keyword[1:]
+                       status = 'Negative'
+               keyword_id = get_keyword_id(session, keyword)
+               if keyword_id is None:
+                       session.add(Keywords(Keyword = keyword))
+                       session.commit()
+                       keyword_id = get_keyword_id(session, keyword)
+               session.add(EbuildsKeywords(EbuildId = ebuild_id, KeywordId = keyword_id, Status = status))
+               session.commit()
+
+def add_new_ebuild_sql(session, packageDict):
+       ebuild_id_list = []
+       for k, v in packageDict.items():
+               session.add(Ebuilds(PackageId = v['package_id'], Version = v['ebuild_version'], Checksum = v['checksum'], Active = True))
+               session.flush()
+               try:
+                       EbuildInfo = session.query(Ebuilds).filter_by(Version = v['ebuild_version']).filter_by(Checksum = v['checksum']).\
+                               filter_by(PackageId = v['package_id']).filter_by(Active = True).one()
+               except (MultipleResultsFound) as e:
+                       for x in session.query(Ebuilds).filter_by(Version = v['ebuild_version']).filter_by(Checksum = v['checksum']).\
+                               filter_by(PackageId = v['package_id']).filter_by(Active = True).all():
+                               print(x.EbuildId)
+                               # FIXME
+                               #x.Checksum = 0
+                               #x.Active = False
+                               #session.commit()
+                       sys.exit()
+               session.add(EbuildsMetadata(EbuildId = EbuildInfo.EbuildId, Revision = v['ebuild_version_revision_tree']))
+               session.commit()
+               ebuild_id_list.append(EbuildInfo.EbuildId)
+               restrictions = []
+               keywords = []
+               iuse = []
+               for i in v['ebuild_version_metadata_tree'][4].split():
+                       restrictions.append(i)
+               for i in v['ebuild_version_metadata_tree'][8].split():
+                       keywords.append(i)
+               for i in v['ebuild_version_metadata_tree'][10].split():
+                       iuse.append(i)
+               add_new_ebuild_metadata_sql(session, EbuildInfo.EbuildId, keywords, restrictions, iuse)
+       return ebuild_id_list
+
+def get_ebuild_id_list(session, package_id):
+       ebuild_id_list = []
+       for EbuildInfo in session.query(Ebuilds).filter_by(PackageId = package_id).filter_by(Active = True).all():
+               ebuild_id_list.append(EbuildInfo.EbuildId)
+       return ebuild_id_list
+
+def get_build_job_all(session, ebuild_id):
+       return session.query(BuildJobs).filter_by(EbuildId = ebuild_id).all()
+
+def add_old_ebuild(session, old_ebuild_list):
+       for ebuild_id in old_ebuild_list:
+               EbuildInfo = session.query(Ebuilds).filter_by(EbuildId = ebuild_id).one()
+               EbuildInfo.Active = False
+               session.commit()
+               build_job_id_list = get_build_job_all(session, ebuild_id)
+               if build_job_id_list != []:
+                       for build_job in build_job_id_list:
+                               del_old_build_jobs(session, build_job.BuildJobId)
+
+def add_new_package_sql(session, cp, repo):
+       element = cp.split('/')
+       categories = element[0]
+       package = element[1]
+       RepoInfo = get_repo_info(session, repo)
+       repo_id = RepoInfo.RepoId
+       CategoriesInfo = get_category_info(session, categories)
+       category_id = CategoriesInfo.CategoryId
+       session.add(Packages(Package = package, CategoryId = category_id, RepoId = repo_id, Checksum = '0', Active = True))
+       session.commit()
+       PackageInfo = get_package_info(session, categories, package, repo)
+       return PackageInfo.PackageId
+
+def get_package_metadata_sql(session, package_id):
+       try:
+               PackagesMetadataInfo = session.query(PackagesMetadata).filter_by(PackageId = package_id).one()
+       except NoResultFound as e:
+               return False
+       return PackagesMetadataInfo
+
+def update_email_info(session, email):
+       try:
+               EmailInfo = session.query(Emails).filter_by(Email = email).one()
+       except NoResultFound as e:
+               session.add(Emails(Email = email))
+               session.commit()
+               EmailInfo = session.query(Emails).filter_by(Email = email).one()
+       return EmailInfo
+
+def update_package_email_info(session, email_id, package_id):
+       try:
+               PackagesEmailInfo = session.query(PackagesEmails).filter_by(EmailId = email_id).filter_by(PackageId = package_id).one()
+       except NoResultFound as e:
+               session.add(PackagesEmails(EmailId = email_id, PackageId = package_id))
+               session.commit()
+               PackagesEmailInfo = session.query(PackagesEmails).filter_by(EmailId = email_id).filter_by(PackageId = package_id).one()
+       return PackagesEmailInfo
+
+def update_package_metadata(session, package_metadataDict):
+       for k, v in package_metadataDict.items():
+               try:
+                       PackagesMetadataInfo = session.query(PackagesMetadata).filter_by(PackageId = k).one()
+               except NoResultFound as e:
+                       session.add(PackagesMetadata(PackageId = k, Checksum = v['metadata_xml_checksum']))
+                       session.commit()
+               else:
+                       PackagesMetadataInfo.Checksum = v['metadata_xml_checksum']
+                       session.commit()
+               if v['metadata_xml_email']:
+                       for email in v['metadata_xml_email']:
+                               EmailInfo = update_email_info(session, email)
+                               PackagesEmailInfo = update_package_email_info(session, EmailInfo.EmailId, k)
+
+def update_manifest_sql(session, package_id, manifest_checksum_tree):
+       PackagesInfo = session.query(Packages).filter_by(PackageId = package_id).one()
+       PackagesInfo.Checksum = manifest_checksum_tree
+       session.commit()
+
+def get_package_info_from_package_id(session, package_id):
+       PackageInfo = session.query(Packages).filter_by(PackageId = package_id).one()
+       CategoryInfo = session.query(Categories).filter_by(CategoryId = PackageInfo.CategoryId).one()
+       RepoInfo = session.query(Repos).filter_by(RepoId = PackageInfo.RepoId).one()
+       return PackageInfo, CategoryInfo, RepoInfo
+
+def add_new_build_job(session, ebuild_id, setup_id, use_flagsDict, config_id):
+       NewBuildJobs = BuildJobs(EbuildId = ebuild_id, SetupId = setup_id, ConfigId = config_id, Status = 'Waiting', BuildNow = False, RemoveBin = True)
+       session.add(NewBuildJobs)
+       session.flush()
+       build_job_id = NewBuildJobs.BuildJobId
+       session.commit()
+       for k, v in use_flagsDict.items():
+               use_id = get_use_id(session, k)
+               session.add(BuildJobsUse(BuildJobId = build_job_id, UseId = use_id, Status = v))
+               session.commit()
+
+def get_ebuild_checksums(session, package_id, ebuild_version):
+       ebuild_checksum_list = []
+       try:
+               EbuildInfo = session.query(Ebuilds).filter_by(PackageId = package_id).filter_by(Version = ebuild_version).filter_by(Active = True).one()
+       except NoResultFound as e:
+               return None, False
+       except MultipleResultsFound as e:
+               EbuildInfo2 = session.query(Ebuilds).filter_by(PackageId = package_id).filter_by(Version = ebuild_version).filter_by(Active = True).all()
+               for Ebuild in EbuildInfo2:
+                       print("ebuild version checksum")
+                       print(ebuild_version)
+                       print(Ebuild.Version)
+                       print(Ebuild.Checksum)
+                       ebuild_checksum_list.append(Ebuild.Checksum)
+               return ebuild_checksum_list, True
+       return EbuildInfo.Checksum, False
+
+def get_ebuild_id_db(session, checksum, package_id):
+       try:
+               EbuildInfos = session.query(Ebuilds).filter_by(PackageId = package_id).filter_by(Checksum = checksum).one()
+       except NoResultFound as e:
+               return None, True
+       except MultipleResultsFound as e:
+               EbuildInfos = session.query(Ebuilds).filter_by(PackageId = package_id).filter_by(Checksum = checksum).all()
+               ebuilds_id = []
+               for EbuildInfo in EbuildInfos:
+                       ebuilds_id.append(EbuildInfo.EbuildId)
+               return ebuilds_id, True
+       return EbuildInfos.EbuildId, False
+
+def check_host_updatedb(session):
+       try:
+               JobsInfo = session.query(Jobs).filter_by(Status = 'Done').filter_by(JobType = 'esync').one()
+       except NoResultFound as e:
+               return True
+       return False

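As a rough idea of how the helpers in sqlquerys.py are meant to be driven, here is a minimal
sketch. It only takes as given NewConnection from ConnectionManager.py, read_config_settings
from readconf.py and the query functions shown above; the surrounding wiring is illustrative,
not part of this commit:

    # Illustrative sketch: open a session and fetch the next waiting build job.
    from sqlalchemy.orm import sessionmaker
    from tbc.ConnectionManager import NewConnection
    from tbc.readconf import read_config_settings
    from tbc.sqlquerys import get_config_id, get_packages_to_build

    tbc_settings_dict = read_config_settings()
    Session = sessionmaker(bind=NewConnection(tbc_settings_dict))
    session = Session()
    config_id = get_config_id(session, tbc_settings_dict['tbc_config'], tbc_settings_dict['hostname'])
    build_dict = get_packages_to_build(session, config_id)
    if build_dict is None:
        print("no waiting build jobs for this setup")
    else:
        print("next job: %s::%s (build job %s)" % (build_dict['cpv'], build_dict['repo'], build_dict['build_job_id']))
    session.close()
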
diff --git a/tbc/pym/sync.py b/tbc/pym/sync.py
new file mode 100644
index 0000000..a75562f
--- /dev/null
+++ b/tbc/pym/sync.py
@@ -0,0 +1,73 @@
+# Copyright 1998-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+from __future__ import print_function
+import portage
+import os
+import errno
+import sys
+import time
+
+from _emerge.main import emerge_main
+from tbc.readconf import get_conf_settings, read_config_settings
+from tbc.sqlquerys import get_config_id, add_tbc_logs, get_config_all_info, get_configmetadata_info
+from tbc.updatedb import update_db_main
+
+def sync_tree(session):
+       tbc_settings_dict = read_config_settings()
+       _hostname = tbc_settings_dict['hostname']
+       _config = tbc_settings_dict['tbc_config']
+       config_id = get_config_id(session, _config, _hostname)
+       host_config = _hostname + "/" + _config
+       default_config_root = "/var/cache/tbc/" + tbc_settings_dict['tbc_gitreponame'] + "/" + host_config + "/"
+       mysettings = portage.config(config_root = default_config_root)
+       GuestBusy = True
+       log_msg = "Waiting for the guests to be idle"
+       add_tbc_logs(session, log_msg, "info", config_id)
+       guestid_list = []
+       for config in get_config_all_info(session):
+               if not config.Host:
+                       guestid_list.append(config.ConfigId)
+       while GuestBusy:
+               Status_list = []
+               for guest_id in guestid_list:
+                       ConfigMetadata = get_configmetadata_info(session, guest_id)
+                       Status_list.append(ConfigMetadata.Status)
+               # Note: 'Runing' (sic) must match the status value stored in ConfigsMetaData.
+               if 'Runing' not in Status_list:
+                       GuestBusy = False
+               time.sleep(30)
+       try:
+               os.remove(mysettings['PORTDIR'] + "/profiles/config/parent")
+               os.rmdir(mysettings['PORTDIR'] + "/profiles/config")
+       except OSError:
+               pass
+       tmpcmdline = []
+       tmpcmdline.append("--sync")
+       tmpcmdline.append("--quiet")
+       tmpcmdline.append("--config-root=" + default_config_root)
+       log_msg = "Emerge --sync"
+       add_tbc_logs(session, log_msg, "info", config_id)
+       fail_sync = emerge_main(args=tmpcmdline)
+       if fail_sync:
+               log_msg = "Emerge --sync failed!"
+               add_tbc_logs(session, log_msg, "error", config_id)
+               return False
+       else:
+               # Need to add a config dir so we can use profiles/base for reading the tree.
+               # We may already have the dir in the local repo when we sync.
+               try:
+                       os.mkdir(mysettings['PORTDIR'] + "/profiles/config", 0o777)
+                       with open(mysettings['PORTDIR'] + "/profiles/config/parent", "w") as f:
+                               f.write("../base\n")
+               except (IOError, OSError):
+                       pass
+               log_msg = "Emerge --sync ... Done."
+               add_tbc_logs(session, log_msg, "info", config_id)
+       result = update_db_main(session, config_id)
+       if result:
+               return True
+       else:
+               log_msg = "Updatedb failed"
+               add_tbc_logs(session, log_msg, "error", config_id)
+               return False

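A corresponding sketch for kicking off a sync run; the session setup follows the same
assumption as above and only sync_tree(session) from this file is taken as given:

    # Illustrative sketch: sync the tree and let sync_tree() run updatedb afterwards.
    from sqlalchemy.orm import sessionmaker
    from tbc.ConnectionManager import NewConnection
    from tbc.readconf import read_config_settings
    from tbc.sync import sync_tree

    session = sessionmaker(bind=NewConnection(read_config_settings()))()
    if sync_tree(session):
        print("emerge --sync and updatedb finished")
    else:
        print("sync or updatedb failed, see the tbc logs table")
    session.close()
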
diff --git a/tbc/pym/text.py b/tbc/pym/text.py
new file mode 100644
index 0000000..c78c432
--- /dev/null
+++ b/tbc/pym/text.py
@@ -0,0 +1,49 @@
+# Copyright 1998-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+from __future__ import print_function
+import sys
+import re
+import os
+import errno
+from portage.util import grablines
+
+def get_file_text(filename):
+       # Return the file contents as one string.
+       try:
+               textfile = open(filename, encoding='utf-8')
+       except (IOError, OSError):
+               return "No file", filename
+       text = ""
+       for line in textfile:
+               text += line
+       textfile.close()
+       return text
+
+def get_ebuild_cvs_revision(filename):
+       """Return the CVS revision from the ebuild header"""
+       try:
+               ebuildfile = open(filename, encoding='utf-8')
+       except (IOError, OSError):
+               return "No Ebuild file there"
+       dataLines = ebuildfile.readlines()
+       ebuildfile.close()
+       # The third line is expected to hold the CVS $Header$ line;
+       # its fourth whitespace-separated field is the revision.
+       line2 = dataLines[2]
+       field = line2.split(" ")
+       try:
+               cvs_revision = field[3]
+       except IndexError:
+               cvs_revision = ''
+       return cvs_revision
+
+def get_log_text_dict(filename):
+       """Return the log contents as a dict keyed by line number"""
+       logfile_dict = {}
+       index = 1
+       for text_line in grablines(filename):
+               logfile_dict[index] = text_line
+               index = index + 1
+       return logfile_dict, index - 1

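The log helper returns the log keyed by line number plus the line count, presumably the same
numbering the hilight StartLine/EndLine columns refer to. A small usage sketch (the log path
is made up for illustration):

    # Illustrative sketch: load a build log into the line-number keyed dict.
    from tbc.text import get_log_text_dict

    logfile_dict, line_count = get_log_text_dict("/var/tmp/build.log")  # example path only
    print("log has %d lines" % line_count)
    if line_count:
        print("first line: " + logfile_dict[1].rstrip())
        print("last line: " + logfile_dict[line_count].rstrip())
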
diff --git a/tbc/pym/updatedb.py b/tbc/pym/updatedb.py
new file mode 100644
index 0000000..12b94c5
--- /dev/null
+++ b/tbc/pym/updatedb.py
@@ -0,0 +1,135 @@
+# Copyright 1998-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+from __future__ import print_function
+import sys
+import os
+import multiprocessing
+import time
+import portage
+from sqlalchemy.orm import scoped_session, sessionmaker
+from tbc.ConnectionManager import NewConnection
+from tbc.sqlquerys import add_tbc_logs, get_package_info, update_repo_db, \
+       update_categories_db, get_configmetadata_info, get_config_all_info, add_new_build_job, \
+       get_config_info
+from tbc.check_setup import check_make_conf
+from tbc.package import tbc_package
+# Get the options from the config file set in tbc.readconf
+from tbc.readconf import get_conf_settings
+
+def init_portage_settings(session, config_id, tbc_settings_dict):
+       # Check the config setup
+       check_make_conf(session, config_id, tbc_settings_dict)
+       log_msg = "Check configs done"
+       add_tbc_logs(session, log_msg, "info", config_id)
+
+       # Get the default config from the configs table (default_config=1)
+       host_config = tbc_settings_dict['hostname'] + "/" + tbc_settings_dict['tbc_config']
+       default_config_root = "/var/cache/tbc/" + tbc_settings_dict['tbc_gitreponame'] + "/" + host_config + "/"
+
+       # Set config_root (PORTAGE_CONFIGROOT) to default_config_root
+       mysettings = portage.config(config_root = default_config_root)
+       log_msg = "Setting default config to: %s" % (host_config,)
+       add_tbc_logs(session, log_msg, "info", config_id)
+       return mysettings
+
+def update_cpv_db_pool(mysettings, myportdb, cp, repo, tbc_settings_dict, config_id):
+       # Each pool worker opens its own scoped session; the parent session is not shared.
+       session_factory = sessionmaker(bind=NewConnection(tbc_settings_dict))
+       Session = scoped_session(session_factory)
+       session2 = Session()
+       init_package = tbc_package(session2, mysettings, myportdb, config_id, tbc_settings_dict)
+
+       # Split the cp into category and package
+       element = cp.split('/')
+       categories = element[0]
+       package = element[1]
+
+       # Update the categories table
+       update_categories_db(session2, categories)
+
+       # Check if we have the cp in the packages table
+       PackagesInfo = get_package_info(session2, categories, package, repo)
+       if PackagesInfo:
+               # Update the package and its ebuilds
+               init_package.update_package_db(PackagesInfo.PackageId)
+       else:
+               # Add the new package with its ebuilds
+               init_package.add_new_package_db(cp, repo)
+       Session.remove()
+
+def update_cpv_db(session, config_id, tbc_settings_dict):
+       GuestBusy = True
+       log_msg = "Waiting for the guests to be idle"
+       add_tbc_logs(session, log_msg, "info", config_id)
+       guestid_list = []
+       for config in get_config_all_info(session):
+               if not config.Host:
+                       guestid_list.append(config.ConfigId)
+       while GuestBusy:
+               Status_list = []
+               for guest_id in guestid_list:
+                       ConfigMetadata = get_configmetadata_info(session, guest_id)
+                       Status_list.append(ConfigMetadata.Status)
+               # Note: 'Runing' (sic) must match the status value stored in ConfigsMetaData.
+               if 'Runing' not in Status_list:
+                       break
+               time.sleep(30)
+
+       mysettings = init_portage_settings(session, config_id, tbc_settings_dict)
+       log_msg = "Checking categories, packages and ebuilds"
+       add_tbc_logs(session, log_msg, "info", config_id)
+       new_build_jobs_list = []
+
+       # Setup portdb
+       myportdb = portage.portdbapi(mysettings=mysettings)
+       repo_list = ()
+       repos_trees_list = []
+
+       # Use all cores when multiprocessing
+       pool_cores = multiprocessing.cpu_count()
+       pool = multiprocessing.Pool(processes=pool_cores)
+
+       # Run the update checks and update the packages if needed
+
+       # Get the repos and update the repos db
+       repo_list = myportdb.getRepositories()
+       update_repo_db(session, repo_list)
+
+       # The pool workers open their own db connections, so this session
+       # is not used while the pool is running.
+
+       # Get the rootdirs for the repos
+       repo_trees_list = myportdb.porttrees
+       for repo_dir in repo_trees_list:
+               repo = myportdb.getRepositoryName(repo_dir)
+               repo_dir_list = []
+               repo_dir_list.append(repo_dir)
+
+               # Get the package list from the repo
+               package_list_tree = myportdb.cp_all(trees=repo_dir_list)
+
+               # Run the package update for every package in the list in a multiprocessing pool
+               for cp in sorted(package_list_tree):
+                       pool.apply_async(update_cpv_db_pool, (mysettings, myportdb, cp, repo, tbc_settings_dict, config_id,))
+                       # use this when debugging
+                       #update_cpv_db_pool(mysettings, myportdb, cp, repo, tbc_settings_dict, config_id)
+
+       # Close and join the multiprocessing pool
+       pool.close()
+       pool.join()
+       log_msg = "Checking categories, packages and ebuilds ... done"
+       add_tbc_logs(session, log_msg, "info", config_id)
+
+def update_db_main(session, config_id):
+       # Main
+
+       # Logging
+       reader = get_conf_settings()
+       tbc_settings_dict = reader.read_tbc_settings_all()
+       log_msg = "Update db started."
+       add_tbc_logs(session, log_msg, "info", config_id)
+
+       # Update the cpv db
+       update_cpv_db(session, config_id, tbc_settings_dict)
+       log_msg = "Update db ... Done."
+       add_tbc_logs(session, log_msg, "info", config_id)
+       return True

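And a last sketch for running the db update on its own, outside of a sync; again only the
functions shown in this commit are taken as given, the driver itself is illustrative:

    # Illustrative sketch: update the package/ebuild tables for this host's config.
    from sqlalchemy.orm import sessionmaker
    from tbc.ConnectionManager import NewConnection
    from tbc.readconf import read_config_settings
    from tbc.sqlquerys import get_config_id
    from tbc.updatedb import update_db_main

    tbc_settings_dict = read_config_settings()
    session = sessionmaker(bind=NewConnection(tbc_settings_dict))()
    config_id = get_config_id(session, tbc_settings_dict['tbc_config'], tbc_settings_dict['hostname'])
    update_db_main(session, config_id)
    session.close()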