OLD | NEW |
1 # Copyright 1998-2011 Gentoo Foundation | 1 # Copyright 1998-2011 Gentoo Foundation |
2 # Distributed under the terms of the GNU General Public License v2 | 2 # Distributed under the terms of the GNU General Public License v2 |
3 | 3 |
4 __all__ = [ | 4 __all__ = [ |
5 "vardbapi", "vartree", "dblink"] + \ | 5 "vardbapi", "vartree", "dblink"] + \ |
6 ["write_contents", "tar_contents"] | 6 ["write_contents", "tar_contents"] |
7 | 7 |
8 import portage | 8 import portage |
9 portage.proxy.lazyimport.lazyimport(globals(), | 9 portage.proxy.lazyimport.lazyimport(globals(), |
10 'portage.checksum:_perform_md5_merge@perform_md5', | 10 'portage.checksum:_perform_md5_merge@perform_md5', |
11 'portage.data:portage_gid,portage_uid,secpass', | 11 'portage.data:portage_gid,portage_uid,secpass', |
12 'portage.dbapi.dep_expand:dep_expand', | 12 'portage.dbapi.dep_expand:dep_expand', |
13 'portage.dbapi._MergeProcess:MergeProcess', | 13 'portage.dbapi._MergeProcess:MergeProcess', |
14 'portage.dep:dep_getkey,isjustname,match_from_list,' + \ | 14 'portage.dep:dep_getkey,isjustname,match_from_list,' + \ |
15 'use_reduce,_slot_re', | 15 'use_reduce,_slot_re', |
16 » 'portage.elog:elog_process,_preload_elog_modules', | 16 » 'portage.elog:collect_ebuild_messages,collect_messages,' + \ |
| 17 » » 'elog_process,_merge_logentries,_preload_elog_modules', |
17 'portage.locks:lockdir,unlockdir', | 18 'portage.locks:lockdir,unlockdir', |
18 'portage.output:bold,colorize', | 19 'portage.output:bold,colorize', |
19 'portage.package.ebuild.doebuild:doebuild_environment,' + \ | 20 'portage.package.ebuild.doebuild:doebuild_environment,' + \ |
20 '_spawn_phase', | 21 '_spawn_phase', |
21 'portage.package.ebuild.prepare_build_dirs:prepare_build_dirs', | 22 'portage.package.ebuild.prepare_build_dirs:prepare_build_dirs', |
22 'portage.update:fixdbentries', | 23 'portage.update:fixdbentries', |
23 'portage.util:apply_secpass_permissions,ConfigProtect,ensure_dirs,' + \ | 24 'portage.util:apply_secpass_permissions,ConfigProtect,ensure_dirs,' + \ |
24         'writemsg,writemsg_level,write_atomic,atomic_ofstream,writedict,' + \ | 25         'writemsg,writemsg_level,write_atomic,atomic_ofstream,writedict,' + \ |
25 'grabdict,normalize_path,new_protect_filename', | 26 'grabdict,normalize_path,new_protect_filename', |
26 'portage.util.digraph:digraph', | 27 'portage.util.digraph:digraph', |
(...skipping 1166 matching lines...)
1193 r'(?P<sym>(sym) (.+) -> (.+) ((\d+)|(?P<oldsym>(' + \ | 1194 r'(?P<sym>(sym) (.+) -> (.+) ((\d+)|(?P<oldsym>(' + \ |
1194                 r'\(\d+, \d+L, \d+L, \d+, \d+, \d+, \d+L, \d+, (\d+), \d+\)))))' + \ | 1195                 r'\(\d+, \d+L, \d+L, \d+, \d+, \d+, \d+L, \d+, (\d+), \d+\)))))' + \ |
1195 r')$' | 1196 r')$' |
1196 ) | 1197 ) |
1197 | 1198 |
1198 # When looping over files for merge/unmerge, temporarily yield to the | 1199 # When looping over files for merge/unmerge, temporarily yield to the |
1199 # scheduler each time this many files are processed. | 1200 # scheduler each time this many files are processed. |
1200 _file_merge_yield_interval = 20 | 1201 _file_merge_yield_interval = 20 |
1201 | 1202 |
1202 def __init__(self, cat, pkg, myroot=None, settings=None, treetype=None, | 1203 def __init__(self, cat, pkg, myroot=None, settings=None, treetype=None, |
1203 » » vartree=None, blockers=None, scheduler=None): | 1204 » » vartree=None, blockers=None, scheduler=None, pipe=None): |
1204 """ | 1205 """ |
1205 Creates a DBlink object for a given CPV. | 1206 Creates a DBlink object for a given CPV. |
1206 The given CPV may not be present in the database already. | 1207 The given CPV may not be present in the database already. |
1207 | 1208 |
1208 @param cat: Category | 1209 @param cat: Category |
1209 @type cat: String | 1210 @type cat: String |
1210 @param pkg: Package (PV) | 1211 @param pkg: Package (PV) |
1211 @type pkg: String | 1212 @type pkg: String |
1212 @param myroot: ignored, settings['ROOT'] is used instead | 1213 @param myroot: ignored, settings['ROOT'] is used instead |
1213 @type myroot: String (Path) | 1214 @type myroot: String (Path) |
(...skipping 38 matching lines...)
1252 | 1253 |
1253 self.myroot=myroot | 1254 self.myroot=myroot |
1254 self._installed_instance = None | 1255 self._installed_instance = None |
1255 self.contentscache = None | 1256 self.contentscache = None |
1256 self._contents_inodes = None | 1257 self._contents_inodes = None |
1257 self._contents_basenames = None | 1258 self._contents_basenames = None |
1258 self._linkmap_broken = False | 1259 self._linkmap_broken = False |
1259 self._md5_merge_map = {} | 1260 self._md5_merge_map = {} |
1260 self._hash_key = (self.myroot, self.mycpv) | 1261 self._hash_key = (self.myroot, self.mycpv) |
1261 self._protect_obj = None | 1262 self._protect_obj = None |
| 1263 self._pipe = pipe |
1262 | 1264 |
1263 def __hash__(self): | 1265 def __hash__(self): |
1264 return hash(self._hash_key) | 1266 return hash(self._hash_key) |
1265 | 1267 |
1266 def __eq__(self, other): | 1268 def __eq__(self, other): |
1267 return isinstance(other, dblink) and \ | 1269 return isinstance(other, dblink) and \ |
1268 self._hash_key == other._hash_key | 1270 self._hash_key == other._hash_key |
1269 | 1271 |
1270 def _get_protect_obj(self): | 1272 def _get_protect_obj(self): |
1271 | 1273 |
(...skipping 224 matching lines...)
1496 if others_in_slot is None: | 1498 if others_in_slot is None: |
1497                         slot = self.vartree.dbapi.aux_get(self.mycpv, ["SLOT"])[0] | 1499                         slot = self.vartree.dbapi.aux_get(self.mycpv, ["SLOT"])[0] |
1498 slot_matches = self.vartree.dbapi.match( | 1500 slot_matches = self.vartree.dbapi.match( |
1499                                 "%s:%s" % (portage.cpv_getkey(self.mycpv), slot)) | 1501                                 "%s:%s" % (portage.cpv_getkey(self.mycpv), slot)) |
1500 others_in_slot = [] | 1502 others_in_slot = [] |
1501 for cur_cpv in slot_matches: | 1503 for cur_cpv in slot_matches: |
1502 if cur_cpv == self.mycpv: | 1504 if cur_cpv == self.mycpv: |
1503 continue | 1505 continue |
1504                                 others_in_slot.append(dblink(self.cat, catsplit(cur_cpv)[1], | 1506                                 others_in_slot.append(dblink(self.cat, catsplit(cur_cpv)[1], |
1505                                         settings=self.settings, vartree=self.vartree, | 1507                                         settings=self.settings, vartree=self.vartree, |
1506 » » » » » treetype="vartree")) | 1508 » » » » » treetype="vartree", pipe=self._pipe)) |
1507 | 1509 |
1508 retval = self._security_check([self] + others_in_slot) | 1510 retval = self._security_check([self] + others_in_slot) |
1509 if retval: | 1511 if retval: |
1510 return retval | 1512 return retval |
1511 | 1513 |
1512 contents = self.getcontents() | 1514 contents = self.getcontents() |
1513                 # Now, don't assume that the name of the ebuild is the same as the | 1515                 # Now, don't assume that the name of the ebuild is the same as the |
1514 # name of the dir; the package may have been moved. | 1516 # name of the dir; the package may have been moved. |
1515 myebuildpath = None | 1517 myebuildpath = None |
1516 failures = 0 | 1518 failures = 0 |
(...skipping 143 matching lines...)
1660                                                         "sourced and the eclasses " | 1662                                                         "sourced and the eclasses " |
1661                                                         "from the current portage tree will be used " | 1663                                                         "from the current portage tree will be used " |
1662                                                         "when necessary. Removal of " | 1664                                                         "when necessary. Removal of " |
1663                                                         "the ebuild file will cause the " | 1665                                                         "the ebuild file will cause the " |
1664                                                         "pkg_prerm() and pkg_postrm() removal " | 1666                                                         "pkg_prerm() and pkg_postrm() removal " |
1665                                                         "phases to be skipped entirely.") | 1667                                                         "phases to be skipped entirely.") |
1666                                                 msg_lines.extend(wrap(msg, 72)) | 1668                                                 msg_lines.extend(wrap(msg, 72)) |
1667 | 1669 |
1668                                                 self._eerror(ebuild_phase, msg_lines) | 1670                                                 self._eerror(ebuild_phase, msg_lines) |
1669 | 1671 |
1670 » » » » » » # process logs created during pre/postrm | 1672 » » » » » self._elog_process() |
1671 » » » » » » elog_process(self.mycpv, self.settings, | |
1672 » » » » » » » phasefilter=('prerm', 'postrm')) | |
1673 | 1673 |
1674 if retval == os.EX_OK: | 1674 if retval == os.EX_OK: |
1675                                         # myebuildpath might be None, so ensure | 1675                                         # myebuildpath might be None, so ensure |
1676                                         # it has a sane value for the clean phase, | 1676                                         # it has a sane value for the clean phase, |
1677                                         # even though it won't really be sourced. | 1677                                         # even though it won't really be sourced. |
1678                                         myebuildpath = os.path.join(self.dbdir, | 1678                                         myebuildpath = os.path.join(self.dbdir, |
1679 self.pkg + ".ebuild") | 1679 self.pkg + ".ebuild") |
1680                                         doebuild_environment(myebuildpath, "cleanrm", | 1680                                         doebuild_environment(myebuildpath, "cleanrm", |
1681                                                 settings=self.settings, db=self.vartree.dbapi) | 1681                                                 settings=self.settings, db=self.vartree.dbapi) |
1682 if scheduler is None: | 1682 if scheduler is None: |
(...skipping 76 matching lines...)
1759 if others_in_slot is None: | 1759 if others_in_slot is None: |
1760 others_in_slot = [] | 1760 others_in_slot = [] |
1761                         slot = self.vartree.dbapi.aux_get(self.mycpv, ["SLOT"])[0] | 1761                         slot = self.vartree.dbapi.aux_get(self.mycpv, ["SLOT"])[0] |
1762 slot_matches = self.vartree.dbapi.match( | 1762 slot_matches = self.vartree.dbapi.match( |
1763                                 "%s:%s" % (portage.cpv_getkey(self.mycpv), slot)) | 1763                                 "%s:%s" % (portage.cpv_getkey(self.mycpv), slot)) |
1764 for cur_cpv in slot_matches: | 1764 for cur_cpv in slot_matches: |
1765 if cur_cpv == self.mycpv: | 1765 if cur_cpv == self.mycpv: |
1766 continue | 1766 continue |
1767                                 others_in_slot.append(dblink(self.cat, catsplit(cur_cpv)[1], | 1767                                 others_in_slot.append(dblink(self.cat, catsplit(cur_cpv)[1], |
1768 settings=self.settings, | 1768 settings=self.settings, |
1769 » » » » » vartree=self.vartree, treetype="vartree")) | 1769 » » » » » vartree=self.vartree, treetype="vartree", pipe=self._pipe)) |
1770 | 1770 |
1771 dest_root = self._eroot | 1771 dest_root = self._eroot |
1772 dest_root_len = len(dest_root) - 1 | 1772 dest_root_len = len(dest_root) - 1 |
1773 | 1773 |
1774 conf_mem_file = os.path.join(dest_root, CONFIG_MEMORY_FILE) | 1774 conf_mem_file = os.path.join(dest_root, CONFIG_MEMORY_FILE) |
1775 cfgfiledict = grabdict(conf_mem_file) | 1775 cfgfiledict = grabdict(conf_mem_file) |
1776 stale_confmem = [] | 1776 stale_confmem = [] |
1777 | 1777 |
1778 unmerge_orphans = "unmerge-orphans" in self.settings.features | 1778 unmerge_orphans = "unmerge-orphans" in self.settings.features |
1779 calc_prelink = "prelink-checksums" in self.settings.features | 1779 calc_prelink = "prelink-checksums" in self.settings.features |
(...skipping 999 matching lines...)
2779 | 2779 |
2780 def _eerror(self, phase, lines): | 2780 def _eerror(self, phase, lines): |
2781 from portage.elog.messages import eerror as _eerror | 2781 from portage.elog.messages import eerror as _eerror |
2782 if self._scheduler is None: | 2782 if self._scheduler is None: |
2783 for l in lines: | 2783 for l in lines: |
2784 _eerror(l, phase=phase, key=self.settings.mycpv) | 2784 _eerror(l, phase=phase, key=self.settings.mycpv) |
2785 else: | 2785 else: |
2786 self._scheduler.dblinkElog(self, | 2786 self._scheduler.dblinkElog(self, |
2787 phase, _eerror, lines) | 2787 phase, _eerror, lines) |
2788 | 2788 |
2789 » def _elog_subprocess(self, funcname, phase, lines): | 2789 » def _elog_process(self): |
2790 » » """ | 2790 » » cpv = self.mycpv |
2791 » » Subprocesses call this in order to create elog messages in | 2791 » » if self._pipe is None: |
2792 » » $T, for collection by the main process. | 2792 » » » elog_process(cpv, self.settings) |
2793 » » """ | 2793 » » else: |
2794 » » cmd = "source %s/isolated-functions.sh ; " % \ | 2794 » » » logdir = os.path.join(self.settings["T"], "logging") |
2795 » » » portage._shell_quote(self.settings["PORTAGE_BIN_PATH"]) | 2795 » » » ebuild_logentries = collect_ebuild_messages(logdir) |
2796 » » for line in lines: | 2796 » » » py_logentries = collect_messages(key=cpv).get(cpv, {}) |
2797 » » » cmd += "%s %s ; " % (funcname, portage._shell_quote(line)) | 2797 » » » logentries = _merge_logentries(py_logentries, ebuild_logentries) |
2798 » » env = self.settings.environ() | 2798 » » » funcnames = { |
2799 » » env['EBUILD_PHASE'] = phase | 2799 » » » » "INFO": "einfo", |
2800 » » subprocess.call([portage.const.BASH_BINARY, "-c", cmd], | 2800 » » » » "LOG": "elog", |
2801 » » » env=env) | 2801 » » » » "WARN": "ewarn", |
| 2802 » » » » "QA": "eqawarn", |
| 2803 » » » » "ERROR": "eerror" |
| 2804 » » » } |
| 2805 » » » buffer = [] |
| 2806 » » » for phase, messages in logentries.items(): |
| 2807 » » » » for key, lines in messages: |
| 2808 » » » » » funcname = funcnames[key] |
| 2809 » » » » » if isinstance(lines, basestring): |
| 2810 » » » » » » lines = [lines] |
| 2811 » » » » » for line in lines: |
| 2812 » » » » » » fields = (funcname, phase, cpv, line.rstrip('\n')) |
| 2813 » » » » » » buffer.append(' '.join(fields)) |
| 2814 » » » » » » buffer.append('\n') |
| 2815 » » » if buffer: |
| 2816 » » » » os.write(self._pipe, ''.join(buffer)) |
2802 | 2817 |
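Reviewer note: the new _elog_process() above serializes each collected elog message through the pipe as a single line of space-separated fields: elog function name, phase, cpv, and the message text. A minimal sketch of how the receiving side could decode such records (hypothetical helper name; the real reader lives in portage.dbapi._MergeProcess and may differ):

    def _parse_elog_records(data):
        # Each record is "funcname phase cpv message"; the message itself
        # may contain spaces, so split at most three times.
        records = []
        for line in data.splitlines():
            if not line.strip():
                continue
            funcname, phase, cpv, msg = line.split(' ', 3)
            records.append((funcname, phase, cpv, msg))
        return records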
2803 def treewalk(self, srcroot, destroot, inforoot, myebuild, cleanup=0, | 2818 def treewalk(self, srcroot, destroot, inforoot, myebuild, cleanup=0, |
2804 mydbapi=None, prev_mtimes=None): | 2819 mydbapi=None, prev_mtimes=None): |
2805 """ | 2820 """ |
2806 | 2821 |
2807 This function does the following: | 2822 This function does the following: |
2808 | 2823 |
2809 calls self._preserve_libs if FEATURES=preserve-libs | 2824 calls self._preserve_libs if FEATURES=preserve-libs |
2810 calls self._collision_protect if FEATURES=collision-protect | 2825 calls self._collision_protect if FEATURES=collision-protect |
2811 calls doebuild(mydo=pkg_preinst) | 2826 calls doebuild(mydo=pkg_preinst) |
2812 Merges the package to the livefs | 2827 Merges the package to the livefs |
2813 unmerges old version (if required) | 2828 unmerges old version (if required) |
2814 calls doebuild(mydo=pkg_postinst) | 2829 calls doebuild(mydo=pkg_postinst) |
2815 calls env_update | 2830 calls env_update |
2816 calls elog_process | |
2817 | 2831 |
2818 @param srcroot: Typically this is ${D} | 2832 @param srcroot: Typically this is ${D} |
2819 @type srcroot: String (Path) | 2833 @type srcroot: String (Path) |
2820 @param destroot: ignored, self.settings['ROOT'] is used instead | 2834 @param destroot: ignored, self.settings['ROOT'] is used instead |
2821 @type destroot: String (Path) | 2835 @type destroot: String (Path) |
2822 @param inforoot: root of the vardb entry ? | 2836 @param inforoot: root of the vardb entry ? |
2823 @type inforoot: String (Path) | 2837 @type inforoot: String (Path) |
2824 @param myebuild: path to the ebuild that we are processing | 2838 @param myebuild: path to the ebuild that we are processing |
2825 @type myebuild: String (Path) | 2839 @type myebuild: String (Path) |
2826 @param mydbapi: dbapi which is handed to doebuild. | 2840 @param mydbapi: dbapi which is handed to doebuild. |
(...skipping 89 matching lines...)
2916 slot_matches.append(self.mycpv) | 2930 slot_matches.append(self.mycpv) |
2917 | 2931 |
2918 others_in_slot = [] | 2932 others_in_slot = [] |
2919 from portage import config | 2933 from portage import config |
2920 for cur_cpv in slot_matches: | 2934 for cur_cpv in slot_matches: |
2921                         # Clone the config in case one of these has to be unmerged since | 2935                         # Clone the config in case one of these has to be unmerged since |
2922                         # we need it to have private ${T} etc... for things like elog. | 2936                         # we need it to have private ${T} etc... for things like elog. |
2923                         others_in_slot.append(dblink(self.cat, catsplit(cur_cpv)[1], | 2937                         others_in_slot.append(dblink(self.cat, catsplit(cur_cpv)[1], |
2924 settings=config(clone=self.settings), | 2938 settings=config(clone=self.settings), |
2925 vartree=self.vartree, treetype="vartree", | 2939 vartree=self.vartree, treetype="vartree", |
2926 » » » » scheduler=self._scheduler)) | 2940 » » » » scheduler=self._scheduler, pipe=self._pipe)) |
2927 | 2941 |
2928 retval = self._security_check(others_in_slot) | 2942 retval = self._security_check(others_in_slot) |
2929 if retval: | 2943 if retval: |
2930 return retval | 2944 return retval |
2931 | 2945 |
2932 self.settings["REPLACING_VERSIONS"] = " ".join( | 2946 self.settings["REPLACING_VERSIONS"] = " ".join( |
2933                         [portage.versions.cpv_getversion(other.mycpv) for other in others_in_slot] ) | 2947                         [portage.versions.cpv_getversion(other.mycpv) for other in others_in_slot] ) |
2934 self.settings.backup_changes("REPLACING_VERSIONS") | 2948 self.settings.backup_changes("REPLACING_VERSIONS") |
2935 | 2949 |
2936 if slot_matches: | 2950 if slot_matches: |
(...skipping 127 matching lines...)
3064 "/etc/make.conf if you do not want to " | 3078 "/etc/make.conf if you do not want to " |
3065                                         "abort in cases like this.") % other_dblink.mycpv, | 3079                                         "abort in cases like this.") % other_dblink.mycpv, |
3066 wrap_width)) | 3080 wrap_width)) |
3067 eerror(msg) | 3081 eerror(msg) |
3068 if installed_files: | 3082 if installed_files: |
3069 return 1 | 3083 return 1 |
3070 | 3084 |
3071 # check for package collisions | 3085 # check for package collisions |
3072 blockers = None | 3086 blockers = None |
3073 if self._blockers is not None: | 3087 if self._blockers is not None: |
3074 # This is only supposed to be called when | |
3075 # the vdb is locked, like it is here. | |
3076 blockers = self._blockers() | 3088 blockers = self._blockers() |
3077 if blockers is None: | 3089 if blockers is None: |
3078 blockers = [] | 3090 blockers = [] |
3079 collisions, plib_collisions = \ | 3091 collisions, plib_collisions = \ |
3080 self._collision_protect(srcroot, destroot, | 3092 self._collision_protect(srcroot, destroot, |
3081 others_in_slot + blockers, myfilelist + mylinklist) | 3093 others_in_slot + blockers, myfilelist + mylinklist) |
3082 | 3094 |
3083                 # Make sure the ebuild environment is initialized and that ${T}/elog | 3095                 # Make sure the ebuild environment is initialized and that ${T}/elog |
3084 # exists for logging of collision-protect eerror messages. | 3096 # exists for logging of collision-protect eerror messages. |
3085 if myebuild is None: | 3097 if myebuild is None: |
(...skipping 151 matching lines...)
3237 | 3249 |
3238 # Always behave like --noconfmem is enabled for downgrades | 3250 # Always behave like --noconfmem is enabled for downgrades |
3239 # so that people who don't know about this option are less | 3251 # so that people who don't know about this option are less |
3240 # likely to get confused when doing upgrade/downgrade cycles. | 3252 # likely to get confused when doing upgrade/downgrade cycles. |
3241 pv_split = catpkgsplit(self.mycpv)[1:] | 3253 pv_split = catpkgsplit(self.mycpv)[1:] |
3242 for other in others_in_slot: | 3254 for other in others_in_slot: |
3243 if pkgcmp(pv_split, catpkgsplit(other.mycpv)[1:]) < 0: | 3255 if pkgcmp(pv_split, catpkgsplit(other.mycpv)[1:]) < 0: |
3244 cfgfiledict["IGNORE"] = 1 | 3256 cfgfiledict["IGNORE"] = 1 |
3245 break | 3257 break |
3246 | 3258 |
3247 » » merge_task = MergeProcess( | 3259 » » rval = self._merge_contents(srcroot, destroot, cfgfiledict, |
3248 » » » background=(self.settings.get('PORTAGE_BACKGROUND') == '1'), | 3260 » » » conf_mem_file) |
3249 » » » cfgfiledict=cfgfiledict, conf_mem_file=conf_mem_file, dblink=self, | |
3250 » » » destroot=destroot, | |
3251 » » » logfile=self.settings.get('PORTAGE_LOG_FILE'), | |
3252 » » » scheduler=(scheduler or PollScheduler().sched_iface), | |
3253 » » » srcroot=srcroot) | |
3254 | |
3255 » » merge_task.start() | |
3256 » » rval = merge_task.wait() | |
3257 if rval != os.EX_OK: | 3261 if rval != os.EX_OK: |
3258 return rval | 3262 return rval |
3259 | 3263 |
3260                 # These caches are populated during collision-protect and the data | 3264                 # These caches are populated during collision-protect and the data |
3261 # they contain is now invalid. It's very important to invalidate | 3265 # they contain is now invalid. It's very important to invalidate |
3262 # the contents_inodes cache so that FEATURES=unmerge-orphans | 3266 # the contents_inodes cache so that FEATURES=unmerge-orphans |
3263 # doesn't unmerge anything that belongs to this package that has | 3267 # doesn't unmerge anything that belongs to this package that has |
3264 # just been merged. | 3268 # just been merged. |
3265 for dblnk in others_in_slot: | 3269 for dblnk in others_in_slot: |
3266 dblnk._clear_contents_cache() | 3270 dblnk._clear_contents_cache() |
(...skipping 180 matching lines...)
3447 while True: | 3451 while True: |
3448 x += 1 | 3452 x += 1 |
3449 backup_p = p + '.backup.' + str(x).rjust(4, '0') | 3453 backup_p = p + '.backup.' + str(x).rjust(4, '0') |
3450 try: | 3454 try: |
3451 os.lstat(backup_p) | 3455 os.lstat(backup_p) |
3452 except OSError: | 3456 except OSError: |
3453 break | 3457 break |
3454 | 3458 |
3455 return backup_p | 3459 return backup_p |
3456 | 3460 |
3457 » def _merge_process(self, srcroot, destroot, cfgfiledict, conf_mem_file): | 3461 » def _merge_contents(self, srcroot, destroot, cfgfiledict, conf_mem_file): |
3458 | 3462 |
3459 cfgfiledict_orig = cfgfiledict.copy() | 3463 cfgfiledict_orig = cfgfiledict.copy() |
3460 | 3464 |
3461                 # open CONTENTS file (possibly overwriting old one) for recording | 3465                 # open CONTENTS file (possibly overwriting old one) for recording |
3462 outfile = codecs.open(_unicode_encode( | 3466 outfile = codecs.open(_unicode_encode( |
3463 os.path.join(self.dbtmpdir, 'CONTENTS'), | 3467 os.path.join(self.dbtmpdir, 'CONTENTS'), |
3464 encoding=_encodings['fs'], errors='strict'), | 3468 encoding=_encodings['fs'], errors='strict'), |
3465 mode='w', encoding=_encodings['repo.content'], | 3469 mode='w', encoding=_encodings['repo.content'], |
3466 errors='backslashreplace') | 3470 errors='backslashreplace') |
3467 | 3471 |
(...skipping 208 matching lines...)
3676 else: | 3680 else: |
3677                                                 # a non-directory and non-symlink-to-directory. Won't work for us. Move out of the way. | 3681                                                 # a non-directory and non-symlink-to-directory. Won't work for us. Move out of the way. |
3678                                                 backup_dest = self._new_backup_path(mydest) | 3682                                                 backup_dest = self._new_backup_path(mydest) |
3679 msg = [] | 3683 msg = [] |
3680 msg.append("") | 3684 msg.append("") |
3681                                                 msg.append(_("Installation of a directory is blocked by a file:")) | 3685                                                 msg.append(_("Installation of a directory is blocked by a file:")) |
3682 msg.append(" '%s'" % mydest) | 3686 msg.append(" '%s'" % mydest) |
3683                                                 msg.append(_("This file will be renamed to a different name:")) | 3687                                                 msg.append(_("This file will be renamed to a different name:")) |
3684                                                 msg.append(" '%s'" % backup_dest) | 3688                                                 msg.append(" '%s'" % backup_dest) |
3685 msg.append("") | 3689 msg.append("") |
3686 » » » » » » self._elog_subprocess("eerror", "preinst", msg) | 3690 » » » » » » self._eerror("preinst", msg) |
3687 if movefile(mydest, backup_dest, | 3691 if movefile(mydest, backup_dest, |
3688                                                         mysettings=self.settings, | 3692                                                         mysettings=self.settings, |
3689                                                         encoding=_encodings['merge']) is None: | 3693                                                         encoding=_encodings['merge']) is None: |
3690 return 1 | 3694 return 1 |
3691                                                 showMessage(_("bak %s %s.backup\n") % (mydest, mydest), | 3695                                                 showMessage(_("bak %s %s.backup\n") % (mydest, mydest), |
3692                                                         level=logging.ERROR, noiselevel=-1) | 3696                                                         level=logging.ERROR, noiselevel=-1) |
3693 #now create our directory | 3697 #now create our directory |
3694 try: | 3698 try: |
3695                                                 if self.settings.selinux_enabled(): | 3699                                                 if self.settings.selinux_enabled(): |
3696                                                         _selinux_merge.mkdir(mydest, mysrc) | 3700                                                         _selinux_merge.mkdir(mydest, mysrc) |
(...skipping 57 matching lines...)
3754 if stat.S_ISDIR(mydmode): | 3758 if stat.S_ISDIR(mydmode): |
3755                                         # install of destination is blocked by an existing directory with the same name | 3759                                         # install of destination is blocked by an existing directory with the same name |
3756                                         newdest = self._new_backup_path(mydest) | 3760                                         newdest = self._new_backup_path(mydest) |
3757 msg = [] | 3761 msg = [] |
3758 msg.append("") | 3762 msg.append("") |
3759                                         msg.append(_("Installation of a regular file is blocked by a directory:")) | 3763                                         msg.append(_("Installation of a regular file is blocked by a directory:")) |
3760 msg.append(" '%s'" % mydest) | 3764 msg.append(" '%s'" % mydest) |
3761                                         msg.append(_("This file will be merged with a different name:")) | 3765                                         msg.append(_("This file will be merged with a different name:")) |
3762 msg.append(" '%s'" % newdest) | 3766 msg.append(" '%s'" % newdest) |
3763 msg.append("") | 3767 msg.append("") |
3764 » » » » » » self._elog_subprocess("eerror", "preinst", msg) | 3768 » » » » » » self._eerror("preinst", msg) |
3765 mydest = newdest | 3769 mydest = newdest |
3766 | 3770 |
3767                                 elif stat.S_ISREG(mydmode) or (stat.S_ISLNK(mydmode) and os.path.exists(mydest) and stat.S_ISREG(os.stat(mydest)[stat.ST_MODE])): | 3771                                 elif stat.S_ISREG(mydmode) or (stat.S_ISLNK(mydmode) and os.path.exists(mydest) and stat.S_ISREG(os.stat(mydest)[stat.ST_MODE])): |
3768                                         # install of destination is blocked by an existing regular file, | 3772                                         # install of destination is blocked by an existing regular file, |
3769                                         # or by a symlink to an existing regular file; | 3773                                         # or by a symlink to an existing regular file; |
3770                                         # now, config file management may come into play. | 3774                                         # now, config file management may come into play. |
3771                                         # we only need to tweak mydest if cfg file management is in play. | 3775                                         # we only need to tweak mydest if cfg file management is in play. |
3772 if protected: | 3776 if protected: |
3773                                                 # we have a protection path; enable config file management. | 3777                                                 # we have a protection path; enable config file management. |
3774 cfgprot = 0 | 3778 cfgprot = 0 |
(...skipping 163 matching lines...)
3938 background=False, | 3942 background=False, |
3939 commands=[phase], | 3943 commands=[phase], |
3940                                         scheduler=PollScheduler().sched_iface, | 3944                                         scheduler=PollScheduler().sched_iface, |
3941 settings=self.settings) | 3945 settings=self.settings) |
3942 ebuild_phase.start() | 3946 ebuild_phase.start() |
3943 ebuild_phase.wait() | 3947 ebuild_phase.wait() |
3944 else: | 3948 else: |
3945 self._scheduler.dblinkEbuildPhase( | 3949 self._scheduler.dblinkEbuildPhase( |
3946 self, mydbapi, myebuild, phase) | 3950 self, mydbapi, myebuild, phase) |
3947 | 3951 |
3948 » » » » elog_process(self.mycpv, self.settings) | 3952 » » » » self._elog_process() |
3949 | 3953 |
3950 if 'noclean' not in self.settings.features and \ | 3954 if 'noclean' not in self.settings.features and \ |
3951 (retval == os.EX_OK or \ | 3955 (retval == os.EX_OK or \ |
3952 'fail-clean' in self.settings.features): | 3956 'fail-clean' in self.settings.features): |
3953 if myebuild is None: | 3957 if myebuild is None: |
3954                                         myebuild = os.path.join(inforoot, self.pkg + ".ebuild") | 3958                                         myebuild = os.path.join(inforoot, self.pkg + ".ebuild") |
3955 | 3959 |
3956 doebuild_environment(myebuild, "clean", | 3960 doebuild_environment(myebuild, "clean", |
3957                                         settings=self.settings, db=mydbapi) | 3961                                         settings=self.settings, db=mydbapi) |
3958 if self._scheduler is None: | 3962 if self._scheduler is None: |
(...skipping 79 matching lines...)
4038 """ | 4042 """ |
4039 @param myroot: ignored, settings['EROOT'] is used instead | 4043 @param myroot: ignored, settings['EROOT'] is used instead |
4040 """ | 4044 """ |
4041 myroot = None | 4045 myroot = None |
4042 if settings is None: | 4046 if settings is None: |
4043 raise TypeError("settings argument is required") | 4047 raise TypeError("settings argument is required") |
4044 if not os.access(settings['EROOT'], os.W_OK): | 4048 if not os.access(settings['EROOT'], os.W_OK): |
4045         writemsg(_("Permission denied: access('%s', W_OK)\n") % settings['EROOT'], | 4049         writemsg(_("Permission denied: access('%s', W_OK)\n") % settings['EROOT'], |
4046 noiselevel=-1) | 4050 noiselevel=-1) |
4047 return errno.EACCES | 4051 return errno.EACCES |
4048 » mylink = dblink(mycat, mypkg, settings=settings, treetype=mytree, | 4052 » background = (settings.get('PORTAGE_BACKGROUND') == '1') |
4049 » » vartree=vartree, blockers=blockers, scheduler=scheduler) | 4053 » merge_task = MergeProcess( |
4050 » return mylink.merge(pkgloc, infloc, myebuild=myebuild, | 4054 » » dblink=dblink, mycat=mycat, mypkg=mypkg, settings=settings, |
4051 » » mydbapi=mydbapi, prev_mtimes=prev_mtimes) | 4055 » » treetype=mytree, vartree=vartree, scheduler=scheduler, |
| 4056 » » background=background, blockers=blockers, pkgloc=pkgloc, |
| 4057 » » infloc=infloc, myebuild=myebuild, mydbapi=mydbapi, |
| 4058 » » prev_mtimes=prev_mtimes, logfile=settings.get('PORTAGE_LOG_FILE')) |
| 4059 » merge_task.start() |
| 4060 » retcode = merge_task.wait() |
| 4061 » return retcode |
4052 | 4062 |
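Reviewer note: merge() now delegates the whole operation to MergeProcess instead of constructing a dblink and calling dblink.merge() directly; together with the new pipe= argument to dblink, this lets the merge run in a separate process while elog output is streamed back to the parent. A minimal, hypothetical sketch of that fork-plus-pipe pattern (illustrative only, not portage's actual MergeProcess implementation; names are made up):

    import os

    def run_with_log_pipe(work, handle_log):
        # work(fd) runs in the child and may write log records to fd;
        # handle_log(data) runs in the parent on everything the child wrote.
        pr, pw = os.pipe()
        pid = os.fork()
        if pid == 0:
            # child: do the work with the write end of the pipe
            os.close(pr)
            try:
                rc = work(pw)
            finally:
                os.close(pw)
            os._exit(rc)
        # parent: drain the read end, then reap the child
        os.close(pw)
        chunks = []
        while True:
            chunk = os.read(pr, 4096)
            if not chunk:
                break
            chunks.append(chunk)
        os.close(pr)
        handle_log(b''.join(chunks))
        return os.waitpid(pid, 0)[1]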
4053 def unmerge(cat, pkg, myroot=None, settings=None, | 4063 def unmerge(cat, pkg, myroot=None, settings=None, |
4054 mytrimworld=None, vartree=None, | 4064 mytrimworld=None, vartree=None, |
4055 ldpath_mtimes=None, scheduler=None): | 4065 ldpath_mtimes=None, scheduler=None): |
4056 """ | 4066 """ |
4057 @param myroot: ignored, settings['EROOT'] is used instead | 4067 @param myroot: ignored, settings['EROOT'] is used instead |
4058 @param mytrimworld: ignored | 4068 @param mytrimworld: ignored |
4059 """ | 4069 """ |
4060 myroot = None | 4070 myroot = None |
4061 if settings is None: | 4071 if settings is None: |
(...skipping 120 matching lines...)
4182                                         encoding=object.__getattribute__(os, '_encoding'), | 4192                                         encoding=object.__getattribute__(os, '_encoding'), |
4183 errors='strict'), 'rb') | 4193 errors='strict'), 'rb') |
4184 try: | 4194 try: |
4185 tar.addfile(tarinfo, f) | 4195 tar.addfile(tarinfo, f) |
4186 finally: | 4196 finally: |
4187 f.close() | 4197 f.close() |
4188 else: | 4198 else: |
4189 tar.addfile(tarinfo) | 4199 tar.addfile(tarinfo) |
4190 if onProgress: | 4200 if onProgress: |
4191 onProgress(maxval, curval) | 4201 onProgress(maxval, curval) |