OLD | NEW |
1 # Copyright 1998-2010 Gentoo Foundation | 1 # Copyright 1998-2010 Gentoo Foundation |
2 # Distributed under the terms of the GNU General Public License v2 | 2 # Distributed under the terms of the GNU General Public License v2 |
3 | 3 |
4 __all__ = ["bindbapi", "binarytree"] | 4 __all__ = ["bindbapi", "binarytree"] |
5 | 5 |
6 import portage | 6 import portage |
7 portage.proxy.lazyimport.lazyimport(globals(), | 7 portage.proxy.lazyimport.lazyimport(globals(), |
8 'portage.checksum:hashfunc_map,perform_multiple_checksums,verify_all', | 8 'portage.checksum:hashfunc_map,perform_multiple_checksums,verify_all', |
9 'portage.dbapi.dep_expand:dep_expand', | 9 'portage.dbapi.dep_expand:dep_expand', |
10 'portage.dep:dep_getkey,isjustname,match_from_list', | 10 'portage.dep:dep_getkey,isjustname,match_from_list', |
(...skipping 206 matching lines...)
217 if True: | 217 if True: |
218 self.root = root | 218 self.root = root |
219 #self.pkgdir=settings["PKGDIR"] | 219 #self.pkgdir=settings["PKGDIR"] |
220 self.pkgdir = normalize_path(pkgdir) | 220 self.pkgdir = normalize_path(pkgdir) |
221 self.dbapi = bindbapi(self, settings=settings) | 221 self.dbapi = bindbapi(self, settings=settings) |
222 self.update_ents = self.dbapi.update_ents | 222 self.update_ents = self.dbapi.update_ents |
223 self.move_slot_ent = self.dbapi.move_slot_ent | 223 self.move_slot_ent = self.dbapi.move_slot_ent |
224 self.populated = 0 | 224 self.populated = 0 |
225 self.tree = {} | 225 self.tree = {} |
226 self._remote_has_index = False | 226 self._remote_has_index = False |
227 self._remote_base_uri = None | |
228 self._remotepkgs = None # remote metadata indexed by cpv | 227 self._remotepkgs = None # remote metadata indexed by cpv |
229         self.__remotepkgs = {}   # indexed by tbz2 name (deprecated) | 228         self.__remotepkgs = {}   # indexed by tbz2 name (deprecated) |
230 self.invalids = [] | 229 self.invalids = [] |
231 self.settings = settings | 230 self.settings = settings |
232 self._pkg_paths = {} | 231 self._pkg_paths = {} |
233 self._populating = False | 232 self._populating = False |
234 self._all_directory = os.path.isdir( | 233 self._all_directory = os.path.isdir( |
235 os.path.join(self.pkgdir, "All")) | 234 os.path.join(self.pkgdir, "All")) |
236 self._pkgindex_version = 0 | 235 self._pkgindex_version = 0 |
237 self._pkgindex_hashes = ["MD5","SHA1"] | 236 self._pkgindex_hashes = ["MD5","SHA1"] |
238         self._pkgindex_file = os.path.join(self.pkgdir, "Packages") | 237         self._pkgindex_file = os.path.join(self.pkgdir, "Packages") |
239 self._pkgindex_keys = self.dbapi._aux_cache_keys.copy() | 238 self._pkgindex_keys = self.dbapi._aux_cache_keys.copy() |
240 self._pkgindex_keys.update(["CPV", "MTIME", "SIZE"]) | 239 self._pkgindex_keys.update(["CPV", "MTIME", "SIZE"]) |
241 self._pkgindex_aux_keys = \ | 240 self._pkgindex_aux_keys = \ |
242                 ["BUILD_TIME", "CHOST", "DEPEND", "DESCRIPTION", "EAPI", | 241                 ["BUILD_TIME", "CHOST", "DEPEND", "DESCRIPTION", "EAPI", |
243                 "IUSE", "KEYWORDS", "LICENSE", "PDEPEND", "PROPERTIES", | 242                 "IUSE", "KEYWORDS", "LICENSE", "PDEPEND", "PROPERTIES", |
244                 "PROVIDE", "RDEPEND", "repository", "SLOT", "USE", "DEFINED_PHASES", | 243                 "PROVIDE", "RDEPEND", "repository", "SLOT", "USE", "DEFINED_PHASES", |
245                 "REQUIRED_USE"] | 244                 "REQUIRED_USE", "BASE_URI"] |
246 self._pkgindex_aux_keys = list(self._pkgindex_aux_keys) | 245 self._pkgindex_aux_keys = list(self._pkgindex_aux_keys) |
247 self._pkgindex_use_evaluated_keys = \ | 246 self._pkgindex_use_evaluated_keys = \ |
248 ("LICENSE", "RDEPEND", "DEPEND", | 247 ("LICENSE", "RDEPEND", "DEPEND", |
249 "PDEPEND", "PROPERTIES", "PROVIDE") | 248 "PDEPEND", "PROPERTIES", "PROVIDE") |
250 self._pkgindex_header_keys = set([ | 249 self._pkgindex_header_keys = set([ |
251 "ACCEPT_KEYWORDS", "ACCEPT_LICENSE", | 250 "ACCEPT_KEYWORDS", "ACCEPT_LICENSE", |
252 "ACCEPT_PROPERTIES", "CBUILD", | 251 "ACCEPT_PROPERTIES", "CBUILD", |
253                 "CHOST", "CONFIG_PROTECT", "CONFIG_PROTECT_MASK", "FEATURES", | 252                 "CHOST", "CONFIG_PROTECT", "CONFIG_PROTECT_MASK", "FEATURES", |
254                 "GENTOO_MIRRORS", "INSTALL_MASK", "SYNC", "USE"]) | 253                 "GENTOO_MIRRORS", "INSTALL_MASK", "SYNC", "USE"]) |
255 self._pkgindex_default_pkg_data = { | 254 self._pkgindex_default_pkg_data = { |
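Note on the hunk above: the NEW side drops the tree-wide _remote_base_uri attribute and instead adds "BASE_URI" to _pkgindex_aux_keys, so each remote package entry can record which binhost it came from. A minimal standalone sketch of that idea (the data and helper below are illustrative, not the Portage API):

    # Illustrative only: each remote entry carries its own BASE_URI, so
    # packages from different binhosts can coexist in one cpv -> metadata map.
    remotepkgs = {
        "app-misc/foo-1.0": {"SIZE": "12345", "BASE_URI": "http://binhost-a.example/packages"},
        "app-misc/bar-2.0": {"SIZE": "67890", "BASE_URI": "http://binhost-b.example/packages"},
    }

    def base_uri_for(cpv, default=None):
        """Return the binhost base URI recorded for a package, if any."""
        return remotepkgs.get(cpv, {}).get("BASE_URI", default)

    print(base_uri_for("app-misc/foo-1.0"))  # http://binhost-a.example/packages
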
(...skipping 465 matching lines...)
721                 pkgindex.packages.extend(iter(metadata.values())) | 720                 pkgindex.packages.extend(iter(metadata.values())) |
722 self._update_pkgindex_header(pkgindex.header) | 721 self._update_pkgindex_header(pkgindex.header) |
723 f = atomic_ofstream(self._pkgindex_file) | 722 f = atomic_ofstream(self._pkgindex_file) |
724 pkgindex.write(f) | 723 pkgindex.write(f) |
725 f.close() | 724 f.close() |
726 | 725 |
727 if getbinpkgs and not self.settings["PORTAGE_BINHOST"]: | 726 if getbinpkgs and not self.settings["PORTAGE_BINHOST"]: |
728             writemsg(_("!!! PORTAGE_BINHOST unset, but use is requested.\n"), | 727             writemsg(_("!!! PORTAGE_BINHOST unset, but use is requested.\n"), |
729                 noiselevel=-1) | 728                 noiselevel=-1) |
730 | 729 |
731         if getbinpkgs and 'PORTAGE_BINHOST' in self.settings: | 730         if not getbinpkgs or 'PORTAGE_BINHOST' not in self.settings: |
732             base_url = self.settings["PORTAGE_BINHOST"] | 731             self.populated=1 |
 | 732             return |
 | 733         self._remotepkgs = {} |
 | 734         self.__remotepkgs = {} |
 | 735         for base_url in self.settings["PORTAGE_BINHOST"].split(): |
733 parsed_url = urlparse(base_url) | 736 parsed_url = urlparse(base_url) |
734 host = parsed_url.netloc | 737 host = parsed_url.netloc |
735 port = parsed_url.port | 738 port = parsed_url.port |
736 user = None | 739 user = None |
737 passwd = None | 740 passwd = None |
738 user_passwd = "" | 741 user_passwd = "" |
739 if "@" in host: | 742 if "@" in host: |
740 user, host = host.split("@", 1) | 743 user, host = host.split("@", 1) |
741 user_passwd = user + "@" | 744 user_passwd = user + "@" |
742 if ":" in user: | 745 if ":" in user: |
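In the NEW control flow above, PORTAGE_BINHOST is treated as a space-separated list and each entry is handled in its own loop iteration; credentials embedded in the URI are split out of the netloc before the host is contacted. A hedged, self-contained sketch of that parsing (standard-library urlparse only; the sample value is made up):

    from urllib.parse import urlparse

    # Illustrative only: walk a space-separated list of binhost URIs and
    # separate optional user:password@ credentials from the host part.
    PORTAGE_BINHOST = "http://user:secret@host-a.example/pkgs ftp://host-b.example/pkgs"

    for base_url in PORTAGE_BINHOST.split():
        parsed_url = urlparse(base_url)
        host = parsed_url.netloc
        user = passwd = None
        user_passwd = ""
        if "@" in host:
            user, host = host.split("@", 1)
            user_passwd = user + "@"
            if ":" in user:
                user, passwd = user.split(":", 1)
        print(base_url, "->", host, user, passwd)
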
(...skipping 107 matching lines...)
850 f = atomic_ofstream(pkgindex_file) | 853 f = atomic_ofstream(pkgindex_file) |
851 pkgindex.write(f) | 854 pkgindex.write(f) |
852 f.close() | 855 f.close() |
853 except (IOError, PortageException): | 856 except (IOError, PortageException): |
854                         if os.access(os.path.dirname(pkgindex_file), os.W_OK): | 857                         if os.access(os.path.dirname(pkgindex_file), os.W_OK): |
855 raise | 858 raise |
856                         # The current user doesn't have permission to cache the | 859                         # The current user doesn't have permission to cache the |
857 # file, but that's alright. | 860 # file, but that's alright. |
858 if pkgindex: | 861 if pkgindex: |
859                 # Organize remote package list as a cpv -> metadata map. | 862                 # Organize remote package list as a cpv -> metadata map. |
860                 self._remotepkgs = _pkgindex_cpv_map_latest_build(pkgindex) | 863                 remotepkgs = _pkgindex_cpv_map_latest_build(pkgindex) |
 | 864                 self._remotepkgs.update(remotepkgs) |
861 self._remote_has_index = True | 865 self._remote_has_index = True |
862                 self._remote_base_uri = pkgindex.header.get("URI", base_url) | 866                 remote_base_uri = pkgindex.header.get("URI", base_url) |
863                 self.__remotepkgs = {} | 867                 for cpv in remotepkgs: |
864                 for cpv in self._remotepkgs: | |
865 self.dbapi.cpv_inject(cpv) | 868 self.dbapi.cpv_inject(cpv) |
866 self.populated = 1 | |
867 if True: | 869 if True: |
868                     # Remote package instances override local package | 870                     # Remote package instances override local package |
869 # if they are not identical. | 871 # if they are not identical. |
870                     hash_names = ["SIZE"] + self._pkgindex_hashes | 872                     hash_names = ["SIZE"] + self._pkgindex_hashes |
871                     for cpv, local_metadata in metadata.items(): | 873                     for cpv, local_metadata in metadata.items(): |
872                         remote_metadata = self._remotepkgs.get(cpv) | 874                         remote_metadata = self._remotepkgs.get(cpv) |
873 if remote_metadata is None: | 875 if remote_metadata is None: |
874 continue | 876 continue |
 | 877                         remote_metadata["BASE_URI"] = remote_base_uri |
875                         # Use digests to compare identity. | 878                         # Use digests to compare identity. |
876 identical = True | 879 identical = True |
877 for hash_name in hash_names: | 880 for hash_name in hash_names: |
878                             local_value = local_metadata.get(hash_name) | 881                             local_value = local_metadata.get(hash_name) |
879 if local_value is None: | 882 if local_value is None: |
880 continue | 883 continue |
881                             remote_value = remote_metadata.get(hash_name) | 884                             remote_value = remote_metadata.get(hash_name) |
882 if remote_value is None: | 885 if remote_value is None: |
883 continue | 886 continue |
884                             if local_value != remote_value: | 887                             if local_value != remote_value: |
885                                 identical = False | 888                                 identical = False |
886 break | 889 break |
887 if identical: | 890 if identical: |
888                             del self._remotepkgs[cpv] | 891                             del self._remotepkgs[cpv] |
889 else: | 892 else: |
890                             # Override the local package in the aux_get cache. | 893                             # Override the local package in the aux_get cache. |
891                             self.dbapi._aux_cache[cpv] = remote_metadata | 894                             self.dbapi._aux_cache[cpv] = remote_metadata |
892 else: | 895 else: |
893                     # Local package instances override remote instances. | 896                     # Local package instances override remote instances. |
894 for cpv in metadata: | 897 for cpv in metadata: |
895 self._remotepkgs.pop(cpv, None) | 898 self._remotepkgs.pop(cpv, None) |
896                 return | 899                 continue |
897             self._remotepkgs = {} | |
898 try: | 900 try: |
899                 chunk_size = long(self.settings["PORTAGE_BINHOST_CHUNKSIZE"]) | 901                 chunk_size = long(self.settings["PORTAGE_BINHOST_CHUNKSIZE"]) |
900 if chunk_size < 8: | 902 if chunk_size < 8: |
901 chunk_size = 8 | 903 chunk_size = 8 |
902 except (ValueError, KeyError): | 904 except (ValueError, KeyError): |
903 chunk_size = 3000 | 905 chunk_size = 3000 |
904 writemsg_stdout("\n") | 906 writemsg_stdout("\n") |
905 writemsg_stdout( | 907 writemsg_stdout( |
906                 colorize("GOOD", _("Fetching bininfo from ")) + \ | 908                 colorize("GOOD", _("Fetching bininfo from ")) + \ |
907                 re.sub(r'//(.+):.+@(.+)/', r'//\1:*password*@\2/', base_url) + "\n") | 909                 re.sub(r'//(.+):.+@(.+)/', r'//\1:*password*@\2/', base_url) + "\n") |
908             self.__remotepkgs = portage.getbinpkg.dir_get_metadata( | 910             remotepkgs = portage.getbinpkg.dir_get_metadata( |
909                 self.settings["PORTAGE_BINHOST"], chunk_size=chunk_size) | 911                 base_url, chunk_size=chunk_size) |
 | 912             self.__remotepkgs.update(remotepkgs) |
910 #writemsg(green(" -- DONE!\n\n")) | 913 #writemsg(green(" -- DONE!\n\n")) |
911 | 914 |
912             for mypkg in list(self.__remotepkgs): | 915             for mypkg in list(remotepkgs): |
913 if "CATEGORY" not in self.__remotepkgs[mypkg]: | 916 if "CATEGORY" not in self.__remotepkgs[mypkg]: |
914 #old-style or corrupt package | 917 #old-style or corrupt package |
915                     writemsg(_("!!! Invalid remote binary package: %s\n") % mypkg, | 918                     writemsg(_("!!! Invalid remote binary package: %s\n") % mypkg, |
916                         noiselevel=-1) | 919                         noiselevel=-1) |
917 del self.__remotepkgs[mypkg] | 920 del self.__remotepkgs[mypkg] |
918 continue | 921 continue |
919                 mycat = self.__remotepkgs[mypkg]["CATEGORY"].strip() | 922                 mycat = self.__remotepkgs[mypkg]["CATEGORY"].strip() |
920 fullpkg = mycat+"/"+mypkg[:-5] | 923 fullpkg = mycat+"/"+mypkg[:-5] |
921 | 924 |
922 if fullpkg in metadata: | 925 if fullpkg in metadata: |
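The index branch above merges each remote Packages index into self._remotepkgs, stamps every entry with the binhost's BASE_URI, and then discards remote entries whose SIZE and digests match a local package so the local copy wins. A rough standalone sketch of that identity check, with made-up data:

    # Illustrative only: keep the remote entry only when it differs from the
    # local package; missing digests are skipped rather than treated as mismatches.
    hash_names = ["SIZE", "MD5", "SHA1"]

    def is_identical(local_metadata, remote_metadata):
        for name in hash_names:
            local_value = local_metadata.get(name)
            remote_value = remote_metadata.get(name)
            if local_value is None or remote_value is None:
                continue
            if local_value != remote_value:
                return False
        return True

    local = {"SIZE": "100", "MD5": "aa"}
    remote = {"SIZE": "100", "MD5": "aa", "BASE_URI": "http://binhost.example/pkgs"}
    print(is_identical(local, remote))  # True -> the remote entry would be dropped
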
(...skipping 303 matching lines...)
1226             writemsg(_("Resuming download of this tbz2, but it is possible that it is corrupt.\n"), | 1229             writemsg(_("Resuming download of this tbz2, but it is possible that it is corrupt.\n"), |
1227 noiselevel=-1) | 1230 noiselevel=-1) |
1228 | 1231 |
1229 mydest = os.path.dirname(self.getname(pkgname)) | 1232 mydest = os.path.dirname(self.getname(pkgname)) |
1230 self._ensure_dir(mydest) | 1233 self._ensure_dir(mydest) |
1231         # urljoin doesn't work correctly with unrecognized protocols like sftp | 1234         # urljoin doesn't work correctly with unrecognized protocols like sftp |
1232 if self._remote_has_index: | 1235 if self._remote_has_index: |
1233 rel_url = self._remotepkgs[pkgname].get("PATH") | 1236 rel_url = self._remotepkgs[pkgname].get("PATH") |
1234 if not rel_url: | 1237 if not rel_url: |
1235 rel_url = pkgname+".tbz2" | 1238 rel_url = pkgname+".tbz2" |
1236             url = self._remote_base_uri.rstrip("/") + "/" + rel_url.lstrip("/") | 1239             remote_base_uri = self._remotepkgs[pkgname]["BASE_URI"] |
 | 1240             url = remote_base_uri.rstrip("/") + "/" + rel_url.lstrip("/") |
1237 else: | 1241 else: |
1238             url = self.settings["PORTAGE_BINHOST"].rstrip("/") + "/" + tbz2name | 1242             url = self.settings["PORTAGE_BINHOST"].rstrip("/") + "/" + tbz2name |
1239 protocol = urlparse(url)[0] | 1243 protocol = urlparse(url)[0] |
1240 fcmd_prefix = "FETCHCOMMAND" | 1244 fcmd_prefix = "FETCHCOMMAND" |
1241 if resume: | 1245 if resume: |
1242 fcmd_prefix = "RESUMECOMMAND" | 1246 fcmd_prefix = "RESUMECOMMAND" |
1243 fcmd = self.settings.get(fcmd_prefix + "_" + protocol.upper()) | 1247 fcmd = self.settings.get(fcmd_prefix + "_" + protocol.upper()) |
1244 if not fcmd: | 1248 if not fcmd: |
1245 fcmd = self.settings.get(fcmd_prefix) | 1249 fcmd = self.settings.get(fcmd_prefix) |
1246 success = portage.getbinpkg.file_get(url, mydest, fcmd=fcmd) | 1250 success = portage.getbinpkg.file_get(url, mydest, fcmd=fcmd) |
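With per-package BASE_URI metadata, the download URL in this hunk is assembled from the entry's own base URI plus its PATH (falling back to the package name plus ".tbz2"), and the fetch command is resolved per protocol before falling back to the generic setting. A minimal sketch under those assumptions (the settings values are placeholders, not real defaults):

    from urllib.parse import urlparse

    # Illustrative only: per-package URL construction and protocol-specific
    # FETCHCOMMAND/RESUMECOMMAND lookup with a generic fallback.
    settings = {
        "FETCHCOMMAND": "generic-fetch ${URI}",
        "FETCHCOMMAND_SFTP": "sftp-fetch ${URI}",
    }

    def build_url(remote_metadata, pkgname):
        rel_url = remote_metadata.get("PATH") or pkgname + ".tbz2"
        return remote_metadata["BASE_URI"].rstrip("/") + "/" + rel_url.lstrip("/")

    def fetch_command(url, resume=False):
        prefix = "RESUMECOMMAND" if resume else "FETCHCOMMAND"
        protocol = urlparse(url)[0]
        return settings.get(prefix + "_" + protocol.upper()) or settings.get(prefix)

    meta = {"BASE_URI": "sftp://binhost.example/pkgs/", "PATH": "app-misc/foo-1.0.tbz2"}
    url = build_url(meta, "app-misc/foo-1.0")
    print(url)                 # sftp://binhost.example/pkgs/app-misc/foo-1.0.tbz2
    print(fetch_command(url))  # sftp-fetch ${URI}
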
(...skipping 76 matching lines...)
1323 def getslot(self, mycatpkg): | 1327 def getslot(self, mycatpkg): |
1324 "Get a slot for a catpkg; assume it exists." | 1328 "Get a slot for a catpkg; assume it exists." |
1325 myslot = "" | 1329 myslot = "" |
1326 try: | 1330 try: |
1327 myslot = self.dbapi.aux_get(mycatpkg,["SLOT"])[0] | 1331 myslot = self.dbapi.aux_get(mycatpkg,["SLOT"])[0] |
1328 except SystemExit as e: | 1332 except SystemExit as e: |
1329 raise | 1333 raise |
1330 except Exception as e: | 1334 except Exception as e: |
1331 pass | 1335 pass |
1332 return myslot | 1336 return myslot |