[Binary archive payload: this section is a ZIP-packed dump of compiled CPython 3.6 bytecode (__pycache__/*.cpython-36.pyc and *.opt-1.pyc) from the DNF package manager. The raw bytecode is not reproducible as text; only the member names and the docstrings and strings visible inside each member are summarized below.]

- crypto.cpython-36.pyc -- repository GPG key handling: thin Context/Data wrappers over the gpg bindings (with fallback wrappers built on gpgme when the gpg module is missing), a temporary GNUPGHOME pubring directory, key import into the repository pubring, printable fingerprint formatting, and logging of key imports, including DNS/DNSSEC-verified ones.
- dnssec.cpython-36.opt-1.pyc, dnssec.cpython-36.pyc -- DNSSEC verification of repository keys: DnssecError ("Exception used in the dnssec module"), an email2location() helper implementing RFC 7929 section 3, the Validity enum ("Output of the verification algorithm"), KeyInfo (an e-mail address plus its ASCII-armored key), DNSSECKeyVerification (an Unbound context plus a cache of already obtained results), and RpmImportedKeys, which re-checks keys already imported into the RPM database.
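The dnssec member above carries the RFC 7929 reference, the "exactly one '@' sign" check, and a lowercased hex digest, which points at the standard OPENPGPKEY owner-name mapping. The sketch below is a reconstruction in that spirit, not a decompilation of the member: the 28-octet truncation is taken from RFC 7929 itself, and a plain ValueError stands in for DNF's own DnssecError.

```python
import base64
import hashlib


def email2location(email_address, tag="_openpgpkey"):
    """Map an e-mail address to its OPENPGPKEY owner name (RFC 7929, section 3)."""
    parts = email_address.split("@")
    if len(parts) != 2:
        # DNF raises its own DnssecError here; ValueError keeps the sketch self-contained.
        raise ValueError("Email address must contain exactly one '@' sign.")
    local, domain = parts
    # SHA-256 of the local part, truncated to 28 octets, rendered as lowercase hex.
    digest = hashlib.sha256(local.encode("utf-8")).digest()[:28]
    label = base64.b16encode(digest).decode("utf-8").lower()
    return label + "." + tag + "." + domain


# Example: the OPENPGPKEY record set for user@example.com would live at
# email2location("user@example.com")  ->  "<56 hex chars>._openpgpkey.example.com"
```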
|dd||j |<dS)Nz-azdrpm: spawned %d: %s )r1rr>r&rspawnlP_NOWAITr rErrFrGr%r:)r rNZ spawn_argsrLrrr start_jobszDeltaInfo.start_jobcCspx.|jr.tjdtj\}}|s P|j||qW|jj|x.t|j|jkrj|j |jj d|js>Pq>WdS)NrQr) r:rwaitpidWNOHANGrOr9appendlenr4rTrH)r rNrLrMrrrrs zDeltaInfo.enqueuecCs@x:|jr:tj\}}|j|||jr|j|jjdqWdS)z!Wait until all jobs have finishedrN)r:rwaitrOr9rTrH)r rLrMrrrrZs   zDeltaInfo.wait)N) r'r(r)r rArOrTrrZrrrrr,Us    r,)Z __future__rrZbinasciirZ dnf.yum.miscrZdnf.i18nrZ dnf.callbackrZ dnf.loggingZdnf.reporrFZ libdnf.reporrr1Z getLoggerr rZPackagePayloadrobjectr,rrrrs      -PK!r惤__pycache__/drpm.cpython-36.pycnu[3 ft`@sddlmZddlmZddlmZddlmZddlmZddl Z ddl Z ddl Z ddl Z ddlZddlZddlZdZejdZGd d d e jjZGd d d eZdS) )absolute_import)unicode_literals)hexlify)unlink_f)_Nz/usr/bin/applydeltarpmdnfcsXeZdZfddZddZfddZddZed d Zed d Z d dZ Z S) DeltaPayloadcs"tt|j||||_||_dS)N)superr__init__ delta_infodelta)selfr r pkgprogress) __class__/usr/lib/python3.6/drpm.pyr )szDeltaPayload.__init__cCstjj|jjS)N)ospathbasenamer location)r rrr__str__.szDeltaPayload.__str__cs2tt|j||||tjjjkr.|jj|dS)N) r r_end_cblibdnfrepoZPackageTargetCBZTransferStatus_ERRORr enqueue)r ZcbdataZ lr_statusmsg)rrrr1szDeltaPayload._end_cbcCsh|j}|j\}}tj|}t|j}tjjj |}|tjjj krRt j t d||j|||j|jdS)Nzunsupported checksum type: %s)Z relative_urlZ checksum_typeZchecksumZ expectedsizeZbase_url)r chksumhawkeyZ chksum_namerdecoderrZ PackageTargetZ checksumTypeZChecksumType_UNKNOWNloggerZwarningrr downloadsizeZbaseurl)r r ZctypeZcsumrZ ctype_coderrr_target_params6s   zDeltaPayload._target_paramscCs|jjS)N)r r!)r rrr download_sizeHszDeltaPayload.download_sizecCs|jjS)N)rr!)r rrr _full_sizeLszDeltaPayload._full_sizecCs$|jj}tjj|jjjtjj|S)N) r rrrjoinrrZpkgdirr)r rrrrlocalPkgPszDeltaPayload.localPkg) __name__ __module__ __qualname__r rrr"propertyr#r$r& __classcell__rr)rrr(s    rc@s>eZdZdddZddZddZdd Zd d Zd d ZdS) DeltaInfoNc Csd|_tjttjrd|_ytjd|_Wnttfk rHd|_YnX|dkrbt j j j |_ n||_ ||_ ||_g|_i|_i|_dS)zA delta lookup and rebuild context query -- installed packages to use when looking up deltas progress -- progress obj to display finished delta rebuilds FTSC_NPROCESSORS_ONLNN)deltarpm_installedraccess APPLYDELTAX_OKsysconf deltarpm_jobs TypeError ValueErrorrZconfZConfdeltarpm_percentagequeryrqueuejobserr)r r8rr7rrrr Vs zDeltaInfo.__init__cCs|js dS|jj s|j r dS|jr,dStjj|jr@dS|j |jd}d}x@|j j |j |j dD](}|j|j}|rj|j|krj|j}|}qjW|rt||||SdS)z&Turn a po to Delta RPM po, if possibleNd)namearch)r/rZdeltarpmr7Z _is_local_pkgrrexistsr&Z_sizer8filterr=r>Zget_delta_from_evrZevrr!r)r ZporZbestZ best_deltaZipor rrr delta_factoryms$ zDeltaInfo.delta_factorycCstjtjjd||d?|d@|jj|}|j}|dkrXt|jj t dg|j |<nB|jj stt dg|j |<n&t j|j |jj|tjjt ddS)Nzdrpm: %d: return code: %d, %drzDelta RPM rebuild failedz(Checksum of the delta-rebuilt RPM faileddone)r logrloggingSUBDEBUGr:poprrr&rr;ZverifyLocalPkgrunlinkrendcallbackZ STATUS_DRPM)r pidcodeploadrrrrjob_dones  zDeltaInfo.job_donecCs`ttd|jj|j|jjg}tjtjf|}tjt j j d|dj |dd||j |<dS)Nz-azdrpm: spawned %d: %s )r1rr>r&rspawnlP_NOWAITr rErrFrGr%r:)r rNZ spawn_argsrLrrr start_jobszDeltaInfo.start_jobcCspx.|jr.tjdtj\}}|s P|j||qW|jj|x.t|j|jkrj|j |jj d|js>Pq>WdS)NrQr) r:rwaitpidWNOHANGrOr9appendlenr4rTrH)r rNrLrMrrrrs zDeltaInfo.enqueuecCs@x:|jr:tj\}}|j|||jr|j|jjdqWdS)z!Wait until all jobs have finishedrN)r:rwaitrOr9rTrH)r rLrMrrrrZs   zDeltaInfo.wait)N) r'r(r)r rArOrTrrZrrrrr,Us    r,)Z __future__rrZbinasciirZ dnf.yum.miscrZdnf.i18nrZ dnf.callbackrZ dnf.loggingZdnf.reporrFZ libdnf.reporrr1Z getLoggerr rZPackagePayloadrobjectr,rrrrs      -PK! 
XxHH+__pycache__/exceptions.cpython-36.opt-1.pycnu[3 ft`@spdZddlmZddlmZmZmZddlZddl Z ddl Z Gddde Z Gddde Z Gd d d e ZGd d d e ZGd dde ZGddde ZGddde ZGddde ZGddde ZGddde ZGddde ZGddde ZGdddeZGdd d eZGd!d"d"eZGd#d$d$eZGd%d&d&e ZGd'd(d(eZGd)d*d*e ZdS)+z Core DNF Errors. )unicode_literals)ucd_P_Nc@s eZdZdS)DeprecationWarningN)__name__ __module__ __qualname__r r /usr/lib/python3.6/exceptions.pyrsrcs2eZdZdZd fdd ZddZddZZS) ErrorzTBase Error. All other Errors thrown by DNF should inherit from this. :api Ncs(tt|j|dkrdnt||_dS)N)superr __init__rvalue)selfr) __class__r r r&szError.__init__cCs dj|jS)Nz{})formatr)rr r r __str__*sz Error.__str__cCs t|jS)N)rr)rr r r __unicode__-szError.__unicode__)N)rrr __doc__rrr __classcell__r r )rr r sr c@s eZdZdS) CompsErrorN)rrr r r r r r2srcseZdZdfdd ZZS) ConfigErrorNcs*tt|j||dk r t|nd|_dS)N)r rrr raw_error)rrr)rr r r8szConfigError.__init__)NN)rrr rrr r )rr r7src@s eZdZdS) DatabaseErrorN)rrr r r r r r=src@s eZdZdS) DepsolveErrorN)rrr r r r r rAsrcs0eZdZfddZeddZddZZS) DownloadErrorcstt|j||_dS)N)r rrerrmap)rr)rr r rHszDownloadError.__init__cCsPg}x@|D]8}x2||D]&}|r,d||fnd|}|j|qWq Wdj|S)Nz%s: %sz%s )appendjoin)rZ errstringskeyerrormsgr r r errmap2strLs  zDownloadError.errmap2strcCs |j|jS)N)r$r)rr r r rUszDownloadError.__str__)rrr r staticmethodr$rrr r )rr rFs  rc@s eZdZdS) LockErrorN)rrr r r r r r&Ysr&cs*eZdZdfdd ZfddZZS) MarkingErrorNcs*tt|j||dkrdnt||_dS)z&Initialize the marking error instance.N)r r'rrpkg_spec)rrr()rr r r`szMarkingError.__init__cs&tt|j}|jr"|d|j7}|S)Nz: )r r'rr()rstring)rr r reszMarkingError.__str__)NN)rrr rrrr r )rr r']sr'cs4eZdZfffffffdd ZeddZZS) MarkingErrorscstd}|r&|dtddj|7}|rD|dtddj|7}|rb|dtddj|7}|r|dtddj|7}|rtjj|d}|d tjjjkr|ddjt d d t ||g7}n"|ddjt d d t ||g7}t t |j |||_||_||_||_||_dS)z&Initialize the marking error instance.zProblems in request:rzmissing packages: z, zbroken packages: zmissing groups or modules: zbroken groups or modules: rz)Modular dependency problem with Defaults:z*Modular dependency problems with Defaults:zModular dependency problem:zModular dependency problems:N)rr dnfutilZ_format_resolve_problemslibdnfmoduleZModulePackageContainerZ!ModuleErrorType_ERROR_IN_DEFAULTSrlenr r*rno_match_group_specserror_group_specsno_match_pkg_specserror_pkg_specsmodule_depsolv_errors)rr1r2r3r4r5r#Zmsg_mod)rr r rns6    zMarkingErrors.__init__cCsd}tj|tdd|jS)Nz[Attribute module_debsolv_errors is deprecated. Use module_depsolv_errors attribute instead.) stacklevel)warningswarnrr5)rr#r r r module_debsolv_errorssz#MarkingErrors.module_debsolv_errors)rrr rpropertyr:rr r )rr r*lsr*c@s eZdZdS) MetadataErrorN)rrr r r r r r<sr<c@s eZdZdS) MiscErrorN)rrr r r r r r=sr=cseZdZdfdd ZZS)PackagesNotAvailableErrorNcs tt|j|||pg|_dS)N)r r>rpackages)rrr(r?)rr r rsz"PackagesNotAvailableError.__init__)NNN)rrr rrr r )rr r>sr>c@s eZdZdS)PackageNotFoundErrorN)rrr r r r r r@sr@cseZdZdfdd ZZS)PackagesNotInstalledErrorNcs tt|j|||pg|_dS)N)r rArr?)rrr(r?)rr r rsz"PackagesNotInstalledError.__init__)NNN)rrr rrr r )rr rAsrAcs$eZdZfddZddZZS)ProcessLockErrorcstt|j|||_dS)N)r rBrpid)rrrC)rr r rszProcessLockError.__init__cCst|j|jffS)zPickling support.)rBrrC)rr r r __reduce__szProcessLockError.__reduce__)rrr rrDrr r )rr rBs rBc@s eZdZdS) RepoErrorN)rrr r r r r rEsrEc@s eZdZdS)ThreadLockErrorN)rrr r r r r rFsrFc@s eZdZdS)TransactionCheckErrorN)rrr r r r r rGsrG)rZ __future__rZdnf.i18nrrrZdnf.utilr,r.r8r Exceptionr rrrrrr&r'r*r<r=r>r@rArBrErFrGr r r r s0 ) PK! 
XxHH%__pycache__/exceptions.cpython-36.pycnu[3 ft`@spdZddlmZddlmZmZmZddlZddl Z ddl Z Gddde Z Gddde Z Gd d d e ZGd d d e ZGd dde ZGddde ZGddde ZGddde ZGddde ZGddde ZGddde ZGddde ZGdddeZGdd d eZGd!d"d"eZGd#d$d$eZGd%d&d&e ZGd'd(d(eZGd)d*d*e ZdS)+z Core DNF Errors. )unicode_literals)ucd_P_Nc@s eZdZdS)DeprecationWarningN)__name__ __module__ __qualname__r r /usr/lib/python3.6/exceptions.pyrsrcs2eZdZdZd fdd ZddZddZZS) ErrorzTBase Error. All other Errors thrown by DNF should inherit from this. :api Ncs(tt|j|dkrdnt||_dS)N)superr __init__rvalue)selfr) __class__r r r&szError.__init__cCs dj|jS)Nz{})formatr)rr r r __str__*sz Error.__str__cCs t|jS)N)rr)rr r r __unicode__-szError.__unicode__)N)rrr __doc__rrr __classcell__r r )rr r sr c@s eZdZdS) CompsErrorN)rrr r r r r r2srcseZdZdfdd ZZS) ConfigErrorNcs*tt|j||dk r t|nd|_dS)N)r rrr raw_error)rrr)rr r r8szConfigError.__init__)NN)rrr rrr r )rr r7src@s eZdZdS) DatabaseErrorN)rrr r r r r r=src@s eZdZdS) DepsolveErrorN)rrr r r r r rAsrcs0eZdZfddZeddZddZZS) DownloadErrorcstt|j||_dS)N)r rrerrmap)rr)rr r rHszDownloadError.__init__cCsPg}x@|D]8}x2||D]&}|r,d||fnd|}|j|qWq Wdj|S)Nz%s: %sz%s )appendjoin)rZ errstringskeyerrormsgr r r errmap2strLs  zDownloadError.errmap2strcCs |j|jS)N)r$r)rr r r rUszDownloadError.__str__)rrr r staticmethodr$rrr r )rr rFs  rc@s eZdZdS) LockErrorN)rrr r r r r r&Ysr&cs*eZdZdfdd ZfddZZS) MarkingErrorNcs*tt|j||dkrdnt||_dS)z&Initialize the marking error instance.N)r r'rrpkg_spec)rrr()rr r r`szMarkingError.__init__cs&tt|j}|jr"|d|j7}|S)Nz: )r r'rr()rstring)rr r reszMarkingError.__str__)NN)rrr rrrr r )rr r']sr'cs4eZdZfffffffdd ZeddZZS) MarkingErrorscstd}|r&|dtddj|7}|rD|dtddj|7}|rb|dtddj|7}|r|dtddj|7}|rtjj|d}|d tjjjkr|ddjt d d t ||g7}n"|ddjt d d t ||g7}t t |j |||_||_||_||_||_dS)z&Initialize the marking error instance.zProblems in request:rzmissing packages: z, zbroken packages: zmissing groups or modules: zbroken groups or modules: rz)Modular dependency problem with Defaults:z*Modular dependency problems with Defaults:zModular dependency problem:zModular dependency problems:N)rr dnfutilZ_format_resolve_problemslibdnfmoduleZModulePackageContainerZ!ModuleErrorType_ERROR_IN_DEFAULTSrlenr r*rno_match_group_specserror_group_specsno_match_pkg_specserror_pkg_specsmodule_depsolv_errors)rr1r2r3r4r5r#Zmsg_mod)rr r rns6    zMarkingErrors.__init__cCsd}tj|tdd|jS)Nz[Attribute module_debsolv_errors is deprecated. Use module_depsolv_errors attribute instead.) 
stacklevel)warningswarnrr5)rr#r r r module_debsolv_errorssz#MarkingErrors.module_debsolv_errors)rrr rpropertyr:rr r )rr r*lsr*c@s eZdZdS) MetadataErrorN)rrr r r r r r<sr<c@s eZdZdS) MiscErrorN)rrr r r r r r=sr=cseZdZdfdd ZZS)PackagesNotAvailableErrorNcs tt|j|||pg|_dS)N)r r>rpackages)rrr(r?)rr r rsz"PackagesNotAvailableError.__init__)NNN)rrr rrr r )rr r>sr>c@s eZdZdS)PackageNotFoundErrorN)rrr r r r r r@sr@cseZdZdfdd ZZS)PackagesNotInstalledErrorNcs tt|j|||pg|_dS)N)r rArr?)rrr(r?)rr r rsz"PackagesNotInstalledError.__init__)NNN)rrr rrr r )rr rAsrAcs$eZdZfddZddZZS)ProcessLockErrorcstt|j|||_dS)N)r rBrpid)rrrC)rr r rszProcessLockError.__init__cCst|j|jffS)zPickling support.)rBrrC)rr r r __reduce__szProcessLockError.__reduce__)rrr rrDrr r )rr rBs rBc@s eZdZdS) RepoErrorN)rrr r r r r rEsrEc@s eZdZdS)ThreadLockErrorN)rrr r r r r rFsrFc@s eZdZdS)TransactionCheckErrorN)rrr r r r r rGsrG)rZ __future__rZdnf.i18nrrrZdnf.utilr,r.r8r Exceptionr rrrrrr&r'r*r<r=r>r@rArBrErFrGr r r r s0 ) PK!7+%__pycache__/goal.cpython-36.opt-1.pycnu[3 ft`M@s(ddlmZddlmZddlmZdS))absolute_import)unicode_literals)GoalN)Z __future__rrZhawkeyrrr/usr/lib/python3.6/goal.pys  PK!7+__pycache__/goal.cpython-36.pycnu[3 ft`M@s(ddlmZddlmZddlmZdS))absolute_import)unicode_literals)GoalN)Z __future__rrZhawkeyrrr/usr/lib/python3.6/goal.pys  PK!(__pycache__/history.cpython-36.opt-1.pycnu[3 ft`~@s dZddlmZddlmZdS)z*Interfaces to the history of transactions.)absolute_import)unicode_literalsN)__doc__Z __future__rrrr/usr/lib/python3.6/history.pys PK!"__pycache__/history.cpython-36.pycnu[3 ft`~@s dZddlmZddlmZdS)z*Interfaces to the history of transactions.)absolute_import)unicode_literalsN)__doc__Z __future__rrrr/usr/lib/python3.6/history.pys PK!2C%%%__pycache__/i18n.cpython-36.opt-1.pycnu[3 ft`!0@sddlmZddlmZddlmZddlZddlZddlZddlZddl Z ddl Z Gddde Z ddZ d d Zd d Zd dZddZddZddZd'ddZddZd(ddZd)ddZd d!Zd"d#Zd$d%Zed&\ZZeZdS)*)print_function)unicode_literals)unicodeNc@s$eZdZddZddZddZdS) UnicodeStreamcCs||_||_dS)N)streamencoding)selfrrr /usr/lib/python3.6/i18n.py__init__$szUnicodeStream.__init__c Cst|ts.tjjr |j|jdn |j|jd}y|jj |Wn\t k r|j|jjd}t |jdrz|jj j |n|j|jjd}|jj |YnXdS)Nreplacebackslashreplacebufferignore) isinstancestrdnfpycompPY3decoderencoderwriteUnicodeEncodeErrorhasattrr)rsZs_bytesr r r r(s  zUnicodeStream.writecCs t|j|S)N)getattrr)rnamer r r __getattr__7szUnicodeStream.__getattr__N)__name__ __module__ __qualname__r rrr r r r r#srcCs0|dkr dS|j}|jds(|jdr,dSdS)aReturn true if encoding can express any Unicode character. Even if an encoding can express all accented letters in the given language, we can't generally settle for it in DNF since sometimes we output special characters like the registered trademark symbol (U+00AE) and surprisingly many national non-unicode encodings, including e.g. ASCII and ISO-8859-2, don't contain it. NFzutf-Zutf_T)lower startswith)rr!r r r _full_ucd_support:s r#cCstjd}|jdrdS|S)z= Take the best shot at the current system's string encoding. FZANSIzutf-8)localegetpreferredencodingr")rr r r _guess_encodingKs r&cCsytjjtjdWntjk rytjjtjddtjd<Wn0tjk rttjjtjddtjd<YnXtdj tjdt j dYnXdS)NzC.UTF-8LC_ALLCz&Failed to set locale, defaulting to {})file) rr setlocaler$r(Errorosenvironprintformatsysstderrr r r r setup_localePsr3c Cs`tj}|jstjtjtjy |j}Wntk r@d}YnXt|s\t |t t_dSdS)z Check that stdout is of suitable encoding and handle the situation if not. Returns True if stdout was of suitable encoding already and no changes were needed. 
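The drpm member embeds the applydeltarpm path and an os.spawnl argument list with an "-a <arch>" flag. A minimal background rebuild in that style might look like the following; the file names in the usage comment are placeholders, not values taken from the dump.

```python
import os

APPLYDELTA = "/usr/bin/applydeltarpm"


def rebuild_from_delta(arch, delta_rpm, target_rpm):
    """Spawn applydeltarpm without waiting and return the child pid."""
    # The executable path is passed twice because os.spawnl() takes the path
    # followed by the argument vector; '-a <arch>' selects the target architecture.
    return os.spawnl(os.P_NOWAIT, APPLYDELTA, APPLYDELTA,
                     "-a", arch, delta_rpm, target_rpm)


# Hypothetical usage:
# pid = rebuild_from_delta("x86_64", "foo-1.0_1.1.x86_64.drpm", "foo-1.1-1.x86_64.rpm")
# _, status = os.waitpid(pid, 0)   # non-zero status means the rebuild failed
```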
- i18n.cpython-36.opt-1.pyc, i18n.cpython-36.pyc -- locale and Unicode helpers: UnicodeStream, setup_locale()/setup_stdout(), ucd() ("like the builtin unicode() but tries to use a reasonable encoding"), terminal-width helpers that count East Asian wide characters as two cells (exact_width, chop_str, fill_exact_width, textwrap_fill, select_short_long), and gettext translation()/pgettext() setup.
- lock.cpython-36.opt-1.pyc, lock.cpython-36.pyc -- inter-process locking: ProcessLock, a pid-file lock guarded by a per-process threading.RLock, plus builders for the download, metadata, rpmdb, and log locks kept under the cache directory.
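The lock member shows the usual pid-file pattern: open the file with O_CREAT|O_RDWR, take a non-blocking fcntl.flock() to serialize access to it, compare the recorded pid against /proc, and write the caller's pid when the entry is stale. A stripped-down sketch of that pattern, not DNF's exact class:

```python
import errno
import fcntl
import os


def try_acquire(pid_file):
    """Try to claim a pid-file lock; True on success, False if another process holds it."""
    fd = os.open(pid_file, os.O_CREAT | os.O_RDWR, 0o644)
    try:
        try:
            # The flock only serializes readers/writers of the pid file itself.
            fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
        except OSError as e:
            if e.errno == errno.EWOULDBLOCK:
                return False
            raise
        old = os.read(fd, 32).strip()
        if old:
            try:
                old_pid = int(old)
            except ValueError:
                raise RuntimeError("Malformed lock file found: %s" % pid_file)
            # A different process that is still alive owns the lock.
            if old_pid != os.getpid() and os.access("/proc/%d/stat" % old_pid, os.F_OK):
                return False
        os.lseek(fd, 0, os.SEEK_SET)      # stale (or our own) entry: claim it
        os.ftruncate(fd, 0)
        os.write(fd, str(os.getpid()).encode("utf-8"))
        return True
    finally:
        os.close(fd)
```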
- logging.cpython-36.opt-1.pyc, logging.cpython-36.pyc -- logging setup: extra levels (DDEBUG, SUBDEBUG, TRACE, ALL), MultiprocessRotatingFileHandler (a rotating file handler whose rollover is guarded by the log lock and which can gzip rotated files), the Logging bootstrap class that wires stdout/stderr handlers and per-logger file handlers (dnf, py.warnings, librepo, dnf.rpm) from the configuration, a Timer helper, and LibdnfLoggerCB, which forwards libdnf/librepo log records into Python logging.
- match_counter.cpython-36.opt-1.pyc, match_counter.cpython-36.pyc -- MatchCounter, a dict that "map[s] packages to which of their attributes matched in a search against what values" (name, summary, description, url), with weighted scoring and sorting helpers.
[__pycache__/package.cpython-36.opt-1.pyc and the start of its duplicate __pycache__/package.cpython-36.pyc: compiled dnf/package.py defining dnf.package.Package ("Represents a package. #:api", built on hawkey.Package) with DEBUGINFO_SUFFIX = "-debuginfo" and DEBUGSOURCE_SUFFIX = "-debugsource". Recoverable members: _chksum, _from_cmdline, _from_system, _from_repo ("For installed packages returns id of repository from which the package was installed prefixed with '@' (if such information is available in the history database). Otherwise returns id of repository the package belongs to (@System for installed packages of unknown origin)"), from_repo, _header, _size, _pkgid, source_name ("e.g. krb5-libs -> krb5"), debug_name ("e.g. kernel-PAE -> kernel-PAE-debuginfo"), debugsource_name, get_header (rpm header of an installed package; may raise PackageNotFoundError), source_debug_name, idx, repoid, pkgtup, repo, reason, relativepath, a/e/v/r, ui_from_repo, evr_eq/evr_gt/evr_lt, getDiscNum, localPkg, remote_location(schemes=('http', 'ftp', 'file', 'https')), _is_local_pkg, pkgdir, returnIdSum and verifyLocalPkg. The duplicate copy continues below.]
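A short usage sketch of the Package attributes documented above, driven through the public dnf API (the package name 'bash' is only an example; this is an illustration, not the module's own code):

import dnf

with dnf.Base() as base:
    base.read_all_repos()
    base.fill_sack()
    for pkg in base.sack.query().available().filter(name='bash'):
        print(pkg.name, pkg.evr, pkg.arch)
        print('  source package :', pkg.source_name)       # e.g. "bash"
        print('  debuginfo name :', pkg.debug_name)         # e.g. "bash-debuginfo"
        print('  download URL   :', pkg.remote_location())  # None for installed/commandline packages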
[Remainder of the duplicate __pycache__/package.cpython-36.pyc entry (same content as above, ending with the remote_location and verifyLocalPkg docstrings).]

[__pycache__/persistor.cpython-36.opt-1.pyc and __pycache__/persistor.cpython-36.pyc: two identical compiled copies of dnf/persistor.py. Recoverable content: a JSONDB helper that creates, reads and rewrites small JSON state files under the dnf cache directory (_check_json_db / _write_json_db); the remaining classes in the module are not recoverable from the bytecode.]

[Start of __pycache__/plugin.cpython-36.opt-1.pyc: compiled dnf/plugin.py; the module body follows below.]
N)Z remove_setdictr.inspectZgetfile __class__setkeysupdate intersectionrZ install_setdifference_updaterE)rr&r.r;Z plugin_filesZerased_plugin_filesZpkgZ plugin_filerrr unload_removed_pluginss     zPlugins.unload_removed_plugins)N)r'r(r)r r0r?rGrUrVrWrYrZr[r\r]r/rhrrrr r,`s  r,cCstjS)N)r__subclasses__rrrr rOsrOcCsx|D]}tjj|\}}|jj|tjj|\}}d|j|f}ytj|}Wqt k r}z,t j t d||t j tjjdddWYdd}~XqXqWdS)Nz%s.%szFailed loading plugin "%s": %sr1T)r5)rr splitrLrXsplitextr' importlib import_modulerr8errorrr^rr_ZSUBDEBUG)rSZpy_filesfnr moduleZextr rrrr rNs  rNcCsJg}t|}t|}t}t}x|D]}xtjd|D]}tjjtjj|\}} d} d} xN|D]F} t|| rd|j| d} x$|D]} t|| rd} |j| qWd} qdW| sx |D]} t|| r|j| qW| r:|j|q:Wq&W|j |}|rt j t dj djt||j |}|rFt j t dj djt||S)Nz%s/*.pyTFz=No matches found for the following enable plugin patterns: {}z, z>No matches found for the following disable plugin patterns: {})rcglobrr rkbasename_plugin_name_matches_patternaddrX differencer8Zwarningrformatr9rQ)pathsZdisable_pluginsrFr.Zpattern_enable_foundZpattern_disable_foundpro plugin_nameZdummyZmatchedZenable_pattern_testedZ pattern_skipZpattern_enableZenable_not_foundZdisable_not_foundrrr rMsD            rMcs*t||jddf}tfdd|DS)z Checks plugin name matches the pattern. The alternative plugin name using dashes instead of underscores is tried in case of original name is not matched. (see https://bugzilla.redhat.com/show_bug.cgi?id=1980712) r-c3s|]}tj|VqdS)N)r@)r r )rArr rB sz/_plugin_name_matches_pattern..)rcreplacerD)ryrAZ try_namesr)rAr rss rscs<fdd}ttjdtjf|jdd}|_S)z5A class decorator for automatic command registration.cs|r|jdS)N)register_command)rrr) command_classrr r sz"register_command..__init__rr)r r )typerr'rraliasesZ_plugin)r}r Z plugin_classr)r}r r|s r|)Z __future__rrrr@rqrlrar_operatorrr4r6rZ dnf.loggingrZ dnf.pycompZdnf.utilZdnf.i18nrZ getLoggerr8rHobjectrr,rOrNrMrsr|rrrr s2     2k %PK!N##!__pycache__/plugin.cpython-36.pycnu[3 fV%@sddlmZddlmZddlmZddlZddlZddlZddlZddlZddl Z ddl Z ddl Z ddl Z ddl Z ddlZddlZddlZddlmZejdZdZGdd d eZGd d d eZd d ZddZddZddZddZdS))absolute_import)print_function)unicode_literalsN)_dnfzdnf.plugin.dynamicc@s\eZdZdZdZdZeddZddZdd Z d d Z d d Z ddZ ddZ ddZdS)Pluginz5The base class custom plugins must derive from. 
#:apiz Ncstjj}|jr|jn|jfdd|jD}xb|D]Z}tjj|r6y|j |Wq6t k r}zt j j tdt|WYdd}~Xq6Xq6W|S)Ncsg|]}d|fqS)z %s/%s.conf).0path)namer/usr/lib/python3.6/plugin.py 9sz&Plugin.read_config..zParsing file failed: %s)libdnfconfZ ConfigParser config_namer Zpluginconfpathosr isfileread Exceptionr exceptionsZ ConfigErrorrstr)clsrparserfilesfileer)r r read_config4s   .zPlugin.read_configcCs||_||_dS)N)basecli)selfrrrrr __init__BszPlugin.__init__cCsdS)Nr)rrrr pre_configGszPlugin.pre_configcCsdS)Nr)rrrr configKsz Plugin.configcCsdS)Nr)rrrr resolvedOszPlugin.resolvedcCsdS)Nr)rrrr sackSsz Plugin.sackcCsdS)Nr)rrrr pre_transactionWszPlugin.pre_transactioncCsdS)Nr)rrrr transaction[szPlugin.transaction)__name__ __module__ __qualname____doc__r r classmethodrr r!r"r#r$r%r&rrrr r.s rc@s~eZdZddZddZddZddZd d Zd d Zd dZ dddZ ddZ ddZ ddZ ddZddZddZdS)PluginscCsg|_g|_dS)N) plugin_clsplugins)rrrr r aszPlugins.__init__cCs |jdS)N)_unload)rrrr __del__eszPlugins.__del__c Cs~xx|jD]n}yt||Wqtjjk r6Yqtk rttj\}}}tj |||}t j dj |YqXqWdS)N) r.getattrrrErrorrsysexc_info tracebackformat_exceptionloggerZcriticaljoin)rmethodpluginexc_type exc_value exc_tracebackZ except_listrrr _callerhs zPlugins._callercsxxr|jddD]`}|jtfdd|Dr2q|j|}|jdo^|jddo^|jdd }|r|jj|qWdS)zwChecks whether plugins are enabled or disabled in configuration files and removes disabled plugins from listNc3s|]}tj|VqdS)N)fnmatch)r pattern)r rr xsz)Plugins._check_enabled..mainZenabled)r-r anyrZ has_sectionZ has_optionZ getbooleanremove)rrenable_pluginsZplug_clsrZdisabledr)r r _check_enabledss   zPlugins._check_enabledcCsttjkrtdtjjttjt<}g|_t|j ||}t ||t dd|_ |j ||t|j dkrtdd|j D}tjtddj|dS)z)Dynamically load relevant plugin modules.zload_plugins() called twiceNrcss|] }|jVqdS)N)r )r r;rrr rBsz Plugins._load..zLoaded plugins: %sz, )DYNAMIC_PACKAGEr4modules RuntimeErrorrZpycomp ModuleType__path___get_plugins_filesZ pluginpath_import_modules_plugin_classesr-rGlensortedr8debugrr9)rrZskipsrFpackagernamesrrr _loads   z Plugins._loadcCs|jddS)Nr!)r?)rrrr _run_pre_configszPlugins._run_pre_configcCs|jddS)Nr")r?)rrrr _run_configszPlugins._run_configNcCs*x$|jD]}|||}|jj|qWdS)N)r-r.append)rrrZp_clsr;rrr _run_inits  zPlugins._run_initcCs|jddS)Nr$)r?)rrrr run_sackszPlugins.run_sackcCs|jddS)Nr#)r?)rrrr run_resolvedszPlugins.run_resolvedcCs|jddS)Nr%)r?)rrrr run_pre_transactionszPlugins.run_pre_transactioncCs|jddS)Nr&)r?)rrrr run_transactionszPlugins.run_transactioncCs&ttjkr"tjtjjdtjt=dS)NzPlugins were unloaded.)rHr4rIr8logrloggingZDDEBUG)rrrr r/s zPlugins._unloadcCs|js dSt}x|jD]}||tj|j<qWt|j}t}x |jD]}|j|j |j qJW|sldSx|j D]}|j |j qtWx|D]}|jj ||qWdS)zH Unload plugins that were removed in the `transaction`. 
N)Z remove_setdictr.inspectZgetfile __class__setkeysupdate intersectionrZ install_setdifference_updaterE)rr&r.r;Z plugin_filesZerased_plugin_filesZpkgZ plugin_filerrr unload_removed_pluginss     zPlugins.unload_removed_plugins)N)r'r(r)r r0r?rGrUrVrWrYrZr[r\r]r/rhrrrr r,`s  r,cCstjS)N)r__subclasses__rrrr rOsrOcCsx|D]}tjj|\}}|jj|tjj|\}}d|j|f}ytj|}Wqt k r}z,t j t d||t j tjjdddWYdd}~XqXqWdS)Nz%s.%szFailed loading plugin "%s": %sr1T)r5)rr splitrLrXsplitextr' importlib import_modulerr8errorrr^rr_ZSUBDEBUG)rSZpy_filesfnr moduleZextr rrrr rNs  rNcCsJg}t|}t|}t}t}x|D]}xtjd|D]}tjjtjj|\}} d} d} xN|D]F} t|| rd|j| d} x$|D]} t|| rd} |j| qWd} qdW| sx |D]} t|| r|j| qW| r:|j|q:Wq&W|j |}|rt j t dj djt||j |}|rFt j t dj djt||S)Nz%s/*.pyTFz=No matches found for the following enable plugin patterns: {}z, z>No matches found for the following disable plugin patterns: {})rcglobrr rkbasename_plugin_name_matches_patternaddrX differencer8Zwarningrformatr9rQ)pathsZdisable_pluginsrFr.Zpattern_enable_foundZpattern_disable_foundpro plugin_nameZdummyZmatchedZenable_pattern_testedZ pattern_skipZpattern_enableZenable_not_foundZdisable_not_foundrrr rMsD            rMcs*t||jddf}tfdd|DS)z Checks plugin name matches the pattern. The alternative plugin name using dashes instead of underscores is tried in case of original name is not matched. (see https://bugzilla.redhat.com/show_bug.cgi?id=1980712) r-c3s|]}tj|VqdS)N)r@)r r )rArr rB sz/_plugin_name_matches_pattern..)rcreplacerD)ryrAZ try_namesr)rAr rss rscs<fdd}ttjdtjf|jdd}|_S)z5A class decorator for automatic command registration.cs|r|jdS)N)register_command)rrr) command_classrr r sz"register_command..__init__rr)r r )typerr'rraliasesZ_plugin)r}r Z plugin_classr)r}r r|s r|)Z __future__rrrr@rqrlrar_operatorrr4r6rZ dnf.loggingrZ dnf.pycompZdnf.utilZdnf.i18nrZ getLoggerr8rHobjectrr,rOrNrMrsr|rrrr s2     2k %PK!j) '__pycache__/pycomp.cpython-36.opt-1.pycnu[3 ft`@sddlmZddlmZddlZddlZddlZddlZddlZddlZddl Z ej dkZ e r ddl m Z ddlmZddlZddlZddlZejZeZZejZeZeje_eje_eZe Z!ej"Z#ej$Z%e%j&Z'ej&Z(ej)Z*ddZ+d d Z,d d Z-d dZ.ej/Z0dddZ1ddZ2ddZ3nddl4mZmZmZmZm!Z!ddl m Z ddlmZddlZddlZddl%Z%ddl5Z5ejZej6Zej7Z#ej&Z'e5j&Z(ej8Z*ddZ+dd Z,dd Z-ddZ.ddZ0d ddZ1ddZ2ddZ3dS)!)NullTranslations) version_infoN)StringIO) ConfigParsercCs|j}|j}||fS)N)gettextngettext)t_P_r /usr/lib/python3.6/pycomp.py gettext_setup8srcCs t|tS)N) isinstancebytes)or r r is_py2str_py3bytes>srcCs t|tS)N)rr)rr r r is_py3bytes@srcCs tj|S)N)types ModuleType)mr r r DsrcCstj||dS)N)locale setlocale)categorylocr r r rFsrcCs|j|dS)N)write)fcontentr r r write_to_fileHsrcCstjjj|S)N)emailmimetextMIMEText)bodyr r r email_mimeJsr%)unicode basestringlongxrange raw_inputcCs|j}|j}||fS)N)ugettext ungettext)r r r r r r r]scCs t|tS)N)rstr)rr r r rcscCsdS)NFr )rr r r rescCstj|jdS)Nzutf-8)rrencode)rr r r riscOstj|jdf||S)Nzutf-8)rformatr.)Zpercentargskwargsr r r r/jsr/cCstj||jddS)Nzutf-8)rrr.)rrr r r rlscCs|j|jddS)Nzutf-8)rr.)rrr r r rnscCstjjj|jdS)Nzutf-8)r r!r"r#r.)r$r r r r%ps)N)N)9rrsysrbase64Zemail.mime.textr itertoolsrrmajorZPY3iorZ configparserrZqueueZ urllib.parseZurllibZshlexZQueuer-r'r& filterfalseintr(r+rr,ranger)inputr*Z decodebytesZbase64_decodebytesparseZurlparseZquoteZ urllib_quoteZ shlex_quotemaxsizeZ sys_maxsizerrrr format_stringr/rrr%Z __builtin__ZpipesZ ifilterfalseZ decodestringZmaxintr r r r sr          PK!j) !__pycache__/pycomp.cpython-36.pycnu[3 ft`@sddlmZddlmZddlZddlZddlZddlZddlZddlZddl Z ej dkZ e r ddl m Z ddlmZddlZddlZddlZejZeZZejZeZeje_eje_eZe Z!ej"Z#ej$Z%e%j&Z'ej&Z(ej)Z*ddZ+d d Z,d d Z-d dZ.ej/Z0dddZ1ddZ2ddZ3nddl4mZmZmZmZm!Z!ddl m Z 
ddlmZddlZddlZddl%Z%ddl5Z5ejZej6Zej7Z#ej&Z'e5j&Z(ej8Z*ddZ+dd Z,dd Z-ddZ.ddZ0d ddZ1ddZ2ddZ3dS)!)NullTranslations) version_infoN)StringIO) ConfigParsercCs|j}|j}||fS)N)gettextngettext)t_P_r /usr/lib/python3.6/pycomp.py gettext_setup8srcCs t|tS)N) isinstancebytes)or r r is_py2str_py3bytes>srcCs t|tS)N)rr)rr r r is_py3bytes@srcCs tj|S)N)types ModuleType)mr r r DsrcCstj||dS)N)locale setlocale)categorylocr r r rFsrcCs|j|dS)N)write)fcontentr r r write_to_fileHsrcCstjjj|S)N)emailmimetextMIMEText)bodyr r r email_mimeJsr%)unicode basestringlongxrange raw_inputcCs|j}|j}||fS)N)ugettext ungettext)r r r r r r r]scCs t|tS)N)rstr)rr r r rcscCsdS)NFr )rr r r rescCstj|jdS)Nzutf-8)rrencode)rr r r riscOstj|jdf||S)Nzutf-8)rformatr.)Zpercentargskwargsr r r r/jsr/cCstj||jddS)Nzutf-8)rrr.)rrr r r rlscCs|j|jddS)Nzutf-8)rr.)rrr r r rnscCstjjj|jdS)Nzutf-8)r r!r"r#r.)r$r r r r%ps)N)N)9rrsysrbase64Zemail.mime.textr itertoolsrrmajorZPY3iorZ configparserrZqueueZ urllib.parseZurllibZshlexZQueuer-r'r& filterfalseintr(r+rr,ranger)inputr*Z decodebytesZbase64_decodebytesparseZurlparseZquoteZ urllib_quoteZ shlex_quotemaxsizeZ sys_maxsizerrrr format_stringr/rrr%Z __builtin__ZpipesZ ifilterfalseZ decodestringZmaxintr r r r sr          PK!rӑ&__pycache__/query.cpython-36.opt-1.pycnu[3 ft`3@sZddlmZddlmZddlZddlmZddlmZddlmZd dd Z d d Z dS) )absolute_import)unicode_literalsN)Query)ucd) basestringFcCsLt|tr|g}|j}g}|r,|jtj|j|d|i|rD|S|jS)NZprovides__glob) isinstancerZqueryappendhawkeyZICASEZfiltermZrun)ZsackZpatternsZ ignore_caseZ get_queryqflagsr /usr/lib/python3.6/query.py _by_providess  rcCsdd|DS)NcSsi|]}|t|qSr )r).0Zpkgr r r .sz#_per_nevra_dict..r )Zpkg_listr r r _per_nevra_dict-sr)FF) Z __future__rrr rZdnf.i18nrZ dnf.pycomprrrr r r r s      PK!rӑ __pycache__/query.cpython-36.pycnu[3 ft`3@sZddlmZddlmZddlZddlmZddlmZddlmZd dd Z d d Z dS) )absolute_import)unicode_literalsN)Query)ucd) basestringFcCsLt|tr|g}|j}g}|r,|jtj|j|d|i|rD|S|jS)NZprovides__glob) isinstancerZqueryappendhawkeyZICASEZfiltermZrun)ZsackZpatternsZ ignore_caseZ get_queryqflagsr /usr/lib/python3.6/query.py _by_providess  rcCsdd|DS)NcSsi|]}|t|qSr )r).0Zpkgr r r .sz#_per_nevra_dict..r )Zpkg_listr r r _per_nevra_dict-sr)FF) Z __future__rrr rZdnf.i18nrZ dnf.pycomprrrr r r r s      PK!qսVV%__pycache__/repo.cpython-36.opt-1.pycnu[3 fLQ@sFddlmZddlmZddlmZmZddlZddlZddl Zddl Zddl Zddl Zddl ZddlZddlZddlZddlZddlZddlZddlZddlZddlZddlZddlZddlZddlZddlZddlZddlZddlZdZ dZ!ej"ej#dZ$dej%e$ej&fZ'd e'e!fd e'e fd d Z(ej)d Z*ddZ+ddZ,d-ddZ-ddZ.Gddde/Z0Gddde1Z2Gdddej3j4Z5Gddde/Z6Gdd d ej7j8Z9Gd!d"d"ej3j:Z;Gd#d$d$e;Zej7j?j@ZAej7j?jBZCej7j?jDZEGd)d*d*ej7jFZGGd+d,d,ejHjIZ?dS).)absolute_import)unicode_literals)ucd_NpackagesZ mirrorlistz-_.:z(?P[%s]+)\-[%s]{16}z>^%s\/.*((xml|yaml)(\.gz|\.xz|\.bz2|.zck)?|asc|cachecookie|%s)$z^%s\/%s\/.+rpm$z^.+(solv|solvx)$)metadatarZdbcachednfcCstjjj|}|dkrdS|S)zAReturn index of an invalid character in the repo ID (if present).rN)libdnfrepoRepoZverifyId)Zrepo_idZ first_invalidr /usr/lib/python3.6/repo.pyrepo_id_invalidHsrcGs8x"|D]}|||}|dk r|SqWttd|dS)Nz"no matching payload factory for %s) ValueErrorr)pkgprogressZ factoriesfnploadr r r _pkg2payloadOs   rTc Csdd}|jjddt||dD}t}ytjjjtjj||Wn,t k rv}zt ||_ WYdd}~XnX|j |jj |_xj|D]b}|j}|dks|jdrq|j} | j} | j} |dkr|jj| q| jjj|g|j| <qW|S)NcSs t|d S)NZdelta)hasattr)payloadr r r _download_sort_keyYsz._download_payloads.._download_sort_keycSsg|] }|jqSr )_librepo_target).0rr r r ]sz&_download_payloads..)keyz Not finishedzAlready downloaded)errclearsorted_DownloadErrorsr r PackageTargetZdownloadPackagesZVectorPPackageTarget 
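A minimal sketch of a custom plugin built on the dnf.Plugin hooks listed above (the plugin name and the empty method bodies are illustrative placeholders, not part of this archive):

import dnf


class HelloPlugin(dnf.Plugin):

    name = 'hello'  # also selects <pluginconfpath>/hello.conf for read_config()

    def __init__(self, base, cli):
        super(HelloPlugin, self).__init__(base, cli)

    def config(self):       # runs after configuration files are parsed
        pass

    def sack(self):         # runs after the package sack is ready
        pass

    def transaction(self):  # runs after a transaction has completed
        pass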
RuntimeErrorstr_fatalwaitcopy _recoverableZgetErr startswithZ getCallbacks package_ploadr_skippedadd_repoZexpire_pkg_irrecoverable) payloadsZdrpmZ fail_fastrZtargetserrseZtgtr callbacksrrr r r _download_payloadsWs0     r1cCsL|\}}x:|D]2}|j}||kr,||j7}q||j7}||j7}qW||fS)N)r download_size _full_size)Zsavingr-r.realZfullrrr r r _update_savingxs   r5c@s>eZdZddZddZeddZejddZdd Zd S) rcCsi|_i|_d|_t|_dS)N)r,_val_recoverabler#setr))selfr r r __init__sz_DownloadErrors.__init__cCs"|jr |jS|jrd|jgiSiS)N)r,r#)r8r r r _irrecoverables  z_DownloadErrors._irrecoverablecCs|jS)N)r6)r8r r r r&sz_DownloadErrors._recoverablecCs ||_dS)N)r6)r8Znew_dctr r r r&scCs|j|jkrdS|jS)Nr)rr)r2)r8rr r r _bandwidth_useds z_DownloadErrors._bandwidth_usedN) __name__ __module__ __qualname__r9r;propertyr&setterr<r r r r rs  rc@seZdZddZdS)_DetailedLibrepoErrorcCs,tj||jd|_|jd|_||_dS)Nr) Exceptionr9argsZ librepo_codeZ librepo_msg source_url)r8Z librepo_errrFr r r r9s   z_DetailedLibrepoError.__init__N)r=r>r?r9r r r r rBsrBc@seZdZddZdS)_NullKeyImportcCsdS)NTr )r8iduserid fingerprinturl timestampr r r _confirmsz_NullKeyImport._confirmN)r=r>r?rMr r r r rGsrGc@s eZdZddZeddZdS)MetadatacCs ||_dS)N)r+)r8r r r r r9szMetadata.__init__cCs |jjS)N)r+fresh)r8r r r rOszMetadata.freshN)r=r>r?r9r@rOr r r r rNsrNcs4eZdZfddZddZddZddZZS) PackageTargetCallbackscstt|j||_dS)N)superrPr9r()r8r() __class__r r r9szPackageTargetCallbacks.__init__cCs|jjd||dS)Nr)r(_end_cb)r8statusmsgr r r endszPackageTargetCallbacks.endcCs|jjd||dS)Nr)r( _progress_cb)r8totalToDownload downloadedr r r rszPackageTargetCallbacks.progresscCs|jjd||dS)Nr)r(_mirrorfail_cb)r8rUrKr r r mirrorFailuresz$PackageTargetCallbacks.mirrorFailure)r=r>r?r9rVrr[ __classcell__r r )rRr rPs rPcsHeZdZfddZddZddZddZed d Zd d Z Z S) PackagePayloadcs$tt|j|t||_||_dS)N)rQr]r9rPr0r)r8rr)rRr r r9s zPackagePayload.__init__cCsRtjj}|dkrtjj}n$|jdr(dS|tjjjkr>tjj }|j j |||dS)z"End callback to librepo operation.Nz Not finished) rcallbackZ STATUS_FAILEDZ STATUS_OKr'r r PackageTargetCBZTransferStatus_ALREADYEXISTSZSTATUS_ALREADY_EXISTSrrV)r8cbdataZ lr_statusrUrTr r r rSs  zPackagePayload._end_cbcCs|jj|tjj|dS)N)rrVrr^Z STATUS_MIRROR)r8r`rrKr r r rZszPackagePayload._mirrorfail_cbc CsXy|jj||Wn@tk rRtj\}}}tj|||}tjdj|YnXdS)Nr:) rrDsysexc_info tracebackformat_exceptionloggercriticaljoin)r8r`totaldoneexc_type exc_value exc_traceback except_listr r r rWs zPackagePayload._progress_cbcCs|jS)N)r2)r8r r r r3szPackagePayload._full_sizec Cs|j}|j}tjj||d||j|j|jd}|j|j t j j |j j |d|d|d|d|d|d|d d d |j S) NT)destresumer`Z progresscbZendcbZmirrorfailurecb relative_urlrn checksum_typechecksum expectedsizebase_urlror)rpkgdirrutil ensure_dirrWrSrZupdate_target_paramsr r r r+r0)r8rruZ target_dctr r r rs   zPackagePayload._librepo_target) r=r>r?r9rSrZrWr@r3rr\r r )rRr r]s    r]c@s(eZdZddZddZeddZdS) RPMPayloadcCstjj|jjS)N)ospathbasenamerlocation)r8r r r __str__szRPMPayload.__str__cCsT|j}|j\}}tjjj|}|tjjjkr>tjt d||j |||j |j dS)Nzunsupported checksum type: %s)rprqrrrsrt) rZ returnIdSumr r r Z checksumTypeZChecksumType_UNKNOWNrewarningrr~ downloadsizebaseurl)r8rZctypeZcsumZ ctype_coder r r rys zRPMPayload._target_paramscCs|jjS)zTotal size of the download.)rr)r8r r r r2szRPMPayload.download_sizeN)r=r>r?rryr@r2r r r r rzsrzcs@eZdZfddZddZddZddZed d ZZ S) RemoteRPMPayloadcstt|jd|||_d|_||_|jjp.d|jjjd}t j |j dj dd}d|}t jj|jj|d|_tjj|jt jj|j|jjd |_dS) NZ unused_objectrr:Zbasearchutf8z commandline-r/)rQrr9remote_location remote_sizeconfZ releasever substitutionsgethashlibZsha256encodeZ hexdigestr{r|rgZcachedirrurrvrwrlstripZ local_path)r8rrrsZdigestZrepodir)rRr 
r r9szRemoteRPMPayload.__init__cCstjj|jS)N)r{r|r}r)r8r r r r)szRemoteRPMPayload.__str__c Cs^||_y|jj||Wn@tk rXtj\}}}tj|||}tjdj |YnXdS)Nr:) rrrDrarbrcrdrerfrg)r8r`rhrirjrkrlrmr r r rW,szRemoteRPMPayload._progress_cbc Cs<tjj|jjtjj|j|j dddtjj |jddd|j S)NrT) r r r r_configr{r|r}rrudirnamer0)r8r r r r5sz RemoteRPMPayload._librepo_targetcCs|jS)zTotal size of the download.)r)r8r r r r2;szRemoteRPMPayload.download_size) r=r>r?r9rrWrr@r2r\r r )rRr rs   rcszeZdZfddZddZddZddZd d Zd d Ze d dZ e ddZ e j ddZ ddZ ddZZS) MDPayloadcs.tt|j|d|_d|_d|_t|_dS)Nr:rF)rQrr9_text_download_sizefastest_mirror_runningr7mirror_failures)r8r)rRr r r9Cs zMDPayload.__init__cCstjjr|jS|jjdSdS)Nzutf-8)rpycompZPY3rr)r8r r r rJszMDPayload.__str__cCs|jS)N)r)r8r r r __unicode__PszMDPayload.__unicode__cCs||_|jj||dS)N)rr)r8r`rhrir r r rWSszMDPayload._progress_cbcCs\|tjjjkr"td|}d|_n*|tjjjkrH|jrH|rBd|nd}ndS|jj|dS)Nz,determining the fastest mirror (%s hosts).. Tz error: %s zdone. ) r r RepoCBZFastestMirrorStage_DETECTIONrrZFastestMirrorStage_STATUSrmessage)r8r`stagedatarUr r r _fastestmirror_cbWs zMDPayload._fastestmirror_cbcCs&|jj|d||f}tj|dS)Nzerror: %s (%s).)rr*redebug)r8r`rUrKrr r r _mirror_failure_cbcs  zMDPayload._mirror_failure_cbcCs|jS)N)r)r8r r r r2hszMDPayload.download_sizecCs|jS)N) _progress)r8r r r rlszMDPayload.progresscCs|dkrtjj}||_dS)N)rr^NullDownloadProgressr)r8rr r r rps cCs||_|jjdddS)NrCr)rrstart)r8textr r r rvszMDPayload.startcCsd|_|jj|dddS)Nr)rrrV)r8r r r rVzsz MDPayload.end)r=r>r?r9rrrWrrr@r2rrArrVr\r r )rRr rAs    rcsLeZdZfddZddZddZddZd d Zd d Zd dZ Z S) RepoCallbackscs tt|j||_|j|_dS)N)rQrr9r+ _md_pload)r8r )rRr r r9szRepoCallbacks.__init__cCs|jj|dS)N)rr)r8Zwhatr r r rszRepoCallbacks.startcCs|jjdS)N)rrV)r8r r r rVszRepoCallbacks.endcCs|jjd||dS)Nr)rrW)r8rXrYr r r rszRepoCallbacks.progresscCs|jjd||dS)N)rr)r8rZptrr r r fastestMirrorszRepoCallbacks.fastestMirrorcCs|jjd|||dS)Nr)rr)r8rUrKrr r r handleMirrorFailuresz!RepoCallbacks.handleMirrorFailurecCs|jjj|||||S)N)r+ _key_importrM)r8rHrIrJrKrLr r r repokeyImportszRepoCallbacks.repokeyImport) r=r>r?r9rrVrrrrr\r r )rRr rs rcseZdZeZd7fdd ZeddZeddZej ddZed d Z d d Z e j d d Z eddZ eddZ e j ddZ ddZddZfddZddZddZddZdd Zd!d"Zd#d$Zd%d&Zd'd(Zd)d*Zd+d,Zd-d.Zd/d0Zd9d5d6ZZS):r Ncstt|j||d|jjjtjj|r.|nd|j|_t t j j |_ t||_|jjj|jj|jd|_t|_d|_|jj|r|jrtn|j|r|jj|jt jjj|_|dk r|jnd|_dS)N)Zsectionparentr:T) rQr r9rthisZdisownr r r+rrr^rrrZ _callbacksZ setCallbacks_pkgdirrGrrZsetSyncStrategyZ cacheonlySYNC_ONLY_CACHE DEFAULT_SYNCZsetSubstitutionsrrZ SubstitutionsZ_substitutionsZcheck_config_file_ageZ_check_config_file_age)r8nameZ parent_conf)rRr r r9s   z Repo.__init__cCs |jjS)N)r+ZgetId)r8r r r rHszRepo.idcCs |jjS)N)r+ZgetRepoFilePath)r8r r r repofilesz Repo.repofilecCs|jj|dS)N)r+ZsetRepoFilePath)r8valuer r r rscCs|jjr|jjS|jS)N)r+ZisLocalZgetLocalBaseurl cache_pkgdir)r8r r r rus  z Repo.pkgdircCs$|jdk r|jStjj|jjtS)N)rr{r|rgr+ getCachedir_PACKAGES_RELATIVE_DIR)r8r r r rs zRepo.cache_pkgdircCs ||_dS)N)r)r8valr r r ruscCstjj|jjdS)NZpubring)r{r|rgr+r)r8r r r _pubring_dirszRepo._pubring_dircCs |jjS)N)r+ZgetLoadMetadataOther)r8r r r load_metadata_otherszRepo.load_metadata_othercCs|jj|dS)N)r+ZsetLoadMetadataOther)r8rr r r rscCs |j|jkS)N)rH)r8otherr r r __lt__sz Repo.__lt__cCsd|jj|jfS)Nz<%s %s>)rRr=rH)r8r r r __repr__sz Repo.__repr__cstt|j||dS)N)rQr __setattr__)r8rr)rRr r rszRepo.__setattr__cCs|jjdS)N)r+disable)r8r r r rsz Repo.disablecCs|jjdS)N)r+enable)r8r r r rsz Repo.enablecCs|jj|dS)a/Ask for additional repository metadata type to download. 
Given metadata_type is appended to the default metadata set when repository is downloaded. Parameters ---------- metadata_type: string Example: add_metadata_type_to_download("productid") N)r+ZaddMetadataTypeToDownload)r8 metadata_typer r r add_metadata_type_to_downloads z"Repo.add_metadata_type_to_downloadcCs|jj|dS)aIStop asking for this additional repository metadata type in download. Given metadata_type is no longer downloaded by default when this repository is downloaded. Parameters ---------- metadata_type: string Example: remove_metadata_type_from_download("productid") N)r+ZremoveMetadataTypeFromDownload)r8rr r r "remove_metadata_type_from_downloadsz'Repo.remove_metadata_type_from_downloadcCs |jj|S)zReturn path to the file with downloaded repository metadata of given type. Parameters ---------- metadata_type: string )r+ZgetMetadataPath)r8rr r r get_metadata_pathszRepo.get_metadata_pathcCs |jj|S)zReturn content of the file with downloaded repository metadata of given type. Content of compressed metadata file is returned uncompressed. Parameters ---------- metadata_type: string )r+ZgetMetadataContent)r8rr r r get_metadata_content!s zRepo.get_metadata_contentcCsd}zy|jj}Wnttjjtfk r}zP|jjrhd|j}x|jjD]}|d|7}qJWt j |t j j t|WYdd}~XnXWdt|j_Xt|j|_|S)aLoad the metadata for this repo. Depending on the configuration and the age and consistence of data available on the disk cache, either loads the metadata from the cache or downloads them from the mirror, baseurl or metalink. This method will by default not try to refresh already loaded data if called repeatedly. Returns True if this call to load() caused a fresh metadata download. Fz7Errors during downloading metadata for repository '%s':z - %sN)r+loadr errorErrorr!rrrHrerr exceptionsZ RepoErrorr"r7rNr)r8retr/rUZfailurer r r r-s  &  z Repo.loadcCsP|js|jjd|jrL|jdkr&dS|jj}|jjrDtd|}d|fSdS) a)Get the number of seconds after which the cached metadata will expire. Returns a tuple, boolean whether there even is cached metadata and the number of seconds it will expire in. Negative number means the metadata has expired already, None that it never expires. FrCTNr)TN)Fr)rr+Z loadCacheZmetadata_expireZ getExpiresInZ isExpiredmin)r8Z expirationr r r _metadata_expire_inJs     zRepo._metadata_expire_incCs ||_dS)N)r)r8Z key_importr r r _set_key_import]szRepo._set_key_importcCs ||j_dS)N)rr)r8rr r r set_progress_bar`szRepo.set_progress_barcCs |jjS)zoReturns user defined http headers. Returns ------- headers : tuple of strings )r+ZgetHttpHeaders)r8r r r get_http_headersdszRepo.get_http_headerscCs|jj|dS)aSets http headers. Sets new http headers and rewrites existing ones. Parameters ---------- headers : tuple or list of strings Example: set_http_headers(["User-Agent: Agent007", "MyFieldName: MyFieldValue"]) N)r+ZsetHttpHeaders)r8Zheadersr r r set_http_headersns zRepo.set_http_headershttpftpfilehttpscs@fdd}sdS|jj}|r,||S|jr<||jSdS)z :param location: relative location inside the repo :param schemes: list of allowed protocols. 
Default is ('http', 'ftp', 'file', 'https') :return: absolute url (string) or None csZxT|D]L}r>tjjj|d}|krRtjj|jdSqtjj|jdSqWdS)Nrr)rrZurlparser{r|rgr)Zurl_listrKr)r~schemesr r schemes_filters z,Repo.remote_location..schemes_filterN)r+Z getMirrorsr)r8r~rrZmirrorsr )r~rr r{s  zRepo.remote_location)NNrrrr)r)r=r>r?SYNC_TRY_CACHErr9r@rHrrArurrrrrrrrrrrrrrrrrrrr\r r )rRr r s6          r )T)JZ __future__rrZdnf.i18nrrZ dnf.callbackrZdnf.confZdnf.conf.substitutionsZ dnf.constZ dnf.cryptoZdnf.exceptionsZ dnf.loggingZ dnf.pycompZdnf.utilZ dnf.yum.miscZ libdnf.errorr Z libdnf.repo functoolsrZhawkeyZloggingoperatorr{reZshutilstringraZtimercrZ_MIRRORLIST_FILENAMEZ ascii_lettersZdigitsZ _REPOID_CHARSescapeZ hexdigitsZ _CACHEDIR_REZ CACHE_FILESZ getLoggerrerrr1r5objectrrDrBr^Z KeyImportrGrNr r_rPZPayloadr]rzrrr ZSyncStrategy_LAZYZ SYNC_LAZYZSyncStrategy_ONLY_CACHErZSyncStrategy_TRY_CACHErrrrZRepoConfr r r r sl       !  8&?   PK!qսVV__pycache__/repo.cpython-36.pycnu[3 fLQ@sFddlmZddlmZddlmZmZddlZddlZddl Zddl Zddl Zddl Zddl ZddlZddlZddlZddlZddlZddlZddlZddlZddlZddlZddlZddlZddlZddlZddlZddlZddlZdZ dZ!ej"ej#dZ$dej%e$ej&fZ'd e'e!fd e'e fd d Z(ej)d Z*ddZ+ddZ,d-ddZ-ddZ.Gddde/Z0Gddde1Z2Gdddej3j4Z5Gddde/Z6Gdd d ej7j8Z9Gd!d"d"ej3j:Z;Gd#d$d$e;Zej7j?j@ZAej7j?jBZCej7j?jDZEGd)d*d*ej7jFZGGd+d,d,ejHjIZ?dS).)absolute_import)unicode_literals)ucd_NpackagesZ mirrorlistz-_.:z(?P[%s]+)\-[%s]{16}z>^%s\/.*((xml|yaml)(\.gz|\.xz|\.bz2|.zck)?|asc|cachecookie|%s)$z^%s\/%s\/.+rpm$z^.+(solv|solvx)$)metadatarZdbcachednfcCstjjj|}|dkrdS|S)zAReturn index of an invalid character in the repo ID (if present).rN)libdnfrepoRepoZverifyId)Zrepo_idZ first_invalidr /usr/lib/python3.6/repo.pyrepo_id_invalidHsrcGs8x"|D]}|||}|dk r|SqWttd|dS)Nz"no matching payload factory for %s) ValueErrorr)pkgprogressZ factoriesfnploadr r r _pkg2payloadOs   rTc Csdd}|jjddt||dD}t}ytjjjtjj||Wn,t k rv}zt ||_ WYdd}~XnX|j |jj |_xj|D]b}|j}|dks|jdrq|j} | j} | j} |dkr|jj| q| jjj|g|j| <qW|S)NcSs t|d S)NZdelta)hasattr)payloadr r r _download_sort_keyYsz._download_payloads.._download_sort_keycSsg|] }|jqSr )_librepo_target).0rr r r ]sz&_download_payloads..)keyz Not finishedzAlready downloaded)errclearsorted_DownloadErrorsr r PackageTargetZdownloadPackagesZVectorPPackageTarget RuntimeErrorstr_fatalwaitcopy _recoverableZgetErr startswithZ getCallbacks package_ploadr_skippedadd_repoZexpire_pkg_irrecoverable) payloadsZdrpmZ fail_fastrZtargetserrseZtgtr callbacksrrr r r _download_payloadsWs0     r1cCsL|\}}x:|D]2}|j}||kr,||j7}q||j7}||j7}qW||fS)N)r download_size _full_size)Zsavingr-r.realZfullrrr r r _update_savingxs   r5c@s>eZdZddZddZeddZejddZdd Zd S) rcCsi|_i|_d|_t|_dS)N)r,_val_recoverabler#setr))selfr r r __init__sz_DownloadErrors.__init__cCs"|jr |jS|jrd|jgiSiS)N)r,r#)r8r r r _irrecoverables  z_DownloadErrors._irrecoverablecCs|jS)N)r6)r8r r r r&sz_DownloadErrors._recoverablecCs ||_dS)N)r6)r8Znew_dctr r r r&scCs|j|jkrdS|jS)Nr)rr)r2)r8rr r r _bandwidth_useds z_DownloadErrors._bandwidth_usedN) __name__ __module__ __qualname__r9r;propertyr&setterr<r r r r rs  rc@seZdZddZdS)_DetailedLibrepoErrorcCs,tj||jd|_|jd|_||_dS)Nr) Exceptionr9argsZ librepo_codeZ librepo_msg source_url)r8Z librepo_errrFr r r r9s   z_DetailedLibrepoError.__init__N)r=r>r?r9r r r r rBsrBc@seZdZddZdS)_NullKeyImportcCsdS)NTr )r8iduserid fingerprinturl timestampr r r _confirmsz_NullKeyImport._confirmN)r=r>r?rMr r r r rGsrGc@s eZdZddZeddZdS)MetadatacCs ||_dS)N)r+)r8r r r r r9szMetadata.__init__cCs |jjS)N)r+fresh)r8r r r rOszMetadata.freshN)r=r>r?r9r@rOr r r r rNsrNcs4eZdZfddZddZddZddZZS) 
PackageTargetCallbackscstt|j||_dS)N)superrPr9r()r8r() __class__r r r9szPackageTargetCallbacks.__init__cCs|jjd||dS)Nr)r(_end_cb)r8statusmsgr r r endszPackageTargetCallbacks.endcCs|jjd||dS)Nr)r( _progress_cb)r8totalToDownload downloadedr r r rszPackageTargetCallbacks.progresscCs|jjd||dS)Nr)r(_mirrorfail_cb)r8rUrKr r r mirrorFailuresz$PackageTargetCallbacks.mirrorFailure)r=r>r?r9rVrr[ __classcell__r r )rRr rPs rPcsHeZdZfddZddZddZddZed d Zd d Z Z S) PackagePayloadcs$tt|j|t||_||_dS)N)rQr]r9rPr0r)r8rr)rRr r r9s zPackagePayload.__init__cCsRtjj}|dkrtjj}n$|jdr(dS|tjjjkr>tjj }|j j |||dS)z"End callback to librepo operation.Nz Not finished) rcallbackZ STATUS_FAILEDZ STATUS_OKr'r r PackageTargetCBZTransferStatus_ALREADYEXISTSZSTATUS_ALREADY_EXISTSrrV)r8cbdataZ lr_statusrUrTr r r rSs  zPackagePayload._end_cbcCs|jj|tjj|dS)N)rrVrr^Z STATUS_MIRROR)r8r`rrKr r r rZszPackagePayload._mirrorfail_cbc CsXy|jj||Wn@tk rRtj\}}}tj|||}tjdj|YnXdS)Nr:) rrDsysexc_info tracebackformat_exceptionloggercriticaljoin)r8r`totaldoneexc_type exc_value exc_traceback except_listr r r rWs zPackagePayload._progress_cbcCs|jS)N)r2)r8r r r r3szPackagePayload._full_sizec Cs|j}|j}tjj||d||j|j|jd}|j|j t j j |j j |d|d|d|d|d|d|d d d |j S) NT)destresumer`Z progresscbZendcbZmirrorfailurecb relative_urlrn checksum_typechecksum expectedsizebase_urlror)rpkgdirrutil ensure_dirrWrSrZupdate_target_paramsr r r r+r0)r8rruZ target_dctr r r rs   zPackagePayload._librepo_target) r=r>r?r9rSrZrWr@r3rr\r r )rRr r]s    r]c@s(eZdZddZddZeddZdS) RPMPayloadcCstjj|jjS)N)ospathbasenamerlocation)r8r r r __str__szRPMPayload.__str__cCsT|j}|j\}}tjjj|}|tjjjkr>tjt d||j |||j |j dS)Nzunsupported checksum type: %s)rprqrrrsrt) rZ returnIdSumr r r Z checksumTypeZChecksumType_UNKNOWNrewarningrr~ downloadsizebaseurl)r8rZctypeZcsumZ ctype_coder r r rys zRPMPayload._target_paramscCs|jjS)zTotal size of the download.)rr)r8r r r r2szRPMPayload.download_sizeN)r=r>r?rryr@r2r r r r rzsrzcs@eZdZfddZddZddZddZed d ZZ S) RemoteRPMPayloadcstt|jd|||_d|_||_|jjp.d|jjjd}t j |j dj dd}d|}t jj|jj|d|_tjj|jt jj|j|jjd |_dS) NZ unused_objectrr:Zbasearchutf8z commandline-r/)rQrr9remote_location remote_sizeconfZ releasever substitutionsgethashlibZsha256encodeZ hexdigestr{r|rgZcachedirrurrvrwrlstripZ local_path)r8rrrsZdigestZrepodir)rRr r r9szRemoteRPMPayload.__init__cCstjj|jS)N)r{r|r}r)r8r r r r)szRemoteRPMPayload.__str__c Cs^||_y|jj||Wn@tk rXtj\}}}tj|||}tjdj |YnXdS)Nr:) rrrDrarbrcrdrerfrg)r8r`rhrirjrkrlrmr r r rW,szRemoteRPMPayload._progress_cbc Cs<tjj|jjtjj|j|j dddtjj |jddd|j S)NrT) r r r r_configr{r|r}rrudirnamer0)r8r r r r5sz RemoteRPMPayload._librepo_targetcCs|jS)zTotal size of the download.)r)r8r r r r2;szRemoteRPMPayload.download_size) r=r>r?r9rrWrr@r2r\r r )rRr rs   rcszeZdZfddZddZddZddZd d Zd d Ze d dZ e ddZ e j ddZ ddZ ddZZS) MDPayloadcs.tt|j|d|_d|_d|_t|_dS)Nr:rF)rQrr9_text_download_sizefastest_mirror_runningr7mirror_failures)r8r)rRr r r9Cs zMDPayload.__init__cCstjjr|jS|jjdSdS)Nzutf-8)rpycompZPY3rr)r8r r r rJszMDPayload.__str__cCs|jS)N)r)r8r r r __unicode__PszMDPayload.__unicode__cCs||_|jj||dS)N)rr)r8r`rhrir r r rWSszMDPayload._progress_cbcCs\|tjjjkr"td|}d|_n*|tjjjkrH|jrH|rBd|nd}ndS|jj|dS)Nz,determining the fastest mirror (%s hosts).. Tz error: %s zdone. 
) r r RepoCBZFastestMirrorStage_DETECTIONrrZFastestMirrorStage_STATUSrmessage)r8r`stagedatarUr r r _fastestmirror_cbWs zMDPayload._fastestmirror_cbcCs&|jj|d||f}tj|dS)Nzerror: %s (%s).)rr*redebug)r8r`rUrKrr r r _mirror_failure_cbcs  zMDPayload._mirror_failure_cbcCs|jS)N)r)r8r r r r2hszMDPayload.download_sizecCs|jS)N) _progress)r8r r r rlszMDPayload.progresscCs|dkrtjj}||_dS)N)rr^NullDownloadProgressr)r8rr r r rps cCs||_|jjdddS)NrCr)rrstart)r8textr r r rvszMDPayload.startcCsd|_|jj|dddS)Nr)rrrV)r8r r r rVzsz MDPayload.end)r=r>r?r9rrrWrrr@r2rrArrVr\r r )rRr rAs    rcsLeZdZfddZddZddZddZd d Zd d Zd dZ Z S) RepoCallbackscs tt|j||_|j|_dS)N)rQrr9r+ _md_pload)r8r )rRr r r9szRepoCallbacks.__init__cCs|jj|dS)N)rr)r8Zwhatr r r rszRepoCallbacks.startcCs|jjdS)N)rrV)r8r r r rVszRepoCallbacks.endcCs|jjd||dS)Nr)rrW)r8rXrYr r r rszRepoCallbacks.progresscCs|jjd||dS)N)rr)r8rZptrr r r fastestMirrorszRepoCallbacks.fastestMirrorcCs|jjd|||dS)Nr)rr)r8rUrKrr r r handleMirrorFailuresz!RepoCallbacks.handleMirrorFailurecCs|jjj|||||S)N)r+ _key_importrM)r8rHrIrJrKrLr r r repokeyImportszRepoCallbacks.repokeyImport) r=r>r?r9rrVrrrrr\r r )rRr rs rcseZdZeZd7fdd ZeddZeddZej ddZed d Z d d Z e j d d Z eddZ eddZ e j ddZ ddZddZfddZddZddZddZdd Zd!d"Zd#d$Zd%d&Zd'd(Zd)d*Zd+d,Zd-d.Zd/d0Zd9d5d6ZZS):r Ncstt|j||d|jjjtjj|r.|nd|j|_t t j j |_ t||_|jjj|jj|jd|_t|_d|_|jj|r|jrtn|j|r|jj|jt jjj|_|dk r|jnd|_dS)N)Zsectionparentr:T) rQr r9rthisZdisownr r r+rrr^rrrZ _callbacksZ setCallbacks_pkgdirrGrrZsetSyncStrategyZ cacheonlySYNC_ONLY_CACHE DEFAULT_SYNCZsetSubstitutionsrrZ SubstitutionsZ_substitutionsZcheck_config_file_ageZ_check_config_file_age)r8nameZ parent_conf)rRr r r9s   z Repo.__init__cCs |jjS)N)r+ZgetId)r8r r r rHszRepo.idcCs |jjS)N)r+ZgetRepoFilePath)r8r r r repofilesz Repo.repofilecCs|jj|dS)N)r+ZsetRepoFilePath)r8valuer r r rscCs|jjr|jjS|jS)N)r+ZisLocalZgetLocalBaseurl cache_pkgdir)r8r r r rus  z Repo.pkgdircCs$|jdk r|jStjj|jjtS)N)rr{r|rgr+ getCachedir_PACKAGES_RELATIVE_DIR)r8r r r rs zRepo.cache_pkgdircCs ||_dS)N)r)r8valr r r ruscCstjj|jjdS)NZpubring)r{r|rgr+r)r8r r r _pubring_dirszRepo._pubring_dircCs |jjS)N)r+ZgetLoadMetadataOther)r8r r r load_metadata_otherszRepo.load_metadata_othercCs|jj|dS)N)r+ZsetLoadMetadataOther)r8rr r r rscCs |j|jkS)N)rH)r8otherr r r __lt__sz Repo.__lt__cCsd|jj|jfS)Nz<%s %s>)rRr=rH)r8r r r __repr__sz Repo.__repr__cstt|j||dS)N)rQr __setattr__)r8rr)rRr r rszRepo.__setattr__cCs|jjdS)N)r+disable)r8r r r rsz Repo.disablecCs|jjdS)N)r+enable)r8r r r rsz Repo.enablecCs|jj|dS)a/Ask for additional repository metadata type to download. Given metadata_type is appended to the default metadata set when repository is downloaded. Parameters ---------- metadata_type: string Example: add_metadata_type_to_download("productid") N)r+ZaddMetadataTypeToDownload)r8 metadata_typer r r add_metadata_type_to_downloads z"Repo.add_metadata_type_to_downloadcCs|jj|dS)aIStop asking for this additional repository metadata type in download. Given metadata_type is no longer downloaded by default when this repository is downloaded. Parameters ---------- metadata_type: string Example: remove_metadata_type_from_download("productid") N)r+ZremoveMetadataTypeFromDownload)r8rr r r "remove_metadata_type_from_downloadsz'Repo.remove_metadata_type_from_downloadcCs |jj|S)zReturn path to the file with downloaded repository metadata of given type. Parameters ---------- metadata_type: string )r+ZgetMetadataPath)r8rr r r get_metadata_pathszRepo.get_metadata_pathcCs |jj|S)zReturn content of the file with downloaded repository metadata of given type. 
Content of compressed metadata file is returned uncompressed. Parameters ---------- metadata_type: string )r+ZgetMetadataContent)r8rr r r get_metadata_content!s zRepo.get_metadata_contentcCsd}zy|jj}Wnttjjtfk r}zP|jjrhd|j}x|jjD]}|d|7}qJWt j |t j j t|WYdd}~XnXWdt|j_Xt|j|_|S)aLoad the metadata for this repo. Depending on the configuration and the age and consistence of data available on the disk cache, either loads the metadata from the cache or downloads them from the mirror, baseurl or metalink. This method will by default not try to refresh already loaded data if called repeatedly. Returns True if this call to load() caused a fresh metadata download. Fz7Errors during downloading metadata for repository '%s':z - %sN)r+loadr errorErrorr!rrrHrerr exceptionsZ RepoErrorr"r7rNr)r8retr/rUZfailurer r r r-s  &  z Repo.loadcCsP|js|jjd|jrL|jdkr&dS|jj}|jjrDtd|}d|fSdS) a)Get the number of seconds after which the cached metadata will expire. Returns a tuple, boolean whether there even is cached metadata and the number of seconds it will expire in. Negative number means the metadata has expired already, None that it never expires. FrCTNr)TN)Fr)rr+Z loadCacheZmetadata_expireZ getExpiresInZ isExpiredmin)r8Z expirationr r r _metadata_expire_inJs     zRepo._metadata_expire_incCs ||_dS)N)r)r8Z key_importr r r _set_key_import]szRepo._set_key_importcCs ||j_dS)N)rr)r8rr r r set_progress_bar`szRepo.set_progress_barcCs |jjS)zoReturns user defined http headers. Returns ------- headers : tuple of strings )r+ZgetHttpHeaders)r8r r r get_http_headersdszRepo.get_http_headerscCs|jj|dS)aSets http headers. Sets new http headers and rewrites existing ones. Parameters ---------- headers : tuple or list of strings Example: set_http_headers(["User-Agent: Agent007", "MyFieldName: MyFieldValue"]) N)r+ZsetHttpHeaders)r8Zheadersr r r set_http_headersns zRepo.set_http_headershttpftpfilehttpscs@fdd}sdS|jj}|r,||S|jr<||jSdS)z :param location: relative location inside the repo :param schemes: list of allowed protocols. Default is ('http', 'ftp', 'file', 'https') :return: absolute url (string) or None csZxT|D]L}r>tjjj|d}|krRtjj|jdSqtjj|jdSqWdS)Nrr)rrZurlparser{r|rgr)Zurl_listrKr)r~schemesr r schemes_filters z,Repo.remote_location..schemes_filterN)r+Z getMirrorsr)r8r~rrZmirrorsr )r~rr r{s  zRepo.remote_location)NNrrrr)r)r=r>r?SYNC_TRY_CACHErr9r@rHrrArurrrrrrrrrrrrrrrrrrrr\r r )rRr r s6          r )T)JZ __future__rrZdnf.i18nrrZ dnf.callbackrZdnf.confZdnf.conf.substitutionsZ dnf.constZ dnf.cryptoZdnf.exceptionsZ dnf.loggingZ dnf.pycompZdnf.utilZ dnf.yum.miscZ libdnf.errorr Z libdnf.repo functoolsrZhawkeyZloggingoperatorr{reZshutilstringraZtimercrZ_MIRRORLIST_FILENAMEZ ascii_lettersZdigitsZ _REPOID_CHARSescapeZ hexdigitsZ _CACHEDIR_REZ CACHE_FILESZ getLoggerrerrr1r5objectrrDrBr^Z KeyImportrGrNr r_rPZPayloadr]rzrrr ZSyncStrategy_LAZYZ SYNC_LAZYZSyncStrategy_ONLY_CACHErZSyncStrategy_TRY_CACHErrrrZRepoConfr r r r sl       !  8&?   PK! 
[__pycache__/repodict.cpython-36.opt-1.pyc and __pycache__/repodict.cpython-36.pyc: two identical compiled copies of dnf/repodict.py defining class RepoDict (a dict of dnf.repo.Repo objects) with add ("Repository %s is listed more than once in the configuration"), all, _any_enabled, _enable_sub_repos ("enabling %s repository"), add_new_repo ("Creates new repo object and add it into RepoDict. Variables in provided values will be automatically substituted using conf.substitutions (like $releasever, ...); repoid: string, conf: dnf Base().conf object, baseurl: list of strings, kwargs: keys and values that will be used to setattr on the dnf.repo.Repo() object; returns the dnf.repo.Repo() object"), enable_debug_repos, enable_source_repos, get_matching, iter_enabled, items ("return repos sorted by priority" and cost), keys and values.]
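A usage sketch matching the add_new_repo() signature recoverable above (the repo id, URL and gpgcheck value are placeholders):

import dnf

with dnf.Base() as base:
    base.read_all_repos()
    base.repos.add_new_repo(
        'example-repo',                 # repoid
        base.conf,                      # dnf Base().conf object
        baseurl=['https://example.com/repo/$releasever/$basearch/'],
        gpgcheck=False,                 # extra kwargs are setattr'd on the Repo object
    )
    base.fill_sack()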
This option is currently not supported for downgrade and distro-sync commands) installonlyinstallonly_limitallow_vendor_changeloggerZwarningr)r rrrrrr _configure(szSack._configurecCstjj||S)z'Factory function returning a DNF Query.)rqueryZQuery)r flagsrrrr1sz Sack.query)NrN)r)__name__ __module__ __qualname__r rr __classcell__rr)r rr"s  rc CsT|jj}tjj|ttjj||jjd||jj t j j |jj tjj|jjdkdS)Narch )ZpkgclsZ pkginitvalrcachedirZrootdirZlogfileZlogdebug)ZconfrrutilZ ensure_dirrpackageZPackageZ substitutionsZ installrootospathjoinZlogdirconstZ LOG_HAWKEYZ logfilelevel)baserrrr _build_sack7s   r%c Cs2t|}y|jddWntk r,YnX|S)NF)Z build_cache)r%Zload_system_repoIOError)r$Zsackrrr _rpmdb_sackBs r'cCst|S)z Returns a new instance of sack containing only installed packages (@System repo) Useful to get list of the installed RPMs after transaction. )r')r$rrr rpmdb_sackMsr()Z __future__rrZdnf.utilrZ dnf.packageZ dnf.queryZloggingZhawkeyr Z dnf.pycomprZdnf.i18nrZ getLoggerrrr%r'r(rrrrs       PK!$__pycache__/sack.cpython-36.pycnu[3 ft` @sddlmZddlmZddlZddlZddlZddlZddlZddl Z ddl m Z ddl m Z ejdZGdddejZd d Zd d Zd dZdS))absolute_import)unicode_literalsN) basestring)_dnfcs0eZdZfddZd ddZd ddZZS) Sackcstt|j||dS)N)superr__init__)selfargskwargs) __class__/usr/lib/python3.6/sack.pyr %sz Sack.__init__NrcCs8|r ||_||_|dk r4||_|dkr4tjtddS)NFznallow_vendor_change is disabled. This option is currently not supported for downgrade and distro-sync commands) installonlyinstallonly_limitallow_vendor_changeloggerZwarningr)r rrrrrr _configure(szSack._configurecCstjj||S)z'Factory function returning a DNF Query.)rqueryZQuery)r flagsrrrr1sz Sack.query)NrN)r)__name__ __module__ __qualname__r rr __classcell__rr)r rr"s  rc CsT|jj}tjj|ttjj||jjd||jj t j j |jj tjj|jjdkdS)Narch )ZpkgclsZ pkginitvalrcachedirZrootdirZlogfileZlogdebug)ZconfrrutilZ ensure_dirrpackageZPackageZ substitutionsZ installrootospathjoinZlogdirconstZ LOG_HAWKEYZ logfilelevel)baserrrr _build_sack7s   r%c Cs2t|}y|jddWntk r,YnX|S)NF)Z build_cache)r%Zload_system_repoIOError)r$Zsackrrr _rpmdb_sackBs r'cCst|S)z Returns a new instance of sack containing only installed packages (@System repo) Useful to get list of the installed RPMs after transaction. 
)r')r$rrr rpmdb_sackMsr()Z __future__rrZdnf.utilrZ dnf.packageZ dnf.queryZloggingZhawkeyr Z dnf.pycomprZdnf.i18nrZ getLoggerrrr%r'r(rrrrs       PK!`x)__pycache__/selector.cpython-36.opt-1.pycnu[3 ft`e@s(ddlmZddlmZddlmZdS))absolute_import)unicode_literals)SelectorN)Z __future__rrZhawkeyrrr/usr/lib/python3.6/selector.pys  PK!`x#__pycache__/selector.cpython-36.pycnu[3 ft`e@s(ddlmZddlmZddlmZdS))absolute_import)unicode_literals)SelectorN)Z __future__rrZhawkeyrrr/usr/lib/python3.6/selector.pys  PK!6!(__pycache__/subject.cpython-36.opt-1.pycnu[3 ft`~@s4ddlmZddlmZddlmZddlmZdS))absolute_import)print_function)unicode_literals)SubjectN)Z __future__rrrZhawkeyrrr/usr/lib/python3.6/subject.pys   PK!6!"__pycache__/subject.cpython-36.pycnu[3 ft`~@s4ddlmZddlmZddlmZddlmZdS))absolute_import)print_function)unicode_literals)SubjectN)Z __future__rrrZhawkeyrrr/usr/lib/python3.6/subject.pys   PK!'&sgg,__pycache__/transaction.cpython-36.opt-1.pycnu[3 ft`-@sddlmZddlmZddlZddlmZmZejj Z ejj Z ejj ZejjZejjZejjZejjZejjZejjZejjZeZdZdZdZ dZ!d Z"ejj ejj ejjejjejjgZ#ejj ejjejjejjgZ$e ed d e ed eed d eedeedeed deed eedeed deed eed eede ede!ediZ%e de dededededededededed ede de!diZ&dS) )absolute_import)unicode_literalsN)_C_efgZ currentlyZ DowngradingZCleanupZ InstallingZ ObsoletingZ ReinstallingZErasingZ UpgradingZ VerifyingzRunning scriptletZ PreparingZ DowngradeZ DowngradedZ InstalledZObsoleteZ ObsoletedZ ReinstallZ ReinstalledZEraseZUpgradeZUpgradedZVerified)'Z __future__rrZlibdnf.transactionZlibdnfZdnf.i18nrrZ transactionZTransactionItemAction_DOWNGRADEZ PKG_DOWNGRADEZ TransactionItemAction_DOWNGRADEDZPKG_DOWNGRADEDZTransactionItemAction_INSTALLZ PKG_INSTALLZTransactionItemAction_OBSOLETEZ PKG_OBSOLETEZTransactionItemAction_OBSOLETEDZ PKG_OBSOLETEDZTransactionItemAction_REINSTALLZ PKG_REINSTALLZ!TransactionItemAction_REINSTALLEDZPKG_REINSTALLEDZTransactionItemAction_REMOVEZ PKG_REMOVEZTransactionItemAction_UPGRADEZ PKG_UPGRADEZTransactionItemAction_UPGRADEDZ PKG_UPGRADEDZ PKG_ERASEZ PKG_CLEANUPZ PKG_VERIFYZ PKG_SCRIPTLETZTRANS_PREPARATIONZ TRANS_POSTZFORWARD_ACTIONSZBACKWARD_ACTIONSZACTIONSZ FILE_ACTIONSr r !/usr/lib/python3.6/transaction.pysp         PK!'&sgg&__pycache__/transaction.cpython-36.pycnu[3 ft`-@sddlmZddlmZddlZddlmZmZejj Z ejj Z ejj ZejjZejjZejjZejjZejjZejjZejjZeZdZdZdZ dZ!d Z"ejj ejj ejjejjejjgZ#ejj ejjejjejjgZ$e ed d e ed eed d eedeedeed deed eedeed deed eed eede ede!ediZ%e de dededededededededed ede de!diZ&dS) )absolute_import)unicode_literalsN)_C_efgZ currentlyZ DowngradingZCleanupZ InstallingZ ObsoletingZ ReinstallingZErasingZ UpgradingZ VerifyingzRunning scriptletZ PreparingZ DowngradeZ DowngradedZ InstalledZObsoleteZ ObsoletedZ ReinstallZ ReinstalledZEraseZUpgradeZUpgradedZVerified)'Z __future__rrZlibdnf.transactionZlibdnfZdnf.i18nrrZ transactionZTransactionItemAction_DOWNGRADEZ PKG_DOWNGRADEZ TransactionItemAction_DOWNGRADEDZPKG_DOWNGRADEDZTransactionItemAction_INSTALLZ PKG_INSTALLZTransactionItemAction_OBSOLETEZ PKG_OBSOLETEZTransactionItemAction_OBSOLETEDZ PKG_OBSOLETEDZTransactionItemAction_REINSTALLZ PKG_REINSTALLZ!TransactionItemAction_REINSTALLEDZPKG_REINSTALLEDZTransactionItemAction_REMOVEZ PKG_REMOVEZTransactionItemAction_UPGRADEZ PKG_UPGRADEZTransactionItemAction_UPGRADEDZ PKG_UPGRADEDZ PKG_ERASEZ PKG_CLEANUPZ PKG_VERIFYZ PKG_SCRIPTLETZTRANS_PREPARATIONZ TRANS_POSTZFORWARD_ACTIONSZBACKWARD_ACTIONSZACTIONSZ FILE_ACTIONSr r !/usr/lib/python3.6/transaction.pysp         PK!gL_D_D/__pycache__/transaction_sr.cpython-36.opt-1.pycnu[3 faf@sddlmZddlmZddlmZddlZddlZddlmZddlZ ddl Z dZ dZ de e fZ Gddde 
jjZGd d d e jjZGd d d eZd dZddZGdddeZdS))absolute_import)print_function)unicode_literalsN)_z%s.%scseZdZfddZZS)TransactionErrorcstt|j|dS)N)superr__init__)selfmsg) __class__$/usr/lib/python3.6/transaction_sr.pyr/szTransactionError.__init__)__name__ __module__ __qualname__r __classcell__r r )r r r.srcseZdZfddZZS)TransactionReplayErrorcsv||_t|ttfr||_n|g|_|r:tdj|d}ntd}x|jD]}|dt|7}qJWtt |j |dS)z :param filename: The name of the transaction file being replayed :param errors: a list of error classes or a string with an error description zWThe following problems occurred while replaying the transaction from file "{filename}":)filenameztk r,} z ttd j| jd d WYdd} ~ XnX|S) NzGroup id '%s' is not available.r-zgroups.packages.namerer.zgroups.packages.installedbooleanr/zgroups.packages.package_typez.Missing object key "{key}" in groups.packages.r)rd)rDcompsZ _group_by_idrarHrrr;newr-ui_namer^rboolZ addPackager7r8stringToCompsPackageTyperErrorrrtrru) r group_id pkg_typespkgsZ comps_group swdb_groupr=r-r.r/r$r r r _create_swdb_groupvs* &*z$TransactionReplay._create_swdb_groupcCs*|j|||}|dk r&|jjjj|dS)N)rrDrr;r~)r rrrrr r r _swdb_group_installsz%TransactionReplay._swdb_group_installcCsT|jjjj|s*|j|jtd|dS|j|||}|dk rP|jjjj|dS)NzGroup id '%s' is not installed.) rDrr;r\rarFrrupgrade)r rrrrr r r _swdb_group_upgrades z%TransactionReplay._swdb_group_upgradecCsT|jjjj|s*|j|jtd|dS|j|||}|dk rP|jjjj|dS)NzGroup id '%s' is not installed.) rDrr;r\rarFrr downgrade)r rrrrr r r _swdb_group_downgrades z'TransactionReplay._swdb_group_downgradecCsT|jjjj|s*|j|jtd|dS|j|||}|dk rP|jjjj|dS)NzGroup id '%s' is not installed.) rDrr;r\rarFrrremove)r rrrrr r r _swdb_group_removes z$TransactionReplay._swdb_group_removec Csd|jjj|}|s,|j|jtd|dS|jjjj||j |j |}yx|D]}|d}|j |t dd|d}|j |t dd|d} |j | t d dytjj| } Wn2tjjk r} ztt | WYdd} ~ XnX| tjjtjjfkr ttd j|dd |j||| qNWWn>tk r^} z ttd j| jd dWYdd} ~ XnX|S)Nz%Environment id '%s' is not available.r*zenvironments.groups.idrer.zenvironments.groups.installedrr1zenvironments.groups.group_typezlInvalid value "{group_type}" of environments.groups.group_type, only "mandatory" or "optional" is supported.)r1z2Missing object key "{key}" in environments.groups.r)rd)rDrZ_environment_by_idrarHrrr>rr-rr^rrr7r8rrrrZCompsPackageType_MANDATORYZCompsPackageType_OPTIONALrZaddGrouprtru) r env_idrr0Z comps_envswdb_envr@r*r.r1r$r r r _create_swdb_environments8 *z*TransactionReplay._create_swdb_environmentcCs*|j|||}|dk r&|jjjj|dS)N)rrDrr>r~)r rrr0rr r r _swdb_environment_installsz+TransactionReplay._swdb_environment_installcCsT|jjjj|s*|j|jtd|dS|j|||}|dk rP|jjjj|dS)Nz%Environment id '%s' is not installed.) rDrr>r\rarFrrr)r rrr0rr r r _swdb_environment_upgrades z+TransactionReplay._swdb_environment_upgradecCsT|jjjj|s*|j|jtd|dS|j|||}|dk rP|jjjj|dS)Nz%Environment id '%s' is not installed.) rDrr>r\rarFrrr)r rrr0rr r r _swdb_environment_downgrades z-TransactionReplay._swdb_environment_downgradecCsT|jjjj|s*|j|jtd|dS|j|||}|dk rP|jjjj|dS)Nz%Environment id '%s' is not installed.) 
rDrr>r\rarFrrr)r rrr0rr r r _swdb_environment_removes z*TransactionReplay._swdb_environment_removecCs|jS)z> :returns: the loaded data of the transaction )rZ)r r r r get_dataszTransactionReplay.get_datacCs|jS)zW :returns: an array of warnings gathered during the transaction replay )rN)r r r r get_warnings szTransactionReplay.get_warningsc Cs|j}g}xJ|jD]@}y|j|Wqtk rP}z|j|WYdd}~XqXqWx|jD]}y |d}|d}ytjj|d}Wn:tj j k r}z|jtt |w`WYdd}~XnX|dkr|j |||dn|dkr|j |||dnl|dks|d kr,|j|||dnD|d ks@|d krT|j|||dn|jttd j||d Wq`tk r}z&|jttdj|jddWYdd}~Xq`tk r}z|j|WYdd}~Xq`Xq`Wx|jD]} y| d}| d} ytjj| d}Wn>tj j k r^}z|jtt |wWYdd}~XnX|dkr~|j| || dn|dkr|j| || dnl|dks|d kr|j| || dnD|d ks|d kr|j| || dn|jttdj|| dWnptk rN}z&|jttdj|jddWYdd}~Xn.tk rz}z|j|WYdd}~XnXqW|rt||dS)z* Replays the transaction. Nr&r*r,rjr+rnrkrprlrqz@Unexpected value of group action "{action}" for group "{group}".)r&r;z&Missing object key "{key}" in a group.r)rdr0zJUnexpected value of environment action "{action}" for environment "{env}".)r&r>z-Missing object key "{key}" in an environment.)rEr]rrr6r_r7r8rrrrrrrrrrrtrur`rrrrr) r rWrrr$r<r&rrr?rr r r runsv   *   *"zTransactionReplay.runcCs8|jjs dSg}x|jjD]}y |j}Wn$tk rP}zwWYdd}~XnXt|}||jkr|j s|jtjj tjj tjj fkrt dj |d}|js|jt|n |jj|y>|j|}|jtjjtjjfkstjj||jdkr||_Wqtk r}zWYdd}~XqXqW|r4t|j|dS)z Sets reasons in the transaction history to values from the stored transaction. Also serves to check whether additional packages were pulled in by the transaction, which results in an error (unless ignore_extras is True). NzgPackage nevra "{nevra}", which is not present in the transaction file, was pulled into the transaction.)r'r)rDr8r=rtrrLrFr&r7ZTransactionItemAction_UPGRADEDZ TransactionItemAction_DOWNGRADEDZ!TransactionItemAction_REINSTALLEDrrrGr6rrNrMZTransactionItemAction_INSTALLZTransactionItemAction_REMOVEZTransactionItemReasonComparer(rrE)r rr:r=r$r'r Z replay_reasonr r r post_transactionds<        z"TransactionReplay.post_transaction)rCNFFF)rrr__doc__rrOrPrar^r[rrrrrrrrrrrrrrrr r r r rBs4    a   (   SrB)Z __future__rrrr7rwZdnf.i18nrZdnf.exceptionsr}rTr"Z VERSION_MINORr4 exceptionsrrrrr%rAobjectrBr r r r s     KPK!gL_D_D)__pycache__/transaction_sr.cpython-36.pycnu[3 faf@sddlmZddlmZddlmZddlZddlZddlmZddlZ ddl Z dZ dZ de e fZ Gddde jjZGd d d e jjZGd d d eZd dZddZGdddeZdS))absolute_import)print_function)unicode_literalsN)_z%s.%scseZdZfddZZS)TransactionErrorcstt|j|dS)N)superr__init__)selfmsg) __class__$/usr/lib/python3.6/transaction_sr.pyr/szTransactionError.__init__)__name__ __module__ __qualname__r __classcell__r r )r r r.srcseZdZfddZZS)TransactionReplayErrorcsv||_t|ttfr||_n|g|_|r:tdj|d}ntd}x|jD]}|dt|7}qJWtt |j |dS)z :param filename: The name of the transaction file being replayed :param errors: a list of error classes or a string with an error description zWThe following problems occurred while replaying the transaction from file "{filename}":)filenameztk r,} z ttd j| jd d WYdd} ~ XnX|S) NzGroup id '%s' is not available.r-zgroups.packages.namerer.zgroups.packages.installedbooleanr/zgroups.packages.package_typez.Missing object key "{key}" in groups.packages.r)rd)rDcompsZ _group_by_idrarHrrr;newr-ui_namer^rboolZ addPackager7r8stringToCompsPackageTyperErrorrrtrru) r group_id pkg_typespkgsZ comps_group swdb_groupr=r-r.r/r$r r r _create_swdb_groupvs* &*z$TransactionReplay._create_swdb_groupcCs*|j|||}|dk r&|jjjj|dS)N)rrDrr;r~)r rrrrr r r _swdb_group_installsz%TransactionReplay._swdb_group_installcCsT|jjjj|s*|j|jtd|dS|j|||}|dk rP|jjjj|dS)NzGroup id '%s' is not installed.) 
rDrr;r\rarFrrupgrade)r rrrrr r r _swdb_group_upgrades z%TransactionReplay._swdb_group_upgradecCsT|jjjj|s*|j|jtd|dS|j|||}|dk rP|jjjj|dS)NzGroup id '%s' is not installed.) rDrr;r\rarFrr downgrade)r rrrrr r r _swdb_group_downgrades z'TransactionReplay._swdb_group_downgradecCsT|jjjj|s*|j|jtd|dS|j|||}|dk rP|jjjj|dS)NzGroup id '%s' is not installed.) rDrr;r\rarFrrremove)r rrrrr r r _swdb_group_removes z$TransactionReplay._swdb_group_removec Csd|jjj|}|s,|j|jtd|dS|jjjj||j |j |}yx|D]}|d}|j |t dd|d}|j |t dd|d} |j | t d dytjj| } Wn2tjjk r} ztt | WYdd} ~ XnX| tjjtjjfkr ttd j|dd |j||| qNWWn>tk r^} z ttd j| jd dWYdd} ~ XnX|S)Nz%Environment id '%s' is not available.r*zenvironments.groups.idrer.zenvironments.groups.installedrr1zenvironments.groups.group_typezlInvalid value "{group_type}" of environments.groups.group_type, only "mandatory" or "optional" is supported.)r1z2Missing object key "{key}" in environments.groups.r)rd)rDrZ_environment_by_idrarHrrr>rr-rr^rrr7r8rrrrZCompsPackageType_MANDATORYZCompsPackageType_OPTIONALrZaddGrouprtru) r env_idrr0Z comps_envswdb_envr@r*r.r1r$r r r _create_swdb_environments8 *z*TransactionReplay._create_swdb_environmentcCs*|j|||}|dk r&|jjjj|dS)N)rrDrr>r~)r rrr0rr r r _swdb_environment_installsz+TransactionReplay._swdb_environment_installcCsT|jjjj|s*|j|jtd|dS|j|||}|dk rP|jjjj|dS)Nz%Environment id '%s' is not installed.) rDrr>r\rarFrrr)r rrr0rr r r _swdb_environment_upgrades z+TransactionReplay._swdb_environment_upgradecCsT|jjjj|s*|j|jtd|dS|j|||}|dk rP|jjjj|dS)Nz%Environment id '%s' is not installed.) rDrr>r\rarFrrr)r rrr0rr r r _swdb_environment_downgrades z-TransactionReplay._swdb_environment_downgradecCsT|jjjj|s*|j|jtd|dS|j|||}|dk rP|jjjj|dS)Nz%Environment id '%s' is not installed.) rDrr>r\rarFrrr)r rrr0rr r r _swdb_environment_removes z*TransactionReplay._swdb_environment_removecCs|jS)z> :returns: the loaded data of the transaction )rZ)r r r r get_dataszTransactionReplay.get_datacCs|jS)zW :returns: an array of warnings gathered during the transaction replay )rN)r r r r get_warnings szTransactionReplay.get_warningsc Cs|j}g}xJ|jD]@}y|j|Wqtk rP}z|j|WYdd}~XqXqWx|jD]}y |d}|d}ytjj|d}Wn:tj j k r}z|jtt |w`WYdd}~XnX|dkr|j |||dn|dkr|j |||dnl|dks|d kr,|j|||dnD|d ks@|d krT|j|||dn|jttd j||d Wq`tk r}z&|jttdj|jddWYdd}~Xq`tk r}z|j|WYdd}~Xq`Xq`Wx|jD]} y| d}| d} ytjj| d}Wn>tj j k r^}z|jtt |wWYdd}~XnX|dkr~|j| || dn|dkr|j| || dnl|dks|d kr|j| || dnD|d ks|d kr|j| || dn|jttdj|| dWnptk rN}z&|jttdj|jddWYdd}~Xn.tk rz}z|j|WYdd}~XnXqW|rt||dS)z* Replays the transaction. Nr&r*r,rjr+rnrkrprlrqz@Unexpected value of group action "{action}" for group "{group}".)r&r;z&Missing object key "{key}" in a group.r)rdr0zJUnexpected value of environment action "{action}" for environment "{env}".)r&r>z-Missing object key "{key}" in an environment.)rEr]rrr6r_r7r8rrrrrrrrrrrtrur`rrrrr) r rWrrr$r<r&rrr?rr r r runsv   *   *"zTransactionReplay.runcCs8|jjs dSg}x|jjD]}y |j}Wn$tk rP}zwWYdd}~XnXt|}||jkr|j s|jtjj tjj tjj fkrt dj |d}|js|jt|n |jj|y>|j|}|jtjjtjjfkstjj||jdkr||_Wqtk r}zWYdd}~XqXqW|r4t|j|dS)z Sets reasons in the transaction history to values from the stored transaction. Also serves to check whether additional packages were pulled in by the transaction, which results in an error (unless ignore_extras is True). 
NzgPackage nevra "{nevra}", which is not present in the transaction file, was pulled into the transaction.)r'r)rDr8r=rtrrLrFr&r7ZTransactionItemAction_UPGRADEDZ TransactionItemAction_DOWNGRADEDZ!TransactionItemAction_REINSTALLEDrrrGr6rrNrMZTransactionItemAction_INSTALLZTransactionItemAction_REMOVEZTransactionItemReasonComparer(rrE)r rr:r=r$r'r Z replay_reasonr r r post_transactionds<        z"TransactionReplay.post_transaction)rCNFFF)rrr__doc__rrOrPrar^r[rrrrrrrrrrrrrrrr r r r rBs4    a   (   SrB)Z __future__rrrr7rwZdnf.i18nrZdnf.exceptionsr}rTr"Z VERSION_MINORr4 exceptionsrrrrr%rAobjectrBr r r r s     KPK!0,uKuK%__pycache__/util.cpython-36.opt-1.pycnu[3 ft`O@svddlmZddlmZddlmZddlmZmZddlmZm Z ddl Z ddl Z ddl Z ddl Z ddlZ ddlZddlZddlZddlZddlZddlZddlZddlZddlZddlZddlZddlZddlZddlZejdZe j j!d kre j j!ndZ"e"j#Z$d d Z%d^d d Z&d_ddZ'ddZ(ddZ)ddZ*ddZ+ddZ,ddZ-ddZ.dd Z/d!d"Z0d#d$Z1d%d&Z2d'd(Z3d)d*Z4d+d,Z5d-d.Z6d/d0Z7d1d2Z8d3d4Z9d5d6Z:d7d8Z;d9d:ZZ>d?d@Z?dAdBZ@dCdDZAd`dFdGZBdHdIejCfdJdKZDdLdMZEdNdOZFdPdQZGdRdSZHGdTdUdUeIZJGdVdWdWeKZLGdXdYdYeMZNdZd[ZOd\d]ZPdS)a)print_function)absolute_import)unicode_literals)PY3 basestring)_ucdNdnfZyumcCst|dgt|dgt|dgt}x|D]}||kr>q0|j|tjjj|d}|jdrr|jj|q0|r|d kr|jj|q0|j d r|j j|d d q0|j j|q0Wd S)a Categorize :param values list into packages, groups and filenames :param namespace: argparse.Namespace, where specs will be stored :param values: list of specs, whether packages ('foo') or groups/modules ('@bar') or filenames ('*.rmp', 'http://*', ...) To access packages use: specs.pkg_specs, to access groups use: specs.grp_specs, to access filenames use: specs.filenames filenames grp_specs pkg_specsrz.rpmhttpftpfilehttps@rN)rrrr) setattrsetaddr pycompZurlparseendswithr append startswithr r ) namespacevaluesZtmp_setvalueZschemesr/usr/lib/python3.6/util.py _parse_specs7s        rcCs|dkrtjj}tjj|||}tjj|jr6|jSt |j g}|j d||j g}yt jjjt jj|dWnBtk r}z&|jrtt|tjt|WYdd}~XnX|jS)NrT)r callbackZNullDownloadProgressrepoZRemoteRPMPayloadospathexistsZ local_pathsumZ download_sizestartZ_librepo_targetlibdnfZ PackageTargetZdownloadPackagesZVectorPPackageTarget RuntimeErrorstrictIOErrorstrloggererror)urlconfZprogressZploadZest_remote_sizeZtargetserrr_urlopen_progressWs      r1w+bcKstrd|kr|jddtj|f|}y<|r@|jj||jn tjj j |rR|j nd||jWn.t k r}zt t|WYdd}~XnX|jd|S)z| Open the specified absolute url, return a file object which respects proxy setting even for non-repo downloads bencodingzutf-8Nr)r setdefaulttempfileZNamedTemporaryFileZ_repoZ downloadUrlfilenor'r!Z DownloaderZ downloadURLZ_configr(r*r+seek)r.r/r!modekwargsZfor0rrr_urlopenhs  $ r;cCs |j|r|dt| }|S)N)rlen)srrrrrtrim|s r?cCs tjdkS)Nr)r"geteuidrrrr am_i_rootsrAcCs.x(tj|D]}tjj||}t|q WdS)zBRemove all files and dirs under `path` Also see rm_rf() N)r"listdirr#joinrm_rf)r#entryZcontained_pathrrr clear_dirsrFcCsXytj|ddWn@tk rR}z$|jtjks>tjj| rB|WYdd}~XnXdS)Ni)r9)r"makedirsOSErrorerrnoZEEXISTr#isdir)Zdnamer0rrr ensure_dirs rKcCsJg}|}xsz!first_not_none..)rWrXrY)rTrZrrrfirst_not_nones r_cCstjt|S)N)timefile_timestamp)fnrrrfile_agesrccCs tj|jS)N)r"statst_mtime)rbrrrrasrac Cs4ytjtjdStk r.dtjSXdS)NrzUID: %s)pwdgetpwuidr"r@KeyErrorrrrrget_effective_loginsricCs(x"|D]}|j|}|dkr|SqW|S)z!Like dict.get() for nested dicts.N)get)ZdctkeysZ not_foundkrrrget_ins   rmcsfdd}tj||ggfS)Ncs|t| j||S)N)boolr)Zaccr])rbrrsplittersz!group_by_filter..splitter) functoolsreduce)rbrTror)rbrgroup_by_filters rrccs&x |D]}||r|V|VqWdS)z/Insert an item into an iterable by a condition.Nr)r]rT conditionZ original_itemrrr insert_ifs rtc Cs*y t|Wntk r dSXdSdS)z&Test whether an iterator is 
exhausted.TFN)rXrY)iteratorrrr is_exhausteds  rvcCs*t|r|g}t|to(tdd|DS)Ncss|]}t|td@VqdS)z*[?N)r)r\prrrr^sz"is_glob_pattern..)is_string_type isinstancerSany)patternrrris_glob_patternsr|cCstrt|tSt|tSdS)N)rryr+r)objrrrrxs rxcsfdd}|S)zDecorator to get lazy attribute initialization. Composes with @property. Force reinitialization by deleting the . csfdd}|S)Nc s8y t|Stk r2|}t|||SXdS)N)getattrAttributeErrorr)r}val)attrnamerbrr cached_getters   z6lazyattr..get_decorated..cached_getterr)rbr)r)rbr get_decoratedszlazyattr..get_decoratedr)rrr)rrlazyattrs rcGstt|f|S)zLike functools.map(), but return a list instead of an iterator. This means all side effects of fn take place even without iterating the result. )rSmap)rbseqrrrmapall srcCs8tjdtj|}tjjs4tjd}|r4|j|}|S)z6Convert time into locale aware datetime string object.z%cr) r`ZstrftimeZ localtimer rrlocaleZ getlocaledecode)Z timestamptZcurrent_locale_settingrrrnormalize_times   rcCszy\d}ddtj|D}t|dkrZ|d}tdj||}|j}t|dkSQRXdSttfk rtdSXdS)zDecide whether we are on line power. Returns True if we are on line power, False if not, None if it can not be decided. z/sys/class/power_supplycSsg|]}|jdr|qS)ZAC)r)r\Znoderrr &szon_ac_power..rz {}/{}/onlinerN) r"rBr<openformatreadintr* ValueError)Z ps_folderZac_nodesZac_nodeZ ac_statusdatarrr on_ac_powers rcCsy ddl}Wntk r dSXy0|j}|jdd}|j|d}|jdd}Wn|jk rhdSX|dkrvdS|dkrd S|dkrd Std |dS)zDecide whether we are on metered connection. Returns: True: if on metered connection False: if not None: if it can not be decided rNzorg.freedesktop.NetworkManagerz/org/freedesktop/NetworkManagerzorg.freedesktop.DBus.PropertiesZMeteredrTFz&Unknown value for metered property: %r)rr)rr)dbus ImportErrorZ SystemBusZ get_objectZ InterfaceZGetZ DBusExceptionr)rZbusproxyZifaceZmeteredrrron_metered_connection1s&  rcCs&tj|\}}tjj||t||fS)zUse a predicate to partition entries into false entries and true entries. Credit: Python library itertools' documentation. ) itertoolsteer r filterfalsefilter)ZpredrTZt1Zt2rrr partitionNsrc Cs(ytj|Wntk r"YnXdS)N)shutilZrmtreerH)r#rrrrDWsrDc#sFtfdd}t||}||Vx||}|s8P|Vq*WdS)zSplit an iterable into tuples by a condition. Inserts a separator before each item which meets the condition and then cuts the iterable by these separators. csttjfdd|S)Ncs|kS)Nr)r0) separatorrrgsz4split_by..next_subsequence..)tupler takewhile)rZ)rrrnext_subsequencefsz"split_by..next_subsequenceN)objectrt)rTrsrZmarkedZ subsequencer)rrsplit_by]s   rcCs|j|r|t|dSdS)N)rr<)r=prefixrrr strip_prefixus rFc Cs8|stj|tjrtj|dSt|dWdQRXdS)z{Create an empty file if it doesn't exist or bump it's timestamps. If no_create is True only bumps the timestamps. 
Na)r"accessF_OKutimer)r#Z no_createrrrtouch{s  rwritecCsyh|dkr|j|nP|dkr(|jn>|dkrD|j||jn"|dkrZt||dn td|Wn>tk r}z"tjdjt|j t |WYdd}~XnXdS)NrflushZ write_flushprint)rzUnsupported type: z{}: {}) rrrrr*r,criticalrtype__name__r )tpmsgoutr0rrr_terminal_messengers    rcCsnd}t|dk}xXt|ddD]H\}}|rD|dtdd|7}n|dtdd7}|dj|7}qW|S) z Format string about problems in resolve :param resolve_problems: list with list of strings (output of goal.problem_rules()) :return: string rr)r&z ZProblemz %d: z: z - )r< enumeraterrC)Zresolve_problemsrZcount_problemsiZrsrrr_format_resolve_problemss rcCsX|jd}|jdk r4|jdkr4||jd7}||jd|jd|jS)N-0:.)NEVRA)ZteZnevrarrr _te_nevras rcCstjdxH|D]@}|j}d}|dk r.|j}djt||||j}tj|qWx:|D]2}djt||j|j|j |j |j }tj|qZWdS)NzLogging transaction elementsz@RPM element: '{}', Key(): '{}', Key state: '{}', Failed() '{}': z^SWDB element: '{}', State: '{}', Action: '{}', From repo: '{}', Reason: '{}', Get reason: '{}') r,debugKeystaterrFailedr+actionZ from_reporeasonZ get_reason)rpm_transactionswdb_transactionrpm_eltsiZ tsi_staterrrr_log_rpm_trans_with_swdbs    rc CsVtjjtjjtjjtjjtjjh}dd|D}d}d}x|D]}t|}|j}|dksft |d rx:|D]2} | j tjj krql| j |krqlt | |krl| }PqlW|dkst |d rtjtdj|d}q>|jrtjj|_ d}q>tjj|_ q>Wx6|D].}|j tjj krtjtdjt |d}qW|rBtjtd|rRt||dS) NcSsg|]}|qSrr)r\rrrrrsz-_sync_rpm_trans_with_swdb..FZpkgz%TransactionItem not found for key: {}Tz)TransactionSWDBItem not found for key: {}z#Errors occurred during transaction.)r' transactionZ TransactionItemAction_DOWNGRADEDZTransactionItemAction_OBSOLETEDTransactionItemAction_REMOVEZTransactionItemAction_UPGRADEDZ!TransactionItemAction_REINSTALLEDrrhasattrrZTransactionItemState_UNKNOWNrr+r,rrrrTransactionItemState_ERRORZTransactionItemState_DONErr) rrZrevert_actionsZ cached_tsiZ el_not_foundr-rZte_nevrarZ tsi_candidaterrr_sync_rpm_trans_with_swdbsH       rc@s$eZdZddZddZddZdS)tmpdircCsdtjj}tj|d|_dS)Nz%s-)r)r constZPREFIXr6Zmkdtempr#)selfrrrr__init__s ztmpdir.__init__cCs|jS)N)r#)rrrr __enter__sztmpdir.__enter__cCst|jdS)N)rDr#)rexc_type exc_value tracebackrrr__exit__sztmpdir.__exit__N)r __module__ __qualname__rrrrrrrrsrcs(eZdZdZfddZddZZS)BunchzDictionary with attribute accessing syntax. In DNF, prefer using this over dnf.yum.misc.GenericHolder. 
Credit: Alex Martelli, Doug Hudgeon cstt|j||||_dS)N)superrr__dict__)rargskwds) __class__rrrszBunch.__init__cCst|S)N)id)rrrr__hash__szBunch.__hash__)rrr__doc__rr __classcell__rr)rrrs rcs,eZdZfddZddZddZZS) MultiCallListcstt|j|j|dS)N)rrrextend)rrT)rrrrszMultiCallList.__init__csfdd}|S)Ncsfdd}tt|S)Ncst|}|S)N)r~)vmethod)rr:whatrr call_what s z8MultiCallList.__getattr__..fn..call_what)rSr)rr:r)rr)rr:rrb sz%MultiCallList.__getattr__..fnr)rrrbr)rrr __getattr__ szMultiCallList.__getattr__csfdd}tt||S)Ncst|dS)N)r)r])rrrrsettersz)MultiCallList.__setattr__..setter)rSr)rrrrr)rrr __setattr__szMultiCallList.__setattr__)rrrrrrrrr)rrrs rc Csntgggggggggggd }xF|D]<}|jtjjkrJ|jj|q(|jtjjkrf|j j|q(|jtjj kr|j tjj kr|j j|nD|j tjjkr|jj|n(|j tjjkr|jj|n |jj|q(|jtjjkr|jj|q(|jtjjkrL|j tjjkr |jj|n*|j tjjkr>|jj|n |jj|q(|jtjjkr(|jj|q(W|S)N) downgradederased erased_clean erased_dep installedinstalled_group installed_depinstalled_weak reinstalledupgradedfailed)rrr'rrrrrZTransactionItemAction_DOWNGRADErZTransactionItemAction_INSTALLrZTransactionItemReason_GROUPrZ TransactionItemReason_DEPENDENCYrZ%TransactionItemReason_WEAK_DEPENDENCYrrZTransactionItemAction_REINSTALLrrZTransactionItemReason_CLEANrrrZTransactionItemAction_UPGRADEr)rr3rrrr _make_listssH rc sfdd}tjj|}jd|d\}}|j|}g}xtd|jftd|jftd|j|j |j |j ftd|j ftd |ftd |j |j|jftd |jfgD]&\} } |j|| t| tj|d qW|S) alReturns a human-readable summary of the results of the transaction. :param action_callback: function generating output for specific action. It takes two parameters - action as a string and list of affected packages for this action :return: a list of lines containing a human-readable summary of the results of the transaction cs|j|jk|j|jk}|dkr$|Stj|j|j|j|j|jd}tj|j|j|j|j|jd}|j|j}|dkrz|S|j|jk|j|jkS)zCompares two transaction items or packages by nevra. Used as a fallback when tsi does not contain package object. r)nameepochversionreleasearch) rhawkeyZNEVRArrrrZevr_cmpZsack)Zitem1Zitem2retZnevra1Znevra2)baserr_tsi_or_pkg_nevra_cmpPsz7_post_transaction_output.._tsi_or_pkg_nevra_cmpF)Zreport_problemsrZUpgradedZ DowngradedZ InstalledZ ReinstalledZSkippedZRemovedr)key)r utilrZ_skipped_packagesunionrrrrrrrrrrrrrsortedrp cmp_to_key) rrZaction_callbackr Z list_bunchZskipped_conflictsZskipped_brokenZskippedrrZtsisr)rr_post_transaction_outputFs(       r)N)NNr2)F)QZ __future__rrrrrrZdnf.i18nrr argparser Z dnf.callbackZ dnf.constZ dnf.pycomprIrprrrZloggingr"rfrsysr6r`Z libdnf.repor'Zlibdnf.transactionZ getLoggerr,ArgumentParserprogZ MAIN_PROGupperZMAIN_PROG_UPPERrr1r;r?rArFrKrQrVr[r_rcrarirmrrrtrvr|rxrrrrrrrDrrrstdoutrrrrrrrdictrrSrrrrrrrs              ( -PK!0,uKuK__pycache__/util.cpython-36.pycnu[3 ft`O@svddlmZddlmZddlmZddlmZmZddlmZm Z ddl Z ddl Z ddl Z ddl Z ddlZ ddlZddlZddlZddlZddlZddlZddlZddlZddlZddlZddlZddlZddlZddlZejdZe j j!d kre j j!ndZ"e"j#Z$d d Z%d^d d Z&d_ddZ'ddZ(ddZ)ddZ*ddZ+ddZ,ddZ-ddZ.dd Z/d!d"Z0d#d$Z1d%d&Z2d'd(Z3d)d*Z4d+d,Z5d-d.Z6d/d0Z7d1d2Z8d3d4Z9d5d6Z:d7d8Z;d9d:ZZ>d?d@Z?dAdBZ@dCdDZAd`dFdGZBdHdIejCfdJdKZDdLdMZEdNdOZFdPdQZGdRdSZHGdTdUdUeIZJGdVdWdWeKZLGdXdYdYeMZNdZd[ZOd\d]ZPdS)a)print_function)absolute_import)unicode_literals)PY3 basestring)_ucdNdnfZyumcCst|dgt|dgt|dgt}x|D]}||kr>q0|j|tjjj|d}|jdrr|jj|q0|r|d kr|jj|q0|j d r|j j|d d q0|j j|q0Wd S)a Categorize :param values list into packages, groups and filenames :param namespace: argparse.Namespace, where specs will be stored :param values: list of specs, whether packages ('foo') or groups/modules ('@bar') or filenames ('*.rmp', 'http://*', ...) 
To access packages use: specs.pkg_specs, to access groups use: specs.grp_specs, to access filenames use: specs.filenames filenames grp_specs pkg_specsrz.rpmhttpftpfilehttps@rN)rrrr) setattrsetaddr pycompZurlparseendswithr append startswithr r ) namespacevaluesZtmp_setvalueZschemesr/usr/lib/python3.6/util.py _parse_specs7s        rcCs|dkrtjj}tjj|||}tjj|jr6|jSt |j g}|j d||j g}yt jjjt jj|dWnBtk r}z&|jrtt|tjt|WYdd}~XnX|jS)NrT)r callbackZNullDownloadProgressrepoZRemoteRPMPayloadospathexistsZ local_pathsumZ download_sizestartZ_librepo_targetlibdnfZ PackageTargetZdownloadPackagesZVectorPPackageTarget RuntimeErrorstrictIOErrorstrloggererror)urlconfZprogressZploadZest_remote_sizeZtargetserrr_urlopen_progressWs      r1w+bcKstrd|kr|jddtj|f|}y<|r@|jj||jn tjj j |rR|j nd||jWn.t k r}zt t|WYdd}~XnX|jd|S)z| Open the specified absolute url, return a file object which respects proxy setting even for non-repo downloads bencodingzutf-8Nr)r setdefaulttempfileZNamedTemporaryFileZ_repoZ downloadUrlfilenor'r!Z DownloaderZ downloadURLZ_configr(r*r+seek)r.r/r!modekwargsZfor0rrr_urlopenhs  $ r;cCs |j|r|dt| }|S)N)rlen)srrrrrtrim|s r?cCs tjdkS)Nr)r"geteuidrrrr am_i_rootsrAcCs.x(tj|D]}tjj||}t|q WdS)zBRemove all files and dirs under `path` Also see rm_rf() N)r"listdirr#joinrm_rf)r#entryZcontained_pathrrr clear_dirsrFcCsXytj|ddWn@tk rR}z$|jtjks>tjj| rB|WYdd}~XnXdS)Ni)r9)r"makedirsOSErrorerrnoZEEXISTr#isdir)Zdnamer0rrr ensure_dirs rKcCsJg}|}xsz!first_not_none..)rWrXrY)rTrZrrrfirst_not_nones r_cCstjt|S)N)timefile_timestamp)fnrrrfile_agesrccCs tj|jS)N)r"statst_mtime)rbrrrrasrac Cs4ytjtjdStk r.dtjSXdS)NrzUID: %s)pwdgetpwuidr"r@KeyErrorrrrrget_effective_loginsricCs(x"|D]}|j|}|dkr|SqW|S)z!Like dict.get() for nested dicts.N)get)ZdctkeysZ not_foundkrrrget_ins   rmcsfdd}tj||ggfS)Ncs|t| j||S)N)boolr)Zaccr])rbrrsplittersz!group_by_filter..splitter) functoolsreduce)rbrTror)rbrgroup_by_filters rrccs&x |D]}||r|V|VqWdS)z/Insert an item into an iterable by a condition.Nr)r]rT conditionZ original_itemrrr insert_ifs rtc Cs*y t|Wntk r dSXdSdS)z&Test whether an iterator is exhausted.TFN)rXrY)iteratorrrr is_exhausteds  rvcCs*t|r|g}t|to(tdd|DS)Ncss|]}t|td@VqdS)z*[?N)r)r\prrrr^sz"is_glob_pattern..)is_string_type isinstancerSany)patternrrris_glob_patternsr|cCstrt|tSt|tSdS)N)rryr+r)objrrrrxs rxcsfdd}|S)zDecorator to get lazy attribute initialization. Composes with @property. Force reinitialization by deleting the . csfdd}|S)Nc s8y t|Stk r2|}t|||SXdS)N)getattrAttributeErrorr)r}val)attrnamerbrr cached_getters   z6lazyattr..get_decorated..cached_getterr)rbr)r)rbr get_decoratedszlazyattr..get_decoratedr)rrr)rrlazyattrs rcGstt|f|S)zLike functools.map(), but return a list instead of an iterator. This means all side effects of fn take place even without iterating the result. )rSmap)rbseqrrrmapall srcCs8tjdtj|}tjjs4tjd}|r4|j|}|S)z6Convert time into locale aware datetime string object.z%cr) r`ZstrftimeZ localtimer rrlocaleZ getlocaledecode)Z timestamptZcurrent_locale_settingrrrnormalize_times   rcCszy\d}ddtj|D}t|dkrZ|d}tdj||}|j}t|dkSQRXdSttfk rtdSXdS)zDecide whether we are on line power. Returns True if we are on line power, False if not, None if it can not be decided. z/sys/class/power_supplycSsg|]}|jdr|qS)ZAC)r)r\Znoderrr &szon_ac_power..rz {}/{}/onlinerN) r"rBr<openformatreadintr* ValueError)Z ps_folderZac_nodesZac_nodeZ ac_statusdatarrr on_ac_powers rcCsy ddl}Wntk r dSXy0|j}|jdd}|j|d}|jdd}Wn|jk rhdSX|dkrvdS|dkrd S|dkrd Std |dS)zDecide whether we are on metered connection. 
Returns: True: if on metered connection False: if not None: if it can not be decided rNzorg.freedesktop.NetworkManagerz/org/freedesktop/NetworkManagerzorg.freedesktop.DBus.PropertiesZMeteredrTFz&Unknown value for metered property: %r)rr)rr)dbus ImportErrorZ SystemBusZ get_objectZ InterfaceZGetZ DBusExceptionr)rZbusproxyZifaceZmeteredrrron_metered_connection1s&  rcCs&tj|\}}tjj||t||fS)zUse a predicate to partition entries into false entries and true entries. Credit: Python library itertools' documentation. ) itertoolsteer r filterfalsefilter)ZpredrTZt1Zt2rrr partitionNsrc Cs(ytj|Wntk r"YnXdS)N)shutilZrmtreerH)r#rrrrDWsrDc#sFtfdd}t||}||Vx||}|s8P|Vq*WdS)zSplit an iterable into tuples by a condition. Inserts a separator before each item which meets the condition and then cuts the iterable by these separators. csttjfdd|S)Ncs|kS)Nr)r0) separatorrrgsz4split_by..next_subsequence..)tupler takewhile)rZ)rrrnext_subsequencefsz"split_by..next_subsequenceN)objectrt)rTrsrZmarkedZ subsequencer)rrsplit_by]s   rcCs|j|r|t|dSdS)N)rr<)r=prefixrrr strip_prefixus rFc Cs8|stj|tjrtj|dSt|dWdQRXdS)z{Create an empty file if it doesn't exist or bump it's timestamps. If no_create is True only bumps the timestamps. Na)r"accessF_OKutimer)r#Z no_createrrrtouch{s  rwritecCsyh|dkr|j|nP|dkr(|jn>|dkrD|j||jn"|dkrZt||dn td|Wn>tk r}z"tjdjt|j t |WYdd}~XnXdS)NrflushZ write_flushprint)rzUnsupported type: z{}: {}) rrrrr*r,criticalrtype__name__r )tpmsgoutr0rrr_terminal_messengers    rcCsnd}t|dk}xXt|ddD]H\}}|rD|dtdd|7}n|dtdd7}|dj|7}qW|S) z Format string about problems in resolve :param resolve_problems: list with list of strings (output of goal.problem_rules()) :return: string rr)r&z ZProblemz %d: z: z - )r< enumeraterrC)Zresolve_problemsrZcount_problemsiZrsrrr_format_resolve_problemss rcCsX|jd}|jdk r4|jdkr4||jd7}||jd|jd|jS)N-0:.)NEVRA)ZteZnevrarrr _te_nevras rcCstjdxH|D]@}|j}d}|dk r.|j}djt||||j}tj|qWx:|D]2}djt||j|j|j |j |j }tj|qZWdS)NzLogging transaction elementsz@RPM element: '{}', Key(): '{}', Key state: '{}', Failed() '{}': z^SWDB element: '{}', State: '{}', Action: '{}', From repo: '{}', Reason: '{}', Get reason: '{}') r,debugKeystaterrFailedr+actionZ from_reporeasonZ get_reason)rpm_transactionswdb_transactionrpm_eltsiZ tsi_staterrrr_log_rpm_trans_with_swdbs    rc CsVtjjtjjtjjtjjtjjh}dd|D}d}d}x|D]}t|}|j}|dksft |d rx:|D]2} | j tjj krql| j |krqlt | |krl| }PqlW|dkst |d rtjtdj|d}q>|jrtjj|_ d}q>tjj|_ q>Wx6|D].}|j tjj krtjtdjt |d}qW|rBtjtd|rRt||dS) NcSsg|]}|qSrr)r\rrrrrsz-_sync_rpm_trans_with_swdb..FZpkgz%TransactionItem not found for key: {}Tz)TransactionSWDBItem not found for key: {}z#Errors occurred during transaction.)r' transactionZ TransactionItemAction_DOWNGRADEDZTransactionItemAction_OBSOLETEDTransactionItemAction_REMOVEZTransactionItemAction_UPGRADEDZ!TransactionItemAction_REINSTALLEDrrhasattrrZTransactionItemState_UNKNOWNrr+r,rrrrTransactionItemState_ERRORZTransactionItemState_DONErr) rrZrevert_actionsZ cached_tsiZ el_not_foundr-rZte_nevrarZ tsi_candidaterrr_sync_rpm_trans_with_swdbsH       rc@s$eZdZddZddZddZdS)tmpdircCsdtjj}tj|d|_dS)Nz%s-)r)r constZPREFIXr6Zmkdtempr#)selfrrrr__init__s ztmpdir.__init__cCs|jS)N)r#)rrrr __enter__sztmpdir.__enter__cCst|jdS)N)rDr#)rexc_type exc_value tracebackrrr__exit__sztmpdir.__exit__N)r __module__ __qualname__rrrrrrrrsrcs(eZdZdZfddZddZZS)BunchzDictionary with attribute accessing syntax. In DNF, prefer using this over dnf.yum.misc.GenericHolder. 
Credit: Alex Martelli, Doug Hudgeon cstt|j||||_dS)N)superrr__dict__)rargskwds) __class__rrrszBunch.__init__cCst|S)N)id)rrrr__hash__szBunch.__hash__)rrr__doc__rr __classcell__rr)rrrs rcs,eZdZfddZddZddZZS) MultiCallListcstt|j|j|dS)N)rrrextend)rrT)rrrrszMultiCallList.__init__csfdd}|S)Ncsfdd}tt|S)Ncst|}|S)N)r~)vmethod)rr:whatrr call_what s z8MultiCallList.__getattr__..fn..call_what)rSr)rr:r)rr)rr:rrb sz%MultiCallList.__getattr__..fnr)rrrbr)rrr __getattr__ szMultiCallList.__getattr__csfdd}tt||S)Ncst|dS)N)r)r])rrrrsettersz)MultiCallList.__setattr__..setter)rSr)rrrrr)rrr __setattr__szMultiCallList.__setattr__)rrrrrrrrr)rrrs rc Csntgggggggggggd }xF|D]<}|jtjjkrJ|jj|q(|jtjjkrf|j j|q(|jtjj kr|j tjj kr|j j|nD|j tjjkr|jj|n(|j tjjkr|jj|n |jj|q(|jtjjkr|jj|q(|jtjjkrL|j tjjkr |jj|n*|j tjjkr>|jj|n |jj|q(|jtjjkr(|jj|q(W|S)N) downgradederased erased_clean erased_dep installedinstalled_group installed_depinstalled_weak reinstalledupgradedfailed)rrr'rrrrrZTransactionItemAction_DOWNGRADErZTransactionItemAction_INSTALLrZTransactionItemReason_GROUPrZ TransactionItemReason_DEPENDENCYrZ%TransactionItemReason_WEAK_DEPENDENCYrrZTransactionItemAction_REINSTALLrrZTransactionItemReason_CLEANrrrZTransactionItemAction_UPGRADEr)rr3rrrr _make_listssH rc sfdd}tjj|}jd|d\}}|j|}g}xtd|jftd|jftd|j|j |j |j ftd|j ftd |ftd |j |j|jftd |jfgD]&\} } |j|| t| tj|d qW|S) alReturns a human-readable summary of the results of the transaction. :param action_callback: function generating output for specific action. It takes two parameters - action as a string and list of affected packages for this action :return: a list of lines containing a human-readable summary of the results of the transaction cs|j|jk|j|jk}|dkr$|Stj|j|j|j|j|jd}tj|j|j|j|j|jd}|j|j}|dkrz|S|j|jk|j|jkS)zCompares two transaction items or packages by nevra. Used as a fallback when tsi does not contain package object. 
r)nameepochversionreleasearch) rhawkeyZNEVRArrrrZevr_cmpZsack)Zitem1Zitem2retZnevra1Znevra2)baserr_tsi_or_pkg_nevra_cmpPsz7_post_transaction_output.._tsi_or_pkg_nevra_cmpF)Zreport_problemsrZUpgradedZ DowngradedZ InstalledZ ReinstalledZSkippedZRemovedr)key)r utilrZ_skipped_packagesunionrrrrrrrrrrrrrsortedrp cmp_to_key) rrZaction_callbackr Z list_bunchZskipped_conflictsZskipped_brokenZskippedrrZtsisr)rr_post_transaction_outputFs(       r)N)NNr2)F)QZ __future__rrrrrrZdnf.i18nrr argparser Z dnf.callbackZ dnf.constZ dnf.pycomprIrprrrZloggingr"rfrsysr6r`Z libdnf.repor'Zlibdnf.transactionZ getLoggerr,ArgumentParserprogZ MAIN_PROGupperZMAIN_PROG_UPPERrr1r;r?rArFrKrQrVr[r_rcrarirmrrrtrvr|rxrrrrrrrDrrrstdoutrrrrrrrdictrrSrrrrrrrs              ( -PK!M)__pycache__/__init__.cpython-36.opt-1.pycnu[3 ft`m@spddlmZddlZddlZejdeddddlmZeZ ddl Zej j Z ddl ZejjZejjjjddS))unicode_literalsNoncez ^dnf\..*$)categorymodule)VERSIONZmedia)Z __future__rwarningsZ dnf.pycompZdnffilterwarningsDeprecationWarningZ dnf.constr __version__Zdnf.basebaseZBaseZ dnf.pluginZpluginZPluginZpycompZurlparseZ uses_fragmentappendr r /usr/lib/python3.6/__init__.pys  PK!M#__pycache__/__init__.cpython-36.pycnu[3 ft`m@spddlmZddlZddlZejdeddddlmZeZ ddl Zej j Z ddl ZejjZejjjjddS))unicode_literalsNoncez ^dnf\..*$)categorymodule)VERSIONZmedia)Z __future__rwarningsZ dnf.pycompZdnffilterwarningsDeprecationWarningZ dnf.constr __version__Zdnf.basebaseZBaseZ dnf.pluginZpluginZPluginZpycompZurlparseZ uses_fragmentappendr r /usr/lib/python3.6/__init__.pys  PK!ؤ 55%__pycache__/base.cpython-36.opt-1.pycnu[3 f@sXdZddlmZddlmZddlmZddlmZddlZddlZddlZ ddl m Z ddl m Z dd lmZmZmZdd lmZdd lmZdd lmZydd lmZWn ek rdd lmZYnXddlZddlZddl ZddlZddlZddl Zddl!Zddl"Zddl#Zddl$Zddl%Zddl&Zddl'Zyddl(ZdZ)Wnek r`dZ)YnXddl*Zddl+Zddl,Zddl-Zddl.Zddl/Zddl0Zddl1Zddl2Zddl3Zddl4Zddl5ZddlZddl6Zddl7Z7ddl8Z8ddl9Z9ddl:Z:ddl;Z;ddlZ>ddl?Z?ddl@Z@ddlAZAddlBZBe;jCdZDGdddeEZFddZGdS)z Supplies the Base class. 
)absolute_import)division)print_function)unicode_literalsN)deepcopy) CompsQuery)_P_ucd) _parse_specs) SwdbInterface)misc)SequenceTFdnfc@seZdZdddZddZddZdd Zd d Zd d Ze ddZ ddZ dddZ ddZ eddZeddZeddZejddZeejjddd Zed!d"Zed#d$Zed%d&Zejd'd&Zd(d)Zffdfd*d+Zd,d-Zd.d/Zd0d1Zdd2d3Z dd5d6Z!dd7d8Z"d9d:Z#d;d<Z$dd=d>Z%dd?d@Z&dAdBZ'e(j)e(j*e(j+e(j,e(j-e(j.e(j/dCZ0e1e(dDre(j2e0dE<dFe(j3e(j4BiZ5edGdHZ6edIdJZ7e7jdKdJZ7ddLdMZ8dNdOZ9edPdQdRdQdSdQdTdUZ:dVdWZ;dXdYZd^d_Z?dd`daZ@ffdbdcZAdddeZBdfdgZCdhdiZDddjdkZEddldmZFddndoZGddpdqZHdrdsZIdtduZJdvdwZKddydzZLdd{d|ZMd}d~ZNddZOddZPddZQdddZRddZSdddZTdddZUddZVddZWddZXddZYddZZddZ[dddZ\ddZ]ddZ^ddZ_dddZ`dddZadddZbdddZcdddZdddZeddZfddZgdddZhdddZidddZjdddZkdddZldddZmdddZnddZoddd„ZpddĄZqffffffddƄZrddȄZsdddʄZtddd̄Zuddd΄ZvddЄZwdddӄZxdddՄZyddׄZzddلZ{ddۄZ|dd݄Z}dd߄Z~ddZddZdS( BaseNcCsd|_|p|j|_d|_d|_d|_d|_d|_d|_t j j |_ d|_ t|_t|_t jj|_t jj|_t jj|_ttjg|_t jj|_d|_ d|_!d|_"g|_#i|_$d|_%t|_&d|_'dS)NF)(_closed_setup_default_conf_conf_goal_repo_persistor_sack _transaction_priv_ts_compsrcompsTransactionBunch _comps_trans_historyset _tempfiles_trans_tempfilescallbackZDepsolve _ds_callbackloggingZLogging_loggingrepodictRepoDict_reposrpmZRPMPROB_FILTER_OLDPACKAGE_rpm_probfilterZpluginZPlugins_plugins_trans_success_trans_install_set_tempfile_persistor_update_security_filters_update_security_options_allow_erasing_repo_set_imported_gpg_keysoutput)selfconfr5/usr/lib/python3.6/base.py__init__]s2     z Base.__init__cCs|S)Nr5)r3r5r5r6 __enter__zszBase.__enter__cGs |jdS)N)close)r3Zexc_argsr5r5r6__exit__}sz Base.__exit__cCs |jdS)N)r9)r3r5r5r6__del__sz Base.__del__cCs.|jr|jj|n|jjrn |jj|dS)N)rr updater4destdirr)r3filesr5r5r6_add_tempfiless zBase._add_tempfilescCs|jtd|jdd}|jr&d|d<y|jj|jfddi|WnTtjk r}z6t j t dj |j |tjjt dj |j WYdd}~XnXdS)NT)load_filelists load_prestoload_updateinfo load_other build_cachezloading repo '{}' failure: {}z"Loading repository '{}' has failed)loaddictdeltarpmload_metadata_otherr load_repo_repohawkey Exceptionloggerdebugrformatidr exceptions RepoError)r3repo mdload_flagser5r5r6_add_repo_to_sackszBase._add_repo_to_sackcCs.tjj}|j}d|kr*tjj|j|d<|S)N releasever)rr4ZConf substitutionsr(Zdetect_releasever installroot)r4Zsubstr5r5r6rs  zBase._setup_default_confcCsdd|jjD}y0|jj|j||jj|jjd|jj|jj d}Wn4t j k rx}zt j jt|WYdd}~XnX|rtjt jjj|ddS)NcSsg|]}|jr|jqSr5)Zmodule_hotfixesrP).0ir5r5r6 sz0Base._setup_modular_excludes..F)Z update_onlyZ debugsolvermodule_obsoletesr)repos iter_enabledsackZfilter_modules_moduleContainerr4rYZmodule_platform_id debug_solverr]rKrLrrQErrorr rMwarningmodule module_baseZformat_modular_solver_errors)r3Z hot_fix_reposZ solver_errorsrUr5r5r6_setup_modular_excludess "zBase._setup_modular_excludesFc Cst|jj}d|kr$tr$|jdSg}g}|s>x|jjD]}|j|krPq@t|j dkr|j j j dd}x8t|j D]*}t jj|}|j|j|j dddd}q|W|j |jd|j|j|jf|j j j dd} x8t|jD]*} t jj| }| j|j|j dddd} qW| j |jd| r@|j| |jfq@Wd|kr6|j j j dd} t|jj dkrx.) 
rr4 cacheonlyr^r_Zexpired_to_addr<saver-)r3Zexpiredr5r5r6_store_persistent_datas  zBase._store_persistent_datacCs|jdkr|jdd|jS)NT) arch_filter)r read_comps)r3r5r5r6rs  z Base.compscCs|jS)N)r)r3r5r5r6r4sz Base.confcCs|jS)N)r')r3r5r5r6r^sz Base.reposcCs d|_dS)N)r')r3r5r5r6r^sZ _priv_rpmconncCstjjj|jjS)N)rr(Z connectionZ RpmConnectionr4rY)r3r5r5r6_rpmconn sz Base._rpmconncCs|jS)N)r)r3r5r5r6r`sz Base.sackcCsP|jdkrtjjd|jjdkrHtjjd|jj |jj d|jj |j_|jjS)NzSack was not initializedFarch) r`rrQrcralibdnfreZModulePackageContainerr4rYrX persistdir)r3r5r5r6ras     zBase._moduleContainercCs|jS)N)r)r3r5r5r6 transactionszBase.transactioncCs|jrtd||_dS)Nztransaction already set)r ValueError)r3valuer5r5r6r$scCstjj|jj|_dS)N)r persistorZ RepoPersistorr4cachedirr)r3r5r5r6_activate_persistor+szBase._activate_persistorcCs,|jjr|jj|j|||jj||dS)z&Load plugins and run their __init__().N)r4Zpluginsr*_loadZ _run_init)r3Z disabled_globZenable_pluginsclir5r5r6 init_plugins.szBase.init_pluginscCs|jjdS)z#Run plugins pre_configure() method.N)r*Z_run_pre_config)r3r5r5r6pre_configure_plugins5szBase.pre_configure_pluginscCs|jjdS)zRun plugins configure() method.N)r*Z _run_config)r3r5r5r6configure_plugins:szBase.configure_pluginscCs|jjdS)zRun plugins unload() method.N)r*Z_unload)r3r5r5r6unload_plugins?szBase.unload_pluginsc Cs|jj}|jdkr|j|j}|rtjjrDtd}tj |dStjj dkrhtd}tj |dS|dkrtd}tj |dS|j }|dk r||krtj tddSx|j j D]}|jjdqW|j jstj tdjd j|jjdSx|j jD]}|j\}} | dkr6tj td |jnx| sH| dkrftjtd |j|jjnH|r| |krtd }tj||j| |jjntjtd |j| qW|rd|_|jdddtj tddS)NzCMetadata timer caching disabled when running on metered connection.Fz:Metadata timer caching disabled when running on a battery.rz Metadata timer caching disabled.z"Metadata cache refreshed recently.z*There are no enabled repositories in "{}".z", "z4%s: will never be expired and will not be refreshed.z&%s: has expired and will be refreshed.zC%s: metadata will expire after %d seconds and will be refreshed nowz!%s: will expire after %d seconds.T)load_system_repoload_available_reposzMetadata cache created.)r4Zmetadata_timer_syncrrrutilZon_metered_connectionrrMinfoZ on_ac_powersince_last_makecacher^valuesrJZsetMaxMirrorTries _any_enabledrOjoinZreposdirr_Z_metadata_expire_inrPrNexpireZreset_last_makecache fill_sack) r3timerZperiodrmsgrrSr{Zis_cacheZ expires_inr5r5r6 update_cacheDsZ            zBase.update_cacheTc CsPtjjd}|jdddtjj||_tjj|j j |j j }||dk ry|jj ddWnt k r~|dkrzYnX|rg}d}tj}|j jrtjjjx|jjD]}y`|j||jj|kr|jj}|jj|kr|jj}tjtd|jtjj|jjWqtj j!k rz} z>|jj"|j#dkrJtj$d | |j%|j|j&WYd d } ~ XqXqW|rtj$td d j'||jj(r|dkr|dkrtj)td t*j+t,|dtjj|n|jj-j&Wd QRX|j } |jj.| j/| j0| j1|j2|tj3j4|j|_5| j6|j5_6|j7j8|jS)z'Prepare the Sack and the Goal objects. 
z sack setupT)r`goalF)rDautorz%s: using metadata from %s.z Error: %sNzIgnoring repositories: %sz, z-Last metadata expiration check: %s ago on %s.)Zseconds)9rr#Timerresetr` _build_sackrlockbuild_metadata_lockr4r exit_on_lockrIOErrortimegpgkey_dns_verificationdnssecRpmImportedKeyscheck_imported_keys_validityr^r_rVrJZ getTimestampZgetAgerMrNrrPrnormalize_timegetMaxTimestamprQrRrskip_if_unavailablerdrwdisablerrrdatetimeZ timedeltaintrh _configureinstallonlypkgsinstallonly_limitallow_vendor_changerrGoalrprotect_running_kernelr*run_sack) r3rrrr error_reposZmtsZager{rUr4r5r5r6r|sf             zBase.fill_sackc Cs tjjd}|jdddtjj||_tjj|j j |j j }|n|dk ry|jj ddWnt k r~|dkrzYnXg}|j jrtjjjx|jjD]}yf|jjdddtd|jdd}|jrd|d <|jj|jf|tjtd |jtjj|jj Wqt!t"j#fk r}zZ|j$dkrPtj%j&td j'|j|ntjtd j'|j||j(|j|j)WYd d }~XqXqW|rtj*td dj+|Wd QRX|j }|jj,|j-|j.|j/|j0|tj1j2|j|_3|j4|j3_4|j5j6|jS)a Prepare Sack and Goal objects and also load all enabled repositories from cache only, it doesn't download anything and it doesn't check if metadata are expired. If there is not enough metadata present (repond.xml or both primary.xml and solv file are missing) given repo is either skipped or it throws a RepoError exception depending on skip_if_unavailable configuration. z sack setupT)r`rF)rDr)Z throwExceptZ ignoreMissing)r@rArBrCz%s: using metadata from %s.zloading repo '{}' failure: {}NzIgnoring repositories: %sz, )7rr#rrr`rrrrr4rrrrrrrrr^r_rJZ loadCacherFrGrHrIrMrNrrPrrr RuntimeErrorrKrLrrQrRrOrwrrdrrrrrrrrrrr*r) r3rrrrrSrTrUr4r5r5r6fill_sack_from_repos_in_cachesX      z"Base.fill_sack_from_repos_in_cachecCstjj|jj|_|jjsl|j|j|j r\|j j |jj |jj |jrl|j|j n|jjj |j |jjrtjtdtjtddjtjjd|jdk r|jj|j|jd|_ dS)NzRThe downloaded packages were saved in cache until the next successful transaction.z1You can remove cached packages by executing '%s'.z{prog} clean packages)progF)rrZTempfilePersistorr4rr-Z keepcache_clean_packagesrr+r r<Zget_saved_tempfilesrir,Ztempfiles_to_addrMrrrOrZ MAIN_PROGrhistoryr9r _closeRpmDB)r3r5r5r6_finalize_bases*       zBase._finalize_basecCsB|jr dStjtjjdd|_|j|jddddd|_dS)ztClose all potential handles and clean cache. Typically the handles are to data sources and sinks. Nz Cleaning up.T)r`r^r) rrMlogrr#DDEBUGrrr*)r3r5r5r6r9sz Base.closecCsftjjj|j|}xN|D]F}y|jj|Wqtjjk r\}ztj |WYdd}~XqXqWdS)z?Read repositories from the main conf file and from .repo files.N) rr4readZ RepoReaderr^addrQZ ConfigErrorrMrd)r3ZoptsreaderrSrUr5r5r6read_all_repos"s  zBase.read_all_reposcCs|r d|_|rtjj|_|rd|_|jdk rJtjj|j|_|jj |j_ |jr`|j r`|j j |j dk rt|j jtjj|_d|_g|_|r|rtjdS)z1Make the Base object forget about various things.N)rrr%r&r'rrrr4rraZrollbackrrr9rrrrr.gcZcollect)r3r`r^rr5r5r6r-s$        'z Base.resetcCs|`dS)z6Closes down the instances of rpmdb that could be open.N)_ts)r3r5r5r6rjszBase._closeRpmDB)Z noscriptsZ notriggersZnodocstestZjustdbZ nocontextsnocryptoRPMTRANS_FLAG_NOCAPSZnocapsrcCs|jS)N)r)r3r5r5r6r|sz Base.goalcCs|jdk r|jStjjj|jj|_|jjdxb|jjD]V}|j j |}|dkrdt j t d|q:|jj||jj |}|dk r:|jj|q:W|jjs|jjtj|jjr|jjtjtjtj|jd}|jj||jS)zMSet up the RPM transaction set that will be used for all the work.Nrz!Invalid tsflag in config file: %s)rrr(rZTransactionWrapperr4rYsetFlagsZtsflags_TS_FLAGS_TO_RPMgetrMcriticalrZ addTsFlag_TS_VSFLAGS_TO_RPM pushVSFlagsZdiskspacecheckr)rZRPMPROB_FILTER_DISKSPACEZ ignorearchZRPMPROB_FILTER_IGNOREARCH functoolsreduceoperatoror_Z setProbFilter)r3flagZrpm_flagZvs_flagZ probfilterr5r5r6rs*       zBase._tscCs&|jdkrdS|jj|`d|_dS)z"Releases the RPM transaction set. 
N)rr9)r3r5r5r6rs   cCs$tjjd}tjj|_tjtjjdx|j j D]}|j s@q4|j sHq4|j j}|sXq4tjtjjd|j|j jtjjkrtj|d}tjj|sq4n tj|d}y|jj|Wq4tjjk r}ztd}tj||j|WYdd}~Xq4Xq4W|r|jjj|j j!dg||jS)z6Create the groups object to access the comps metadata.z loading compszGetting group metadataz%Adding group file from repository: %sz groups.xmlz1Failed to add groups file for repository: %s - %sNbasearch)"rr#rrZCompsrrMrrr^r_Z enablegroupsrrJZ getCompsFnrPZgetSyncStrategyrSZSYNC_ONLY_CACHEr Zcalculate_repo_gen_destospathexistsZrepo_gen_decompressZ_add_from_xml_filenamerQ CompsErrorrrZ_irrrX)r3rrrSZcomps_fnZ decompressedrUrr5r5r6rs:       &zBase.read_compscCs*|jdkr$|jj}t|jj|d|_|jS)zeauto create the history object that to access/append the transaction history information. N)rW)rr4rWr r)r3rWr5r5r6 _getHistorys zBase._getHistorycCs|jS)N)r)r3r5r5r6sz Base.cCs t|d|S)Nr)setattr)r3rr5r5r6rscCs t|ddS)Nr)r)r3r5r5r6rszDNF SWDB Interface Object)fgetfsetfdeldoccsFjj}t|jj}|j|jj}xT|jD]H|j}|d}j j |dj j d|j ||ddq:Wx|j D]xj j d|j}t }g}x0|D](} t | |kr|jd| q|j| qW|d} |j| |ddqWx|jD]j j d|j}fdd|D}|j} |krt|jjd rt|j} x0|D](} |j| } tjj| | dkrz| } qzW|j|| fd d }tjj||qWx|jD]ȉ|j}d}x"|D]}|jjkr|}PqW|dkr*|jd}n |j|fd d|D}fd d }tjj|||krz|j|n|j||j j |dj j dqW|j }|rBj!j"t#j$dj}|j%|dxh|D]`|jjd r|d}|j|jj&||j|j j d|j} |j'| qW|S)NrZdddrr{r[cs$g|]}|ks|jjkr|qSr5)name)rZr[) all_obsoletedpkgr5r6r\sz*Base._goal2transaction..)rcsjj|dS)Nod)r" pkg_added)r)r3r5r6r sz(Base._goal2transaction..cs$g|]}|ks|jjkr|qSr5)r)rZr[)rrr5r6r\scsjj|dS)Nr)r"r)r)r3r5r6r!sZudu)flags)Zpkg__neqrU)(rr(rZlist_obsoleted_get_installonly_queryrx installedlist_downgradesZobsoleted_by_packager"rZ add_downgradelist_reinstallsstrinsertrwZ add_reinstall list_installs get_reasonfilterrrrZTransactionItemReasonCompareZ add_installrrZmapall list_upgradespopremoveZ add_upgradeZ list_erasuresr`rqrKIGNORE_EXCLUDESrr set_reasonZ add_erase)r3rtsZinstallonly_queryZinstallonly_query_installedZobsZ downgradedZ nevra_pkg obsoletesZobs_pkgZ reinstalledreasonZobsoleteZreason_obsoletecbZupgradedr[ZerasuresZremaining_installed_queryZ remainingr5)rrr3r6_goal2transactions                        zBase._goal2transactioncCsd|j}|j}|jj}g}g}x6|D].}||krJ|j||dq*|j||q*W||fS)aJ See what packages in the query match packages (also in older versions, but always same architecture) that are already installed. Unlike in case of _sltr_matches_installed(), it is practical here to know even the packages in the original query that can still be installed. r)r_na_dict availablerw)r3qinstZ inst_per_archZavail_per_archZavail_lZinst_lZnar5r5r6_query_matches_installed7s  zBase._query_matches_installedcCs"|jjjj|jd}t|S)z See if sltr matches a patches that is (in older version or different architecture perhaps) already installed. 
)r)r`rqrrrmatcheslist)r3sltrrr5r5r6_sltr_matches_installedKszBase._sltr_matches_installedcsfddjjjDS)z5Get iterator over the packages installed by the user.c3s|]}jj|r|VqdS)N)rZuser_installed)rZr)r3r5r6 Tsz*Base.iter_userinstalled..)r`rqr)r3r5)r3r6iter_userinstalledRszBase.iter_userinstalledcCs0|j||jj|jj d}|jjr,|jd|S)N)allow_uninstall force_bestZignore_weak_depsz./debugdata/rpms)runr4bestZinstall_weak_depsrbZwrite_debugdata)r3r allow_erasingretr5r5r6_run_hawkey_goalWs  zBase._run_hawkey_goalc Cstd}|jtjjd}|jj|j}|jrJ|j|j j j |j n|j jsd|j}|j||j|j j j|j jd|j||s|j jdkr|jtjj|j}tjj|}n |j||_|jj||jdk ot|jdk}|r|jj }|rtjj!|}|dk r"||j"j#|jj$}||jj%7}||jj&7}||jj'7}|j j(|j)||S)zBuild the transaction set.NZdepsolve)rr)*_finalize_comps_transrr#rr"startrZ req_has_eraseZpush_userinstalledr`rqrrr4Zupgrade_group_objects_upgrade_build_comps_solverZ'_exclude_packages_from_installed_groupsZ add_protectedrrZprotected_packagesr debuglevelZ log_decisionsr_format_resolve_problems problem_rulesrQZ DepsolveErrorrrendrpZ_rpm_limitationsrcr*Z run_resolvedrrrrZset_modules_enabled_by_pkgsetra) r3rexcrrsolverrZgot_transactionZnew_pkgsr5r5r6resolve_sH              z Base.resolvec Cs^t|ts|g}tjjjgt|}|js|jj |jj |j r|j j sV|j j rd}t|drx|jrxdj|j}nt|dr|jrdj|j}|jj}|dkr|jj}n|j}|jj|gg||jj||jj|jjd|_dSd}tjtdtj j!|j"j#|j"j$}||jj%|j&|j'}|rxtd}tj(|x|D]}tj(|qXWtj)j*|tjtdtj+j,d} tjtd |j&j-|j&j.tjjj/|dd } |j&j0| } t1| d kr\x&| j2D]}tj3td j4|qWtd d} x| D]} | dt5| 7} qW|j6| }|rP| d|7} tj)j7| ~ tjtd|j&j8t9j:rdS| |jj |jj tj+j,d} tjjj/||d}|j"j;dkrx|j|d}WdQRX| |jj?|j|jjdd}x&tj@jA||j|D]}tjB|qFW|S)Nargs cmdsTzRunning transaction checkz%Error: transaction check vs depsolve:zTransaction check succeeded.ztransaction testzRunning transaction test)rrzRPM: {}zTransaction test error: z %s zTransaction test succeeded.r)displaysFzRunning transaction)rcSs,g}x"|D]}|jdj|t|q W|S)Nz{}: {})rwrOr)actionZtsismsgstsir5r5r6 _pto_callbacks z*Base.do_transaction.._pto_callback)C isinstancerrZyumZrpmtransZLoggingTransactionDisplayr rrarZupdateFailSafeDatargroupenvhasattrr"rr$rlastr`_rpmdb_versionend_rpmdb_versionbegrr*Zrun_pre_transactionZrun_transactionr+rMrrrZbuild_rpmdb_lockr4rrZ_populate_rpm_tsr_run_rpm_checkerrorrQZTransactionCheckErrorr#rorderZcleanZRPMTransactionrrpmessagesrrOr _trans_error_summaryrc isTsFlagSetr(RPMTRANS_FLAG_TESTrr&r2_run_transactionZunload_removed_pluginsrZ_post_transaction_outputrN)r3ZdisplaycmdlineoldZ rpmdb_versiontidrr)rrZtestcbZtserrors errstringZdescrsummaryrZdisplay_r+r5r5r6do_transactions                              zBase.do_transactioncCsd}tjd}i}x|j|D]t}|jddkr>t|jdntjt|jdd}|jd|krr|||jd<||jd|kr|||jd<qW|r|tdd 7}x4|D],}|d td d ||j |||d 7}qW|sd Std d|}|S)zParse the error string for 'interesting' errors which can be grouped, such as disk space issues. :param errstring: the error string :return: a string containing a summary of the errors z9needs (\d+)(K|M)B(?: more space)? 
on the (\S+) filesystemr'Mrg@zDisk Requirements:r%z z7At least {0}MB more space needed on the {1} filesystem.Nz Error Summaryz ------------- ) recompilefinditerr-rmathZceilrr rO)r3r?r@pZdiskmZ size_in_mbkr5r5r6r8s&   *zBase._trans_error_summarycCs|jjo|jjtj S)N)r4Zhistory_recordrr9r(r:)r3r5r5r6_record_history%szBase._record_historycCsd}|jrt|jj}|jjj}|j|dj}|jj }|j j }|dk rX|j }|dksh||krt jtdjtjjdd}t|dr|jrdj|j}nt|dr|jrdj|j}|jjr|jjnd} |j j||g|| }|jjr$tjd } | r$ytj| Wnd } YnXt jtjjd |j j|j!d} t jtjjd |jjrzytj| Wn YnXtjj"|j |j#| dkrnt$| d krd d |j D} | sfx&|j%D]} t j&tdj| qWtd} tj'j(| nlt j&tdx | D]}t j&t)|d qW|jrR|j j*t+j, rR|j j-|td} tj'j(| xbdD]Z}t||rlt.||}yt/j0|Wn.t1t2fk rtd} t j&| |YnXqlWt3|j#j4|_5|j j*t+j,s|j6|j7|S)zh Perform the RPM transaction. :return: history database transaction ID or None N)rz RPMDB altered outside of {prog}.)rr"r#r$rBrzRPM transaction start.zRPM transaction over.cSsg|]}|jr|qSr5)ZFailed)rZZelr5r5r6r\esz)Base._run_transaction..zRPM: {}zCould not run transaction.zTransaction couldn't start: ts_all_fn ts_done_fnz$Failed to remove transaction file %s)rMrN)8rLr r4Zhistory_record_packagesr`rqrrrr1rr0r2rMrNrrOrrZMAIN_PROG_UPPERr/r"rr$commentr3Z reset_nicernicerr#rrr!Z_sync_rpm_trans_with_swdbrrpr7rrQrcr r9r(r:rgetattrr unlink_frOSErrorboolZ install_setr,_verify_transactionZverify_tsi_package)r3rr>Zusing_pkgs_patsinstalled_queryZ using_pkgsrpmdbvZlastdbvr<rOZoniceerrorsZfailedrrUr[fnr5r5r6r;)s~                 zBase._run_transactioncsdd|jD}t|fdd}tjjd}d}tjj|}|jj}t dd|D}xH|j j D]<} | j } x.| j D]"} | j|kr| jd| jqWqjWx|D]} || j|}qW|j} |j j| |d|_dS) NcSsg|]}|jtjjkr|qSr5)r(rrZ#TransactionItemAction_REASON_CHANGE)rZr*r5r5r6r\sz,Base._verify_transaction..cs |d7}dk r|||S)Nrr5)rcount)total verify_pkg_cbr5r6display_banners z0Base._verify_transaction..display_bannerzverify transactionrcSsg|] }|jqSr5)r)rZr[r5r5r6r\sT)rrprr#rr` rpmdb_sackrqrrrr-ZgetCompsGroupItemZ getPackagesZgetNameZ setInstalledrrr1rr+)r3r\Ztransaction_itemsr]rrZr^rnamesZtigrIr*rWr5)r[r\r6rUs(       zBase._verify_transactionc sXtjj|jj|jj}|tj}tdd|D}tdd|D} j j j dkrnj t||| dnj t||tj j |||jrtjjjtfdd|D} tj jd|j} |jj} | dk} xԈjo| s| dkr| dkr| d 8} td }tj|d djD}fd d|D}td d|D}j t||tj j |||jrtjjj| tfdd|D7} tj j| |i} qWjrtjjjj}tj|WdQRX|dk r|| || \}}||krT||krtd}n||kr,td}d||d}tj||d|d|dS)Ncss|] }|jVqdS)N) download_size)rZploadr5r5r6rsz1Base._download_remote_payloads..cSsg|]}t|tjjr|qSr5)r,rdrpmZ DeltaPayload)rZZpayloadr5r5r6r\sz2Base._download_remote_payloads..)Z total_drpmsc3s|]}j|VqdS)N)_bandwidth_used)rZrb)rXr5r6rsrrz,Some packages were not downloaded. Retrying.cSsg|]}|qSr5r5)rZrr5r5r6r\scs g|]}tjj|tjjqSr5)rrS _pkg2payload RPMPayload)rZr)progressr5r6r\scss|] }|jVqdS)N)ra)rZrbr5r5r6rsc3s|]}j|VqdS)N)re)rZrb)rXr5r6rsz?Delta RPMs reduced %.1f MB of updates to %.1f MB (%d.1%% saved)zIFailed Delta RPMs increased %.1f MB of updates to %.1f MB (%d.1%% wasted)dir')rrii)rrZbuild_download_lockr4rrrsumrpr__code__ co_argcountrSZ_download_payloadsZ_irrecoverablerQZ DownloadErrorZ_update_savingZ _recoverableretriesrrMrZ errmap2str)r3payloadsrcrhcallback_totalZ fail_fastrZ beg_downloadZest_remote_sizeZ total_drpmZ remote_sizeZsavingrmZforeverrZremaining_pkgsrealZfullZpercentr5)rXrhr6_download_remote_payloadssb              zBase._download_remote_payloadsc s|j|\}}|rzdkr$tjjtjj|jjj|j j |j dd|Dfdd|D}|j |||j j rxX|D]P}|jrtjj|j|jjd}ntjj|jj|jjd}tj||j j qWdS)aDownload the packages specified by the given list of packages. 
`pkglist` is a list of packages to download, `progress` is an optional DownloadProgress instance, `callback_total` an optional callback to output messages about the download operation. NcSsg|] }|jqSr5)localPkg)rZrr5r5r6r\sz*Base.download_packages..cs$g|]}tjj|jtjjqSr5)rrSrfZ delta_factoryrg)rZr)rcrhr5r6r\s/)_select_remote_pkgsrr!ZNullDownloadProgressrcZ DeltaInfor`rqrr4Zdeltarpm_percentager?rqr=ZbaseurlrrrZget_local_baseurllocationlstriprSZpkgdirshutilcopy) r3Zpkglistrhro remote_pkgsZ local_pkgsrnrrur5)rcrhr6download_packagess"      zBase.download_packagescCsg}|s |S|jjr&tjjtdg}x|D]}tjj| rhd|krhtj j ||j |}|j |gy|j |jj|Wq0tk r}ztj||j |WYdd}~Xq0Xq0W|jdd|r|rttdjdj||S)NzACannot add local packages, because transaction job already existsz://T)rzzCould not open: {}r#)rZ req_lengthrrQrcrrrrrZ_urlopen_progressr4r?rwr`Zadd_cmdline_packagerrMrdrrOr)r3 path_liststrictrhpkgsZ pkgs_errorrrUr5r5r6add_remote_rpmss(       zBase.add_remote_rpmsc Cs|jr|jj}d}n|j|j}|j}|j }|r|jj}tj j j |}tj j j ||j}tjj|j}~|dkrd} d} n|dkr|rd} nd} td|} n\|dkrd} td|} nB|dkr|rd} nd} d} td|} n|d krd} td |} nd} d} | | fS) aVerify the GPG signature of the given package object. :param po: the package object to verify the signature of :return: (result, error_string) where result is:: 0 = GPG signature verifies ok or verification is not required. 1 = GPG verification failed but installation of the right GPG key might help. 2 = Fatal GPG verification error, give up. rrBrr'z"Public key for %s is not installedzProblem opening package %srDz Public key for %s is not trustedrdzPackage %s is not signed) _from_cmdliner4Zlocalpkg_gpgcheckr^r~ZgpgcheckgpgkeyrYrr(rinitReadOnlyTransactionZ miscutilsZcheckSigrrrrbasenamer) r3pocheckZ hasgpgkeyrSrootrZ sigresultZlocalfnresultrr5r5r6_sig_check_pkg(sF    zBase._sig_check_pkgcCs |j|S)aVerify the GPG signature of the given package object. :param pkg: the package object to verify the signature of :return: (result, error_string) where result is:: 0 = GPG signature verifies ok or verification is not required. 1 = GPG verification failed but installation of the right GPG key might help. 2 = Fatal GPG verification error, give up. )r)r3rr5r5r6package_signature_checkcs zBase.package_signature_checkc Cslxf|D]^}tjj|sqytj|Wn&tk rLtjtd|wYqXtj t j j td|qWdS)NzCannot remove %sz %s removed) rrrr rRrSrMrdrrrr#r)r3packagesrYr5r5r6rrs   zBase._clean_packagesrhcCsv|dkr|jj}|dkr*|j|||||Stj|j||||d}|dksTt|dkr\|dSt||}tjdd|S)aRReturn a :class:`misc.GenericHolder` containing lists of package objects. The contents of the lists are specified in various ways by the arguments. :param pkgnarrow: a string specifying which types of packages lists to produces, such as updates, installed, available, etc. 
:param patterns: a list of names or wildcards specifying packages to list :param showdups: whether to include duplicate packages in the lists :param ignore_case: whether to ignore case when searching by package names :param reponame: limit packages list to the given repository :return: a :class:`misc.GenericHolder` instance with the following lists defined:: available = list of packageObjects installed = list of packageObjects upgrades = tuples of packageObjects (updating, installed) extras = list of packageObjects obsoletes = tuples of packageObjects (obsoleting, installed) recent = list of packageObjects N)showdups ignore_casermrcSs |j|S)N)Z merge_lists)abr5r5r6rsz(Base._do_package_lists..)r4Zshowdupesfromrepos _list_patternrpartialrpmapr)r3 pkgnarrowpatternsrrrmZlist_fnZyghsr5r5r6_do_package_listss  zBase._do_package_listsc&sfddfdd}fdd}tj|d}g} g} g} g} g} g}g}g}g}g}|}jj}|dk rtjj||d}|jjd d }|d kri}i}xH|jD]<}|||j <|rq|j |j f}||ks|||kr|||<qWt ||j } ||j}|s|jd d }x|D]}|rN|j |krB| j|n | j|nT|j |j f}|j |krr| j|n0||ks|j||r| j|n | j|q Wn|dkr||jd d} j| d d} | jddgd| jj} nP|dkrt ||j} n2|dkrB|r||j}|jj}x\|D]Tj j f}|j|g}fdd|D}t|dkr| jn | jq@Wn||jjd d j}|jjj}xz|D]r\} }!|| |!fd|j| |!fdgd}"|" s j|"r| jn"j|"r.| jn | jqWn|dkrh||jjj}#|#j}n|dkrfdd|jD}n|dkr|j}$|jjj|$d}j|d d d}|jddgdg}xl|D],j}%|jfdd|$j|%d DqWn6|d!krD|j}|s2|jd d }||j j!j"}| |_| |_| |_#| |_$| |_%||_||_&||_"||_||_'|S)"Ncsdkr dSjj|kS)z:Test whether given package originates from the repository.NT)rrS)package)rmr3r5r6 is_from_reposz(Base._list_pattern..is_from_repocsfdd|DS)z=Filter out the packages which do not originate from the repo.c3s|]}|r|VqdS)Nr5)rZr)rr5r6rsz=Base._list_pattern..pkgs_from_repo..r5)r)rr5r6pkgs_from_reposz*Base._list_pattern..pkgs_from_repocsdkr |S|jdS)z=Filter out the packages which do not originate from the repo.N)rm)r)rq)rmr5r6query_for_reposz*Base._list_pattern..query_for_repo)iter)rF)rkrhT)Zlatest_per_arch_by_priorityupgrades)Zupgrades_by_priority)upgradesrcnosrc) arch__neqrrcsg|]}|jjkr|qSr5)evr)rZr) avail_pkgr5r6r\sz&Base._list_pattern..r autoremoveextrascsg|]}|r|qSr5r5)rZr)rr5r6r\sr)Zobsoletes_by_priority)rdrcsg|] }|fqSr5r5)rZr=)newr5r6r\.s)providesrecent)(r Z GenericHolderr`rqrrsrtrvrZpkgtuprrr rrrrrwZevr_gt_merge_update_filterslatestrrrrpZevr_eq _unneededrswdbrrrextendZ_recentr4rreinstall_available old_availableupdatesobsoletesTuplesr)&r3rpatternrrrmrrZyghrrrrrrrrrrZicrr|ZdinstZndinstrkeyZavailrZinstalled_dictinstalled_pkgsZsame_verZavailable_dictrrZinst_pkgZ autoremove_qrZobsoleted_reldepsr5)rrrrmr3r6rs                                      zBase._list_patterncCs|j|7_t|S)N)rrp)r3transr5r5r6_add_comps_transEszBase._add_comps_transcs|j}|sdS|jjjdd}|jfdd|Dd}|j|}x|D]}jj|tjj qLW|j |}|j |}|rx |D]}j j |j jdqWdS)z Mark to remove packages that are not required by any user installed package (reason group or user) :param query: dnf.query.Query() object NF)rbcs g|]}jjj|jr|qSr5)rr-Zis_removable_pkgr)rZr[)r3r5r6r\Usz,Base._remove_if_unneeded..)r) clean_deps)rZ_safe_to_removerrr differencerrrZ TransactionItemReason_DEPENDENCY intersectionreraser4clean_requirements_on_remove)r3rqZ unneeded_pkgsZunneeded_pkgs_historyZpkg_with_dependent_pkgsrZremove_packagesr5)r3r6_remove_if_unneededIs     zBase._remove_if_unneededcs>j}jjd}fdd}fdd}dd}jjjdd }|jtj|dd f|j tj|d d f|j |f|j |ff}x|D]\}} x|D]} d | j i} | j r| jd |ijjjf| j} | jddgd| s | j } | j r| d|7} tjtdj| q| | || }jjj| j qWqWj|dS)Nrcs,tjjj}|j|djj|d|S)N)r)select)rselectorSelectorr`rrr)rq remove_query comps_pkgr )r3r5r6 trans_upgradegs 
z1Base._finalize_comps_trans..trans_upgradecsjjdkrr|js"j||dq|jj}j|tjj j }|j dj |j |jdjj|| dnltjj j }|jr|j dj |j |jdn,jjr|jj jj|d}|j |djj|| d|S)Nrh)r|z ({} if {}))r)roptional)r)r)r4multilib_policyZrequires_install_multiarchrrx_report_already_installedrrrr`rrOrrinstallrrurqrr)rqrrr|rVr )r3r5r6 trans_installms     z1Base._finalize_comps_trans..trans_installcSs|j|}|S)N)ru)rqrrr5r5r6 trans_removes z0Base._finalize_comps_trans..trans_removeT)ri)r|Frrrr)r.zNo match for group package "{}")rr4rXr`rqrrrrr install_optrrrZ basearchonlyr<rxrMrdrrOrZ group_membersrr)r3rrrrrrZattr_fnattrrYrZ query_argsrZpackage_stringr5)r3r6rcs4        zBase._finalize_comps_transcs fdd}tjjjj|S)Nc sNjjjj|d}|sdSyjjj|dStk rHtj j SXdS)N)rr) r`rqrrrrr(rAttributeErrorrrZTransactionItemReason_UNKNOWN)Zpkgnamer)r3r5r6 reason_fnsz+Base._build_comps_solver..reason_fn)rrZSolverrr)r3rr5)r3r6rs zBase._build_comps_solvercCsH|j}t|tstjj|}|j|||p.t||}|s>dS|j|S)a&Installs packages of environment group identified by env_id. :param types: Types of packages to install. Either an integer as a logical conjunction of CompsPackageType ids or a list of string package type ids (conditional, default, mandatory, optional). r) rr,rrrlistToCompsPackageTypeZ_environment_installrr)r3env_idtypesexcluder|exclude_groupsr rr5r5r6environment_installs  zBase.environment_installcCs|j}|j|}|j|S)N)rZ_environment_remover)r3rr rr5r5r6environment_removes zBase.environment_removec sfddd}|r2fdd|D}tjj|}j}t|tsPtjj|}|j ||||}|shdS|rt|j } n|j } t j td|| j|S)anInstalls packages of selected group :param pkg_types: Types of packages to install. Either an integer as a logical conjunction of CompsPackageType ids or a list of string package type ids (conditional, default, mandatory, optional). :param exclude: list of package name glob patterns that will be excluded from install set :param strict: boolean indicating whether group packages that exist but are non-installable due to e.g. 
dependency issues should be skipped (False) or cause transaction to fail to resolve (True) cs6tjj|r,jjj|d}tdd|S|fSdS)N) name__globcSs|jS)N)r)rIr5r5r6rszABase.group_install.._pattern_to_pkgname..)rris_glob_patternr`rqrrr)rr)r3r5r6_pattern_to_pkgnames z/Base.group_install.._pattern_to_pkgnameNcsg|] }|qSr5r5)rZrI)rr5r6r\sz&Base.group_install..rz#Adding packages from group '%s': %s) itertoolschain from_iterablerr,rrrrZ_group_installrrrMrNrr) r3grp_idZ pkg_typesrr|Zexclude_pkgnamesZnested_excludesr rZinstlogr5)rr3r6 group_installs"     zBase.group_installcCst|j|jtjtjBtj}d}d}x|D]} y|j| } Wn:tjj k rv} zt j t | d}w*WYdd} ~ XnXx2| j D](} | s| |kr||j| |||d7}qWx&| jD]} ||j| ||||d7}qWq*W| r|rtjjtd|S)NrTF)rr|)rr|rzNothing to do.)rrr ENVIRONMENTSGROUPS AVAILABLErrrQrrMr5r groupsr environmentsrrcr)r3rrr|rrrcntdonerreserrZgroup_idrr5r5r6env_group_installs(    zBase.env_group_installcCs|j}|j|}|j|S)N)rZ _group_remover)r3rr rr5r5r6 group_removes zBase.group_removecCst|j|jtjtjBtj}y|j|}WnFtjj k rp}z&t j dt |tjj tdWYdd}~XnXd}x|jD]}||j|7}q~Wx|jD]}||j|7}qW|S)Nz Warning: %szNo groups marked for removal.r)rrrrr INSTALLEDrrrQrrMr5r rcrrrrr)r3rrrrrr.grpr5r5r6env_group_remove s  "  zBase.env_group_removec CsLt|j|jtjtjBtj}d}x |D]}y|j|}Wn6tjj k rr}zt j t |w(WYdd}~XnXxX|j D]N}y|j|d}Wq|tjj k r}zt j t |w|WYdd}~Xq|Xq|WxZ|jD]P}y|j|d}Wqtjj k r$}zt j t |wWYdd}~XqXqWq(W|sHtd} tjj| dS)NFTzNo group marked for upgrade.)rrrrrrrrrQrrMr5r renvironment_upgrader group_upgraderrZCliError) r3rrZgroup_upgradedrrrr.rrr5r5r6env_group_upgrades6      zBase.env_group_upgradecCs|j}|j|}|j|S)N)rZ_environment_upgrader)r3rr rr5r5r6r9s zBase.environment_upgradecCs|j}|j|}|j|S)N)rZ_group_upgrader)r3rr rr5r5r6r@s zBase.group_upgradecCs|jjd}tjj|rdS|jj}tjjj |d}|j tj tj B|j dd}t|}~~|dkrldStjj|}tjj|stj|t|d}|j~dSdS) zChecks for the presence of GPG keys in the rpmdb. 
:return: 0 if there are no GPG keys in the rpmdb, and 1 if there are keys z/.gpgkeyschecked.yumr)rrz gpg-pubkeyrwN)r4rrrrrYrr(rrr_RPMVSF_NOSIGNATURES_RPMVSF_NODIGESTSZdbMatchrpdirnamemakedirsopenr9)r3ZgpgkeyscheckedrYZmytsidxkeysZmydirZfor5r5r6_gpg_key_checkGs&       zBase._gpg_key_checkc Cs|j|\}}|j|x~|D]v}tjj|j}|jjj|d}|jj rb|j |jjj|d}|j |d}|dk r|j |d}|j j || dqWt|S)N)r)r)rm)rr)r rrrrr`rqrrr4rrurrrrp) r3rqrmr| already_instrrr rr5r5r6res    zBase._install_multiarchcCs,tj}tj}t||t||||fS)a Categorize :param install and :param exclude list into two groups each (packages and groups) :param install: list of specs, whether packages ('foo') or groups/modules ('@bar') :param exclude: list of specs, whether packages ('foo') or groups/modules ('@bar') :return: categorized install and exclude specs (stored in argparse.Namespace class) To access packages use: specs.pkg_specs, to access groups use: specs.grp_specs )argparseZ Namespacer )r3rr install_specs exclude_specsr5r5r6_categorize_specsss   zBase._categorize_specscsddd|jDfdd|jD}|jjj|d}|jjjd}|jj||jj|dS)NcSsg|]}tjj|r|qSr5)rrr)rZrr5r5r6r\sz/Base._exclude_package_specs..csg|]}|kr|qSr5r5)rZr) glob_excludesr5r6r\s)r)r) pkg_specsr`rqrry)r3rexcludesr}Zglob_exclude_queryr5)rr6_exclude_package_specss  zBase._exclude_package_specsc Cst}t|j|jtjtjBtjtjB}x|D]}y|j|}Wn8t j j k rx}zt j dt|w.WYdd}~XnX|j|j|j|jx8|jD].}|jj|}x|jD]} |j| jqWqWq.Wt|S)NzWarning: Module or %s)rrrrrrrrrrrQrrMr5r r<rrZ_environment_by_idZ groups_iterrrPr ) r3 group_specsrrrrrZenvironment_idZ environmentr-r5r5r6_expand_groupss"       zBase._expand_groupsc Csx|D]x}yL|jj}d|kr<|jd}|d}|djd}|j|g|||j|jWqtjjk r||j d|YqXqWdS)Nrsrr,@) r4Zgroup_package_typessplitrr grp_specsrrQrcrw)r3rrZskippedr|Z group_specrrr5r5r6_install_groupss   zBase._install_groupscCs|dkr g}g}g}g}g} |j||\} } |j| xd| jD]Z} y|j| |||dWq>tjjk r} ztjt | |j | WYdd} ~ Xq>Xq>Wg}f}t o| j rLy tj jj|}|j| j |Wnxtjjk rH} zV| jr x| jD]}|j |qW| jr2x| jD]}|j d|qW| j}WYdd} ~ XnXn| j }|rv|j| j | _ |j|| |||s|s|s| s|rtjj|||| |ddS)N)rmr|formsr)no_match_group_specserror_group_specsno_match_pkg_specserror_pkg_specsmodule_depsolv_errors)rrrrrrQ MarkingErrorrMr5rrwrorrerfZ ModuleBaseZ MarkingErrorsrrr rr)r3rrrmr|rrrrrrrspecrUZno_match_module_specsr rfZe_specr5r5r6rsN      zBase.install_specsc Cstjj|}|j|j|dd}|jjdks4|j|rr|d}|dk rP|j|d|sb|j ||||j |||dS|jjdkr|j |||jj |d |d }|s|j |||x|D]} |j j| | d qWd Sd S)z@Mark package(s) given by pkg_spec and reponame for installation.F)rwith_srcrhrqN)rm)rmr|rT)rrrmreportssolution)rrrr)rrsrtget_best_solutionr`r4rZ_is_arch_specifiedrr_raise_package_not_found_errorr_get_best_selectorsrrr) r3pkg_specrmr|rr|rrsltrsr r5r5r6rs,    z Base.installcCs|jrd}t||jjjj|j|jdgd}|shtd}t j ||jt j j td|j|jn\t|d|krt jj|j}|j|gd|jj|| dd Std }t j ||jdSdS) Nz-downgrade_package() for an installed package.noarch)rrz.Package %s not installed, cannot downgrade it.zNo match for argument: %sr)r)rrrzCPackage %s of lower version already installed, cannot downgrade it.) 
_from_systemNotImplementedErrorr`rqrrrrrrrMrdrrQr rusortedrrrrr)r3rr|rrr r5r5r6package_downgrades  zBase.package_downgradecCs|jjj|j|j|j}|j|\}}||kr>|j|gnT|tj j |krdt j j td|jn.t jj|j}|j|gd|jj|| ddS)NzNo match for argument: %s)r)rrr)r`rq_nevrarrrr rrrrrrQPackageNotFoundErrorrrurrrrr)r3rr|rrrr r5r5r6package_installszBase.package_installcCsf|jjjj|j|j|jdr0|jj|dSt d}t j |t |t jjt d|j|jdS)N)rrrrz.Package %s not installed, cannot reinstall it.zNo match for argument: %s)r`rqrrrrrrrrrrMrdrrrQr ru)r3rrr5r5r6package_reinstall(s   zBase.package_reinstallcCs|jj|dS)Nr)rr)r3rr5r5r6package_remove0s zBase.package_removecCs`|jrd}t||jdkr6td}tj||jdS|jjj j }|j j r|jjj |gdj |drtjj|j}|j|gd|jj|ddS|jd kr|j|jd }n|j|j|jd gd }|std }tj||jtjjtd |j|jnZt|d|krBtjj|j}|j|gd|jj|ddStd}tj||jdSdS)Nz+upgrade_package() for an installed package.rz.)r)r)rmT)rrcSsg|] }|jqSr5)r)rZrr5r5r6r\xs)r)rr)r`rqrrrrrrurrrrrrrrrr) r3rqrrmrZ installed_allrrVr r5r5r6_upgrade_internalYs "    zBase._upgrade_internalc Csttjj|}|j|j}|d}|rZtjj|}| oH|doH|djr*|dj}|jjj j }|j j r||j |dn|jjjdd} | s*|j |dj } | std} tj| |tjjtd||nV|djotjj|dj r*| j|djd s*td } tj| d j||dj|j j oH|doH|dj} |j|| ||Stjjtd||dS) Nrqnevra)rT)ri)rz(Package %s available, but not installed.zNo match for argument: %s)rz?Package %s available, but installed for different architecture.z{}.{})rrsrtrr`rrrrqrrxr4rrrrrrMrdrQPackagesNotInstalledErrorrrOZ has_just_namerr ) r3rrmr|rrZwildcardpkg_namerZ obsoletersZinstalled_namerrr5r5r6rs0    &   z Base.upgradecCs|j|jj|jj|ddS)N)r)rr`rqr4r)r3rmr5r5r6 upgrade_allszBase.upgrade_allcCs|dkr|jjnxtjj|}|j|jdd}|djtj d|j |||j j dd}|spt jtd|dSx|D]}|jj|d qvWd S) NF)r rq) reponame__neqT)rrr zNo package %s installed.r)rr)rZdistupgrade_allrrsrtrr`rrrKZSYSTEM_REPO_NAMErr4rrMrrZ distupgrade)r3rrsrrr r5r5r6 distro_syncs   zBase.distro_syncc Cst|||gr||7}d}|rF|rFx4|D]}td}tj||q(Wn|rX|j|rXd}xX|D]P}y|j||dWn4tjjk r} ztj t | WYdd} ~ Xq^Xd}q^W|stjtdn4|j j j |jj|jjd} x| D]} |j| qWdS)zRemoves all 'leaf' packages from the system that were originally installed as dependencies of user-installed packages but which are no longer required by any such package.FzNot a valid form: %sT)rNzNo packages marked for removal.)rb)anyrrMrdrrrrQr rrr`rqrrrr4rbr) r3rrr filenamesrZgrp_specrrrUr}rr5r5r6rs,      zBase.autoremovecsptjj|jj|d}fdd|jD}|sBj||jj}x|D]}j j ||dqPWt |S)z'Mark the specified package for removal.)rcs(g|] }dks jj|kr|qS)N)rrS)rZr)rmr3r5r6r\szBase.remove..)r) rrsrtrvr`r"_raise_package_not_installed_errorr4rrrrp)r3rrmrr rrrr5)rmr3r6rs z Base.removec s tjj|}|jj}fdd|jD}|j} |dk rL| j|d|dk r`| j|dtjj | } |stj j d|| j d} j j} x\|D]T} y| t| }Wn*tk r|swjj| | dYnXjj|| d7} qW| dkrtj jd||| S) Ncs(g|] }dks jj|kr|qS)N)rrS)rZr) old_reponamer3r5r6r\sz"Base.reinstall..)rm)r$zno package matchedr)rr)rrsrtrvr`rrrrrqZ_per_nevra_dictrQr!rr4rr KeyErrorrrrZPackagesNotAvailableError)r3rr)Z new_reponameZnew_reponame_neqZ remove_nar|rrZ available_qZavailable_nevra2pkgrrZ installed_pkgZ available_pkgr5)r)r3r6 reinstalls6          zBase.reinstallcCs |j|S)zMark a package to be downgraded. This is equivalent to first removing the currently installed package, and then installing an older version. ) downgrade_to)r3rr5r5r6 downgrade szBase.downgradec Cstjj|}|j|j}|s6td|}tjj||d}|j}t |j j }|jj j j|d} t| dkrtd|}tjj|||xn| j j D]^} |jj| d} | std}tj|| qtjj|j} | j| d|jj| | dd}qW|S) zDowngrade to specific version if specified otherwise downgrades to one version lower than the package installed. 
zNo match for argument: %sr)rz6Packages for argument %s available, but not installed.zDPackage %s of lowest version already installed, cannot downgrade it.)r)rrr)rrsrtrvr`rrQrrr  _name_dictrrqrrrrpr!Z downgradesrrMrdrrrrr) r3rr|r|rrrZavailable_pkgsZavailable_pkg_namesZ q_installedr"Zdowngrade_pkgsr r5r5r6r, s.       zBase.downgrade_tocs|jjjd}|r |gfStjj|j}|r>|gfSjdsRjdr^dg}n&jdrr|gfSfddd D}|jjj|d|fS) N)Z file__glob/bin//sbin/z/usrrscsg|] }|qSr5r5)rZprefix) provides_specr5r6r\E sz!Base.provides.. /usr/bin/ /usr/sbin/)r/r0r3r4)r`rqrrrZ _by_provides startswith)r3r2Z providersZbinary_providesr5)r2r6r6 s      z Base.providesc Csddd}||krtd||}|rDd|} |jj| tj||rfd|} |jj| tj||rd|} |jj| tj||rd|} |jj| tj||rd |} |jj| tj|d S) a It modifies results of install, upgrade, and distrosync methods according to provided filters. :param cmp_type: only 'eq' or 'gte' allowed :param types: List or tuple with strings. E.g. 'bugfix', 'enhancement', 'newpackage', 'security' :param advisory: List or tuple with strings. E.g.Eg. FEDORA-2201-123 :param bugzilla: List or tuple with strings. Include packages that fix a Bugzilla ID, Eg. 123123. :param cves: List or tuple with strings. Include packages that fix a CVE (Common Vulnerabilities and Exposures) ID. Eg. CVE-2201-0123 :param severity: List or tuple with strings. Includes packages that provide a fix for an issue of the specified severity. Z__eqgZ __eqg__gt)eqZgtez Unsupported value for `cmp_type`Z advisory_typeadvisoryZ advisory_bugZ advisory_cveZadvisory_severityN)rr/ setdefaultrr<) r3Zcmp_typerr7ZbugzillaZcvesZseverityZcmp_dictZcmprr5r5r6add_security_filtersI s& zBase.add_security_filterscCs i|_dS)z, Reset all security filters N)r/)r3r5r5r6reset_security_filtersn szBase.reset_security_filtersc Cs>|jp |j s| r|S|jjjdd}|jrRx|jD]}|j|}q8W|g|_|jrx<|jjD].\}}|rx|d}||i} |j|jf| }qdW|j|}|s:|r:|j }t |j j } | dkr:|dkrt dj| } t dj| } tjt| | | n2t dj|| } t d j|| } tjt| | | |S) z Merge Queries in _update_filters and return intersection with q Query @param q: Query @return: Query T)riZ __upgraderNz3No security updates needed, but {} update availablez4No security updates needed, but {} updates availablez    zBase._merge_update_filtersc sjrtd}t||jjj|jk}|r:gnj}fdd}d}|jjjx |D]} t j j | } x| D]} t j |j| j| jdkrtd}tj|| | jq|jjrt jjj| j| j} t jjj| } tjt jj| | | | _|jjrt j j| | n t j j | d}|jj!r:d}n|jj"r|jjr| t jj#j$t jj#j%fkrd}tjt jj&tdnd}tjt jj&td nd}n<|r|| j| j| | j'| jd }n|r|| j| j}|sd}q|jj(t)j*}|r|jj+}|jj,|t)j*|jj-t j.| j}|rD|jj,||dkrjtd |}t j/j0||tjtd d}qWqhW| r|rt j/j0td |stdj1}t j/j0|||j2\}}|dkr|rtd}tj|t3|}t j/j0||dS)aRetrieve a key for a package. If needed, use the given callback to prompt whether the key should be imported. :param po: the package object to retrieve the key of :param askcb: Callback function to use to ask permission to import a key. The arguments *askcb* should take are the package object, the userid of the key, and the keyid :param fullaskcb: Callback function to use to ask permission to import a key. This differs from *askcb* in that it gets passed a dictionary so that we can expand the values passed. :raises: :class:`dnf.exceptions.Error` if there are errors retrieving the keys z6Unable to retrieve a key for a commandline package: %scs0|tdd7}|tddjj7}|S)Nz. 
Failing package is: %sz zGPG Keys are configured as: %sz, )rrr)r)rrSr5r6_prov_key_data sz1Base._get_key_for_package.._prov_key_dataFrz)GPG key at %s (0x%s) is already installedTzThe key has been approved.zThe key has been rejected.)ruseridZhexkeyidkeyurl fingerprint timestampzKey import failed (code %d)zKey imported successfullyzDidn't install any keyszThe GPG keys listed for the "%s" repository are already installed but they are not correct for this package. Check that the correct key URLs are configured for this repository.z+Import of key(s) didn't help, wrong key(s)?N)4rrrr^r~rPr1rrrZcryptoZretriever Z keyInstalledrZrpm_idrArMrZshort_idr4rrZKeyInfoZfrom_rpm_key_objectr>Zraw_keyZDNSSECKeyVerificationZverifyZ nice_user_msgurlZlog_dns_key_importZlog_key_importZassumenoZ assumeyesZValidityZVALIDZPROVEN_NONEXISTENCEZany_msgr@r9r(r:Z getTsFlagsrZpgpImportPubkeyZ procgpgkeyrQrcrrr )r3raskcb fullaskcbrZ key_installedZkeyurlsr=Z user_cb_failr?rrZ dns_input_keyZ dns_resultZrcZ test_flagZ orig_flagsrerrmsgr5)rrSr6_get_key_for_package s                zBase._get_key_for_packagecCs|j|||dS)aRetrieve a key for a package. If needed, use the given callback to prompt whether the key should be imported. :param pkg: the package object to retrieve the key of :param askcb: Callback function to use to ask permission to import a key. The arguments *askcb* should take are the package object, the userid of the key, and the keyid :param fullaskcb: Callback function to use to ask permission to import a key. This differs from *askcb* in that it gets passed a dictionary so that we can expand the values passed. :raises: :class:`dnf.exceptions.Error` if there are errors retrieving the keys N)rF)r3rrCrDr5r5r6package_import_key$ szBase.package_import_keycCs4g}|jjx |jjD]}|jt|qW|S)N)rrZproblemsrwr )r3resultsZprobr5r5r6r45 s  zBase._run_rpm_checkw+bcKstjj||j||f|S)z Open the specified absolute url, return a file object which respects proxy setting even for non-repo downloads )rrZ_urlopenr4)r3rBrSmoder<r5r5r6urlopen@ sz Base.urlopencCs,|dkr|jjtjd}|j|jjd}|S)N)r)r)rrqrKrrr4r)r3rZ installonlyr5r5r6rH szBase._get_installonly_querycCsrtjj|dd}|j|jdddd}|drn|drn|djrn||ddjkrntjtdj |ddjdS) NT)rF)rjrkrlrqr rz * Maybe you meant: {}) rrsrtrr`rrMrrrO)r3rr|rr5r5r6_report_icase_hintN s   zBase._report_icase_hintcCsdd}g}g}x6|D].}|jr:|jtjkrD|j|q|j|qWtd}|||sjtjjtd|j j rtd}|||stjjtdg}||fS)a  Check checksum of packages from local repositories and returns list packages from remote repositories that will be downloaded. Packages from commandline are skipped. 
:param install_pkgs: list of packages :return: list of remote pkgs cSsxd}xn|D]f}d}y |j}Wn0tk rN}ztjt|WYdd}~XnX|dk r tj|j||jd}q W|S)NTF)ZverifyLocalPkgrLrMrrrOrm)Zpkg_listZ logger_msgZall_packages_verifiedrZpkg_successfully_verifiedrUr5r5r6_verification_of_packages] s   z;Base._select_remote_pkgs.._verification_of_packagesz>Package "{}" from local repository "{}" has incorrect checksumz;Some packages from local repository have incorrect checksumz8Package "{}" from repository "{}" has incorrect checksumzVSome packages have invalid cache, but cannot be downloaded due to "--cacheonly" option) Z _is_local_pkgrmrKZCMDLINE_REPO_NAMErwrrrQrcr4r)r3Z install_pkgsrMryZlocal_repository_pkgsrrr5r5r6rtV s&       zBase._select_remote_pkgscCsx|D] }t|qWdS)N)_msg_installed)r3rrr5r5r6r s zBase._report_already_installedc Cs|jjtjd}tjj|}|j|j|d|d}|dk rH|dj|d|dsdtj j t d|nB|jjtj d}|dj |}|rt d}nt d}tj j ||dS) N)rF)rr rqrq)rmzNo match for argumentz?All matches were filtered out by exclude filtering for argumentz?All matches were filtered out by modular filtering for argument)r`rqrKrrrsrtrrrrQrrZIGNORE_REGULAR_EXCLUDESr) r3rrrm all_queryrsrZwith_regular_queryrr5r5r6r s  z#Base._raise_package_not_found_errorc sjjtjdj}tjj|}|jj|d|d}|dsNtj j t d|dk rpfdd|dD}n|d}|st d}nt d }tj j ||dS) N)rF)rr rqrqzNo match for argumentcs g|]}jj|kr|qSr5)rrS)rZr)rmr3r5r6r\ sz;Base._raise_package_not_installed_error..zCAll matches were installed from a different repository for argumentz?All matches were filtered out by exclude filtering for argument) r`rqrKrrrrsrtrrQr!r) r3rrrmrOrsrrrr5)rmr3r6r( s  z'Base._raise_package_not_installed_errorcCs|jj|jdddS)z Setup DNF file loggers based on given configuration file. The loggers are set the same way as if DNF was run from CLI. T)Zfile_loggers_onlyN)r$Z_setup_from_dnf_confr4)r3r5r5r6 setup_loggers szBase.setup_loggersc s|jjtjtjBtjB@r d}nd}t|j}|j|dd}|jf|}| rl|rlt j j |j }t j|t|jdd}t|jdd|} ddfdd|Dtfd d|D} tfd d| D} | | fS) zreturns set of conflicting packages and set of packages with broken dependency that would be additionally installed when --best and --allowerasingTF)rrZ ignore_weak)rcSstj|j|j|j|j|jdS)N)repochversionreleaser)rKZNEVRArrQrRrSr)itemr5r5r6r sz&Base._skipped_packages.._nevracsg|] }|qSr5r5)rZr*)rr5r6r\ sz*Base._skipped_packages..csg|]}|kr|qSr5r5)rZr)rtransaction_nevrasr5r6r\ scsg|]}|kr|qSr5r5)rZr)rrUr5r6r\ s)rZactionsrKINSTALLZUPGRADEZ UPGRADE_ALLrr0rrrrrrMrdrproblem_conflictsZproblem_broken_dependency) r3Zreport_problemsrrZngZparamsrrrWZproblem_dependencyZskipped_conflictsZskipped_dependencyr5)rrUr6_skipped_packages s(    zBase._skipped_packages)N)F)F)TT)T)N)FFF)F)F)N)T)NN)TN)rhNNFN)N)NTN)NT)TNN)NT)T)NNTN)NTN)F)T)N)N)N)N)NNNN)NN)NNNF)F)NTF)NN)NN)NrI)N)__name__ __module__ __qualname__r7r8r:r;r?rV staticmethodrrgrrpropertyrr4r^deleterrrZlazyattrrr`rarsetterrrrrrrrrrr9rrrr(ZRPMTRANS_FLAG_NOSCRIPTSZRPMTRANS_FLAG_NOTRIGGERSZRPMTRANS_FLAG_NODOCSr:ZRPMTRANS_FLAG_JUSTDBZRPMTRANS_FLAG_NOCONTEXTSZRPMTRANS_FLAG_NOFILEDIGESTrr/rrrrrrrrrrr r rrr!rAr8rLr;rUrqrzr~rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr#r%rrr+r-r,rr9r:rrFrGr4rKrrLrtrrr(rPrXr5r5r5r6r[s   =      8 ; > =       '\ 8 l"] * B  ;  ) =  *     /   % &    #  & % )    -rcCs t|}td}tj||dS)Nz Package %s is already installed.)r rrMr)rrrr5r5r6rN srN)H__doc__Z __future__rrrrrrZlibdnf.transactionrrxrZ dnf.compsrZdnf.i18nrr r Zdnf.utilr Zdnf.db.historyr Zdnf.yumr collections.abcr ImportError collectionsrZ dnf.callbackZdnf.confZ dnf.conf.readZ dnf.cryptoZ dnf.dnssecZdnf.drpmZdnf.exceptionsZdnf.goalZ dnf.historyZdnf.lockZ dnf.loggingZdnf.module.module_baseroZ 
dnf.persistorZ dnf.pluginZ dnf.queryZdnf.repoZ dnf.repodictZdnf.rpm.connectionZdnf.rpm.miscutilsZdnf.rpm.transactionZdnf.sackZ dnf.selectorZ dnf.subjectZdnf.transactionZdnf.yum.rpmtransrrrKrr#rHrrrEr(rrwZ getLoggerrMobjectrrNr5r5r5r6s           PK!Ȗ66__pycache__/base.cpython-36.pycnu[3 f@sXdZddlmZddlmZddlmZddlmZddlZddlZddlZ ddl m Z ddl m Z dd lmZmZmZdd lmZdd lmZdd lmZydd lmZWn ek rdd lmZYnXddlZddlZddl ZddlZddlZddl Zddl!Zddl"Zddl#Zddl$Zddl%Zddl&Zddl'Zyddl(ZdZ)Wnek r`dZ)YnXddl*Zddl+Zddl,Zddl-Zddl.Zddl/Zddl0Zddl1Zddl2Zddl3Zddl4Zddl5ZddlZddl6Zddl7Z7ddl8Z8ddl9Z9ddl:Z:ddl;Z;ddlZ>ddl?Z?ddl@Z@ddlAZAddlBZBe;jCdZDGdddeEZFddZGdS)z Supplies the Base class. )absolute_import)division)print_function)unicode_literalsN)deepcopy) CompsQuery)_P_ucd) _parse_specs) SwdbInterface)misc)SequenceTFdnfc@seZdZdddZddZddZdd Zd d Zd d Ze ddZ ddZ dddZ ddZ eddZeddZeddZejddZeejjddd Zed!d"Zed#d$Zed%d&Zejd'd&Zd(d)Zffdfd*d+Zd,d-Zd.d/Zd0d1Zdd2d3Z dd5d6Z!dd7d8Z"d9d:Z#d;d<Z$dd=d>Z%dd?d@Z&dAdBZ'e(j)e(j*e(j+e(j,e(j-e(j.e(j/dCZ0e1e(dDre(j2e0dE<dFe(j3e(j4BiZ5edGdHZ6edIdJZ7e7jdKdJZ7ddLdMZ8dNdOZ9edPdQdRdQdSdQdTdUZ:dVdWZ;dXdYZd^d_Z?dd`daZ@ffdbdcZAdddeZBdfdgZCdhdiZDddjdkZEddldmZFddndoZGddpdqZHdrdsZIdtduZJdvdwZKddydzZLdd{d|ZMd}d~ZNddZOddZPddZQdddZRddZSdddZTdddZUddZVddZWddZXddZYddZZddZ[dddZ\ddZ]ddZ^ddZ_dddZ`dddZadddZbdddZcdddZdddZeddZfddZgdddZhdddZidddZjdddZkdddZldddZmdddZnddZoddd„ZpddĄZqffffffddƄZrddȄZsdddʄZtddd̄Zuddd΄ZvddЄZwdddӄZxdddՄZyddׄZzddلZ{ddۄZ|dd݄Z}dd߄Z~ddZddZdS( BaseNcCsd|_|p|j|_d|_d|_d|_d|_d|_d|_t j j |_ d|_ t|_t|_t jj|_t jj|_t jj|_ttjg|_t jj|_d|_ d|_!d|_"g|_#i|_$d|_%t|_&d|_'dS)NF)(_closed_setup_default_conf_conf_goal_repo_persistor_sack _transaction_priv_ts_compsrcompsTransactionBunch _comps_trans_historyset _tempfiles_trans_tempfilescallbackZDepsolve _ds_callbackloggingZLogging_loggingrepodictRepoDict_reposrpmZRPMPROB_FILTER_OLDPACKAGE_rpm_probfilterZpluginZPlugins_plugins_trans_success_trans_install_set_tempfile_persistor_update_security_filters_update_security_options_allow_erasing_repo_set_imported_gpg_keysoutput)selfconfr5/usr/lib/python3.6/base.py__init__]s2     z Base.__init__cCs|S)Nr5)r3r5r5r6 __enter__zszBase.__enter__cGs |jdS)N)close)r3Zexc_argsr5r5r6__exit__}sz Base.__exit__cCs |jdS)N)r9)r3r5r5r6__del__sz Base.__del__cCs.|jr|jj|n|jjrn |jj|dS)N)rr updater4destdirr)r3filesr5r5r6_add_tempfiless zBase._add_tempfilescCs|jtd|jdd}|jr&d|d<y|jj|jfddi|WnTtjk r}z6t j t dj |j |tjjt dj |j WYdd}~XnXdS)NT)load_filelists load_prestoload_updateinfo load_other build_cachezloading repo '{}' failure: {}z"Loading repository '{}' has failed)loaddictdeltarpmload_metadata_otherr load_repo_repohawkey Exceptionloggerdebugrformatidr exceptions RepoError)r3repo mdload_flagser5r5r6_add_repo_to_sackszBase._add_repo_to_sackcCs.tjj}|j}d|kr*tjj|j|d<|S)N releasever)rr4ZConf substitutionsr(Zdetect_releasever installroot)r4Zsubstr5r5r6rs  zBase._setup_default_confcCsdd|jjD}y0|jj|j||jj|jjd|jj|jj d}Wn4t j k rx}zt j jt|WYdd}~XnX|rtjt jjj|ddS)NcSsg|]}|jr|jqSr5)Zmodule_hotfixesrP).0ir5r5r6 sz0Base._setup_modular_excludes..F)Z update_onlyZ debugsolvermodule_obsoletesr)repos iter_enabledsackZfilter_modules_moduleContainerr4rYZmodule_platform_id debug_solverr]rKrLrrQErrorr rMwarningmodule module_baseZformat_modular_solver_errors)r3Z hot_fix_reposZ solver_errorsrUr5r5r6_setup_modular_excludess "zBase._setup_modular_excludesFc Cst|jj}d|kr$tr$|jdSg}g}|s>x|jjD]}|j|krPq@t|j dkr|j j j dd}x8t|j D]*}t jj|}|j|j|j dddd}q|W|j |jd|j|j|jf|j j j dd} x8t|jD]*} t jj| }| j|j|j dddd} qW| j |jd| r@|j| |jfq@Wd|kr6|j j j dd} t|jj dkrx.) 
rr4 cacheonlyr^r_Zexpired_to_addr<saver-)r3Zexpiredr5r5r6_store_persistent_datas  zBase._store_persistent_datacCs|jdkr|jdd|jS)NT) arch_filter)r read_comps)r3r5r5r6rs  z Base.compscCs|jS)N)r)r3r5r5r6r4sz Base.confcCs|jS)N)r')r3r5r5r6r^sz Base.reposcCs d|_dS)N)r')r3r5r5r6r^sZ _priv_rpmconncCstjjj|jjS)N)rr(Z connectionZ RpmConnectionr4rY)r3r5r5r6_rpmconn sz Base._rpmconncCs|jS)N)r)r3r5r5r6r`sz Base.sackcCsP|jdkrtjjd|jjdkrHtjjd|jj |jj d|jj |j_|jjS)NzSack was not initializedFarch) r`rrQrcralibdnfreZModulePackageContainerr4rYrX persistdir)r3r5r5r6ras     zBase._moduleContainercCs|jS)N)r)r3r5r5r6 transactionszBase.transactioncCs|jrtd||_dS)Nztransaction already set)r ValueError)r3valuer5r5r6r$scCstjj|jj|_dS)N)r persistorZ RepoPersistorr4cachedirr)r3r5r5r6_activate_persistor+szBase._activate_persistorcCs,|jjr|jj|j|||jj||dS)z&Load plugins and run their __init__().N)r4Zpluginsr*_loadZ _run_init)r3Z disabled_globZenable_pluginsclir5r5r6 init_plugins.szBase.init_pluginscCs|jjdS)z#Run plugins pre_configure() method.N)r*Z_run_pre_config)r3r5r5r6pre_configure_plugins5szBase.pre_configure_pluginscCs|jjdS)zRun plugins configure() method.N)r*Z _run_config)r3r5r5r6configure_plugins:szBase.configure_pluginscCs|jjdS)zRun plugins unload() method.N)r*Z_unload)r3r5r5r6unload_plugins?szBase.unload_pluginsc Cs|jj}|jdkr|j|j}|rtjjrDtd}tj |dStjj dkrhtd}tj |dS|dkrtd}tj |dS|j }|dk r||krtj tddSx|j j D]}|jjdqW|j jstj tdjd j|jjdSx|j jD]}|j\}} | dkr6tj td |jnx| sH| dkrftjtd |j|jjnH|r| |krtd }tj||j| |jjntjtd |j| qW|rd|_|jdddtj tddS)NzCMetadata timer caching disabled when running on metered connection.Fz:Metadata timer caching disabled when running on a battery.rz Metadata timer caching disabled.z"Metadata cache refreshed recently.z*There are no enabled repositories in "{}".z", "z4%s: will never be expired and will not be refreshed.z&%s: has expired and will be refreshed.zC%s: metadata will expire after %d seconds and will be refreshed nowz!%s: will expire after %d seconds.T)load_system_repoload_available_reposzMetadata cache created.)r4Zmetadata_timer_syncrrrutilZon_metered_connectionrrMinfoZ on_ac_powersince_last_makecacher^valuesrJZsetMaxMirrorTries _any_enabledrOjoinZreposdirr_Z_metadata_expire_inrPrNexpireZreset_last_makecache fill_sack) r3timerZperiodrmsgrrSr{Zis_cacheZ expires_inr5r5r6 update_cacheDsZ            zBase.update_cacheTc CsPtjjd}|jdddtjj||_tjj|j j |j j }||dk ry|jj ddWnt k r~|dkrzYnX|rg}d}tj}|j jrtjjjx|jjD]}y`|j||jj|kr|jj}|jj|kr|jj}tjtd|jtjj|jjWqtj j!k rz} z>|jj"|j#dkrJtj$d | |j%|j|j&WYd d } ~ XqXqW|rtj$td d j'||jj(r|dkr|dkrtj)td t*j+t,|dtjj|n|jj-j&Wd QRX|j } |jj.| j/| j0| j1|j2|tj3j4|j|_5| j6|j5_6|j7j8|jS)z'Prepare the Sack and the Goal objects. 
z sack setupT)r`goalF)rDautorz%s: using metadata from %s.z Error: %sNzIgnoring repositories: %sz, z-Last metadata expiration check: %s ago on %s.)Zseconds)9rr#Timerresetr` _build_sackrlockbuild_metadata_lockr4r exit_on_lockrIOErrortimegpgkey_dns_verificationdnssecRpmImportedKeyscheck_imported_keys_validityr^r_rVrJZ getTimestampZgetAgerMrNrrPrnormalize_timegetMaxTimestamprQrRrskip_if_unavailablerdrwdisablerrrdatetimeZ timedeltaintrh _configureinstallonlypkgsinstallonly_limitallow_vendor_changerrGoalrprotect_running_kernelr*run_sack) r3rrrr error_reposZmtsZager{rUr4r5r5r6r|sf             zBase.fill_sackc Cs tjjd}|jdddtjj||_tjj|j j |j j }|n|dk ry|jj ddWnt k r~|dkrzYnXg}|j jrtjjjx|jjD]}yf|jjdddtd|jdd}|jrd|d <|jj|jf|tjtd |jtjj|jj Wqt!t"j#fk r}zZ|j$dkrPtj%j&td j'|j|ntjtd j'|j||j(|j|j)WYd d }~XqXqW|rtj*td dj+|Wd QRX|j }|jj,|j-|j.|j/|j0|tj1j2|j|_3|j4|j3_4|j5j6|jS)a Prepare Sack and Goal objects and also load all enabled repositories from cache only, it doesn't download anything and it doesn't check if metadata are expired. If there is not enough metadata present (repond.xml or both primary.xml and solv file are missing) given repo is either skipped or it throws a RepoError exception depending on skip_if_unavailable configuration. z sack setupT)r`rF)rDr)Z throwExceptZ ignoreMissing)r@rArBrCz%s: using metadata from %s.zloading repo '{}' failure: {}NzIgnoring repositories: %sz, )7rr#rrr`rrrrr4rrrrrrrrr^r_rJZ loadCacherFrGrHrIrMrNrrPrrr RuntimeErrorrKrLrrQrRrOrwrrdrrrrrrrrrrr*r) r3rrrrrSrTrUr4r5r5r6fill_sack_from_repos_in_cachesX      z"Base.fill_sack_from_repos_in_cachecCstjj|jj|_|jjsl|j|j|j r\|j j |jj |jj |jrl|j|j n|jjj |j |jjrtjtdtjtddjtjjd|jdk r|jj|j|jd|_ dS)NzRThe downloaded packages were saved in cache until the next successful transaction.z1You can remove cached packages by executing '%s'.z{prog} clean packages)progF)rrZTempfilePersistorr4rr-Z keepcache_clean_packagesrr+r r<Zget_saved_tempfilesrir,Ztempfiles_to_addrMrrrOrZ MAIN_PROGrhistoryr9r _closeRpmDB)r3r5r5r6_finalize_bases*       zBase._finalize_basecCsB|jr dStjtjjdd|_|j|jddddd|_dS)ztClose all potential handles and clean cache. Typically the handles are to data sources and sinks. Nz Cleaning up.T)r`r^r) rrMlogrr#DDEBUGrrr*)r3r5r5r6r9sz Base.closecCsftjjj|j|}xN|D]F}y|jj|Wqtjjk r\}ztj |WYdd}~XqXqWdS)z?Read repositories from the main conf file and from .repo files.N) rr4readZ RepoReaderr^addrQZ ConfigErrorrMrd)r3ZoptsreaderrSrUr5r5r6read_all_repos"s  zBase.read_all_reposcCs|r d|_|rtjj|_|rd|_|jdk rJtjj|j|_|jj |j_ |jr`|j r`|j j |j dk rt|j jtjj|_d|_g|_|r|rtjdS)z1Make the Base object forget about various things.N)rrr%r&r'rrrr4rraZrollbackrrr9rrrrr.gcZcollect)r3r`r^rr5r5r6r-s$        'z Base.resetcCs|`dS)z6Closes down the instances of rpmdb that could be open.N)_ts)r3r5r5r6rjszBase._closeRpmDB)Z noscriptsZ notriggersZnodocstestZjustdbZ nocontextsnocryptoRPMTRANS_FLAG_NOCAPSZnocapsrcCs|jS)N)r)r3r5r5r6r|sz Base.goalcCs|jdk r|jStjjj|jj|_|jjdxb|jjD]V}|j j |}|dkrdt j t d|q:|jj||jj |}|dk r:|jj|q:W|jjs|jjtj|jjr|jjtjtjtj|jd}|jj||jS)zMSet up the RPM transaction set that will be used for all the work.Nrz!Invalid tsflag in config file: %s)rrr(rZTransactionWrapperr4rYsetFlagsZtsflags_TS_FLAGS_TO_RPMgetrMcriticalrZ addTsFlag_TS_VSFLAGS_TO_RPM pushVSFlagsZdiskspacecheckr)rZRPMPROB_FILTER_DISKSPACEZ ignorearchZRPMPROB_FILTER_IGNOREARCH functoolsreduceoperatoror_Z setProbFilter)r3flagZrpm_flagZvs_flagZ probfilterr5r5r6rs*       zBase._tscCs&|jdkrdS|jj|`d|_dS)z"Releases the RPM transaction set. 
N)rr9)r3r5r5r6rs   cCs$tjjd}tjj|_tjtjjdx|j j D]}|j s@q4|j sHq4|j j}|sXq4tjtjjd|j|j jtjjkrtj|d}tjj|sq4n tj|d}y|jj|Wq4tjjk r}ztd}tj||j|WYdd}~Xq4Xq4W|r|jjj|j j!dg||jS)z6Create the groups object to access the comps metadata.z loading compszGetting group metadataz%Adding group file from repository: %sz groups.xmlz1Failed to add groups file for repository: %s - %sNbasearch)"rr#rrZCompsrrMrrr^r_Z enablegroupsrrJZ getCompsFnrPZgetSyncStrategyrSZSYNC_ONLY_CACHEr Zcalculate_repo_gen_destospathexistsZrepo_gen_decompressZ_add_from_xml_filenamerQ CompsErrorrrZ_irrrX)r3rrrSZcomps_fnZ decompressedrUrr5r5r6rs:       &zBase.read_compscCs*|jdkr$|jj}t|jj|d|_|jS)zeauto create the history object that to access/append the transaction history information. N)rW)rr4rWr r)r3rWr5r5r6 _getHistorys zBase._getHistorycCs|jS)N)r)r3r5r5r6sz Base.cCs t|d|S)Nr)setattr)r3rr5r5r6rscCs t|ddS)Nr)r)r3r5r5r6rszDNF SWDB Interface Object)fgetfsetfdeldoccsFjj}t|jj}|j|jj}xT|jD]H|j}|d}j j |dj j d|j ||ddq:Wx|j D]xj j d|j}t }g}x0|D](} t | |kr|jd| q|j| qW|d} |j| |ddqWx|jD]j j d|j}fdd|D}|j} |krt|jjd rt|j} x0|D](} |j| } tjj| | dkrz| } qzW|j|| fd d }tjj||qWx|jD]ȉ|j}d}x"|D]}|jjkr|}PqW|dkr*|jd}n |j|fd d|D}fd d }tjj|||krz|j|n|j||j j |dj j dqW|j }|rBj!j"t#j$dj}|j%|dxh|D]`|jjd r|d}|j|jj&||j|j j d|j} |j'| qW|S)NrZdddrr{r[cs$g|]}|ks|jjkr|qSr5)name)rZr[) all_obsoletedpkgr5r6r\sz*Base._goal2transaction..)rcsjj|dS)Nod)r" pkg_added)r)r3r5r6r sz(Base._goal2transaction..cs$g|]}|ks|jjkr|qSr5)r)rZr[)rrr5r6r\scsjj|dS)Nr)r"r)r)r3r5r6r!sZudu)flags)Zpkg__neqrU)(rr(rZlist_obsoleted_get_installonly_queryrx installedlist_downgradesZobsoleted_by_packager"rZ add_downgradelist_reinstallsstrinsertrwZ add_reinstall list_installs get_reasonfilterrrrZTransactionItemReasonCompareZ add_installrrZmapall list_upgradespopremoveZ add_upgradeZ list_erasuresr`rqrKIGNORE_EXCLUDESrr set_reasonZ add_erase)r3rtsZinstallonly_queryZinstallonly_query_installedZobsZ downgradedZ nevra_pkg obsoletesZobs_pkgZ reinstalledreasonZobsoleteZreason_obsoletecbZupgradedr[ZerasuresZremaining_installed_queryZ remainingr5)rrr3r6_goal2transactions                        zBase._goal2transactioncCsd|j}|j}|jj}g}g}x6|D].}||krJ|j||dq*|j||q*W||fS)aJ See what packages in the query match packages (also in older versions, but always same architecture) that are already installed. Unlike in case of _sltr_matches_installed(), it is practical here to know even the packages in the original query that can still be installed. r)r_na_dict availablerw)r3qinstZ inst_per_archZavail_per_archZavail_lZinst_lZnar5r5r6_query_matches_installed7s  zBase._query_matches_installedcCs"|jjjj|jd}t|S)z See if sltr matches a patches that is (in older version or different architecture perhaps) already installed. 
)r)r`rqrrrmatcheslist)r3sltrrr5r5r6_sltr_matches_installedKszBase._sltr_matches_installedcsfddjjjDS)z5Get iterator over the packages installed by the user.c3s|]}jj|r|VqdS)N)rZuser_installed)rZr)r3r5r6 Tsz*Base.iter_userinstalled..)r`rqr)r3r5)r3r6iter_userinstalledRszBase.iter_userinstalledcCs0|j||jj|jj d}|jjr,|jd|S)N)allow_uninstall force_bestZignore_weak_depsz./debugdata/rpms)runr4bestZinstall_weak_depsrbZwrite_debugdata)r3r allow_erasingretr5r5r6_run_hawkey_goalWs  zBase._run_hawkey_goalc Cstd}|jtjjd}|jj|j}|jrJ|j|j j j |j n|j jsd|j}|j||j|j j j|j jd|j||s|j jdkr|jtjj|j}tjj|}n |j||_|jj||jdk ot|jdk}|r|jj }|rtjj!|}|dk r"||j"j#|jj$}||jj%7}||jj&7}||jj'7}|j j(|j)||S)zBuild the transaction set.NZdepsolve)rr)*_finalize_comps_transrr#rr"startrZ req_has_eraseZpush_userinstalledr`rqrrr4Zupgrade_group_objects_upgrade_build_comps_solverZ'_exclude_packages_from_installed_groupsZ add_protectedrrZprotected_packagesr debuglevelZ log_decisionsr_format_resolve_problems problem_rulesrQZ DepsolveErrorrrendrpZ_rpm_limitationsrcr*Z run_resolvedrrrrZset_modules_enabled_by_pkgsetra) r3rexcrrsolverrZgot_transactionZnew_pkgsr5r5r6resolve_sH              z Base.resolvec Cs^t|ts|g}tjjjgt|}|js|jj |jj |j r|j j sV|j j rd}t|drx|jrxdj|j}nt|dr|jrdj|j}|jj}|dkr|jj}n|j}|jj|gg||jj||jj|jjd|_dSd}tjtdtj j!|j"j#|j"j$}||jj%|j&|j'}|rxtd}tj(|x|D]}tj(|qXWtj)j*|tjtdtj+j,d} tjtd |j&j-|j&j.tjjj/|dd } |j&j0| } t1| d kr\x&| j2D]}tj3td j4|qWtd d} x| D]} | dt5| 7} qW|j6| }|rP| d|7} tj)j7| ~ tjtd|j&j8t9j:rdS| |jj |jj tj+j,d} tjjj/||d}|j"j;dkrx|j|d}WdQRX| |jj?|j|jjdd}x&tj@jA||j|D]}tjB|qFW|S)Nargs cmdsTzRunning transaction checkz%Error: transaction check vs depsolve:zTransaction check succeeded.ztransaction testzRunning transaction test)rrzRPM: {}zTransaction test error: z %s zTransaction test succeeded.r)displaysFzRunning transaction)rcSs,g}x"|D]}|jdj|t|q W|S)Nz{}: {})rwrOr)actionZtsismsgstsir5r5r6 _pto_callbacks z*Base.do_transaction.._pto_callback)C isinstancerrZyumZrpmtransZLoggingTransactionDisplayr rrarZupdateFailSafeDatargroupenvhasattrr"rr$rlastr`_rpmdb_versionend_rpmdb_versionbegrr*Zrun_pre_transactionZrun_transactionr+rMrrrZbuild_rpmdb_lockr4rrZ_populate_rpm_tsr_run_rpm_checkerrorrQZTransactionCheckErrorr#rorderZcleanZRPMTransactionrrpmessagesrrOr _trans_error_summaryrc isTsFlagSetr(RPMTRANS_FLAG_TESTrr&r2_run_transactionZunload_removed_pluginsrZ_post_transaction_outputrN)r3ZdisplaycmdlineoldZ rpmdb_versiontidrr)rrZtestcbZtserrors errstringZdescrsummaryrZdisplay_r+r5r5r6do_transactions                              zBase.do_transactioncCsd}tjd}i}x|j|D]t}|jddkr>t|jdntjt|jdd}|jd|krr|||jd<||jd|kr|||jd<qW|r|tdd 7}x4|D],}|d td d ||j |||d 7}qW|sd Std d|}|S)zParse the error string for 'interesting' errors which can be grouped, such as disk space issues. :param errstring: the error string :return: a string containing a summary of the errors z9needs (\d+)(K|M)B(?: more space)? 
on the (\S+) filesystemr'Mrg@zDisk Requirements:r%z z7At least {0}MB more space needed on the {1} filesystem.Nz Error Summaryz ------------- ) recompilefinditerr-rmathZceilrr rO)r3r?r@pZdiskmZ size_in_mbkr5r5r6r8s&   *zBase._trans_error_summarycCs|jjo|jjtj S)N)r4Zhistory_recordrr9r(r:)r3r5r5r6_record_history%szBase._record_historycCsd}|jrt|jj}|jjj}|j|dj}|jj }|j j }|dk rX|j }|dksh||krt jtdjtjjdd}t|dr|jrdj|j}nt|dr|jrdj|j}|jjr|jjnd} |j j||g|| }|jjr$tjd } | r$ytj| Wnd } YnXt jtjjd |j j|j!d} t jtjjd |jjrzytj| Wn YnXtjj"|j |j#| dkrnt$| d krd d |j D} | sfx&|j%D]} t j&tdj| qWtd} tj'j(| nlt j&tdx | D]}t j&t)|d qW|jrR|j j*t+j, rR|j j-|td} tj'j(| xbdD]Z}t||rlt.||}yt/j0|Wn.t1t2fk rtd} t j&| |YnXqlWt3|j#j4|_5|j j*t+j,s|j6|j7|S)zh Perform the RPM transaction. :return: history database transaction ID or None N)rz RPMDB altered outside of {prog}.)rr"r#r$rBrzRPM transaction start.zRPM transaction over.cSsg|]}|jr|qSr5)ZFailed)rZZelr5r5r6r\esz)Base._run_transaction..zRPM: {}zCould not run transaction.zTransaction couldn't start: ts_all_fn ts_done_fnz$Failed to remove transaction file %s)rMrN)8rLr r4Zhistory_record_packagesr`rqrrrr1rr0r2rMrNrrOrrZMAIN_PROG_UPPERr/r"rr$commentr3Z reset_nicernicerr#rrr!Z_sync_rpm_trans_with_swdbrrpr7rrQrcr r9r(r:rgetattrr unlink_frOSErrorboolZ install_setr,_verify_transactionZverify_tsi_package)r3rr>Zusing_pkgs_patsinstalled_queryZ using_pkgsrpmdbvZlastdbvr<rOZoniceerrorsZfailedrrUr[fnr5r5r6r;)s~                 zBase._run_transactioncsdd|jD}t|fdd}tjjd}d}tjj|}|jj}t dd|D}xH|j j D]<} | j } x.| j D]"} | j|kr| jd| jqWqjWx|D]} || j|}qW|j} |j j| |d|_dS) NcSsg|]}|jtjjkr|qSr5)r(rrZ#TransactionItemAction_REASON_CHANGE)rZr*r5r5r6r\sz,Base._verify_transaction..cs |d7}dk r|||S)Nrr5)rcount)total verify_pkg_cbr5r6display_banners z0Base._verify_transaction..display_bannerzverify transactionrcSsg|] }|jqSr5)r)rZr[r5r5r6r\sT)rrprr#rr` rpmdb_sackrqrrrr-ZgetCompsGroupItemZ getPackagesZgetNameZ setInstalledrrr1rr+)r3r\Ztransaction_itemsr]rrZr^rnamesZtigrIr*rWr5)r[r\r6rUs(       zBase._verify_transactionc sXtjj|jj|jj}|tj}tdd|D}tdd|D} j j j dkrnj t||| dnj t||tj j |||jrtjjjtfdd|D} tj jd|j} |jj} | dk} xԈjo| s| dkr| dkr| d 8} td }tj|d djD}fd d|D}td d|D}j t||tj j |||jrtjjj| tfdd|D7} tj j| |i} qWjrtjjjj}tj|WdQRX|dk r|| || \}}||krT||krtd}n||kr,td}d||d}tj||d|d|dS)Ncss|] }|jVqdS)N) download_size)rZploadr5r5r6rsz1Base._download_remote_payloads..cSsg|]}t|tjjr|qSr5)r,rdrpmZ DeltaPayload)rZZpayloadr5r5r6r\sz2Base._download_remote_payloads..)Z total_drpmsc3s|]}j|VqdS)N)_bandwidth_used)rZrb)rXr5r6rsrrz,Some packages were not downloaded. Retrying.cSsg|]}|qSr5r5)rZrr5r5r6r\scs g|]}tjj|tjjqSr5)rrS _pkg2payload RPMPayload)rZr)progressr5r6r\scss|] }|jVqdS)N)ra)rZrbr5r5r6rsc3s|]}j|VqdS)N)re)rZrb)rXr5r6rsz?Delta RPMs reduced %.1f MB of updates to %.1f MB (%d.1%% saved)zIFailed Delta RPMs increased %.1f MB of updates to %.1f MB (%d.1%% wasted)dir')rrii)rrZbuild_download_lockr4rrrsumrpr__code__ co_argcountrSZ_download_payloadsZ_irrecoverablerQZ DownloadErrorZ_update_savingZ _recoverableretriesrrMrZ errmap2str)r3payloadsrcrhcallback_totalZ fail_fastrZ beg_downloadZest_remote_sizeZ total_drpmZ remote_sizeZsavingrmZforeverrZremaining_pkgsrealZfullZpercentr5)rXrhr6_download_remote_payloadssb              zBase._download_remote_payloadsc s|j|\}}|rzdkr$tjjtjj|jjj|j j |j dd|Dfdd|D}|j |||j j rxX|D]P}|jrtjj|j|jjd}ntjj|jj|jjd}tj||j j qWdS)aDownload the packages specified by the given list of packages. 
`pkglist` is a list of packages to download, `progress` is an optional DownloadProgress instance, `callback_total` an optional callback to output messages about the download operation. NcSsg|] }|jqSr5)localPkg)rZrr5r5r6r\sz*Base.download_packages..cs$g|]}tjj|jtjjqSr5)rrSrfZ delta_factoryrg)rZr)rcrhr5r6r\s/)_select_remote_pkgsrr!ZNullDownloadProgressrcZ DeltaInfor`rqrr4Zdeltarpm_percentager?rqr=ZbaseurlrrrZget_local_baseurllocationlstriprSZpkgdirshutilcopy) r3Zpkglistrhro remote_pkgsZ local_pkgsrnrrur5)rcrhr6download_packagess"      zBase.download_packagescCsg}|s |S|jjr&tjjtdg}x|D]}tjj| rhd|krhtj j ||j |}|j |gy|j |jj|Wq0tk r}ztj||j |WYdd}~Xq0Xq0W|jdd|r|rttdjdj||S)NzACannot add local packages, because transaction job already existsz://T)rzzCould not open: {}r#)rZ req_lengthrrQrcrrrrrZ_urlopen_progressr4r?rwr`Zadd_cmdline_packagerrMrdrrOr)r3 path_liststrictrhpkgsZ pkgs_errorrrUr5r5r6add_remote_rpmss(       zBase.add_remote_rpmsc Cs|jr|jj}d}n|j|j}|j}|j }|r|jj}tj j j |}tj j j ||j}tjj|j}~|dkrd} d} n|dkr|rd} nd} td|} n\|dkrd} td|} nB|dkr|rd} nd} d} td|} n|d krd} td |} nd} d} | | fS) aVerify the GPG signature of the given package object. :param po: the package object to verify the signature of :return: (result, error_string) where result is:: 0 = GPG signature verifies ok or verification is not required. 1 = GPG verification failed but installation of the right GPG key might help. 2 = Fatal GPG verification error, give up. rrBrr'z"Public key for %s is not installedzProblem opening package %srDz Public key for %s is not trustedrdzPackage %s is not signed) _from_cmdliner4Zlocalpkg_gpgcheckr^r~ZgpgcheckgpgkeyrYrr(rinitReadOnlyTransactionZ miscutilsZcheckSigrrrrbasenamer) r3pocheckZ hasgpgkeyrSrootrZ sigresultZlocalfnresultrr5r5r6_sig_check_pkg(sF    zBase._sig_check_pkgcCs |j|S)aVerify the GPG signature of the given package object. :param pkg: the package object to verify the signature of :return: (result, error_string) where result is:: 0 = GPG signature verifies ok or verification is not required. 1 = GPG verification failed but installation of the right GPG key might help. 2 = Fatal GPG verification error, give up. )r)r3rr5r5r6package_signature_checkcs zBase.package_signature_checkc Cslxf|D]^}tjj|sqytj|Wn&tk rLtjtd|wYqXtj t j j td|qWdS)NzCannot remove %sz %s removed) rrrr rRrSrMrdrrrr#r)r3packagesrYr5r5r6rrs   zBase._clean_packagesrhcCs|dkr|jj}|dkr*|j|||||Stjj| s.) 
r4Zshowdupesfromrepos _list_patternrris_string_typeAssertionErrorrpartialrpmapr)r3 pkgnarrowpatternsrrrmZlist_fnZyghsr5r5r6_do_package_listss  zBase._do_package_listsc&sfddfdd}fdd}tj|d}g} g} g} g} g} g}g}g}g}g}|}jj}|dk rtjj||d}|jjd d }|d kri}i}xH|jD]<}|||j <|rq|j |j f}||ks|||kr|||<qWt ||j } ||j}|s|jd d }x|D]}|rN|j |krB| j|n | j|nT|j |j f}|j |krr| j|n0||ks|j||r| j|n | j|q Wn|dkr||jd d} j| d d} | jddgd| jj} nP|dkrt ||j} n2|dkrB|r||j}|jj}x\|D]Tj j f}|j|g}fdd|D}t|dkr| jn | jq@Wn||jjd d j}|jjj}xz|D]r\} }!|| |!fd|j| |!fdgd}"|" s j|"r| jn"j|"r.| jn | jqWn|dkrh||jjj}#|#j}n|dkrfdd|jD}n|dkr|j}$|jjj|$d}j|d d d}|jddgdg}xl|D],j}%|jfdd|$j|%d DqWn6|d!krD|j}|s2|jd d }||j j!j"}| |_| |_| |_#| |_$| |_%||_||_&||_"||_||_'|S)"Ncsdkr dSjj|kS)z:Test whether given package originates from the repository.NT)rrS)package)rmr3r5r6 is_from_reposz(Base._list_pattern..is_from_repocsfdd|DS)z=Filter out the packages which do not originate from the repo.c3s|]}|r|VqdS)Nr5)rZr)rr5r6rsz=Base._list_pattern..pkgs_from_repo..r5)r)rr5r6pkgs_from_reposz*Base._list_pattern..pkgs_from_repocsdkr |S|jdS)z=Filter out the packages which do not originate from the repo.N)rm)r)rq)rmr5r6query_for_reposz*Base._list_pattern..query_for_repo)iter)rF)rkrhT)Zlatest_per_arch_by_priorityupgrades)Zupgrades_by_priority)upgradesrcnosrc) arch__neqrrcsg|]}|jjkr|qSr5)evr)rZr) avail_pkgr5r6r\sz&Base._list_pattern..r autoremoveextrascsg|]}|r|qSr5r5)rZr)rr5r6r\sr)Zobsoletes_by_priority)rdrcsg|] }|fqSr5r5)rZr=)newr5r6r\.s)providesrecent)(r Z GenericHolderr`rqrrsrtrvrZpkgtuprrr rrrrrwZevr_gt_merge_update_filterslatestrrrrpZevr_eq _unneededrswdbrrrextendZ_recentr4rreinstall_available old_availableupdatesobsoletesTuplesr)&r3rpatternrrrmrrZyghrrrrrrrrrrZicrr|ZdinstZndinstrkeyZavailrZinstalled_dictinstalled_pkgsZsame_verZavailable_dictrrZinst_pkgZ autoremove_qrZobsoleted_reldepsr5)rrrrmr3r6rs                                      zBase._list_patterncCs|j|7_t|S)N)rrp)r3transr5r5r6_add_comps_transEszBase._add_comps_transcs|j}|sdS|jjjdd}|jfdd|Dd}|j|}x|D]}jj|tjj qLW|j |}|j |}|rx |D]}j j |j jdqWdS)z Mark to remove packages that are not required by any user installed package (reason group or user) :param query: dnf.query.Query() object NF)rbcs g|]}jjj|jr|qSr5)rr-Zis_removable_pkgr)rZr[)r3r5r6r\Usz,Base._remove_if_unneeded..)r) clean_deps)rZ_safe_to_removerrr differencerrrZ TransactionItemReason_DEPENDENCY intersectionreraser4clean_requirements_on_remove)r3rqZ unneeded_pkgsZunneeded_pkgs_historyZpkg_with_dependent_pkgsrZremove_packagesr5)r3r6_remove_if_unneededIs     zBase._remove_if_unneededcs>j}jjd}fdd}fdd}dd}jjjdd }|jtj|dd f|j tj|d d f|j |f|j |ff}x|D]\}} x|D]} d | j i} | j r| jd |ijjjf| j} | jddgd| s | j } | j r| d|7} tjtdj| q| | || }jjj| j qWqWj|dS)Nrcs,tjjj}|j|djj|d|S)N)r)select)rselectorSelectorr`rrr)rq remove_query comps_pkgr )r3r5r6 trans_upgradegs z1Base._finalize_comps_trans..trans_upgradecsjjdkrr|js"j||dq|jj}j|tjj j }|j dj |j |jdjj|| dnltjj j }|jr|j dj |j |jdn,jjr|jj jj|d}|j |djj|| d|S)Nrh)r|z ({} if {}))r)roptional)r)r)r4multilib_policyZrequires_install_multiarchrrx_report_already_installedrrrr`rrOrrinstallrrurqrr)rqrrr|rVr )r3r5r6 trans_installms     z1Base._finalize_comps_trans..trans_installcSs|j|}|S)N)ru)rqrrr5r5r6 trans_removes z0Base._finalize_comps_trans..trans_removeT)ri)r|Frrrr)r.zNo match for group package "{}")rr4rXr`rqrrrrr install_optrrrZ basearchonlyr<rxrMrdrrOrZ group_membersrr)r3rrrrrrZattr_fnattrrYrZ query_argsrZpackage_stringr5)r3r6rcs4        zBase._finalize_comps_transcs fdd}tjjjj|S)Nc sNjjjj|d}|sdSyjjj|dStk rHtj j SXdS)N)rr) 
r`rqrrrrr(rAttributeErrorrrZTransactionItemReason_UNKNOWN)Zpkgnamer)r3r5r6 reason_fnsz+Base._build_comps_solver..reason_fn)rrZSolverrr)r3rr5)r3r6rs zBase._build_comps_solvercCsXtjj|st|j}t|ts.tjj |}|j |||p>t ||}|sNdS|j |S)a&Installs packages of environment group identified by env_id. :param types: Types of packages to install. Either an integer as a logical conjunction of CompsPackageType ids or a list of string package type ids (conditional, default, mandatory, optional). r) rrrrrr,rrrlistToCompsPackageTypeZ_environment_installrr)r3env_idtypesexcluder|exclude_groupsr rr5r5r6environment_installs  zBase.environment_installcCs,tjj|st|j}|j|}|j|S)N)rrrrrZ_environment_remover)r3rr rr5r5r6environment_removes zBase.environment_removec sfddtjj|std}|rBfdd|D}tjj|}j}t|t s`t j j |}|j ||||}|sxdS|r|j} n|j} tjtd|| j|S)anInstalls packages of selected group :param pkg_types: Types of packages to install. Either an integer as a logical conjunction of CompsPackageType ids or a list of string package type ids (conditional, default, mandatory, optional). :param exclude: list of package name glob patterns that will be excluded from install set :param strict: boolean indicating whether group packages that exist but are non-installable due to e.g. dependency issues should be skipped (False) or cause transaction to fail to resolve (True) cs6tjj|r,jjj|d}tdd|S|fSdS)N) name__globcSs|jS)N)r)rIr5r5r6rszABase.group_install.._pattern_to_pkgname..)rris_glob_patternr`rqrrr)rr)r3r5r6_pattern_to_pkgnames z/Base.group_install.._pattern_to_pkgnameNcsg|] }|qSr5r5)rZrI)rr5r6r\sz&Base.group_install..rz#Adding packages from group '%s': %s)rrrr itertoolschain from_iterablerr,rrrrZ_group_installrrrMrNrr) r3grp_idZ pkg_typesrr|Zexclude_pkgnamesZnested_excludesr rZinstlogr5)rr3r6 group_installs$     zBase.group_installcCst|j|jtjtjBtj}d}d}x|D]} y|j| } Wn:tjj k rv} zt j t | d}w*WYdd} ~ XnXx2| j D](} | s| |kr||j| |||d7}qWx&| jD]} ||j| ||||d7}qWq*W| r|rtjjtd|S)NrTF)rr|)rr|rzNothing to do.)rrr ENVIRONMENTSGROUPS AVAILABLErrrQrrMr5r groupsr environmentsrrcr)r3rrr|rrrcntdonerreserrZgroup_idrr5r5r6env_group_installs(    zBase.env_group_installcCs,tjj|st|j}|j|}|j|S)N)rrrrrZ _group_remover)r3rr rr5r5r6 group_removes zBase.group_removecCst|j|jtjtjBtj}y|j|}WnFtjj k rp}z&t j dt |tjj tdWYdd}~XnXd}x|jD]}||j|7}q~Wx|jD]}||j|7}qW|S)Nz Warning: %szNo groups marked for removal.r)rrrrr INSTALLEDrrrQrrMr5r rcrrrrr)r3rrrrrr.grpr5r5r6env_group_remove s  "  zBase.env_group_removec CsLt|j|jtjtjBtj}d}x |D]}y|j|}Wn6tjj k rr}zt j t |w(WYdd}~XnXxX|j D]N}y|j|d}Wq|tjj k r}zt j t |w|WYdd}~Xq|Xq|WxZ|jD]P}y|j|d}Wqtjj k r$}zt j t |wWYdd}~XqXqWq(W|sHtd} tjj| dS)NFTzNo group marked for upgrade.)rrrrrrrrrQrrMr5r renvironment_upgrader group_upgraderrZCliError) r3rrZgroup_upgradedrrrr.rrr5r5r6env_group_upgrades6      zBase.env_group_upgradecCs,tjj|st|j}|j|}|j|S)N)rrrrrZ_environment_upgrader)r3rr rr5r5r6r9s zBase.environment_upgradecCs,tjj|st|j}|j|}|j|S)N)rrrrrZ_group_upgrader)r3rr rr5r5r6r@s zBase.group_upgradecCs|jjd}tjj|rdS|jj}tjjj |d}|j tj tj B|j dd}t|}~~|dkrldStjj|}tjj|stj|t|d}|j~dSdS) zChecks for the presence of GPG keys in the rpmdb. 
:return: 0 if there are no GPG keys in the rpmdb, and 1 if there are keys z/.gpgkeyschecked.yumr)rrz gpg-pubkeyrwN)r4rrrrrYrr(rrr_RPMVSF_NOSIGNATURES_RPMVSF_NODIGESTSZdbMatchrpdirnamemakedirsopenr9)r3ZgpgkeyscheckedrYZmytsidxkeysZmydirZfor5r5r6_gpg_key_checkGs&       zBase._gpg_key_checkc Cs|j|\}}|j|x~|D]v}tjj|j}|jjj|d}|jj rb|j |jjj|d}|j |d}|dk r|j |d}|j j || dqWt|S)N)r)r)rm)rr)r rrrrr`rqrrr4rrurrrrp) r3rqrmr| already_instrrr rr5r5r6res    zBase._install_multiarchcCs,tj}tj}t||t||||fS)a Categorize :param install and :param exclude list into two groups each (packages and groups) :param install: list of specs, whether packages ('foo') or groups/modules ('@bar') :param exclude: list of specs, whether packages ('foo') or groups/modules ('@bar') :return: categorized install and exclude specs (stored in argparse.Namespace class) To access packages use: specs.pkg_specs, to access groups use: specs.grp_specs )argparseZ Namespacer )r3rr install_specs exclude_specsr5r5r6_categorize_specsss   zBase._categorize_specscsddd|jDfdd|jD}|jjj|d}|jjjd}|jj||jj|dS)NcSsg|]}tjj|r|qSr5)rrr)rZrr5r5r6r\sz/Base._exclude_package_specs..csg|]}|kr|qSr5r5)rZr) glob_excludesr5r6r\s)r)r) pkg_specsr`rqrry)r3rexcludesr}Zglob_exclude_queryr5)rr6_exclude_package_specss  zBase._exclude_package_specsc Cst}t|j|jtjtjBtjtjB}x|D]}y|j|}Wn8t j j k rx}zt j dt|w.WYdd}~XnX|j|j|j|jx8|jD].}|jj|}x|jD]} |j| jqWqWq.Wt|S)NzWarning: Module or %s)rrrrrrrrrrrQrrMr5r r<rrZ_environment_by_idZ groups_iterrrPr ) r3 group_specsrrrrrZenvironment_idZ environmentr-r5r5r6_expand_groupss"       zBase._expand_groupsc Csx|D]x}yL|jj}d|kr<|jd}|d}|djd}|j|g|||j|jWqtjjk r||j d|YqXqWdS)Nrsrr,@) r4Zgroup_package_typessplitrr grp_specsrrQrcrw)r3rrZskippedr|Z group_specrrr5r5r6_install_groupss   zBase._install_groupscCs|dkr g}g}g}g}g} |j||\} } |j| xd| jD]Z} y|j| |||dWq>tjjk r} ztjt | |j | WYdd} ~ Xq>Xq>Wg}f}t o| j rLy tj jj|}|j| j |Wnxtjjk rH} zV| jr x| jD]}|j |qW| jr2x| jD]}|j d|qW| j}WYdd} ~ XnXn| j }|rv|j| j | _ |j|| |||s|s|s| s|rtjj|||| |ddS)N)rmr|formsr)no_match_group_specserror_group_specsno_match_pkg_specserror_pkg_specsmodule_depsolv_errors)rrrrrrQ MarkingErrorrMr5rrwrorrerfZ ModuleBaseZ MarkingErrorsrrr rr)r3rrrmr|rrrr r rrspecrUZno_match_module_specsr rfZe_specr5r5r6rsN      zBase.install_specsc Cstjj|}|j|j|dd}|jjdks4|j|rr|d}|dk rP|j|d|sb|j ||||j |||dS|jjdkr|j |||jj |d |d }|s|j |||x|D]} |j j| | d qWd Sd S)z@Mark package(s) given by pkg_spec and reponame for installation.F)rwith_srcrhrqN)rm)rmr|rT)rrrmreportssolution)rrrr)rrsrtget_best_solutionr`r4rZ_is_arch_specifiedrr_raise_package_not_found_errorr_get_best_selectorsrrr) r3pkg_specrmr|rr|rrsltrsr r5r5r6rs,    z Base.installcCs|jrd}t||jjjj|j|jdgd}|shtd}t j ||jt j j td|j|jn\t|d|krt jj|j}|j|gd|jj|| dd Std }t j ||jdSdS) Nz-downgrade_package() for an installed package.noarch)rrz.Package %s not installed, cannot downgrade it.zNo match for argument: %sr)r)rrrzCPackage %s of lower version already installed, cannot downgrade it.) 
_from_systemNotImplementedErrorr`rqrrrrrrrMrdrrQr rusortedrrrrr)r3rr|rrr r5r5r6package_downgrades  zBase.package_downgradecCs|jjj|j|j|j}|j|\}}||kr>|j|gnT|tj j |krdt j j td|jn.t jj|j}|j|gd|jj|| ddS)NzNo match for argument: %s)r)rrr)r`rq_nevrarrrr rrrrrrQPackageNotFoundErrorrrurrrrr)r3rr|rrrr r5r5r6package_installszBase.package_installcCsf|jjjj|j|j|jdr0|jj|dSt d}t j |t |t jjt d|j|jdS)N)rrrrz.Package %s not installed, cannot reinstall it.zNo match for argument: %s)r`rqrrrrrrrrrrMrdrrrQr ru)r3rrr5r5r6package_reinstall(s   zBase.package_reinstallcCs|jj|dS)Nr)rr)r3rr5r5r6package_remove0s zBase.package_removecCs`|jrd}t||jdkr6td}tj||jdS|jjj j }|j j r|jjj |gdj |drtjj|j}|j|gd|jj|ddS|jd kr|j|jd }n|j|j|jd gd }|std }tj||jtjjtd |j|jnZt|d|krBtjj|j}|j|gd|jj|ddStd}tj||jdSdS)Nz+upgrade_package() for an installed package.rz.)r)r)rmT)rrcSsg|] }|jqSr5)r)rZrr5r5r6r\xs)r)rr)r`rqrrrrrrurrrrrrrrrr) r3rqrrmrZ installed_allrrVr r5r5r6_upgrade_internalYs "    zBase._upgrade_internalc Csttjj|}|j|j}|d}|rZtjj|}| oH|doH|djr*|dj}|jjj j }|j j r||j |dn|jjjdd} | s*|j |dj } | std} tj| |tjjtd||nV|djotjj|dj r*| j|djd s*td } tj| d j||dj|j j oH|doH|dj} |j|| ||Stjjtd||dS) Nrqnevra)rT)ri)rz(Package %s available, but not installed.zNo match for argument: %s)rz?Package %s available, but installed for different architecture.z{}.{})rrsrtrr`rrrrqrrxr4rrrrrrMrdrQPackagesNotInstalledErrorrrOZ has_just_namer!r ) r3rrmr|rrZwildcardpkg_namerZ obsoletersZinstalled_namerrr5r5r6rs0    &   z Base.upgradecCs|j|jj|jj|ddS)N)r)r!r`rqr4r)r3rmr5r5r6 upgrade_allszBase.upgrade_allcCs|dkr|jjnxtjj|}|j|jdd}|djtj d|j |||j j dd}|spt jtd|dSx|D]}|jj|d qvWd S) NF)rrq) reponame__neqT)rrrzNo package %s installed.r)rr)rZdistupgrade_allrrsrtrr`rrrKZSYSTEM_REPO_NAMErr4rrMrrZ distupgrade)r3rrsrrr r5r5r6 distro_syncs   zBase.distro_syncc Cst|||gr||7}d}|rF|rFx4|D]}td}tj||q(Wn|rX|j|rXd}xX|D]P}y|j||dWn4tjjk r} ztj t | WYdd} ~ Xq^Xd}q^W|stjtdn4|j j j |jj|jjd} x| D]} |j| qWdS)zRemoves all 'leaf' packages from the system that were originally installed as dependencies of user-installed packages but which are no longer required by any such package.FzNot a valid form: %sT)rNzNo packages marked for removal.)rb)anyrrMrdrrrrQr rrr`rqrrrr4rbr) r3rrr filenamesrZgrp_specrrrUr}rr5r5r6rs,      zBase.autoremovecsptjj|jj|d}fdd|jD}|sBj||jj}x|D]}j j ||dqPWt |S)z'Mark the specified package for removal.)rcs(g|] }dks jj|kr|qS)N)rrS)rZr)rmr3r5r6r\szBase.remove..)r) rrsrtrvr`r"_raise_package_not_installed_errorr4rrrrp)r3rrmrr rrrr5)rmr3r6rs z Base.removec s tjj|}|jj}fdd|jD}|j} |dk rL| j|d|dk r`| j|dtjj | } |stj j d|| j d} j j} x\|D]T} y| t| }Wn*tk r|swjj| | dYnXjj|| d7} qW| dkrtj jd||| S) Ncs(g|] }dks jj|kr|qS)N)rrS)rZr) old_reponamer3r5r6r\sz"Base.reinstall..)rm)r&zno package matchedr)rr)rrsrtrvr`rrrrrqZ_per_nevra_dictrQr#rr4rr KeyErrorrrrZPackagesNotAvailableError)r3rr+Z new_reponameZnew_reponame_neqZ remove_nar|rrZ available_qZavailable_nevra2pkgrrZ installed_pkgZ available_pkgr5)r+r3r6 reinstalls6          zBase.reinstallcCs |j|S)zMark a package to be downgraded. This is equivalent to first removing the currently installed package, and then installing an older version. ) downgrade_to)r3rr5r5r6 downgrade szBase.downgradec Cstjj|}|j|j}|s6td|}tjj||d}|j}t |j j }|jj j j|d} t| dkrtd|}tjj|||xn| j j D]^} |jj| d} | std}tj|| qtjj|j} | j| d|jj| | dd}qW|S) zDowngrade to specific version if specified otherwise downgrades to one version lower than the package installed. 
zNo match for argument: %sr)rz6Packages for argument %s available, but not installed.zDPackage %s of lowest version already installed, cannot downgrade it.)r)rrr)rrsrtrvr`rrQrrr  _name_dictrrqrrrrpr#Z downgradesrrMrdrrrrr) r3rr|r|rrrZavailable_pkgsZavailable_pkg_namesZ q_installedr$Zdowngrade_pkgsr r5r5r6r. s.       zBase.downgrade_tocs|jjjd}|r |gfStjj|j}|r>|gfSjdsRjdr^dg}n&jdrr|gfSfddd D}|jjj|d|fS) N)Z file__glob/bin//sbin/z/usrrscsg|] }|qSr5r5)rZprefix) provides_specr5r6r\E sz!Base.provides.. /usr/bin/ /usr/sbin/)r1r2r5r6)r`rqrrrZ _by_provides startswith)r3r4Z providersZbinary_providesr5)r4r6r6 s      z Base.providesc Csddd}||krtd||}|rDd|} |jj| tj||rfd|} |jj| tj||rd|} |jj| tj||rd|} |jj| tj||rd |} |jj| tj|d S) a It modifies results of install, upgrade, and distrosync methods according to provided filters. :param cmp_type: only 'eq' or 'gte' allowed :param types: List or tuple with strings. E.g. 'bugfix', 'enhancement', 'newpackage', 'security' :param advisory: List or tuple with strings. E.g.Eg. FEDORA-2201-123 :param bugzilla: List or tuple with strings. Include packages that fix a Bugzilla ID, Eg. 123123. :param cves: List or tuple with strings. Include packages that fix a CVE (Common Vulnerabilities and Exposures) ID. Eg. CVE-2201-0123 :param severity: List or tuple with strings. Includes packages that provide a fix for an issue of the specified severity. Z__eqgZ __eqg__gt)eqZgtez Unsupported value for `cmp_type`Z advisory_typeadvisoryZ advisory_bugZ advisory_cveZadvisory_severityN)rr/ setdefaultrr<) r3Zcmp_typerr9ZbugzillaZcvesZseverityZcmp_dictZcmprr5r5r6add_security_filtersI s& zBase.add_security_filterscCs i|_dS)z, Reset all security filters N)r/)r3r5r5r6reset_security_filtersn szBase.reset_security_filtersc Cs>|jp |j s| r|S|jjjdd}|jrRx|jD]}|j|}q8W|g|_|jrx<|jjD].\}}|rx|d}||i} |j|jf| }qdW|j|}|s:|r:|j }t |j j } | dkr:|dkrt dj| } t dj| } tjt| | | n2t dj|| } t d j|| } tjt| | | |S) z Merge Queries in _update_filters and return intersection with q Query @param q: Query @return: Query T)riZ __upgraderNz3No security updates needed, but {} update availablez4No security updates needed, but {} updates availablez    zBase._merge_update_filtersc sjrtd}t||jjj|jk}|r:gnj}fdd}d}|jjjx |D]} t j j | } x| D]} t j |j| j| jdkrtd}tj|| | jq|jjrt jjj| j| j} t jjj| } tjt jj| | | | _|jjrt j j| | n t j j | d}|jj!r:d}n|jj"r|jjr| t jj#j$t jj#j%fkrd}tjt jj&tdnd}tjt jj&td nd}n<|r|| j| j| | j'| jd }n|r|| j| j}|sd}q|jj(t)j*}|r|jj+}|jj,|t)j*|jj-t j.| j}|rD|jj,||dkrjtd |}t j/j0||tjtd d}qWqhW| r|rt j/j0td |stdj1}t j/j0|||j2\}}|dkr|rtd}tj|t3|}t j/j0||dS)aRetrieve a key for a package. If needed, use the given callback to prompt whether the key should be imported. :param po: the package object to retrieve the key of :param askcb: Callback function to use to ask permission to import a key. The arguments *askcb* should take are the package object, the userid of the key, and the keyid :param fullaskcb: Callback function to use to ask permission to import a key. This differs from *askcb* in that it gets passed a dictionary so that we can expand the values passed. :raises: :class:`dnf.exceptions.Error` if there are errors retrieving the keys z6Unable to retrieve a key for a commandline package: %scs0|tdd7}|tddjj7}|S)Nz. 
Failing package is: %sz zGPG Keys are configured as: %sz, )rrr)r)rrSr5r6_prov_key_data sz1Base._get_key_for_package.._prov_key_dataFrz)GPG key at %s (0x%s) is already installedTzThe key has been approved.zThe key has been rejected.)ruseridZhexkeyidkeyurl fingerprint timestampzKey import failed (code %d)zKey imported successfullyzDidn't install any keyszThe GPG keys listed for the "%s" repository are already installed but they are not correct for this package. Check that the correct key URLs are configured for this repository.z+Import of key(s) didn't help, wrong key(s)?N)4rrrr^r~rPr1rrrZcryptoZretriever Z keyInstalledrZrpm_idrCrMrZshort_idr4rrZKeyInfoZfrom_rpm_key_objectr@Zraw_keyZDNSSECKeyVerificationZverifyZ nice_user_msgurlZlog_dns_key_importZlog_key_importZassumenoZ assumeyesZValidityZVALIDZPROVEN_NONEXISTENCEZany_msgrBr9r(r:Z getTsFlagsrZpgpImportPubkeyZ procgpgkeyrQrcrrr )r3raskcb fullaskcbrZ key_installedZkeyurlsr?Z user_cb_failrArrZ dns_input_keyZ dns_resultZrcZ test_flagZ orig_flagsrerrmsgr5)rrSr6_get_key_for_package s                zBase._get_key_for_packagecCs|j|||dS)aRetrieve a key for a package. If needed, use the given callback to prompt whether the key should be imported. :param pkg: the package object to retrieve the key of :param askcb: Callback function to use to ask permission to import a key. The arguments *askcb* should take are the package object, the userid of the key, and the keyid :param fullaskcb: Callback function to use to ask permission to import a key. This differs from *askcb* in that it gets passed a dictionary so that we can expand the values passed. :raises: :class:`dnf.exceptions.Error` if there are errors retrieving the keys N)rH)r3rrErFr5r5r6package_import_key$ szBase.package_import_keycCs4g}|jjx |jjD]}|jt|qW|S)N)rrZproblemsrwr )r3resultsZprobr5r5r6r45 s  zBase._run_rpm_checkw+bcKstjj||j||f|S)z Open the specified absolute url, return a file object which respects proxy setting even for non-repo downloads )rrZ_urlopenr4)r3rDrSmoder>r5r5r6urlopen@ sz Base.urlopencCs,|dkr|jjtjd}|j|jjd}|S)N)r)r)rrqrKrrr4r)r3rZ installonlyr5r5r6rH szBase._get_installonly_querycCsrtjj|dd}|j|jdddd}|drn|drn|djrn||ddjkrntjtdj |ddjdS) NT)rF)rjrkrlrqr"rz * Maybe you meant: {}) rrsrtrr`rrMrrrO)r3rr|rr5r5r6_report_icase_hintN s   zBase._report_icase_hintcCsdd}g}g}x6|D].}|jr:|jtjkrD|j|q|j|qWtd}|||sjtjjtd|j j rtd}|||stjjtdg}||fS)a  Check checksum of packages from local repositories and returns list packages from remote repositories that will be downloaded. Packages from commandline are skipped. 
:param install_pkgs: list of packages :return: list of remote pkgs cSsxd}xn|D]f}d}y |j}Wn0tk rN}ztjt|WYdd}~XnX|dk r tj|j||jd}q W|S)NTF)ZverifyLocalPkgrLrMrrrOrm)Zpkg_listZ logger_msgZall_packages_verifiedrZpkg_successfully_verifiedrUr5r5r6_verification_of_packages] s   z;Base._select_remote_pkgs.._verification_of_packagesz>Package "{}" from local repository "{}" has incorrect checksumz;Some packages from local repository have incorrect checksumz8Package "{}" from repository "{}" has incorrect checksumzVSome packages have invalid cache, but cannot be downloaded due to "--cacheonly" option) Z _is_local_pkgrmrKZCMDLINE_REPO_NAMErwrrrQrcr4r)r3Z install_pkgsrOryZlocal_repository_pkgsrrr5r5r6rtV s&       zBase._select_remote_pkgscCsx|D] }t|qWdS)N)_msg_installed)r3rrr5r5r6r s zBase._report_already_installedc Cs|jjtjd}tjj|}|j|j|d|d}|dk rH|dj|d|dsdtj j t d|nB|jjtj d}|dj |}|rt d}nt d}tj j ||dS) N)rF)rrrqrq)rmzNo match for argumentz?All matches were filtered out by exclude filtering for argumentz?All matches were filtered out by modular filtering for argument)r`rqrKrrrsrtrrrrQrrZIGNORE_REGULAR_EXCLUDESr) r3rrrm all_queryrsrZwith_regular_queryrr5r5r6r s  z#Base._raise_package_not_found_errorc sjjtjdj}tjj|}|jj|d|d}|dsNtj j t d|dk rpfdd|dD}n|d}|st d}nt d }tj j ||dS) N)rF)rrrqrqzNo match for argumentcs g|]}jj|kr|qSr5)rrS)rZr)rmr3r5r6r\ sz;Base._raise_package_not_installed_error..zCAll matches were installed from a different repository for argumentz?All matches were filtered out by exclude filtering for argument) r`rqrKrrrrsrtrrQr#r) r3rrrmrQrsrrrr5)rmr3r6r* s  z'Base._raise_package_not_installed_errorcCs|jj|jdddS)z Setup DNF file loggers based on given configuration file. The loggers are set the same way as if DNF was run from CLI. T)Zfile_loggers_onlyN)r$Z_setup_from_dnf_confr4)r3r5r5r6 setup_loggers szBase.setup_loggersc s|jjtjtjBtjB@r d}nd}t|j}|j|dd}|jf|}| rl|rlt j j |j }t j|t|jdd}t|jdd|} ddfdd|Dtfd d|D} tfd d| D} | | fS) zreturns set of conflicting packages and set of packages with broken dependency that would be additionally installed when --best and --allowerasingTF)rrZ ignore_weak)rcSstj|j|j|j|j|jdS)N)repochversionreleaser)rKZNEVRArrSrTrUr)itemr5r5r6r sz&Base._skipped_packages.._nevracsg|] }|qSr5r5)rZr*)rr5r6r\ sz*Base._skipped_packages..csg|]}|kr|qSr5r5)rZr)rtransaction_nevrasr5r6r\ scsg|]}|kr|qSr5r5)rZr)rrWr5r6r\ s)rZactionsrKINSTALLZUPGRADEZ UPGRADE_ALLrr0rrrrrrMrdrproblem_conflictsZproblem_broken_dependency) r3Zreport_problemsrrZngZparamsrrrYZproblem_dependencyZskipped_conflictsZskipped_dependencyr5)rrWr6_skipped_packages s(    zBase._skipped_packages)N)F)F)TT)T)N)FFF)F)F)N)T)NN)TN)rhNNFN)N)NTN)NT)TNN)NT)T)NNTN)NTN)F)T)N)N)N)N)NNNN)NN)NNNF)F)NTF)NN)NN)NrK)N)__name__ __module__ __qualname__r7r8r:r;r?rV staticmethodrrgrrpropertyrr4r^deleterrrZlazyattrrr`rarsetterrrrrrrrrrr9rrrr(ZRPMTRANS_FLAG_NOSCRIPTSZRPMTRANS_FLAG_NOTRIGGERSZRPMTRANS_FLAG_NODOCSr:ZRPMTRANS_FLAG_JUSTDBZRPMTRANS_FLAG_NOCONTEXTSZRPMTRANS_FLAG_NOFILEDIGESTrr/rrrrrrrrrrr r rrr!rAr8rLr;rUrqrzr~rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr r!rr%r'rrr-r/r.rr;r<rrHrIr4rMrrNrtrrr*rRrZr5r5r5r6r[s   =      8 ; > =       '\ 8 l"] * B  ;  ) =  *     /   % &    #  & % )    -rcCs t|}td}tj||dS)Nz Package %s is already installed.)r rrMr)rrrr5r5r6rP srP)H__doc__Z __future__rrrrrrZlibdnf.transactionrrxrZ dnf.compsrZdnf.i18nrr r Zdnf.utilr Zdnf.db.historyr Zdnf.yumr collections.abcr ImportError collectionsrZ dnf.callbackZdnf.confZ dnf.conf.readZ dnf.cryptoZ dnf.dnssecZdnf.drpmZdnf.exceptionsZdnf.goalZ dnf.historyZdnf.lockZ dnf.loggingZdnf.module.module_baseroZ 
dnf.persistorZ dnf.pluginZ dnf.queryZdnf.repoZ dnf.repodictZdnf.rpm.connectionZdnf.rpm.miscutilsZdnf.rpm.transactionZdnf.sackZ dnf.selectorZ dnf.subjectZdnf.transactionZdnf.yum.rpmtransrrrKrr#rHrrrEr(rrwZ getLoggerrMobjectrrPr5r5r5r6s           PK!<*l77)__pycache__/callback.cpython-36.opt-1.pycnu[3 ft`@s ddlmZddlZddlZejjZejjZejjZejj Z ejj Z ejj Z ejj Z ejj ZeZ ejjZejjZejjZejjZejjZejjZejjZdZdZdZdZdZGdddeZGd d d eZGd d d eZGd ddeZGdddeZ ej!j"j#Z$dS))unicode_literalsNc@seZdZddZdS) KeyImportcCsdS)z+Ask the user if the key should be imported.F)selfidZuseridZ fingerprintZurlZ timestamprr/usr/lib/python3.6/callback.py_confirm5szKeyImport._confirmN)__name__ __module__ __qualname__r rrrr r4src@s(eZdZddZddZeddZdS)PayloadcCs ||_dS)N)progress)r rrrr __init__=szPayload.__init__cCsdS)z)Nice, human-readable representation. :apiNr)r rrr __str__@szPayload.__str__cCsdS)z Total size of the download. :apiNr)r rrr download_sizeDszPayload.download_sizeN)r rrrrpropertyrrrrr r:src@s.eZdZddZddZddZd dd Zd S) DownloadProgresscCsdS)zCommunicate the information that `payload` has finished downloading. :api, `status` is a constant denoting the type of outcome, `err_msg` is an error message in case the outcome was an error. Nr)r payloadZstatusmsgrrr endMszDownloadProgress.endcCsdS)Nr)r rrrr messageVszDownloadProgress.messagecCsdS)zUpdate the progress display. :api `payload` is the payload this call reports progress for, `done` is how many bytes of this payload are already downloaded. Nr)r rdonerrr rYszDownloadProgress.progressrcCsdS)zStart new progress metering. :api `total_files` the number of files that will be downloaded, `total_size` total size of all files. Nr)r Z total_filesZ total_sizeZ total_drpmsrrr startcszDownloadProgress.startN)r)r rrrrrrrrrr rJs  rc@s eZdZdS)NullDownloadProgressN)r rrrrrr rnsrc@s$eZdZddZddZddZdS)DepsolvecCsdS)Nr)r rrr rsszDepsolve.startcCsdS)Nr)r Zpkgmoderrr pkg_addedvszDepsolve.pkg_addedcCsdS)Nr)r rrr rysz Depsolve.endN)r rrrr rrrrr rrsr)%Z __future__rZdnf.yum.rpmtransZdnfZdnf.transactionZ transactionZ PKG_DOWNGRADEZPKG_DOWNGRADEDZ PKG_INSTALLZ PKG_OBSOLETEZ PKG_OBSOLETEDZ PKG_REINSTALLZPKG_REINSTALLEDZ PKG_ERASEZ PKG_REMOVEZ PKG_UPGRADEZ PKG_UPGRADEDZ PKG_CLEANUPZ PKG_VERIFYZ PKG_SCRIPTLETZTRANS_PREPARATIONZ TRANS_POSTZ STATUS_OKZ STATUS_FAILEDZSTATUS_ALREADY_EXISTSZ STATUS_MIRRORZ STATUS_DRPMobjectrrrrrZyumZrpmtransZTransactionDisplayZTransactionProgressrrrr s: $ PK!<*l77#__pycache__/callback.cpython-36.pycnu[3 ft`@s ddlmZddlZddlZejjZejjZejjZejj Z ejj Z ejj Z ejj Z ejj ZeZ ejjZejjZejjZejjZejjZejjZejjZdZdZdZdZdZGdddeZGd d d eZGd d d eZGd ddeZGdddeZ ej!j"j#Z$dS))unicode_literalsNc@seZdZddZdS) KeyImportcCsdS)z+Ask the user if the key should be imported.F)selfidZuseridZ fingerprintZurlZ timestamprr/usr/lib/python3.6/callback.py_confirm5szKeyImport._confirmN)__name__ __module__ __qualname__r rrrr r4src@s(eZdZddZddZeddZdS)PayloadcCs ||_dS)N)progress)r rrrr __init__=szPayload.__init__cCsdS)z)Nice, human-readable representation. :apiNr)r rrr __str__@szPayload.__str__cCsdS)z Total size of the download. :apiNr)r rrr download_sizeDszPayload.download_sizeN)r rrrrpropertyrrrrr r:src@s.eZdZddZddZddZd dd Zd S) DownloadProgresscCsdS)zCommunicate the information that `payload` has finished downloading. :api, `status` is a constant denoting the type of outcome, `err_msg` is an error message in case the outcome was an error. Nr)r payloadZstatusmsgrrr endMszDownloadProgress.endcCsdS)Nr)r rrrr messageVszDownloadProgress.messagecCsdS)zUpdate the progress display. 
:api `payload` is the payload this call reports progress for, `done` is how many bytes of this payload are already downloaded. Nr)r rdonerrr rYszDownloadProgress.progressrcCsdS)zStart new progress metering. :api `total_files` the number of files that will be downloaded, `total_size` total size of all files. Nr)r Z total_filesZ total_sizeZ total_drpmsrrr startcszDownloadProgress.startN)r)r rrrrrrrrrr rJs  rc@s eZdZdS)NullDownloadProgressN)r rrrrrr rnsrc@s$eZdZddZddZddZdS)DepsolvecCsdS)Nr)r rrr rsszDepsolve.startcCsdS)Nr)r Zpkgmoderrr pkg_addedvszDepsolve.pkg_addedcCsdS)Nr)r rrr rysz Depsolve.endN)r rrrr rrrrr rrsr)%Z __future__rZdnf.yum.rpmtransZdnfZdnf.transactionZ transactionZ PKG_DOWNGRADEZPKG_DOWNGRADEDZ PKG_INSTALLZ PKG_OBSOLETEZ PKG_OBSOLETEDZ PKG_REINSTALLZPKG_REINSTALLEDZ PKG_ERASEZ PKG_REMOVEZ PKG_UPGRADEZ PKG_UPGRADEDZ PKG_CLEANUPZ PKG_VERIFYZ PKG_SCRIPTLETZTRANS_PREPARATIONZ TRANS_POSTZ STATUS_OKZ STATUS_FAILEDZSTATUS_ALREADY_EXISTSZ STATUS_MIRRORZ STATUS_DRPMobjectrrrrrZyumZrpmtransZTransactionDisplayZTransactionProgressrrrr s: $ PK!dd&__pycache__/comps.cpython-36.opt-1.pycnu[3 f`@sddlmZddlmZddlmZddlZddlmZddlm Z m Z ddl m Z ddlZ ddlZ ddlZddlZddlZddlZddlZddlZddlZddlZddlZejdZejjZejjZejjZ ejj!Z"eeBe Be"BZ#d d Z$d d Z%d dZ&ddZ'd*ddZ(Gddde)Z*Gddde)Z+Gddde)Z,Gddde,Z-Gddde,Z.Gddde,Z/Gd d!d!e,Z0Gd"d#d#e)Z1Gd$d%d%e)Z2Gd&d'd'e)Z3Gd(d)d)e)Z4dS)+)absolute_import)print_function)unicode_literalsN) CompsError)_ucd)reducednfcCs"|j|j|jf}ttjtt|S)N) categoriesgroups environmentsroperator__add__maplen)comps collectionsr/usr/lib/python3.6/comps.py_internal_comps_length6srcCs|dkr dStjj|S)N)r utilfirst)seqrrr_first_if_iterable;srcstjjfdd|D}|r&|S|r>tjtjj}ntjtjtjdj}t }x`|D]X}||j r||j |qb|j dk r||j r|j |qb|j dk rb||j rb|j |qbW|S)z;Return items from sqn matching either exactly or glob-wise.cs$h|]}|jks|jkr|qSr)nameid).0g)patternrr Esz_by_pattern..)flagsN)r Zi18nrrecompilefnmatch translatematchIsetraddrui_name)rcase_sensitiveZsqnexactr%retrr)rr _by_patternAs      r-cCs|jdkrtjS|jS)N)Z display_ordersysmaxsize)grouprrr_fn_display_orderZsr1TcCs||||||S)aF Installs a group or an environment identified by grp_or_env_id. This method is preserved for API compatibility. It used to catch an exception thrown when a gorup or env was already installed, which is no longer thrown. `install_fnc` has to be Solver._group_install or Solver._environment_install. r)Z install_fncZ grp_or_env_idtypesexcludestrictexclude_groupsrrrinstall_or_skip^s r6c@s,eZdZdZddZeddZddZdS) _Langsz6Get all usable abbreviations for the current language.cCsd|_d|_dS)N) last_localecache)selfrrr__init__osz_Langs.__init__cCs"tjtj}|dkrdSdj|S)NC.)NN)localeZ getlocale LC_MESSAGESjoin)Zlclrrr_dotted_locale_strss z_Langs._dotted_locale_strcCsz|j}|j|kr|jSg|_|g}|dkr6|jdx6|D].}x(tj|D]}||jkrL|jj|qLWq.)r'rP AVAILABLEupdate INSTALLEDZgetCompsGroupItemr( getGroupId)r: available installedresultrQr0rrr _get_groupss   zCompsQuery._get_groupscCs`t}|j|j@r&|jdd|D|j|j@r\x(|D] }|j}|sJq8|j|jq8W|S)NcSsh|] }|jqSr)r)rrQrrrrsz'CompsQuery._get_envs..)r'rPrRrSrTZgetCompsEnvironmentItemr(ZgetEnvironmentId)r:rVrWrXrQenvrrr _get_envss   zCompsQuery._get_envsc Gs tjj}g|_g|_x|D]}g}}|j|j@rf|jj|}|j j j |}|j ||}|jj ||j|j@r|jj|}|j jj |}|j||}|jj || o| r|j|jkrtdt|}n.|j|jkrtdt|}ntdt|}t|qW|S)Nz&Module or Group '%s' is not installed.z&Module or Group '%s' is not available.z$Module or Group '%s' does not exist.)r rZBunchr r rO ENVIRONMENTSrenvironments_by_patternrNrZZsearch_by_patternr[extendGROUPSgroups_by_patternr0rYrPrTrrrRr) r:ZpatternsresZpatenvsgrpsrVrWmsgrrrrEs.              
zCompsQuery.getN) rFrGrHrRrTr\r_r;rYr[rErrrrrKs  rKc@s<eZdZddZddZddZeddZed d Zd S) ForwardercCs||_||_dS)N)_i_langs)r:iobjlangsrrrr;szForwarder.__init__cCs t|j|S)N)getattrrf)r:rrrr __getattr__szForwarder.__getattr__cCs.x(|jjD]}|j|}|dk r |Sq W|S)N)rgrE)r:defaultZdctrDtrrr_ui_texts  zForwarder._ui_textcCs|j|j|jS)N)rnZdescZ desc_by_lang)r:rrrui_descriptionszForwarder.ui_descriptioncCs|j|j|jS)N)rnrZ name_by_lang)r:rrrr)szForwarder.ui_nameN) rFrGrHr;rkrnpropertyror)rrrrres  recs8eZdZfddZddZddZeddZZS) Categorycstt|j||||_dS)N)superrqr;_group_factory)r:rhri group_factory) __class__rrr;szCategory.__init__cCs0|j|j}|dkr,d}t||j|jf|S)Nz no group '%s' from category '%s')rsr ValueErrorr)r:grp_idgrprdrrr _build_groups  zCategory._build_groupccs x|jD]}|j|VqWdS)N) group_idsry)r:rwrrr groups_iters zCategory.groups_itercCs t|jS)N)listr{)r:rrrr szCategory.groups) rFrGrHr;ryr{rpr __classcell__rr)rurrqs rqcsLeZdZfddZddZddZddZed d Zed d Z Z S) Environmentcstt|j||||_dS)N)rrr~r;rs)r:rhrirt)rurrr;szEnvironment.__init__cCs0|j|j}|dkr,d}t||j|jf|S)Nz#no group '%s' from environment '%s')rsrrvr)r:rwrxrdrrrrys  zEnvironment._build_groupcCsXg}xN|D]F}y|j|j|Wq tk rN}ztj|WYdd}~Xq Xq W|S)N)rBryrvloggererror)r:Zidsr Zgierrr _build_groupss  zEnvironment._build_groupsccs\xVtj|j|jD]B}y|j|VWqtk rR}ztj|WYdd}~XqXqWdS)N) itertoolschainrz option_idsryrvrr)r:rwrrrrr{s zEnvironment.groups_itercCs |j|jS)N)rrz)r:rrrmandatory_groupsszEnvironment.mandatory_groupscCs |j|jS)N)rr)r:rrroptional_groupsszEnvironment.optional_groups) rFrGrHr;ryrr{rprrr}rr)rurr~s    r~csheZdZfddZddZeddZeddZd d Zed d Z ed dZ eddZ Z S)Groupcs$tt|j||||_|j|_dS)N)rrrr; _pkg_factoryrlZselected)r:rhriZ pkg_factory)rurrr;"szGroup.__init__csfdd|jDS)Ncsg|]}|jkr|qSr)type)rpkg)type_rr (sz+Group._packages_of_type..)packages)r:rr)rr_packages_of_type'szGroup._packages_of_typecCs |jtjS)N)rlibcompsPACKAGE_TYPE_CONDITIONAL)r:rrrconditional_packages*szGroup.conditional_packagescCs |jtjS)N)rrPACKAGE_TYPE_DEFAULT)r:rrrdefault_packages.szGroup.default_packagescCst|j|jS)N)rrr)r:rrr packages_iter2szGroup.packages_itercCs |jtjS)N)rrPACKAGE_TYPE_MANDATORY)r:rrrmandatory_packages6szGroup.mandatory_packagescCs |jtjS)N)rrPACKAGE_TYPE_OPTIONAL)r:rrroptional_packages:szGroup.optional_packagescCs|jjS)N)rfZ uservisible)r:rrrvisible>sz Group.visible) rFrGrHr;rrprrrrrrr}rr)rurr s     rc@sLeZdZdZejeejeej e ej e iZ ddZeddZeddZdS) Packagez#Represents comps package data. 
:apicCs ||_dS)N)rf)r:ipkgrrrr;LszPackage.__init__cCs|jjS)N)rfr)r:rrrrOsz Package.namecCs |j|jS)N)_OPT_MAPr)r:rrr option_typeTszPackage.option_typeN)rFrGrHrIrr CONDITIONALrDEFAULTr MANDATORYrOPTIONALrr;rprrrrrrrBs  rc@seZdZddZddZddZddZd d Zd d Zd dZ e ddZ d-ddZ d.ddZ ddZe ddZddZd/ddZd0ddZd d!Ze d"d#Zd$d%Zd1d&d'Zd2d(d)Zd*d+Zd,S)3CompscCstj|_t|_dS)N)rrrfr7rg)r:rrrr;\s zComps.__init__cCs t|jS)N)rrf)r:rrr__len__`sz Comps.__len__cCst||j|jS)N)rqrg _group_by_id)r:Z icategoryrrr_build_categorycszComps._build_categorycCst||j|jS)N)r~rgr)r:Z ienvironmentrrr_build_environmentfszComps._build_environmentcCst||j|jS)N)rrg_build_package)r:ZigrouprrrryiszComps._build_groupcCst|S)N)r)r:rrrrrlszComps._build_packagec CsVtj}y|j|Wn,tjk rB|j}tdj|YnX|j|7_dS)N )rrZ fromxml_fZ ParserErrorZget_last_errorsrr@rf)r:fnrerrorsrrr_add_from_xml_filenameoszComps._add_from_xml_filenamecCs t|jS)N)r|categories_iter)r:rrrr xszComps.categoriesFcCs|j||}t|S)N)categories_by_patternr)r:rr*Zcatsrrrcategory_by_pattern}s zComps.category_by_patterncCst|||jS)N)r-r )r:rr*rrrrszComps.categories_by_patterncsfddjjDS)Nc3s|]}j|VqdS)N)r)rc)r:rr sz(Comps.categories_iter..)rfr )r:r)r:rrszComps.categories_itercCst|jtdS)N)key)sortedenvironments_iterr1)r:rrrr szComps.environmentscstjjfdd|jDS)Nc3s|]}|jkr|VqdS)N)r)rr)rrrrsz+Comps._environment_by_id..)r rrr)r:rr)rr_environment_by_idszComps._environment_by_idcCs|j||}t|S)N)r]r)r:rr*rbrrrenvironment_by_patterns zComps.environment_by_patterncCs$t|j}t|||}t|tdS)N)r)r|rr-rr1)r:rr*rbZ found_envsrrrr]s  zComps.environments_by_patterncsfddjjDS)Nc3s|]}j|VqdS)N)r)rr)r:rrrsz*Comps.environments_iter..)rfr )r:r)r:rrszComps.environments_itercCst|jtdS)N)r)rr{r1)r:rrrr sz Comps.groupscstjjfdd|jDS)Nc3s|]}|jkr|VqdS)N)r)rr)id_rrrsz%Comps._group_by_id..)r rrr{)r:rr)rrrszComps._group_by_idcCs|j||}t|S)N)r`r)r:rr*rcrrrgroup_by_patterns zComps.group_by_patterncCs t||t|j}t|tdS)N)r)r-r|r{rr1)r:rr*rcrrrr`szComps.groups_by_patterncsfddjjDS)Nc3s|]}j|VqdS)N)ry)rr)r:rrrsz$Comps.groups_iter..)rfr )r:r)r:rr{szComps.groups_iterN)F)F)F)F)F)F)rFrGrHr;rrrryrrrpr rrrr rrr]rr rrr`r{rrrrrYs*         rc@s,eZdZddZddZddZddZd S) CompsTransPkgcCstjj|r&d|_||_d|_d|_n\t|tj j r\d|_|j |_|j t j@|_d|_n&|j|_|j|_|jt j@|_|j|_dS)NFT)r rZis_string_type basearchonlyroptionalrequires isinstancelibdnf transactionZCompsGroupPackagegetNameZgetPackageTyperrr)r:Z pkg_or_namerrrr;s  zCompsTransPkg.__init__cCs0|j|jko.|j|jko.|j|jko.|j|jkS)N)rrrr)r:otherrrr__eq__s   zCompsTransPkg.__eq__cCs|jS)N)r)r:rrr__str__szCompsTransPkg.__str__cCst|j|j|j|jfS)N)hashrrrr)r:rrr__hash__szCompsTransPkg.__hash__N)rFrGrHr;rrrrrrrrsrc@seZdZddZddZddZeddZed d Z e j d d Z ed d Z e j dd Z eddZ e j ddZ eddZ e j ddZ dS)TransactionBunchcCs$t|_t|_t|_t|_dS)N)r'_install _install_opt_remove_upgrade)r:rrrr;szTransactionBunch.__init__cCsN|jj|j|jj|j|jj|j|j|jB|j|j|j|_|S)N)rrSrrr)r:rrrr__iadd__s  zTransactionBunch.__iadd__cCs(t|jt|jt|jt|jS)N)rinstall install_optupgraderemove)r:rrrrszTransactionBunch.__len__cCs6x0|D](}t|tr |j|q|jt|qWdS)N)rrr()Zparamvalitemrrr _set_values   zTransactionBunch._set_valuecCs|jS)z Packages to be installed with strict=True - transaction will fail if they cannot be installed due to dependency errors etc. 
)r)r:rrrrszTransactionBunch.installcCs|j|j|dS)N)rr)r:valuerrrr scCs|jS)zw Packages to be installed with strict=False - they will be skipped if they cannot be installed )r)r:rrrr szTransactionBunch.install_optcCs|j|j|dS)N)rr)r:rrrrrscCs|jS)N)r)r:rrrrszTransactionBunch.removecCs|j|j|dS)N)rr)r:rrrrrscCs|jS)N)r)r:rrrr!szTransactionBunch.upgradecCs|j|j|dS)N)rr)r:rrrrr%sN)rFrGrHr;rrrJrrprsetterrrrrrrrrs     rc@seZdZddZeddZeddZegfddZd d Zd d Z dddZ ddZ ddZ dddZ ddZddZddZd S)SolvercCs||_||_||_dS)N)rNrZ _reason_fn)r:rNrZ reason_fnrrrr;+szSolver.__init__cCsdd|jDS)NcSsh|] }|jqSr)r)rrxrrrr2sz.Solver._mandatory_group_set..)r)rZrrr_mandatory_group_set0szSolver._mandatory_group_setcCs"dd|j|j|j|jDS)NcSsh|] }|jqSr)r)rrrrrr6sz+Solver._full_package_set..)rrrr)rxrrr_full_package_set4szSolver._full_package_setcsvfdd}t}|t@r*|j||j|t@rB|j||j|t@rZ|j||j|t@rr|j||j |S)Ncsfdd|DS)Ncsg|]}|jkr|qSr)r)rr)r3rrr=sz8Solver._pkgs_of_type..filter..r)pkgs)r3rrfilter<sz$Solver._pkgs_of_type..filter) r'rrSrrrrrrr)r0 pkg_typesr3rrr)r3r _pkgs_of_type:s zSolver._pkgs_of_typecCs|jjj|S)N)rNr0Zis_removable_pkg)r:Zpkg_namerrr_removable_pkgKszSolver._removable_pkgcCs|jjj|S)N)rNrZZis_removable_group)r:group_idrrr_removable_grpOszSolver._removable_grpNTc Cs|jj|}|s$ttdt||jjj||j|j |}|jjj |t }xD|j D]:} |rl| j |krlqX||j| j |||7}|j| j dtqXWx.|jD]$} |r| j |krq|j| j dtqW|S)Nz#Environment id '%s' does not exist.TF)rrrrrrNrZnewrr)rrrr_group_installaddGrouprrr) r:env_idrr3r4r5 comps_envswdb_envtrans comps_grouprrr_environment_installSs    zSolver._environment_installcCsx|jjj|}|s"ttd||jjj|t}tdd|jD}x&|D]}|j |sbqR||j |7}qRW|S)Nz%Environment id '%s' is not installed.cSsg|] }|jqSr)rU)rrQrrrrssz.Solver._environment_remove..) rNrZrErrrrr' getGroupsr _group_remove)r:rrrrzrrrr_environment_removejs  zSolver._environment_removecCs>|jj|}|jjj|}|s.ttd||sBttd|tdd|jD}|j }|jjj |j |j |j |}t}x\|jD]R}|j |kr|jjj|j r||j|j 7}n||j|j |7}|j|j dtqWxL|jD]B}|j |kr|jjj|j r||j|j 7}|j|j dtqW|jjj||S)Nz"Environment '%s' is not installed.z"Environment '%s' is not available.cSsg|] }|jqSr)rU)rrQrrrrsz/Solver._environment_upgrade..TF)rrrNrZrErrr'rgetPackageTypesrrrr)rrr0_group_upgraderrrrrr)r:rrrold_setrrrrrr_environment_upgradezs,    zSolver._environment_upgradec Cs|jj|}|s$ttdt||jjj||j|j |}x(|j D]}|j |jdt j |jqFW|jjj|t} |r| jj|j||gdn| jj|j||gd| S)NzGroup id '%s' does not exist.F)r3)rrrrrrNr0rrr)r addPackagerrrrrrSrr) r:rrr3r4r5r swdb_grouprQrrrrrs zSolver._group_installcsRjjj|}|s"ttd|jjj|t}fdd|jD|_|S)Nz&Module or Group '%s' is not installed.csh|]}j|jr|qSr)rr)rr)r:rrrsz'Solver._group_remove..)rNr0rErrrr getPackages)r:rrrr)r:rrszSolver._group_removec s|jj|}|jjj|}g}|s@|r,|jn|}ttd||sTttd||j}t dd|j D|j ||||jjj ||j |j|}x(|jD]}|j|j dtj|jqW|jjj|t}fddD|_fddD|_fd dD|_|S) Nz&Module or Group '%s' is not installed.z&Module or Group '%s' is not available.cSsg|] }|jqSr)r)rrQrrrrsz)Solver._group_upgrade..Fcsh|]}|jkr|qSr)r)rr)rrrrsz(Solver._group_upgrade..cs"h|]}|ddDkr|qS)cSsg|] }|jqSr)r)rrrrrrsz3Solver._group_upgrade...r)rr)new_setrrrscsh|]}|jkr|qSr)r)rr)rrrrs)rrrNr0rEr)rrrr'rrrrrrrrrrrrr) r:rrrr3ZargumentrrQrr)rrrrs( zSolver._group_upgradecCslxf|jjD]Z}|jj|}|jr t|jt|j}|jjjj |d}x|D]}|j j |qPWq WdS)N)r) Z persistorr r0rWr'Z full_listZ pkg_excludeZsackZqueryZfiltermZ_goalr)r:baser0Zp_grpZinstalled_pkg_namesZinstalled_pkgsrrrr'_exclude_packages_from_installed_groupss  z.Solver._exclude_packages_from_installed_groups)NTN)NTN)rFrGrHr;rJrrrrrrrrrrrrrrrrr*s   #  r)NTN)5Z __future__rrrZlibdnf.transactionrZdnf.exceptionsrZdnf.i18nrr functoolsrr Zdnf.utilr#rCrrr>Zloggingr r!r.Z 
getLoggerrrZCompsPackageType_CONDITIONALrZCompsPackageType_DEFAULTrZCompsPackageType_MANDATORYrZCompsPackageType_OPTIONALrZ ALL_TYPESrrr-r1r6objectr7rKrerqr~rrrrrrrrrrsP       !A'"f(CPK!I܊kfkf __pycache__/comps.cpython-36.pycnu[3 f`@sddlmZddlmZddlmZddlZddlmZddlm Z m Z ddl m Z ddlZ ddlZ ddlZddlZddlZddlZddlZddlZddlZddlZddlZejdZejjZejjZejjZ ejj!Z"eeBe Be"BZ#d d Z$d d Z%d dZ&ddZ'd*ddZ(Gddde)Z*Gddde)Z+Gddde)Z,Gddde,Z-Gddde,Z.Gddde,Z/Gd d!d!e,Z0Gd"d#d#e)Z1Gd$d%d%e)Z2Gd&d'd'e)Z3Gd(d)d)e)Z4dS)+)absolute_import)print_function)unicode_literalsN) CompsError)_ucd)reducednfcCs"|j|j|jf}ttjtt|S)N) categoriesgroups environmentsroperator__add__maplen)comps collectionsr/usr/lib/python3.6/comps.py_internal_comps_length6srcCs|dkr dStjj|S)N)r utilfirst)seqrrr_first_if_iterable;srcstjjfdd|D}|r&|S|r>tjtjj}ntjtjtjdj}t }x`|D]X}||j r||j |qb|j dk r||j r|j |qb|j dk rb||j rb|j |qbW|S)z;Return items from sqn matching either exactly or glob-wise.cs$h|]}|jks|jkr|qSr)nameid).0g)patternrr Esz_by_pattern..)flagsN)r Zi18nrrecompilefnmatch translatematchIsetraddrui_name)rcase_sensitiveZsqnexactr%retrr)rr _by_patternAs      r-cCs|jdkrtjS|jS)N)Z display_ordersysmaxsize)grouprrr_fn_display_orderZsr1TcCs||||||S)aF Installs a group or an environment identified by grp_or_env_id. This method is preserved for API compatibility. It used to catch an exception thrown when a gorup or env was already installed, which is no longer thrown. `install_fnc` has to be Solver._group_install or Solver._environment_install. r)Z install_fncZ grp_or_env_idtypesexcludestrictexclude_groupsrrrinstall_or_skip^s r6c@s,eZdZdZddZeddZddZdS) _Langsz6Get all usable abbreviations for the current language.cCsd|_d|_dS)N) last_localecache)selfrrr__init__osz_Langs.__init__cCs"tjtj}|dkrdSdj|S)NC.)NN)localeZ getlocale LC_MESSAGESjoin)Zlclrrr_dotted_locale_strss z_Langs._dotted_locale_strcCsz|j}|j|kr|jSg|_|g}|dkr6|jdx6|D].}x(tj|D]}||jkrL|jj|qLWq.)r'rP AVAILABLEupdate INSTALLEDZgetCompsGroupItemr( getGroupId)r: available installedresultrQr0rrr _get_groupss   zCompsQuery._get_groupscCs`t}|j|j@r&|jdd|D|j|j@r\x(|D] }|j}|sJq8|j|jq8W|S)NcSsh|] }|jqSr)r)rrQrrrrsz'CompsQuery._get_envs..)r'rPrRrSrTZgetCompsEnvironmentItemr(ZgetEnvironmentId)r:rVrWrXrQenvrrr _get_envss   zCompsQuery._get_envsc Gs tjj}g|_g|_x|D]}g}}|j|j@rf|jj|}|j j j |}|j ||}|jj ||j|j@r|jj|}|j jj |}|j||}|jj || o| r|j|jkrtdt|}n.|j|jkrtdt|}ntdt|}t|qW|S)Nz&Module or Group '%s' is not installed.z&Module or Group '%s' is not available.z$Module or Group '%s' does not exist.)r rZBunchr r rO ENVIRONMENTSrenvironments_by_patternrNrZZsearch_by_patternr[extendGROUPSgroups_by_patternr0rYrPrTrrrRr) r:ZpatternsresZpatenvsgrpsrVrWmsgrrrrEs.              
zCompsQuery.getN) rFrGrHrRrTr\r_r;rYr[rErrrrrKs  rKc@s<eZdZddZddZddZeddZed d Zd S) ForwardercCs||_||_dS)N)_i_langs)r:iobjlangsrrrr;szForwarder.__init__cCs t|j|S)N)getattrrf)r:rrrr __getattr__szForwarder.__getattr__cCs.x(|jjD]}|j|}|dk r |Sq W|S)N)rgrE)r:defaultZdctrDtrrr_ui_texts  zForwarder._ui_textcCs|j|j|jS)N)rnZdescZ desc_by_lang)r:rrrui_descriptionszForwarder.ui_descriptioncCs|j|j|jS)N)rnrZ name_by_lang)r:rrrr)szForwarder.ui_nameN) rFrGrHr;rkrnpropertyror)rrrrres  recs8eZdZfddZddZddZeddZZS) Categorycstt|j||||_dS)N)superrqr;_group_factory)r:rhri group_factory) __class__rrr;szCategory.__init__cCs0|j|j}|dkr,d}t||j|jf|S)Nz no group '%s' from category '%s')rsr ValueErrorr)r:grp_idgrprdrrr _build_groups  zCategory._build_groupccs x|jD]}|j|VqWdS)N) group_idsry)r:rwrrr groups_iters zCategory.groups_itercCs t|jS)N)listr{)r:rrrr szCategory.groups) rFrGrHr;ryr{rpr __classcell__rr)rurrqs rqcsLeZdZfddZddZddZddZed d Zed d Z Z S) Environmentcstt|j||||_dS)N)rrr~r;rs)r:rhrirt)rurrr;szEnvironment.__init__cCs0|j|j}|dkr,d}t||j|jf|S)Nz#no group '%s' from environment '%s')rsrrvr)r:rwrxrdrrrrys  zEnvironment._build_groupcCsXg}xN|D]F}y|j|j|Wq tk rN}ztj|WYdd}~Xq Xq W|S)N)rBryrvloggererror)r:Zidsr Zgierrr _build_groupss  zEnvironment._build_groupsccs\xVtj|j|jD]B}y|j|VWqtk rR}ztj|WYdd}~XqXqWdS)N) itertoolschainrz option_idsryrvrr)r:rwrrrrr{s zEnvironment.groups_itercCs |j|jS)N)rrz)r:rrrmandatory_groupsszEnvironment.mandatory_groupscCs |j|jS)N)rr)r:rrroptional_groupsszEnvironment.optional_groups) rFrGrHr;ryrr{rprrr}rr)rurr~s    r~csheZdZfddZddZeddZeddZd d Zed d Z ed dZ eddZ Z S)Groupcs$tt|j||||_|j|_dS)N)rrrr; _pkg_factoryrlZselected)r:rhriZ pkg_factory)rurrr;"szGroup.__init__csfdd|jDS)Ncsg|]}|jkr|qSr)type)rpkg)type_rr (sz+Group._packages_of_type..)packages)r:rr)rr_packages_of_type'szGroup._packages_of_typecCs |jtjS)N)rlibcompsPACKAGE_TYPE_CONDITIONAL)r:rrrconditional_packages*szGroup.conditional_packagescCs |jtjS)N)rrPACKAGE_TYPE_DEFAULT)r:rrrdefault_packages.szGroup.default_packagescCst|j|jS)N)rrr)r:rrr packages_iter2szGroup.packages_itercCs |jtjS)N)rrPACKAGE_TYPE_MANDATORY)r:rrrmandatory_packages6szGroup.mandatory_packagescCs |jtjS)N)rrPACKAGE_TYPE_OPTIONAL)r:rrroptional_packages:szGroup.optional_packagescCs|jjS)N)rfZ uservisible)r:rrrvisible>sz Group.visible) rFrGrHr;rrprrrrrrr}rr)rurr s     rc@sLeZdZdZejeejeej e ej e iZ ddZeddZeddZdS) Packagez#Represents comps package data. 
:apicCs ||_dS)N)rf)r:ipkgrrrr;LszPackage.__init__cCs|jjS)N)rfr)r:rrrrOsz Package.namecCs |j|jS)N)_OPT_MAPr)r:rrr option_typeTszPackage.option_typeN)rFrGrHrIrr CONDITIONALrDEFAULTr MANDATORYrOPTIONALrr;rprrrrrrrBs  rc@seZdZddZddZddZddZd d Zd d Zd dZ e ddZ d-ddZ d.ddZ ddZe ddZddZd/ddZd0ddZd d!Ze d"d#Zd$d%Zd1d&d'Zd2d(d)Zd*d+Zd,S)3CompscCstj|_t|_dS)N)rrrfr7rg)r:rrrr;\s zComps.__init__cCs t|jS)N)rrf)r:rrr__len__`sz Comps.__len__cCst||j|jS)N)rqrg _group_by_id)r:Z icategoryrrr_build_categorycszComps._build_categorycCst||j|jS)N)r~rgr)r:Z ienvironmentrrr_build_environmentfszComps._build_environmentcCst||j|jS)N)rrg_build_package)r:ZigrouprrrryiszComps._build_groupcCst|S)N)r)r:rrrrrlszComps._build_packagec CsVtj}y|j|Wn,tjk rB|j}tdj|YnX|j|7_dS)N )rrZ fromxml_fZ ParserErrorZget_last_errorsrr@rf)r:fnrerrorsrrr_add_from_xml_filenameoszComps._add_from_xml_filenamecCs t|jS)N)r|categories_iter)r:rrrr xszComps.categoriesFcCs$tjj|st|j||}t|S)N)r ris_string_typeAssertionErrorcategories_by_patternr)r:rr*Zcatsrrrcategory_by_pattern}s zComps.category_by_patterncCstjj|stt|||jS)N)r rrrr-r )r:rr*rrrrszComps.categories_by_patterncsfddjjDS)Nc3s|]}j|VqdS)N)r)rc)r:rr sz(Comps.categories_iter..)rfr )r:r)r:rrszComps.categories_itercCst|jtdS)N)key)sortedenvironments_iterr1)r:rrrr szComps.environmentscs.tjjsttjjfdd|jDS)Nc3s|]}|jkr|VqdS)N)r)rr)rrrrsz+Comps._environment_by_id..)r rrrrr)r:rr)rr_environment_by_idszComps._environment_by_idcCs$tjj|st|j||}t|S)N)r rrrr]r)r:rr*rbrrrenvironment_by_patterns zComps.environment_by_patterncCs4tjj|stt|j}t|||}t|tdS)N)r) r rrrr|rr-rr1)r:rr*rbZ found_envsrrrr]s  zComps.environments_by_patterncsfddjjDS)Nc3s|]}j|VqdS)N)r)rr)r:rrrsz*Comps.environments_iter..)rfr )r:r)r:rrszComps.environments_itercCst|jtdS)N)r)rr{r1)r:rrrr sz Comps.groupscs.tjjsttjjfdd|jDS)Nc3s|]}|jkr|VqdS)N)r)rr)id_rrrsz%Comps._group_by_id..)r rrrrr{)r:rr)rrrszComps._group_by_idcCs$tjj|st|j||}t|S)N)r rrrr`r)r:rr*rcrrrgroup_by_patterns zComps.group_by_patterncCs0tjj|stt||t|j}t|tdS)N)r) r rrrr-r|r{rr1)r:rr*rcrrrr`szComps.groups_by_patterncsfddjjDS)Nc3s|]}j|VqdS)N)ry)rr)r:rrrsz$Comps.groups_iter..)rfr )r:r)r:rr{szComps.groups_iterN)F)F)F)F)F)F)rFrGrHr;rrrryrrrpr rrrr rrr]rr rrr`r{rrrrrYs*         rc@s,eZdZddZddZddZddZd S) CompsTransPkgcCstjj|r&d|_||_d|_d|_n\t|tj j r\d|_|j |_|j t j@|_d|_n&|j|_|j|_|jt j@|_|j|_dS)NFT)r rr basearchonlyroptionalrequires isinstancelibdnf transactionZCompsGroupPackagegetNameZgetPackageTyperrr)r:Z pkg_or_namerrrr;s  zCompsTransPkg.__init__cCs0|j|jko.|j|jko.|j|jko.|j|jkS)N)rrrr)r:otherrrr__eq__s   zCompsTransPkg.__eq__cCs|jS)N)r)r:rrr__str__szCompsTransPkg.__str__cCst|j|j|j|jfS)N)hashrrrr)r:rrr__hash__szCompsTransPkg.__hash__N)rFrGrHr;rrrrrrrrsrc@seZdZddZddZddZeddZed d Z e j d d Z ed d Z e j dd Z eddZ e j ddZ eddZ e j ddZ dS)TransactionBunchcCs$t|_t|_t|_t|_dS)N)r'_install _install_opt_remove_upgrade)r:rrrr;szTransactionBunch.__init__cCsN|jj|j|jj|j|jj|j|j|jB|j|j|j|_|S)N)rrSrrr)r:rrrr__iadd__s  zTransactionBunch.__iadd__cCs(t|jt|jt|jt|jS)N)rinstall install_optupgraderemove)r:rrrrszTransactionBunch.__len__cCs6x0|D](}t|tr |j|q|jt|qWdS)N)rrr()Zparamvalitemrrr _set_values   zTransactionBunch._set_valuecCs|jS)z Packages to be installed with strict=True - transaction will fail if they cannot be installed due to dependency errors etc. 
)r)r:rrrrszTransactionBunch.installcCs|j|j|dS)N)rr)r:valuerrrr scCs|jS)zw Packages to be installed with strict=False - they will be skipped if they cannot be installed )r)r:rrrr szTransactionBunch.install_optcCs|j|j|dS)N)rr)r:rrrrrscCs|jS)N)r)r:rrrrszTransactionBunch.removecCs|j|j|dS)N)rr)r:rrrrrscCs|jS)N)r)r:rrrr!szTransactionBunch.upgradecCs|j|j|dS)N)rr)r:rrrrr%sN)rFrGrHr;rrrJrrprsetterrrrrrrrrs     rc@seZdZddZeddZeddZegfddZd d Zd d Z dddZ ddZ ddZ dddZ ddZddZddZd S)SolvercCs||_||_||_dS)N)rNrZ _reason_fn)r:rNrZ reason_fnrrrr;+szSolver.__init__cCsdd|jDS)NcSsh|] }|jqSr)r)rrxrrrr2sz.Solver._mandatory_group_set..)r)rZrrr_mandatory_group_set0szSolver._mandatory_group_setcCs"dd|j|j|j|jDS)NcSsh|] }|jqSr)r)rrrrrr6sz+Solver._full_package_set..)rrrr)rxrrr_full_package_set4szSolver._full_package_setcsvfdd}t}|t@r*|j||j|t@rB|j||j|t@rZ|j||j|t@rr|j||j |S)Ncsfdd|DS)Ncsg|]}|jkr|qSr)r)rr)r3rrr=sz8Solver._pkgs_of_type..filter..r)pkgs)r3rrfilter<sz$Solver._pkgs_of_type..filter) r'rrSrrrrrrr)r0 pkg_typesr3rrr)r3r _pkgs_of_type:s zSolver._pkgs_of_typecCstjj|st|jjj|S)N)r rrrrNr0Zis_removable_pkg)r:Zpkg_namerrr_removable_pkgKszSolver._removable_pkgcCstjj|st|jjj|S)N)r rrrrNrZZis_removable_group)r:group_idrrr_removable_grpOszSolver._removable_grpNTc Cstjj|st|jj|}|s4ttdt||j j j ||j |j |}|j j j|t}xD|jD]:} |r|| j|kr|qh||j| j|||7}|j| jdtqhWx.|jD]$} |r| j|krq|j| jdtqW|S)Nz#Environment id '%s' does not exist.TF)r rrrrrrrrrNrZnewrr)rrrr_group_installaddGrouprrr) r:env_idrr3r4r5 comps_envswdb_envtrans comps_grouprrr_environment_installSs"   zSolver._environment_installcCstjj|dkst|jjj|}|s6ttd||jjj |t }t dd|j D}x&|D]}|j |svqf||j|7}qfW|S)NTz%Environment id '%s' is not installed.cSsg|] }|jqSr)rU)rrQrrrrssz.Solver._environment_remove..)r rrrrNrZrErrrrr' getGroupsr _group_remove)r:rrrrzrrrr_environment_removejs  zSolver._environment_removecCsNtjj|st|jj|}|jjj|}|s>t t d||sRt t d|t dd|j D}|j }|jjj|j|j|j|}t}x\|jD]R}|j|kr|jjj|jr||j|j7}n||j|j|7}|j|jdtqWxL|jD]B}|j|kr(|jjj|jr(||j|j7}|j|jdtqW|jjj||S)Nz"Environment '%s' is not installed.z"Environment '%s' is not available.cSsg|] }|jqSr)rU)rrQrrrrsz/Solver._environment_upgrade..TF)r rrrrrrNrZrErrr'rgetPackageTypesrrrr)rrr0_group_upgraderrrrrr)r:rrrold_setrrrrrr_environment_upgradezs.    zSolver._environment_upgradec Cstjj|st|jj|}|s4ttdt||j j j ||j |j |}x(|jD]}|j|j dtj|jqVW|j j j|t} |r| jj|j||gdn| jj|j||gd| S)NzGroup id '%s' does not exist.F)r3)r rrrrrrrrrNr0rrr)r addPackagerrrrrrSrr) r:rrr3r4r5r swdb_grouprQrrrrrs zSolver._group_installcsbtjj|stjjj|}|s2ttd|jjj |t }fdd|j D|_ |S)Nz&Module or Group '%s' is not installed.csh|]}j|jr|qSr)rr)rr)r:rrrsz'Solver._group_remove..) 
r rrrrNr0rErrrr getPackages)r:rrrr)r:rrszSolver._group_removec s&tjj|st|jj|}|jjj|}g}|sP|r<|j n|}t t d||sdt t d||j }t dd|jD|j||||jjj||j|j |}x(|jD]}|j|jdtj|jqW|jjj|t}fddD|_fddD|_fd dD|_|S) Nz&Module or Group '%s' is not installed.z&Module or Group '%s' is not available.cSsg|] }|jqSr)r)rrQrrrrsz)Solver._group_upgrade..Fcsh|]}|jkr|qSr)r)rr)rrrrsz(Solver._group_upgrade..cs"h|]}|ddDkr|qS)cSsg|] }|jqSr)r)rrrrrrsz3Solver._group_upgrade...r)rr)new_setrrrscsh|]}|jkr|qSr)r)rr)rrrrs)r rrrrrrNr0rEr)rrrr'rrrrrrrrrrrrr) r:rrrr3ZargumentrrQrr)rrrrs* zSolver._group_upgradecCslxf|jjD]Z}|jj|}|jr t|jt|j}|jjjj |d}x|D]}|j j |qPWq WdS)N)r) Z persistorr r0rWr'Z full_listZ pkg_excludeZsackZqueryZfiltermZ_goalr)r:baser0Zp_grpZinstalled_pkg_namesZinstalled_pkgsrrrr'_exclude_packages_from_installed_groupss  z.Solver._exclude_packages_from_installed_groups)NTN)NTN)rFrGrHr;rJrrrrrrrrrrrrrrrrr*s   #  r)NTN)5Z __future__rrrZlibdnf.transactionrZdnf.exceptionsrZdnf.i18nrr functoolsrr Zdnf.utilr#rCrrr>Zloggingr r!r.Z getLoggerrrZCompsPackageType_CONDITIONALrZCompsPackageType_DEFAULTrZCompsPackageType_MANDATORYrZCompsPackageType_OPTIONALrZ ALL_TYPESrrr-r1r6objectr7rKrerqr~rrrrrrrrrrsP       !A'"f(CPK! *]yy&__pycache__/const.cpython-36.opt-1.pycnu[3 f@ @sddlmZddlZdZdZd&Zd'ZddddddgZdZ dZ dZ dZ dZ dZdZdZdZdZdZdZd ZejZejZd!Zd"ejjZd#Zd$eZejZd%Z dS)()unicode_literalsNz/etc/dnf/dnf.confz/etc/dnf/automatic.confsystem-release(releasever)system-release distribution-release(releasever)distribution-releaseredhat-release suse-release mandatorydefault conditionalZkernelz kernel-PAEzinstallonlypkg(kernel)zinstallonlypkg(kernel-module)zinstallonlypkg(vm)zmultiversion(kernel)zdnf.logz hawkey.logzdnf.librepo.logz--- logging initialized ---z dnf.rpm.logZDNFz /var/lib/dnfz/var/run/dnf.pidz/runz /run/userz/var/cache/dnfz /var/tmp/z/etc/dnf/pluginsz%s/dnf-pluginsz4.7.0zdnf/%szhttps://bugs.almalinux.org/)rrrrrr)r r r )!Z __future__rZdistutils.sysconfigZ distutilsZ CONF_FILENAMEZCONF_AUTOMATIC_FILENAMEZ DISTROVERPKGZGROUP_PACKAGE_TYPESZINSTALLONLYPKGSZLOGZ LOG_HAWKEYZ LOG_LIBREPOZ LOG_MARKERZLOG_RPMNAMEZ PERSISTDIRZ PID_FILENAMEZRUNDIRZ USER_RUNDIRZSYSTEM_CACHEDIRZTMPDIRZ VERBOSE_LEVELlowerZPREFIXZ PROGRAM_NAMEZPLUGINCONFPATH sysconfigZget_python_libZ PLUGINPATHVERSIONZ USER_AGENTZBUGTRACKER_COMPONENTZ BUGTRACKERrr/usr/lib/python3.6/const.pysB PK! 
*]yy __pycache__/const.cpython-36.pycnu[3 f@ @sddlmZddlZdZdZd&Zd'ZddddddgZdZ dZ dZ dZ dZ dZdZdZdZdZdZdZd ZejZejZd!Zd"ejjZd#Zd$eZejZd%Z dS)()unicode_literalsNz/etc/dnf/dnf.confz/etc/dnf/automatic.confsystem-release(releasever)system-release distribution-release(releasever)distribution-releaseredhat-release suse-release mandatorydefault conditionalZkernelz kernel-PAEzinstallonlypkg(kernel)zinstallonlypkg(kernel-module)zinstallonlypkg(vm)zmultiversion(kernel)zdnf.logz hawkey.logzdnf.librepo.logz--- logging initialized ---z dnf.rpm.logZDNFz /var/lib/dnfz/var/run/dnf.pidz/runz /run/userz/var/cache/dnfz /var/tmp/z/etc/dnf/pluginsz%s/dnf-pluginsz4.7.0zdnf/%szhttps://bugs.almalinux.org/)rrrrrr)r r r )!Z __future__rZdistutils.sysconfigZ distutilsZ CONF_FILENAMEZCONF_AUTOMATIC_FILENAMEZ DISTROVERPKGZGROUP_PACKAGE_TYPESZINSTALLONLYPKGSZLOGZ LOG_HAWKEYZ LOG_LIBREPOZ LOG_MARKERZLOG_RPMNAMEZ PERSISTDIRZ PID_FILENAMEZRUNDIRZ USER_RUNDIRZSYSTEM_CACHEDIRZTMPDIRZ VERBOSE_LEVELlowerZPREFIXZ PROGRAM_NAMEZPLUGINCONFPATH sysconfigZget_python_libZ PLUGINPATHVERSIONZ USER_AGENTZBUGTRACKER_COMPONENTZ BUGTRACKERrr/usr/lib/python3.6/const.pysB PK!7'__pycache__/crypto.cpython-36.opt-1.pycnu[3 ft` @s<ddlmZddlmZddlmZddlmZddlZddlZddl Zddl Zddl Z ddl Z ddl Z ddlZyddlmZddlmZWn<ek rddlZGdd d eZGd d d eZYnXd Ze jd ZddZddZddZddZddZddZejddZddZ d"ddZ!Gd d!d!eZ"dS)#)print_function)absolute_import)unicode_literals)_N)Context)Datac@sVeZdZddZddZddZeddZejd dZd d Z d d Z ddZ dS)rcCstj|jd<dS)Nctx)gpgmer__dict__)selfr /usr/lib/python3.6/crypto.py__init__*szContext.__init__cCs|S)Nr )r r r r __enter__-szContext.__enter__cCsdS)Nr )r typevaluetbr r r __exit__0szContext.__exit__cCs|jjS)N)rarmor)r r r r r3sz Context.armorcCs ||j_dS)N)rr)r rr r r r7scCs$t|trtj|}|jj|dS)N) isinstanceZ basestringioBytesIOrimport_)r key_for r r op_import;s  zContext.op_importcCs|jj||dS)N)rZexport)r patternmodeZkeydatar r r op_export@szContext.op_exportcCs t|j|S)N)getattrr)r namer r r __getattr__CszContext.__getattr__N) __name__ __module__ __qualname__rrrpropertyrsetterrrr r r r r r)s rc@s4eZdZddZddZddZddZd d Zd S) rcCstj|jd<dS)Nbuf)rrr )r r r r rHsz Data.__init__cCs|S)Nr )r r r r rKszData.__enter__cCsdS)Nr )r rrrr r r rNsz Data.__exit__cCs |jjS)N)r&getvalue)r r r r readQsz Data.readcCs t|j|S)N)rr&)r rr r r r TszData.__getattr__N)r!r"r#rrrr(r r r r r rGs rZ GNUPGHOMEdnfcCstjjdd|jDS)Ncss|]}|jr|VqdS)N)Zcan_sign).0subkeyr r r ]sz*_extract_signing_subkey..)r)utilfirstZsubkeys)keyr r r _extract_signing_subkey\sr0cs(fddtdtdD}dj|S)Nc3s|]}||dVqdS)Nr )r*i)fpr_hexr r r,asz)_printable_fingerprint..rr1 )rangelenjoin)r3Zsegmentsr )r3r _printable_fingerprint`sr8cCs|j}t|}x|jD]x}xrt||D]d}|j}||krNtjtd|j|q&|j j |s\q&t j j j|j|j|ddtjtd|j|q&WqWdS)Nzrepo %s: 0x%s already importedF)gpgdirZ make_ro_copyzrepo %s: imported key 0x%s.)Z _pubring_dirkeyids_from_pubringZgpgkeyretrieveid_loggerdebugridZ _key_importZ_confirmr)ZyumZmiscZimport_key_to_pubringraw_keyshort_id)repor9Z known_keyskeyurlkeyinfokeyidr r r import_repo_keyses   rFcCsltjj|sgSt|Jt8}g}x,|jD] }t|}|dk r0|j|jq0W|SQRXWdQRXdS)N) ospathexists pubring_dirrkeylistr0appendrE)r9rZkeyidskr+r r r r:vs r:cCs8td|j|jt|j|jjddf}tjd|dS)NzLImporting GPG key 0x%s: Userid : "%s" Fingerprint: %s From : %szfile://z%s) rrAuseridr8 fingerprinturlreplacer=critical)rDmsgr r r log_key_imports rUcCs8t||tjjjkr&tjtdntjtddS)Nz0Verified using DNS record with DNSSEC signature.zNOT verified using DNS record.)rUr)ZdnssecZValidityZVALIDr=rSr)rDZ dns_resultr r r log_dns_key_importsrVc csFtjjtd}|tjt<z dVWd|dkr6tjt=n |tjt<XdS)N)rGenvironget GPG_HOME_ENV)rJZorigr 
r r rJs   rJcCstj}g}t|t}|j|x2|jD]&}t|}|dkrHq2|jt||q2Wd|_ xF|D]>}t .}|j |j d||j dtj|j|_WdQRXqhWWdQRXWdQRXtjj||S)NTr)tempfileZmkdtemprJrrrKr0rLKeyrrrr<seekrGSEEK_SETr(r@r)r-Zrm_rf)rZpb_dirkeyinfosrr/r+infoZsinkr r r rawkey2infoss"  , r`c CsZ|jdrtjtd|j|tjj||d}t|}WdQRXx|D] }||_ qHW|S)Nzhttp:z.retrieving repo key for %s unencrypted from %s)rB) startswithr=Zwarningrr?r)r-Z_urlopenr`rQ)rCrBZhandler^rDr r r r;s   r;c@s,eZdZddZeddZeddZdS)r[cCs6|j|_|j|_d|_|j|_d|_|jdj|_ dS)Nr) rEr<ZfprrPr@Z timestamprQZuidsZuidrO)r r/r+r r r rs z Key.__init__cCs&tjjr dnd}|jddjd|S)N00i)r)ZpycompZPY3r<rjust)r Zrjr r r rAsz Key.short_idcCs |jjS)N)rAlower)r r r r rpm_idsz Key.rpm_idN)r!r"r#rr$rArgr r r r r[s r[)N)#Z __future__rrrZdnf.i18nr contextlibZ dnf.pycompr)Zdnf.utilZ dnf.yum.miscrZloggingrGrZZgpgrr ImportErrorr objectrYZ getLoggerr=r0r8rFr:rUrVcontextmanagerrJr`r;r[r r r r s<          PK!Dr -cli/__pycache__/__init__.cpython-36.opt-1.pycnu[3 ft`@sDddlmZddlZGdddejjZddlmZddl m Z dS))absolute_importNc@seZdZdZdS)CliErrorzCLI Exception. :apiN)__name__ __module__ __qualname____doc__rr/usr/lib/python3.6/__init__.pyrsr)Cli)Command) Z __future__rZdnf.exceptionsZdnf exceptionsErrorrZ dnf.cli.clir Zdnf.cli.commandsr rrrr s  PK!Dr 'cli/__pycache__/__init__.cpython-36.pycnu[3 ft`@sDddlmZddlZGdddejjZddlmZddl m Z dS))absolute_importNc@seZdZdZdS)CliErrorzCLI Exception. :apiN)__name__ __module__ __qualname____doc__rr/usr/lib/python3.6/__init__.pyrsr)Cli)Command) Z __future__rZdnf.exceptionsZdnf exceptionsErrorrZ dnf.cli.clir Zdnf.cli.commandsr rrrr s  PK!./<<,cli/__pycache__/aliases.cpython-36.opt-1.pycnu[3 ft`@sddlmZddlmZddlmZddlZddlZddlm Z ddl Zddl Z ddl Z ddlZddlZe jdZdZejjedZejjed ZGd d d eZGd d d eZdS))absolute_import)unicode_literals)_N) PRIO_DEFAULTdnfz/etc/dnf/aliases.d/z ALIASES.confz USER.confc@s,eZdZddZeddZeddZdS) AliasesConfigcCs$||_tjj|_|jj|jdS)N)_pathlibdnfconfZ ConfigParser_parserread)selfpathr/usr/lib/python3.6/aliases.py__init__*s zAliasesConfig.__init__c CsHtjjd}y|jt|jjddWntk r>YnX|jS)NTmainenabled) r r OptionBoolsetrr ZgetData IndexErrorgetValue)r optionrrrr/s  zAliasesConfig.enabledcCsVtj}d}|jj|s|Sx4|jj|D]$}|jj||}|sBq*|j||<q*W|S)Naliases) collections OrderedDictr Z hasSectionZoptionsrsplit)r resultZsectionkeyvaluerrrr8s zAliasesConfig.aliasesN)__name__ __module__ __qualname__rpropertyrrrrrrr)s rc@sNeZdZddZddZddZddZdd d Zd d ZddZ ddZ d S)AliasescCsFtj|_d|_d|_|jr(d|_dS|j|js:dS|jdS)NTF)rrrr r_disabled_by_environ _load_main _load_aliases)r rrrrGs zAliases.__init__c Cshtjjd}y|jttjd|jStk r:dSt k rbt j t dtjddSXdS)NTZDNF_DISABLE_ALIASESFz@Unexpected value of environment variable: DNF_DISABLE_ALIASES=%s) r r rrrosenvironrKeyError RuntimeErrorloggerwarningr)r rrrrr%Ws  zAliases._disabled_by_environcCsyt|Stk rB}ztjjtd||fWYdd}~Xn:tk rz}ztjjtd||fWYdd}~XnXdS)NzParsing file "%s" failed: %szCannot read file "%s": %s)rr+r exceptions ConfigErrorrIOError)r rerrr _load_confds"zAliases._load_confcCsVy|jt|_|jj|_Wn6tjjk rP}ztjt d|WYdd}~XnXdS)NzConfig error: %s) r2ALIASES_CONF_PATHr rrr.r/r,debugr)r r1rrrr&ns  zAliases._load_mainNcCs|dkr.y |j}Wntjjk r,dSXxf|D]^}y"|j|}|jrX|jj|jWq4tjjk r}ztj t d|WYdd}~Xq4Xq4WdS)NzConfig error: %s) _dropin_dir_filenamesrr.r/r2rrupdater,r-r)r filenamesfilenamer r1rrrr'us   zAliases._load_aliasescstjjttjjtgfdd}g}yPtjjts@tjtx4ttj tD]"}||r^qP|j tjj t|qPWWn2t t fk r}ztjj|WYdd}~XnXtjjtr|j t|S)Ncs|kp|jdp|jd S)N..conf.CONF)r:r;) startswithendswith)r8)ignored_filenamesrr_ignore_filenames 
z7Aliases._dropin_dir_filenames.._ignore_filename)r(rbasenamer3ALIASES_USER_PATHexistsALIASES_DROPIN_DIRmkdirsortedlistdirappendjoinr0OSErrorrr.r/)r r?r7fnr1r)r>rr5s       zAliases._dropin_dir_filenamescs:gg_fddfdd|}j|S)NcsNd}x&|D]}|r |ddkr P|d7}q Wj|d|7_||dS)Nr-)prefix_options)argsZnumarg)r rr store_prefixs  z&Aliases._resolve..store_prefixc s|}| s*|djks*|djdrry.j|djdrV|ddd|d<Wntk rlYnX|S|dkrtjjtdj|dj|d}|r||ddS|ddSdS)Nr\rLz"Aliases contain infinite recursion) rr<poprrr.ErrorrrG)rNsuffixZcurrent_alias_result)r stackrP subresolverrrVs&  z$Aliases._resolve..subresolve)rM)r rNrTr)r rUrPrVr_resolves  zAliases._resolvecCsP|jrLy|j|}Wn6tjjk rJ}ztjtd|WYdd}~XnX|S)Nz%s, using original arguments.)rrWrr.rSr,errorr)r rNr1rrrresolves "zAliases.resolve)N) r r!r"rr%r2r&r'r5rWrYrrrrr$Fs   /r$)Z __future__rrZdnf.i18nrrZdnf.clirZdnf.conf.configrZdnf.exceptionsZ libdnf.confr Zloggingr(Zos.pathZ getLoggerr,rCrrHr3rAobjectrr$rrrrs     PK!./<<&cli/__pycache__/aliases.cpython-36.pycnu[3 ft`@sddlmZddlmZddlmZddlZddlZddlm Z ddl Zddl Z ddl Z ddlZddlZe jdZdZejjedZejjed ZGd d d eZGd d d eZdS))absolute_import)unicode_literals)_N) PRIO_DEFAULTdnfz/etc/dnf/aliases.d/z ALIASES.confz USER.confc@s,eZdZddZeddZeddZdS) AliasesConfigcCs$||_tjj|_|jj|jdS)N)_pathlibdnfconfZ ConfigParser_parserread)selfpathr/usr/lib/python3.6/aliases.py__init__*s zAliasesConfig.__init__c CsHtjjd}y|jt|jjddWntk r>YnX|jS)NTmainenabled) r r OptionBoolsetrr ZgetData IndexErrorgetValue)r optionrrrr/s  zAliasesConfig.enabledcCsVtj}d}|jj|s|Sx4|jj|D]$}|jj||}|sBq*|j||<q*W|S)Naliases) collections OrderedDictr Z hasSectionZoptionsrsplit)r resultZsectionkeyvaluerrrr8s zAliasesConfig.aliasesN)__name__ __module__ __qualname__rpropertyrrrrrrr)s rc@sNeZdZddZddZddZddZdd d Zd d ZddZ ddZ d S)AliasescCsFtj|_d|_d|_|jr(d|_dS|j|js:dS|jdS)NTF)rrrr r_disabled_by_environ _load_main _load_aliases)r rrrrGs zAliases.__init__c Cshtjjd}y|jttjd|jStk r:dSt k rbt j t dtjddSXdS)NTZDNF_DISABLE_ALIASESFz@Unexpected value of environment variable: DNF_DISABLE_ALIASES=%s) r r rrrosenvironrKeyError RuntimeErrorloggerwarningr)r rrrrr%Ws  zAliases._disabled_by_environcCsyt|Stk rB}ztjjtd||fWYdd}~Xn:tk rz}ztjjtd||fWYdd}~XnXdS)NzParsing file "%s" failed: %szCannot read file "%s": %s)rr+r exceptions ConfigErrorrIOError)r rerrr _load_confds"zAliases._load_confcCsVy|jt|_|jj|_Wn6tjjk rP}ztjt d|WYdd}~XnXdS)NzConfig error: %s) r2ALIASES_CONF_PATHr rrr.r/r,debugr)r r1rrrr&ns  zAliases._load_mainNcCs|dkr.y |j}Wntjjk r,dSXxf|D]^}y"|j|}|jrX|jj|jWq4tjjk r}ztj t d|WYdd}~Xq4Xq4WdS)NzConfig error: %s) _dropin_dir_filenamesrr.r/r2rrupdater,r-r)r filenamesfilenamer r1rrrr'us   zAliases._load_aliasescstjjttjjtgfdd}g}yPtjjts@tjtx4ttj tD]"}||r^qP|j tjj t|qPWWn2t t fk r}ztjj|WYdd}~XnXtjjtr|j t|S)Ncs|kp|jdp|jd S)N..conf.CONF)r:r;) startswithendswith)r8)ignored_filenamesrr_ignore_filenames z7Aliases._dropin_dir_filenames.._ignore_filename)r(rbasenamer3ALIASES_USER_PATHexistsALIASES_DROPIN_DIRmkdirsortedlistdirappendjoinr0OSErrorrr.r/)r r?r7fnr1r)r>rr5s       zAliases._dropin_dir_filenamescs:gg_fddfdd|}j|S)NcsNd}x&|D]}|r |ddkr P|d7}q Wj|d|7_||dS)Nr-)prefix_options)argsZnumarg)r rr store_prefixs  z&Aliases._resolve..store_prefixc s|}| s*|djks*|djdrry.j|djdrV|ddd|d<Wntk rlYnX|S|dkrtjjtdj|dj|d}|r||ddS|ddSdS)Nr\rLz"Aliases contain infinite recursion) rr<poprrr.ErrorrrG)rNsuffixZcurrent_alias_result)r stackrP subresolverrrVs&  z$Aliases._resolve..subresolve)rM)r rNrTr)r rUrPrVr_resolves  zAliases._resolvecCsP|jrLy|j|}Wn6tjjk rJ}ztjtd|WYdd}~XnX|S)Nz%s, using original arguments.)rrWrr.rSr,errorr)r rNr1rrrresolves 
"zAliases.resolve)N) r r!r"rr%r2r&r'r5rWrYrrrrr$Fs   /r$)Z __future__rrZdnf.i18nrrZdnf.clirZdnf.conf.configrZdnf.exceptionsZ libdnf.confr Zloggingr(Zos.pathZ getLoggerr,rCrrHr3rAobjectrr$rrrrs     PK!Prixix(cli/__pycache__/cli.cpython-36.opt-1.pycnu[3 f @stdZddlmZddlmZddlmZyddlmZWn ek rXddlmZYnXddl Z ddl Z ddl Z ddl Z ddl Z ddlZddlZddlZddlZddlZddlmZdd lmZdd lmZmZddlZddlZddlZddlZddlZddl Zddl!Zddl"Zddl#Zddl$Zddl%Zddl&Zddl'Zddl(Zddl)Zddl*Zddl+Zddl,Zddl-Zddl.Zddl/Zddl0Zddl1Zddl2Zddl3Zddl4Zddl5Zddl6Zddl7Zddl8Zddl9Zddl:Zddl;ZddlZddl?Zddl@ZddlAZddlBZddlCZddlDZe jEd ZFdd dZGddZHddZIddZJGdddejKZLGdddeMZNdS)z/ Command line interface yum class and related. )print_function)absolute_import)unicode_literals)SequenceN)output)CliError)ucd_dnfcCst|jdt|jt|}t|j}t|j}xFd|fd|fd|ffD],\}}||j|d|||d7<qLWdS)zl Get the length of each pkg's column. Add that to data. This "knows" about simpleList and printVer. rnaverridrN)lennamearchZevrZ _from_repo setdefault)datapkgindentr rrdvr/usr/lib/python3.6/cli.py_add_pkg_simple_list_lens]s     rcCsiiid}x<|j|j|j|j|j|jfD]}x|D]}t||q4Wq*Wt|jdkrx*|j D] \}}t||t||d dq`W|d|d|dg}|j |d d }|d |d  |d  fS) zA Work out the dynamic size of the columns to pass to fmtColumns. )r rrr )rr rrr)Zremainder_columnz ) installed availableextras autoremoveupdatesrecentrr obsoletesobsoletesTuplesZ calcColumns)ryplrZlstrZnpkgZopkgcolumnsrrr_list_cmd_calc_columnshs   r)c Csdd}tjj|}d}x|jjj|dD]}|r>tdd}|jdkrbd|j|j |j f}nd |j|j|j |j f}|j j |j }ttd ||||jfttd |jr|jnd||jfq.WdS) NcSstjdtj|S)Nz%c)timestrftimeZgmtime)xrrr sm_ui_timezsz"print_versions..sm_ui_timeF)rr T0z%s-%s.%sz %s:%s-%s.%sz Installed: %s-%s at %sz Built : %s at %s)r sack rpmdb_sackqueryrfiltermprintZepochversionreleasertermboldrr Z installtimeZpackagerZ buildtime) pkgsbaserr-r0donerrrrrrprint_versionsys    r;cCs>td}x0|jD]$\}}tj|j||d|dqWdS)NzTThe operation would result in switching of module '{0}' stream '{1}' to stream '{2}'rr)r itemsloggerwarningformat)switchedModulesZmsg1Z moduleNameZstreamsrrrreport_module_switchsrAcseZdZdZd fdd Zfffdd ZddZd d Zd d Zd dZ fdddfddZ ddZ ggdfddZ dfdfddZ d!ddZfddZddZZS)"BaseCliz#This is the base class for yum cli.Ncs4|p tjj}tt|j|dtj||j|_dS)N)conf)r rCZConfsuperrB__init__rZOutput)selfrC) __class__rrrEszBaseCli.__init__cstjjrJ|jjsJt|jj}|rJt|t dj tj j d}tj j||j}|jj|}|rjtj||rg}g}d}xF|D]>} | jtjjkr|j| jq| jtjjkrd}|j| jqW|`|s|jj|n|jj|||s|jjs|jr|jjs|jjr|jj r:tjt dj tj j!dn(d|jj"krbtjt dj tj j!d|j#r|jj$s|jj% rt&t dntjt d d S|rD|r:tjt d y|jj'} |j(||jj)| Wn\tj j*k r8} z:tj+j j,t-| } t d d | } t.tj j| WYd d } ~ XnX|j/||jj rRd St0|t1sd|g}tj2gt3|}t4t5|j6|}|d k r|j7j8|gd}tj9jj:|j7|j;}nd }|rt.t.dj<|jj=|t.x.|D]&} | j>t?jj@krtj jt dqW|S)zTake care of package downloading, checking, user confirmation and actually running the transaction. :param display: `rpm.callback.TransactionProgress` object(s) :return: history database transaction ID or None aQIt is not possible to switch enabled streams of a module unless explicitly enabled via configuration option module_stream_switch. It is recommended to rather remove all installed content from the module, and reset the module using '{prog} module reset ' command. 
After you reset the module, you can install the other stream.)progTFz7{prog} will only download packages for the transaction.ZtestzP{prog} will only download packages, install gpg keys, and check the transaction.zOperation aborted.zNothing to do.NzDownloading Packages:zError downloading packages:z %sr zTransaction failed)Ar r9Z WITH_MODULESrCZmodule_stream_switchdictZ_moduleContainerZgetSwitchedStreamsrAr r?util MAIN_PROG exceptionsErrorZ transactionrZlist_transactionr=infoactionZFORWARD_ACTIONSappendrZBACKWARD_ACTIONSZ_tsZreportRemoveSizeZreportDownloadSizeZ isChangedZ_historygroupenv downloadonlyMAIN_PROG_UPPERZtsflags _promptWantedassumeno userconfirmrZdownload_callback_total_cbZdownload_packagesprogressZ DownloadErrorcliZ indent_blockr r3 gpgsigcheck isinstancerZCliTransactionDisplaylistrDrBdo_transactionhistoryoldZdbZRPMTransactionZ_transjoinZpost_transaction_outputstatelibdnfZTransactionItemState_ERROR)rFZdisplayr@msgZtransZpkg_strZ install_pkgsZrmpkgsZ install_onlyZtsiZtotal_cbeZspecificZerrstrtid)rGrrr^s              zBaseCli.do_transactionc sg}x|D]}j|\}}|dkr(q q |dkrĈjjo@jj }tj sVtjj rl| rltjj t dfdd}yj ||Wqtjj t fk r}z|j t|WYdd}~XqXq |j |q W|rx|D]} tj| qWtjj t ddS)aPerform GPG signature verification on the given packages, installing keys if possible. :param pkgs: a list of package objects to verify the GPG signatures of :raises: Will raise :class:`Error` if there's a problem rrzTRefusing to automatically import keys when running unattended. Use "-y" to override.cs jjS)N)rrX)r,yz)rFrr$sz%BaseCli.gpgsigcheck..NzGPG check FAILED)Z_sig_check_pkgrC assumeyesrWsysstdinisattyr rMrNr Z_get_key_for_package ValueErrorrQstrr=critical) rFr8Zerror_messagesporesulterrmsgZayfnrerdr)rFrr[ s&  " zBaseCli.gpgsigcheckcsXdx:|jjjd|jD]$}|tj}|rtjj|dPqWfdd|j D}|S)zBReturn list of changelogs for package newer then installed versionNrrcs$g|]}dks|dkr|qS)N timestampr).0Zchlog)newestrr =sz-BaseCli.latest_changelogs..) Z_rpmconnZ readonly_tsZdbMatchrrpmZRPMTAG_CHANGELOGTIMEdatetimeZdateZ fromtimestamp changelogs)rFpackageZmiZchangelogtimesZchlogsr)rwrlatest_changelogs3s zBaseCli.latest_changelogscCs4d|djdtjj|dtjj|df}|S)z*Return changelog formatted as in spec filez * %s %s %s ruz%a %b %d %X %YZauthortext)r+r Zi18nr )rFZ changelogZ chlog_strrrrformat_changelogAs  zBaseCli.format_changelogcCst}x&|D]}|j|jp|jgj|q Wxdt|jD]T}||}ttdj dj dd|Dx$|j |dD]}t|j |qzWq.r) rJr source_namerrQsortedkeysr3r r?rar}r)rFZpackagesZbysrpmprZ bin_packagesZchlrrrprint_changelogsIs "zBaseCli.print_changelogsTFc CsR|jd||d}|jjs |jjr@|jd||d}|j|_|j|_|rDt|j|}t|jdkri}|jj j d} | rx>t |jD]0} | j } t jj| r| jr| || j| jf<qW|jj} |jj} |jj|jdd||| | dd |r|j|jt|jdkrDttd x0t |jtjdd D]}|jj|d|d q(W|jpP|jS) z?Check updates matching given *patterns* in selected repository.Zupgrades)reponamer%rr7r r])=znot in)Z outputType highlight_nar(highlight_modeszObsoleting Packages)key)r()returnPkgListsrCr%verboser&r)rrr#r6MODErZlocalPkgospathexistsZverifyLocalPkgrrcolor_update_localcolor_update_remotelistPkgsrr3r operator itemgetterupdatesObsoletesList)rFpatternsrZprint_r{r'Ztyplr( local_pkgs highlightrqZlocalculcurobtuprrr check_updatesUs:    zBaseCli.check_updatescCsr|jj}t|dkr |jnx|D]}|j|q&W|jj|}|dkrn|jj rntd}tjj|dS)ab Upgrade or downgrade packages to match the latest versions available in the enabled repositories. 
:return: (exit_code, [ errors ]) exit_code is:: 0 = we're done, exit 1 = we've errored, exit with error string 2 = we've got work yet to do, onto the next stage rz4No packages marked for distribution synchronization.N) Z_goalZ req_lengthrZ distro_syncZreq_has_distupgrade_allr r rMrN)rFZuserlistZoldcountpkg_specZcntrdrrrdistro_sync_userlist{s    zBaseCli.distro_sync_userlistc CsJd}xf|D]^}y|j||dd}Wq tjjk rf}z"tjtd|jjj |j WYdd}~Xq Xq Wx|D]}y|j ||dd}Wqrtjj k r}z$td} tj| |jjj |WYdd}~Xqrtjj k r}z"tjtd|jjj |jWYdd}~Xqrtjjk r*YqrXqrW|sFtjjtddS) aaAttempt to take the user specified list of packages or wildcards and downgrade them. If a complete version number is specified, attempt to downgrade them to the specified version :param specs: a list of names or wildcards specifying packages to downgrade :param file_pkgs: a list of pkg objects from local files F)strictTzNo match for argument: %sNzNo package %s available.z6Packages for argument %s available, but not installed.z!No packages marked for downgrade.)Zpackage_downgrader rMZ MarkingErrorr=rOr rr6r7locationZ downgrade_toZPackageNotFoundErrorZPackagesNotInstalledErrorrrN) rFZspecsZ file_pkgsrrrrreargerrrdrrr downgradePkgss,   ( & " zBaseCli.downgradePkgsallc!CsDy$|jjjd}|j||||d}Wn0tjjk rT}zdt|gfSd}~XnXi}i} i} d} |dkrzt|j|} |r|j rxB|j |j |j D],} | j | jf} | |ks| || kr| || <qW|o|jrx8|jD].} | j | jf} | | ks| | | kr| | | <qW|rP|jrPx2t|jD]$}|jtjkr(|| |j |jf<q(W|jj}|jj}|jj}|jj}|jj|j td||| ||||dd}|jj}|jj}|jj}|jj }|jj|jtd || | ||||d d}|jj|j!td || d }|jj|j"td || d }|jj#}|jj$}|jj|jtd|| | ||dd}t%|j&dkr|dkrt%|j&}t'tdxLt|j(t)j*ddD]}|jj+|d| d qWn|jj|j&td|| d }|jj|j,td|| d } t%|r@| dkr@|dkr@|dkr@|dkr@|dkr@|dkr@|dkr@tjjtddS)zJOutput selection *pkgnarrow* of packages matching *patterns* and *repoid*.r7)installed_availablerrNr]zInstalled Packages)>r9 cli_commandscommandr rZZdemandZ DemandSheetdemandsregister_commandZcommandsaliasZ AliasCommandr"ZAutoremoveCommandZcheckZ CheckCommandZcleanZ CleanCommandZ distrosyncZDistroSyncCommandZdeplistZDeplistCommandZ downgradeZDowngradeCommandrRZ GroupCommandr_ZHistoryCommandZinstallZInstallCommandZ makecacheZMakeCacheCommandZmarkZ MarkCommandmoduleZ ModuleCommandZ reinstallZReinstallCommandremoveZ RemoveCommandrepolistZRepoListCommandZ repoqueryZRepoQueryCommandsearchZ SearchCommandshellZ ShellCommandZswapZ SwapCommandZ updateinfoZUpdateInfoCommandZupgradeZUpgradeCommandZupgrademinimalZUpgradeMinimalCommandZ InfoCommandZ ListCommandZProvidesCommandZCheckUpdateCommandZRepoPkgsCommandZ HelpCommand)rFr9rrrrEfsBz Cli.__init__cCs|jj||jr^xJ|jjD]<\}}|jjj||jj|gd}|j||jj |dfqW|j r|jj dd |jj dd|j Dt }yzxt|jD]j\}}|jjj|}|s|jjjr|dkrtd} tjj| ||j||dkr|jq|jqWWnFtjjk rP} z$tj| |jjtjd WYdd} ~ XnXx|D]}tjtd |qXW|jjj } | dkr|jjj!} x,| D]$} |jjj"| }|r|j#j$qW|jj%j&\} |j_'|jjj(j)| t%j*|j|jj%}|jjj(j+|dS) N)Zbaseurlenabler*disablecSsg|] }|dfqS)rr)rvrrrrrxsz(Cli._configure_repos..zUnknown repo: '%s'rzNo repository match: %s)rr),r9Zread_all_reposZ repofrompathr<reposZ add_new_reporC_configure_from_optionsZrepos_edrQrepoinsertrsetZ get_matchingrr r rMZ RepoErroraddrr ConfigErrorr=rp optparser print_helprkexitr>Z_repo_persistorZget_expired_reposrget_repoexpirerZsetup_progress_callbacksZ _ds_callbackrset_progress_barZ CliKeyImportZ_set_key_import)rFoptsZlabelrZ this_repoZnotmatchrZ operationrrdreZ expired_reposrZbarZ key_importrrr_configure_repossL            zCli._configure_reposcCsvtjdjtjjdtjjtjtj j d|j tjtj j d|j j jtjtj j d|j j jtjd|j j jdS)Nz{prog} version: %s)rHz Command: %szInstallroot: %szReleasever: %sz cachedir: %s)r=debugr?r 
rKrUconstVERSIONlogloggingDDEBUG cmdstringr9rC installroot releasevercachedir)rFrrr_log_essentialss      zCli._log_essentialscCs|j}|jj}|jr.tjjs.tjjt d|j rLx|j D] }d|_ q>W|j s\|jjj rd|jj_ xn|jD]}|jjtjjqpWnL|jrxD|j D]}|jjqWn(|jsx |jD]}|jjtjjqW|jr|jj|jjrdnd|jjddS)Nz[This command has to be run with superuser privileges (under the root user on most systems).TautoF)load_system_repoZload_available_repos)rr9rZ root_userr rKZ am_i_rootrMrNr r{ iter_enabledZload_metadata_other cacheonlyrCvaluesrZsetSyncStrategyrZSYNC_ONLY_CACHEfreshest_metadatarZfresh_metadataZ SYNC_LAZYZsack_activationZ fill_sackrZavailable_repos)rFrrrrrr_process_demandss.    zCli._process_demandscCs|j}|jj|}|dkr~tjtd|tjd|jj j r`tjtdj t j jt j jd|ntjtdj t j jdt|||_tjt jjd|tjt jjd |dS) z,Check that the requested CLI command exists.Nz)No such command: %s. Please use %s --helprzLIt could be a {PROG} plugin command, try: "{prog} install 'dnf-command(%s)'")rHZPROGzRIt could be a {prog} plugin command, but loading of plugins is currently disabled.)rHzBase command: %szExtra commands: %s)rrrr=rpr rkargvr9rCZpluginsr?r rKrLrUrrrr)rFrrr command_clsrrr_parse_commandss      zCli._parse_commandsNc Cstjjj}|j|}|dkr*tjjjn||_|jj|}|j rpt tj j t |jjj|j|jjtjd|jrd|_d|_|jrtj j|_|_yh|jr|jjjd|jjjtjjd|j_|jjj||j|j d|kr|j!|jj_!|jjj"Wntj#j$t%fk rF}z t&j't(d|tjdWYdd}~XnXt)k r}z:d t*t+|t,|j-f}t&j't(d|tjdWYdd}~XnX|j.dk r|j.|jj_.|jjj/ r|j0dkrt&j't(dtjd|j1s|j2r|j0dkrt&j't(dtjd|j3dk r>t4j5t6j7|j3d|jj8|j9d|jj:|j;|j<||jj8|j9d|j0s|jj=tjd||j_>|jj?d|_@x$|jj>D]}|j@d|7_@qW|jAy|jB||Wn tCk rtjdYnX|jDr$|jj=|j0tjd|jjE|j0|}|jFrN|jF|j_Gd|j_H|jIr`|jI|j_I|jJrrd|jj_K|jLrd|jj_L|j0jM|jjN|jjO|jP||jjQ|jjj||j0jR|jjj.rtjSjT|jjj.|jjj.|jjUjV_W|jjjXdkr(|jjjYjZ|jjjXdt[j\ddkrd}x,|jjUj]D]}|j^rZqJd|_^d}qJW|jjj_sd|jj__d}|rt&j`t(ddS)aParse command line arguments, and set up :attr:`self.base.conf` and :attr:`self.cmds`, as well as logger objects in base instance. 
:param args: a list of command line arguments :param option_parser: a class for parsing cli options NrrrTrzConfig error: %srz%s: %sdownloadsystem-upgradereposync modulesynczb--destdir or --downloaddir must be used with --downloadonly or download or system-upgrade command.zconfig-managerz_--enable, --set-enabled and --disable, --set-disabled must be used with config-manager command.<mainZpluginrz%s r)colorz%_pkgverify_level signaturerFzWarning: Enforcing GPG signature check globally as per active RPM security policy (see 'gpgcheck' in dnf.conf(5) for how to squelch this message))rrrr)rr)ar rZaliasesZAliasesZresolve option_parserZ OptionParserrZparse_main_argsr4r3rrr;r9rCZhistory_record_packagesrrkrquietZ debuglevelZ errorlevelrZ VERBOSE_LEVELrZ _set_valueZsystem_cachedirZ PRIO_DEFAULTrr_read_conf_filerrZ_adjust_conf_optionsrMrrnr=rpr IOErrorr roreprfilenameZdestdirrTrZ set_enabledZ set_disabledZ sleeptimer*ZsleeprandomZ randrangeZ add_commandsrZ init_pluginsZ disablepluginZ enablepluginrrrHrrrrhelpZparse_command_argsZ allowerasingZ allow_erasingZ_allow_erasingrZ debugsolverZ debug_solverr%Z pre_configureZpre_configure_pluginsZ_activate_persistorrZconfigure_plugins configurerKZ ensure_dirrrZpkgdirrr6ZreinitryZ expandMacrorZgpgcheckZlocalpkg_gpgcheckr>) rFrrrrrerZforcingrrrrrs                                z Cli.configurecCsBtjjd}|jj}|jd|jd|jd}|jdtjj krht j j | rhtj jtdj||jtjjd|jd}|jdtjj krd}|j}|j||jdd|dkr|jdkrtjj|j}n|dkrtjj|}|dk r||_|jdkrtjtd xd D]}|j|qW|jjj|||S)NconfigZconfig_file_pathzConfig file "{}" does not exist)ZpriorityZreposdirvarsdir/)rzPUnable to detect release version (use '--releasever' to specify release version)rlogdir persistdir)rrr)r rZTimerr9rCZ_check_remote_fileZ_search_inside_installrootZ _get_valueZ _get_priorityZPRIO_COMMANDLINErrisfilerMrr r?readZPRIO_MAINCONFIGZ substitutionsZupdate_from_etcrryZdetect_releaseverrr=r>Zprepend_installroot_loggingZ_setup_from_dnf_conf)rFrZtimerrCrZ from_rootZsubstoptrrrrs6        zCli._read_conf_fileeqcCs|dkr|dkrdSg}|js"|r,|jd|js6|r@|jd|jsJ|rT|jd|js^|rh|jd|jj|||j|j|j |j ddS)zz :param opts: :param cmp_type: string supported "eq", "gte" :param all: :return: Nbugfix enhancement newpackagesecurity)typesadvisorybugzillacvesseverity) r rQr r r r9Zadd_security_filtersrrrr)rFrZcmp_typerr rrr _populate_update_security_filters        z$Cli._populate_update_security_filtercCs4|dk r|jjjj||dk r0|jjjj|dS)z Change minimal logger level for terminal output to stdout and stderr according to specific command requirements @param stdout: logging.INFO, logging.WARNING, ... @param stderr:logging.INFO, logging.WARNING, ... N)r9rZstdout_handlerZsetLevelZstderr_handler)rFstdoutstderrrrrredirect_loggerszCli.redirect_loggercCs.tjjj|}||jj_|jjjj|dS)N) r rZrYZMultiFileProgressMeterr9rrrr)rFZforYrrrredirect_repo_progresss zCli.redirect_repo_progresscCs|jjj}|dkrdS|jjjj|jd}|j}|jdd|}x|D]}||krL|}qLW||krtd|td|dS)N)rr )Z advisory_typez,Security: %s is an installed security updatez-Security: %s is the currently running version)r9r/Zget_running_kernelr1r2rrr3)rFZkernelqZikpkgrrrr_check_running_kernels    zCli._check_running_kernelcCs*t|jjtjjtdj||dS)Nz)argument {}: not allowed with argument {})r3rZ print_usager rMrNr r?)rFZoption_string_1Zoption_string_2rrr_option_conflicts zCli._option_conflictcCs<x6|jD],}||jkr*tjjtd|||j|<qWdS)zRegister a Command. 
:apizCommand "%s" already definedN)rrr rMrr )rFrrrrrrs  zCli.register_commandcCs|j|jjjr8tjtddjtt |jjj|jjj rhtjtddjtt |jjj xx|jj j D]h}|jrtjtd|j ddjtt |j|j rvtjtd|j ddjtt |j qvW|jjS)a2Call the base command, and pass it the extended commands or arguments. :return: (exit_code, [ errors ]) exit_code is:: 0 = we're done, exit 1 = we've errored, exit with error string 2 = we've got work yet to do, onto the next stage zExcludes in dnf.conf: z, zIncludes in dnf.conf: zExcludes in repo z: zIncludes in repo )rr9rCZ excludepkgsr=rr rarrZ includepkgsrridrrun)rFrrrrrs  " "(,zCli.run)N)N)rN)NN)rrrrErrrrrrrrrkrrrrrrrrrrres$3   -  r)r )OrZ __future__rrrcollections.abcr ImportError collectionsrzrrrrryrkr*rZlibdnf.transactionrcr rZdnf.clirZdnf.i18nr r r Zdnf.cli.aliasesZdnf.cli.commandsZdnf.cli.commands.aliasZdnf.cli.commands.autoremoveZdnf.cli.commands.checkZdnf.cli.commands.cleanZdnf.cli.commands.deplistZdnf.cli.commands.distrosyncZdnf.cli.commands.downgradeZdnf.cli.commands.groupZdnf.cli.commands.historyZdnf.cli.commands.installZdnf.cli.commands.makecacheZdnf.cli.commands.markZdnf.cli.commands.moduleZdnf.cli.commands.reinstallZdnf.cli.commands.removeZdnf.cli.commands.repolistZdnf.cli.commands.repoqueryZdnf.cli.commands.searchZdnf.cli.commands.shellZdnf.cli.commands.swapZdnf.cli.commands.updateinfoZdnf.cli.commands.upgradeZdnf.cli.commands.upgrademinimalZdnf.cli.demandZdnf.cli.formatZdnf.cli.option_parserZdnf.confZdnf.conf.substitutionsZ dnf.constZdnf.db.historyZdnf.exceptionsZ dnf.loggingZ dnf.persistorZ dnf.pluginZdnf.rpmZdnf.sackZdnf.transactionZdnf.utilZ dnf.yum.miscZ getLoggerr=rr)r;rAZBaserBobjectrrrrrs       OPK!exx"cli/__pycache__/cli.cpython-36.pycnu[3 f @stdZddlmZddlmZddlmZyddlmZWn ek rXddlmZYnXddl Z ddl Z ddl Z ddl Z ddl Z ddlZddlZddlZddlZddlZddlmZdd lmZdd lmZmZddlZddlZddlZddlZddlZddl Zddl!Zddl"Zddl#Zddl$Zddl%Zddl&Zddl'Zddl(Zddl)Zddl*Zddl+Zddl,Zddl-Zddl.Zddl/Zddl0Zddl1Zddl2Zddl3Zddl4Zddl5Zddl6Zddl7Zddl8Zddl9Zddl:Zddl;ZddlZddl?Zddl@ZddlAZddlBZddlCZddlDZe jEd ZFdd dZGddZHddZIddZJGdddejKZLGdddeMZNdS)z/ Command line interface yum class and related. )print_function)absolute_import)unicode_literals)SequenceN)output)CliError)ucd_dnfcCst|jdt|jt|}t|j}t|j}xFd|fd|fd|ffD],\}}||j|d|||d7<qLWdS)zl Get the length of each pkg's column. Add that to data. This "knows" about simpleList and printVer. rnaverridrN)lennamearchZevrZ _from_repo setdefault)datapkgindentr rrdvr/usr/lib/python3.6/cli.py_add_pkg_simple_list_lens]s     rcCsiiid}x<|j|j|j|j|j|jfD]}x|D]}t||q4Wq*Wt|jdkrx*|j D] \}}t||t||d dq`W|d|d|dg}|j |d d }|d |d  |d  fS) zA Work out the dynamic size of the columns to pass to fmtColumns. 
)r rrr )rr rrr)Zremainder_columnz ) installed availableextras autoremoveupdatesrecentrr obsoletesobsoletesTuplesZ calcColumns)ryplrZlstrZnpkgZopkgcolumnsrrr_list_cmd_calc_columnshs   r)c Csdd}tjj|}d}x|jjj|dD]}|r>tdd}|jdkrbd|j|j |j f}nd |j|j|j |j f}|j j |j }ttd ||||jfttd |jr|jnd||jfq.WdS) NcSstjdtj|S)Nz%c)timestrftimeZgmtime)xrrr sm_ui_timezsz"print_versions..sm_ui_timeF)rr T0z%s-%s.%sz %s:%s-%s.%sz Installed: %s-%s at %sz Built : %s at %s)r sack rpmdb_sackqueryrfiltermprintZepochversionreleasertermboldrr Z installtimeZpackagerZ buildtime) pkgsbaserr-r0donerrrrrrprint_versionsys    r;cCs>td}x0|jD]$\}}tj|j||d|dqWdS)NzTThe operation would result in switching of module '{0}' stream '{1}' to stream '{2}'rr)r itemsloggerwarningformat)switchedModulesZmsg1Z moduleNameZstreamsrrrreport_module_switchsrAcseZdZdZd fdd Zfffdd ZddZd d Zd d Zd dZ fdddfddZ ddZ ggdfddZ dfdfddZ d!ddZfddZddZZS)"BaseCliz#This is the base class for yum cli.Ncs4|p tjj}tt|j|dtj||j|_dS)N)conf)r rCZConfsuperrB__init__rZOutput)selfrC) __class__rrrEszBaseCli.__init__cstjjrJ|jjsJt|jj}|rJt|t dj tj j d}tj j||j}|jj|}|rjtj||rg}g}d}xF|D]>} | jtjjkr|j| jq| jtjjkrd}|j| jqW|`|s|jj|n|jj|||s|jjs|jr|jjs|jjr|jj r:tjt dj tj j!dn(d|jj"krbtjt dj tj j!d|j#r|jj$s|jj% rt&t dntjt d d S|rD|r:tjt d y|jj'} |j(||jj)| Wn\tj j*k r8} z:tj+j j,t-| } t d d | } t.tj j| WYd d } ~ XnX|j/||jj rRd St0|t1sd|g}tj2gt3|}t4t5|j6|}|d k r|j7j8|gd}tj9jj:|j7|j;}nd }|rt.t.dj<|jj=|t.x.|D]&} | j>t?jj@krtj jt dqW|S)zTake care of package downloading, checking, user confirmation and actually running the transaction. :param display: `rpm.callback.TransactionProgress` object(s) :return: history database transaction ID or None aQIt is not possible to switch enabled streams of a module unless explicitly enabled via configuration option module_stream_switch. It is recommended to rather remove all installed content from the module, and reset the module using '{prog} module reset ' command. After you reset the module, you can install the other stream.)progTFz7{prog} will only download packages for the transaction.ZtestzP{prog} will only download packages, install gpg keys, and check the transaction.zOperation aborted.zNothing to do.NzDownloading Packages:zError downloading packages:z %sr zTransaction failed)Ar r9Z WITH_MODULESrCZmodule_stream_switchdictZ_moduleContainerZgetSwitchedStreamsrAr r?util MAIN_PROG exceptionsErrorZ transactionrZlist_transactionr=infoactionZFORWARD_ACTIONSappendrZBACKWARD_ACTIONSZ_tsZreportRemoveSizeZreportDownloadSizeZ isChangedZ_historygroupenv downloadonlyMAIN_PROG_UPPERZtsflags _promptWantedassumeno userconfirmrZdownload_callback_total_cbZdownload_packagesprogressZ DownloadErrorcliZ indent_blockr r3 gpgsigcheck isinstancerZCliTransactionDisplaylistrDrBdo_transactionhistoryoldZdbZRPMTransactionZ_transjoinZpost_transaction_outputstatelibdnfZTransactionItemState_ERROR)rFZdisplayr@msgZtransZpkg_strZ install_pkgsZrmpkgsZ install_onlyZtsiZtotal_cbeZspecificZerrstrtid)rGrrr^s              zBaseCli.do_transactionc sg}x|D]}j|\}}|dkr(q q |dkrĈjjo@jj }tj sVtjj rl| rltjj t dfdd}yj ||Wqtjj t fk r}z|j t|WYdd}~XqXq |j |q W|rx|D]} tj| qWtjj t ddS)aPerform GPG signature verification on the given packages, installing keys if possible. :param pkgs: a list of package objects to verify the GPG signatures of :raises: Will raise :class:`Error` if there's a problem rrzTRefusing to automatically import keys when running unattended. 
Use "-y" to override.cs jjS)N)rrX)r,yz)rFrr$sz%BaseCli.gpgsigcheck..NzGPG check FAILED)Z_sig_check_pkgrC assumeyesrWsysstdinisattyr rMrNr Z_get_key_for_package ValueErrorrQstrr=critical) rFr8Zerror_messagesporesulterrmsgZayfnrerdr)rFrr[ s&  " zBaseCli.gpgsigcheckcsXdx:|jjjd|jD]$}|tj}|rtjj|dPqWfdd|j D}|S)zBReturn list of changelogs for package newer then installed versionNrrcs$g|]}dks|dkr|qS)N timestampr).0Zchlog)newestrr =sz-BaseCli.latest_changelogs..) Z_rpmconnZ readonly_tsZdbMatchrrpmZRPMTAG_CHANGELOGTIMEdatetimeZdateZ fromtimestamp changelogs)rFpackageZmiZchangelogtimesZchlogsr)rwrlatest_changelogs3s zBaseCli.latest_changelogscCs4d|djdtjj|dtjj|df}|S)z*Return changelog formatted as in spec filez * %s %s %s ruz%a %b %d %X %YZauthortext)r+r Zi18nr )rFZ changelogZ chlog_strrrrformat_changelogAs  zBaseCli.format_changelogcCst}x&|D]}|j|jp|jgj|q Wxdt|jD]T}||}ttdj dj dd|Dx$|j |dD]}t|j |qzWq.r) rJr source_namerrQsortedkeysr3r r?rar}r)rFZpackagesZbysrpmprZ bin_packagesZchlrrrprint_changelogsIs "zBaseCli.print_changelogsTFc CsR|jd||d}|jjs |jjr@|jd||d}|j|_|j|_|rDt|j|}t|jdkri}|jj j d} | rx>t |jD]0} | j } t jj| r| jr| || j| jf<qW|jj} |jj} |jj|jdd||| | dd |r|j|jt|jdkrDttd x0t |jtjdd D]}|jj|d|d q(W|jpP|jS) z?Check updates matching given *patterns* in selected repository.Zupgrades)reponamer%rr7r r])=znot in)Z outputType highlight_nar(highlight_modeszObsoleting Packages)key)r()returnPkgListsrCr%verboser&r)rrr#r6MODErZlocalPkgospathexistsZverifyLocalPkgrrcolor_update_localcolor_update_remotelistPkgsrr3r operator itemgetterupdatesObsoletesList)rFpatternsrZprint_r{r'Ztyplr( local_pkgs highlightrqZlocalculcurobtuprrr check_updatesUs:    zBaseCli.check_updatescCsr|jj}t|dkr |jnx|D]}|j|q&W|jj|}|dkrn|jj rntd}tjj|dS)ab Upgrade or downgrade packages to match the latest versions available in the enabled repositories. :return: (exit_code, [ errors ]) exit_code is:: 0 = we're done, exit 1 = we've errored, exit with error string 2 = we've got work yet to do, onto the next stage rz4No packages marked for distribution synchronization.N) Z_goalZ req_lengthrZ distro_syncZreq_has_distupgrade_allr r rMrN)rFZuserlistZoldcountpkg_specZcntrdrrrdistro_sync_userlist{s    zBaseCli.distro_sync_userlistc CsTd}xf|D]^}y|j||dd}Wq tjjk rf}z"tjtd|jjj |j WYdd}~Xq Xq Wx|D]}y|j ||dd}Wqrtjj k r}z$td} tj| |jjj |WYdd}~Xqrtjj k r}z"tjtd|jjj |jWYdd}~Xqrtjjk r4ds0tYqrXqrW|sPtjjtddS) aaAttempt to take the user specified list of packages or wildcards and downgrade them. 
If a complete version number is specified, attempt to downgrade them to the specified version :param specs: a list of names or wildcards specifying packages to downgrade :param file_pkgs: a list of pkg objects from local files F)strictTzNo match for argument: %sNzNo package %s available.z6Packages for argument %s available, but not installed.z!No packages marked for downgrade.)Zpackage_downgrader rMZ MarkingErrorr=rOr rr6r7locationZ downgrade_toZPackageNotFoundErrorZPackagesNotInstalledErrorrAssertionErrorrN) rFZspecsZ file_pkgsrrrrreargerrrdrrr downgradePkgss,   ( & "zBaseCli.downgradePkgsallc!CsDy$|jjjd}|j||||d}Wn0tjjk rT}zdt|gfSd}~XnXi}i} i} d} |dkrzt|j|} |r|j rxB|j |j |j D],} | j | jf} | |ks| || kr| || <qW|o|jrx8|jD].} | j | jf} | | ks| | | kr| | | <qW|rP|jrPx2t|jD]$}|jtjkr(|| |j |jf<q(W|jj}|jj}|jj}|jj}|jj|j td||| ||||dd}|jj}|jj}|jj}|jj }|jj|jtd || | ||||d d}|jj|j!td || d }|jj|j"td || d }|jj#}|jj$}|jj|jtd|| | ||dd}t%|j&dkr|dkrt%|j&}t'tdxLt|j(t)j*ddD]}|jj+|d| d qWn|jj|j&td|| d }|jj|j,td|| d } t%|r@| dkr@|dkr@|dkr@|dkr@|dkr@|dkr@|dkr@tjjtddS)zJOutput selection *pkgnarrow* of packages matching *patterns* and *repoid*.r7)installed_availablerrNr]zInstalled Packages)>r9 cli_commandscommandr rZZdemandZ DemandSheetdemandsregister_commandZcommandsaliasZ AliasCommandr"ZAutoremoveCommandZcheckZ CheckCommandZcleanZ CleanCommandZ distrosyncZDistroSyncCommandZdeplistZDeplistCommandZ downgradeZDowngradeCommandrRZ GroupCommandr_ZHistoryCommandZinstallZInstallCommandZ makecacheZMakeCacheCommandZmarkZ MarkCommandmoduleZ ModuleCommandZ reinstallZReinstallCommandremoveZ RemoveCommandrepolistZRepoListCommandZ repoqueryZRepoQueryCommandsearchZ SearchCommandshellZ ShellCommandZswapZ SwapCommandZ updateinfoZUpdateInfoCommandZupgradeZUpgradeCommandZupgrademinimalZUpgradeMinimalCommandZ InfoCommandZ ListCommandZProvidesCommandZCheckUpdateCommandZRepoPkgsCommandZ HelpCommand)rFr9rrrrEfsBz Cli.__init__cCs|jj||jr^xJ|jjD]<\}}|jjj||jj|gd}|j||jj |dfqW|j r|jj dd |jj dd|j Dt }yzxt|jD]j\}}|jjj|}|s|jjjr|dkrtd} tjj| ||j||dkr|jq|jqWWnFtjjk rP} z$tj| |jjtjd WYdd} ~ XnXx|D]}tjtd |qXW|jjj } | dkr|jjj!} x,| D]$} |jjj"| }|r|j#j$qW|jj%j&\} |j_'|jjj(j)| t%j*|j|jj%}|jjj(j+|dS) N)Zbaseurlenabler*disablecSsg|] }|dfqS)rr)rvrrrrrxsz(Cli._configure_repos..zUnknown repo: '%s'rzNo repository match: %s)rr),r9Zread_all_reposZ repofrompathr<reposZ add_new_reporC_configure_from_optionsZrepos_edrQrepoinsertrsetZ get_matchingrr r rMZ RepoErroraddrr ConfigErrorr=rp optparser print_helprkexitr>Z_repo_persistorZget_expired_reposrget_repoexpirerZsetup_progress_callbacksZ _ds_callbackrset_progress_barZ CliKeyImportZ_set_key_import)rFoptsZlabelrZ this_repoZnotmatchrZ operationrrdreZ expired_reposrZbarZ key_importrrr_configure_repossL            zCli._configure_reposcCsvtjdjtjjdtjjtjtj j d|j tjtj j d|j j jtjtj j d|j j jtjd|j j jdS)Nz{prog} version: %s)rHz Command: %szInstallroot: %szReleasever: %sz cachedir: %s)r=debugr?r rKrUconstVERSIONlogloggingDDEBUG cmdstringr9rC installroot releasevercachedir)rFrrr_log_essentialss      zCli._log_essentialscCs|j}|jj}|jr.tjjs.tjjt d|j rLx|j D] }d|_ q>W|j s\|jjj rd|jj_ xn|jD]}|jjtjjqpWnL|jrxD|j D]}|jjqWn(|jsx |jD]}|jjtjjqW|jr|jj|jjrdnd|jjddS)Nz[This command has to be run with superuser privileges (under the root user on most systems).TautoF)load_system_repoZload_available_repos)rr9rZ root_userr rKZ am_i_rootrMrNr r{ iter_enabledZload_metadata_other cacheonlyrCvaluesrZsetSyncStrategyrZSYNC_ONLY_CACHEfreshest_metadatarZfresh_metadataZ SYNC_LAZYZsack_activationZ fill_sackrZavailable_repos)rFrrrrrr_process_demandss.   
 zCli._process_demandscCs|j}|jj|}|dkr~tjtd|tjd|jj j r`tjtdj t j jt j jd|ntjtdj t j jdt|||_tjt jjd|tjt jjd |dS) z,Check that the requested CLI command exists.Nz)No such command: %s. Please use %s --helprzLIt could be a {PROG} plugin command, try: "{prog} install 'dnf-command(%s)'")rHZPROGzRIt could be a {prog} plugin command, but loading of plugins is currently disabled.)rHzBase command: %szExtra commands: %s)rrrr=rpr rkargvr9rCZpluginsr?r rKrLrUrrrr)rFrrr command_clsrrr_parse_commandss      zCli._parse_commandsNc Cstjjj}|j|}|dkr*tjjjn||_|jj|}|j rpt tj j t |jjj|j|jjtjd|jrd|_d|_|jrtj j|_|_yh|jr|jjjd|jjjtjjd|j_|jjj||j|j d|kr|j!|jj_!|jjj"Wntj#j$t%fk rF}z t&j't(d|tjdWYdd}~XnXt)k r}z:d t*t+|t,|j-f}t&j't(d|tjdWYdd}~XnX|j.dk r|j.|jj_.|jjj/ r|j0dkrt&j't(dtjd|j1s|j2r|j0dkrt&j't(dtjd|j3dk r>t4j5t6j7|j3d|jj8|j9d|jj:|j;|j<||jj8|j9d|j0s|jj=tjd||j_>|jj?d|_@x$|jj>D]}|j@d|7_@qW|jAy|jB||Wn tCk rtjdYnX|jDr$|jj=|j0tjd|jjE|j0|}|jFrN|jF|j_Gd|j_H|jIr`|jI|j_I|jJrrd|jj_K|jLrd|jj_L|j0jM|jjN|jjO|jP||jjQ|jjj||j0jR|jjj.rtjSjT|jjj.|jjj.|jjUjV_W|jjjXdkr(|jjjYjZ|jjjXdt[j\ddkrd}x,|jjUj]D]}|j^rZqJd|_^d}qJW|jjj_sd|jj__d}|rt&j`t(ddS)aParse command line arguments, and set up :attr:`self.base.conf` and :attr:`self.cmds`, as well as logger objects in base instance. :param args: a list of command line arguments :param option_parser: a class for parsing cli options NrrrTrzConfig error: %srz%s: %sdownloadsystem-upgradereposync modulesynczb--destdir or --downloaddir must be used with --downloadonly or download or system-upgrade command.zconfig-managerz_--enable, --set-enabled and --disable, --set-disabled must be used with config-manager command.<mainZpluginrz%s r)colorz%_pkgverify_level signaturerFzWarning: Enforcing GPG signature check globally as per active RPM security policy (see 'gpgcheck' in dnf.conf(5) for how to squelch this message))rrrr)rr)ar rZaliasesZAliasesZresolve option_parserZ OptionParserrZparse_main_argsr4r3rrr;r9rCZhistory_record_packagesrrkrquietZ debuglevelZ errorlevelrZ VERBOSE_LEVELrZ _set_valueZsystem_cachedirZ PRIO_DEFAULTrr_read_conf_filerrZ_adjust_conf_optionsrMrrnr=rpr IOErrorr roreprfilenameZdestdirrTrZ set_enabledZ set_disabledZ sleeptimer*ZsleeprandomZ randrangeZ add_commandsrZ init_pluginsZ disablepluginZ enablepluginrrrHrrrrhelpZparse_command_argsZ allowerasingZ allow_erasingZ_allow_erasingrZ debugsolverZ debug_solverr%Z pre_configureZpre_configure_pluginsZ_activate_persistorrZconfigure_plugins configurerKZ ensure_dirrrZpkgdirrr6ZreinitryZ expandMacrorZgpgcheckZlocalpkg_gpgcheckr>) rFrrrrrerZforcingrrrrrs                                z Cli.configurecCsBtjjd}|jj}|jd|jd|jd}|jdtjj krht j j | rhtj jtdj||jtjjd|jd}|jdtjj krd}|j}|j||jdd|dkr|jdkrtjj|j}n|dkrtjj|}|dk r||_|jdkrtjtd xd D]}|j|qW|jjj|||S)NconfigZconfig_file_pathzConfig file "{}" does not exist)ZpriorityZreposdirvarsdir/)rzPUnable to detect release version (use '--releasever' to specify release version)rlogdir persistdir)rrr)r rZTimerr9rCZ_check_remote_fileZ_search_inside_installrootZ _get_valueZ _get_priorityZPRIO_COMMANDLINErrisfilerMrr r?readZPRIO_MAINCONFIGZ substitutionsZupdate_from_etcrryZdetect_releaseverrr=r>Zprepend_installroot_loggingZ_setup_from_dnf_conf)rFrZtimerrCrZ from_rootZsubstoptrrrrs6        zCli._read_conf_fileeqcCs|dkr|dkrdSg}|js"|r,|jd|js6|r@|jd|jsJ|rT|jd|js^|rh|jd|jj|||j|j|j |j ddS)zz :param opts: :param cmp_type: string supported "eq", "gte" :param all: :return: Nbugfix enhancement newpackagesecurity)typesadvisorybugzillacvesseverity) r rQr r r r9Zadd_security_filtersrrrr)rFrZcmp_typerrrrr 
_populate_update_security_filters        z$Cli._populate_update_security_filtercCs4|dk r|jjjj||dk r0|jjjj|dS)z Change minimal logger level for terminal output to stdout and stderr according to specific command requirements @param stdout: logging.INFO, logging.WARNING, ... @param stderr:logging.INFO, logging.WARNING, ... N)r9rZstdout_handlerZsetLevelZstderr_handler)rFstdoutstderrrrrredirect_loggerszCli.redirect_loggercCs.tjjj|}||jj_|jjjj|dS)N) r rZrYZMultiFileProgressMeterr9rrrr)rFZforYrrrredirect_repo_progresss zCli.redirect_repo_progresscCs|jjj}|dkrdS|jjjj|jd}|j}|jdd|}x|D]}||krL|}qLW||krtd|td|dS)N)rr )Z advisory_typez,Security: %s is an installed security updatez-Security: %s is the currently running version)r9r/Zget_running_kernelr1r2rrr3)rFZkernelqZikpkgrrrr_check_running_kernels    zCli._check_running_kernelcCs*t|jjtjjtdj||dS)Nz)argument {}: not allowed with argument {})r3rZ print_usager rMrNr r?)rFZoption_string_1Zoption_string_2rrr_option_conflicts zCli._option_conflictcCs<x6|jD],}||jkr*tjjtd|||j|<qWdS)zRegister a Command. :apizCommand "%s" already definedN)rrr rMrr )rFrrrrrrs  zCli.register_commandcCs|j|jjjr8tjtddjtt |jjj|jjj rhtjtddjtt |jjj xx|jj j D]h}|jrtjtd|j ddjtt |j|j rvtjtd|j ddjtt |j qvW|jjS)a2Call the base command, and pass it the extended commands or arguments. :return: (exit_code, [ errors ]) exit_code is:: 0 = we're done, exit 1 = we've errored, exit with error string 2 = we've got work yet to do, onto the next stage zExcludes in dnf.conf: z, zIncludes in dnf.conf: zExcludes in repo z: zIncludes in repo )rr9rCZ excludepkgsr=rr rarrZ includepkgsrridrrun)rFrrrrrs  " "(,zCli.run)N)N)r N)NN)rrrrErrrrrrrrrkrrrrrrrrrrres$3   -  r)r )OrZ __future__rrrcollections.abcr ImportError collectionsrzrrrrryrkr*rZlibdnf.transactionrcr rZdnf.clirZdnf.i18nr r r Zdnf.cli.aliasesZdnf.cli.commandsZdnf.cli.commands.aliasZdnf.cli.commands.autoremoveZdnf.cli.commands.checkZdnf.cli.commands.cleanZdnf.cli.commands.deplistZdnf.cli.commands.distrosyncZdnf.cli.commands.downgradeZdnf.cli.commands.groupZdnf.cli.commands.historyZdnf.cli.commands.installZdnf.cli.commands.makecacheZdnf.cli.commands.markZdnf.cli.commands.moduleZdnf.cli.commands.reinstallZdnf.cli.commands.removeZdnf.cli.commands.repolistZdnf.cli.commands.repoqueryZdnf.cli.commands.searchZdnf.cli.commands.shellZdnf.cli.commands.swapZdnf.cli.commands.updateinfoZdnf.cli.commands.upgradeZdnf.cli.commands.upgrademinimalZdnf.cli.demandZdnf.cli.formatZdnf.cli.option_parserZdnf.confZdnf.conf.substitutionsZ dnf.constZdnf.db.historyZdnf.exceptionsZ dnf.loggingZ dnf.persistorZ dnf.pluginZdnf.rpmZdnf.sackZdnf.transactionZdnf.utilZ dnf.yum.miscZ getLoggerr=rr)r;rAZBaserBobjectrrrrrs       OPK!.p!!6cli/__pycache__/completion_helper.cpython-36.opt-1.pycnu[3 f/ @s<ddlZddlZddlZddlZddZddZGdddejjj j Z Gdd d ejjj j ZGd d d ejjjjZGd d d ejjjZGdddejjjjZGdddejjjjZGdddejjjjZGdddejjjjZddZ e!dkr8ye ej"ddWn e#k r6ej$dYnXdS)Ncstfdd|S)Ncst|jS)N)str startswith)k)kw'/usr/lib/python3.6/completion_helper.pysz#filter_list_by_kw..)filter)rZlstr)rrfilter_list_by_kwsr cCstdd|DS)NcSsg|] }t|qSr)r).0xrrr !sz%listpkg_to_setstr..)set)pkgsrrrlistpkg_to_setstr srcs,eZdZfddZddZddZZS)RemoveCompletionCommandcstt|j|dS)N)superr__init__)selfargs) __class__rrr$sz RemoveCompletionCommand.__init__cCsd|jj_d|jj_dS)NFT)clidemands root_usersack_activation)rrrr configure's z!RemoveCompletionCommand.configurecCs,x&tj|j|jjD]}tt|qWdS)N)ListCompletionCommand installedbaseopts pkg_specsprintr)rpkgrrrrun+szRemoveCompletionCommand.run)__name__ __module__ __qualname__rrr# __classcell__rr)rrr#s 
rcs,eZdZfddZddZddZZS)InstallCompletionCommandcstt|j|dS)N)rr(r)rr)rrrr1sz!InstallCompletionCommand.__init__cCs"d|jj_d|jj_d|jj_dS)NFT)rrravailable_reposr)rrrrr4s  z"InstallCompletionCommand.configurecCsNttj|j|jj}ttj|j|jj}x||D]}tt|q6WdS)N) rrrrrr availabler!r)rrr*r"rrrr#9s     zInstallCompletionCommand.run)r$r%r&rrr#r'rr)rrr(0s r(cs,eZdZfddZddZddZZS)ReinstallCompletionCommandcstt|j|dS)N)rr+r)rr)rrrrCsz#ReinstallCompletionCommand.__init__cCs"d|jj_d|jj_d|jj_dS)NFT)rrrr)r)rrrrrFs  z$ReinstallCompletionCommand.configurecCsNttj|j|jj}ttj|j|jj}x||@D]}tt|q6WdS)N) rrrrrr r*r!r)rrr*r"rrrr#Ks     zReinstallCompletionCommand.run)r$r%r&rrr#r'rr)rrr+Bs r+csHeZdZfddZddZeddZeddZed d ZZ S) rcstt|j|dS)N)rrr)rr)rrrrTszListCompletionCommand.__init__cCs|j}|jj}|jj}t|dkrH|d|krHtdjt|d|n|dkr`|j|j |}n||dkrx|j |j |}nd|dkr|j |j |}nLt |j |j |}t |j|j |}||B}|stdjt|d|dSx|D]}tt |qWdS)N rr*updatesr)Z pkgnarrowsrZpackagesZpackages_actionlenr!joinr rrr*r.rr)rsubcmdsractionrr*rr"rrrr#Ws& zListCompletionCommand.runcCs |jjjjdj|ddS)Nz{}*r) name__glob)sackqueryrfiltermformat)rargrrrrnszListCompletionCommand.installedcCs |jjjjdj|ddS)Nz{}*r)r3)r4r5r*r6r7)rr8rrrr*rszListCompletionCommand.availablecCs|jdj|dgddS)Nz{}*rF)Zprint_)Z check_updatesr7)rr8rrrr.vszListCompletionCommand.updates) r$r%r&rr# staticmethodrr*r.r'rr)rrrSs    rcs$eZdZfddZddZZS)RepoListCompletionCommandcstt|j|dS)N)rr:r)rr)rrrr|sz"RepoListCompletionCommand.__init__cCs|j}|jdkr>tdjt|jddd|jjjDnn|jdkrvtdjt|jddd|jjjDn6|jdkrtdjt|jdd d|jjjDdS) Nenabledr-rcSsg|] }|jqSr)id)r rrrrr sz1RepoListCompletionCommand.run..ZdisabledcSsg|]}|js|jqSr)r;r<)r r=rrrr sallcSsg|] }|jqSr)r<)r r=rrrr s) rZ repos_actionr!r0r ZreposrZ iter_enabledr>)rrrrrr#s   zRepoListCompletionCommand.run)r$r%r&rr#r'rr)rrr:{s r:cs,eZdZfddZddZddZZS)UpgradeCompletionCommandcstt|j|dS)N)rr?r)rr)rrrrsz!UpgradeCompletionCommand.__init__cCs"d|jj_d|jj_d|jj_dS)NFT)rrrr)r)rrrrrs  z"UpgradeCompletionCommand.configurecCs,x&tj|j|jjD]}tt|qWdS)N)rr.rrr r!r)rr"rrrr#szUpgradeCompletionCommand.run)r$r%r&rrr#r'rr)rrr?s r?cs,eZdZfddZddZddZZS)DowngradeCompletionCommandcstt|j|dS)N)rr@r)rr)rrrrsz#DowngradeCompletionCommand.__init__cCs"d|jj_d|jj_d|jj_dS)NFT)rrrr)r)rrrrrs  z$DowngradeCompletionCommand.configurecCs0x*tj|j|jjjD]}tt|qWdS)N)rr*rrr Z downgradesr!r)rr"rrrr#szDowngradeCompletionCommand.run)r$r%r&rrr#r'rr)rrr@s r@cs$eZdZfddZddZZS)CleanCompletionCommandcstt|j|dS)N)rrAr)rr)rrrrszCleanCompletionCommand.__init__cCs0tjjjjj}tdjt|j j d|dS)Nr-r,) dnfrcommandscleanZ _CACHE_TYPESkeysr!r0r rtype)rr1rrrr#szCleanCompletionCommand.run)r$r%r&rr#r'rr)rrrAs rAc Cstjjj}tjj|}|ddkrP|jgg|tdjt|d|jdS|jj |j t |j t |j t |j t|j t|j t|j t|j t|j|y |jWn&ttjjfk rtjdYnXdS)NrZ_cmdsr-r,)rBrZBaseCliZCliZ init_pluginsr!r0r Z cli_commandsclearZregister_commandrr(r+rr:r?r@rArr#OSError exceptionsErrorsysexit)rrrrrrmains(              rM__main__r,)%Zdnf.exceptionsrBZdnf.cliZdnf.cli.commands.cleanrKr rrrCremoveZ RemoveCommandrZinstallZInstallCommandr(Z reinstallZReinstallCommandr+Z ListCommandrZrepolistZRepoListCommandr:ZupgradeZUpgradeCommandr?Z downgradeZDowngradeCommandr@rDZ CleanCommandrArMr$argvKeyboardInterruptrLrrrrs& (  PK!.p!!0cli/__pycache__/completion_helper.cpython-36.pycnu[3 f/ @s<ddlZddlZddlZddlZddZddZGdddejjj j Z Gdd d ejjj j ZGd d d ejjjjZGd d d ejjjZGdddejjjjZGdddejjjjZGdddejjjjZGdddejjjjZddZ e!dkr8ye ej"ddWn e#k r6ej$dYnXdS)Ncstfdd|S)Ncst|jS)N)str startswith)k)kw'/usr/lib/python3.6/completion_helper.pysz#filter_list_by_kw..)filter)rZlstr)rrfilter_list_by_kwsr cCstdd|DS)NcSsg|] }t|qSr)r).0xrrr !sz%listpkg_to_setstr..)set)pkgsrrrlistpkg_to_setstr 
srcs,eZdZfddZddZddZZS)RemoveCompletionCommandcstt|j|dS)N)superr__init__)selfargs) __class__rrr$sz RemoveCompletionCommand.__init__cCsd|jj_d|jj_dS)NFT)clidemands root_usersack_activation)rrrr configure's z!RemoveCompletionCommand.configurecCs,x&tj|j|jjD]}tt|qWdS)N)ListCompletionCommand installedbaseopts pkg_specsprintr)rpkgrrrrun+szRemoveCompletionCommand.run)__name__ __module__ __qualname__rrr# __classcell__rr)rrr#s rcs,eZdZfddZddZddZZS)InstallCompletionCommandcstt|j|dS)N)rr(r)rr)rrrr1sz!InstallCompletionCommand.__init__cCs"d|jj_d|jj_d|jj_dS)NFT)rrravailable_reposr)rrrrr4s  z"InstallCompletionCommand.configurecCsNttj|j|jj}ttj|j|jj}x||D]}tt|q6WdS)N) rrrrrr availabler!r)rrr*r"rrrr#9s     zInstallCompletionCommand.run)r$r%r&rrr#r'rr)rrr(0s r(cs,eZdZfddZddZddZZS)ReinstallCompletionCommandcstt|j|dS)N)rr+r)rr)rrrrCsz#ReinstallCompletionCommand.__init__cCs"d|jj_d|jj_d|jj_dS)NFT)rrrr)r)rrrrrFs  z$ReinstallCompletionCommand.configurecCsNttj|j|jj}ttj|j|jj}x||@D]}tt|q6WdS)N) rrrrrr r*r!r)rrr*r"rrrr#Ks     zReinstallCompletionCommand.run)r$r%r&rrr#r'rr)rrr+Bs r+csHeZdZfddZddZeddZeddZed d ZZ S) rcstt|j|dS)N)rrr)rr)rrrrTszListCompletionCommand.__init__cCs|j}|jj}|jj}t|dkrH|d|krHtdjt|d|n|dkr`|j|j |}n||dkrx|j |j |}nd|dkr|j |j |}nLt |j |j |}t |j|j |}||B}|stdjt|d|dSx|D]}tt |qWdS)N rr*updatesr)Z pkgnarrowsrZpackagesZpackages_actionlenr!joinr rrr*r.rr)rsubcmdsractionrr*rr"rrrr#Ws& zListCompletionCommand.runcCs |jjjjdj|ddS)Nz{}*r) name__glob)sackqueryrfiltermformat)rargrrrrnszListCompletionCommand.installedcCs |jjjjdj|ddS)Nz{}*r)r3)r4r5r*r6r7)rr8rrrr*rszListCompletionCommand.availablecCs|jdj|dgddS)Nz{}*rF)Zprint_)Z check_updatesr7)rr8rrrr.vszListCompletionCommand.updates) r$r%r&rr# staticmethodrr*r.r'rr)rrrSs    rcs$eZdZfddZddZZS)RepoListCompletionCommandcstt|j|dS)N)rr:r)rr)rrrr|sz"RepoListCompletionCommand.__init__cCs|j}|jdkr>tdjt|jddd|jjjDnn|jdkrvtdjt|jddd|jjjDn6|jdkrtdjt|jdd d|jjjDdS) Nenabledr-rcSsg|] }|jqSr)id)r rrrrr sz1RepoListCompletionCommand.run..ZdisabledcSsg|]}|js|jqSr)r;r<)r r=rrrr sallcSsg|] }|jqSr)r<)r r=rrrr s) rZ repos_actionr!r0r ZreposrZ iter_enabledr>)rrrrrr#s   zRepoListCompletionCommand.run)r$r%r&rr#r'rr)rrr:{s r:cs,eZdZfddZddZddZZS)UpgradeCompletionCommandcstt|j|dS)N)rr?r)rr)rrrrsz!UpgradeCompletionCommand.__init__cCs"d|jj_d|jj_d|jj_dS)NFT)rrrr)r)rrrrrs  z"UpgradeCompletionCommand.configurecCs,x&tj|j|jjD]}tt|qWdS)N)rr.rrr r!r)rr"rrrr#szUpgradeCompletionCommand.run)r$r%r&rrr#r'rr)rrr?s r?cs,eZdZfddZddZddZZS)DowngradeCompletionCommandcstt|j|dS)N)rr@r)rr)rrrrsz#DowngradeCompletionCommand.__init__cCs"d|jj_d|jj_d|jj_dS)NFT)rrrr)r)rrrrrs  z$DowngradeCompletionCommand.configurecCs0x*tj|j|jjjD]}tt|qWdS)N)rr*rrr Z downgradesr!r)rr"rrrr#szDowngradeCompletionCommand.run)r$r%r&rrr#r'rr)rrr@s r@cs$eZdZfddZddZZS)CleanCompletionCommandcstt|j|dS)N)rrAr)rr)rrrrszCleanCompletionCommand.__init__cCs0tjjjjj}tdjt|j j d|dS)Nr-r,) dnfrcommandscleanZ _CACHE_TYPESkeysr!r0r rtype)rr1rrrr#szCleanCompletionCommand.run)r$r%r&rr#r'rr)rrrAs rAc Cstjjj}tjj|}|ddkrP|jgg|tdjt|d|jdS|jj |j t |j t |j t |j t|j t|j t|j t|j t|j|y |jWn&ttjjfk rtjdYnXdS)NrZ_cmdsr-r,)rBrZBaseCliZCliZ init_pluginsr!r0r Z cli_commandsclearZregister_commandrr(r+rr:r?r@rArr#OSError exceptionsErrorsysexit)rrrrrrmains(              rM__main__r,)%Zdnf.exceptionsrBZdnf.cliZdnf.cli.commands.cleanrKr rrrCremoveZ RemoveCommandrZinstallZInstallCommandr(Z reinstallZReinstallCommandr+Z ListCommandrZrepolistZRepoListCommandr:ZupgradeZUpgradeCommandr?Z downgradeZDowngradeCommandr@rDZ CleanCommandrArMr$argvKeyboardInterruptrLrrrrs& (  PK!oZv  
+cli/__pycache__/demand.cpython-36.opt-1.pycnu[3 ft` @s0ddlmZGdddeZGdddeZdS))unicode_literalsc@s&eZdZddZdddZddZdS) _BoolDefaultcCs ||_d|jjt|f|_dS)Nz__%s%x)default __class____name__id _storing_name)selfrr /usr/lib/python3.6/demand.py__init__sz_BoolDefault.__init__NcCs |j}|j|kr||jS|jS)N)__dict__rr)r objZobjtypeobjdictr r r __get__s  z_BoolDefault.__get__cCs8|j}|j|kr*||j}||kr*td|||j<dS)NzDemand already set.)r rAttributeError)r rvalrZ current_valr r r __set__#s   z_BoolDefault.__set__)N)r __module__ __qualname__r rrr r r r rs rc@speZdZdZedZedZedZedZedZ edZ dZ edZ edZ edZedZdZedZdS) DemandSheetzHCollection of demands that different CLI parts have on other parts. :apiFTrN)rrr__doc__rZ allow_erasingZavailable_reposZ resolvingZ root_userZsack_activationZload_system_repoZsuccess_exit_statusZ cacheonlyZfresh_metadataZfreshest_metadataZ changelogsZtransaction_displayZplugin_filtering_enabledr r r r r+srN)Z __future__robjectrrr r r r s PK!oZv  %cli/__pycache__/demand.cpython-36.pycnu[3 ft` @s0ddlmZGdddeZGdddeZdS))unicode_literalsc@s&eZdZddZdddZddZdS) _BoolDefaultcCs ||_d|jjt|f|_dS)Nz__%s%x)default __class____name__id _storing_name)selfrr /usr/lib/python3.6/demand.py__init__sz_BoolDefault.__init__NcCs |j}|j|kr||jS|jS)N)__dict__rr)r objZobjtypeobjdictr r r __get__s  z_BoolDefault.__get__cCs8|j}|j|kr*||j}||kr*td|||j<dS)NzDemand already set.)r rAttributeError)r rvalrZ current_valr r r __set__#s   z_BoolDefault.__set__)N)r __module__ __qualname__r rrr r r r rs rc@speZdZdZedZedZedZedZedZ edZ dZ edZ edZ edZedZdZedZdS) DemandSheetzHCollection of demands that different CLI parts have on other parts. :apiFTrN)rrr__doc__rZ allow_erasingZavailable_reposZ resolvingZ root_userZsack_activationZload_system_repoZsuccess_exit_statusZ cacheonlyZfresh_metadataZfreshest_metadataZ changelogsZtransaction_displayZplugin_filtering_enabledr r r r r+srN)Z __future__robjectrrr r r r s PK!1ƽ1 1 +cli/__pycache__/format.cpython-36.opt-1.pycnu[3 ft`@s8ddlmZddlmZd ddZd ddZdd Zd S) )unicode_literals)long c Csddddddddd g }|r d }nd }d }d }t|d}|dkrDd}x$||krh||krh|d}||}qFWt|ts~t|trd}n|dkrd}nd}|t|pd |||fS)aReturn a human-readable metric-like string representation of a number. :param number: the number to be converted to a human-readable form :param SI: If is 0, this function will use the convention that 1 kilobyte = 1024 bytes, otherwise, the convention that 1 kilobyte = 1000 bytes will be used :param space: string that will be placed between the number and the SI prefix :return: a human-readable metric-like string representation of *number* rkMGTPEZYg@@g@irNgz%i%s%sgfffff#@z%.1f%s%sz%.0f%s%s)len isinstanceintrfloat) ZnumberZSIZspaceZsymbolsstepZthreshdepthZ max_depthformatr/usr/lib/python3.6/format.py format_numbers4  rcCsx|dks|dkr|rdSdSnV|tdkr.dSt|}|d}|d}|rh|d}|d}d|||fSd ||fSdS) aReturn a human-readable string representation of a number of seconds. The string will show seconds, minutes, and optionally hours. :param seconds: the number of seconds to convert to a human-readable form :param use_hours: If use_hours is 0, the representation will be in minutes and seconds. 
Otherwise, it will be in hours, minutes, and seconds :return: a human-readable string representation of *seconds* Nrz--:--:--z--:--infZInfinite<z%02i:%02i:%02iz %02i:%02i)rr)ZsecondsZ use_hoursZminutesZhoursrrr format_timeIs rcCsdjdd|jDS)N css|]}d|VqdS)z Nr).0srrr hszindent_block..)join splitlines)rrrr indent_blockgsr!N)rr)r)Z __future__rZ dnf.pycomprrrr!rrrrs   5 PK!1ƽ1 1 %cli/__pycache__/format.cpython-36.pycnu[3 ft`@s8ddlmZddlmZd ddZd ddZdd Zd S) )unicode_literals)long c Csddddddddd g }|r d }nd }d }d }t|d}|dkrDd}x$||krh||krh|d}||}qFWt|ts~t|trd}n|dkrd}nd}|t|pd |||fS)aReturn a human-readable metric-like string representation of a number. :param number: the number to be converted to a human-readable form :param SI: If is 0, this function will use the convention that 1 kilobyte = 1024 bytes, otherwise, the convention that 1 kilobyte = 1000 bytes will be used :param space: string that will be placed between the number and the SI prefix :return: a human-readable metric-like string representation of *number* rkMGTPEZYg@@g@irNgz%i%s%sgfffff#@z%.1f%s%sz%.0f%s%s)len isinstanceintrfloat) ZnumberZSIZspaceZsymbolsstepZthreshdepthZ max_depthformatr/usr/lib/python3.6/format.py format_numbers4  rcCsx|dks|dkr|rdSdSnV|tdkr.dSt|}|d}|d}|rh|d}|d}d|||fSd ||fSdS) aReturn a human-readable string representation of a number of seconds. The string will show seconds, minutes, and optionally hours. :param seconds: the number of seconds to convert to a human-readable form :param use_hours: If use_hours is 0, the representation will be in minutes and seconds. Otherwise, it will be in hours, minutes, and seconds :return: a human-readable string representation of *seconds* Nrz--:--:--z--:--infZInfinite<z%02i:%02i:%02iz %02i:%02i)rr)ZsecondsZ use_hoursZminutesZhoursrrr format_timeIs rcCsdjdd|jDS)N css|]}d|VqdS)z Nr).0srrr hszindent_block..)join splitlines)rrrr indent_blockgsr!N)rr)r)Z __future__rZ dnf.pycomprrrr!rrrrs   5 PK!U?)cli/__pycache__/main.cpython-36.opt-1.pycnu[3 ft`f@sPdZddlmZddlmZddlmZddlmZddlmZddl m Z ddl m Z dd l mZdd l mZdd lZdd lZdd l Zdd lZdd l Zdd lZdd lZdd lZdd lZdd lZdd lZdd lZdd lZdd lZejd Zd dZddZ eee fddZ!ddZ"ddZ#ddZ$d ddZ%e&dkrLe%ej'dd ddd S)!z4 Entrance point for the yum command line interface. 
)print_function)absolute_import)unicode_literals)Conf)Cli) OptionParser)ucd)show_lock_owner)_NdnfcCs&tjtjjdddtjt|dS)NT)exc_info)loggerlogr loggingSUBDEBUGcriticalr)er/usr/lib/python3.6/main.py ex_IOError2srcCs6tjtjjddd|jdk r2tjtdt|dS)Nr T)r z Error: %sr) rrr rrvaluerr r)rrrrex_Error8s rcCsy6tjjtjjj|}t||||SQRXWntjjk rr}ztj |j t |j dSd}~XnLtjj k r}ztj |j dSd}~Xntjjk r}zdSd}~Xntjjk r}zt|Sd}~Xntjk r$}ztj tdt|dSd}~Xntjjk r\}ztj tdt|dSd}~Xnbtk r}zt|Sd}~Xn>tk r}z tj djt|jtddSd}~XnXdS)Nrz Error: %sz{}: {}z Terminated.)r Zi18nZ setup_stdoutcliZBaseCli_main exceptionsZProcessLockErrorrrrr pid LockError DepsolveErrorErrorrhawkey Exceptionr rlibdnferrorIOErrorrKeyboardInterruptformattype__name__)argsZ conf_class cli_classZoption_parser_classbaserrrrmain?s4    r.cCsb|jj||}y|jttt||Wn(ttfk rV}zt|Sd}~XnXt ||S)z2Run the dnf program from a command line interface.N) Z_loggingZ _presetupZ configurelistmaprr&OSErrorrcli_run)r-r+r,Z option_parserrrrrrr\s rc,Csy td}WnFtk rR}z*|jtjkrBtjtdtjdWYdd}~Xn X|j y |j Wn@t j j k rYn(ttfk r}zt|Sd}~XnX|jjryt||}Wnt j jk r}zt|d}|jj r|jjddr|tdjd7}|jjjrN|s<|td jd 7}n|td jd 7}|jjjr|jjjd }|t jjkr|s|td jd7}n|tdjd7}|rtjdj|WYdd}~XnX|r|S|jj |jj!S)N.z8No read/execute access in current directory, moving to //r T)Z availablez?try to add '{}' to command line to replace conflicting packagesz--allowerasingz.try to add '{}' to skip uninstallable packagesz --skip-brokenz' or '{}' to skip uninstallable packagesbestz7try to add '{}' to use not only best candidate packagesz--nobestz0 or '{}' to use not only best candidate packagesz({}))"openr&errnoZEACCESrrr oschdircloseZrunr rrr1rdemands resolvingr r allow_erasingZ_goalZproblem_conflictsr(r-Zconfstrictr5Z _get_priorityZPRIO_MAINCONFIGinfocommandZrun_transactionZsuccess_exit_status)rr-frretmsgZpriorrrr2msT             r2cCs |jdkr&|j|jjtjtd|jjg}|jj dk rN|j |jj y|j |dWnt j jk r}ztjt|dSd}~Xnvt jjk r}z$x|jj|D]}tj|qWdSd}~Xn4tk r}zt|Sd}~XnXtjtddS)z9Perform the depsolve, download and RPM transaction stage.NzDependencies resolved.)Zdisplayrz Complete!r)Z transactionZresolver;r=rr?r r@Z run_resolvedZtransaction_displayappendZdo_transactionr rZCliErrorr%rrZTransactionCheckErrorZget_error_outputrr&r)rr-ZdisplaysexcerrrCrrrrr<s(   r<FcCst|}|rtj||S)apCall one of the multiple main() functions based on environment variables. :param args: command line arguments passed into yum :param exit_code: if *exit_code* is True, this function will exit python with its exit code when it has finished executing. Otherwise, it will return its exit code. :return: the exit code from dnf.yum execution )r.sysexit)r+ exit_codeZerrcoderrr user_mains  rJ__main__rT)rI)F)(__doc__Z __future__rrrZdnf.confrZ dnf.cli.clirZdnf.cli.option_parserrZdnf.i18nrZ dnf.cli.utilsr r Zdnf.clir Zdnf.exceptionsZ dnf.loggingZdnf.utilr7r"Z libdnf.errorr$rr8Zos.pathrGZ getLoggerrrrr.rr2r<rJr*argvrrrrsB          5  PK!U?#cli/__pycache__/main.cpython-36.pycnu[3 ft`f@sPdZddlmZddlmZddlmZddlmZddlmZddl m Z ddl m Z dd l mZdd l mZdd lZdd lZdd l Zdd lZdd l Zdd lZdd lZdd lZdd lZdd lZdd lZdd lZdd lZdd lZejd Zd dZddZ eee fddZ!ddZ"ddZ#ddZ$d ddZ%e&dkrLe%ej'dd ddd S)!z4 Entrance point for the yum command line interface. 
)print_function)absolute_import)unicode_literals)Conf)Cli) OptionParser)ucd)show_lock_owner)_NdnfcCs&tjtjjdddtjt|dS)NT)exc_info)loggerlogr loggingSUBDEBUGcriticalr)er/usr/lib/python3.6/main.py ex_IOError2srcCs6tjtjjddd|jdk r2tjtdt|dS)Nr T)r z Error: %sr) rrr rrvaluerr r)rrrrex_Error8s rcCsy6tjjtjjj|}t||||SQRXWntjjk rr}ztj |j t |j dSd}~XnLtjj k r}ztj |j dSd}~Xntjjk r}zdSd}~Xntjjk r}zt|Sd}~Xntjk r$}ztj tdt|dSd}~Xntjjk r\}ztj tdt|dSd}~Xnbtk r}zt|Sd}~Xn>tk r}z tj djt|jtddSd}~XnXdS)Nrz Error: %sz{}: {}z Terminated.)r Zi18nZ setup_stdoutcliZBaseCli_main exceptionsZProcessLockErrorrrrr pid LockError DepsolveErrorErrorrhawkey Exceptionr rlibdnferrorIOErrorrKeyboardInterruptformattype__name__)argsZ conf_class cli_classZoption_parser_classbaserrrrmain?s4    r.cCsb|jj||}y|jttt||Wn(ttfk rV}zt|Sd}~XnXt ||S)z2Run the dnf program from a command line interface.N) Z_loggingZ _presetupZ configurelistmaprr&OSErrorrcli_run)r-r+r,Z option_parserrrrrrr\s rc,Csy td}WnFtk rR}z*|jtjkrBtjtdtjdWYdd}~Xn X|j y |j Wn@t j j k rYn(ttfk r}zt|Sd}~XnX|jjryt||}Wnt j jk r}zt|d}|jj r|jjddr|tdjd7}|jjjrN|s<|td jd 7}n|td jd 7}|jjjr|jjjd }|t jjkr|s|td jd7}n|tdjd7}|rtjdj|WYdd}~XnX|r|S|jj |jj!S)N.z8No read/execute access in current directory, moving to //r T)Z availablez?try to add '{}' to command line to replace conflicting packagesz--allowerasingz.try to add '{}' to skip uninstallable packagesz --skip-brokenz' or '{}' to skip uninstallable packagesbestz7try to add '{}' to use not only best candidate packagesz--nobestz0 or '{}' to use not only best candidate packagesz({}))"openr&errnoZEACCESrrr oschdircloseZrunr rrr1rdemands resolvingr r allow_erasingZ_goalZproblem_conflictsr(r-Zconfstrictr5Z _get_priorityZPRIO_MAINCONFIGinfocommandZrun_transactionZsuccess_exit_status)rr-frretmsgZpriorrrr2msT             r2cCs |jdkr&|j|jjtjtd|jjg}|jj dk rN|j |jj y|j |dWnt j jk r}ztjt|dSd}~Xnvt jjk r}z$x|jj|D]}tj|qWdSd}~Xn4tk r}zt|Sd}~XnXtjtddS)z9Perform the depsolve, download and RPM transaction stage.NzDependencies resolved.)Zdisplayrz Complete!r)Z transactionZresolver;r=rr?r r@Z run_resolvedZtransaction_displayappendZdo_transactionr rZCliErrorr%rrZTransactionCheckErrorZget_error_outputrr&r)rr-ZdisplaysexcerrrCrrrrr<s(   r<FcCst|}|rtj||S)apCall one of the multiple main() functions based on environment variables. :param args: command line arguments passed into yum :param exit_code: if *exit_code* is True, this function will exit python with its exit code when it has finished executing. Otherwise, it will return its exit code. 
:return: the exit code from dnf.yum execution )r.sysexit)r+ exit_codeZerrcoderrr user_mains  rJ__main__rT)rI)F)(__doc__Z __future__rrrZdnf.confrZ dnf.cli.clirZdnf.cli.option_parserrZdnf.i18nrZ dnf.cli.utilsr r Zdnf.clir Zdnf.exceptionsZ dnf.loggingZdnf.utilr7r"Z libdnf.errorr$rr8Zos.pathrGZ getLoggerrrrr.rr2r<rJr*argvrrrrsB          5  PK!TeAA2cli/__pycache__/option_parser.cpython-36.opt-1.pycnu[3 ft`4]@sddlmZddlmZddlmZddlZddlZddlZddl Zddl Zddl Z ddl Z ddlZddlZe jdZGdddejZGdd d ejZdS) )unicode_literals)_) _parse_specsNdnfcseZdZfddZZS)MultilineHelpFormattercs"d|kr|jStt|j||S)N ) splitlinessuperr _split_lines)selftextwidth) __class__#/usr/lib/python3.6/option_parser.pyr 'sz#MultilineHelpFormatter._split_lines)__name__ __module__ __qualname__r __classcell__rr)rrr&srcseZdZdZd.fdd ZddZGdddejZGd d d ejZ Gd d d ej Z Gd ddejZ GdddejZ GdddejZGdddejZGdddejZddZddZddZddZdd Zd!d"Zd#d$Zd%d&Zd'd(Zd/fd*d+ Zd0fd,d- ZZS)1 OptionParserz5ArgumentParser like class to do things the "yum way".Tcs>tt|jdtdd|_d|_|j|r:i|_t|_ dS)NF)add_helpZformatter_class) r r__init__rcommand_positional_parser command_group_add_general_options _cmd_usageset _cmd_groups)r Z reset_usage)rrrr/s zOptionParser.__init__cCs&|jtjtd|tjddS)zOutput an error message, and exit the program. This method overrides standard argparser's error so that error output goes to the logger. :param msg: the error message to output zCommand line error: %sN) print_usageloggerZcriticalrsysexit)r msgrrrerror9szOptionParser.errorc@seZdZddZdS)zOptionParser._RepoCallbackcs@|dkr dndt||j}|jfddtjd|DdS)Nz --disablerepodisableenablec3s|]}|fVqdS)Nr).0x) operationrr Hsz6OptionParser._RepoCallback.__call__..z \s*[,\s]\s*)getattrdestextendresplit)r parser namespacevaluesopt_strlr)r)r__call__Es z#OptionParser._RepoCallback.__call__N)rrrr5rrrr _RepoCallbackDsr6c@seZdZddZdS)z OptionParser._RepoCallbackEnablecCs$|jj|ddft|d|dS)Nrr&Zreponame)repos_edappendsetattr)r r0r1r2r3rrrr5Ksz)OptionParser._RepoCallbackEnable.__call__N)rrrr5rrrr_RepoCallbackEnableJsr:cs$eZdZdZdZfddZZS)zOptionParser._SplitCallbackzN Split all strings in seq, at "," and whitespace. Returns a new list. z \s*[,\s]\s*csDd}x:tj|j|D](}|s |r8ttj|j||||d}qWdS)NTF)r.r/SPLITTERr r_SplitCallbackr5)r r0r1r2r3firstval)rrrr5Ts z$OptionParser._SplitCallback.__call__)rrr__doc__r;r5rrr)rrr<Osr<c@seZdZdZddZdS)z%OptionParser._SplitExtendDictCallbackz[ Split string at "," or whitespace to (key, value). 
Extends dict with {key: value}.c Cshy"|jd\}}| s| r tWn,tk rNtd|}tj||YnXt||j}|||<dS)N,zbad format: %s)r/ ValueErrorrargparseZ ArgumentErrorr+r,) r r0r1r2r3keyr>r#Zdctrrrr5bs   z.OptionParser._SplitExtendDictCallback.__call__N)rrrr?r5rrrr_SplitExtendDictCallback_srDc@seZdZdZddZdS)zOptionParser._SetoptsCallbackzY Parse setopts arguments and put them into main_ and repo_.c Cs|jd}t|dkr*tjtd|dSt|dkrJtjtd|dS|\}}|jd}|d kr|d|} ||dd}t|dr|j} ni} | j| ij|gj |t |d|j | n:t|d r|j } ni} | j|gj |t |d |j | dS) N=z'Setopt argument has multiple values: %sz Setopt argument has no value: %s.r repo_setoptsZrepo_ main_setoptsZmain_) r/lenr ZwarningrrfindhasattrrH setdefaultr8r9r,rI) r r0r1r2r3valskvZperiodrepoZrepooptsZmainoptsrrrr5ps,       z&OptionParser._SetoptsCallback.__call__N)rrrr?r5rrrr_SetoptsCallbackmsrSc@seZdZddZdS)z'OptionParser.ParseSpecGroupFileCallbackcCst||dS)N)r)r r0r1r2r3rrrr5sz0OptionParser.ParseSpecGroupFileCallback.__call__N)rrrr5rrrrParseSpecGroupFileCallbacksrTcs$eZdZfddZddZZS)zOptionParser.PkgNarrowCallbackcsi|_y&x dD]}|||j|<||=qWWn6tk rb}ztd|jj|fWYdd}~XnXg|d<ttj|j||dS)Nchoicesdefaultz"%s() missing mandatory argument %s)rUrV) pkgnarrowKeyError TypeErrorrrr rPkgNarrowCallbackr)r argskwargsrPe)rrrrs "z'OptionParser.PkgNarrowCallback.__init__cCsV|jd}| s"|d|jdkr.|jd}n |jd}t|||t||j|dS)NZ_actionrrUrV)r,rWpopr9)r r0r1r2r3Z dest_actionZnarrowrrrr5s     z'OptionParser.PkgNarrowCallback.__call__)rrrrr5rrr)rrrZs rZc@seZdZddZdS)zOptionParser.ForceArchActioncCsd|_||_dS)NT)Z ignorearchZarch)r r0r1r2r3rrrr5sz%OptionParser.ForceArchAction.__call__N)rrrr5rrrrForceArchActionsr_c Cs|jtdjtjjd}|jdddddtdd |jd d d d dtdd|jddd dtdd|jdd dtdjtjjdd|jdtddd|jdddgdtdd|jd d!dd"td#d$|jd%d&g|jtd'd(d)|jd*d+g|jtd,d(d)|jd-dtd.d/|jd0d1g|jtd2d3|jd4d5d dtd6d|jd7d8d9d d:td;d<|jd=d dtd>d|j }|jd?d@d dAdtdBdC|jdDd!dAtdEd<|jdFdGdHd dtdId|jdJdKdLt ddMtdNdO|jdPdQdRdSdtdTt dU|jdVd dtdWd|jdXdYd dtdZd|jd[d\dt td]d^|jd_dd`d tdajtjj ddb|jdcdtdddedf|jdgdhd dtdid|jdjd dtdkd|jdl|j dmgdntdodp|j }|jdq|j dmgdntdrdp|jdsdtdndu|jgtdvdw|j }|jdxdydzd td{db|jd|dyd}d td~db|jdddgd|jtddd|jddgd|jtddnd|jdi|j dtdd|jdd!ddtdd$|jdd!ddtdd$|jdddtdd|jddd tdd|jdddtdddd|jdddtdddd|jddddtdd|jddd dytdd|jdddtdd|jdd tdd|jdd tdd|jdd tdd|jdd tdd|jdddg|jtdd3|jddgd|jtddb|jddgd|jtdƒdb|jddddddggd|jtdʃdˍ|jddtj|jttjjjtd΃dύ|jddtjdҍdS)z0 Standard options known to all dnf subcommands. 
zGeneral {prog} options)progz-cz--configZconfig_file_pathNz [config file]zconfig file location)r,rVmetavarhelpz-qz--quietquiet store_truezquiet operation)r,actionrVrbz-vz --verbosezverbose operation)rerVrbz --versionzshow {prog} version and exitz --installrootzset install rootz[path])rbraz--nodocs store_constZnodocsZtsflagszdo not install documentations)reconstr,rbz --nopluginsZ store_falseZpluginszdisable all plugins)rerVr,rbz--enablepluginZ enablepluginzenable plugins by namez[plugin])r,rVrerbraz--disablepluginZ disablepluginzdisable plugins by namez --releaseverz:override the value of $releasever in config and repo files)rVrbz--setoptZsetoptsz%set arbitrary config and repo options)r,rVrerbz --skip-brokenZ skip_brokenz.resolve depsolve problems by skipping packagesz-hz--helpz --help-cmdrbzshow command help)rer,rbz--allowerasingz;allow erasing of installed packages to resolve dependenciesz-bz--bestZbestz8try the best available package versions in transactions.)rer,rVrbz--nobestz2do not limit the transaction to the best candidatez-Cz --cacheonlyZ cacheonlyz2run entirely from system cache, don't update cachez-Rz --randomwaitZ sleeptimez [minutes]zmaximum command wait time)r,typerVrarbz-dz --debuglevelZ debuglevelz [debug level]zdebugging output level)r,rarVrbrhz --debugsolverz)dumps detailed solving results into filesz--showduplicatesZshowdupesfromreposz2show duplicates, in repos, in list/search commandsz-ez --errorlevelzerror output level)rVrhrbz --obsoletesZ obsoleteszenables {prog}'s obsoletes processing logic for upgrade or display capabilities that the package obsoletes for info, list and repoquery)rVr,rerbz--rpmverbosityzdebugging output level for rpmz[debug level name])rVrbraz-yz --assumeyesz*automatically answer yes for all questionsz --assumenoz)automatically answer no for all questionsz --enablerepor7z[repo]z]Enable additional repositories. List option. Supports globs, can be specified multiple times.)rer,rVrarbz --disablerepozSDisable repositories. List option. 
Supports globs, can be specified multiple times.z--repoz--repoidrRzUenable just specific repositories by an id or a glob, can be specified multiple times)rar,rerVrbz--enableFZ set_enabledz>enable repos with config-manager command (automatically saves)z --disableZ set_disabledz?disable repos with config-manager command (automatically saves)z-xz --excludez --excludepkgsZ excludepkgsz exclude packages by name or globz [package])rVr,rerbraz--disableexcludesz--disableexcludepkgsZdisable_excludeszdisable excludepkgsz--repofrompathz [repo,path]zolabel and path to an additional repository to use (same path as in a baseurl), can be specified multiple times.)rVrerarbz--noautoremoveZclean_requirements_on_removez7disable removal of dependencies that are no longer usedz --nogpgcheckZgpgcheckz5disable gpg signature checking (if RPM policy allows)z--colorZcolorzcontrol whether color is used)r,rVrbz --refreshZfreshest_metadataz2set metadata as expired before running the command)r,rerbz-4Z ip_resolvezresolve to IPv4 addresses onlyZipv4)r,rVrbrergz-6zresolve to IPv6 addresses onlyZipv6z --destdirz --downloaddirZdestdirz!set directory to copy packages toz--downloadonlyZ downloadonlyzonly download packagesz --commentZcommentzadd a comment to transactionz--bugfixz,Include bugfix relevant packages, in updates)rerbz --enhancementz1Include enhancement relevant packages, in updatesz --newpackagez0Include newpackage relevant packages, in updatesz --securityz.Include security relevant packages, in updatesz --advisoryz --advisoriesZadvisoryz=Include packages needed to fix the given advisory, in updatesz--bzz--bzsZbugzillaz7Include packages needed to fix the given BZ, in updatesz--cvez--cvesZcvesz8Include packages needed to fix the given CVE, in updatesz--sec-severityz --secseverityZCriticalZ ImportantZModerateZLowZseverityzDInclude security relevant packages matching the severity, in updates)rUrVr,rerbz --forcearchZARCHz Force the use of an architecture)rar,rerUrbcommand?)nargsrb)add_argument_grouprformatrutilZMAIN_PROG_UPPER add_argumentr<rSZadd_mutually_exclusive_groupint MAIN_PROGr6rDrBZSUPPRESSr_sortedZrpmZ _BASEARCH_MAPkeys)r Z general_grpZ best_groupZ repo_groupZ enable_grouprrrrs:                                                                     z!OptionParser._add_general_optionscCsHtjj|j}tjj|jd}||jkrD||f|j|<|jj|dS)z- store usage info about a single dnf command.rN)rZi18nZucdsummaryaliasesrradd)r cmdgrouprtnamerrr_add_cmd_usageds  zOptionParser._add_cmd_usagecCs&x t|jD]}|j||qWdS)z store name & summary for dnf commands The stored information is used build usage information grouped by build-in & plugin commands. N)rr2rz)r Zcli_cmdsrxrwrrr add_commandslszOptionParser.add_commandscCstdtdd}dtjj}xfd D]^}||jkr4q$|d||7}xsz1OptionParser.cmd_add_argument..)allrhrror)r r[r\r)r rrszOptionParser.cmd_add_argumentcCs`xZ|D]R}y|jdWqtk rV}z"tjjtd|t|fWYdd}~XqXqWdS)Nzutf-8zCannot encode argument '%s': %s)encodeUnicodeEncodeErrorr exceptionsZ ConfigErrorrstr)r r[rr]rrr_check_encodings  zOptionParser._check_encodingcCs|j||j|\}}|S)N)rparse_known_args)r r[r1Z _unused_argsrrrparse_main_argss zOptionParser.parse_main_argscCs2|j||j|\}}|jj||}||_|jS)N)rrr parse_argsZopts)r rir[r1Z unused_argsrrrparse_command_argss  zOptionParser.parse_command_argsNcs,|jr|j|jj7_tt|j|dS)N)r_actionsr rr)r Zfile_)rrrrszOptionParser.print_usagecsd|rH|j s|jj|jkr$|j||j|jj7_|jj|jjn |j |_ t t |j dS)N)rrrrrrZ_action_groupsr8rrr~r r print_help)r ri)rrrrs  zOptionParser.print_help)T)N)N)rrrr?rr$rBZActionr6r:Z _AppendActionr<rDrSrTrZr_rrzr{rrrrrrrrrrr)rrr,s.  
;  r)Z __future__rZdnf.i18nrZdnf.utilrrBZdnf.exceptionsrZdnf.rpmZ dnf.yum.miscZloggingZos.pathosr.r!Z getLoggerr Z HelpFormatterrrrrrrrs    PK!TeAA,cli/__pycache__/option_parser.cpython-36.pycnu[3 ft`4]@sddlmZddlmZddlmZddlZddlZddlZddl Zddl Zddl Z ddl Z ddlZddlZe jdZGdddejZGdd d ejZdS) )unicode_literals)_) _parse_specsNdnfcseZdZfddZZS)MultilineHelpFormattercs"d|kr|jStt|j||S)N ) splitlinessuperr _split_lines)selftextwidth) __class__#/usr/lib/python3.6/option_parser.pyr 'sz#MultilineHelpFormatter._split_lines)__name__ __module__ __qualname__r __classcell__rr)rrr&srcseZdZdZd.fdd ZddZGdddejZGd d d ejZ Gd d d ej Z Gd ddejZ GdddejZ GdddejZGdddejZGdddejZddZddZddZddZdd Zd!d"Zd#d$Zd%d&Zd'd(Zd/fd*d+ Zd0fd,d- ZZS)1 OptionParserz5ArgumentParser like class to do things the "yum way".Tcs>tt|jdtdd|_d|_|j|r:i|_t|_ dS)NF)add_helpZformatter_class) r r__init__rcommand_positional_parser command_group_add_general_options _cmd_usageset _cmd_groups)r Z reset_usage)rrrr/s zOptionParser.__init__cCs&|jtjtd|tjddS)zOutput an error message, and exit the program. This method overrides standard argparser's error so that error output goes to the logger. :param msg: the error message to output zCommand line error: %sN) print_usageloggerZcriticalrsysexit)r msgrrrerror9szOptionParser.errorc@seZdZddZdS)zOptionParser._RepoCallbackcs@|dkr dndt||j}|jfddtjd|DdS)Nz --disablerepodisableenablec3s|]}|fVqdS)Nr).0x) operationrr Hsz6OptionParser._RepoCallback.__call__..z \s*[,\s]\s*)getattrdestextendresplit)r parser namespacevaluesopt_strlr)r)r__call__Es z#OptionParser._RepoCallback.__call__N)rrrr5rrrr _RepoCallbackDsr6c@seZdZddZdS)z OptionParser._RepoCallbackEnablecCs$|jj|ddft|d|dS)Nrr&Zreponame)repos_edappendsetattr)r r0r1r2r3rrrr5Ksz)OptionParser._RepoCallbackEnable.__call__N)rrrr5rrrr_RepoCallbackEnableJsr:cs$eZdZdZdZfddZZS)zOptionParser._SplitCallbackzN Split all strings in seq, at "," and whitespace. Returns a new list. z \s*[,\s]\s*csDd}x:tj|j|D](}|s |r8ttj|j||||d}qWdS)NTF)r.r/SPLITTERr r_SplitCallbackr5)r r0r1r2r3firstval)rrrr5Ts z$OptionParser._SplitCallback.__call__)rrr__doc__r;r5rrr)rrr<Osr<c@seZdZdZddZdS)z%OptionParser._SplitExtendDictCallbackz[ Split string at "," or whitespace to (key, value). 
Extends dict with {key: value}.c Cshy"|jd\}}| s| r tWn,tk rNtd|}tj||YnXt||j}|||<dS)N,zbad format: %s)r/ ValueErrorrargparseZ ArgumentErrorr+r,) r r0r1r2r3keyr>r#Zdctrrrr5bs   z.OptionParser._SplitExtendDictCallback.__call__N)rrrr?r5rrrr_SplitExtendDictCallback_srDc@seZdZdZddZdS)zOptionParser._SetoptsCallbackzY Parse setopts arguments and put them into main_ and repo_.c Cs|jd}t|dkr*tjtd|dSt|dkrJtjtd|dS|\}}|jd}|d kr|d|} ||dd}t|dr|j} ni} | j| ij|gj |t |d|j | n:t|d r|j } ni} | j|gj |t |d |j | dS) N=z'Setopt argument has multiple values: %sz Setopt argument has no value: %s.r repo_setoptsZrepo_ main_setoptsZmain_) r/lenr ZwarningrrfindhasattrrH setdefaultr8r9r,rI) r r0r1r2r3valskvZperiodrepoZrepooptsZmainoptsrrrr5ps,       z&OptionParser._SetoptsCallback.__call__N)rrrr?r5rrrr_SetoptsCallbackmsrSc@seZdZddZdS)z'OptionParser.ParseSpecGroupFileCallbackcCst||dS)N)r)r r0r1r2r3rrrr5sz0OptionParser.ParseSpecGroupFileCallback.__call__N)rrrr5rrrrParseSpecGroupFileCallbacksrTcs$eZdZfddZddZZS)zOptionParser.PkgNarrowCallbackcsi|_y&x dD]}|||j|<||=qWWn6tk rb}ztd|jj|fWYdd}~XnXg|d<ttj|j||dS)Nchoicesdefaultz"%s() missing mandatory argument %s)rUrV) pkgnarrowKeyError TypeErrorrrr rPkgNarrowCallbackr)r argskwargsrPe)rrrrs "z'OptionParser.PkgNarrowCallback.__init__cCsV|jd}| s"|d|jdkr.|jd}n |jd}t|||t||j|dS)NZ_actionrrUrV)r,rWpopr9)r r0r1r2r3Z dest_actionZnarrowrrrr5s     z'OptionParser.PkgNarrowCallback.__call__)rrrrr5rrr)rrrZs rZc@seZdZddZdS)zOptionParser.ForceArchActioncCsd|_||_dS)NT)Z ignorearchZarch)r r0r1r2r3rrrr5sz%OptionParser.ForceArchAction.__call__N)rrrr5rrrrForceArchActionsr_c Cs|jtdjtjjd}|jdddddtdd |jd d d d dtdd|jddd dtdd|jdd dtdjtjjdd|jdtddd|jdddgdtdd|jd d!dd"td#d$|jd%d&g|jtd'd(d)|jd*d+g|jtd,d(d)|jd-dtd.d/|jd0d1g|jtd2d3|jd4d5d dtd6d|jd7d8d9d d:td;d<|jd=d dtd>d|j }|jd?d@d dAdtdBdC|jdDd!dAtdEd<|jdFdGdHd dtdId|jdJdKdLt ddMtdNdO|jdPdQdRdSdtdTt dU|jdVd dtdWd|jdXdYd dtdZd|jd[d\dt td]d^|jd_dd`d tdajtjj ddb|jdcdtdddedf|jdgdhd dtdid|jdjd dtdkd|jdl|j dmgdntdodp|j }|jdq|j dmgdntdrdp|jdsdtdndu|jgtdvdw|j }|jdxdydzd td{db|jd|dyd}d td~db|jdddgd|jtddd|jddgd|jtddnd|jdi|j dtdd|jdd!ddtdd$|jdd!ddtdd$|jdddtdd|jddd tdd|jdddtdddd|jdddtdddd|jddddtdd|jddd dytdd|jdddtdd|jdd tdd|jdd tdd|jdd tdd|jdd tdd|jdddg|jtdd3|jddgd|jtddb|jddgd|jtdƒdb|jddddddggd|jtdʃdˍ|jddtj|jttjjjtd΃dύ|jddtjdҍdS)z0 Standard options known to all dnf subcommands. 
zGeneral {prog} options)progz-cz--configZconfig_file_pathNz [config file]zconfig file location)r,rVmetavarhelpz-qz--quietquiet store_truezquiet operation)r,actionrVrbz-vz --verbosezverbose operation)rerVrbz --versionzshow {prog} version and exitz --installrootzset install rootz[path])rbraz--nodocs store_constZnodocsZtsflagszdo not install documentations)reconstr,rbz --nopluginsZ store_falseZpluginszdisable all plugins)rerVr,rbz--enablepluginZ enablepluginzenable plugins by namez[plugin])r,rVrerbraz--disablepluginZ disablepluginzdisable plugins by namez --releaseverz:override the value of $releasever in config and repo files)rVrbz--setoptZsetoptsz%set arbitrary config and repo options)r,rVrerbz --skip-brokenZ skip_brokenz.resolve depsolve problems by skipping packagesz-hz--helpz --help-cmdrbzshow command help)rer,rbz--allowerasingz;allow erasing of installed packages to resolve dependenciesz-bz--bestZbestz8try the best available package versions in transactions.)rer,rVrbz--nobestz2do not limit the transaction to the best candidatez-Cz --cacheonlyZ cacheonlyz2run entirely from system cache, don't update cachez-Rz --randomwaitZ sleeptimez [minutes]zmaximum command wait time)r,typerVrarbz-dz --debuglevelZ debuglevelz [debug level]zdebugging output level)r,rarVrbrhz --debugsolverz)dumps detailed solving results into filesz--showduplicatesZshowdupesfromreposz2show duplicates, in repos, in list/search commandsz-ez --errorlevelzerror output level)rVrhrbz --obsoletesZ obsoleteszenables {prog}'s obsoletes processing logic for upgrade or display capabilities that the package obsoletes for info, list and repoquery)rVr,rerbz--rpmverbosityzdebugging output level for rpmz[debug level name])rVrbraz-yz --assumeyesz*automatically answer yes for all questionsz --assumenoz)automatically answer no for all questionsz --enablerepor7z[repo]z]Enable additional repositories. List option. Supports globs, can be specified multiple times.)rer,rVrarbz --disablerepozSDisable repositories. List option. 
Supports globs, can be specified multiple times.z--repoz--repoidrRzUenable just specific repositories by an id or a glob, can be specified multiple times)rar,rerVrbz--enableFZ set_enabledz>enable repos with config-manager command (automatically saves)z --disableZ set_disabledz?disable repos with config-manager command (automatically saves)z-xz --excludez --excludepkgsZ excludepkgsz exclude packages by name or globz [package])rVr,rerbraz--disableexcludesz--disableexcludepkgsZdisable_excludeszdisable excludepkgsz--repofrompathz [repo,path]zolabel and path to an additional repository to use (same path as in a baseurl), can be specified multiple times.)rVrerarbz--noautoremoveZclean_requirements_on_removez7disable removal of dependencies that are no longer usedz --nogpgcheckZgpgcheckz5disable gpg signature checking (if RPM policy allows)z--colorZcolorzcontrol whether color is used)r,rVrbz --refreshZfreshest_metadataz2set metadata as expired before running the command)r,rerbz-4Z ip_resolvezresolve to IPv4 addresses onlyZipv4)r,rVrbrergz-6zresolve to IPv6 addresses onlyZipv6z --destdirz --downloaddirZdestdirz!set directory to copy packages toz--downloadonlyZ downloadonlyzonly download packagesz --commentZcommentzadd a comment to transactionz--bugfixz,Include bugfix relevant packages, in updates)rerbz --enhancementz1Include enhancement relevant packages, in updatesz --newpackagez0Include newpackage relevant packages, in updatesz --securityz.Include security relevant packages, in updatesz --advisoryz --advisoriesZadvisoryz=Include packages needed to fix the given advisory, in updatesz--bzz--bzsZbugzillaz7Include packages needed to fix the given BZ, in updatesz--cvez--cvesZcvesz8Include packages needed to fix the given CVE, in updatesz--sec-severityz --secseverityZCriticalZ ImportantZModerateZLowZseverityzDInclude security relevant packages matching the severity, in updates)rUrVr,rerbz --forcearchZARCHz Force the use of an architecture)rar,rerUrbcommand?)nargsrb)add_argument_grouprformatrutilZMAIN_PROG_UPPER add_argumentr<rSZadd_mutually_exclusive_groupint MAIN_PROGr6rDrBZSUPPRESSr_sortedZrpmZ _BASEARCH_MAPkeys)r Z general_grpZ best_groupZ repo_groupZ enable_grouprrrrs:                                                                     z!OptionParser._add_general_optionscCsHtjj|j}tjj|jd}||jkrD||f|j|<|jj|dS)z- store usage info about a single dnf command.rN)rZi18nZucdsummaryaliasesrradd)r cmdgrouprtnamerrr_add_cmd_usageds  zOptionParser._add_cmd_usagecCs&x t|jD]}|j||qWdS)z store name & summary for dnf commands The stored information is used build usage information grouped by build-in & plugin commands. N)rr2rz)r Zcli_cmdsrxrwrrr add_commandslszOptionParser.add_commandscCstdtdd}dtjj}xfd D]^}||jkr4q$|d||7}xsz1OptionParser.cmd_add_argument..)allrhrror)r r[r\r)r rrszOptionParser.cmd_add_argumentcCs`xZ|D]R}y|jdWqtk rV}z"tjjtd|t|fWYdd}~XqXqWdS)Nzutf-8zCannot encode argument '%s': %s)encodeUnicodeEncodeErrorr exceptionsZ ConfigErrorrstr)r r[rr]rrr_check_encodings  zOptionParser._check_encodingcCs|j||j|\}}|S)N)rparse_known_args)r r[r1Z _unused_argsrrrparse_main_argss zOptionParser.parse_main_argscCs2|j||j|\}}|jj||}||_|jS)N)rrr parse_argsZopts)r rir[r1Z unused_argsrrrparse_command_argss  zOptionParser.parse_command_argsNcs,|jr|j|jj7_tt|j|dS)N)r_actionsr rr)r Zfile_)rrrrszOptionParser.print_usagecsd|rH|j s|jj|jkr$|j||j|jj7_|jj|jjn |j |_ t t |j dS)N)rrrrrrZ_action_groupsr8rrr~r r print_help)r ri)rrrrs  zOptionParser.print_help)T)N)N)rrrr?rr$rBZActionr6r:Z _AppendActionr<rDrSrTrZr_rrzr{rrrrrrrrrrr)rrr,s.  
[Remainder of the option-parser bytecode: module import table only.]
[Archive member cli/__pycache__/output.cpython-36.opt-1.pyc: compiled bytecode; module docstring "Handle actual output from the cli."]
Recoverable names: module helper _spread_in_columns(); class Output ("Main output class for the yum command line") with a FILE_PROVIDE_RE pattern (^\*{0,2}/), _banner, _col_widths, calcColumns/fmtColumns helpers, fmtKeyValFill ("Return a key value pair in the common two column output format"), fmtSection ("a line with *name* centered, and *fill* repeated on either side to fill an entire line on the terminal"), and the opening of infoOutput ("Print information about the given package").
Recoverable docstrings from the rest of the Output class: infoOutput, updatesObsoletesList ("Print a simple string that explains the relationship between the members of an update or obsoletes tuple"), a package listing routine ("number of packages listed"), userconfirm, display_pkgs_in_groups ("Output information about the packages in a given group"), display_groups_in_environment, matchcallback / matchcallback_verbose ("Output search/provides type callback matches"), reportDownloadSize ("Report the total download size for a set of packages"), reportRemoveSize ("Report the total size of packages being removed"), list_group_transaction, list_transaction ("Return a string representation of the transaction in an easy-to-read format"), post_transaction_output, setup_progress_callbacks, download_callback_total_cb, _pwd_ui_username, historyListCmd ("Output a list of information about the history of yum transactions"), _historyInfoCmd, and historyInfoCmdPkgsAltered ("Print information about how packages are altered in a transaction").
Also recoverable: class DepSolveProgressCallBack ("Provides text output callback functions for Dependency Solver callback"), class CliKeyImport, class CliTransactionDisplay ("A YUM specific callback class for RPM operations"), and a module-level progressbar() helper ("a simple text progress bar consisting of 50 # marks").
[End of member; archive member cli/__pycache__/output.cpython-36.pyc begins.]
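The two column-formatting helpers named above are simple enough to re-sketch from their recovered docstrings alone. The code below is a rough approximation of that behaviour; the function names, the fixed 80-column width and the exact padding rules are assumptions, not DNF's implementation.

import textwrap

def fmt_key_val_fill(key, val, width=80):
    # Two-column output: wrapped value lines are indented under the value column.
    indent = " " * len(key)
    return textwrap.fill(str(val), width=width,
                         initial_indent=key, subsequent_indent=indent)

def fmt_section(name, fill="=", width=80):
    # A line with *name* centered and *fill* repeated on either side.
    name = " %s " % name
    left = (width - len(name)) // 2
    return "%s%s%s" % (fill * left, name, fill * (width - left - len(name)))

print(fmt_section("Installed Packages"))
print(fmt_key_val_fill("Summary     : ",
                       "An example description long enough to wrap onto an "
                       "indented continuation line in the second column."))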
[Archive member cli/__pycache__/output.cpython-36.pyc is a near-verbatim duplicate of the .opt-1 member above (the same Output class and helper classes, compiled without -O); the duplicated bytecode is omitted.]
[Tail of the duplicate output member: CliTransactionDisplay progress formatting and the progressbar() helper, repeated.]
[Archive member cli/__pycache__/progress.cpython-36.opt-1.pyc: compiled bytecode for the download progress meter.]
Recoverable names: class MultiFileProgressMeter ("Multi-file download progress meter"), built on dnf.callback.DownloadProgress; constructor docstring: "Creates a new progress meter instance: update_period, how often to update the progress bar; tick_period, how fast to cycle through concurrent downloads; rate_average, time constant for average speed calculation"; methods message, start, progress, end and an internal _update.
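As a usage illustration only, the sketch below mimics the recovered method names (start/progress/end) with a much simpler meter. The method signatures and output format here are guesses and do not reproduce DNF's dnf.callback.DownloadProgress API.

import sys

class SimpleProgressMeter(object):
    # Mirrors only the recovered method names; signatures and formatting are guesses.
    def start(self, total_files, total_size):
        self.total_files = total_files
        self.total_size = total_size
        self.done_files = 0

    def progress(self, name, done, total):
        pct = 100 * done // total if total else 0
        sys.stdout.write("\r(%d/%d) %s: %3d%%" %
                         (self.done_files + 1, self.total_files, name, pct))
        sys.stdout.flush()

    def end(self, name, size):
        self.done_files += 1
        sys.stdout.write("\r(%d/%d) %s: done, %d bytes\n" %
                         (self.done_files, self.total_files, name, size))

meter = SimpleProgressMeter()
meter.start(total_files=2, total_size=2048)
for name in ("foo.rpm", "bar.rpm"):
    for done in (256, 512, 1024):
        meter.progress(name, done, 1024)   # would be driven by real download I/O
    meter.end(name, 1024)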
kr0|j r|jd |j } t|j| d |j d \}} d |d| }d| | || f} | | d 8} nj|jd}d} |dkrdn|}d|d | }d| || f} | | d 8} |jd| kr*|jdnd|_|jd| | || fdS)Nrz%dz-%dz (%s/%d): %sz--:--z %5sB/s | %5sB %9s ETA z--- d=-z%3d%% [%-*s]%s z [%-*s]%sz%-*.*s%s)rrrrminr rrr%r lenrrrrrrdivmodr r#)rr+Z delta_timeZ delta_sizerZweightr,nZtime_etar"leftZblZpctpZbarrrrr(gsX        zMultiFileProgressMeter._updatec Cst}}t|}t|j}d}|tjjkr.n|tjjkrJ|jd7_nt||j kr|j j |\}}|j j |||8}|j d7_ |j|7_n(|tjjkr|j d7_ |j|7_|r*|tjjkr|jdkrd|j||j|j|f} nd|j||f} tt| d} d| | |f} nl|jdkrHd|j |j|f}t||d} dtt|| t|t| f} tt| } d | | || f} |j| |j r|j|dS) Nrr/z[%s %d/%d] %s: z [%s] %s: z%s%-*s z (%d/%d): %sgMbP?z %5sB/s | %5sB %9s z%-*.*s%s)rrr%r&r callback STATUS_MIRROR STATUS_DRPMrrpoprremoverrSTATUS_ALREADY_EXISTSr STATUS_2_STRrr9rmaxrfloatrr#r() rr)ZstatusZerr_msgr$r+r,sizer*r"r<ZtmrrrendsH          zMultiFileProgressMeter.endN)r)__name__ __module__ __qualname____doc__r r>Z STATUS_FAILEDrCr?r@rDrstderrrr#r$r.r(rHrrrrrs  5r)Z __future__rZdnf.cli.formatrrZ dnf.cli.termrZ dnf.pycomprrrZ dnf.callbackr Zdnf.utilr>ZDownloadProgressrrrrrs    PK!i['cli/__pycache__/progress.cpython-36.pycnu[3 ft`@spddlmZddlmZmZddlmZddlmZddl m Z ddl Z ddl Z ddl Z Gddde jjZdS) )unicode_literals) format_number format_time) _term_width)unicode)timeNc@sreZdZdZejjdejjdejjdejj diZ e j dddfd d Z d d ZdddZddZddZddZdS)MultiFileProgressMeterz"Multi-file download progress meterZFAILEDZSKIPPEDZMIRRORZDRPMg333333?g?g@cCsp||_||_||_||_d|_d|_tjj|_d|_ d|_ d|_ g|_ i|_ d|_d|_d|_d|_d|_dS)zCreates a new progress meter instance update_period -- how often to update the progress bar tick_period -- how fast to cycle through concurrent downloads rate_average -- time constant for average speed calculation rN)fo update_period tick_period rate_averageunknown_progres total_drpmsysstdoutisatty done_drpm done_files done_sizeactivestate last_time last_sizerate total_files total_size)selfr r r r r/usr/lib/python3.6/progress.py__init__&s" zMultiFileProgressMeter.__init__cCstjjd||jdS)NZ write_flush)dnfutilZ_terminal_messengerr )rmsgrrrmessage?szMultiFileProgressMeter.messagercCsF||_||_||_d|_d|_d|_g|_i|_d|_d|_ d|_ dS)Nr) rrrrrrrrrrr)rrrZ total_drpmsrrrstartBszMultiFileProgressMeter.startcCst}t|}t|j}t|}||jkrD|df|j|<|jj||j|\}}||f|j|<|j||7_||j|j kr||j kr||_ |j |dS)Nr) rrint download_sizerrappendrrr r_update)rpayloaddonenowtextZtotalr$oldrrrprogressSs    zMultiFileProgressMeter.progresscCsJ|jrj||j}|j|j}|dkrj|dkrj||}|jdk rdt||jd}|||jd|}||_||_|j|_|jsdS|jt||j t |j}|j dkrd|j d}t |jdkr|d|j t |j7}d||j |f}|jo|j rt|j |j|j}nd}d|jr,t|jndt|j|f} tt | } | d d } | d kr0|j r|jd |j } t|j| d |j d \}} d |d| }d| | || f} | | d 8} nj|jd}d} |dkrdn|}d|d | }d| || f} | | d 8} |jd| kr*|jdnd|_|jd| | || fdS)Nrz%dz-%dz (%s/%d): %sz--:--z %5sB/s | %5sB %9s ETA z--- d=-z%3d%% [%-*s]%s z [%-*s]%sz%-*.*s%s)rrrrminr rrr%r lenrrrrrrdivmodr r#)rr+Z delta_timeZ delta_sizerZweightr,nZtime_etar"leftZblZpctpZbarrrrr(gsX        zMultiFileProgressMeter._updatec Cst}}t|}t|j}d}|tjjkr.n|tjjkrJ|jd7_nt||j kr|j j |\}}|j j |||8}|j d7_ |j|7_n(|tjjkr|j d7_ |j|7_|r*|tjjkr|jdkrd|j||j|j|f} nd|j||f} tt| d} d| | |f} nl|jdkrHd|j |j|f}t||d} dtt|| t|t| f} tt| } d | | || f} |j| |j r|j|dS) Nrr/z[%s %d/%d] %s: z [%s] %s: z%s%-*s z (%d/%d): %sgMbP?z %5sB/s | %5sB %9s z%-*.*s%s)rrr%r&r callback STATUS_MIRROR STATUS_DRPMrrpoprremoverrSTATUS_ALREADY_EXISTSr STATUS_2_STRrr9rmaxrfloatrr#r() rr)ZstatusZerr_msgr$r+r,sizer*r"r<ZtmrrrendsH          zMultiFileProgressMeter.endN)r)__name__ __module__ __qualname____doc__r r>Z STATUS_FAILEDrCr?r@rDrstderrrr#r$r.r(rHrrrrrs  5r)Z 
[Tail of the duplicate progress member. Archive member cli/__pycache__/term.cpython-36.opt-1.pyc: compiled bytecode for terminal helpers.]
Recoverable names: _real_term_width ("Get the real terminal width", via fcntl.ioctl with termios.TIOCGWINSZ and struct.unpack('hhhh', ...)), _term_width ("Compute terminal width falling to default 80 in case of trouble"), and the opening of class Term.
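Because the ioctl constant and the struct format string survive in the dump, the width helpers can be approximately reconstructed. The version below is a best-effort reconstruction, not a byte-exact copy; in particular the zero-filled buffer and the lower clamp of 20 columns are inferred.

import fcntl
import struct
import termios

def real_term_width(fd=1):
    # Ask the kernel for the terminal window size and return the column count.
    try:
        buf = b"\0" * 8                        # room for struct winsize (4 shorts)
        buf = fcntl.ioctl(fd, termios.TIOCGWINSZ, buf)
        return struct.unpack(b"hhhh", buf)[1]  # (rows, cols, xpixel, ypixel) -> cols
    except (IOError, OSError):
        return None

def term_width(fd=1):
    # Fall back to the classic 80 columns when the real width cannot be determined.
    width = real_term_width(fd)
    if not width:
        return 80
    if width < 20:   # inferred clamp; avoids unusably narrow output
        return 20
    return width

print(term_width())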
[Readable docstrings recovered from Term: sub(haystack, beg, end, needles, escape=None, ignore_case=False) prefixes every occurrence of any string in needles with beg and postfixes it with end; sub_norm() postfixes matches with MODE['normal']; sub_mode() wraps matches in MODE[mode]; sub_bold() is sub_mode() with 'bold'; sub_fg()/sub_bg() wrap matches in FG_COLOR[color]/BG_COLOR[color]. This is the mechanism dnf uses to emphasize search hits in terminal output. The entry ends here, and a verbatim non-optimized duplicate, cli/__pycache__/term.cpython-36.pyc, begins.]
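A standalone sketch of the substitution behaviour documented above, based on the sub() docstring and its 'spam xandz eggs' example. It is not the Term class itself: the escape callback, terminal detection and curses capability lookup are omitted.

import re

def sub(haystack, beg, end, needles, ignore_case=False):
    """Wrap every occurrence of each needle in haystack with beg/end."""
    for needle in needles:
        pattern = re.escape(needle)
        flags = re.I if ignore_case else 0
        haystack = re.sub(pattern, lambda m: beg + m.group() + end, haystack, flags=flags)
    return haystack

print(sub('spam and eggs', 'x', 'z', ['and']))                 # -> spam xandz eggs
print(sub('spam and eggs', '\x1b[1m', '\x1b[0m', ['eggs']))    # bold 'eggs' on ANSI terminals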
[Remainder of the duplicate term.cpython-36.pyc (the same reinit() and sub_* docstrings as above), then the next archive entry: cli/__pycache__/utils.cpython-36.opt-1.pyc — "Various utility functions, and a utility class." Readable strings: _USER_HZ = os.sysconf(os.sysconf_names['SC_CLK_TCK']); jiffies_to_seconds(jiffies) converts kernel jiffies to seconds (commonly 100 jiffies = 1 second); seconds_to_ui_time(seconds) returns a human-readable representation of a time interval.]
[Docstrings and format strings recovered from utils.cpython-36.opt-1.pyc: seconds_to_ui_time() renders a duration as "%d day(s) %d:%02d:%02d", "%d:%02d:%02d" or "%02d:%02d" depending on whether it exceeds a day or an hour; get_process_info(pid) reads /proc/<pid>/status, /proc/stat and /proc/<pid>/stat and returns a dict with name, state, vmrss, vmsize and start_time; show_lock_owner(pid) logs "The application with PID %d is: %s" together with the memory usage ("%5s RSS (%5sB VSZ)"), start time and state of the process holding the lock. A verbatim non-optimized duplicate, cli/__pycache__/utils.cpython-36.pyc, follows. Next entries: cli/commands/__pycache__/downgrade.cpython-36.pyc — DowngradeCommand (aliases 'downgrade'/'dg', summary "Downgrade a package", run() calls base.downgradePkgs()) — and the start of cli/commands/__pycache__/group.cpython-36.opt-1.pyc.]
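A sketch reconstructed from the docstrings and format strings visible above. The thresholds and format strings match what is readable in the bytecode; rounding of fractional seconds in the original may differ slightly.

import os

_USER_HZ = os.sysconf(os.sysconf_names['SC_CLK_TCK'])   # jiffies per second, commonly 100

def jiffies_to_seconds(jiffies):
    """Convert a number of kernel jiffies to seconds."""
    return int(jiffies) / _USER_HZ

def seconds_to_ui_time(seconds):
    """Render a duration as 'N day(s) H:MM:SS', 'H:MM:SS' or 'MM:SS'."""
    if seconds >= 60 * 60 * 24:
        return '%d day(s) %d:%02d:%02d' % (seconds // (60 * 60 * 24),
                                           (seconds // (60 * 60)) % 24,
                                           (seconds // 60) % 60,
                                           seconds % 60)
    if seconds >= 60 * 60:
        return '%d:%02d:%02d' % (seconds // (60 * 60),
                                 (seconds // 60) % 60,
                                 seconds % 60)
    return '%02d:%02d' % ((seconds // 60) % 60, seconds % 60)

print(seconds_to_ui_time(93784))   # -> 1 day(s) 2:03:04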
[Archive entry: cli/commands/__pycache__/group.cpython-36.opt-1.pyc — GroupCommand, "Single sub-command interface for most groups interaction" (aliases 'group', 'groups', 'grp'; summary "display, or use, the groups information"). Readable strings: direct_commands maps grouplist/groupinstall/groupupdate/groupremove/grouperase/groupinfo onto the list/install/upgrade/remove/info subcommands; helpers _assert_comps() ("No group data available for configured repositories."), _environment_lists(), _group_lists(), _info() ("Warning: Group %s does not exist."), _list() (printing Installed/Available Environment Groups, Installed/Available Groups and the Language Groups variants), _mark_install()/_mark_remove() and _summary(); set_argparser() adds --with-optional, --hidden, --installed, --available, --ids and the positional COMMAND/COMMAND_ARG arguments; configure() enables sack_activation, root_user, resolving, allow_erasing and available_repos as the subcommand requires; run() dispatches on the subcommand and run_transaction() records group reasons in history. A non-optimized duplicate entry, cli/commands/__pycache__/group.cpython-36.pyc, begins here.]
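The command classes in this archive all follow the same layout: class-level aliases and summary, a static set_argparser(), a configure() that declares demands, and a run(). A minimal sketch of that pattern, written as a plugin-style example — ExampleCommand and its arguments are invented for illustration; the real commands also call helpers such as commands._checkGPGKey() and commands._checkEnabledRepo().

import dnf.cli

class ExampleCommand(dnf.cli.Command):
    aliases = ('example', 'ex')          # invoked as `dnf example`
    summary = 'demonstrate the aliases/set_argparser/configure/run layout'

    @staticmethod
    def set_argparser(parser):
        parser.add_argument('package', nargs='*', help='package specification')

    def configure(self):
        demands = self.cli.demands       # tell the CLI what this command needs
        demands.sack_activation = True
        demands.available_repos = True
        demands.resolving = True
        demands.root_user = True

    def run(self):
        for spec in self.opts.package:
            self.base.install(spec)      # queue each spec for the transaction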
[Remainder of the duplicate group.cpython-36.pyc, then the next archive entry: cli/commands/__pycache__/history.cpython-36.opt-1.pyc — HistoryCommand (aliases 'history'/'hist'; summary "display, or use, the transaction history") with the subcommands list, info, redo, replay, rollback, store, undo and userinstalled, a --reverse flag, -o/--output, and a TRANSACTION_FILE positional used by replay.]
[Readable strings from HistoryCommand: configure() validates the arguments ("Found more than one transaction ID. '{}' requires one transaction ID or package name.", "No transaction file name given.", "You don't have access to the history DB: %s"); _hcmd_undo(), _hcmd_rollback() and _hcmd_redo() revert or repeat stored transactions ("Transaction history is incomplete, before %u." / "after %u."); _args2transaction_ids() accepts numeric IDs, 'last', 'last-<offset>', '<id>..<id>' ranges or package names ("Invalid transaction ID range definition '{}'.", "Can't convert '{}' to transaction ID. Use '', 'last', 'last-'.", "No transaction which manipulates package '{}' was found."); run() can also serialize a transaction to transaction.json ("{} exists, overwrite?", "Transaction saved to {}.", "Error storing transaction: {}"). The entry ends and a verbatim non-optimized duplicate, cli/commands/__pycache__/history.cpython-36.pyc, begins.]
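A simplified sketch of the transaction-ID spec handling suggested by the strings above. Here last_tid stands in for the newest transaction ID from the history database; the package-name lookup path and error reporting are omitted, so this is an illustration of the spec grammar rather than dnf's exact implementation.

def str2transaction_id(spec, last_tid):
    """Accept '<number>', 'last' or 'last-<offset>' and return an integer ID."""
    if spec == 'last':
        return last_tid
    if spec.startswith('last-'):
        return last_tid - int(spec[len('last-'):])
    return int(spec)

def spec2transaction_ids(spec, last_tid):
    """Expand a single spec, or a '<id>..<id>' range, into a list of IDs."""
    if '..' in spec:
        begin, end = spec.split('..', 1)
        begin = str2transaction_id(begin, last_tid)
        end = str2transaction_id(end, last_tid)
        return list(range(min(begin, end), max(begin, end) + 1))
    return [str2transaction_id(spec, last_tid)]

print(spec2transaction_ids('last-2..last', last_tid=40))   # -> [38, 39, 40]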
[Remainder of the duplicate history.cpython-36.pyc, then the next archive entry: cli/commands/__pycache__/install.cpython-36.opt-1.pyc — InstallCommand (aliases 'install', 'localinstall', 'in'; summary "install a package or packages on your system"; nevra_forms maps the install-n/install-na/install-nevra aliases to hawkey FORM_NAME/FORM_NA/FORM_NEVRA). configure() verifies "that conditions are met so that this command can run."]
TN) clidemandsZsack_activationZavailable_reposZ resolvingZ root_userrZ _checkGPGKeybaseopts filenamesZ_checkEnabledRepo)selfrrrr configure7szInstallCommand.configurec CsPg}g}g}|j}|jj|j|jjdkrf|jjs>|jjrf|j|jj|jj j rft j j tdg}|jjo||jjdkrTt jjrLy,t jjj|j}|j|jj|jj j dWnt j jk rH}zp|jrx|jD]}|j|qW|jrx|jD]}|jd|qW|j} | r8tjt jjj| dWYdd}~XnXn|jj}|jjr|r|j|jj|jj j rt j j tdn|j}|r|r|j||jj j rt j j tdn|r|jjdkr|j||jjdkr|j |}t!|dks$t!|dks$|rL|jj j rLt j j"tddj#||ddS) Nr zNothing to do.)strict@rzUnable to find a match )pkg_specZpackages)$_get_nevra_forms_from_commandrZ _populate_update_security_filterrcommand grp_specs pkg_specs_log_not_valid_rpm_file_pathsrconfrr exceptionsErrorrZ WITH_MODULESmodule module_baseZ ModuleBaser Z MarkingErrorsZno_match_group_specsappendZerror_group_specsmodule_depsolv_errorsloggererrorZformat_modular_solver_errorsr_inform_not_a_valid_combination_install_files_install_groups_install_packageslenZPackagesNotAvailableErrorjoin) rerr_pkgserrsZerror_module_specs nevra_formsZskipped_grp_specsr,eZe_specr.rrrrunEsX            . zInstallCommand.runcCs&|jj|jkr|j|jjgSgSdS)N)rr$r9)rrrrr#zsz,InstallCommand._get_nevra_forms_from_commandcCsJtdd|}x6t|jj|D]$}td}tj||jjj j |qWdS)NcSsd|S)Nr r)grrrsz>InstallCommand._log_not_valid_rpm_file_paths..zNot a valid rpm file path: %s) maprrr&rr/inforoutputtermbold)rr%Z group_namespkgmsgrrrr'sz,InstallCommand._log_not_valid_rpm_file_pathscCs2x,|D]$}td}tj||jjjj|qWdS)NzNot a valid form: %s)rr/Zwarningrr@rArB)rformsZformrDrrrr1s z.InstallCommand._inform_not_a_valid_combinationc Csg}|jjj}x~|jj|jj||jjjdD]^}y|jj||dWq,t j j k rt d}t j||jjjj|j|j|Yq,Xq,W|S)N)rprogress)rzNo match for argument: %s)rr(rZadd_remote_rpmsrrr@rFZpackage_installrr) MarkingErrorrr/r?rArBlocationr-)rr7rrCrDrrrr2s zInstallCommand._install_filesc CsPy&|jj|t|jjj|jjjdWn$tjjk rJ|jjjrFYnXdS)N)r) rZenv_group_installtupler(Zgroup_package_typesrrr)r*)rr%rrrr3s  zInstallCommand._install_groupscCsV|jjjj|jj|d}|rRtd}tj|j|dj t t dd|DdS)N)Zprovidesz/There are following alternatives for "{0}": {1}z, cSsg|] }|jqSr)name).0Zaltrrr sz7InstallCommand._report_alternatives..) rZsackqueryZfiltermalternatives_provideformatrr/r?r6sortedset)rr"rMrDrrr_report_alternativess z#InstallCommand._report_alternativescCsg}|jjj}x|jjD]}y|jj|||dWqtjjk r}zJdj |j |jj j j |}tj||jj||j||j|WYdd}~XqXqW|S)N)rrEz{}: {})rr(rrr&r rr)rGrOvaluer@rArBr/r?Z_report_icase_hintrRr-)rr9r8rr"r:rDrrrr4s     z InstallCommand._install_packagesN)r r r )__name__ __module__ __qualname____doc__hawkeyZ FORM_NAMEZFORM_NAZ FORM_NEVRAr9rNrIkeysaliasesrZsummary staticmethodrrr;r#r'r1r2r3rRr4rrrrr %s"  5  r )Z __future__rrZlogging itertoolsrrXZdnf.exceptionsrZdnf.clirZdnf.cli.option_parserrZdnf.i18nrZ getLoggerr/ZCommandr rrrrs       PK!/cli/commands/__pycache__/install.cpython-36.pycnu[3 ft`S@sddlmZddlmZddlZddlmZddlZddlZddl m Z ddl m Z ddl mZejdZGd d d e jZdS) )absolute_import)unicode_literalsN)chain)commands) OptionParser)_dnfc@seZdZdZejejejdZdZ de ej Z e dZedd Zd d Zd d ZddZddZddZddZddZddZddZdS)InstallCommandzUA class containing methods needed by the cli to execute the install command. )z install-nz install-naz install-nevrazalternative-for({})install localinstallinz,install a package or packages on your systemcCs"|jddtdtjtdddS)Npackage+ZPACKAGEzPackage to install)nargsmetavaractionhelp) add_argumentrrZParseSpecGroupFileCallback)parserr/usr/lib/python3.6/install.py set_argparser1szInstallCommand.set_argparsercCsH|jj}d|_d|_d|_d|_tj|j|j|j j sDtj |jdS)zVerify that conditions are met so that this command can run. 
That there are enabled repositories with gpg keys, and that this command is called with appropriate arguments. TN) clidemandsZsack_activationZavailable_reposZ resolvingZ root_userrZ _checkGPGKeybaseopts filenamesZ_checkEnabledRepo)selfrrrr configure7szInstallCommand.configurec CsPg}g}g}|j}|jj|j|jjdkrf|jjs>|jjrf|j|jj|jj j rft j j tdg}|jjo||jjdkrTt jjrLy,t jjj|j}|j|jj|jj j dWnt j jk rH}zp|jrx|jD]}|j|qW|jrx|jD]}|jd|qW|j} | r8tjt jjj| dWYdd}~XnXn|jj}|jjr|r|j|jj|jj j rt j j tdn|j}|r|r|j||jj j rt j j tdn|r|jjdkr|j||jjdkr|j |}t!|dks$t!|dks$|rL|jj j rLt j j"tddj#||ddS) Nr zNothing to do.)strict@rzUnable to find a match )pkg_specZpackages)$_get_nevra_forms_from_commandrZ _populate_update_security_filterrcommand grp_specs pkg_specs_log_not_valid_rpm_file_pathsrconfrr exceptionsErrorrZ WITH_MODULESmodule module_baseZ ModuleBaser Z MarkingErrorsZno_match_group_specsappendZerror_group_specsmodule_depsolv_errorsloggererrorZformat_modular_solver_errorsr_inform_not_a_valid_combination_install_files_install_groups_install_packageslenZPackagesNotAvailableErrorjoin) rerr_pkgserrsZerror_module_specs nevra_formsZskipped_grp_specsr,eZe_specr.rrrrunEsX            . zInstallCommand.runcCs&|jj|jkr|j|jjgSgSdS)N)rr$r9)rrrrr#zsz,InstallCommand._get_nevra_forms_from_commandcCsJtdd|}x6t|jj|D]$}td}tj||jjj j |qWdS)NcSsd|S)Nr r)grrrsz>InstallCommand._log_not_valid_rpm_file_paths..zNot a valid rpm file path: %s) maprrr&rr/inforoutputtermbold)rr%Z group_namespkgmsgrrrr'sz,InstallCommand._log_not_valid_rpm_file_pathscCs2x,|D]$}td}tj||jjjj|qWdS)NzNot a valid form: %s)rr/Zwarningrr@rArB)rformsZformrDrrrr1s z.InstallCommand._inform_not_a_valid_combinationc Csg}|jjj}x~|jj|jj||jjjdD]^}y|jj||dWq,t j j k rt d}t j||jjjj|j|j|Yq,Xq,W|S)N)rprogress)rzNo match for argument: %s)rr(rZadd_remote_rpmsrrr@rFZpackage_installrr) MarkingErrorrr/r?rArBlocationr-)rr7rrCrDrrrr2s zInstallCommand._install_filesc CsPy&|jj|t|jjj|jjjdWn$tjjk rJ|jjjrFYnXdS)N)r) rZenv_group_installtupler(Zgroup_package_typesrrr)r*)rr%rrrr3s  zInstallCommand._install_groupscCsV|jjjj|jj|d}|rRtd}tj|j|dj t t dd|DdS)N)Zprovidesz/There are following alternatives for "{0}": {1}z, cSsg|] }|jqSr)name).0Zaltrrr sz7InstallCommand._report_alternatives..) 
rZsackqueryZfiltermalternatives_provideformatrr/r?r6sortedset)rr"rMrDrrr_report_alternativess z#InstallCommand._report_alternativescCsg}|jjj}x|jjD]}y|jj|||dWqtjjk r}zJdj |j |jj j j |}tj||jj||j||j|WYdd}~XqXqW|S)N)rrEz{}: {})rr(rrr&r rr)rGrOvaluer@rArBr/r?Z_report_icase_hintrRr-)rr9r8rr"r:rDrrrr4s     z InstallCommand._install_packagesN)r r r )__name__ __module__ __qualname____doc__hawkeyZ FORM_NAMEZFORM_NAZ FORM_NEVRAr9rNrIkeysaliasesrZsummary staticmethodrrr;r#r'r1r2r3rRr4rrrrr %s"  5  r )Z __future__rrZlogging itertoolsrrXZdnf.exceptionsrZdnf.clirZdnf.cli.option_parserrZdnf.i18nrZ getLoggerr/ZCommandr rrrrs       PK!x7cli/commands/__pycache__/makecache.cpython-36.opt-1.pycnu[3 ft`m@sxddlmZddlmZddlmZddlmZddlZddlZddl Zddl Zddl Z e j dZ GdddejZdS) )absolute_import)unicode_literals)commands)_Ndnfc@s,eZdZd ZedZeddZddZdS) MakeCacheCommand makecachemczgenerate the metadata cachecCs,|jdddd|jdddgdtjddS)Nz--timer store_true timer_opt)actiondesttimer?)nargschoicesmetavarhelp) add_argumentargparseZSUPPRESS)parserr/usr/lib/python3.6/makecache.py set_argparser's zMakeCacheCommand.set_argparsercCs2|jjdk p|jj}td}tj||jj|S)Nz*Making cache files for all metadata files.)Zoptsrr rloggerdebugbaseZ update_cache)selfrmsgrrrrun.s zMakeCacheCommand.runN)rr ) __name__ __module__ __qualname__aliasesrZsummary staticmethodrrrrrrr#s r)Z __future__rrZdnf.clirZdnf.i18nrrrZdnf.exceptionsZdnf.utilZloggingZ getLoggerrZCommandrrrrrs     PK!x1cli/commands/__pycache__/makecache.cpython-36.pycnu[3 ft`m@sxddlmZddlmZddlmZddlmZddlZddlZddl Zddl Zddl Z e j dZ GdddejZdS) )absolute_import)unicode_literals)commands)_Ndnfc@s,eZdZd ZedZeddZddZdS) MakeCacheCommand makecachemczgenerate the metadata cachecCs,|jdddd|jdddgdtjddS)Nz--timer store_true timer_opt)actiondesttimer?)nargschoicesmetavarhelp) add_argumentargparseZSUPPRESS)parserr/usr/lib/python3.6/makecache.py set_argparser's zMakeCacheCommand.set_argparsercCs2|jjdk p|jj}td}tj||jj|S)Nz*Making cache files for all metadata files.)Zoptsrr rloggerdebugbaseZ update_cache)selfrmsgrrrrun.s zMakeCacheCommand.runN)rr ) __name__ __module__ __qualname__aliasesrZsummary staticmethodrrrrrrr#s r)Z __future__rrZdnf.clirZdnf.i18nrrrZdnf.exceptionsZdnf.utilZloggingZ getLoggerrZCommandrrrrrs     PK!X:e e 2cli/commands/__pycache__/mark.cpython-36.opt-1.pycnu[3 ft` @spddlmZddlmZddlZddlmZddlmZddl Z ddl Z ddl Z e j dZ GdddejZdS) )print_function)unicode_literalsN)_)commandsdnfc@sLeZdZdZedZeddZddZddZ d d Z d d Z d dZ dS) MarkCommandmarkz7mark or unmark installed packages as installed by user.cCs6|jdddddgtdd|jdd d td d dS) NrZinstallremovegroupzhinstall: mark as installed by user remove: unmark as installed by user group: mark as installed by group)nargschoiceshelppackage+ZPACKAGEzPackage specification)r metavarr) add_argumentr)parserr/usr/lib/python3.6/mark.py set_argparser)s  zMarkCommand.set_argparsercCs,|jjj|tjjtjtdt |dS)Nz%s marked as user installed.) basehistory set_reasonlibdnf transactionZTransactionItemReason_USERloggerinforstr)selfpkgrrr _mark_install2szMarkCommand._mark_installcCs,|jjj|tjjtjtdt |dS)Nz%s unmarked as user installed.) rrrrrZ TransactionItemReason_DEPENDENCYrrrr)rr rrr _mark_remove6szMarkCommand._mark_removecCs,|jjj|tjjtjtdt |dS)Nz%s marked as group installed.) 
rrrrrZTransactionItemReason_GROUPrrrr)rr rrr _mark_group:szMarkCommand._mark_groupcCs$|jj}d|_d|_d|_d|_dS)NTF)clidemandsZsack_activationZ root_userZavailable_reposZ resolving)rr%rrr configure>s zMarkCommand.configurec Cs|jjd}|jj}tjt|d|}g}xR|D]J}tjj|}|j |j j }x|D] }||qVWt |dkr2|j |q2W|rtjtdx|D]}tjtd|qWtjj|j jj}|dkr|j j} n|j} |j jj| gg|j jj| dS)NrZ_mark_zError:zPackage %s is not installed.)Zoptsrr functoolspartialgetattrrZsubjectZSubjectZget_best_queryrZsacklenappendrerrorrr$ZCliErrorrZlastZ_rpmdb_versionZend_rpmdb_versionZbegend) rcmdZpkgsZ mark_funcZnotfoundr ZsubjqoldZ rpmdb_versionrrrrunEs,         zMarkCommand.runN)r) __name__ __module__ __qualname__aliasesrZsummary staticmethodrr!r"r#r&r1rrrrr$s r)Z __future__rrZlibdnf.transactionrZdnf.i18nrZdnf.clirrr'ZloggingZ getLoggerrZCommandrrrrrs     PK!X:e e ,cli/commands/__pycache__/mark.cpython-36.pycnu[3 ft` @spddlmZddlmZddlZddlmZddlmZddl Z ddl Z ddl Z e j dZ GdddejZdS) )print_function)unicode_literalsN)_)commandsdnfc@sLeZdZdZedZeddZddZddZ d d Z d d Z d dZ dS) MarkCommandmarkz7mark or unmark installed packages as installed by user.cCs6|jdddddgtdd|jdd d td d dS) NrZinstallremovegroupzhinstall: mark as installed by user remove: unmark as installed by user group: mark as installed by group)nargschoiceshelppackage+ZPACKAGEzPackage specification)r metavarr) add_argumentr)parserr/usr/lib/python3.6/mark.py set_argparser)s  zMarkCommand.set_argparsercCs,|jjj|tjjtjtdt |dS)Nz%s marked as user installed.) basehistory set_reasonlibdnf transactionZTransactionItemReason_USERloggerinforstr)selfpkgrrr _mark_install2szMarkCommand._mark_installcCs,|jjj|tjjtjtdt |dS)Nz%s unmarked as user installed.) rrrrrZ TransactionItemReason_DEPENDENCYrrrr)rr rrr _mark_remove6szMarkCommand._mark_removecCs,|jjj|tjjtjtdt |dS)Nz%s marked as group installed.) rrrrrZTransactionItemReason_GROUPrrrr)rr rrr _mark_group:szMarkCommand._mark_groupcCs$|jj}d|_d|_d|_d|_dS)NTF)clidemandsZsack_activationZ root_userZavailable_reposZ resolving)rr%rrr configure>s zMarkCommand.configurec Cs|jjd}|jj}tjt|d|}g}xR|D]J}tjj|}|j |j j }x|D] }||qVWt |dkr2|j |q2W|rtjtdx|D]}tjtd|qWtjj|j jj}|dkr|j j} n|j} |j jj| gg|j jj| dS)NrZ_mark_zError:zPackage %s is not installed.)Zoptsrr functoolspartialgetattrrZsubjectZSubjectZget_best_queryrZsacklenappendrerrorrr$ZCliErrorrZlastZ_rpmdb_versionZend_rpmdb_versionZbegend) rcmdZpkgsZ mark_funcZnotfoundr ZsubjqoldZ rpmdb_versionrrrrunEs,         zMarkCommand.runN)r) __name__ __module__ __qualname__aliasesrZsummary staticmethodrr!r"r#r&r1rrrrr$s r)Z __future__rrZlibdnf.transactionrZdnf.i18nrZdnf.clirrr'ZloggingZ getLoggerrZCommandrrrrrs     PK!3",t:t:4cli/commands/__pycache__/module.cpython-36.opt-1.pycnu[3 ft`A@sddlmZddlmZmZddlmZddlmZddl m Z ddl Z ddl Z ddl Z ddlZddlZddlZ ddlZ GdddejZdS) )print_function)commandsCliError)_)NoModuleException)loggerNc s*eZdZGdddejZGdddeZGdddeZGdddeZGd d d eZ Gd d d eZ Gd ddeZ GdddeZ GdddeZ GdddeZGdddeZGdddeZeeee e e e e eeeh ZehZd%ZedZfddZddZdd Zd!d"Zd#d$ZZS)& ModuleCommandcs,eZdZfddZddZddZZS)zModuleCommand.SubCommandcs(ttj|j|tjjj|j|_dS)N) superr SubCommand__init__dnfmodule module_baseZ ModuleBasebase)selfcli) __class__/usr/lib/python3.6/module.pyr (sz!ModuleCommand.SubCommand.__init__c Cst}x|jjD]}|jj|\}}|dkr.q|jr:|jnd}|jrJ|jnd}|jr^|jdksd|jrxt j t dj ||j r|j nd}|jjj||dd|}|j|qW|S)NzjOnly module name, stream, architecture or profile is used. 
Ignoring unneeded information in argument: '{}')setopts module_specr _get_modulesnamestreamversioncontextrinforformatarchr_moduleContainerqueryupdate) rmodules_from_specsr__Znsvcaprrr"modulesrrr#_get_modules_from_name_stream_specs,s zs   z3ModuleCommand.SubCommand._get_module_artifact_names)__name__ __module__ __qualname__r r)r, __classcell__rr)rrr &s r c@s(eZdZdZedZddZddZdS) zModuleCommand.ListSubCommandlistz,list all module streams, profiles and statescCs|jj}d|_d|_dS)NT)rdemandsavailable_repossack_activation)rr2rrr configureRsz&ModuleCommand.ListSubCommand.configurecCs|j}|jjr&|j|jjtjjj}nV|jj rF|j|jjtjjj }n6|jj rf|j|jjtjjj }n|j|jjtjjj }|rt|dS|jjrtd}tjj|dS)NzNo matching Modules to list)rrenabledZ_get_brief_descriptionrlibdnfr ModulePackageContainerZModuleState_ENABLEDdisabledZModuleState_DISABLED installedZModuleState_INSTALLEDZModuleState_UNKNOWNprintrr exceptionsError)rZmodsoutputmsgrrr run_on_moduleWs(z*ModuleCommand.ListSubCommand.run_on_moduleN)r1)r-r.r/aliasesrsummaryr5r@rrrrListSubCommandMsrCc@s(eZdZdZedZddZddZdS) zModuleCommand.InfoSubCommandr z)print detailed information about a modulecCs|jj}d|_d|_dS)NT)rr2r3r4)rr2rrrr5tsz&ModuleCommand.InfoSubCommand.configurecCsf|jjr|jj|jj}n*|jjr4|jj|jj}n|jj|jj}|rRt|nt j j t ddS)NzNo matching Modules to list) rverboserZ_get_full_inforprofileZ_get_info_profilesZ _get_infor;r r<r=r)rr>rrrr@ys z*ModuleCommand.InfoSubCommand.run_on_moduleN)r )r-r.r/rArrBr5r@rrrrInfoSubCommandosrFc@s(eZdZdZedZddZddZdS) zModuleCommand.EnableSubCommandenablezenable a module streamcCs$|jj}d|_d|_d|_d|_dS)NT)rr2r3r4 resolving root_user)rr2rrrr5s z(ModuleCommand.EnableSubCommand.configurecCsy|jj|jjWnltjjk r}zL|jjj rb|j s@|j rD||j rb|j dt jjjkrb|tjt|WYdd}~XnXdS)Nr)rrGrrr r< MarkingErrorsrconfstrictno_match_group_specserror_group_specsmodule_depsolv_errorsr7r r8!ModuleErrorType_ERROR_IN_DEFAULTSrerrorstr)rerrrr@s   z,ModuleCommand.EnableSubCommand.run_on_moduleN)rG)r-r.r/rArrBr5r@rrrrEnableSubCommandsrTc@s(eZdZdZedZddZddZdS) zModuleCommand.DisableSubCommanddisablez%disable a module with all its streamscCs$|jj}d|_d|_d|_d|_dS)NT)rr2r3r4rHrI)rr2rrrr5s z)ModuleCommand.DisableSubCommand.configurecCsy|jj|jjWnltjjk r}zL|jjj rb|j s@|j rD||j rb|j dt jjjkrb|tjt|WYdd}~XnXdS)Nr)rrUrrr r<rJrrKrLrMrNrOr7r r8rPrrQrR)rrSrrrr@s   z-ModuleCommand.DisableSubCommand.run_on_moduleN)rU)r-r.r/rArrBr5r@rrrrDisableSubCommandsrVc@s(eZdZdZedZddZddZdS) zModuleCommand.ResetSubCommandresetzreset a modulecCs$|jj}d|_d|_d|_d|_dS)NT)rr2r3r4rHrI)rr2rrrr5s z'ModuleCommand.ResetSubCommand.configurecCsby|jj|jjWnHtjjk r\}z(|jjj r>|j r>|t j t |WYdd}~XnXdS)N)rrWrrr r<rJrrKrLrMrrQrR)rrSrrrr@s z+ModuleCommand.ResetSubCommand.run_on_moduleN)rW)r-r.r/rArrBr5r@rrrrResetSubCommandsrXc@s(eZdZdZedZddZddZdS) zModuleCommand.InstallSubCommandinstallz/install a module profile including its packagescCs$|jj}d|_d|_d|_d|_dS)NT)rr2r3r4rHrI)rr2rrrr5s z)ModuleCommand.InstallSubCommand.configurecCspy|jj|jj|jjjWnNtjj k rj}z.|jjjrL|j sH|j rL|t j t|WYdd}~XnXdS)N)rrYrrrrKrLr r<rJrMrNrrQrR)rrSrrrr@s  z-ModuleCommand.InstallSubCommand.run_on_moduleN)rY)r-r.r/rArrBr5r@rrrrInstallSubCommandsrZc@s(eZdZdZedZddZddZdS) zModuleCommand.UpdateSubCommandr%z0update packages associated with an active streamcCs$|jj}d|_d|_d|_d|_dS)NT)rr2r3r4rHrI)rr2rrrr5s z(ModuleCommand.UpdateSubCommand.configurecCs&|jj|jj}|r"tdj|dS)Nz, )rZupgraderrrjoin)rZ module_specsrrrr@sz,ModuleCommand.UpdateSubCommand.run_on_moduleN)r%)r-r.r/rArrBr5r@rrrrUpdateSubCommandsr\c@s(eZdZd ZedZddZddZdS) zModuleCommand.RemoveSubCommandremoveerasez3remove installed module profiles and 
their packagescCs0|jj}d|_d|_d|_d|_d|_d|_dS)NTF)rr2Z allow_erasingr3Zfresh_metadatarHrIr4)rr2rrrr5sz(ModuleCommand.RemoveSubCommand.configurec Cs|jj|jj}|jjr|j}|j|t\}}|j|jj j |\}}|jj j j j|d}|jj j j j|d}xF|D]>}||krtdj|} tj| q|jjj||jjjdqW|sdStjtjj|ddS)N)rz0Package {} belongs to multiple modules, skipping)Z clean_deps)rM)rr]rrallr)r,rrr#ZgetModulePackagessackr$r:filtermrr!rr Zgoalr^rKZclean_requirements_on_removerQr r<rJ) rZskipped_groupsr&Zremove_names_from_specr'Z keep_namesZ remove_queryZ keep_querypkgr?rrrr@s&  z,ModuleCommand.RemoveSubCommand.run_on_moduleN)r]r^)r-r.r/rArrBr5r@rrrrRemoveSubCommands rcc@s(eZdZdZedZddZddZdS) z ModuleCommand.SwitchToSubCommand switch-toz7switch a module to a stream and distrosync rpm packagescCs.|jj}d|_d|_d|_d|_d|jj_dS)NT) rr2r3r4rHrIrrKZmodule_stream_switch)rr2rrrr5s z*ModuleCommand.SwitchToSubCommand.configurecCsry|jj|jj|jjjdWnNtjj k rl}z.|jjjrN|j sJ|j rN|t j t|WYdd}~XnXdS)N)rL)rZ switch_torrrrKrLr r<rJrMrNrrQrR)rrSrrrr@"s  z.ModuleCommand.SwitchToSubCommand.run_on_moduleN)rd)r-r.r/rArrBr5r@rrrrSwitchToSubCommandsrec@s(eZdZdZedZddZddZdS) z ModuleCommand.ProvidesSubCommandprovideszlist modular packagescCs|jj}d|_d|_dS)NT)rr2r3r4)rr2rrrr50sz*ModuleCommand.ProvidesSubCommand.configurecCs |jj|jj}|rt|dS)N)rZ_what_providesrrr;)rr>rrrr@5sz.ModuleCommand.ProvidesSubCommand.run_on_moduleN)rf)r-r.r/rArrBr5r@rrrrProvidesSubCommand+srgc@s(eZdZdZedZddZddZdS) z!ModuleCommand.RepoquerySubCommand repoqueryz#list packages belonging to a modulecCs|jj}d|_d|_dS)NT)rr2r3r4)rr2rrrr5?sz+ModuleCommand.RepoquerySubCommand.configurec Cst}x*|jjD]}|jj|\}}|j|qW|j|t\}}t}|jjs\|jj r|j j j jj |d}x|D]} |j t| qzW|jjr|j j j jj |d}x|D]} |j t| qWdjt|} t| dS)N)Z nevra_strict)r )rrrrrr%r, availabler:rr`r$rar+rRr[sortedr;) rr&rr(r'Znames_from_specZspec_artifactsZpackage_stringsr$rbr>rrrr@Ds"  z/ModuleCommand.RepoquerySubCommand.run_on_moduleN)rh)r-r.r/rArrBr5r@rrrrRepoquerySubCommand:srlr zInteract with Modules.cs>tt|jfdd|jD}d|_dd|D|_dS)Nc3s|]}|VqdS)Nr).0subcmd)rrr dsz)ModuleCommand.__init__..cSsi|]}|jD] }||qqSr)rA)rmrnaliasrrr fsz*ModuleCommand.__init__..)r rr SUBCMDSrn_subcmd_name2obj)rrZ subcmd_objs)r)rrr bs zModuleCommand.__init__cCs|j}|jdddtdd|jdddtdd|jd d dtd d|jd d dtdd|jdddtdd|jdddtddg}g}xHt|jdddD]2}|j|jd|jdj|jd|jpdqW|jdd|ddj |d|jd d!d"td#d$dS)%Nz --enabledr6 store_truezshow only enabled modules)destactionhelpz --disabledr9zshow only disabled modulesz --installedr:z'show only installed modules or packagesz --profilerEzshow profile contentz --availablerjzshow only available packagesz--allr_zremove all modular packagescSs |jdS)Nr)rA)xrrr~sz-ModuleCommand.set_argparser..)keyrz{}: {}rrnrzri)nargschoicesmetavarrwrz module-spec*zModule specification)r}r{rw) Zadd_mutually_exclusive_group add_argumentrrkrrappendrAr!rBr[)rparserZnarrowsZsubcommand_choicesZsubcommand_helprnrrr set_argparseris8       "  zModuleCommand.set_argparserc CsZy|j|jjd|_Wn(ttfk r@|jjjtYnX|j|j_|jjdS)Nr) rsrrnrKeyErrorrZ optparserZ print_usager5)rrrrr5s   zModuleCommand.configurecCs|j|jjdS)N)check_required_argumentrnr@)rrrrrunszModuleCommand.runcCsRdd|jD}|jjd|krN|jjsNttdjtjj |jj |jjddS)NcSsg|]}|jD]}|qqSr)rA)rmrnrprrr sz9ModuleCommand.check_required_argument..rz{} {} {}: too few arguments) SUBCMDS_NOT_REQUIRED_ARGrrnrrrr!r utilZ MAIN_PROGZcommand)rZnot_required_argumentrrrrs z%ModuleCommand.check_required_argument)r )r-r.r/rCommandr rCrFrTrVrXrZr\rcrergrlrrrrArrBr rr5rrr0rr)rrr%s.'"%   r)Z __future__rZdnf.clirrZdnf.i18nrZdnf.module.exceptionsrZdnf.utilrr sysosr*r7Zdnf.module.module_baseZdnf.exceptionsrrrrrrs    
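A sketch of the alias-to-subcommand dispatch used by ModuleCommand, as suggested by the aliases attributes and the alias-map comprehension visible in the bytecode. The SubCommand classes here are stand-ins; the real nested classes also carry configure() demands and argparse wiring.

class SubCommand:
    aliases = ()

    def configure(self):
        pass

    def run_on_module(self):
        raise NotImplementedError

class ListSubCommand(SubCommand):
    aliases = ('list',)

    def run_on_module(self):
        print('listing module streams, profiles and states')

class InfoSubCommand(SubCommand):
    aliases = ('info',)

    def run_on_module(self):
        print('printing detailed module information')

SUBCMDS = {ListSubCommand, InfoSubCommand}

subcmd_objs = {subcmd() for subcmd in SUBCMDS}
name2obj = {alias: obj for obj in subcmd_objs for alias in obj.aliases}

chosen = name2obj['list']      # the first positional argument selects the subcommand
chosen.configure()
chosen.run_on_module()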
PK!3",t:t:.cli/commands/__pycache__/module.cpython-36.pycnu[3 ft`A@sddlmZddlmZmZddlmZddlmZddl m Z ddl Z ddl Z ddl Z ddlZddlZddlZ ddlZ GdddejZdS) )print_function)commandsCliError)_)NoModuleException)loggerNc s*eZdZGdddejZGdddeZGdddeZGdddeZGd d d eZ Gd d d eZ Gd ddeZ GdddeZ GdddeZ GdddeZGdddeZGdddeZeeee e e e e eeeh ZehZd%ZedZfddZddZdd Zd!d"Zd#d$ZZS)& ModuleCommandcs,eZdZfddZddZddZZS)zModuleCommand.SubCommandcs(ttj|j|tjjj|j|_dS)N) superr SubCommand__init__dnfmodule module_baseZ ModuleBasebase)selfcli) __class__/usr/lib/python3.6/module.pyr (sz!ModuleCommand.SubCommand.__init__c Cst}x|jjD]}|jj|\}}|dkr.q|jr:|jnd}|jrJ|jnd}|jr^|jdksd|jrxt j t dj ||j r|j nd}|jjj||dd|}|j|qW|S)NzjOnly module name, stream, architecture or profile is used. Ignoring unneeded information in argument: '{}')setopts module_specr _get_modulesnamestreamversioncontextrinforformatarchr_moduleContainerqueryupdate) rmodules_from_specsr__Znsvcaprrr"modulesrrr#_get_modules_from_name_stream_specs,s zs   z3ModuleCommand.SubCommand._get_module_artifact_names)__name__ __module__ __qualname__r r)r, __classcell__rr)rrr &s r c@s(eZdZdZedZddZddZdS) zModuleCommand.ListSubCommandlistz,list all module streams, profiles and statescCs|jj}d|_d|_dS)NT)rdemandsavailable_repossack_activation)rr2rrr configureRsz&ModuleCommand.ListSubCommand.configurecCs|j}|jjr&|j|jjtjjj}nV|jj rF|j|jjtjjj }n6|jj rf|j|jjtjjj }n|j|jjtjjj }|rt|dS|jjrtd}tjj|dS)NzNo matching Modules to list)rrenabledZ_get_brief_descriptionrlibdnfr ModulePackageContainerZModuleState_ENABLEDdisabledZModuleState_DISABLED installedZModuleState_INSTALLEDZModuleState_UNKNOWNprintrr exceptionsError)rZmodsoutputmsgrrr run_on_moduleWs(z*ModuleCommand.ListSubCommand.run_on_moduleN)r1)r-r.r/aliasesrsummaryr5r@rrrrListSubCommandMsrCc@s(eZdZdZedZddZddZdS) zModuleCommand.InfoSubCommandr z)print detailed information about a modulecCs|jj}d|_d|_dS)NT)rr2r3r4)rr2rrrr5tsz&ModuleCommand.InfoSubCommand.configurecCsf|jjr|jj|jj}n*|jjr4|jj|jj}n|jj|jj}|rRt|nt j j t ddS)NzNo matching Modules to list) rverboserZ_get_full_inforprofileZ_get_info_profilesZ _get_infor;r r<r=r)rr>rrrr@ys z*ModuleCommand.InfoSubCommand.run_on_moduleN)r )r-r.r/rArrBr5r@rrrrInfoSubCommandosrFc@s(eZdZdZedZddZddZdS) zModuleCommand.EnableSubCommandenablezenable a module streamcCs$|jj}d|_d|_d|_d|_dS)NT)rr2r3r4 resolving root_user)rr2rrrr5s z(ModuleCommand.EnableSubCommand.configurecCsy|jj|jjWnltjjk r}zL|jjj rb|j s@|j rD||j rb|j dt jjjkrb|tjt|WYdd}~XnXdS)Nr)rrGrrr r< MarkingErrorsrconfstrictno_match_group_specserror_group_specsmodule_depsolv_errorsr7r r8!ModuleErrorType_ERROR_IN_DEFAULTSrerrorstr)rerrrr@s   z,ModuleCommand.EnableSubCommand.run_on_moduleN)rG)r-r.r/rArrBr5r@rrrrEnableSubCommandsrTc@s(eZdZdZedZddZddZdS) zModuleCommand.DisableSubCommanddisablez%disable a module with all its streamscCs$|jj}d|_d|_d|_d|_dS)NT)rr2r3r4rHrI)rr2rrrr5s z)ModuleCommand.DisableSubCommand.configurecCsy|jj|jjWnltjjk r}zL|jjj rb|j s@|j rD||j rb|j dt jjjkrb|tjt|WYdd}~XnXdS)Nr)rrUrrr r<rJrrKrLrMrNrOr7r r8rPrrQrR)rrSrrrr@s   z-ModuleCommand.DisableSubCommand.run_on_moduleN)rU)r-r.r/rArrBr5r@rrrrDisableSubCommandsrVc@s(eZdZdZedZddZddZdS) zModuleCommand.ResetSubCommandresetzreset a modulecCs$|jj}d|_d|_d|_d|_dS)NT)rr2r3r4rHrI)rr2rrrr5s z'ModuleCommand.ResetSubCommand.configurecCsby|jj|jjWnHtjjk r\}z(|jjj r>|j r>|t j t |WYdd}~XnXdS)N)rrWrrr r<rJrrKrLrMrrQrR)rrSrrrr@s z+ModuleCommand.ResetSubCommand.run_on_moduleN)rW)r-r.r/rArrBr5r@rrrrResetSubCommandsrXc@s(eZdZdZedZddZddZdS) zModuleCommand.InstallSubCommandinstallz/install a module profile including its 
packagescCs$|jj}d|_d|_d|_d|_dS)NT)rr2r3r4rHrI)rr2rrrr5s z)ModuleCommand.InstallSubCommand.configurecCspy|jj|jj|jjjWnNtjj k rj}z.|jjjrL|j sH|j rL|t j t|WYdd}~XnXdS)N)rrYrrrrKrLr r<rJrMrNrrQrR)rrSrrrr@s  z-ModuleCommand.InstallSubCommand.run_on_moduleN)rY)r-r.r/rArrBr5r@rrrrInstallSubCommandsrZc@s(eZdZdZedZddZddZdS) zModuleCommand.UpdateSubCommandr%z0update packages associated with an active streamcCs$|jj}d|_d|_d|_d|_dS)NT)rr2r3r4rHrI)rr2rrrr5s z(ModuleCommand.UpdateSubCommand.configurecCs&|jj|jj}|r"tdj|dS)Nz, )rZupgraderrrjoin)rZ module_specsrrrr@sz,ModuleCommand.UpdateSubCommand.run_on_moduleN)r%)r-r.r/rArrBr5r@rrrrUpdateSubCommandsr\c@s(eZdZd ZedZddZddZdS) zModuleCommand.RemoveSubCommandremoveerasez3remove installed module profiles and their packagescCs0|jj}d|_d|_d|_d|_d|_d|_dS)NTF)rr2Z allow_erasingr3Zfresh_metadatarHrIr4)rr2rrrr5sz(ModuleCommand.RemoveSubCommand.configurec Cs|jj|jj}|jjr|j}|j|t\}}|j|jj j |\}}|jj j j j|d}|jj j j j|d}xF|D]>}||krtdj|} tj| q|jjj||jjjdqW|sdStjtjj|ddS)N)rz0Package {} belongs to multiple modules, skipping)Z clean_deps)rM)rr]rrallr)r,rrr#ZgetModulePackagessackr$r:filtermrr!rr Zgoalr^rKZclean_requirements_on_removerQr r<rJ) rZskipped_groupsr&Zremove_names_from_specr'Z keep_namesZ remove_queryZ keep_querypkgr?rrrr@s&  z,ModuleCommand.RemoveSubCommand.run_on_moduleN)r]r^)r-r.r/rArrBr5r@rrrrRemoveSubCommands rcc@s(eZdZdZedZddZddZdS) z ModuleCommand.SwitchToSubCommand switch-toz7switch a module to a stream and distrosync rpm packagescCs.|jj}d|_d|_d|_d|_d|jj_dS)NT) rr2r3r4rHrIrrKZmodule_stream_switch)rr2rrrr5s z*ModuleCommand.SwitchToSubCommand.configurecCsry|jj|jj|jjjdWnNtjj k rl}z.|jjjrN|j sJ|j rN|t j t|WYdd}~XnXdS)N)rL)rZ switch_torrrrKrLr r<rJrMrNrrQrR)rrSrrrr@"s  z.ModuleCommand.SwitchToSubCommand.run_on_moduleN)rd)r-r.r/rArrBr5r@rrrrSwitchToSubCommandsrec@s(eZdZdZedZddZddZdS) z ModuleCommand.ProvidesSubCommandprovideszlist modular packagescCs|jj}d|_d|_dS)NT)rr2r3r4)rr2rrrr50sz*ModuleCommand.ProvidesSubCommand.configurecCs |jj|jj}|rt|dS)N)rZ_what_providesrrr;)rr>rrrr@5sz.ModuleCommand.ProvidesSubCommand.run_on_moduleN)rf)r-r.r/rArrBr5r@rrrrProvidesSubCommand+srgc@s(eZdZdZedZddZddZdS) z!ModuleCommand.RepoquerySubCommand repoqueryz#list packages belonging to a modulecCs|jj}d|_d|_dS)NT)rr2r3r4)rr2rrrr5?sz+ModuleCommand.RepoquerySubCommand.configurec Cst}x*|jjD]}|jj|\}}|j|qW|j|t\}}t}|jjs\|jj r|j j j jj |d}x|D]} |j t| qzW|jjr|j j j jj |d}x|D]} |j t| qWdjt|} t| dS)N)Z nevra_strict)r )rrrrrr%r, availabler:rr`r$rar+rRr[sortedr;) rr&rr(r'Znames_from_specZspec_artifactsZpackage_stringsr$rbr>rrrr@Ds"  z/ModuleCommand.RepoquerySubCommand.run_on_moduleN)rh)r-r.r/rArrBr5r@rrrrRepoquerySubCommand:srlr zInteract with Modules.cs>tt|jfdd|jD}d|_dd|D|_dS)Nc3s|]}|VqdS)Nr).0subcmd)rrr dsz)ModuleCommand.__init__..cSsi|]}|jD] }||qqSr)rA)rmrnaliasrrr fsz*ModuleCommand.__init__..)r rr SUBCMDSrn_subcmd_name2obj)rrZ subcmd_objs)r)rrr bs zModuleCommand.__init__cCs|j}|jdddtdd|jdddtdd|jd d dtd d|jd d dtdd|jdddtdd|jdddtddg}g}xHt|jdddD]2}|j|jd|jdj|jd|jpdqW|jdd|ddj |d|jd d!d"td#d$dS)%Nz --enabledr6 store_truezshow only enabled modules)destactionhelpz --disabledr9zshow only disabled modulesz --installedr:z'show only installed modules or packagesz --profilerEzshow profile contentz --availablerjzshow only available packagesz--allr_zremove all modular packagescSs |jdS)Nr)rA)xrrr~sz-ModuleCommand.set_argparser..)keyrz{}: {}rrnrzri)nargschoicesmetavarrwrz module-spec*zModule specification)r}r{rw) Zadd_mutually_exclusive_group 
add_argumentrrkrrappendrAr!rBr[)rparserZnarrowsZsubcommand_choicesZsubcommand_helprnrrr set_argparseris8       "  zModuleCommand.set_argparserc CsZy|j|jjd|_Wn(ttfk r@|jjjtYnX|j|j_|jjdS)Nr) rsrrnrKeyErrorrZ optparserZ print_usager5)rrrrr5s   zModuleCommand.configurecCs|j|jjdS)N)check_required_argumentrnr@)rrrrrunszModuleCommand.runcCsRdd|jD}|jjd|krN|jjsNttdjtjj |jj |jjddS)NcSsg|]}|jD]}|qqSr)rA)rmrnrprrr sz9ModuleCommand.check_required_argument..rz{} {} {}: too few arguments) SUBCMDS_NOT_REQUIRED_ARGrrnrrrr!r utilZ MAIN_PROGZcommand)rZnot_required_argumentrrrrs z%ModuleCommand.check_required_argument)r )r-r.r/rCommandr rCrFrTrVrXrZr\rcrergrlrrrrArrBr rr5rrr0rr)rrr%s.'"%   r)Z __future__rZdnf.clirrZdnf.i18nrZdnf.module.exceptionsrZdnf.utilrr sysosr*r7Zdnf.module.module_baseZdnf.exceptionsrrrrrrs    PK! D< 7cli/commands/__pycache__/reinstall.cpython-36.opt-1.pycnu[3 ft`]@slddlmZddlmZddlmZddlmZddlmZddl Z ddl Z e j dZ Gdd d ejZdS) )absolute_import)unicode_literals)commands) OptionParser)_Ndnfc@s8eZdZdZd ZedZeddZddZ d d Z d S) ReinstallCommandzSA class containing methods needed by the cli to execute the reinstall command. reinstallreizreinstall a packagecCs"|jddtdtjtdddS)Npackages+zPackage to reinstallZPACKAGE)nargshelpactionmetavar) add_argumentrrZParseSpecGroupFileCallback)parserr/usr/lib/python3.6/reinstall.py set_argparser(szReinstallCommand.set_argparsercCsH|jj}d|_d|_d|_d|_tj|j|j|j j sDtj |jdS)a Verify that conditions are met so that this command can run. These include that the program is being run by the root user, that there are enabled repositories with gpg keys, and that this command is called with appropriate arguments. TN) ZclidemandsZsack_activationZavailable_reposZ resolvingZ root_userrZ _checkGPGKeybaseopts filenamesZ_checkEnabledRepo)selfrrrr configure.szReinstallCommand.configurecCsd}xp|jj|jjd|jjjdD]P}y|jj|Wn6tjj k rlt j t d|jjj j|jYq"Xd}q"WxD|jjdd|jjDD]$}y|jj|Wntjjk r}zPx,|jD]"}t j t d|jj j|jPqWt j t d|jjj j|WYdd}~Xqtjjk r}z^xV|jD]L}d}|jjj|}|rdt d |}t d }t j ||jjj j||qLsz(ReinstallCommand.run..z(Package %s available, but not installed.z (from %s)z%Installed package %s%s not available.z!No packages marked for reinstall.)rZadd_remote_rpmsrroutputrZpackage_reinstallr exceptionsZ MarkingErrorloggerinforZtermZboldlocationZ pkg_specsZ grp_specsr ZPackagesNotInstalledErrorr nameZPackagesNotAvailableErrorhistoryZrepoError)rdoneZpkgZpkg_specerrZxmsgZpkgrepomsgrrrrun=sB $   "  zReinstallCommand.runN)r r ) __name__ __module__ __qualname____doc__aliasesrZsummary staticmethodrrr.rrrrr!s  r)Z __future__rrZdnf.clirZdnf.cli.option_parserrZdnf.i18nrZdnf.exceptionsrZloggingZ getLoggerr%ZCommandrrrrrs      PK!.% 1cli/commands/__pycache__/reinstall.cpython-36.pycnu[3 ft`]@slddlmZddlmZddlmZddlmZddlmZddl Z ddl Z e j dZ Gdd d ejZdS) )absolute_import)unicode_literals)commands) OptionParser)_Ndnfc@s8eZdZdZd ZedZeddZddZ d d Z d S) ReinstallCommandzSA class containing methods needed by the cli to execute the reinstall command. reinstallreizreinstall a packagecCs"|jddtdtjtdddS)Npackages+zPackage to reinstallZPACKAGE)nargshelpactionmetavar) add_argumentrrZParseSpecGroupFileCallback)parserr/usr/lib/python3.6/reinstall.py set_argparser(szReinstallCommand.set_argparsercCsH|jj}d|_d|_d|_d|_tj|j|j|j j sDtj |jdS)a Verify that conditions are met so that this command can run. These include that the program is being run by the root user, that there are enabled repositories with gpg keys, and that this command is called with appropriate arguments. 
TN) ZclidemandsZsack_activationZavailable_reposZ resolvingZ root_userrZ _checkGPGKeybaseopts filenamesZ_checkEnabledRepo)selfrrrr configure.szReinstallCommand.configurecCsd}xp|jj|jjd|jjjdD]P}y|jj|Wn6tjj k rlt j t d|jjj j|jYq"Xd}q"WxR|jjdd|jjDD]2}y|jj|Wntjjk r}zPx,|jD]"}t j t d|jj j|jPqWt j t d|jjj j|WYdd}~Xqtjjk r}z^xV|jD]L}d}|jjj|}|rdt d |}t d }t j ||jjj j||qLsz(ReinstallCommand.run..z(Package %s available, but not installed.z (from %s)z%Installed package %s%s not available.z+Only the above marking errors are expected.z!No packages marked for reinstall.)rZadd_remote_rpmsrroutputrZpackage_reinstallr exceptionsZ MarkingErrorloggerinforZtermZboldlocationZ pkg_specsZ grp_specsr ZPackagesNotInstalledErrorr nameZPackagesNotAvailableErrorhistoryZrepoAssertionErrorError)rdoneZpkgZpkg_specerrZxmsgZpkgrepomsgrrrrun=sB $   "  zReinstallCommand.runN)r r ) __name__ __module__ __qualname____doc__aliasesrZsummary staticmethodrrr/rrrrr!s  r)Z __future__rrZdnf.clirZdnf.cli.option_parserrZdnf.i18nrZdnf.exceptionsrZloggingZ getLoggerr%ZCommandrrrrrs      PK!ϷAQQ4cli/commands/__pycache__/remove.cpython-36.opt-1.pycnu[3 ft`@sddlmZddlmZddlmZddlmZddlmZddl Z ddl Z ddl Z ddl Z ddlZejdZGdd d ejZdS) )absolute_import)unicode_literals)commands)_) OptionParserNdnfc@sbeZdZdZejejejejejejdZde ej Z e dZ eddZd d Zd d Zd S) RemoveCommandzRemove command.)zremove-nz remove-naz remove-nevrazerase-nzerase-naz erase-nevraremoveerasermz-remove a package or packages from your systemcCsf|j}|jdddtdd|jddtjd|jddtd d|jd d td tjtd ddS)Nz --duplicates store_true duplicatedzremove duplicated packages)actiondesthelpz --duplicated)rrz--oldinstallonlyz*remove installonly packages over the limitZpackages*zPackage to removeZPACKAGE)nargsrrmetavar)Zadd_mutually_exclusive_group add_argumentrargparseZSUPPRESSrZParseSpecGroupFileCallback)parserZmgroupr/usr/lib/python3.6/remove.py set_argparser0s   zRemoveCommand.set_argparsercCs^|jj}d|_d|_d|_|jjr*d|_n0tj j rN|jj rNd|_d|_ d|_ n d|_ d|_dS)NTF)ZclidemandsZ resolvingZ root_userZsack_activationoptsr Zavailable_reposrbase WITH_MODULES grp_specsZfresh_metadataZ allow_erasing)selfrrrr configure?szRemoveCommand.configurecCs\g}|jj|jkr"|j|jjg}|jj|jj7_d}|jjrD|jjj}|jj |j }|jj |}|st j jtdx|jjD]\\}}}t|dkrq|jddy|jjt|dWnHt j jk rd} td} tj| |jjjjt|d| YnXx"|d dD]} |jj| q&WqWdS|jjr|jjj}|jj |j jd}|jjj} | dk r|j | j!| j"| j#d } | r|j | }|rx,|D]} |jj| qWnt j jtd dS|jj$r*|r*x|jj$D]&}td } tj| |jjjj|qWn|jj$rt jj%rxt j&j'j(|j}|j)|jj$}t|jj$t|krd}n|jj$}|rxB|D]:}y|jj*|grd}Wnt j jk rYnXqWxx|jjD]l}y|jj)||d WnLt j j+k r8}z*dj,|j-|jjjj|} tj.| WYdd}~XnXd}qW|sXtjtddS)NFz)No duplicated packages found for removal.T)reverserz%Installed package %s%s not available.)epochversionreleasez.No old installonly packages found for removal.zNot a valid form: %s)formsz{}: {}zNo packages marked for removal.)/rZcommand nevra_formsZ pkg_specs filenamesr rZsackZqueryZ_get_installonly_queryZ installed differencer exceptionsErrorrZ_na_dictitemslensortZ reinstallstrZPackagesNotAvailableErrorloggerZwarningoutputZtermZboldZpackage_removeZoldinstallonlyZlatestZget_running_kernelfilterr%r&r'rrmodule module_baseZ ModuleBaser Zenv_group_removeZ MarkingErrorformatvalueinfo)rr(doneqZinstonlyZdupsnameZarchZ pkgs_listZxmsgmsgZpkgZkernelZrunning_installonlyZgrp_specr7Z skipped_grpsgroupZpkg_specerrrrunPs    (             zRemoveCommand.runN)r r r )__name__ __module__ __qualname____doc__hawkeyZ FORM_NAMEZFORM_NAZ FORM_NEVRAr*tuplekeysaliasesrZsummary staticmethodrr rArrrrr#s  r)Z 
__future__rrZdnf.clirZdnf.i18nrZdnf.cli.option_parserrZdnf.baserrrFZdnf.exceptionsZloggingZ getLoggerr3ZCommandrrrrrs      PK!ϷAQQ.cli/commands/__pycache__/remove.cpython-36.pycnu[3 ft`@sddlmZddlmZddlmZddlmZddlmZddl Z ddl Z ddl Z ddl Z ddlZejdZGdd d ejZdS) )absolute_import)unicode_literals)commands)_) OptionParserNdnfc@sbeZdZdZejejejejejejdZde ej Z e dZ eddZd d Zd d Zd S) RemoveCommandzRemove command.)zremove-nz remove-naz remove-nevrazerase-nzerase-naz erase-nevraremoveerasermz-remove a package or packages from your systemcCsf|j}|jdddtdd|jddtjd|jddtd d|jd d td tjtd ddS)Nz --duplicates store_true duplicatedzremove duplicated packages)actiondesthelpz --duplicated)rrz--oldinstallonlyz*remove installonly packages over the limitZpackages*zPackage to removeZPACKAGE)nargsrrmetavar)Zadd_mutually_exclusive_group add_argumentrargparseZSUPPRESSrZParseSpecGroupFileCallback)parserZmgroupr/usr/lib/python3.6/remove.py set_argparser0s   zRemoveCommand.set_argparsercCs^|jj}d|_d|_d|_|jjr*d|_n0tj j rN|jj rNd|_d|_ d|_ n d|_ d|_dS)NTF)ZclidemandsZ resolvingZ root_userZsack_activationoptsr Zavailable_reposrbase WITH_MODULES grp_specsZfresh_metadataZ allow_erasing)selfrrrr configure?szRemoveCommand.configurecCs\g}|jj|jkr"|j|jjg}|jj|jj7_d}|jjrD|jjj}|jj |j }|jj |}|st j jtdx|jjD]\\}}}t|dkrq|jddy|jjt|dWnHt j jk rd} td} tj| |jjjjt|d| YnXx"|d dD]} |jj| q&WqWdS|jjr|jjj}|jj |j jd}|jjj} | dk r|j | j!| j"| j#d } | r|j | }|rx,|D]} |jj| qWnt j jtd dS|jj$r*|r*x|jj$D]&}td } tj| |jjjj|qWn|jj$rt jj%rxt j&j'j(|j}|j)|jj$}t|jj$t|krd}n|jj$}|rxB|D]:}y|jj*|grd}Wnt j jk rYnXqWxx|jjD]l}y|jj)||d WnLt j j+k r8}z*dj,|j-|jjjj|} tj.| WYdd}~XnXd}qW|sXtjtddS)NFz)No duplicated packages found for removal.T)reverserz%Installed package %s%s not available.)epochversionreleasez.No old installonly packages found for removal.zNot a valid form: %s)formsz{}: {}zNo packages marked for removal.)/rZcommand nevra_formsZ pkg_specs filenamesr rZsackZqueryZ_get_installonly_queryZ installed differencer exceptionsErrorrZ_na_dictitemslensortZ reinstallstrZPackagesNotAvailableErrorloggerZwarningoutputZtermZboldZpackage_removeZoldinstallonlyZlatestZget_running_kernelfilterr%r&r'rrmodule module_baseZ ModuleBaser Zenv_group_removeZ MarkingErrorformatvalueinfo)rr(doneqZinstonlyZdupsnameZarchZ pkgs_listZxmsgmsgZpkgZkernelZrunning_installonlyZgrp_specr7Z skipped_grpsgroupZpkg_specerrrrunPs    (             zRemoveCommand.runN)r r r )__name__ __module__ __qualname____doc__hawkeyZ FORM_NAMEZFORM_NAZ FORM_NEVRAr*tuplekeysaliasesrZsummary staticmethodrr rArrrrr#s  r)Z __future__rrZdnf.clirZdnf.i18nrZdnf.cli.option_parserrZdnf.baserrrFZdnf.exceptionsZloggingZ getLoggerr3ZCommandrrrrrs      PK!96cli/commands/__pycache__/repolist.cpython-36.opt-1.pycnu[3 ft`z2@sddlmZddlmZddlmZddlmZmZmZm Z ddl m Z ddl Z ddlZ ddlZ ddlZddlZddlZddlZejdZdd Zd d Zd d ZddZGdddejZdS))absolute_import)unicode_literals)commands)_ucdfill_exact_width exact_width) OptionParserNdnfcCsd|rtjj|jjntd}|jdkr4td|S|jsFtd|St|j}td||fSdS)NunknownzNever (last: %s)zInstant (last: %s)z%s second(s) (last: %s))r utilnormalize_time_repo getTimestamprZmetadata_expire _num2ui_num)repomdZlastnumr/usr/lib/python3.6/repolist.py _expire_str%s    rcCsttjjd|dS)Nz%dT)rr Zpycompformat)rrrrr0srcCsF|jj}|jj}x,|D]$}tj||r.dStj||rdSqWdS)NTF)idlowernamefnmatch)rZpatternsridZrnmZpatrrr _repo_match4s     rcCs>d}x*|jtjdj|jdD]}||j7}qWtjjj |S)Nr)flags) reponame__eq) queryhawkeyIGNORE_EXCLUDESfiltermrZ_sizer clirZ format_number)sackrretZpkgrrr _repo_size?sr)c@s@eZdZdZdZedZeddZddZ d d Z d d Z d S)RepoListCommandzVA class containing methods needed 
by the cli to execute the repolist command. repolistrepoinfoz,display the configured software repositoriesc Csz|j}|jdddddtdd|jddddtd d |jd ddd td d |jddddddd gtjtdddS)Nz--all _repos_action store_constallzshow all repos)destactionconstdefaulthelpz --enabledenabledzshow enabled repos (default))r0r1r2r4z --disableddisabledzshow disabled reposrepos*zenabled-defaultZ REPOSITORYzRepository specification)nargsr3metavarchoicesr1r4)Zadd_mutually_exclusive_group add_argumentrr ZPkgNarrowCallback)parserZ repolimitrrr set_argparserNs    zRepoListCommand.set_argparsercCs |jjs|jjtjtjddS)N)stdoutstderr)optsquietr&Zredirect_loggerloggingZWARNINGINFO)selfrrr pre_configure_szRepoListCommand.pre_configurecCsT|jjs|jj|jj}|jjjs0|jjdkrpsz'RepoListCommand.run..r)keyZgreenZboldZredZnormalrzNo repositories availabler/zenabled-defaultTr6r,r5FzRepo-id : zRepo-name : zRepo-status : zRepo-revision : zRepo-tags : z, cSsi|]\}}||qSrr)rNkvrrr sz'RepoListCommand.run..zRepo-distro-tags : z[%s]: %s)r )r!zRepo-updated : zRepo-pkgs : zRepo-available-pkgs: zRepo-size : zRepo-metalink : z Updated : zRepo-mirrors : zRepo-baseurl : z %s (%d more)r zRepo-expire : zRepo-exclude : zRepo-include : zRepo-excluded : zRepo-filename :  z zrepo idZstatusz repo namez%s %sz%s %s %szTotal packages: {})rRrRrR)=rArLr7rHrIrJlistvaluessortoperator attrgetteroutputtermZFG_COLORZMODEloggerZwarningrlenrr5rKrr)r'rrappendrZmetadataZ fmtKeyValFillrZ getRevisionZgetContentTagsjoinsortedZ getDistroTagsitemsr"r#r$r%rr rrZgetMaxTimestampZmetalinkrZ mirrorlistZbaseurlZ getMirrorsrZ excludepkgsZ includepkgsZrepofilemapprintcolumnsrr)-rEargZextcmdsrJr7r^Z on_ehibegZ on_dhibegZon_hiendZtot_numZcolsZinclude_statusZrepoinfo_outputrZehibegZdhibegZhiendZ ui_enabledZ ui_endis_widZui_excludes_numr5Zui_sizerroutZtagsZdistroTagsDictZdistrorZ num_availableZui_numZui_num_availableZtsZbaseurlsZmirrorsZurlZexpireZid_lenZnm_lenZst_lenZrnameleftZtxt_ridZtxt_rnammsgrrrrunns.          "               zRepoListCommand.runN)r+r,) __name__ __module__ __qualname____doc__aliasesrZsummary staticmethodr>rFrMrlrrrrr*Fs  r*)Z __future__rrZdnf.clirZdnf.i18nrrrrZdnf.cli.option_parserr Zdnf.cli.formatr Z dnf.pycompZdnf.utilrr#rCr[Z getLoggerr_rrrr)ZCommandr*rrrrs"       PK!90cli/commands/__pycache__/repolist.cpython-36.pycnu[3 ft`z2@sddlmZddlmZddlmZddlmZmZmZm Z ddl m Z ddl Z ddlZ ddlZ ddlZddlZddlZddlZejdZdd Zd d Zd d ZddZGdddejZdS))absolute_import)unicode_literals)commands)_ucdfill_exact_width exact_width) OptionParserNdnfcCsd|rtjj|jjntd}|jdkr4td|S|jsFtd|St|j}td||fSdS)NunknownzNever (last: %s)zInstant (last: %s)z%s second(s) (last: %s))r utilnormalize_time_repo getTimestamprZmetadata_expire _num2ui_num)repomdZlastnumr/usr/lib/python3.6/repolist.py _expire_str%s    rcCsttjjd|dS)Nz%dT)rr Zpycompformat)rrrrr0srcCsF|jj}|jj}x,|D]$}tj||r.dStj||rdSqWdS)NTF)idlowernamefnmatch)rZpatternsridZrnmZpatrrr _repo_match4s     rcCs>d}x*|jtjdj|jdD]}||j7}qWtjjj |S)Nr)flags) reponame__eq) queryhawkeyIGNORE_EXCLUDESfiltermrZ_sizer clirZ format_number)sackrretZpkgrrr _repo_size?sr)c@s@eZdZdZdZedZeddZddZ d d Z d d Z d S)RepoListCommandzVA class containing methods needed by the cli to execute the repolist command. 
repolistrepoinfoz,display the configured software repositoriesc Csz|j}|jdddddtdd|jddddtd d |jd ddd td d |jddddddd gtjtdddS)Nz--all _repos_action store_constallzshow all repos)destactionconstdefaulthelpz --enabledenabledzshow enabled repos (default))r0r1r2r4z --disableddisabledzshow disabled reposrepos*zenabled-defaultZ REPOSITORYzRepository specification)nargsr3metavarchoicesr1r4)Zadd_mutually_exclusive_group add_argumentrr ZPkgNarrowCallback)parserZ repolimitrrr set_argparserNs    zRepoListCommand.set_argparsercCs |jjs|jjtjtjddS)N)stdoutstderr)optsquietr&Zredirect_loggerloggingZWARNINGINFO)selfrrr pre_configure_szRepoListCommand.pre_configurecCsT|jjs|jj|jj}|jjjs0|jjdkrpsz'RepoListCommand.run..r)keyZgreenZboldZredZnormalrzNo repositories availabler/zenabled-defaultTr6r,r5FzRepo-id : zRepo-name : zRepo-status : zRepo-revision : zRepo-tags : z, cSsi|]\}}||qSrr)rNkvrrr sz'RepoListCommand.run..zRepo-distro-tags : z[%s]: %s)r )r!zRepo-updated : zRepo-pkgs : zRepo-available-pkgs: zRepo-size : zRepo-metalink : z Updated : zRepo-mirrors : zRepo-baseurl : z %s (%d more)r zRepo-expire : zRepo-exclude : zRepo-include : zRepo-excluded : zRepo-filename :  z zrepo idZstatusz repo namez%s %sz%s %s %szTotal packages: {})rRrRrR)=rArLr7rHrIrJlistvaluessortoperator attrgetteroutputtermZFG_COLORZMODEloggerZwarningrlenrr5rKrr)r'rrappendrZmetadataZ fmtKeyValFillrZ getRevisionZgetContentTagsjoinsortedZ getDistroTagsitemsr"r#r$r%rr rrZgetMaxTimestampZmetalinkrZ mirrorlistZbaseurlZ getMirrorsrZ excludepkgsZ includepkgsZrepofilemapprintcolumnsrr)-rEargZextcmdsrJr7r^Z on_ehibegZ on_dhibegZon_hiendZtot_numZcolsZinclude_statusZrepoinfo_outputrZehibegZdhibegZhiendZ ui_enabledZ ui_endis_widZui_excludes_numr5Zui_sizerroutZtagsZdistroTagsDictZdistrorZ num_availableZui_numZui_num_availableZtsZbaseurlsZmirrorsZurlZexpireZid_lenZnm_lenZst_lenZrnameleftZtxt_ridZtxt_rnammsgrrrrunns.          
"               zRepoListCommand.runN)r+r,) __name__ __module__ __qualname____doc__aliasesrZsummary staticmethodr>rFrMrlrrrrr*Fs  r*)Z __future__rrZdnf.clirZdnf.i18nrrrrZdnf.cli.option_parserr Zdnf.cli.formatr Z dnf.pycompZdnf.utilrr#rCr[Z getLoggerr_rrrr)ZCommandr*rrrrs"       PK!N WW7cli/commands/__pycache__/repoquery.cpython-36.opt-1.pycnu[3 ft`ن @sddlmZddlmZddlmZddlmZddlmZddlm Z ddl Z ddl Z ddl Z ddl Z ddlZddlZddlZddlZddlZddlZddlZe jdZd Ze jd Zd Zd d dddddddd ZddZGddde jZGdddejZGddde Z!dS))absolute_import)print_function)unicode_literals)_)commands) OptionParserNdnfz.%{name}-%{epoch}:%{version}-%{release}.%{arch}z%(-?\d*?){([:.\w]+?)}a name, arch, epoch, version, release, reponame (repoid), from_repo, evr, debug_name, source_name, source_debug_name, installtime, buildtime, size, downloadsize, installsize, provides, requires, obsoletes, conflicts, sourcerpm, description, summary, license, url, reason conflictsenhances obsoletesprovides recommendsrequiresZ requires_presuggests supplements) r r r r r rz requires-prerrcCsdd}dd}|jddjdd}x tjD]\}}|j||}q.Wd }d }x>tj|D]0}|||||j7}|||7}|j}qZW||||d 7}|S) z:Convert a rpm like QUERYFMT to an python .format() string.cSs^|jd}|jd}|rJ|ddkr:d|dd}nd|}d|}d|j|dS) Nr-><:z{0.})groupslower)ZmatchobjZfillkeyr/usr/lib/python3.6/repoquery.pyfmt_replDs   zrpm2py_format..fmt_replcSs|jddjddS)N{z{{rz}})replace)ZtxtrrrbracketsOszrpm2py_format..bracketsz\n z\t rN)r OPTS_MAPPINGitems QFORMAT_MATCHfinditerstartend) queryformatrrrvalueZfmtspositemrrr rpm2py_formatBs   r-c@seZdZdZdS)_CommaSplitCallbackz\s*,\s*N)__name__ __module__ __qualname__ZSPLITTERrrrrr._sr.c@seZdZdZejejejdZd%e ej Z e dZ eddZedd Zd d Zd d ZddZddZd&ddZd'ddZd(ddZddZddZddZd d!Zd*d#d$ZdS)+RepoQueryCommandzSA class containing methods needed by the cli to execute the repoquery command. 
)z repoquery-nz repoquery-nazrepoquery-nevra repoqueryrqz$search for packages matching keywordcCs,|jr|j|jd|jr(|j|jd|S)z'Filter query by repoid and arch options)Zreponame)arch)Zrepofiltermarches)optsqueryrrrfilter_repo_archms z!RepoQueryCommand.filter_repo_archc Cs|jddddtdd|jddtdd |jd d d gtd tdd|jddddtdd|jdgtdtdd|jdgtdtdd|jdgtdtdd|jdgtdtdd|jd gtdtd!d|jd"gtdtd#d|jd$gtdtd%d|jd&gtdtd'd|jd(gtdtd)d|j}|jd*dtd+d |jd,dtd-d |jd.dtd/d |jd0dtd1d |jd2dtd3d |jd4dtd5d |jd6dtd7d |jd8d9ttd:d;|jdd?d@dAdtdBdC|jdDdEdFdAdtdGdC|jdHdIdJdAdtdKdC|jdLdMdAdtdNdC|jdOdPdQttdRdS|jdTdtdUd |jdVdQtdWtdXdY|jdZdQd[dWtd\dY|jd]dQd^dWtd_dY|jd`dtdad |j}|jdbdcdddWtdedY|jdfdcdddWtjdY|jdgdcdhdWtdidY|jdjdcdkdWtdldY|jdmdtdnd |j}tdotdptdqtdrtdstdttdutdvtdwdx }x2|jD]&\}}dy|}|j|dzdW||d{qW|jd|dtd}d td~tdtdtdj t j j dtdd} |j} x2| jD]&\} } dy| } | j| ddW| | d{q4W| jdddWdtjd{|jddtdd |jdddtdddS)Nz-az--allZqueryall store_truezNQuery all packages (shorthand for repoquery '*' or repoquery without argument))destactionhelpz--show-duplicatesz(Query all versions of packages (default))r=r>z--archz --archlistr7z[arch]z show only results from this ARCH)r<defaultr=metavarr>z-fz--fileFILE+z show only results that owns FILE)r@nargsr>z--whatconflictsZREQz#show only results that conflict REQ)r?r=r@r>z --whatdependszishows results that requires, suggests, supplements, enhances,or recommends package provides and files REQz--whatobsoletesz#show only results that obsolete REQz--whatprovidesz"show only results that provide REQz--whatrequiresz:shows results that requires package provides and files REQz--whatrecommendsz$show only results that recommend REQz--whatenhancesz"show only results that enhance REQz--whatsuggestsz"show only results that suggest REQz--whatsupplementsz%show only results that supplement REQz --alldepsz=check non-explicit dependencies (files and Provides); defaultz --exactdepsz:check dependencies exactly as given, opposite of --alldepsz --recursivezOused with --whatrequires, and --requires --resolve, query packages recursively.z --deplistz>show a list of all dependencies and what packages provide themz --resolvez.resolve capabilities to originating package(s)z--treez"show recursive tree for package(s)z--srpmz#operate on corresponding source RPMz--latest-limit latest_limitzOshow N latest packages for a given name.arch (or latest but N if N is negative))r<typer>z--disable-modular-filteringz-list also packages of inactive module streamsz-iz--info queryinfoFz+show detailed information about the package)r<r?r=r>z-lz--list queryfilelistz!show list of files in the packagez-sz--sourcequerysourcerpmzshow package source RPM namez --changelogsquerychangelogszshow changelogs of the packagez--qfz --queryformatr)zfdisplay format for listing packages: "%%{name} %%{version} ...", use --querytags to view full tag list)r<r?r>z --querytagsz-show available tags to use with --queryformatz--nevra store_constzZuse name-epoch:version-release.architecture format for displaying found packages (default))r<constr=r>z--nvrz%{name}-%{version}-%{release}zQuse name-version-release format for displaying found packages (rpm query default)z--envraz.%{epoch}:%{name}-%{version}-%{release}.%{arch}zPuse epoch:name-version-release.architecture format for displaying found packagesz --groupmemberz=Display in which comps groups are presented selected packagesz --duplicates pkgfilter duplicatedz/limit the query to installed duplicate packagesz --duplicatedz --installonly installonlyz1limit the query to installed installonly packagesz --unsatisfied unsatisfiedzClimit the query to 
installed packages with unsatisfied dependenciesz --locationz5show a location from where packages can be downloadedz5Display capabilities that the package conflicts with.zaDisplay capabilities that the package can depend on, enhance, recommend, suggest, and supplement.z2Display capabilities that the package can enhance.z-Display capabilities provided by the package.z1Display capabilities that the package recommends.z1Display capabilities that the package depends on.zIf the package is not installed display capabilities that it depends on for running %%pre and %%post scriptlets. If the package is installed display capabilities that is depends for %%pre, %%post, %%preun and %%postun.z/Display capabilities that the package suggests.z5Display capabilities that the package can supplement.) r dependsr r r rz requires-prerrz--%s packageatr)r<r=rKr>z --availablez Display only available packages.z Display only installed packages.zLDisplay only packages that are not present in any of available repositories.zQDisplay only packages that provide an upgrade for some already installed package.zIDisplay only packages that can be removed by "{prog} autoremove" command.)progz2Display only packages that were installed by user.) installedZextrasZupgradesunneeded userinstalledlistz --autoremoverTz--recentz%Display only recently edited packagesr*ZKEYzthe key to search for)rCr@r>) add_argumentrr.Zadd_mutually_exclusive_groupintQFORMAT_DEFAULTargparseZSUPPRESSr$formatrutil MAIN_PROG)parserZwhatrequiresformZoutformrLZpackage_attributeZ help_msgsargZhelp_msgnameZ help_listZ list_groupZlist_argZhelp_argZswitchrrr set_argparservs                                                                 zRepoQueryCommand.set_argparsercCs |jjs|jjtjtjddS)N)stdoutstderr)r8quietcliZredirect_loggerloggingZWARNINGINFO)selfrrr pre_configureszRepoQueryCommand.pre_configurecCsj|jjs|jj|jj}|jjrJ|jjrB|jjdd|jjnd|j_|jjrVdS|jj rx|jj rxt jj t d|jj r|jjr|jjddt|jj|jjdko|jj gst jj t d|jjs|jjr|jjp|jjst jj t d j|jjrd nd|jjr$|jjj|jjdkr@|jjd ksJ|jjrPd|_d|_|jjrfd|_dS)Nz --obsoletesz--r zOption '--resolve' has to be used together with one of the '--conflicts', '--depends', '--enhances', '--provides', '--recommends', '--requires', '--requires-pre', '--suggests' or '--supplements' optionsz --recursivez --exactdepsrzOption '--recursive' has to be used with '--whatrequires ' (optionally with '--alldeps', but not with '--exactdeps'), or with '--requires --resolve'z;argument {} requires --whatrequires or --whatdepends optionz --alldepsrSrUrNT)rSrU)r8rerfZredirect_repo_progressdemandsr rQZ_option_conflict querytagsresolverZCliErrorr recursive exactdepsany whatrequiresalldeps whatdependsr\srpmbaseZreposZenable_source_reposrVrL availableZavailable_reposZsack_activationrI changelogs)rirkrrr configures@      zRepoQueryCommand.configurec Cs|jrpg}|jdt|xH|jD]>}|d}|jd|jdtjj|dtjj|dfq$Wdj|Syht |}|j r|j j j |S|jr|j}|sttdj|tjd |S|jr|jSt|jj|SWn4tk r }ztjjt|WYdd}~XnXdS) NzChangelog for %s timestampz * %s %s %s z %a %b %d %YZauthortextr zPackage {} contains no files)file)rIappendstrrwstrftimeri18nucdjoinPackageWrapperrFruoutputZ infoOutputrGfilesprintrr\sysrdrHZ sourcerpmr-r)AttributeError exceptionsError) rir8pkgoutZchlogdtZpoZfilelisterrrbuild_format_fnGs.  
z RepoQueryCommand.build_format_fncCsN|jjjjdd}x4|D],}|j|jtjj|j |jjddd}qW|S)NT)emptyF) with_providesZwith_filenames) rusackr9r6union intersectionrsubjectSubjectget_best_query)riZnevrasZ base_queryresolved_nevras_queryZnevrarrr_resolve_nevrascs  z RepoQueryCommand._resolve_nevrasNcCsD|r|n|}|j|d}|j|}|j|}|r@|j|||d}|S)N)r)done)filter differencer_do_recursive_deps)riquery_in query_selectrZquery_requiredrrrrps    z#RepoQueryCommand._do_recursive_depsFcCs|j||}|j|d}|j|j|d}|r|j|j|d}|j|j|d}|j|j|d}|j|j|d}|j|j|d}|j|j|d}|j|j|d }|j|j|d }|jjr|j||}|S) N)requires__glob)r)recommends__glob)enhances__glob)supplements__glob)suggests__glob)r )r )r)r)rrrr8rnr)rinamesr9Z all_dep_typesrZdepqueryrrr by_all_deps}s   zRepoQueryCommand.by_all_depscCs|r|n|jjjjdd}|jjjjdd}x$|jD]}|j|j|jd}q:W|j|}|rz|j |||j|d}|j|S)NT)r)r )r) rurr9r6runrrrr_get_recursive_providers_query)rir providersrtrrrrrrs z/RepoQueryCommand._get_recursive_providers_querycCsxg}g}xN|jjD]B}tjjj|d}|jdr>|j|q|r|d kr|j|qW|rt|jj|d|jj j d}|S) Nrz.rpmhttpftpr{httpsF)strictprogress)rrr{r) r8rrZpycompZurlparseendswithr|ruZadd_remote_rpmsrr)riZrpmnamesremote_packagesrZschemesrrr_add_add_remote_packagess   z)RepoQueryCommand._add_add_remote_packagesc Cs|jjrttdS|jj|j|jjj|jj r8t j nt j d}|jj r|j}i}|jj|jkrx|j|jjg|d<g}|jdd}|r|j|jjjj|d}x>|jj D]2}|jtjj|ddj|jjfd|d|}qW|}|jjr|j|jjj}|jjrX|jjr|jjd krt|jjjtjj t!d j"d d |jjnH|jjd krx|j#|jj$j%}n(|jjr|jjdkrt&||jj}|jj'dkr|jj(|}|j)|j*}n|jj'dkr|jj(|}n|jj'dkrVtjj+|j}|j,|jjj-|jjj.tj/j0|} d| _1| j2dd} | sRttj3j4| j5dS|jjsh|j}|j6|j|}|} |jj7r|j|jj7d|jj8r|j|jj8d} | j|j|j9|jj8|d}|jj:r|j|jj:d|jj;r|j|jj;d} | r | }n|j|jj;d|jj<rR|jj=rB|j|jj|jj<|}|jj?r|jj=r|j|jj?d}|j|j|jj?d}|j|j|jj?d}|j|j|jj?d}|j|j|jj?d}n|j>|jj?|d}|jj@r|j|jj@d} | j|j|j9|jj@|d}|jjArR|j|jjAd} | j|j|j9|jjA|d}|jjBr|j|jjBd} | j|j|j9|jjB|d}|jjCr|j|jjCd} | j|j|j9|jjC|d }|jjDr|jE|jjD}|jjF|dd!}|jjGrRg}xD|D]<}|jH}|dk r|jjjj||jId"d#}||j27}qW|jjjj|d}|jjJr|jj< r|jjKd9krtjj t!d,j"tj3jLd-|jM|| |jdStN}|jjKrtN} x||j2D]p}|jjdks|jj$jO|r|jjKd.kr| jP|jQ|jR|jS|jT|jUn| jPt&|tV|jjKqW|jjWr|jjd krj|j6|j|jjj}n|j6|j|jjjj}|j| d/}|jjXr|j|jY||}tN}x@|jEj2D]}|jZ|j[|j|qWn|jPd0d1| Dn|jj\r6x.|j2D]"}|j]}|dk r |jZ|q Wnv|jj^rNg}xt_tN|j2D]}|jjdksx|jj$jO|rVg}|j`d2ta|xt_d3d4|jQDD]x}|j`d5|tjj|}|j|jj}|j6|j|j}|jjbs|jE}x$|j2D]}|j`d6ta|qWqW|j`d7jc|qVW|rJtd8jc|dS|jjdrf|je|dSxD|j2D]8}|jjdks|jj$jO|rp|jZ|j[|j|qpW|r|jjfrtd8jct_|ntd7jct_|dS):N)flagsZformsT)r)r)Z ignore_caseF)rr9rSz)argument {}: not allowed with argument {}z --availablez--rTrUrMrNrO)Zverify)Z file__glob)Zconflicts__glob)r )r )Zprovides__glob)r)r)r)r)r)r )r )r)r)Zwarningsrc)raevrr5r r r r r rrrzNo valid switch specified usage: {prog} repoquery [--conflicts|--enhances|--obsoletes|--provides|--recommends|--requires|--suggest|--supplements|--whatrequires] [key] [--tree] description: For the given packages print a tree of thepackages.)rRrP)r css|]}t|VqdS)N)r}).0Zrelrrr Qsz'RepoQueryCommand.run..z package: cSsg|] }t|qSr)r})rreqrrr ]sz(RepoQueryCommand.run..z dependency: z provider: r z )r r r r r rrr)gr8rlr QUERY_TAGSrfZ _populate_update_security_filterrurr9Zdisable_modular_filteringhawkeyZIGNORE_MODULAR_EXCLUDESZAPPLY_EXCLUDESrrZcommand nevra_formsrrr6rrrrZrecentZ_recentZconfrvrVZ optparserZ print_usagerrrr\Z _unneededhistoryZswdbgetattrrLZ_get_installonly_queryrrMZ rpmdb_sackZ _configureZinstallonlypkgsZinstallonly_limitgoalZGoalZprotect_running_kernelrr]Z_format_resolve_problemsZ problem_rulesr:r{Z whatconflictsrZ whatobsoletesZ whatprovidesrqrorrsZwhatrecommendsZ whatenhancesZwhatsupplementsZ whatsuggestsrDZlatestZ_merge_update_filtersrtZ source_namerZtreerQr^ 
tree_seedsetZuser_installedupdaterr rrr r#rmrnraddrlocationZremote_locationZdeplistsortedr|r}verboserZ groupmember_group_member_reportrF)riqrZkwarkZpkgsZ query_resultsrrNZrpmdbrZsolvedZorqueryZrelsZquery_for_provideZ dependsqueryZpkg_listrZsrcnameZ tmp_queryr9rrZdeplist_outputrrZproviderrrrrsH                           "           zRepoQueryCommand.runc Cs&i}x.|jjjD] }tdd|jD||j<qWi}g}xr|jD]f}g}x(|jD]\}} |j| krX|j |qXW|r|j dj t |gj t |qF|j t |qFWg} xDt |jD]4\} } | j dj t | t dd| jdDqW| j dj t || r"tdj | dS)NcSsg|] }|jqSr)ra)rrrrrr}sz9RepoQueryCommand._group_member_report..$r cSsg|] }d|qS)z @r)ridrrrrs)rucompsrrZ packages_iterrrr$rar| setdefaultrrr}splitr) rir9Zpackage_conf_dictgroupZgroup_package_dictZpkg_not_in_grouprZ group_id_listZgroup_idZpackage_name_setrrZ package_listrrrrzs*  ,z%RepoQueryCommand._group_member_reportc Cs|j||}|d kr t|dSd}xtd|D] }|d7}q0Wg}x|jD]}|jt|qLWdtt|ddj|d} t|d |d| dS) Nr rz| [z: z, ]z\_ )rrrangerr|r}lenr) rilevelrr8Z pkg_stringZspacingxrZ requirepkgZreqstrrrr grow_trees   "zRepoQueryCommand.grow_treerc Cs8x0tt|jdddD]}|dks2|d kr8tn|}|jjdsT|jjdrXdS|j|||||kr|j||jrt||j}i}xFt|D]:} |j j j j | d} x | D]} | || jd| j <qWqW|j j j j t|jd } n&|jr |j|jf|n |j|jd } |j| |||d|qWdS) NcSs|jS)N)ra)prrrsz,RepoQueryCommand.tree_seed..)rrZrpmlibZsolvable)r .)r)rr)rrrra startswithrrrQrrurr9r6r5rVvaluesrrrrr) rir9Zaqueryr8rZusedpkgsrZstrpkgarraZpkgqueryZquerypkgrrrrs$"   zRepoQueryCommand.tree_seed)r3r4)N)F)Nr)rN)r/r0r1__doc__rZ FORM_NAMEZFORM_NAZ FORM_NEVRArtuplekeysaliasesrZsummary staticmethodr:rbrjrxrrrrrrrrrrrrrrr2cs,  0  Hr2c@sDeZdZdZddZddZeddZedd Z ed d Z d S) rz>Wrapper for dnf.package.Package, so we can control formatting.cCs ||_dS)N)_pkg)rirrrr__init__szPackageWrapper.__init__cCsFt|j|}|dkrdSt|tr:djtdd|DStjj|S)Nz(none)r cSsh|]}tjj|qSr)rrr)rZreldeprrr sz-PackageWrapper.__getattr__..) 
rr isinstancerVrrrrr)riattrZatrrrr __getattr__s   zPackageWrapper.__getattr__cCs&|dkrtjj|}|jdSdSdS)Nrz%Y-%m-%d %H:%Mr")datetimeZutcfromtimestampr~)ryrrrr_get_timestamps  zPackageWrapper._get_timestampcCs|j|jjS)N)rr buildtime)rirrrrszPackageWrapper.buildtimecCs|j|jjS)N)rr installtime)rirrrrszPackageWrapper.installtimeN) r/r0r1rrrrrpropertyrrrrrrrs   r)"Z __future__rrrZdnf.i18nrZdnf.clirZdnf.cli.option_parserrr[rrgrerrZdnf.exceptionsZ dnf.subjectZdnf.utilrZ getLoggerZloggerrZcompiler%rr#r-Z_SplitCallbackr.ZCommandr2objectrrrrrsJ        WPK!N WW1cli/commands/__pycache__/repoquery.cpython-36.pycnu[3 ft`ن @sddlmZddlmZddlmZddlmZddlmZddlm Z ddl Z ddl Z ddl Z ddl Z ddlZddlZddlZddlZddlZddlZddlZe jdZd Ze jd Zd Zd d dddddddd ZddZGddde jZGdddejZGddde Z!dS))absolute_import)print_function)unicode_literals)_)commands) OptionParserNdnfz.%{name}-%{epoch}:%{version}-%{release}.%{arch}z%(-?\d*?){([:.\w]+?)}a name, arch, epoch, version, release, reponame (repoid), from_repo, evr, debug_name, source_name, source_debug_name, installtime, buildtime, size, downloadsize, installsize, provides, requires, obsoletes, conflicts, sourcerpm, description, summary, license, url, reason conflictsenhances obsoletesprovides recommendsrequiresZ requires_presuggests supplements) r r r r r rz requires-prerrcCsdd}dd}|jddjdd}x tjD]\}}|j||}q.Wd }d }x>tj|D]0}|||||j7}|||7}|j}qZW||||d 7}|S) z:Convert a rpm like QUERYFMT to an python .format() string.cSs^|jd}|jd}|rJ|ddkr:d|dd}nd|}d|}d|j|dS) Nr-><:z{0.})groupslower)ZmatchobjZfillkeyr/usr/lib/python3.6/repoquery.pyfmt_replDs   zrpm2py_format..fmt_replcSs|jddjddS)N{z{{rz}})replace)ZtxtrrrbracketsOszrpm2py_format..bracketsz\n z\t rN)r OPTS_MAPPINGitems QFORMAT_MATCHfinditerstartend) queryformatrrrvalueZfmtspositemrrr rpm2py_formatBs   r-c@seZdZdZdS)_CommaSplitCallbackz\s*,\s*N)__name__ __module__ __qualname__ZSPLITTERrrrrr._sr.c@seZdZdZejejejdZd%e ej Z e dZ eddZedd Zd d Zd d ZddZddZd&ddZd'ddZd(ddZddZddZddZd d!Zd*d#d$ZdS)+RepoQueryCommandzSA class containing methods needed by the cli to execute the repoquery command. 
)z repoquery-nz repoquery-nazrepoquery-nevra repoqueryrqz$search for packages matching keywordcCs,|jr|j|jd|jr(|j|jd|S)z'Filter query by repoid and arch options)Zreponame)arch)Zrepofiltermarches)optsqueryrrrfilter_repo_archms z!RepoQueryCommand.filter_repo_archc Cs|jddddtdd|jddtdd |jd d d gtd tdd|jddddtdd|jdgtdtdd|jdgtdtdd|jdgtdtdd|jdgtdtdd|jd gtdtd!d|jd"gtdtd#d|jd$gtdtd%d|jd&gtdtd'd|jd(gtdtd)d|j}|jd*dtd+d |jd,dtd-d |jd.dtd/d |jd0dtd1d |jd2dtd3d |jd4dtd5d |jd6dtd7d |jd8d9ttd:d;|jdd?d@dAdtdBdC|jdDdEdFdAdtdGdC|jdHdIdJdAdtdKdC|jdLdMdAdtdNdC|jdOdPdQttdRdS|jdTdtdUd |jdVdQtdWtdXdY|jdZdQd[dWtd\dY|jd]dQd^dWtd_dY|jd`dtdad |j}|jdbdcdddWtdedY|jdfdcdddWtjdY|jdgdcdhdWtdidY|jdjdcdkdWtdldY|jdmdtdnd |j}tdotdptdqtdrtdstdttdutdvtdwdx }x2|jD]&\}}dy|}|j|dzdW||d{qW|jd|dtd}d td~tdtdtdj t j j dtdd} |j} x2| jD]&\} } dy| } | j| ddW| | d{q4W| jdddWdtjd{|jddtdd |jdddtdddS)Nz-az--allZqueryall store_truezNQuery all packages (shorthand for repoquery '*' or repoquery without argument))destactionhelpz--show-duplicatesz(Query all versions of packages (default))r=r>z--archz --archlistr7z[arch]z show only results from this ARCH)r<defaultr=metavarr>z-fz--fileFILE+z show only results that owns FILE)r@nargsr>z--whatconflictsZREQz#show only results that conflict REQ)r?r=r@r>z --whatdependszishows results that requires, suggests, supplements, enhances,or recommends package provides and files REQz--whatobsoletesz#show only results that obsolete REQz--whatprovidesz"show only results that provide REQz--whatrequiresz:shows results that requires package provides and files REQz--whatrecommendsz$show only results that recommend REQz--whatenhancesz"show only results that enhance REQz--whatsuggestsz"show only results that suggest REQz--whatsupplementsz%show only results that supplement REQz --alldepsz=check non-explicit dependencies (files and Provides); defaultz --exactdepsz:check dependencies exactly as given, opposite of --alldepsz --recursivezOused with --whatrequires, and --requires --resolve, query packages recursively.z --deplistz>show a list of all dependencies and what packages provide themz --resolvez.resolve capabilities to originating package(s)z--treez"show recursive tree for package(s)z--srpmz#operate on corresponding source RPMz--latest-limit latest_limitzOshow N latest packages for a given name.arch (or latest but N if N is negative))r<typer>z--disable-modular-filteringz-list also packages of inactive module streamsz-iz--info queryinfoFz+show detailed information about the package)r<r?r=r>z-lz--list queryfilelistz!show list of files in the packagez-sz--sourcequerysourcerpmzshow package source RPM namez --changelogsquerychangelogszshow changelogs of the packagez--qfz --queryformatr)zfdisplay format for listing packages: "%%{name} %%{version} ...", use --querytags to view full tag list)r<r?r>z --querytagsz-show available tags to use with --queryformatz--nevra store_constzZuse name-epoch:version-release.architecture format for displaying found packages (default))r<constr=r>z--nvrz%{name}-%{version}-%{release}zQuse name-version-release format for displaying found packages (rpm query default)z--envraz.%{epoch}:%{name}-%{version}-%{release}.%{arch}zPuse epoch:name-version-release.architecture format for displaying found packagesz --groupmemberz=Display in which comps groups are presented selected packagesz --duplicates pkgfilter duplicatedz/limit the query to installed duplicate packagesz --duplicatedz --installonly installonlyz1limit the query to installed installonly packagesz --unsatisfied unsatisfiedzClimit the query to 
installed packages with unsatisfied dependenciesz --locationz5show a location from where packages can be downloadedz5Display capabilities that the package conflicts with.zaDisplay capabilities that the package can depend on, enhance, recommend, suggest, and supplement.z2Display capabilities that the package can enhance.z-Display capabilities provided by the package.z1Display capabilities that the package recommends.z1Display capabilities that the package depends on.zIf the package is not installed display capabilities that it depends on for running %%pre and %%post scriptlets. If the package is installed display capabilities that is depends for %%pre, %%post, %%preun and %%postun.z/Display capabilities that the package suggests.z5Display capabilities that the package can supplement.) r dependsr r r rz requires-prerrz--%s packageatr)r<r=rKr>z --availablez Display only available packages.z Display only installed packages.zLDisplay only packages that are not present in any of available repositories.zQDisplay only packages that provide an upgrade for some already installed package.zIDisplay only packages that can be removed by "{prog} autoremove" command.)progz2Display only packages that were installed by user.) installedZextrasZupgradesunneeded userinstalledlistz --autoremoverTz--recentz%Display only recently edited packagesr*ZKEYzthe key to search for)rCr@r>) add_argumentrr.Zadd_mutually_exclusive_groupintQFORMAT_DEFAULTargparseZSUPPRESSr$formatrutil MAIN_PROG)parserZwhatrequiresformZoutformrLZpackage_attributeZ help_msgsargZhelp_msgnameZ help_listZ list_groupZlist_argZhelp_argZswitchrrr set_argparservs                                                                 zRepoQueryCommand.set_argparsercCs |jjs|jjtjtjddS)N)stdoutstderr)r8quietcliZredirect_loggerloggingZWARNINGINFO)selfrrr pre_configureszRepoQueryCommand.pre_configurecCsj|jjs|jj|jj}|jjrJ|jjrB|jjdd|jjnd|j_|jjrVdS|jj rx|jj rxt jj t d|jj r|jjr|jjddt|jj|jjdko|jj gst jj t d|jjs|jjr|jjp|jjst jj t d j|jjrd nd|jjr$|jjj|jjdkr@|jjd ksJ|jjrPd|_d|_|jjrfd|_dS)Nz --obsoletesz--r zOption '--resolve' has to be used together with one of the '--conflicts', '--depends', '--enhances', '--provides', '--recommends', '--requires', '--requires-pre', '--suggests' or '--supplements' optionsz --recursivez --exactdepsrzOption '--recursive' has to be used with '--whatrequires ' (optionally with '--alldeps', but not with '--exactdeps'), or with '--requires --resolve'z;argument {} requires --whatrequires or --whatdepends optionz --alldepsrSrUrNT)rSrU)r8rerfZredirect_repo_progressdemandsr rQZ_option_conflict querytagsresolverZCliErrorr recursive exactdepsany whatrequiresalldeps whatdependsr\srpmbaseZreposZenable_source_reposrVrL availableZavailable_reposZsack_activationrI changelogs)rirkrrr configures@      zRepoQueryCommand.configurec Cs|jrpg}|jdt|xH|jD]>}|d}|jd|jdtjj|dtjj|dfq$Wdj|Syht |}|j r|j j j |S|jr|j}|sttdj|tjd |S|jr|jSt|jj|SWn4tk r }ztjjt|WYdd}~XnXdS) NzChangelog for %s timestampz * %s %s %s z %a %b %d %YZauthortextr zPackage {} contains no files)file)rIappendstrrwstrftimeri18nucdjoinPackageWrapperrFruoutputZ infoOutputrGfilesprintrr\sysrdrHZ sourcerpmr-r)AttributeError exceptionsError) rir8pkgoutZchlogdtZpoZfilelisterrrbuild_format_fnGs.  
z RepoQueryCommand.build_format_fncCsN|jjjjdd}x4|D],}|j|jtjj|j |jjddd}qW|S)NT)emptyF) with_providesZwith_filenames) rusackr9r6union intersectionrsubjectSubjectget_best_query)riZnevrasZ base_queryresolved_nevras_queryZnevrarrr_resolve_nevrascs  z RepoQueryCommand._resolve_nevrasNcCsD|r|n|}|j|d}|j|}|j|}|r@|j|||d}|S)N)r)done)filter differencer_do_recursive_deps)riquery_in query_selectrZquery_requiredrrrrps    z#RepoQueryCommand._do_recursive_depsFcCs|j||}|j|d}|j|j|d}|r|j|j|d}|j|j|d}|j|j|d}|j|j|d}|j|j|d}|j|j|d}|j|j|d }|j|j|d }|jjr|j||}|S) N)requires__glob)r)recommends__glob)enhances__glob)supplements__glob)suggests__glob)r )r )r)r)rrrr8rnr)rinamesr9Z all_dep_typesrZdepqueryrrr by_all_deps}s   zRepoQueryCommand.by_all_depscCs|r|n|jjjjdd}|jjjjdd}x$|jD]}|j|j|jd}q:W|j|}|rz|j |||j|d}|j|S)NT)r)r )r) rurr9r6runrrrr_get_recursive_providers_query)rir providersrtrrrrrrs z/RepoQueryCommand._get_recursive_providers_querycCsxg}g}xN|jjD]B}tjjj|d}|jdr>|j|q|r|d kr|j|qW|rt|jj|d|jj j d}|S) Nrz.rpmhttpftpr{httpsF)strictprogress)rrr{r) r8rrZpycompZurlparseendswithr|ruZadd_remote_rpmsrr)riZrpmnamesremote_packagesrZschemesrrr_add_add_remote_packagess   z)RepoQueryCommand._add_add_remote_packagesc Cs|jjrttdS|jj|j|jjj|jj r8t j nt j d}|jj r|j}i}|jj|jkrx|j|jjg|d<g}|jdd}|r|j|jjjj|d}x>|jj D]2}|jtjj|ddj|jjfd|d|}qW|}|jjr|j|jjj}|jjrX|jjr|jjd krt|jjjtjj t!d j"d d |jjnH|jjd krx|j#|jj$j%}n(|jjr|jjdkrt&||jj}|jj'dkr|jj(|}|j)|j*}n|jj'dkr|jj(|}n|jj'dkrVtjj+|j}|j,|jjj-|jjj.tj/j0|} d| _1| j2dd} | sRttj3j4| j5dS|jjsh|j}|j6|j|}|} |jj7r|j|jj7d|jj8r|j|jj8d} | j|j|j9|jj8|d}|jj:r|j|jj:d|jj;r|j|jj;d} | r | }n|j|jj;d|jj<rR|jj=rB|j|jj|jj<|}|jj?r|jj=r|j|jj?d}|j|j|jj?d}|j|j|jj?d}|j|j|jj?d}|j|j|jj?d}n|j>|jj?|d}|jj@r|j|jj@d} | j|j|j9|jj@|d}|jjArR|j|jjAd} | j|j|j9|jjA|d}|jjBr|j|jjBd} | j|j|j9|jjB|d}|jjCr|j|jjCd} | j|j|j9|jjC|d }|jjDr|jE|jjD}|jjF|dd!}|jjGrRg}xD|D]<}|jH}|dk r|jjjj||jId"d#}||j27}qW|jjjj|d}|jjJr|jj< r|jjKd9krtjj t!d,j"tj3jLd-|jM|| |jdStN}|jjKrtN} x||j2D]p}|jjdks|jj$jO|r|jjKd.kr| jP|jQ|jR|jS|jT|jUn| jPt&|tV|jjKqW|jjWr|jjd krj|j6|j|jjj}n|j6|j|jjjj}|j| d/}|jjXr|j|jY||}tN}x@|jEj2D]}|jZ|j[|j|qWn|jPd0d1| Dn|jj\r6x.|j2D]"}|j]}|dk r |jZ|q Wnv|jj^rNg}xt_tN|j2D]}|jjdksx|jj$jO|rVg}|j`d2ta|xt_d3d4|jQDD]x}|j`d5|tjj|}|j|jj}|j6|j|j}|jjbs|jE}x$|j2D]}|j`d6ta|qWqW|j`d7jc|qVW|rJtd8jc|dS|jjdrf|je|dSxD|j2D]8}|jjdks|jj$jO|rp|jZ|j[|j|qpW|r|jjfrtd8jct_|ntd7jct_|dS):N)flagsZformsT)r)r)Z ignore_caseF)rr9rSz)argument {}: not allowed with argument {}z --availablez--rTrUrMrNrO)Zverify)Z file__glob)Zconflicts__glob)r )r )Zprovides__glob)r)r)r)r)r)r )r )r)r)Zwarningsrc)raevrr5r r r r r rrrzNo valid switch specified usage: {prog} repoquery [--conflicts|--enhances|--obsoletes|--provides|--recommends|--requires|--suggest|--supplements|--whatrequires] [key] [--tree] description: For the given packages print a tree of thepackages.)rRrP)r css|]}t|VqdS)N)r}).0Zrelrrr Qsz'RepoQueryCommand.run..z package: cSsg|] }t|qSr)r})rreqrrr ]sz(RepoQueryCommand.run..z dependency: z provider: r z )r r r r r rrr)gr8rlr QUERY_TAGSrfZ _populate_update_security_filterrurr9Zdisable_modular_filteringhawkeyZIGNORE_MODULAR_EXCLUDESZAPPLY_EXCLUDESrrZcommand nevra_formsrrr6rrrrZrecentZ_recentZconfrvrVZ optparserZ print_usagerrrr\Z _unneededhistoryZswdbgetattrrLZ_get_installonly_queryrrMZ rpmdb_sackZ _configureZinstallonlypkgsZinstallonly_limitgoalZGoalZprotect_running_kernelrr]Z_format_resolve_problemsZ problem_rulesr:r{Z whatconflictsrZ whatobsoletesZ whatprovidesrqrorrsZwhatrecommendsZ whatenhancesZwhatsupplementsZ whatsuggestsrDZlatestZ_merge_update_filtersrtZ source_namerZtreerQr^ 
tree_seedsetZuser_installedupdaterr rrr r#rmrnraddrlocationZremote_locationZdeplistsortedr|r}verboserZ groupmember_group_member_reportrF)riqrZkwarkZpkgsZ query_resultsrrNZrpmdbrZsolvedZorqueryZrelsZquery_for_provideZ dependsqueryZpkg_listrZsrcnameZ tmp_queryr9rrZdeplist_outputrrZproviderrrrrsH                           "           zRepoQueryCommand.runc Cs&i}x.|jjjD] }tdd|jD||j<qWi}g}xr|jD]f}g}x(|jD]\}} |j| krX|j |qXW|r|j dj t |gj t |qF|j t |qFWg} xDt |jD]4\} } | j dj t | t dd| jdDqW| j dj t || r"tdj | dS)NcSsg|] }|jqSr)ra)rrrrrr}sz9RepoQueryCommand._group_member_report..$r cSsg|] }d|qS)z @r)ridrrrrs)rucompsrrZ packages_iterrrr$rar| setdefaultrrr}splitr) rir9Zpackage_conf_dictgroupZgroup_package_dictZpkg_not_in_grouprZ group_id_listZgroup_idZpackage_name_setrrZ package_listrrrrzs*  ,z%RepoQueryCommand._group_member_reportc Cs|j||}|d kr t|dSd}xtd|D] }|d7}q0Wg}x|jD]}|jt|qLWdtt|ddj|d} t|d |d| dS) Nr rz| [z: z, ]z\_ )rrrangerr|r}lenr) rilevelrr8Z pkg_stringZspacingxrZ requirepkgZreqstrrrr grow_trees   "zRepoQueryCommand.grow_treerc Cs8x0tt|jdddD]}|dks2|d kr8tn|}|jjdsT|jjdrXdS|j|||||kr|j||jrt||j}i}xFt|D]:} |j j j j | d} x | D]} | || jd| j <qWqW|j j j j t|jd } n&|jr |j|jf|n |j|jd } |j| |||d|qWdS) NcSs|jS)N)ra)prrrsz,RepoQueryCommand.tree_seed..)rrZrpmlibZsolvable)r .)r)rr)rrrra startswithrrrQrrurr9r6r5rVvaluesrrrrr) rir9Zaqueryr8rZusedpkgsrZstrpkgarraZpkgqueryZquerypkgrrrrs$"   zRepoQueryCommand.tree_seed)r3r4)N)F)Nr)rN)r/r0r1__doc__rZ FORM_NAMEZFORM_NAZ FORM_NEVRArtuplekeysaliasesrZsummary staticmethodr:rbrjrxrrrrrrrrrrrrrrr2cs,  0  Hr2c@sDeZdZdZddZddZeddZedd Z ed d Z d S) rz>Wrapper for dnf.package.Package, so we can control formatting.cCs ||_dS)N)_pkg)rirrrr__init__szPackageWrapper.__init__cCsFt|j|}|dkrdSt|tr:djtdd|DStjj|S)Nz(none)r cSsh|]}tjj|qSr)rrr)rZreldeprrr sz-PackageWrapper.__getattr__..) rr isinstancerVrrrrr)riattrZatrrrr __getattr__s   zPackageWrapper.__getattr__cCs&|dkrtjj|}|jdSdSdS)Nrz%Y-%m-%d %H:%Mr")datetimeZutcfromtimestampr~)ryrrrr_get_timestamps  zPackageWrapper._get_timestampcCs|j|jjS)N)rr buildtime)rirrrrszPackageWrapper.buildtimecCs|j|jjS)N)rr installtime)rirrrrszPackageWrapper.installtimeN) r/r0r1rrrrrpropertyrrrrrrrs   r)"Z __future__rrrZdnf.i18nrZdnf.clirZdnf.cli.option_parserrr[rrgrerrZdnf.exceptionsZ dnf.subjectZdnf.utilrZ getLoggerZloggerrZcompiler%rr#r-Z_SplitCallbackr.ZCommandr2objectrrrrrsJ        WPK!^**4cli/commands/__pycache__/search.cpython-36.opt-1.pycnu[3 ft`@sddlmZddlmZddlmZddlZddlmZddlmZddl m Z m Z m Z ddl Z ddlZ ddlZ ddlZddlZejdZGd d d ejZdS) )absolute_import)print_function)unicode_literalsN)commands) OptionParser)ucd_C_dnfc@sPeZdZdZdZedZeddZddZ d d Z d d Z d dZ ddZ dS) SearchCommandzTA class containing methods needed by the cli to execute the search command. 
searchsez+search package details for the given stringc Cs<|jddtdd|jddtddgdtjtd d dS) Nz--all store_truez'search also package description and URL)actionhelp query_string+ZKEYWORDallzKeyword to search for)nargsmetavarchoicesdefaultrr) add_argumentrrZPkgNarrowCallback)parserr/usr/lib/python3.6/search.py set_argparser0s  zSearchCommand.set_argparsercs4tjdtddfdtddfdtddfdtd fffd d fd d }tjj}x(|D] }j|d|j|d|qbWjj rxd|D] }j|d|j|d|qWn:t |}t |j }x$|D]}t |j ||kr||=qWd}d} d} d} d} jjjs0jjjj|j dj} t} x|jd| dD]}jjjs~|j|j| krlqF| j|j|j||j|kr|j|}d} | |j |kr|j |} d} | |j|| kkr|j|| k} d} | r|| || d} jjj||j||qFWt |dkr0tjtddS)z0Search for simple text tags in a package object.nameZlongNamesummaryZSummary descriptionZ DescriptionZurlZURLc sy|S|SdS)Nr)attr) TRANS_TBLrr_translate_attrCsz.SearchCommand._search.._translate_attrcs^t|}tdj|}|r*td|}n td|}jjj|dj|}tt|dS)Nz & z%s Exactly Matched: %%sz%s Matched: %%sz, )maprjoinbaseoutputZ fmtSectionprintr) exact_matchZattrskeysZ trans_attrsZtrans_attrs_strZ section_textZ formatted)r#selfrr_print_section_headerIs  z4SearchCommand._search.._print_section_headerNF)pkgT)reverseZlimit_torzNo matches found.) collections OrderedDictr rr Z match_counterZ MatchCounter_search_countedoptsrlenlistr*matched_needlesr&ZconfZshowdupesfromrepossackqueryfiltermZlatestsetsortedrZarchaddZ matched_keysZmatched_haystacksr'Z matchcallbackloggerinfo)r+argsr,counterargZneedlesZpkgsr-Z used_attrsr5r)Zprint_section_headerlimitseenr)r"r#r+r_search9s`               zSearchCommand._searchcCs`d||i}tjj|r$d||i}|jjjjtjf|}x|j D]}|j |||qFW|S)Nz %s__substrz%s__glob) r utilZis_glob_patternr&r6r7r8hawkeyZICASErunr;)r+r?r!ZneedleZfdictqr-rrrr1s   zSearchCommand._search_countedcCs |jjs|jjtjtjddS)N)stdoutstderr)r2quietcliZredirect_loggerloggingZWARNINGINFO)r+rrr pre_configureszSearchCommand.pre_configurecCsD|jjs|jj|jj}d|_d|_d|_|jjp:|jj |j_dS)NTF) r2rJrKZredirect_repo_progressdemandsZavailable_reposZfresh_metadataZsack_activationrZquery_string_action)r+rOrrr configures zSearchCommand.configurecCstjtd|j|jjS)NzSearching Packages: )r<debugrrCr2r)r+rrrrFszSearchCommand.runN)r r )__name__ __module__ __qualname____doc__aliasesrr staticmethodrrCr1rNrPrFrrrrr (s O  r )Z __future__rrrr/Zdnf.clirZdnf.cli.option_parserrZdnf.i18nrrr r Zdnf.match_counterZdnf.utilrErLZ getLoggerr<ZCommandr rrrrs      PK!^**.cli/commands/__pycache__/search.cpython-36.pycnu[3 ft`@sddlmZddlmZddlmZddlZddlmZddlmZddl m Z m Z m Z ddl Z ddlZ ddlZ ddlZddlZejdZGd d d ejZdS) )absolute_import)print_function)unicode_literalsN)commands) OptionParser)ucd_C_dnfc@sPeZdZdZdZedZeddZddZ d d Z d d Z d dZ ddZ dS) SearchCommandzTA class containing methods needed by the cli to execute the search command. 
searchsez+search package details for the given stringc Cs<|jddtdd|jddtddgdtjtd d dS) Nz--all store_truez'search also package description and URL)actionhelp query_string+ZKEYWORDallzKeyword to search for)nargsmetavarchoicesdefaultrr) add_argumentrrZPkgNarrowCallback)parserr/usr/lib/python3.6/search.py set_argparser0s  zSearchCommand.set_argparsercs4tjdtddfdtddfdtddfdtd fffd d fd d }tjj}x(|D] }j|d|j|d|qbWjj rxd|D] }j|d|j|d|qWn:t |}t |j }x$|D]}t |j ||kr||=qWd}d} d} d} d} jjjs0jjjj|j dj} t} x|jd| dD]}jjjs~|j|j| krlqF| j|j|j||j|kr|j|}d} | |j |kr|j |} d} | |j|| kkr|j|| k} d} | r|| || d} jjj||j||qFWt |dkr0tjtddS)z0Search for simple text tags in a package object.nameZlongNamesummaryZSummary descriptionZ DescriptionZurlZURLc sy|S|SdS)Nr)attr) TRANS_TBLrr_translate_attrCsz.SearchCommand._search.._translate_attrcs^t|}tdj|}|r*td|}n td|}jjj|dj|}tt|dS)Nz & z%s Exactly Matched: %%sz%s Matched: %%sz, )maprjoinbaseoutputZ fmtSectionprintr) exact_matchZattrskeysZ trans_attrsZtrans_attrs_strZ section_textZ formatted)r#selfrr_print_section_headerIs  z4SearchCommand._search.._print_section_headerNF)pkgT)reverseZlimit_torzNo matches found.) collections OrderedDictr rr Z match_counterZ MatchCounter_search_countedoptsrlenlistr*matched_needlesr&ZconfZshowdupesfromrepossackqueryfiltermZlatestsetsortedrZarchaddZ matched_keysZmatched_haystacksr'Z matchcallbackloggerinfo)r+argsr,counterargZneedlesZpkgsr-Z used_attrsr5r)Zprint_section_headerlimitseenr)r"r#r+r_search9s`               zSearchCommand._searchcCs`d||i}tjj|r$d||i}|jjjjtjf|}x|j D]}|j |||qFW|S)Nz %s__substrz%s__glob) r utilZis_glob_patternr&r6r7r8hawkeyZICASErunr;)r+r?r!ZneedleZfdictqr-rrrr1s   zSearchCommand._search_countedcCs |jjs|jjtjtjddS)N)stdoutstderr)r2quietcliZredirect_loggerloggingZWARNINGINFO)r+rrr pre_configureszSearchCommand.pre_configurecCsD|jjs|jj|jj}d|_d|_d|_|jjp:|jj |j_dS)NTF) r2rJrKZredirect_repo_progressdemandsZavailable_reposZfresh_metadataZsack_activationrZquery_string_action)r+rOrrr configures zSearchCommand.configurecCstjtd|j|jjS)NzSearching Packages: )r<debugrrCr2r)r+rrrrFszSearchCommand.runN)r r )__name__ __module__ __qualname____doc__aliasesrr staticmethodrrCr1rNrPrFrrrrr (s O  r )Z __future__rrrr/Zdnf.clirZdnf.cli.option_parserrZdnf.i18nrrr r Zdnf.match_counterZdnf.utilrErLZ getLoggerr<ZCommandr rrrrs      PK!mp3cli/commands/__pycache__/shell.cpython-36.opt-1.pycnu[3 ft`l&@sddlmZddlmZmZddlZddlZddlZddlZddl Z ddl Z ddl Z e j dZ GdddeZGdddejejZdS) )commands)_ucdNdnfc@seZdZdZdZdZdZdS)ShellDemandSheetTN)__name__ __module__ __qualname__Zavailable_reposZ resolvingZ root_userZsack_activationr r /usr/lib/python3.6/shell.pyr%src @seZdZd*ZedjejjdZ dddddddd d d d Z d dZ e ddZ ddZddZddZddZd+ddZd,ddZd-ddZd.d d!Zd"d#Zd/d$d%Zd0d&d'Zd1d(d)ZdS)2 ShellCommandshellshzrun an interactive {prog} shell)progrepoquitZts_run transactionconfigresolvehelp) r repositoryexitrruntsrr resolvedeprcCs$tjj||tjj|d|_dS)Nz> )rCommand__init__cmdCmdprompt)selfclir r r r=s zShellCommand.__init__cCs*|jddtdtdjtjjdddS)Nscript?ZSCRIPTzScript to run in {prog} shell)r)nargsmetavarr) add_argumentrformatrutilMAIN_PROG_UPPER)parserr r r set_argparserBszShellCommand.set_argparserc Csr|jj}t|j_xZt|D]N}|jdr,qyt|jj|Wqtk rht|jj|t||YqXqWdS)N__)r!demandsrdir startswithgetattrAttributeErrorsetattr)r Zdefault_demandsattrr r r configureHs  zShellCommand.configurecCs$|jjr|j|jjn|jdS)N)optsr" _run_scriptZcmdloop)r r r r rUszShellCommand.runcCs |jjd|j_|jjdS)N)baseZ_finalize_base _transaction fill_sack)r r r r _clean[s zShellCommand._cleancCs`| s|dkrdS|dkrd}ytj|}Wn|jdS|jjjdd|jjj|}|jdkrldS|j|jkrt 
|d|j|j|ddn|jj j |j}|dk rT||j}y|jjj ||}Wnt k rdSXy&tj|jj|j_|j|jWn@tjjk rP}ztjtd d t|dSd}~XnXn|jdS) N ZEOFrF)Z reset_usager rzError: )shlexsplit_helpr! optparserrZparse_main_argsZcommandMAPPINGr0Z cli_commandsgetZparse_command_args SystemExitcopydeepcopyr-r4rr exceptionsErrorloggererrorrr)r lineZs_liner5Zcmd_clsrer r r onecmd`s<  $   zShellCommand.onecmdNc Csdd}| st|dkr(|jddS|d}t|dkrD|dnd}|jd}|d kr|d|}||dd}|jjj|}x|D]}||||qW|stjtd|n||||jj dS) Nc SsP|rt|||n:ytdj|t|t|WntjtdYnXdS)Nz{}: {}zUnsupported key value.)r2printr'r0strrIwarningr)keyvalconfr r r print_or_sets z*ShellCommand._config..print_or_setrrr<.zCould not find repository: %s) lenr@findr7repos get_matchingrIrPrrS) r argsrTrQrRZperiodZ repo_namerZrr r r _configs"      zShellCommand._configcCst|trt|dkr|dn|}d}|r|dkrBtdj|}n|dkrZtdj|}nv|dkrrtd j|}n^|d krtd j|}nF|dkrtdj|}n.|dkrtdj|}n|dkrtdj|}|s|jjjtd}td|dS)zOutput help information. :param args: the command to output help information about. If *args* is an empty, general help will be output. rNrz{} arg [value] arg: debuglevel, errorlevel, obsoletes, gpgcheck, assumeyes, exclude, repo_id.gpgcheck, repo_id.exclude If no value is given it prints the current value. If value is given it sets that value.rz{} [command] print helprrz{} arg [option] list: lists repositories and their status. option = [all | id | glob] enable: enable repositories. option = repository id disable: disable repositories. option = repository idrz"{} resolve the transaction setrrzy{} arg list: lists the contents of the transaction reset: reset (zero-out) the transaction run: run the transactionrz{} run the transactionrrz{} exit the shellaShell specific arguments: config set config options help print help repository (or repo) enable, disable or list repositories resolvedep resolve the transaction set transaction (or ts) list, reset or run the transaction set run resolve and run the transaction set exit (or quit) exit the shellr;)rr)rr)rr) isinstancelistrXrr'r!rAZ print_helprN)r r\argmsgr r r r@s:"  zShellCommand._helpcCs|r |dnd}|d kr6|jddj|ddn|dkr|jjj}d}x\|ddD]L}|j|}|r~t||d }qZtjt d dt d |jj j j |qZW|r|jj d|j_n |jd dS)Nrr_z repolist r=r<enabledisableFTzError:zUnknown repo: '%s'r)r_N)rbrc)rMjoinr!r7rZr[r0rIZcriticalroutputtermboldr9Z_compsr@)r r\rrZr9rrr r r _repos"     zShellCommand._repocCsLy|jjj|jjjWn.tjjk rF}zt|WYdd}~XnXdS)N) r!r7rr-Z allow_erasingrrGZ DepsolveErrorrN)r r\rLr r r _resolveszShellCommand._resolvecCsyDt|d0}|j}x |D]}|jds|j|qWWdQRXWn:tk r~tjtd|jj j j |t j dYnXdS)Nrh#z!Error: Cannot open %s for readingr<)open readlinesr/rMIOErrorrIinforr7rerfrgsysr)r filefdlinesrKr r r r6s   zShellCommand._run_scriptcCs|r |dnd}|dkr$|jdS|j|d krZ|jjr|jjj|jj}tj|nz|dkry|jjWn@t j j k r}z tj t ddt|WYdd}~XnXtjt d|jn |jddS) Nrresetr_rzError:r=z Complete!r)r_N)r:rjr7r8reZlist_transactionrIroZdo_transactionrrGrHrJrrr@)r r\routrLr r r r8 s" , zShellCommand._transactioncCs|jdgdS)Nr)r8)r r\r r r _ts_run"szShellCommand._ts_runcCstjtdtjddS)Nz Leaving Shellr)rIrorrpr)r r\r r r _quit%szShellCommand._quit)r r)N)N)N)N)N)N)N)rrr aliasesrr'rr(r)ZsummaryrBr staticmethodr+r4rr:rMr]r@rirjr6r8rvrwr r r r r ,s4  &  ;    r )Zdnf.clirZdnf.i18nrrZdnf.utilrrrEZloggingr>rpZ getLoggerrIobjectrrrr r r r r s  PK!mp-cli/commands/__pycache__/shell.cpython-36.pycnu[3 ft`l&@sddlmZddlmZmZddlZddlZddlZddlZddl Z ddl Z ddl Z e j dZ GdddeZGdddejejZdS) )commands)_ucdNdnfc@seZdZdZdZdZdZdS)ShellDemandSheetTN)__name__ __module__ __qualname__Zavailable_reposZ resolvingZ root_userZsack_activationr r /usr/lib/python3.6/shell.pyr%src @seZdZd*ZedjejjdZ dddddddd d d d Z d dZ e ddZ 
ddZddZddZddZd+ddZd,ddZd-ddZd.d d!Zd"d#Zd/d$d%Zd0d&d'Zd1d(d)ZdS)2 ShellCommandshellshzrun an interactive {prog} shell)progrepoquitZts_run transactionconfigresolvehelp) r repositoryexitrruntsrr resolvedeprcCs$tjj||tjj|d|_dS)Nz> )rCommand__init__cmdCmdprompt)selfclir r r r=s zShellCommand.__init__cCs*|jddtdtdjtjjdddS)Nscript?ZSCRIPTzScript to run in {prog} shell)r)nargsmetavarr) add_argumentrformatrutilMAIN_PROG_UPPER)parserr r r set_argparserBszShellCommand.set_argparserc Csr|jj}t|j_xZt|D]N}|jdr,qyt|jj|Wqtk rht|jj|t||YqXqWdS)N__)r!demandsrdir startswithgetattrAttributeErrorsetattr)r Zdefault_demandsattrr r r configureHs  zShellCommand.configurecCs$|jjr|j|jjn|jdS)N)optsr" _run_scriptZcmdloop)r r r r rUszShellCommand.runcCs |jjd|j_|jjdS)N)baseZ_finalize_base _transaction fill_sack)r r r r _clean[s zShellCommand._cleancCs`| s|dkrdS|dkrd}ytj|}Wn|jdS|jjjdd|jjj|}|jdkrldS|j|jkrt |d|j|j|ddn|jj j |j}|dk rT||j}y|jjj ||}Wnt k rdSXy&tj|jj|j_|j|jWn@tjjk rP}ztjtd d t|dSd}~XnXn|jdS) N ZEOFrF)Z reset_usager rzError: )shlexsplit_helpr! optparserrZparse_main_argsZcommandMAPPINGr0Z cli_commandsgetZparse_command_args SystemExitcopydeepcopyr-r4rr exceptionsErrorloggererrorrr)r lineZs_liner5Zcmd_clsrer r r onecmd`s<  $   zShellCommand.onecmdNc Csdd}| st|dkr(|jddS|d}t|dkrD|dnd}|jd}|d kr|d|}||dd}|jjj|}x|D]}||||qW|stjtd|n||||jj dS) Nc SsP|rt|||n:ytdj|t|t|WntjtdYnXdS)Nz{}: {}zUnsupported key value.)r2printr'r0strrIwarningr)keyvalconfr r r print_or_sets z*ShellCommand._config..print_or_setrrr<.zCould not find repository: %s) lenr@findr7repos get_matchingrIrPrrS) r argsrTrQrRZperiodZ repo_namerZrr r r _configs"      zShellCommand._configcCst|trt|dkr|dn|}d}|r|dkrBtdj|}n|dkrZtdj|}nv|dkrrtd j|}n^|d krtd j|}nF|dkrtdj|}n.|dkrtdj|}n|dkrtdj|}|s|jjjtd}td|dS)zOutput help information. :param args: the command to output help information about. If *args* is an empty, general help will be output. rNrz{} arg [value] arg: debuglevel, errorlevel, obsoletes, gpgcheck, assumeyes, exclude, repo_id.gpgcheck, repo_id.exclude If no value is given it prints the current value. If value is given it sets that value.rz{} [command] print helprrz{} arg [option] list: lists repositories and their status. option = [all | id | glob] enable: enable repositories. option = repository id disable: disable repositories. 
option = repository idrz"{} resolve the transaction setrrzy{} arg list: lists the contents of the transaction reset: reset (zero-out) the transaction run: run the transactionrz{} run the transactionrrz{} exit the shellaShell specific arguments: config set config options help print help repository (or repo) enable, disable or list repositories resolvedep resolve the transaction set transaction (or ts) list, reset or run the transaction set run resolve and run the transaction set exit (or quit) exit the shellr;)rr)rr)rr) isinstancelistrXrr'r!rAZ print_helprN)r r\argmsgr r r r@s:"  zShellCommand._helpcCs|r |dnd}|d kr6|jddj|ddn|dkr|jjj}d}x\|ddD]L}|j|}|r~t||d }qZtjt d dt d |jj j j |qZW|r|jj d|j_n |jd dS)Nrr_z repolist r=r<enabledisableFTzError:zUnknown repo: '%s'r)r_N)rbrc)rMjoinr!r7rZr[r0rIZcriticalroutputtermboldr9Z_compsr@)r r\rrZr9rrr r r _repos"     zShellCommand._repocCsLy|jjj|jjjWn.tjjk rF}zt|WYdd}~XnXdS)N) r!r7rr-Z allow_erasingrrGZ DepsolveErrorrN)r r\rLr r r _resolveszShellCommand._resolvecCsyDt|d0}|j}x |D]}|jds|j|qWWdQRXWn:tk r~tjtd|jj j j |t j dYnXdS)Nrh#z!Error: Cannot open %s for readingr<)open readlinesr/rMIOErrorrIinforr7rerfrgsysr)r filefdlinesrKr r r r6s   zShellCommand._run_scriptcCs|r |dnd}|dkr$|jdS|j|d krZ|jjr|jjj|jj}tj|nz|dkry|jjWn@t j j k r}z tj t ddt|WYdd}~XnXtjt d|jn |jddS) Nrresetr_rzError:r=z Complete!r)r_N)r:rjr7r8reZlist_transactionrIroZdo_transactionrrGrHrJrrr@)r r\routrLr r r r8 s" , zShellCommand._transactioncCs|jdgdS)Nr)r8)r r\r r r _ts_run"szShellCommand._ts_runcCstjtdtjddS)Nz Leaving Shellr)rIrorrpr)r r\r r r _quit%szShellCommand._quit)r r)N)N)N)N)N)N)N)rrr aliasesrr'rr(r)ZsummaryrBr staticmethodr+r4rr:rMr]r@rirjr6r8rvrwr r r r r ,s4  &  ;    r )Zdnf.clirZdnf.i18nrrZdnf.utilrrrEZloggingr>rpZ getLoggerrIobjectrrrr r r r r s  PK!Qx__2cli/commands/__pycache__/swap.cpython-36.opt-1.pycnu[3 ft`s @s`ddlmZddlmZddlmZddlmZddlZddl Z e j dZ Gdddej Z dS) )absolute_import)unicode_literals)_)commandsNdnfc@sLeZdZdZdZedjejj dZ e ddZ ddZ d d Zd d Zd S) SwapCommandzNA class containing methods needed by the cli to execute the swap command. swapz=run an interactive {prog} mod for remove and install one spec)progcCs,|jddtdd|jddtdddS)N remove_specZstorezThe specs that will be removed)actionhelp install_specz The specs that will be installed) add_argumentr)parserr/usr/lib/python3.6/swap.py set_argparser&s zSwapCommand.set_argparsercCsH|jj}d|_d|_d|_d|_tj|j|jtj |j|j j gdS)NT) clidemandsZsack_activationZavailable_reposZ resolvingZ root_userrZ _checkGPGKeybaseZ_checkEnabledRepooptsr )selfrrrr configure,szSwapCommand.configurecCs@|jjj|}|dk r<||j}|jjj|||g|jdS)N)rZ cli_commandsgetZ optparserZparse_command_argsrun)rZcmd_strspecZcmd_clscmdrrr_perform5s  zSwapCommand._performcCs$|jd|jj|jd|jjdS)NremoveZinstall)rrr r )rrrrr<szSwapCommand.runN)r)__name__ __module__ __qualname____doc__aliasesrformatrutilZMAIN_PROG_UPPERZsummary staticmethodrrrrrrrrrs   r)Z __future__rrZdnf.i18nrZdnf.clirZdnf.utilrZloggingZ getLoggerZloggerZCommandrrrrrs     PK!Qx__,cli/commands/__pycache__/swap.cpython-36.pycnu[3 ft`s @s`ddlmZddlmZddlmZddlmZddlZddl Z e j dZ Gdddej Z dS) )absolute_import)unicode_literals)_)commandsNdnfc@sLeZdZdZdZedjejj dZ e ddZ ddZ d d Zd d Zd S) SwapCommandzNA class containing methods needed by the cli to execute the swap command. 
swapz=run an interactive {prog} mod for remove and install one spec)progcCs,|jddtdd|jddtdddS)N remove_specZstorezThe specs that will be removed)actionhelp install_specz The specs that will be installed) add_argumentr)parserr/usr/lib/python3.6/swap.py set_argparser&s zSwapCommand.set_argparsercCsH|jj}d|_d|_d|_d|_tj|j|jtj |j|j j gdS)NT) clidemandsZsack_activationZavailable_reposZ resolvingZ root_userrZ _checkGPGKeybaseZ_checkEnabledRepooptsr )selfrrrr configure,szSwapCommand.configurecCs@|jjj|}|dk r<||j}|jjj|||g|jdS)N)rZ cli_commandsgetZ optparserZparse_command_argsrun)rZcmd_strspecZcmd_clscmdrrr_perform5s  zSwapCommand._performcCs$|jd|jj|jd|jjdS)NremoveZinstall)rrr r )rrrrr<szSwapCommand.runN)r)__name__ __module__ __qualname____doc__aliasesrformatrutilZMAIN_PROG_UPPERZsummary staticmethodrrrrrrrrrs   r)Z __future__rrZdnf.i18nrZdnf.clirZdnf.utilrZloggingZ getLoggerZloggerZCommandrrrrrs     PK!Q6Q68cli/commands/__pycache__/updateinfo.cpython-36.opt-1.pycnu[3 ft`2J@sdZddlmZddlmZddlmZddlZddlZddlZddlm Z ddl m Z ddl m Z mZdd lmZd d ZGd d d e jZdS)zUpdateInfo CLI command.)absolute_import)print_function)unicode_literalsN)commands) OptionParser)_ exact_width)unicodecCstdd|DS)z7Return maximum length of items in a non-empty iterable.css|]}t|VqdS)N)r).0itemr /usr/lib/python3.6/updateinfo.py &sz_maxlen..)max)iterabler r r _maxlen$src s.eZdZdZejedejedejedej edej ediZ ededed ed d Z d d d d d d ddZ dgee jZedZdZdddegZfddZeddZddZddZddZd d!Zd"d#Zd$d%Zd&d'Zd(d)Zd*d+Zd,d-Z d.d/Z!d0d1Z"d2d3Z#d4d5Z$Z%S)6UpdateInfoCommandz)Implementation of the UpdateInfo command.bugfix enhancementsecurityunknown newpackagez Critical/Sec.zImportant/Sec.z Moderate/Sec.zLow/Sec.)Critical ImportantModerateLowlistinfosummary)zlist-updateinfoz list-securityzlist-seczinfo-updateinfoz info-securityzinfo-seczsummary-updateinfoZ updateinfoz!display advisories about packages available installedupdatesallcstt|j|d|_dS)zInitialize the command.N)superr__init___installed_query)selfcli) __class__r r r$CszUpdateInfoCommand.__init__c Cs|j}|jddddtdd|jddddtd d|jd dd dtd d|jd dddtdddddg}|j}|jddddtdd|jddddtdd|jddddtdd|jddddtdd|jd d!ddtd"d|jd#d$d%||d&tjtd'd(dS))Nz --available _availabilityr store_constz?advisories about newer versions of installed packages (default))destconstactionhelpz --installedr z?advisories about equal and older versions of installed packagesz --updatesr!zbadvisories about newer versions of those installed packages for which a newer version is availablez--allr"z3advisories about any versions of installed packagesrrrz --summary _spec_actionz$show summary of advisories (default)z--listzshow list of advisoriesz--infozshow info of advisoriesz --with-cvewith_cveF store_truez'show only advisories with CVE reference)r+defaultr-r.z --with-bzwith_bzz,show only advisories with bugzilla referencespec*ZSPECrzPackage specification)nargsmetavarchoicesr2r-r.)Zadd_mutually_exclusive_group add_argumentrrZPkgNarrowCallback)parser availabilityZcmdsZ output_formatr r r set_argparserHsD                zUpdateInfoCommand.set_argparsercCsd|jj_d|jj_|jj|jkr6|j|jj|j_n|jjrJ|jj|j_|jj r`|jj |j_ n:|jj s||jj d|j kr|j |j_ n|jj jd|j_ t|j_|jjr|jjjtj|jjr|jjjtj|jjr|jjjtj|jjr|jjjtj|jj r|jj jd}|dkr:|jjjtjn|dkrV|jjjtjnp|d krr|jjjtjnT|dkr|jjjtjn8|d krd|j_n$|d krd|j_n|jj jd||jjr|jj j|jjd S)zADo any command-specific configuration based on command arguments.Trrrrsecr bugzillasbzscvesN)rr=)r>r?) 
r'ZdemandsZavailable_reposZsack_activationoptsZcommanddirect_commands spec_actionr/r)r;r4availabilitiesavailability_defaultpopset_advisory_typesraddhawkeyADVISORY_BUGFIXrADVISORY_ENHANCEMENTrADVISORY_NEWPACKAGErADVISORY_SECURITYr3r0insertadvisoryextend)r&r4r r r configurensJ                zUpdateInfoCommand.configurecCs|jjdkr$|j|jj}td}n^|jjdkrH|j|jj}td}n:|jjdkrl|j|jj}td}n|j|jj}td}|jjdkr|j |n$|jjdkr|j |n |j ||dS)z#Execute the command with arguments.r r!r"rrrN) rAr;installed_apkg_adv_instsr4rupdating_apkg_adv_instsall_apkg_adv_instsavailable_apkg_adv_instsrC display_list display_infodisplay_summary)r&apkg_adv_insts descriptionr r r runs           zUpdateInfoCommand.runcCs@|jdkr |jjjjj|_|jj|j|jd}t |dkS)N)nameZevr__gter) r%basesackqueryr Zapplyfilterr]evrlen)r&apackageqr r r _newer_equal_installeds z(UpdateInfoCommand._newer_equal_installedcs,|jj rJ|jj rJ|jj rJ|jj rJ|jj rJ|jj rJ|jj rJdSj|jjkr\dSt fdd|jjDrzdS|jjrj|jjkrdS|jjrt fdd|jjDrdS|jjrt fdd|jjDrdS|jjrt ddj DrdS|jjr(t ddj Dr(dSd S) NTc3s|]}tjj|VqdS)N)fnmatch fnmatchcaseid)r pat)rPr r rsz6UpdateInfoCommand._advisory_matcher..csg|]}j|qSr )Z match_bug)r Zbug)rPr r sz7UpdateInfoCommand._advisory_matcher..csg|]}j|qSr )Z match_cve)r Zcve)rPr r rkscSsg|]}|jtjkqSr )typerJ REFERENCE_CVE)r refr r r rkscSsg|]}|jtjkqSr )rlrJREFERENCE_BUGZILLA)r rnr r r rksF) rArHr4severityZbugzillar@r0r3rlany references)r&rPr )rPr _advisory_matchers2       ""  z#UpdateInfoCommand._advisory_matcherc#shxb|j|D]Tj|jj}|j|}tfdd|jjD}|sJ|r |j}||fVq WdS)z4Return (adv. package, advisory, installed) triplets.c3s|]}tjj|VqdS)N)rgrhr])r rj)rdr r rszAUpdateInfoCommand._apackage_advisory_installed..N) Zget_advisory_pkgsZ get_advisoryr^r_rsrqrAr4rf)r&Z pkgs_queryZcmptypespecsrPZadvisory_matchZapackage_matchr r )rdr _apackage_advisory_installeds   z.UpdateInfoCommand._apackage_advisory_installedcCs@|jj}|jjdd}|j}|r<|j|jj|jd}|S)z.z %*s %s)rprintrrJrMrNrKrLADVISORY_UNKNOWNrr r^confZautocheck_running_kernelr'Z_check_running_kernel) r&rZr[Ztyp2cntZ label_countswidthindentlabelcountr r r rYs2 $ z!UpdateInfoCommand.display_summaryc sfdd}fdd}t}x|D]\}}}d|j|j|jf}jjsRjjrx|jD]Z} | jt j krxjj rxqZn| jt j krjj rqZ|j|j f|j |||jft| j<qZWq$|j|j f|j |||jft|j<q$Wg} d} } } xt|jddd D]r\\}}}}t| t|} xR|jD]F\}}t| t|} ||}t| t|} | j||||||fq.WqWxZ| D]R\}}}}}jjjrtd || || || ||fntd || || ||fqWd S) zDisplay the list of advisories.cs jjdksdS|rdSdSdS)Nr"zi z )rAr;)inst)r&r r inst2mark2s  z1UpdateInfoCommand.display_list..inst2markcs2|tjkrjj|tdSjj|tdSdS)Nz Unknown/Sec.r)rJrNSECURITY2LABELgetr TYPE2LABEL)typZsev)r&r r type2label:s z2UpdateInfoCommand.display_list..type2labelz%s-%s.%srcSs|dS)Nrr )xr r r Rsz0UpdateInfoCommand.display_list..)keyz%s%-*s %-*s %-*s %sz%s%-*s %-*s %sN)dictr]rbarchrAr0r3rrrlrJrormrp setdefaultupdatedrisorteditemsrrcappendr^rverboser)r&rZrrZnevra_inst_dictrrPr ZnevrarnZadvlistZidwZtlwZnwrZaupdatedrZaidZatypesevrr )r&r rW0s4   *( $$ zUpdateInfoCommand.display_listc sjjjjjjtdtdtdtdtdtdtdtdtd td f fd d }t}x"|D]\}}}|j|||qtWtd j t |ddddS)z/Display the details about available advisories.z Update IDZTypeZUpdatedZBugsZCVEsZ DescriptionZSeverityZRightsZFilesZ Installedc s|jgjj|jtdgt|jggg|jp0dj|j g|j pBdjt t fdd|j Ddg }xV|jD]L}|jtjkr|djdj|j|jpdqn|jtjkrn|dj|jqnW|dj|djsd|d<d|d <jjd kr|rtd ntd g|d <t}g}|jdd|jd|j|jddxxt|D]j\}}|ddgfkrtqXxJt|D]>\}} |dkr|nd} |t| } |jd| d| | fq~WqXWdj|S)Nrrc3s|]}|jkr|jVqdS)N)rfilename)r Zpkg)archesr r rsszHUpdateInfoCommand.display_info..advisory2info..z{} - {}rr"trueZfalse =Oz rz %*s%s: %s )rirrrlrr rr[ splitlinesrpZrightsrrGZpackagesrrrJrorformattitlermsortrAr;rzip 
enumeraterjoin) rPr Z attributesrnrlinesrZ atr_linesilinerZ key_padding)rlabelsr&rr r advisory2infoisF          "z5UpdateInfoCommand.display_info..advisory2infoz cSs|jS)N)lower)rr r r rsz0UpdateInfoCommand.display_info..)rN) r^r_Z list_archesrrrrGrIrrr)r&rZrZ advisoriesrrPr r )rrr&rr rXas  (zUpdateInfoCommand.display_info)&__name__ __module__ __qualname____doc__rJrKrrLrNrrMrrrBrkeysaliasesrrErDr$ staticmethodr<rRr\rfrsrurzrVrSrTrUrrYrWrX __classcell__r r )r(r r)sJ        &6   1r)rZ __future__rrrrrgrJZdnf.clirZdnf.cli.option_parserrZdnf.i18nrrZ dnf.pycompr rZCommandrr r r r s      PK!Q6Q62cli/commands/__pycache__/updateinfo.cpython-36.pycnu[3 ft`2J@sdZddlmZddlmZddlmZddlZddlZddlZddlm Z ddl m Z ddl m Z mZdd lmZd d ZGd d d e jZdS)zUpdateInfo CLI command.)absolute_import)print_function)unicode_literalsN)commands) OptionParser)_ exact_width)unicodecCstdd|DS)z7Return maximum length of items in a non-empty iterable.css|]}t|VqdS)N)r).0itemr /usr/lib/python3.6/updateinfo.py &sz_maxlen..)max)iterabler r r _maxlen$src s.eZdZdZejedejedejedej edej ediZ ededed ed d Z d d d d d d ddZ dgee jZedZdZdddegZfddZeddZddZddZddZd d!Zd"d#Zd$d%Zd&d'Zd(d)Zd*d+Zd,d-Z d.d/Z!d0d1Z"d2d3Z#d4d5Z$Z%S)6UpdateInfoCommandz)Implementation of the UpdateInfo command.bugfix enhancementsecurityunknown newpackagez Critical/Sec.zImportant/Sec.z Moderate/Sec.zLow/Sec.)Critical ImportantModerateLowlistinfosummary)zlist-updateinfoz list-securityzlist-seczinfo-updateinfoz info-securityzinfo-seczsummary-updateinfoZ updateinfoz!display advisories about packages available installedupdatesallcstt|j|d|_dS)zInitialize the command.N)superr__init___installed_query)selfcli) __class__r r r$CszUpdateInfoCommand.__init__c Cs|j}|jddddtdd|jddddtd d|jd dd dtd d|jd dddtdddddg}|j}|jddddtdd|jddddtdd|jddddtdd|jddddtdd|jd d!ddtd"d|jd#d$d%||d&tjtd'd(dS))Nz --available _availabilityr store_constz?advisories about newer versions of installed packages (default))destconstactionhelpz --installedr z?advisories about equal and older versions of installed packagesz --updatesr!zbadvisories about newer versions of those installed packages for which a newer version is availablez--allr"z3advisories about any versions of installed packagesrrrz --summary _spec_actionz$show summary of advisories (default)z--listzshow list of advisoriesz--infozshow info of advisoriesz --with-cvewith_cveF store_truez'show only advisories with CVE reference)r+defaultr-r.z --with-bzwith_bzz,show only advisories with bugzilla referencespec*ZSPECrzPackage specification)nargsmetavarchoicesr2r-r.)Zadd_mutually_exclusive_group add_argumentrrZPkgNarrowCallback)parser availabilityZcmdsZ output_formatr r r set_argparserHsD                zUpdateInfoCommand.set_argparsercCsd|jj_d|jj_|jj|jkr6|j|jj|j_n|jjrJ|jj|j_|jj r`|jj |j_ n:|jj s||jj d|j kr|j |j_ n|jj jd|j_ t|j_|jjr|jjjtj|jjr|jjjtj|jjr|jjjtj|jjr|jjjtj|jj r|jj jd}|dkr:|jjjtjn|dkrV|jjjtjnp|d krr|jjjtjnT|dkr|jjjtjn8|d krd|j_n$|d krd|j_n|jj jd||jjr|jj j|jjd S)zADo any command-specific configuration based on command arguments.Trrrrsecr bugzillasbzscvesN)rr=)r>r?) 
r'ZdemandsZavailable_reposZsack_activationoptsZcommanddirect_commands spec_actionr/r)r;r4availabilitiesavailability_defaultpopset_advisory_typesraddhawkeyADVISORY_BUGFIXrADVISORY_ENHANCEMENTrADVISORY_NEWPACKAGErADVISORY_SECURITYr3r0insertadvisoryextend)r&r4r r r configurensJ                zUpdateInfoCommand.configurecCs|jjdkr$|j|jj}td}n^|jjdkrH|j|jj}td}n:|jjdkrl|j|jj}td}n|j|jj}td}|jjdkr|j |n$|jjdkr|j |n |j ||dS)z#Execute the command with arguments.r r!r"rrrN) rAr;installed_apkg_adv_instsr4rupdating_apkg_adv_instsall_apkg_adv_instsavailable_apkg_adv_instsrC display_list display_infodisplay_summary)r&apkg_adv_insts descriptionr r r runs           zUpdateInfoCommand.runcCs@|jdkr |jjjjj|_|jj|j|jd}t |dkS)N)nameZevr__gter) r%basesackqueryr Zapplyfilterr]evrlen)r&apackageqr r r _newer_equal_installeds z(UpdateInfoCommand._newer_equal_installedcs,|jj rJ|jj rJ|jj rJ|jj rJ|jj rJ|jj rJ|jj rJdSj|jjkr\dSt fdd|jjDrzdS|jjrj|jjkrdS|jjrt fdd|jjDrdS|jjrt fdd|jjDrdS|jjrt ddj DrdS|jjr(t ddj Dr(dSd S) NTc3s|]}tjj|VqdS)N)fnmatch fnmatchcaseid)r pat)rPr r rsz6UpdateInfoCommand._advisory_matcher..csg|]}j|qSr )Z match_bug)r Zbug)rPr r sz7UpdateInfoCommand._advisory_matcher..csg|]}j|qSr )Z match_cve)r Zcve)rPr r rkscSsg|]}|jtjkqSr )typerJ REFERENCE_CVE)r refr r r rkscSsg|]}|jtjkqSr )rlrJREFERENCE_BUGZILLA)r rnr r r rksF) rArHr4severityZbugzillar@r0r3rlany references)r&rPr )rPr _advisory_matchers2       ""  z#UpdateInfoCommand._advisory_matcherc#shxb|j|D]Tj|jj}|j|}tfdd|jjD}|sJ|r |j}||fVq WdS)z4Return (adv. package, advisory, installed) triplets.c3s|]}tjj|VqdS)N)rgrhr])r rj)rdr r rszAUpdateInfoCommand._apackage_advisory_installed..N) Zget_advisory_pkgsZ get_advisoryr^r_rsrqrAr4rf)r&Z pkgs_queryZcmptypespecsrPZadvisory_matchZapackage_matchr r )rdr _apackage_advisory_installeds   z.UpdateInfoCommand._apackage_advisory_installedcCs@|jj}|jjdd}|j}|r<|j|jj|jd}|S)z.z %*s %s)rprintrrJrMrNrKrLADVISORY_UNKNOWNrr r^confZautocheck_running_kernelr'Z_check_running_kernel) r&rZr[Ztyp2cntZ label_countswidthindentlabelcountr r r rYs2 $ z!UpdateInfoCommand.display_summaryc sfdd}fdd}t}x|D]\}}}d|j|j|jf}jjsRjjrx|jD]Z} | jt j krxjj rxqZn| jt j krjj rqZ|j|j f|j |||jft| j<qZWq$|j|j f|j |||jft|j<q$Wg} d} } } xt|jddd D]r\\}}}}t| t|} xR|jD]F\}}t| t|} ||}t| t|} | j||||||fq.WqWxZ| D]R\}}}}}jjjrtd || || || ||fntd || || ||fqWd S) zDisplay the list of advisories.cs jjdksdS|rdSdSdS)Nr"zi z )rAr;)inst)r&r r inst2mark2s  z1UpdateInfoCommand.display_list..inst2markcs2|tjkrjj|tdSjj|tdSdS)Nz Unknown/Sec.r)rJrNSECURITY2LABELgetr TYPE2LABEL)typZsev)r&r r type2label:s z2UpdateInfoCommand.display_list..type2labelz%s-%s.%srcSs|dS)Nrr )xr r r Rsz0UpdateInfoCommand.display_list..)keyz%s%-*s %-*s %-*s %sz%s%-*s %-*s %sN)dictr]rbarchrAr0r3rrrlrJrormrp setdefaultupdatedrisorteditemsrrcappendr^rverboser)r&rZrrZnevra_inst_dictrrPr ZnevrarnZadvlistZidwZtlwZnwrZaupdatedrZaidZatypesevrr )r&r rW0s4   *( $$ zUpdateInfoCommand.display_listc sjjjjjjtdtdtdtdtdtdtdtdtd td f fd d }t}x"|D]\}}}|j|||qtWtd j t |ddddS)z/Display the details about available advisories.z Update IDZTypeZUpdatedZBugsZCVEsZ DescriptionZSeverityZRightsZFilesZ Installedc s|jgjj|jtdgt|jggg|jp0dj|j g|j pBdjt t fdd|j Ddg }xV|jD]L}|jtjkr|djdj|j|jpdqn|jtjkrn|dj|jqnW|dj|djsd|d<d|d <jjd kr|rtd ntd g|d <t}g}|jdd|jd|j|jddxxt|D]j\}}|ddgfkrtqXxJt|D]>\}} |dkr|nd} |t| } |jd| d| | fq~WqXWdj|S)Nrrc3s|]}|jkr|jVqdS)N)rfilename)r Zpkg)archesr r rsszHUpdateInfoCommand.display_info..advisory2info..z{} - {}rr"trueZfalse =Oz rz %*s%s: %s )rirrrlrr rr[ splitlinesrpZrightsrrGZpackagesrrrJrorformattitlermsortrAr;rzip 
enumeraterjoin) rPr Z attributesrnrlinesrZ atr_linesilinerZ key_padding)rlabelsr&rr r advisory2infoisF          "z5UpdateInfoCommand.display_info..advisory2infoz cSs|jS)N)lower)rr r r rsz0UpdateInfoCommand.display_info..)rN) r^r_Z list_archesrrrrGrIrrr)r&rZrZ advisoriesrrPr r )rrr&rr rXas  (zUpdateInfoCommand.display_info)&__name__ __module__ __qualname____doc__rJrKrrLrNrrMrrrBrkeysaliasesrrErDr$ staticmethodr<rRr\rfrsrurzrVrSrTrUrrYrWrX __classcell__r r )r(r r)sJ        &6   1r)rZ __future__rrrrrgrJZdnf.clirZdnf.cli.option_parserrZdnf.i18nrrZ dnf.pycompr rZCommandrr r r r s      PK!&caa5cli/commands/__pycache__/upgrade.cpython-36.opt-1.pycnu[3 ft`~@stddlmZddlmZddlZddlZddlZddlmZddl m Z ddl m Z ej dZGdd d ejZdS) )absolute_import)unicode_literalsN)commands) OptionParser)_dnfc@sXeZdZdZdZedZed d Zd d Z d dZ ddZ ddZ ddZ ddZdS)UpgradeCommandzTA class containing methods needed by the cli to execute the update command. upgradeupdate upgrade-to update-to localupdateupz,upgrade a package or packages on your systemcCs"|jddtdtjtdddS)NZpackages*zPackage to upgradeZPACKAGE)nargshelpactionmetavar) add_argumentrrZParseSpecGroupFileCallback)parserr/usr/lib/python3.6/upgrade.py set_argparser*szUpgradeCommand.set_argparsercCsZ|jj}d|_d|_d|_d|_tj|j|j|j j sDtj |jd|_ d|_ d|_dS)zVerify that conditions are met so that this command can run. These include that there are enabled repositories with gpg keys, and that this command is being run by the root user. TN)clidemandsZsack_activationZavailable_reposZ resolvingZ root_userrZ _checkGPGKeybaseopts filenamesZ_checkEnabledRepoupgrade_minimal all_securityskipped_grp_specs)selfrrrr configure0s zUpgradeCommand.configurecCs|jr dnd}|jj|j||jd|jjs<|jjs<|jjrzd}||jO}||j O}||j O}||j O}|rdSn|j j dStjjtddS)NeqZgte)cmp_typeallFzNo packages marked for upgrade.)rrZ _populate_update_security_filterrrr pkg_specs grp_specs_update_modules _update_files_update_packages_update_groupsrZ upgrade_allr exceptionsErrorr)r!r$resultrrrrunBs       zUpgradeCommand.runcCsNt|jj}tjjr6tjjj|j}|j |jj|_ n |jj|_ t|j |kS)N) lenrr'rrZ WITH_MODULESmodule module_baseZ ModuleBaser r )r!Zgroup_specs_numr2rrrr(Vs   zUpgradeCommand._update_modulescCsd}|jjrx~|jj|jjd|jjjdD]^}y|jj|d}Wq*tjj k r}z$t j t d|jjj j|jWYdd}~Xq*Xq*W|S)NF)strictprogressTzNo match for argument: %s)rrrZadd_remote_rpmsoutputr4Zpackage_upgraderr, MarkingErrorloggerinfortermboldlocation)r!successZpkgerrrr)`s  *zUpgradeCommand._update_filescCsrd}xh|jjD]\}y|jj|d}Wqtjjk rh}z"tjt d|jj j j |WYdd}~XqXqW|S)NFTzNo match for argument: %s) rr&rr rr,r6r7r8rr5r9r:)r!r<Zpkg_specr=rrrr*ms  (zUpgradeCommand._update_packagescCs|jr|jj|jdSdS)NTF)r rZenv_group_upgrade)r!rrrr+xszUpgradeCommand._update_groupsN)r r r r r r)__name__ __module__ __qualname____doc__aliasesrZsummary staticmethodrr"r/r(r)r*r+rrrrr#s    r)Z __future__rrZloggingZdnf.exceptionsrZdnf.baseZdnf.clirZdnf.cli.option_parserrZdnf.i18nrZ getLoggerr7ZCommandrrrrrs      PK!&caa/cli/commands/__pycache__/upgrade.cpython-36.pycnu[3 ft`~@stddlmZddlmZddlZddlZddlZddlmZddl m Z ddl m Z ej dZGdd d ejZdS) )absolute_import)unicode_literalsN)commands) OptionParser)_dnfc@sXeZdZdZdZedZed d Zd d Z d dZ ddZ ddZ ddZ ddZdS)UpgradeCommandzTA class containing methods needed by the cli to execute the update command. 
upgradeupdate upgrade-to update-to localupdateupz,upgrade a package or packages on your systemcCs"|jddtdtjtdddS)NZpackages*zPackage to upgradeZPACKAGE)nargshelpactionmetavar) add_argumentrrZParseSpecGroupFileCallback)parserr/usr/lib/python3.6/upgrade.py set_argparser*szUpgradeCommand.set_argparsercCsZ|jj}d|_d|_d|_d|_tj|j|j|j j sDtj |jd|_ d|_ d|_dS)zVerify that conditions are met so that this command can run. These include that there are enabled repositories with gpg keys, and that this command is being run by the root user. TN)clidemandsZsack_activationZavailable_reposZ resolvingZ root_userrZ _checkGPGKeybaseopts filenamesZ_checkEnabledRepoupgrade_minimal all_securityskipped_grp_specs)selfrrrr configure0s zUpgradeCommand.configurecCs|jr dnd}|jj|j||jd|jjs<|jjs<|jjrzd}||jO}||j O}||j O}||j O}|rdSn|j j dStjjtddS)NeqZgte)cmp_typeallFzNo packages marked for upgrade.)rrZ _populate_update_security_filterrrr pkg_specs grp_specs_update_modules _update_files_update_packages_update_groupsrZ upgrade_allr exceptionsErrorr)r!r$resultrrrrunBs       zUpgradeCommand.runcCsNt|jj}tjjr6tjjj|j}|j |jj|_ n |jj|_ t|j |kS)N) lenrr'rrZ WITH_MODULESmodule module_baseZ ModuleBaser r )r!Zgroup_specs_numr2rrrr(Vs   zUpgradeCommand._update_modulescCsd}|jjrx~|jj|jjd|jjjdD]^}y|jj|d}Wq*tjj k r}z$t j t d|jjj j|jWYdd}~Xq*Xq*W|S)NF)strictprogressTzNo match for argument: %s)rrrZadd_remote_rpmsoutputr4Zpackage_upgraderr, MarkingErrorloggerinfortermboldlocation)r!successZpkgerrrr)`s  *zUpgradeCommand._update_filescCsrd}xh|jjD]\}y|jj|d}Wqtjjk rh}z"tjt d|jj j j |WYdd}~XqXqW|S)NFTzNo match for argument: %s) rr&rr rr,r6r7r8rr5r9r:)r!r<Zpkg_specr=rrrr*ms  (zUpgradeCommand._update_packagescCs|jr|jj|jdSdS)NTF)r rZenv_group_upgrade)r!rrrr+xszUpgradeCommand._update_groupsN)r r r r r r)__name__ __module__ __qualname____doc__aliasesrZsummary staticmethodrr"r/r(r)r*r+rrrrr#s    r)Z __future__rrZloggingZdnf.exceptionsrZdnf.baseZdnf.clirZdnf.cli.option_parserrZdnf.i18nrZ getLoggerr7ZCommandrrrrrs      PK!EyOO<cli/commands/__pycache__/upgrademinimal.cpython-36.opt-1.pycnu[3 ft`@sDddlmZddlmZddlmZddlmZGdddeZdS))absolute_import)unicode_literals)_)UpgradeCommandc@s$eZdZdZd ZedZddZdS) UpgradeMinimalCommandzSA class containing methods needed by the cli to execute the check command. upgrade-minimalupdate-minimalup-minzWupgrade, but only 'newest' package match which fixes a problem that affects your systemc CsRtj|d|_t|jj|jj|jj|jj|jj |jj |jj |jj gsNd|_ dS)NT)r configureZupgrade_minimalanyZoptsZbugfixZ enhancementZ newpackageZsecurityZadvisoryZbugzillaZcvesZseverityZ all_security)selfr $/usr/lib/python3.6/upgrademinimal.pyr "s  zUpgradeMinimalCommand.configureN)rrr )__name__ __module__ __qualname____doc__aliasesrZsummaryr r r r rrsrN)Z __future__rrZdnf.i18nrZdnf.cli.commands.upgraderrr r r rs    PK!EyOO6cli/commands/__pycache__/upgrademinimal.cpython-36.pycnu[3 ft`@sDddlmZddlmZddlmZddlmZGdddeZdS))absolute_import)unicode_literals)_)UpgradeCommandc@s$eZdZdZd ZedZddZdS) UpgradeMinimalCommandzSA class containing methods needed by the cli to execute the check command. 
upgrade-minimalupdate-minimalup-minzWupgrade, but only 'newest' package match which fixes a problem that affects your systemc CsRtj|d|_t|jj|jj|jj|jj|jj |jj |jj |jj gsNd|_ dS)NT)r configureZupgrade_minimalanyZoptsZbugfixZ enhancementZ newpackageZsecurityZadvisoryZbugzillaZcvesZseverityZ all_security)selfr $/usr/lib/python3.6/upgrademinimal.pyr "s  zUpgradeMinimalCommand.configureN)rrr )__name__ __module__ __qualname____doc__aliasesrZsummaryr r r r rrsrN)Z __future__rrZdnf.i18nrZdnf.cli.commands.upgraderrr r r rs    PK!a0gg6cli/commands/__pycache__/__init__.cpython-36.opt-1.pycnu[3 ft`{}@sdZddlmZddlmZddlmZddlmZddlZ ddl Z ddl Z ddl Z ddl Z ddlZe jdZedd Zed d Zed Zd dZffddZGdddeZGdddeZGdddeZGdddeZGdddeZGdddeZGdddeZdS)z< Classes for subcommands of the yum command line interface. )print_function)unicode_literals) OptionParser)_Ndnfz+To diagnose the problem, try running: '%s'.zrpm -Va --nofiles --nodigestzDYou probably have corrupted RPMDB, running '%s' might fix the issue.zrpm --rebuilddba You have enabled checking of packages via GPG keys. This is a good thing. However, you do not have any GPG public keys installed. You need to download the keys for packages you wish to install and install them. You can do that by running the command: rpm --import public.gpg.key Alternatively you can specify the url to the key you would like to use for a repository in the 'gpgkey' option in a repository section and {prog} will install it for you. For more information contact your distribution or package provider.cCsp|jjs dS|jslxV|jjD]H}|js0|jr |j r tjdt j t j j dtjtd|t jjq WdS)zVerify that there are gpg keys for the enabled repositories in the rpm database. :param base: a :class:`dnf.Base` object. :raises: :class:`cli.CliError` Nz %s )progzProblem repository: %s)confZgpgcheckZ_gpg_key_checkreposZ iter_enabledZ repo_gpgcheckZgpgkeyloggerZcriticalgpg_msgformatrutilMAIN_PROG_UPPERrcliCliError)baserrepor/usr/lib/python3.6/__init__.py _checkGPGKey:srcCs||jjrdSxD|D]<}|jdr2tjj|r2dStjjj|d}|d krdSqWt dj d j |j j }tjj|dS) zVerify that there is at least one enabled repo. :param base: a :class:`dnf.Base` object. :param possible_local_files: the list of strings that could be a local rpms :raises: :class:`cli.CliError`: Nz.rpmrhttpftpfilehttpsz*There are no enabled repositories in "{}".z", ")rrrr)r Z _any_enabledendswithospathexistsrZpycompZurlparserr joinrZreposdirrr)rZpossible_local_filesZlfileschememsgrrr_checkEnabledRepoKs  r!c@seZdZdZgZdZdZddZeddZ edd Z ed d Z d d Z ddZ ddZddZddZddZddZdS)Commandz%Abstract base class for CLI commands.NcCs ||_dS)N)r)selfrrrr__init__fszCommand.__init__cCs|jjS)N)rr)r$rrrrjsz Command.basecCs |jdS)Nr)aliases)r$rrr_basecmdoszCommand._basecmdcCs |jjjS)N)rroutput)r$rrrr(sszCommand.outputcCsdS)z4Define command specific options and arguments. 
#:apiNr)r$parserrrr set_argparserwszCommand.set_argparsercCsdS)z*Do any command-specific pre-configuration.Nr)r$rrr pre_configure{szCommand.pre_configurecCsdS)z&Do any command-specific configuration.Nr)r$rrr configureszCommand.configurecCs&t|tjjrttfStd|dS)z.Get suggestions for resolving the given error.zerror not supported yet: %sN) isinstancer exceptionsZTransactionCheckError _RPM_VERIFY_RPM_REBUILDDBNotImplementedError)r$errorrrrget_error_outputszCommand.get_error_outputcCsdS)zExecute the command.Nr)r$rrrrunsz Command.runcCsdS)z$Finalize operation after resolvementNr)r$rrr run_resolvedszCommand.run_resolvedcCsdS)z%Finalize operations post-transaction.Nr)r$rrrrun_transactionszCommand.run_transaction)__name__ __module__ __qualname____doc__r&summaryoptsr%propertyrr'r(r*r+r,r3r4r5r6rrrrr"_s   r"c @sReZdZdZdZedZdZddddd d d d eh Ze d dZ ddZ ddZ dS) InfoCommandzRA class containing methods needed by the cli to execute the info command. infoz4display details about a package or group of packagesall available installedextrasupdatesupgrades autoremoverecent obsoletesc Cs|j}|jdddddtdd|jddddtd d |jd ddd td d |jddddtdd |jddddtdd |jddddtdd |jddddtdd |jddddtdd |jddtd|j|jtjtdddS) Nz--all_packages_action store_constr@zshow all packages (default))destactionconstdefaulthelpz --availablerAzshow only available packages)rKrLrMrOz --installedrBzshow only installed packagesz--extrasrCzshow only extras packagesz --updatesrEzshow only upgrades packagesz --upgradesz --autoremoverFzshow only autoremove packagesz--recentrGz#show only recently changed packagespackages*PACKAGEzPackage name specification)nargsmetavarchoicesrNrLrO)add_mutually_exclusive_group add_argumentr pkgnarrowsDEFAULT_PKGNARROWrPkgNarrowCallback)clsr)narrowsrrrr*s:        zInfoCommand.set_argparsercCs||jj}d|_|jjr"|jj|j_|jjdkr4d|_|jjrd|jjr\|jjdd|jjnd|j_|jjdkrxd|j_dS)NTrBz --obsoletesz--rHrDrE) rdemandssack_activationr<rIpackages_actionavailable_reposrH_option_conflict)r$r]rrrr,s   zInfoCommand.configurecCs&|jj|j|jjd|jj|jjS)Nr?)r _populate_update_security_filterr<routput_packagesr_rP)r$rrrr4szInfoCommand.runN)r?) r7r8r9r:r&rr;rYrX classmethodr*r,r4rrrrr>s   r>c@s$eZdZdZdZedZddZdS) ListCommandzRA class containing methods needed by the cli to execute the list command. listlsz$list a package or groups of packagescCs&|jj|j|jjd|jj|jjS)Nrf)rrbr<rrcr_rP)r$rrrr4szListCommand.runN)rfrg)r7r8r9r:r&rr;r4rrrrresrec@s8eZdZdZd ZedZeddZdd Z d d Z d S)ProvidesCommandzVA class containing methods needed by the cli to execute the provides command. provides whatprovidesprovz*find what package provides the given valuecCs|jddtdtdddS)N dependency+ZPROVIDEz#Provide specification to search for)rSrTrO)rWr)r)rrrr*szProvidesCommand.set_argparsercCs|jj}d|_d|_d|_dS)NTF)rr]r`Zfresh_metadatar^)r$r]rrrr,szProvidesCommand.configurecCstjtd|jj|jjS)NzSearching Packages: )r debugrrrir<rl)r$rrrr4szProvidesCommand.runN)rirjrk) r7r8r9r:r&rr; staticmethodr*r,r4rrrrrhs  rhc@s8eZdZdZd ZedZeddZddZ d d Z d S) CheckUpdateCommandzZA class containing methods needed by the cli to execute the check-update command. 
check-update check-upgradez$check for available package upgradescCs0|jddddtdd|jddtd d dS) Nz --changelogs changelogsF store_truezshow changelogs before update)rKrNrLrOrPrQrR)rSrT)rWr)r)rrrr*s z CheckUpdateCommand.set_argparsercCs6|jj}d|_d|_d|_|jjr(d|_t|jdS)NT) rr]r^r`Zplugin_filtering_enabledr<rsr!r)r$r]rrrr, szCheckUpdateCommand.configurecCsR|jj|jdd|jj|jjd|jjd}|r:d|jj_|jj j rN|jj dS)NZgte)Zcmp_typeT)print_rsd) rrbr<r check_updatesrPrsr]success_exit_statusrZautocheck_running_kernelZ_check_running_kernel)r$foundrrrr4s   zCheckUpdateCommand.runN)rqrr) r7r8r9r:r&rr;ror*r,r4rrrrrps   rpc seZdZdZGdddeZGdddeZGdddeZGdd d eZGd d d eZ Gd d d eZ GdddeZ GdddeZ GdddeZ GdddeZGdddeZeeeee e e e e eeh Zd%ZedZfddZdd Zd!d"Zd#d$ZZS)&RepoPkgsCommandz2Implementation of the repository-packages command.c@s$eZdZdZdZddZddZdS) z%RepoPkgsCommand.CheckUpdateSubCommandz'Implementation of the info sub-command. check-updatecCs|jj}d|_d|_dS)NT)rr]r`r^)r$r]rrrr,(sz/RepoPkgsCommand.CheckUpdateSubCommand.configurecCs*|jj|jj|jdd}|r&d|jj_dS)z?Execute the command with respect to given arguments *cli_args*.T)rurvN)rrwr< pkg_specsreponamerr]rx)r$ryrrr run_on_repo-s  z1RepoPkgsCommand.CheckUpdateSubCommand.run_on_repoN)r{)r7r8r9r:r&r,r~rrrrCheckUpdateSubCommand#src@s$eZdZdZdZddZddZdS) zRepoPkgsCommand.InfoSubCommandz'Implementation of the info sub-command.r?cCsh|jj}d|_|jjr"|jj|j_|jjdkr4d|_|jjrd|jjr\|jjdd|jjnd|j_dS)NTrBz --obsoletesz--rH) rr]r^r<_pkg_specs_actionpkg_specs_actionr`rHra)r$r]rrrr,9s  z(RepoPkgsCommand.InfoSubCommand.configurecCs.|jj|j|jjd|jj|jj|jdS)z?Execute the command with respect to given arguments *cli_args*.r?N)rrbr<rrcrr|r})r$rrrr~Fsz*RepoPkgsCommand.InfoSubCommand.run_on_repoN)r?)r7r8r9r:r&r,r~rrrrInfoSubCommand4s rc@s$eZdZdZdZddZddZdS) z!RepoPkgsCommand.InstallSubCommandz*Implementation of the install sub-command.installcCs$|jj}d|_d|_d|_d|_dS)NT)rr]r`r^ resolving root_user)r$r]rrrr,Qs z+RepoPkgsCommand.InstallSubCommand.configurecCs|jj|jt|j|jd}|jjsjy|jjd|jWn&tj j k rbt j t dYqXd}nvxt|jjD]h}y|jj||jWnJtj j k r}z*dj|j|jjjj|}t j |WYdd}~XqtXd}qtW|stj jt ddS)NFrQzNo package available.Tz{}: {}zNo packages marked for install.)rrbr<rrr|rr}rr. MarkingErrorr r?rr valuer(termboldError)r$donepkg_specer rrrr~Xs$z-RepoPkgsCommand.InstallSubCommand.run_on_repoN)r)r7r8r9r:r&r,r~rrrrInstallSubCommandLsrc@seZdZdZdZddZdS)zRepoPkgsCommand.ListSubCommandz'Implementation of the list sub-command.rfcCs.|jj|j|jjd|jj|jj|jdS)z?Execute the command with respect to given arguments *cli_args*.rfN)rrbr<rrcrr|r})r$rrrr~zsz*RepoPkgsCommand.ListSubCommand.run_on_repoN)rf)r7r8r9r:r&r~rrrrListSubCommandusrc@s$eZdZdZdZddZddZdS) z RepoPkgsCommand.MoveToSubCommandz*Implementation of the move-to sub-command.move-tocCs$|jj}d|_d|_d|_d|_dS)NT)rr]r^r`rr)r$r]rrrr,s z*RepoPkgsCommand.MoveToSubCommand.configurecCst|j|jd}|jjsy|jjd|jdWn`tjj k rVt j t dYn@tjj k rzt j t dYntjjk rYnXd}nx|jjD]}y|jj||jdWntjj k rt d}t j ||Yqtjj k rd}z\xT|jD]J}d}|jjj|}|r.t d |}t d }t j ||jjj||qWWYd d }~Xqtjjk r|YqXd}qW|stjjt d d S) z?Execute the command with respect to given arguments *cli_args*.FrQ)Z new_reponamezNo package installed.zNo package available.TzNo match for argument: %sr#z (from %s)z%Installed package %s%s not available.NzNothing to do.)rrrr<r| reinstallr}rr.PackagesNotInstalledErrorr r?rPackagesNotAvailableErrorrrPhistoryrr(rrr)r$rrr errpkgxmsgpkgreporrrr~s>  .z,RepoPkgsCommand.MoveToSubCommand.run_on_repoN)r)r7r8r9r:r&r,r~rrrrMoveToSubCommandsrc@s$eZdZdZdZddZddZdS) z&RepoPkgsCommand.ReinstallOldSubCommandz0Implementation of the reinstall-old sub-command. 
reinstall-oldcCs$|jj}d|_d|_d|_d|_dS)NT)rr]r^r`rr)r$r]rrrr,s z0RepoPkgsCommand.ReinstallOldSubCommand.configurecCst|j|jd}|jjsy|jjd|j|jWndtjj k r\t d}t j |Yn@tjj k rt j t dYntjjk rYnXd}nx|jjD]}y|jj||j|jWntjj k rt d}t j ||Yqtjj k rl}z\xT|jD]J}d}|jjj|}|r6t d|}t d }t j ||jjj||qWWYd d }~Xqtjjk rYqXd}qW|stjjt d d S) z?Execute the command with respect to given arguments *cli_args*.FrQz)No package installed from the repository.zNo package available.TzNo match for argument: %sr#z (from %s)z%Installed package %s%s not available.NzNothing to do.)rrrr<r|rr}rr.rrr r?rrrPrrr(rrr)r$rr rrrrrrrrr~sB    .z2RepoPkgsCommand.ReinstallOldSubCommand.run_on_repoN)r)r7r8r9r:r&r,r~rrrrReinstallOldSubCommandsrcs4eZdZdZd ZfddZddZddZZS) z#RepoPkgsCommand.ReinstallSubCommandz,Implementation of the reinstall sub-command.rcs,ttj|j|tj|tj|f|_dS)zInitialize the command.N)superrzReinstallSubCommandr%rrwrapped_commands)r$r) __class__rrr%sz,RepoPkgsCommand.ReinstallSubCommand.__init__cCs6d|jj_x&|jD]}|j|_|j|_|jqWdS)NT)rr]r`rr<r}r,)r$commandrrrr,s   z-RepoPkgsCommand.ReinstallSubCommand.configurec Cs\t|j|jxH|jD].}y |jWntjjk r@wYqXPqWtjjtddS)z?Execute the command with respect to given arguments *cli_args*.z!No packages marked for reinstall.N) rrrrr~rr.rr)r$rrrrr~s  z/RepoPkgsCommand.ReinstallSubCommand.run_on_repo)r) r7r8r9r:r&r%r,r~ __classcell__rr)rrrs  rc@s,eZdZdZd ZddZddZddZd S) z,RepoPkgsCommand.RemoveOrDistroSyncSubCommandz8Implementation of the remove-or-distro-sync sub-command.remove-or-distro-synccCs$|jj}d|_d|_d|_d|_dS)NT)rr]r`r^rr)r$r]rrrr,s z6RepoPkgsCommand.RemoveOrDistroSyncSubCommand.configurec s|jjjjtjj|}|j|jjj}|jjjfdd|j D}|s`tj j d||j }|jjj j}xD|D]<}|j|j|jdr|jjjj|qz|jjjj||dqzWdS)z;Synchronize a package with another repository or remove it.csg|]}j|kr|qSr)r).0r)rr}rr #szIRepoPkgsCommand.RemoveOrDistroSyncSubCommand._replace..zno package matched)namearch) clean_depsN)rrZsackZ disable_reporsubjectZSubjectZget_best_queryrrBr.rrArZclean_requirements_on_removefilterrrZ_goalZ distupgradeZerase) r$rr}rZmatchesrBrArpackager)rr}r_replaces    z5RepoPkgsCommand.RemoveOrDistroSyncSubCommand._replacec Cst|j|jd}|jjs^y|jd|jWn*tjj k rVt d}t j |YqXd}nVxT|jjD]H}y|j||jWn,tjj k rt d}t j ||YqhXd}qhW|stjj t ddS)z?Execute the command with respect to given arguments *cli_args*.FrQz)No package installed from the repository.TzNo match for argument: %szNothing to do.N)rrrr<r|rr}rr.rrr r?r)r$rr rrrrr~0s$z8RepoPkgsCommand.RemoveOrDistroSyncSubCommand.run_on_repoN)r)r7r8r9r:r&r,rr~rrrrRemoveOrDistroSyncSubCommands rc@s$eZdZdZdZddZddZdS) z+RepoPkgsCommand.RemoveOrReinstallSubCommandz6Implementation of the remove-or-reinstall sub-command.remove-or-reinstallcCs$|jj}d|_d|_d|_d|_dS)NT)rr]r^r`rr)r$r]rrrr,Rs z5RepoPkgsCommand.RemoveOrReinstallSubCommand.configurec Cst|j|jd}|jjs~y|jjd|j|jddWn@tjj k r`t d}t j |Yqtjj k rvYqXd}nvxt|jjD]h}y|jj||j|jddWnBtjj k rt d}t j ||Yqtjj k rYqXd}qW|s tjjt ddS) z?Execute the command with respect to given arguments *cli_args*.FrQT)Z old_reponameZnew_reponame_neqZ remove_naz)No package installed from the repository.zNo match for argument: %szNothing to do.N)rrrr<r|rr}rr.rrr r?rr)r$rr rrrrr~Ys4  z7RepoPkgsCommand.RemoveOrReinstallSubCommand.run_on_repoN)r)r7r8r9r:r&r,r~rrrrRemoveOrReinstallSubCommandMsrc@s$eZdZdZdZddZddZdS) z RepoPkgsCommand.RemoveSubCommandz)Implementation of the remove sub-command.removecCs*|jj}d|_d|_d|_d|_d|_dS)NTF)rr]r^Z allow_erasingr`rr)r$r]rrrr,s z*RepoPkgsCommand.RemoveSubCommand.configurecCsd}|jjsRy|jjd|jWn*tjjk rJtd}t j |YqXd}n`x^|jjD]R}y|jj||jWn4tjjk r}zt j t |WYdd}~Xq\Xd}q\W|st j tddS)z?Execute 
the command with respect to given arguments *cli_args*.FrQz)No package installed from the repository.TNzNo packages marked for removal.) r<r|rrr}rr.rrr r?strZwarning)r$rr rrrrrr~s  z,RepoPkgsCommand.RemoveSubCommand.run_on_repoN)r)r7r8r9r:r&r,r~rrrrRemoveSubCommand~src@s$eZdZdZd ZddZddZdS) z!RepoPkgsCommand.UpgradeSubCommandz*Implementation of the upgrade sub-command.upgrade upgrade-tocCs$|jj}d|_d|_d|_d|_dS)NT)rr]r^r`rr)r$r]rrrr,s z+RepoPkgsCommand.UpgradeSubCommand.configurec Cst|j|jd}|jjs.|jj|jd}nTxR|jjD]F}y|jj||jWn(tj j k rxt j t d|Yq8Xd}q8W|stj jt ddS)z?Execute the command with respect to given arguments *cli_args*.FTzNo match for argument: %szNo packages marked for upgrade.N)rrrr<r|Z upgrade_allr}rrr.rr r?rr)r$rrrrrr~sz-RepoPkgsCommand.UpgradeSubCommand.run_on_repoN)rr)r7r8r9r:r&r,r~rrrrUpgradeSubCommandsrrepository-packages repo-pkgs repo-packagesrepository-pkgsz7run commands on top of all packages in given repositorycs>tt|jfdd|jD}d|_dd|D|_dS)zInitialize the command.c3s|]}|VqdS)Nr)rsubcmd)rrr sz+RepoPkgsCommand.__init__..NcSsi|]}|jD] }||qqSr)r&)rraliasrrr sz,RepoPkgsCommand.__init__..)rrzr%SUBCMDSr_subcmd_name2obj)r$rZ subcmd_objs)r)rrr%s zRepoPkgsCommand.__init__c Cs`|j}|jdddddtdd|jddddtd d |jd ddd td d |jddddtdd |jddddtdd |jddddtdd |jddddtdd |jddddtdd |jddtjtdtddd d!|jD}d"d!|jD}|jd#dd$|d%j|d&d}|d dddd'ddh}|jd(d)td*||tjtd+d,dS)-Nz--allrrJr@zshow all packages (default))rKrLrMrNrOz --availablerAzshow only available packages)rKrLrMrOz --installedrBzshow only installed packagesz--extrasrCzshow only extras packagesz --updatesrEzshow only upgrades packagesz --upgradesz --autoremoverFzshow only autoremove packagesz--recentrGz#show only recently changed packagesr}ZREPOIDz Repository ID)rSrLrTrOcSsg|]}|jdqS)r)r&)rrrrrrsz1RepoPkgsCommand.set_argparser..cSsg|]}|jD]}|qqSr)r&)rrrrrrrsrZ SUBCOMMANDz, )rSrTrUrOrHr|rQrRzPackage specification)rSrTrUrNrLrO)rVrWrrZ_RepoCallbackEnablerrrZ)r$r)r\Zsubcommand_choicesZsubcommand_choices_allrYrXrrrr*sP         zRepoPkgsCommand.set_argparsercCsy|j|jjd|_Wn>tjjtfk rV}z|jjjtjjWYdd}~XnX|j|j_|jj d|j_ |jj dS)z8Verify whether the command can run with given arguments.rN) rr<rrrrKeyError optparserZ print_usager}r,)r$rrrrr,s  zRepoPkgsCommand.configurecCs|jjdS)z>Execute the command with respect to given arguments *extcmds*.N)rr~)r$rrrr4szRepoPkgsCommand.run)rrrr)r7r8r9r:r"rrrrrrrrrrrrr&rr;r%r*r,r4rrr)rrrz s0) 79>1(# + rzc@s0eZdZdZd ZedZeddZddZ dS) HelpCommandzRA class containing methods needed by the cli to execute the help command. rOzdisplay a helpful usage messagecCs*|jddtdtdjtjjdddS)Ncmd?ZCOMMANDz{prog} command to get help for)r)rSrTrO)rWrr rr r)r)rrrr*szHelpCommand.set_argparsercCsN|jj s|jj|jjkr(|jjjn"|jj|jj}|jjj||dS)N)r<rrZ cli_commandsrZ print_help)r$rrrrr4$s  zHelpCommand.runN)rO) r7r8r9r:r&rr;ror*r4rrrrrs  r)r:Z __future__rrZdnf.cli.option_parserrZdnf.i18nrZdnf.clirZdnf.exceptionsZ dnf.pycompZdnf.utilZloggingrZ getLoggerr r/r0r rr!objectr"r>rerhrprzrrrrrs:       9?$yPK!hh0cli/commands/__pycache__/__init__.cpython-36.pycnu[3 ft`{}@sdZddlmZddlmZddlmZddlmZddlZ ddl Z ddl Z ddl Z ddl Z ddlZe jdZedd Zed d Zed Zd dZffddZGdddeZGdddeZGdddeZGdddeZGdddeZGdddeZGdddeZdS)z< Classes for subcommands of the yum command line interface. )print_function)unicode_literals) OptionParser)_Ndnfz+To diagnose the problem, try running: '%s'.zrpm -Va --nofiles --nodigestzDYou probably have corrupted RPMDB, running '%s' might fix the issue.zrpm --rebuilddba You have enabled checking of packages via GPG keys. This is a good thing. However, you do not have any GPG public keys installed. 
You need to download the keys for packages you wish to install and install them. You can do that by running the command: rpm --import public.gpg.key Alternatively you can specify the url to the key you would like to use for a repository in the 'gpgkey' option in a repository section and {prog} will install it for you. For more information contact your distribution or package provider.cCsp|jjs dS|jslxV|jjD]H}|js0|jr |j r tjdt j t j j dtjtd|t jjq WdS)zVerify that there are gpg keys for the enabled repositories in the rpm database. :param base: a :class:`dnf.Base` object. :raises: :class:`cli.CliError` Nz %s )progzProblem repository: %s)confZgpgcheckZ_gpg_key_checkreposZ iter_enabledZ repo_gpgcheckZgpgkeyloggerZcriticalgpg_msgformatrutilMAIN_PROG_UPPERrcliCliError)baserrepor/usr/lib/python3.6/__init__.py _checkGPGKey:srcCs||jjrdSxD|D]<}|jdr2tjj|r2dStjjj|d}|d krdSqWt dj d j |j j }tjj|dS) zVerify that there is at least one enabled repo. :param base: a :class:`dnf.Base` object. :param possible_local_files: the list of strings that could be a local rpms :raises: :class:`cli.CliError`: Nz.rpmrhttpftpfilehttpsz*There are no enabled repositories in "{}".z", ")rrrr)r Z _any_enabledendswithospathexistsrZpycompZurlparserr joinrZreposdirrr)rZpossible_local_filesZlfileschememsgrrr_checkEnabledRepoKs  r!c@seZdZdZgZdZdZddZeddZ edd Z ed d Z d d Z ddZ ddZddZddZddZddZdS)Commandz%Abstract base class for CLI commands.NcCs ||_dS)N)r)selfrrrr__init__fszCommand.__init__cCs|jjS)N)rr)r$rrrrjsz Command.basecCs |jdS)Nr)aliases)r$rrr_basecmdoszCommand._basecmdcCs |jjjS)N)rroutput)r$rrrr(sszCommand.outputcCsdS)z4Define command specific options and arguments. #:apiNr)r$parserrrr set_argparserwszCommand.set_argparsercCsdS)z*Do any command-specific pre-configuration.Nr)r$rrr pre_configure{szCommand.pre_configurecCsdS)z&Do any command-specific configuration.Nr)r$rrr configureszCommand.configurecCs&t|tjjrttfStd|dS)z.Get suggestions for resolving the given error.zerror not supported yet: %sN) isinstancer exceptionsZTransactionCheckError _RPM_VERIFY_RPM_REBUILDDBNotImplementedError)r$errorrrrget_error_outputszCommand.get_error_outputcCsdS)zExecute the command.Nr)r$rrrrunsz Command.runcCsdS)z$Finalize operation after resolvementNr)r$rrr run_resolvedszCommand.run_resolvedcCsdS)z%Finalize operations post-transaction.Nr)r$rrrrun_transactionszCommand.run_transaction)__name__ __module__ __qualname____doc__r&summaryoptsr%propertyrr'r(r*r+r,r3r4r5r6rrrrr"_s   r"c @sReZdZdZdZedZdZddddd d d d eh Ze d dZ ddZ ddZ dS) InfoCommandzRA class containing methods needed by the cli to execute the info command. 
infoz4display details about a package or group of packagesall available installedextrasupdatesupgrades autoremoverecent obsoletesc Cs|j}|jdddddtdd|jddddtd d |jd ddd td d |jddddtdd |jddddtdd |jddddtdd |jddddtdd |jddddtdd |jddtd|j|jtjtdddS) Nz--all_packages_action store_constr@zshow all packages (default))destactionconstdefaulthelpz --availablerAzshow only available packages)rKrLrMrOz --installedrBzshow only installed packagesz--extrasrCzshow only extras packagesz --updatesrEzshow only upgrades packagesz --upgradesz --autoremoverFzshow only autoremove packagesz--recentrGz#show only recently changed packagespackages*PACKAGEzPackage name specification)nargsmetavarchoicesrNrLrO)add_mutually_exclusive_group add_argumentr pkgnarrowsDEFAULT_PKGNARROWrPkgNarrowCallback)clsr)narrowsrrrr*s:        zInfoCommand.set_argparsercCs||jj}d|_|jjr"|jj|j_|jjdkr4d|_|jjrd|jjr\|jjdd|jjnd|j_|jjdkrxd|j_dS)NTrBz --obsoletesz--rHrDrE) rdemandssack_activationr<rIpackages_actionavailable_reposrH_option_conflict)r$r]rrrr,s   zInfoCommand.configurecCs&|jj|j|jjd|jj|jjS)Nr?)r _populate_update_security_filterr<routput_packagesr_rP)r$rrrr4szInfoCommand.runN)r?) r7r8r9r:r&rr;rYrX classmethodr*r,r4rrrrr>s   r>c@s$eZdZdZdZedZddZdS) ListCommandzRA class containing methods needed by the cli to execute the list command. listlsz$list a package or groups of packagescCs&|jj|j|jjd|jj|jjS)Nrf)rrbr<rrcr_rP)r$rrrr4szListCommand.runN)rfrg)r7r8r9r:r&rr;r4rrrrresrec@s8eZdZdZd ZedZeddZdd Z d d Z d S)ProvidesCommandzVA class containing methods needed by the cli to execute the provides command. provides whatprovidesprovz*find what package provides the given valuecCs|jddtdtdddS)N dependency+ZPROVIDEz#Provide specification to search for)rSrTrO)rWr)r)rrrr*szProvidesCommand.set_argparsercCs|jj}d|_d|_d|_dS)NTF)rr]r`Zfresh_metadatar^)r$r]rrrr,szProvidesCommand.configurecCstjtd|jj|jjS)NzSearching Packages: )r debugrrrir<rl)r$rrrr4szProvidesCommand.runN)rirjrk) r7r8r9r:r&rr; staticmethodr*r,r4rrrrrhs  rhc@s8eZdZdZd ZedZeddZddZ d d Z d S) CheckUpdateCommandzZA class containing methods needed by the cli to execute the check-update command. check-update check-upgradez$check for available package upgradescCs0|jddddtdd|jddtd d dS) Nz --changelogs changelogsF store_truezshow changelogs before update)rKrNrLrOrPrQrR)rSrT)rWr)r)rrrr*s z CheckUpdateCommand.set_argparsercCs6|jj}d|_d|_d|_|jjr(d|_t|jdS)NT) rr]r^r`Zplugin_filtering_enabledr<rsr!r)r$r]rrrr, szCheckUpdateCommand.configurecCsR|jj|jdd|jj|jjd|jjd}|r:d|jj_|jj j rN|jj dS)NZgte)Zcmp_typeT)print_rsd) rrbr<r check_updatesrPrsr]success_exit_statusrZautocheck_running_kernelZ_check_running_kernel)r$foundrrrr4s   zCheckUpdateCommand.runN)rqrr) r7r8r9r:r&rr;ror*r,r4rrrrrps   rpc seZdZdZGdddeZGdddeZGdddeZGdd d eZGd d d eZ Gd d d eZ GdddeZ GdddeZ GdddeZ GdddeZGdddeZeeeee e e e e eeh Zd%ZedZfddZdd Zd!d"Zd#d$ZZS)&RepoPkgsCommandz2Implementation of the repository-packages command.c@s$eZdZdZdZddZddZdS) z%RepoPkgsCommand.CheckUpdateSubCommandz'Implementation of the info sub-command. 
cli/commands/__init__.py

# Copyright 2006 Duke University
# Copyright (C) 2012-2016 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Written by Seth Vidal

"""
Classes for subcommands of the yum command line interface.
"""

from __future__ import print_function
from __future__ import unicode_literals

from dnf.cli.option_parser import OptionParser
from dnf.i18n import _

import dnf.cli
import dnf.exceptions
import dnf.pycomp
import dnf.util
import logging
import os

logger = logging.getLogger('dnf')
_RPM_VERIFY = _("To diagnose the problem, try running: '%s'.") % \
    'rpm -Va --nofiles --nodigest'
_RPM_REBUILDDB = _("You probably have corrupted RPMDB, running '%s'"
                   " might fix the issue.") % 'rpm --rebuilddb'

gpg_msg = \
    _("""You have enabled checking of packages via GPG keys. This is a good
thing. However, you do not have any GPG public keys installed. You need to
download the keys for packages you wish to install and install them.
You can do that by running the command: rpm --import public.gpg.key Alternatively you can specify the url to the key you would like to use for a repository in the 'gpgkey' option in a repository section and {prog} will install it for you. For more information contact your distribution or package provider.""") def _checkGPGKey(base, cli): """Verify that there are gpg keys for the enabled repositories in the rpm database. :param base: a :class:`dnf.Base` object. :raises: :class:`cli.CliError` """ if not base.conf.gpgcheck: return if not base._gpg_key_check(): for repo in base.repos.iter_enabled(): if (repo.gpgcheck or repo.repo_gpgcheck) and not repo.gpgkey: logger.critical("\n%s\n", gpg_msg.format(prog=dnf.util.MAIN_PROG_UPPER)) logger.critical(_("Problem repository: %s"), repo) raise dnf.cli.CliError def _checkEnabledRepo(base, possible_local_files=()): """Verify that there is at least one enabled repo. :param base: a :class:`dnf.Base` object. :param possible_local_files: the list of strings that could be a local rpms :raises: :class:`cli.CliError`: """ if base.repos._any_enabled(): return for lfile in possible_local_files: if lfile.endswith(".rpm") and os.path.exists(lfile): return scheme = dnf.pycomp.urlparse.urlparse(lfile)[0] if scheme in ('http', 'ftp', 'file', 'https'): return msg = _('There are no enabled repositories in "{}".').format('", "'.join(base.conf.reposdir)) raise dnf.cli.CliError(msg) class Command(object): """Abstract base class for CLI commands.""" aliases = [] # :api summary = "" # :api opts = None def __init__(self, cli): # :api self.cli = cli @property def base(self): # :api return self.cli.base @property def _basecmd(self): return self.aliases[0] @property def output(self): return self.cli.base.output def set_argparser(self, parser): """Define command specific options and arguments. #:api""" pass def pre_configure(self): # :api """Do any command-specific pre-configuration.""" pass def configure(self): # :api """Do any command-specific configuration.""" pass def get_error_output(self, error): """Get suggestions for resolving the given error.""" if isinstance(error, dnf.exceptions.TransactionCheckError): return (_RPM_VERIFY, _RPM_REBUILDDB) raise NotImplementedError('error not supported yet: %s' % error) def run(self): # :api """Execute the command.""" pass def run_resolved(self): """Finalize operation after resolvement""" pass def run_transaction(self): """Finalize operations post-transaction.""" pass class InfoCommand(Command): """A class containing methods needed by the cli to execute the info command. 
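
    The output can be narrowed with --all, --available, --installed, --extras,
    --updates/--upgrades, --autoremove or --recent; positional arguments are
    package name specifications.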
""" aliases = ('info',) summary = _('display details about a package or group of packages') DEFAULT_PKGNARROW = 'all' pkgnarrows = {'available', 'installed', 'extras', 'updates', 'upgrades', 'autoremove', 'recent', 'obsoletes', DEFAULT_PKGNARROW} @classmethod def set_argparser(cls, parser): narrows = parser.add_mutually_exclusive_group() narrows.add_argument('--all', dest='_packages_action', action='store_const', const='all', default=None, help=_("show all packages (default)")) narrows.add_argument('--available', dest='_packages_action', action='store_const', const='available', help=_("show only available packages")) narrows.add_argument('--installed', dest='_packages_action', action='store_const', const='installed', help=_("show only installed packages")) narrows.add_argument('--extras', dest='_packages_action', action='store_const', const='extras', help=_("show only extras packages")) narrows.add_argument('--updates', dest='_packages_action', action='store_const', const='upgrades', help=_("show only upgrades packages")) narrows.add_argument('--upgrades', dest='_packages_action', action='store_const', const='upgrades', help=_("show only upgrades packages")) narrows.add_argument('--autoremove', dest='_packages_action', action='store_const', const='autoremove', help=_("show only autoremove packages")) narrows.add_argument('--recent', dest='_packages_action', action='store_const', const='recent', help=_("show only recently changed packages")) parser.add_argument('packages', nargs='*', metavar=_('PACKAGE'), choices=cls.pkgnarrows, default=cls.DEFAULT_PKGNARROW, action=OptionParser.PkgNarrowCallback, help=_("Package name specification")) def configure(self): demands = self.cli.demands demands.sack_activation = True if self.opts._packages_action: self.opts.packages_action = self.opts._packages_action if self.opts.packages_action != 'installed': demands.available_repos = True if self.opts.obsoletes: if self.opts._packages_action: self.cli._option_conflict("--obsoletes", "--" + self.opts._packages_action) else: self.opts.packages_action = 'obsoletes' if self.opts.packages_action == 'updates': self.opts.packages_action = 'upgrades' def run(self): self.cli._populate_update_security_filter(self.opts) return self.base.output_packages('info', self.opts.packages_action, self.opts.packages) class ListCommand(InfoCommand): """A class containing methods needed by the cli to execute the list command. """ aliases = ('list', 'ls') summary = _('list a package or groups of packages') def run(self): self.cli._populate_update_security_filter(self.opts) return self.base.output_packages('list', self.opts.packages_action, self.opts.packages) class ProvidesCommand(Command): """A class containing methods needed by the cli to execute the provides command. """ aliases = ('provides', 'whatprovides', 'prov') summary = _('find what package provides the given value') @staticmethod def set_argparser(parser): parser.add_argument('dependency', nargs='+', metavar=_('PROVIDE'), help=_("Provide specification to search for")) def configure(self): demands = self.cli.demands demands.available_repos = True demands.fresh_metadata = False demands.sack_activation = True def run(self): logger.debug(_("Searching Packages: ")) return self.base.provides(self.opts.dependency) class CheckUpdateCommand(Command): """A class containing methods needed by the cli to execute the check-update command. 
""" aliases = ('check-update', 'check-upgrade') summary = _('check for available package upgrades') @staticmethod def set_argparser(parser): parser.add_argument('--changelogs', dest='changelogs', default=False, action='store_true', help=_('show changelogs before update')) parser.add_argument('packages', nargs='*', metavar=_('PACKAGE')) def configure(self): demands = self.cli.demands demands.sack_activation = True demands.available_repos = True demands.plugin_filtering_enabled = True if self.opts.changelogs: demands.changelogs = True _checkEnabledRepo(self.base) def run(self): self.cli._populate_update_security_filter(self.opts, cmp_type="gte") found = self.base.check_updates(self.opts.packages, print_=True, changelogs=self.opts.changelogs) if found: self.cli.demands.success_exit_status = 100 if self.base.conf.autocheck_running_kernel: self.cli._check_running_kernel() class RepoPkgsCommand(Command): """Implementation of the repository-packages command.""" class CheckUpdateSubCommand(Command): """Implementation of the info sub-command.""" aliases = ('check-update',) def configure(self): demands = self.cli.demands demands.available_repos = True demands.sack_activation = True def run_on_repo(self): """Execute the command with respect to given arguments *cli_args*.""" found = self.base.check_updates(self.opts.pkg_specs, self.reponame, print_=True) if found: self.cli.demands.success_exit_status = 100 class InfoSubCommand(Command): """Implementation of the info sub-command.""" aliases = ('info',) def configure(self): demands = self.cli.demands demands.sack_activation = True if self.opts._pkg_specs_action: self.opts.pkg_specs_action = self.opts._pkg_specs_action if self.opts.pkg_specs_action != 'installed': demands.available_repos = True if self.opts.obsoletes: if self.opts._pkg_specs_action: self.cli._option_conflict("--obsoletes", "--" + self.opts._pkg_specs_action) else: self.opts.pkg_specs_action = 'obsoletes' def run_on_repo(self): """Execute the command with respect to given arguments *cli_args*.""" self.cli._populate_update_security_filter(self.opts) self.base.output_packages('info', self.opts.pkg_specs_action, self.opts.pkg_specs, self.reponame) class InstallSubCommand(Command): """Implementation of the install sub-command.""" aliases = ('install',) def configure(self): demands = self.cli.demands demands.available_repos = True demands.sack_activation = True demands.resolving = True demands.root_user = True def run_on_repo(self): self.cli._populate_update_security_filter(self.opts) """Execute the command with respect to given arguments *cli_args*.""" _checkGPGKey(self.base, self.cli) done = False if not self.opts.pkg_specs: # Install all packages. try: self.base.install('*', self.reponame) except dnf.exceptions.MarkingError: logger.info(_('No package available.')) else: done = True else: # Install packages. 
for pkg_spec in self.opts.pkg_specs: try: self.base.install(pkg_spec, self.reponame) except dnf.exceptions.MarkingError as e: msg = '{}: {}'.format(e.value, self.base.output.term.bold(pkg_spec)) logger.info(msg) else: done = True if not done: raise dnf.exceptions.Error(_('No packages marked for install.')) class ListSubCommand(InfoSubCommand): """Implementation of the list sub-command.""" aliases = ('list',) def run_on_repo(self): """Execute the command with respect to given arguments *cli_args*.""" self.cli._populate_update_security_filter(self.opts) self.base.output_packages('list', self.opts.pkg_specs_action, self.opts.pkg_specs, self.reponame) class MoveToSubCommand(Command): """Implementation of the move-to sub-command.""" aliases = ('move-to',) def configure(self): demands = self.cli.demands demands.sack_activation = True demands.available_repos = True demands.resolving = True demands.root_user = True def run_on_repo(self): """Execute the command with respect to given arguments *cli_args*.""" _checkGPGKey(self.base, self.cli) done = False if not self.opts.pkg_specs: # Reinstall all packages. try: self.base.reinstall('*', new_reponame=self.reponame) except dnf.exceptions.PackagesNotInstalledError: logger.info(_('No package installed.')) except dnf.exceptions.PackagesNotAvailableError: logger.info(_('No package available.')) except dnf.exceptions.MarkingError: assert False, 'Only the above marking errors are expected.' else: done = True else: # Reinstall packages. for pkg_spec in self.opts.pkg_specs: try: self.base.reinstall(pkg_spec, new_reponame=self.reponame) except dnf.exceptions.PackagesNotInstalledError: msg = _('No match for argument: %s') logger.info(msg, pkg_spec) except dnf.exceptions.PackagesNotAvailableError as err: for pkg in err.packages: xmsg = '' pkgrepo = self.base.history.repo(pkg) if pkgrepo: xmsg = _(' (from %s)') % pkgrepo msg = _('Installed package %s%s not available.') logger.info(msg, self.output.term.bold(pkg), xmsg) except dnf.exceptions.MarkingError: assert False, \ 'Only the above marking errors are expected.' else: done = True if not done: raise dnf.exceptions.Error(_('Nothing to do.')) class ReinstallOldSubCommand(Command): """Implementation of the reinstall-old sub-command.""" aliases = ('reinstall-old',) def configure(self): demands = self.cli.demands demands.sack_activation = True demands.available_repos = True demands.resolving = True demands.root_user = True def run_on_repo(self): """Execute the command with respect to given arguments *cli_args*.""" _checkGPGKey(self.base, self.cli) done = False if not self.opts.pkg_specs: # Reinstall all packages. try: self.base.reinstall('*', self.reponame, self.reponame) except dnf.exceptions.PackagesNotInstalledError: msg = _('No package installed from the repository.') logger.info(msg) except dnf.exceptions.PackagesNotAvailableError: logger.info(_('No package available.')) except dnf.exceptions.MarkingError: assert False, 'Only the above marking errors are expected.' else: done = True else: # Reinstall packages. 
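                # Reinstall each spec from the repository it was originally
                # installed from; not-installed or no-longer-available packages
                # are reported and skipped rather than aborting the whole run.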
for pkg_spec in self.opts.pkg_specs: try: self.base.reinstall(pkg_spec, self.reponame, self.reponame) except dnf.exceptions.PackagesNotInstalledError: msg = _('No match for argument: %s') logger.info(msg, pkg_spec) except dnf.exceptions.PackagesNotAvailableError as err: for pkg in err.packages: xmsg = '' pkgrepo = self.base.history.repo(pkg) if pkgrepo: xmsg = _(' (from %s)') % pkgrepo msg = _('Installed package %s%s not available.') logger.info(msg, self.output.term.bold(pkg), xmsg) except dnf.exceptions.MarkingError: assert False, \ 'Only the above marking errors are expected.' else: done = True if not done: raise dnf.exceptions.Error(_('Nothing to do.')) class ReinstallSubCommand(Command): """Implementation of the reinstall sub-command.""" aliases = ('reinstall',) def __init__(self, cli): """Initialize the command.""" super(RepoPkgsCommand.ReinstallSubCommand, self).__init__(cli) self.wrapped_commands = (RepoPkgsCommand.ReinstallOldSubCommand(cli), RepoPkgsCommand.MoveToSubCommand(cli)) def configure(self): self.cli.demands.available_repos = True for command in self.wrapped_commands: command.opts = self.opts command.reponame = self.reponame command.configure() def run_on_repo(self): """Execute the command with respect to given arguments *cli_args*.""" _checkGPGKey(self.base, self.cli) for command in self.wrapped_commands: try: command.run_on_repo() except dnf.exceptions.Error: continue else: break else: raise dnf.exceptions.Error(_('No packages marked for reinstall.')) class RemoveOrDistroSyncSubCommand(Command): """Implementation of the remove-or-distro-sync sub-command.""" aliases = ('remove-or-distro-sync',) def configure(self): demands = self.cli.demands demands.available_repos = True demands.sack_activation = True demands.resolving = True demands.root_user = True def _replace(self, pkg_spec, reponame): """Synchronize a package with another repository or remove it.""" self.cli.base.sack.disable_repo(reponame) subject = dnf.subject.Subject(pkg_spec) matches = subject.get_best_query(self.cli.base.sack) history = self.cli.base.history installed = [ pkg for pkg in matches.installed() if history.repo(pkg) == reponame] if not installed: raise dnf.exceptions.PackagesNotInstalledError( 'no package matched', pkg_spec) available = matches.available() clean_deps = self.cli.base.conf.clean_requirements_on_remove for package in installed: if available.filter(name=package.name, arch=package.arch): self.cli.base._goal.distupgrade(package) else: self.cli.base._goal.erase(package, clean_deps=clean_deps) def run_on_repo(self): """Execute the command with respect to given arguments *cli_args*.""" _checkGPGKey(self.base, self.cli) done = False if not self.opts.pkg_specs: # Sync all packages. try: self._replace('*', self.reponame) except dnf.exceptions.PackagesNotInstalledError: msg = _('No package installed from the repository.') logger.info(msg) else: done = True else: # Reinstall packages. 
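                # _replace() distro-syncs each matched package to another
                # enabled repository, or erases it when no package of the same
                # name and arch is available elsewhere.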
for pkg_spec in self.opts.pkg_specs: try: self._replace(pkg_spec, self.reponame) except dnf.exceptions.PackagesNotInstalledError: msg = _('No match for argument: %s') logger.info(msg, pkg_spec) else: done = True if not done: raise dnf.exceptions.Error(_('Nothing to do.')) class RemoveOrReinstallSubCommand(Command): """Implementation of the remove-or-reinstall sub-command.""" aliases = ('remove-or-reinstall',) def configure(self): demands = self.cli.demands demands.sack_activation = True demands.available_repos = True demands.resolving = True demands.root_user = True def run_on_repo(self): """Execute the command with respect to given arguments *cli_args*.""" _checkGPGKey(self.base, self.cli) done = False if not self.opts.pkg_specs: # Reinstall all packages. try: self.base.reinstall('*', old_reponame=self.reponame, new_reponame_neq=self.reponame, remove_na=True) except dnf.exceptions.PackagesNotInstalledError: msg = _('No package installed from the repository.') logger.info(msg) except dnf.exceptions.MarkingError: assert False, 'Only the above marking error is expected.' else: done = True else: # Reinstall packages. for pkg_spec in self.opts.pkg_specs: try: self.base.reinstall( pkg_spec, old_reponame=self.reponame, new_reponame_neq=self.reponame, remove_na=True) except dnf.exceptions.PackagesNotInstalledError: msg = _('No match for argument: %s') logger.info(msg, pkg_spec) except dnf.exceptions.MarkingError: assert False, 'Only the above marking error is expected.' else: done = True if not done: raise dnf.exceptions.Error(_('Nothing to do.')) class RemoveSubCommand(Command): """Implementation of the remove sub-command.""" aliases = ('remove',) def configure(self): demands = self.cli.demands demands.sack_activation = True demands.allow_erasing = True demands.available_repos = False demands.resolving = True demands.root_user = True def run_on_repo(self): """Execute the command with respect to given arguments *cli_args*.""" done = False if not self.opts.pkg_specs: # Remove all packages. try: self.base.remove('*', self.reponame) except dnf.exceptions.MarkingError: msg = _('No package installed from the repository.') logger.info(msg) else: done = True else: # Remove packages. for pkg_spec in self.opts.pkg_specs: try: self.base.remove(pkg_spec, self.reponame) except dnf.exceptions.MarkingError as e: logger.info(str(e)) else: done = True if not done: logger.warning(_('No packages marked for removal.')) class UpgradeSubCommand(Command): """Implementation of the upgrade sub-command.""" aliases = ('upgrade', 'upgrade-to') def configure(self): demands = self.cli.demands demands.sack_activation = True demands.available_repos = True demands.resolving = True demands.root_user = True def run_on_repo(self): """Execute the command with respect to given arguments *cli_args*.""" _checkGPGKey(self.base, self.cli) done = False if not self.opts.pkg_specs: # Update all packages. self.base.upgrade_all(self.reponame) done = True else: # Update packages. 
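                # Upgrade each spec from the selected repository only; specs
                # with no match are reported and skipped.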
for pkg_spec in self.opts.pkg_specs: try: self.base.upgrade(pkg_spec, self.reponame) except dnf.exceptions.MarkingError: logger.info(_('No match for argument: %s'), pkg_spec) else: done = True if not done: raise dnf.exceptions.Error(_('No packages marked for upgrade.')) SUBCMDS = {CheckUpdateSubCommand, InfoSubCommand, InstallSubCommand, ListSubCommand, MoveToSubCommand, ReinstallOldSubCommand, ReinstallSubCommand, RemoveOrDistroSyncSubCommand, RemoveOrReinstallSubCommand, RemoveSubCommand, UpgradeSubCommand} aliases = ('repository-packages', 'repo-pkgs', 'repo-packages', 'repository-pkgs') summary = _('run commands on top of all packages in given repository') def __init__(self, cli): """Initialize the command.""" super(RepoPkgsCommand, self).__init__(cli) subcmd_objs = (subcmd(cli) for subcmd in self.SUBCMDS) self.subcmd = None self._subcmd_name2obj = { alias: subcmd for subcmd in subcmd_objs for alias in subcmd.aliases} def set_argparser(self, parser): narrows = parser.add_mutually_exclusive_group() narrows.add_argument('--all', dest='_pkg_specs_action', action='store_const', const='all', default=None, help=_("show all packages (default)")) narrows.add_argument('--available', dest='_pkg_specs_action', action='store_const', const='available', help=_("show only available packages")) narrows.add_argument('--installed', dest='_pkg_specs_action', action='store_const', const='installed', help=_("show only installed packages")) narrows.add_argument('--extras', dest='_pkg_specs_action', action='store_const', const='extras', help=_("show only extras packages")) narrows.add_argument('--updates', dest='_pkg_specs_action', action='store_const', const='upgrades', help=_("show only upgrades packages")) narrows.add_argument('--upgrades', dest='_pkg_specs_action', action='store_const', const='upgrades', help=_("show only upgrades packages")) narrows.add_argument('--autoremove', dest='_pkg_specs_action', action='store_const', const='autoremove', help=_("show only autoremove packages")) narrows.add_argument('--recent', dest='_pkg_specs_action', action='store_const', const='recent', help=_("show only recently changed packages")) parser.add_argument( 'reponame', nargs=1, action=OptionParser._RepoCallbackEnable, metavar=_('REPOID'), help=_("Repository ID")) subcommand_choices = [subcmd.aliases[0] for subcmd in self.SUBCMDS] subcommand_choices_all = [alias for subcmd in self.SUBCMDS for alias in subcmd.aliases] parser.add_argument('subcmd', nargs=1, metavar="SUBCOMMAND", choices=subcommand_choices_all, help=", ".join(subcommand_choices)) DEFAULT_PKGNARROW = 'all' pkgnarrows = {DEFAULT_PKGNARROW, 'installed', 'available', 'autoremove', 'extras', 'obsoletes', 'recent', 'upgrades'} parser.add_argument('pkg_specs', nargs='*', metavar=_('PACKAGE'), choices=pkgnarrows, default=DEFAULT_PKGNARROW, action=OptionParser.PkgNarrowCallback, help=_("Package specification")) def configure(self): """Verify whether the command can run with given arguments.""" # Check sub-command. try: self.subcmd = self._subcmd_name2obj[self.opts.subcmd[0]] except (dnf.cli.CliError, KeyError) as e: self.cli.optparser.print_usage() raise dnf.cli.CliError self.subcmd.opts = self.opts self.subcmd.reponame = self.opts.reponame[0] self.subcmd.configure() def run(self): """Execute the command with respect to given arguments *extcmds*.""" self.subcmd.run_on_repo() class HelpCommand(Command): """A class containing methods needed by the cli to execute the help command. 
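
    With no argument the global usage text is printed; given a command name,
    the help for that particular command is shown instead.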
""" aliases = ('help',) summary = _('display a helpful usage message') @staticmethod def set_argparser(parser): parser.add_argument('cmd', nargs='?', metavar=_('COMMAND'), help=_("{prog} command to get help for").format( prog=dnf.util.MAIN_PROG_UPPER)) def run(self): if (not self.opts.cmd or self.opts.cmd not in self.cli.cli_commands): self.cli.optparser.print_help() else: command = self.cli.cli_commands[self.opts.cmd] self.cli.optparser.print_help(command(self)) PK!tDcli/commands/alias.pynu[# alias.py # Alias CLI command. # # Copyright (C) 2018 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # from __future__ import absolute_import from __future__ import print_function from __future__ import unicode_literals import logging import os.path import dnf.cli import dnf.cli.aliases from dnf.cli import commands import dnf.conf import dnf.exceptions from dnf.i18n import _ logger = logging.getLogger('dnf') class AliasCommand(commands.Command): aliases = ('alias',) summary = _('List or create command aliases') @staticmethod def set_argparser(parser): enable_group = parser.add_mutually_exclusive_group() enable_group.add_argument( '--enable-resolving', default=False, action='store_true', help=_('enable aliases resolving')) enable_group.add_argument( '--disable-resolving', default=False, action='store_true', help=_('disable aliases resolving')) parser.add_argument("subcommand", nargs='?', default='list', choices=['add', 'list', 'delete'], help=_("action to do with aliases")) parser.add_argument("alias", nargs="*", metavar="command[=result]", help=_("alias definition")) def configure(self): demands = self.cli.demands if self.opts.subcommand in ('add', 'delete'): demands.root_user = True self.aliases_base = dnf.cli.aliases.Aliases() self.aliases_base._load_aliases() self.resolving_enabled = self.aliases_base.enabled self._update_config_from_options() def _update_config_from_options(self): enabled = None if self.opts.enable_resolving: enabled = True logger.info(_("Aliases are now enabled")) if self.opts.disable_resolving: enabled = False logger.info(_("Aliases are now disabled")) if enabled is not None: if not os.path.exists(dnf.cli.aliases.ALIASES_CONF_PATH): open(dnf.cli.aliases.ALIASES_CONF_PATH, 'w').close() dnf.conf.BaseConfig.write_raw_configfile( dnf.cli.aliases.ALIASES_CONF_PATH, 'main', None, {'enabled': enabled}) if not self.aliases_base._disabled_by_environ(): self.aliases_base.enabled = enabled def _parse_option_alias(self): new_aliases = {} for alias in self.opts.alias: alias = alias.split('=', 1) cmd = alias[0].strip() if len(cmd.split()) != 1: logger.warning(_("Invalid alias key: %s"), cmd) continue if cmd.startswith('-'): 
logger.warning(_("Invalid alias key: %s"), cmd) continue if len(alias) == 1: logger.warning(_("Alias argument has no value: %s"), cmd) continue new_aliases[cmd] = alias[1].split() return new_aliases def _load_user_aliases(self): if not os.path.exists(dnf.cli.aliases.ALIASES_USER_PATH): open(dnf.cli.aliases.ALIASES_USER_PATH, 'w').close() try: conf = dnf.cli.aliases.AliasesConfig( dnf.cli.aliases.ALIASES_USER_PATH) except dnf.exceptions.ConfigError as e: logger.warning(_('Config error: %s'), e) return None return conf def _store_user_aliases(self, user_aliases, enabled): fileobj = open(dnf.cli.aliases.ALIASES_USER_PATH, 'w') output = "[main]\n" output += "enabled = {}\n\n".format(enabled) output += "[aliases]\n" for key, value in user_aliases.items(): output += "{} = {}\n".format(key, ' '.join(value)) fileobj.write(output) def add_aliases(self, aliases): conf = self._load_user_aliases() user_aliases = conf.aliases if user_aliases is None: return user_aliases.update(aliases) self._store_user_aliases(user_aliases, conf.enabled) logger.info(_("Aliases added: %s"), ', '.join(aliases.keys())) def remove_aliases(self, cmds): conf = self._load_user_aliases() user_aliases = conf.aliases if user_aliases is None: return valid_cmds = [] for cmd in cmds: try: del user_aliases[cmd] valid_cmds.append(cmd) except KeyError: logger.info(_("Alias not found: %s"), cmd) self._store_user_aliases(user_aliases, conf.enabled) logger.info(_("Aliases deleted: %s"), ', '.join(valid_cmds)) def list_alias(self, cmd): args = [cmd] try: args = self.aliases_base._resolve(args) except dnf.exceptions.Error as e: logger.error( _('%s, alias %s="%s"'), e, cmd, (' ').join(self.aliases_base.aliases[cmd])) else: print(_("Alias %s='%s'") % (cmd, " ".join(args))) def run(self): if not self.aliases_base.enabled: logger.warning(_("Aliases resolving is disabled.")) if self.opts.subcommand == 'add': # Add new alias aliases = self._parse_option_alias() if not aliases: raise dnf.exceptions.Error(_("No aliases specified.")) self.add_aliases(aliases) return if self.opts.subcommand == 'delete': # Remove alias cmds = self.opts.alias if cmds == []: raise dnf.exceptions.Error(_("No alias specified.")) self.remove_aliases(cmds) return if not self.opts.alias: # List all aliases if not self.aliases_base.aliases: logger.info(_("No aliases defined.")) return for cmd in self.aliases_base.aliases: self.list_alias(cmd) else: # List alias by key for cmd in self.opts.alias: if cmd not in self.aliases_base.aliases: logger.info(_("No match for alias: %s") % cmd) continue self.list_alias(cmd) PK! cli/commands/autoremove.pynu[# autoremove.py # Autoremove CLI command. # # Copyright (C) 2014-2016 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. 
Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # from __future__ import absolute_import from __future__ import unicode_literals from dnf.cli import commands from dnf.cli.option_parser import OptionParser from dnf.i18n import _ import dnf.exceptions import hawkey import logging logger = logging.getLogger("dnf") class AutoremoveCommand(commands.Command): nevra_forms = {'autoremove-n': hawkey.FORM_NAME, 'autoremove-na': hawkey.FORM_NA, 'autoremove-nevra': hawkey.FORM_NEVRA} aliases = ('autoremove',) + tuple(nevra_forms.keys()) summary = _('remove all unneeded packages that were originally installed ' 'as dependencies') @staticmethod def set_argparser(parser): parser.add_argument('packages', nargs='*', help=_('Package to remove'), action=OptionParser.ParseSpecGroupFileCallback, metavar=_('PACKAGE')) def configure(self): demands = self.cli.demands demands.resolving = True demands.root_user = True demands.sack_activation = True if any([self.opts.grp_specs, self.opts.pkg_specs, self.opts.filenames]): self.base.conf.clean_requirements_on_remove = True demands.allow_erasing = True # disable all available repos to delete whole dependency tree # instead of replacing removable package with available packages demands.available_repos = False else: demands.available_repos = True demands.fresh_metadata = False def run(self): if any([self.opts.grp_specs, self.opts.pkg_specs, self.opts.filenames]): forms = [] if self.opts.command in self.nevra_forms: forms = [self.nevra_forms[self.opts.command]] self.base.autoremove(forms, self.opts.pkg_specs, self.opts.grp_specs, self.opts.filenames) else: self.base.autoremove() PK!Hh??cli/commands/check.pynu[# # Copyright (C) 2016 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # from __future__ import absolute_import from __future__ import unicode_literals from dnf.i18n import _ from dnf.cli import commands import argparse import dnf.exceptions class CheckCommand(commands.Command): """A class containing methods needed by the cli to execute the check command. 
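
    Individual checks (--dependencies, --duplicates, --obsoleted, --provides)
    can be selected separately; --all, the default, runs every check.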
""" aliases = ('check',) summary = _('check for problems in the packagedb') @staticmethod def set_argparser(parser): parser.add_argument('--all', dest='check_types', action='append_const', const='all', help=_('show all problems; default')) parser.add_argument('--dependencies', dest='check_types', action='append_const', const='dependencies', help=_('show dependency problems')) parser.add_argument('--duplicates', dest='check_types', action='append_const', const='duplicates', help=_('show duplicate problems')) parser.add_argument('--obsoleted', dest='check_types', action='append_const', const='obsoleted', help=_('show obsoleted packages')) parser.add_argument('--provides', dest='check_types', action='append_const', const='provides', help=_('show problems with provides')) # Add compatibility with yum but invisible in help # In choices [] allows to return empty list if no argument otherwise it fails parser.add_argument('check_yum_types', nargs='*', choices=[ 'all', 'dependencies', 'duplicates', 'obsoleted', 'provides', []], help=argparse.SUPPRESS) def configure(self): self.cli.demands.sack_activation = True if self.opts.check_yum_types: if self.opts.check_types: self.opts.check_types = self.opts.check_types + \ self.opts.check_yum_types else: self.opts.check_types = self.opts.check_yum_types if not self.opts.check_types: self.opts.check_types = {'all'} else: self.opts.check_types = set(self.opts.check_types) self.base.conf.disable_excludes += ["all"] def run(self): output_set = set() q = self.base.sack.query().installed() if self.opts.check_types.intersection({'all', 'dependencies'}): sack = None for pkg in q: for require in set(pkg.regular_requires) | set(set(pkg.requires_pre) - set(pkg.prereq_ignoreinst)): if str(require).startswith('rpmlib'): continue if not len(q.filter(provides=[require])): if str(require).startswith('('): # rich deps can be only tested by solver if sack is None: sack = dnf.sack.rpmdb_sack(self.base) selector = dnf.selector.Selector(sack) selector.set(provides=str(require)) goal = dnf.goal.Goal(sack) goal.protect_running_kernel = self.base.conf.protect_running_kernel goal.install(select=selector, optional=False) solved = goal.run() # there ase only @system repo in sack, therefore solved is only in case # when rich deps doesn't require any additional package if solved: continue msg = _("{} has missing requires of {}") output_set.add(msg.format( self.base.output.term.bold(pkg), self.base.output.term.bold(require))) for conflict in pkg.conflicts: conflicted = q.filter(provides=[conflict], name=str(conflict).split()[0]) for conflict_pkg in conflicted: msg = '{} has installed conflict "{}": {}' output_set.add(msg.format( self.base.output.term.bold(pkg), self.base.output.term.bold(conflict), self.base.output.term.bold(conflict_pkg))) if self.opts.check_types.intersection({'all', 'duplicates'}): installonly = self.base._get_installonly_query(q) dups = q.duplicated().difference(installonly)._name_dict() for name, pkgs in dups.items(): pkgs.sort() for dup in pkgs[1:]: msg = _("{} is a duplicate with {}").format( self.base.output.term.bold(pkgs[0]), self.base.output.term.bold(dup)) output_set.add(msg) if self.opts.check_types.intersection({'all', 'obsoleted'}): for pkg in q: for obsolete in pkg.obsoletes: obsoleted = q.filter(provides=[obsolete], name=str(obsolete).split()[0]) if len(obsoleted): msg = _("{} is obsoleted by {}").format( self.base.output.term.bold(obsoleted[0]), self.base.output.term.bold(pkg)) output_set.add(msg) if self.opts.check_types.intersection({'all', 
'provides'}): for pkg in q: for provide in pkg.provides: if pkg not in q.filter(provides=[provide]): msg = _("{} provides {} but it cannot be found") output_set.add(msg.format( self.base.output.term.bold(pkg), self.base.output.term.bold(provide))) for msg in sorted(output_set): print(msg) if output_set: raise dnf.exceptions.Error( 'Check discovered {} problem(s)'.format(len(output_set))) PK!% ttcli/commands/clean.pynu[# clean.py # Clean CLI command. # # Copyright (C) 2014-2016 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # from __future__ import absolute_import from __future__ import unicode_literals from dnf.cli import commands from dnf.i18n import _, P_ from dnf.yum import misc import dnf.cli import dnf.exceptions import dnf.lock import dnf.logging import dnf.repo import logging import os import re import time logger = logging.getLogger("dnf") # Dict mapping cmdline arguments to actual data types to be cleaned up _CACHE_TYPES = { 'metadata': ['metadata', 'dbcache', 'expire-cache'], 'packages': ['packages'], 'dbcache': ['dbcache'], 'expire-cache': ['expire-cache'], 'all': ['metadata', 'packages', 'dbcache'], } def _tree(dirpath): """Traverse dirpath recursively and yield relative filenames.""" for root, dirs, files in os.walk(dirpath): base = os.path.relpath(root, dirpath) for f in files: path = os.path.join(base, f) yield os.path.normpath(path) def _filter(files, patterns): """Yield those filenames that match any of the patterns.""" return (f for f in files for p in patterns if re.match(p, f)) def _clean(dirpath, files): """Remove the given filenames from dirpath.""" count = 0 for f in files: path = os.path.join(dirpath, f) logger.log(dnf.logging.DDEBUG, _('Removing file %s'), path) misc.unlink_f(path) count += 1 return count def _cached_repos(files): """Return the repo IDs that have some cached metadata around.""" metapat = dnf.repo.CACHE_FILES['metadata'] matches = (re.match(metapat, f) for f in files) return set(m.group('repoid') for m in matches if m) class CleanCommand(commands.Command): """A class containing methods needed by the cli to execute the clean command. 
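
    A usage sketch, assuming the cache types listed in _CACHE_TYPES above:

        dnf clean expire-cache      # only mark cached repo metadata as expired
        dnf clean metadata packages
        dnf clean all               # metadata, packages and dbcache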
""" aliases = ('clean',) summary = _('remove cached data') @staticmethod def set_argparser(parser): parser.add_argument('type', nargs='+', choices=_CACHE_TYPES.keys(), help=_('Metadata type to clean')) def run(self): cachedir = self.base.conf.cachedir md_lock = dnf.lock.build_metadata_lock(cachedir, True) download_lock = dnf.lock.build_download_lock(cachedir, True) rpmdb_lock = dnf.lock.build_rpmdb_lock(self.base.conf.persistdir, True) while True: try: with md_lock and download_lock and rpmdb_lock: types = set(t for c in self.opts.type for t in _CACHE_TYPES[c]) files = list(_tree(cachedir)) logger.debug(_('Cleaning data: ' + ' '.join(types))) if 'expire-cache' in types: expired = _cached_repos(files) self.base._repo_persistor.expired_to_add.update(expired) types.remove('expire-cache') logger.info(_('Cache was expired')) patterns = [dnf.repo.CACHE_FILES[t] for t in types] count = _clean(cachedir, _filter(files, patterns)) logger.info(P_('%d file removed', '%d files removed', count) % count) return except dnf.exceptions.LockError as e: if not self.base.conf.exit_on_lock: msg = _('Waiting for process with pid %d to finish.') % (e.pid) logger.info(msg) time.sleep(3) else: raise e PK!cli/commands/deplist.pynu[# # Copyright (C) 2016 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # from __future__ import print_function from __future__ import absolute_import from __future__ import unicode_literals from dnf.i18n import _ from dnf.cli.commands.repoquery import RepoQueryCommand class DeplistCommand(RepoQueryCommand): """ The command is alias for 'dnf repoquery --deplist' """ aliases = ('deplist',) summary = _("[deprecated, use repoquery --deplist] List package's dependencies and what packages provide them") def configure(self): RepoQueryCommand.configure(self) self.opts.deplist = True PK!,cli/commands/distrosync.pynu[# distrosync.py # distro-sync CLI command. # # Copyright (C) 2012-2016 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. 
Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # from __future__ import absolute_import from dnf.cli import commands from dnf.i18n import _ class DistroSyncCommand(commands.Command): """A class containing methods needed by the cli to execute the distro-synch command. """ aliases = ('distro-sync', 'distrosync', 'distribution-synchronization', 'dsync') summary = _('synchronize installed packages to the latest available versions') @staticmethod def set_argparser(parser): parser.add_argument('package', nargs='*', help=_('Package to synchronize')) def configure(self): demands = self.cli.demands demands.sack_activation = True demands.available_repos = True demands.resolving = True demands.root_user = True commands._checkGPGKey(self.base, self.cli) commands._checkEnabledRepo(self.base, self.opts.package) def run(self): return self.base.distro_sync_userlist(self.opts.package) PK!(C  cli/commands/downgrade.pynu[# downgrade.py # Downgrade CLI command. # # Copyright (C) 2014-2016 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # from __future__ import absolute_import from __future__ import unicode_literals from dnf.cli import commands from dnf.cli.option_parser import OptionParser from dnf.i18n import _ class DowngradeCommand(commands.Command): """A class containing methods needed by the cli to execute the downgrade command. """ aliases = ('downgrade', 'dg') summary = _("Downgrade a package") @staticmethod def set_argparser(parser): parser.add_argument('package', nargs='*', help=_('Package to downgrade'), action=OptionParser.ParseSpecGroupFileCallback) def configure(self): demands = self.cli.demands demands.sack_activation = True demands.available_repos = True demands.resolving = True demands.root_user = True commands._checkGPGKey(self.base, self.cli) if not self.opts.filenames: commands._checkEnabledRepo(self.base) def run(self): file_pkgs = self.base.add_remote_rpms(self.opts.filenames, strict=False, progress=self.base.output.progress) return self.base.downgradePkgs( specs=self.opts.pkg_specs + ['@' + x for x in self.opts.grp_specs], file_pkgs=file_pkgs, strict=self.base.conf.strict) PK!#B::cli/commands/group.pynu[# group.py # Group CLI command. # # Copyright (C) 2012-2016 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. 
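#
# Usage sketch for the group command defined below (assumed invocations, taken
# from the subcommands and options this module registers; <group> is a
# hypothetical placeholder):
#
#   dnf group list --hidden                     # 'summary' is the default subcommand
#   dnf group info <group>
#   dnf group install --with-optional <group>
#   dnf group remove <group>
#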
# This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # from __future__ import absolute_import from __future__ import unicode_literals from dnf.comps import CompsQuery from dnf.cli import commands from dnf.i18n import _, ucd import libdnf.transaction import dnf.cli import dnf.exceptions import dnf.util import logging logger = logging.getLogger("dnf") class GroupCommand(commands.Command): """ Single sub-command interface for most groups interaction. """ direct_commands = {'grouplist' : 'list', 'groupinstall' : 'install', 'groupupdate' : 'install', 'groupremove' : 'remove', 'grouperase' : 'remove', 'groupinfo' : 'info'} aliases = ('group', 'groups', 'grp') + tuple(direct_commands.keys()) summary = _('display, or use, the groups information') _CMD_ALIASES = {'update' : 'upgrade', 'erase' : 'remove'} _MARK_CMDS = ('install', 'remove') _GROUP_SUBCOMMANDS = ('summary', 'list', 'info', 'remove', 'install', 'upgrade', 'mark') def _canonical(self): # were we called with direct command? direct = self.direct_commands.get(self.opts.command) if direct: # canonize subcmd and args if self.opts.subcmd is not None: self.opts.args.insert(0, self.opts.subcmd) self.opts.subcmd = direct if self.opts.subcmd is None: self.opts.subcmd = 'summary' self.opts.subcmd = self._CMD_ALIASES.get(self.opts.subcmd, self.opts.subcmd) def __init__(self, cli): super(GroupCommand, self).__init__(cli) self._remark = False def _assert_comps(self): msg = _('No group data available for configured repositories.') if not len(self.base.comps): raise dnf.exceptions.CompsError(msg) def _environment_lists(self, patterns): def available_pred(env): env_found = self.base.history.env.get(env.id) return not(env_found) self._assert_comps() if patterns is None: envs = self.base.comps.environments else: envs = self.base.comps.environments_by_pattern(",".join(patterns)) return dnf.util.mapall(list, dnf.util.partition(available_pred, envs)) def _group_lists(self, uservisible, patterns): def installed_pred(group): group_found = self.base.history.group.get(group.id) if group_found: return True return False installed = [] available = [] self._assert_comps() if patterns is None: grps = self.base.comps.groups else: grps = self.base.comps.groups_by_pattern(",".join(patterns)) for grp in grps: tgt_list = available if installed_pred(grp): tgt_list = installed if not uservisible or grp.uservisible: tgt_list.append(grp) return installed, available def _info(self, userlist): for strng in userlist: group_matched = False for env in self.base.comps.environments_by_pattern(strng): self.output.display_groups_in_environment(env) group_matched = True for group in self.base.comps.groups_by_pattern(strng): self.output.display_pkgs_in_groups(group) group_matched = True if not group_matched: logger.error(_('Warning: Group %s does not exist.'), strng) return 0, [] def _list(self, userlist): uservisible = 1 showinstalled = 0 showavailable = 0 
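        # The loop below consumes leading legacy keywords ('hidden', 'installed',
        # 'available', 'ids') from userlist; the equivalent --hidden, --installed,
        # --available and --ids options are applied right after it.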
print_ids = self.base.conf.verbose or self.opts.ids while userlist: if userlist[0] == 'hidden': uservisible = 0 userlist.pop(0) elif userlist[0] == 'installed': showinstalled = 1 userlist.pop(0) elif userlist[0] == 'available': showavailable = 1 userlist.pop(0) elif userlist[0] == 'ids': print_ids = True userlist.pop(0) else: break if self.opts.hidden: uservisible = 0 if self.opts.installed: showinstalled = 1 if self.opts.available: showavailable = 1 if not userlist: userlist = None # Match everything... errs = False if userlist is not None: for group in userlist: comps = self.base.comps in_group = len(comps.groups_by_pattern(group)) > 0 in_environment = len(comps.environments_by_pattern(group)) > 0 if not in_group and not in_environment: logger.error(_('Warning: No groups match:') + '\n %s', group) errs = True if errs: return 0, [] env_inst, env_avail = self._environment_lists(userlist) installed, available = self._group_lists(uservisible, userlist) def _out_grp(sect, group): if not done: print(sect) msg = ' %s' % (group.ui_name if group.ui_name is not None else _("")) if print_ids: msg += ' (%s)' % group.id if group.lang_only: msg += ' [%s]' % group.lang_only print('{}'.format(msg)) def _out_env(sect, envs): if envs: print(sect) for e in envs: msg = ' %s' % (e.ui_name if e.ui_name is not None else _("")) if print_ids: msg += ' (%s)' % e.id print(msg) if not showinstalled: _out_env(_('Available Environment Groups:'), env_avail) if not showavailable: _out_env(_('Installed Environment Groups:'), env_inst) if not showavailable: done = False for group in installed: if group.lang_only: continue _out_grp(_('Installed Groups:'), group) done = True done = False for group in installed: if not group.lang_only: continue _out_grp(_('Installed Language Groups:'), group) done = True if showinstalled: return 0, [] done = False for group in available: if group.lang_only: continue _out_grp(_('Available Groups:'), group) done = True done = False for group in available: if not group.lang_only: continue _out_grp(_('Available Language Groups:'), group) done = True return 0, [] def _mark_install(self, patterns): q = CompsQuery(self.base.comps, self.base.history, CompsQuery.GROUPS | CompsQuery.ENVIRONMENTS, CompsQuery.AVAILABLE | CompsQuery.INSTALLED) solver = self.base._build_comps_solver() res = q.get(*patterns) if self.opts.with_optional: types = tuple(self.base.conf.group_package_types + ['optional']) else: types = tuple(self.base.conf.group_package_types) pkg_types = libdnf.transaction.listToCompsPackageType(types) for env_id in res.environments: solver._environment_install(env_id, pkg_types) for group_id in res.groups: solver._group_install(group_id, pkg_types) def _mark_remove(self, patterns): q = CompsQuery(self.base.comps, self.base.history, CompsQuery.GROUPS | CompsQuery.ENVIRONMENTS, CompsQuery.INSTALLED) solver = self.base._build_comps_solver() res = q.get(*patterns) for env_id in res.environments: assert dnf.util.is_string_type(env_id) solver._environment_remove(env_id) for grp_id in res.groups: assert dnf.util.is_string_type(grp_id) solver._group_remove(grp_id) def _mark_subcmd(self, extcmds): if extcmds[0] in self._MARK_CMDS: return extcmds[0], extcmds[1:] return 'install', extcmds def _summary(self, userlist): uservisible = 1 if len(userlist) > 0: if userlist[0] == 'hidden': uservisible = 0 userlist.pop(0) if self.opts.hidden: uservisible = 0 if not userlist: userlist = None # Match everything... 
installed, available = self._group_lists(uservisible, userlist) def _out_grp(sect, num): if not num: return logger.info('%s %u', sect, num) done = 0 for group in installed: if group.lang_only: continue done += 1 _out_grp(_('Installed Groups:'), done) done = 0 for group in installed: if not group.lang_only: continue done += 1 _out_grp(_('Installed Language Groups:'), done) done = False for group in available: if group.lang_only: continue done += 1 _out_grp(_('Available Groups:'), done) done = False for group in available: if not group.lang_only: continue done += 1 _out_grp(_('Available Language Groups:'), done) return 0, [] @staticmethod def set_argparser(parser): parser.add_argument('--with-optional', action='store_true', help=_("include optional packages from group")) grpparser = parser.add_mutually_exclusive_group() grpparser.add_argument('--hidden', action='store_true', help=_("show also hidden groups")) grpparser.add_argument('--installed', action='store_true', help=_("show only installed groups")) grpparser.add_argument('--available', action='store_true', help=_("show only available groups")) grpparser.add_argument('--ids', action='store_true', help=_("show also ID of groups")) parser.add_argument('subcmd', nargs='?', metavar='COMMAND', help=_('available subcommands: {} (default), {}').format( GroupCommand._GROUP_SUBCOMMANDS[0], ', '.join(GroupCommand._GROUP_SUBCOMMANDS[1:]))) parser.add_argument('args', nargs='*', metavar='COMMAND_ARG', help=_('argument for group subcommand')) def configure(self): self._canonical() cmd = self.opts.subcmd args = self.opts.args if cmd not in self._GROUP_SUBCOMMANDS: logger.critical(_('Invalid groups sub-command, use: %s.'), ", ".join(self._GROUP_SUBCOMMANDS)) raise dnf.cli.CliError if cmd in ('install', 'remove', 'mark', 'info') and not args: self.cli.optparser.print_help(self) raise dnf.cli.CliError demands = self.cli.demands demands.sack_activation = True if cmd in ('install', 'mark', 'remove', 'upgrade'): demands.root_user = True demands.resolving = True if cmd == 'remove': demands.allow_erasing = True demands.available_repos = False else: demands.available_repos = True if cmd not in ('remove'): commands._checkEnabledRepo(self.base) if cmd in ('install', 'upgrade'): commands._checkGPGKey(self.base, self.cli) def run(self): cmd = self.opts.subcmd extcmds = self.opts.args if cmd == 'summary': return self._summary(extcmds) if cmd == 'list': return self._list(extcmds) if cmd == 'info': return self._info(extcmds) if cmd == 'mark': (subcmd, extcmds) = self._mark_subcmd(extcmds) if subcmd == 'remove': return self._mark_remove(extcmds) else: assert subcmd == 'install' return self._mark_install(extcmds) if cmd == 'install': if self.opts.with_optional: types = tuple(self.base.conf.group_package_types + ['optional']) else: types = tuple(self.base.conf.group_package_types) self._remark = True try: return self.base.env_group_install(extcmds, types, self.base.conf.strict) except dnf.exceptions.MarkingError as e: msg = _('No package %s available.') logger.info(msg, self.base.output.term.bold(e)) raise dnf.exceptions.PackagesNotAvailableError( _("Unable to find a mandatory group package.")) if cmd == 'upgrade': return self.base.env_group_upgrade(extcmds) if cmd == 'remove': for arg in extcmds: try: self.base.env_group_remove([arg]) except dnf.exceptions.Error: pass def run_transaction(self): if not self._remark: return goal = self.base._goal history = self.base.history names = goal.group_members for pkg in self.base.sack.query().installed().filterm(name=names): 
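            # For each installed package that became a group member in this
            # transaction, re-derive its reason via goal.group_reason() so that
            # group membership is reflected in the history database.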
reason = history.rpm.get_reason(pkg) history.set_reason(pkg, goal.group_reason(pkg, reason)) PK!Ā%F%Fcli/commands/history.pynu[# Copyright 2006 Duke University # Copyright (C) 2012-2016 Red Hat, Inc. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Library General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. from __future__ import absolute_import from __future__ import print_function from __future__ import unicode_literals import libdnf import hawkey from dnf.i18n import _, ucd from dnf.cli import commands from dnf.transaction_sr import TransactionReplay, serialize_transaction import dnf.cli import dnf.exceptions import dnf.transaction import dnf.util import json import logging import os logger = logging.getLogger('dnf') class HistoryCommand(commands.Command): """A class containing methods needed by the cli to execute the history command. """ aliases = ('history', 'hist') summary = _('display, or use, the transaction history') _CMDS = ['list', 'info', 'redo', 'replay', 'rollback', 'store', 'undo', 'userinstalled'] def __init__(self, *args, **kw): super(HistoryCommand, self).__init__(*args, **kw) self._require_one_transaction_id = False @staticmethod def set_argparser(parser): parser.add_argument('transactions_action', nargs='?', metavar="COMMAND", help="Available commands: {} (default), {}".format( HistoryCommand._CMDS[0], ", ".join(HistoryCommand._CMDS[1:]))) parser.add_argument('--reverse', action='store_true', help="display history list output reversed") parser.add_argument("-o", "--output", default=None, help=_("For the store command, file path to store the transaction to")) parser.add_argument("--ignore-installed", action="store_true", help=_("For the replay command, don't check for installed packages matching " "those in transaction")) parser.add_argument("--ignore-extras", action="store_true", help=_("For the replay command, don't check for extra packages pulled " "into the transaction")) parser.add_argument("--skip-unavailable", action="store_true", help=_("For the replay command, skip packages that are not available or have " "missing dependencies")) parser.add_argument('transactions', nargs='*', metavar="TRANSACTION", help="For commands working with history transactions, " "Transaction ID (, 'last' or 'last-' " "for one transaction, .. " "for a range)") parser.add_argument('transaction_filename', nargs='?', metavar="TRANSACTION_FILE", help="For the replay command, path to the stored " "transaction file to replay") def configure(self): if not self.opts.transactions_action: # no positional argument given self.opts.transactions_action = self._CMDS[0] elif self.opts.transactions_action not in self._CMDS: # first positional argument is not a command self.opts.transactions.insert(0, self.opts.transactions_action) self.opts.transactions_action = self._CMDS[0] self._require_one_transaction_id_msg = _("Found more than one transaction ID.\n" "'{}' requires one transaction ID or package name." 
).format(self.opts.transactions_action) demands = self.cli.demands if self.opts.transactions_action == 'replay': if not self.opts.transactions: raise dnf.cli.CliError(_('No transaction file name given.')) if len(self.opts.transactions) > 1: raise dnf.cli.CliError(_('More than one argument given as transaction file name.')) # in case of replay, copy over the file name to it's appropriate variable # (the arg parser can't distinguish here) self.opts.transaction_filename = os.path.abspath(self.opts.transactions[0]) self.opts.transactions = [] demands.available_repos = True demands.resolving = True demands.root_user = True # Override configuration options that affect how the transaction is resolved self.base.conf.clean_requirements_on_remove = False self.base.conf.install_weak_deps = False dnf.cli.commands._checkGPGKey(self.base, self.cli) elif self.opts.transactions_action == 'store': self._require_one_transaction_id = True if not self.opts.transactions: raise dnf.cli.CliError(_('No transaction ID or package name given.')) elif self.opts.transactions_action in ['redo', 'undo', 'rollback']: demands.available_repos = True demands.resolving = True demands.root_user = True self._require_one_transaction_id = True if not self.opts.transactions: msg = _('No transaction ID or package name given.') logger.critical(msg) raise dnf.cli.CliError(msg) elif len(self.opts.transactions) > 1: logger.critical(self._require_one_transaction_id_msg) raise dnf.cli.CliError(self._require_one_transaction_id_msg) demands.available_repos = True dnf.cli.commands._checkGPGKey(self.base, self.cli) else: demands.fresh_metadata = False demands.sack_activation = True if self.base.history.path != ":memory:" and not os.access(self.base.history.path, os.R_OK): msg = _("You don't have access to the history DB: %s" % self.base.history.path) logger.critical(msg) raise dnf.cli.CliError(msg) def get_error_output(self, error): """Get suggestions for resolving the given error.""" if isinstance(error, dnf.exceptions.TransactionCheckError): if self.opts.transactions_action == 'undo': id_, = self.opts.transactions return (_('Cannot undo transaction %s, doing so would result ' 'in an inconsistent package database.') % id_,) elif self.opts.transactions_action == 'rollback': id_, = (self.opts.transactions if self.opts.transactions[0] != 'force' else self.opts.transactions[1:]) return (_('Cannot rollback transaction %s, doing so would ' 'result in an inconsistent package database.') % id_,) return dnf.cli.commands.Command.get_error_output(self, error) def _hcmd_redo(self, extcmds): old = self._history_get_transaction(extcmds) data = serialize_transaction(old) self.replay = TransactionReplay( self.base, data=data, ignore_installed=True, ignore_extras=True, skip_unavailable=self.opts.skip_unavailable ) self.replay.run() def _history_get_transactions(self, extcmds): if not extcmds: raise dnf.cli.CliError(_('No transaction ID given')) old = self.base.history.old(extcmds) if not old: raise dnf.cli.CliError(_('Transaction ID "{0}" not found.').format(extcmds[0])) return old def _history_get_transaction(self, extcmds): old = self._history_get_transactions(extcmds) if len(old) > 1: raise dnf.cli.CliError(_('Found more than one transaction ID!')) return old[0] def _hcmd_undo(self, extcmds): old = self._history_get_transaction(extcmds) self._revert_transaction(old) def _hcmd_rollback(self, extcmds): old = self._history_get_transaction(extcmds) last = self.base.history.last() merged_trans = None if old.tid != last.tid: # history.old([]) returns all 
transactions and we don't want that # so skip merging the transactions when trying to rollback to the last transaction # which is the current system state and rollback is not applicable for trans in self.base.history.old(list(range(old.tid + 1, last.tid + 1))): if trans.altered_lt_rpmdb: logger.warning(_('Transaction history is incomplete, before %u.'), trans.tid) elif trans.altered_gt_rpmdb: logger.warning(_('Transaction history is incomplete, after %u.'), trans.tid) if merged_trans is None: merged_trans = dnf.db.history.MergedTransactionWrapper(trans) else: merged_trans.merge(trans) self._revert_transaction(merged_trans) def _revert_transaction(self, trans): action_map = { "Install": "Removed", "Removed": "Install", "Upgrade": "Downgraded", "Upgraded": "Downgrade", "Downgrade": "Upgraded", "Downgraded": "Upgrade", "Reinstalled": "Reinstall", "Reinstall": "Reinstalled", "Obsoleted": "Install", "Obsolete": "Obsoleted", "Reason Change": "Reason Change", } data = serialize_transaction(trans) # revert actions in the serialized transaction data to perform rollback/undo for content_type in ("rpms", "groups", "environments"): for ti in data.get(content_type, []): ti["action"] = action_map[ti["action"]] if ti["action"] == "Install" and ti.get("reason", None) == "clean": ti["reason"] = "dependency" if ti["action"] == "Reason Change" and "nevra" in ti: subj = hawkey.Subject(ti["nevra"]) nevra = subj.get_nevra_possibilities(forms=[hawkey.FORM_NEVRA])[0] reason = self.output.history.swdb.resolveRPMTransactionItemReason( nevra.name, nevra.arch, trans.tids()[0] - 1 ) ti["reason"] = libdnf.transaction.TransactionItemReasonToString(reason) if ti.get("repo_id") == hawkey.SYSTEM_REPO_NAME: # erase repo_id, because it's not possible to perform forward actions from the @System repo ti["repo_id"] = None self.replay = TransactionReplay( self.base, data=data, ignore_installed=True, ignore_extras=True, skip_unavailable=self.opts.skip_unavailable ) self.replay.run() def _hcmd_userinstalled(self): """Execute history userinstalled command.""" pkgs = tuple(self.base.iter_userinstalled()) n_listed = self.output.listPkgs(pkgs, 'Packages installed by user', 'nevra') if n_listed == 0: raise dnf.cli.CliError(_('No packages to list')) def _args2transaction_ids(self): """Convert commandline arguments to transaction ids""" def str2transaction_id(s): if s == 'last': s = '0' elif s.startswith('last-'): s = s[4:] transaction_id = int(s) if transaction_id <= 0: transaction_id += self.output.history.last().tid return transaction_id tids = set() merged_tids = set() for t in self.opts.transactions: if '..' in t: try: begin_transaction_id, end_transaction_id = t.split('..', 2) except ValueError: logger.critical( _("Invalid transaction ID range definition '{}'.\n" "Use '..'." 
).format(t)) raise dnf.cli.CliError cant_convert_msg = _("Can't convert '{}' to transaction ID.\n" "Use '', 'last', 'last-'.") try: begin_transaction_id = str2transaction_id(begin_transaction_id) except ValueError: logger.critical(_(cant_convert_msg).format(begin_transaction_id)) raise dnf.cli.CliError try: end_transaction_id = str2transaction_id(end_transaction_id) except ValueError: logger.critical(_(cant_convert_msg).format(end_transaction_id)) raise dnf.cli.CliError if self._require_one_transaction_id and begin_transaction_id != end_transaction_id: logger.critical(self._require_one_transaction_id_msg) raise dnf.cli.CliError if begin_transaction_id > end_transaction_id: begin_transaction_id, end_transaction_id = \ end_transaction_id, begin_transaction_id merged_tids.add((begin_transaction_id, end_transaction_id)) tids.update(range(begin_transaction_id, end_transaction_id + 1)) else: try: tids.add(str2transaction_id(t)) except ValueError: # not a transaction id, assume it's package name transact_ids_from_pkgname = self.output.history.search([t]) if transact_ids_from_pkgname: tids.update(transact_ids_from_pkgname) else: msg = _("No transaction which manipulates package '{}' was found." ).format(t) if self._require_one_transaction_id: logger.critical(msg) raise dnf.cli.CliError else: logger.info(msg) return sorted(tids, reverse=True), merged_tids def run(self): vcmd = self.opts.transactions_action if vcmd == 'replay': self.replay = TransactionReplay( self.base, filename=self.opts.transaction_filename, ignore_installed = self.opts.ignore_installed, ignore_extras = self.opts.ignore_extras, skip_unavailable = self.opts.skip_unavailable ) self.replay.run() else: tids, merged_tids = self._args2transaction_ids() if vcmd == 'list' and (tids or not self.opts.transactions): self.output.historyListCmd(tids, reverse=self.opts.reverse) elif vcmd == 'info' and (tids or not self.opts.transactions): self.output.historyInfoCmd(tids, self.opts.transactions, merged_tids) elif vcmd == 'undo': self._hcmd_undo(tids) elif vcmd == 'redo': self._hcmd_redo(tids) elif vcmd == 'rollback': self._hcmd_rollback(tids) elif vcmd == 'userinstalled': self._hcmd_userinstalled() elif vcmd == 'store': tid = self._history_get_transaction(tids) data = serialize_transaction(tid) try: filename = self.opts.output if self.opts.output is not None else "transaction.json" # it is absolutely possible for both assumeyes and assumeno to be True, go figure if (self.base.conf.assumeno or not self.base.conf.assumeyes) and os.path.isfile(filename): msg = _("{} exists, overwrite?").format(filename) if self.base.conf.assumeno or not self.base.output.userconfirm( msg='\n{} [y/N]: '.format(msg), defaultyes_msg='\n{} [Y/n]: '.format(msg)): print(_("Not overwriting {}, exiting.").format(filename)) return with open(filename, "w") as f: json.dump(data, f, indent=4, sort_keys=True) f.write("\n") print(_("Transaction saved to {}.").format(filename)) except OSError as e: raise dnf.cli.CliError(_('Error storing transaction: {}').format(str(e))) def run_resolved(self): if self.opts.transactions_action not in ("replay", "redo", "rollback", "undo"): return self.replay.post_transaction() def run_transaction(self): if self.opts.transactions_action not in ("replay", "redo", "rollback", "undo"): return warnings = self.replay.get_warnings() if warnings: logger.log( dnf.logging.WARNING, _("Warning, the following problems occurred while running a transaction:") ) for w in warnings: logger.log(dnf.logging.WARNING, " " + w) PK!JlSScli/commands/install.pynu[# 
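#
# Usage sketch for the history command above (assumed invocations, based on
# HistoryCommand._CMDS and its argument parser; trans.json is a placeholder
# file name):
#
#   dnf history list
#   dnf history info last
#   dnf history undo last-1
#   dnf history store --output trans.json last
#   dnf history replay trans.json
#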
install.py # Install CLI command. # # Copyright (C) 2014-2016 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # from __future__ import absolute_import from __future__ import unicode_literals import logging from itertools import chain import hawkey import dnf.exceptions from dnf.cli import commands from dnf.cli.option_parser import OptionParser from dnf.i18n import _ logger = logging.getLogger('dnf') class InstallCommand(commands.Command): """A class containing methods needed by the cli to execute the install command. """ nevra_forms = {'install-n': hawkey.FORM_NAME, 'install-na': hawkey.FORM_NA, 'install-nevra': hawkey.FORM_NEVRA} alternatives_provide = 'alternative-for({})' aliases = ('install', 'localinstall', 'in') + tuple(nevra_forms.keys()) summary = _('install a package or packages on your system') @staticmethod def set_argparser(parser): parser.add_argument('package', nargs='+', metavar=_('PACKAGE'), action=OptionParser.ParseSpecGroupFileCallback, help=_('Package to install')) def configure(self): """Verify that conditions are met so that this command can run. That there are enabled repositories with gpg keys, and that this command is called with appropriate arguments. 
""" demands = self.cli.demands demands.sack_activation = True demands.available_repos = True demands.resolving = True demands.root_user = True commands._checkGPGKey(self.base, self.cli) if not self.opts.filenames: commands._checkEnabledRepo(self.base) def run(self): err_pkgs = [] errs = [] error_module_specs = [] nevra_forms = self._get_nevra_forms_from_command() self.cli._populate_update_security_filter(self.opts) if self.opts.command == 'localinstall' and (self.opts.grp_specs or self.opts.pkg_specs): self._log_not_valid_rpm_file_paths(self.opts.grp_specs) if self.base.conf.strict: raise dnf.exceptions.Error(_('Nothing to do.')) skipped_grp_specs = [] if self.opts.grp_specs and self.opts.command != 'localinstall': if dnf.base.WITH_MODULES: try: module_base = dnf.module.module_base.ModuleBase(self.base) module_base.install(self.opts.grp_specs, strict=self.base.conf.strict) except dnf.exceptions.MarkingErrors as e: if e.no_match_group_specs: for e_spec in e.no_match_group_specs: skipped_grp_specs.append(e_spec) if e.error_group_specs: for e_spec in e.error_group_specs: error_module_specs.append("@" + e_spec) module_depsolv_errors = e.module_depsolv_errors if module_depsolv_errors: logger.error(dnf.module.module_base.format_modular_solver_errors( module_depsolv_errors[0])) else: skipped_grp_specs = self.opts.grp_specs if self.opts.filenames and nevra_forms: self._inform_not_a_valid_combination(self.opts.filenames) if self.base.conf.strict: raise dnf.exceptions.Error(_('Nothing to do.')) else: err_pkgs = self._install_files() if skipped_grp_specs and nevra_forms: self._inform_not_a_valid_combination(skipped_grp_specs) if self.base.conf.strict: raise dnf.exceptions.Error(_('Nothing to do.')) elif skipped_grp_specs and self.opts.command != 'localinstall': self._install_groups(skipped_grp_specs) if self.opts.command != 'localinstall': errs = self._install_packages(nevra_forms) if (len(errs) != 0 or len(err_pkgs) != 0 or error_module_specs) and self.base.conf.strict: raise dnf.exceptions.PackagesNotAvailableError(_("Unable to find a match"), pkg_spec=' '.join(errs), packages=err_pkgs) def _get_nevra_forms_from_command(self): if self.opts.command in self.nevra_forms: return [self.nevra_forms[self.opts.command]] else: return [] def _log_not_valid_rpm_file_paths(self, grp_specs): group_names = map(lambda g: '@' + g, grp_specs) for pkg in chain(self.opts.pkg_specs, group_names): msg = _('Not a valid rpm file path: %s') logger.info(msg, self.base.output.term.bold(pkg)) def _inform_not_a_valid_combination(self, forms): for form in forms: msg = _('Not a valid form: %s') logger.warning(msg, self.base.output.term.bold(form)) def _install_files(self): err_pkgs = [] strict = self.base.conf.strict for pkg in self.base.add_remote_rpms(self.opts.filenames, strict=strict, progress=self.base.output.progress): try: self.base.package_install(pkg, strict=strict) except dnf.exceptions.MarkingError: msg = _('No match for argument: %s') logger.info(msg, self.base.output.term.bold(pkg.location)) err_pkgs.append(pkg) return err_pkgs def _install_groups(self, grp_specs): try: self.base.env_group_install(grp_specs, tuple(self.base.conf.group_package_types), strict=self.base.conf.strict) except dnf.exceptions.Error: if self.base.conf.strict: raise def _report_alternatives(self, pkg_spec): query = self.base.sack.query().filterm( provides=self.alternatives_provide.format(pkg_spec)) if query: msg = _('There are following alternatives for "{0}": {1}') logger.info(msg.format( pkg_spec, ', '.join(sorted(set([alt.name for alt in 
query]))))) def _install_packages(self, nevra_forms): errs = [] strict = self.base.conf.strict for pkg_spec in self.opts.pkg_specs: try: self.base.install(pkg_spec, strict=strict, forms=nevra_forms) except dnf.exceptions.MarkingError as e: msg = '{}: {}'.format(e.value, self.base.output.term.bold(pkg_spec)) logger.info(msg) self.base._report_icase_hint(pkg_spec) self._report_alternatives(pkg_spec) errs.append(pkg_spec) return errs PK!Nmmcli/commands/makecache.pynu[# makecache.py # Makecache CLI command. # # Copyright (C) 2014-2016 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # from __future__ import absolute_import from __future__ import unicode_literals from dnf.cli import commands from dnf.i18n import _ import argparse import dnf.cli import dnf.exceptions import dnf.util import logging logger = logging.getLogger("dnf") class MakeCacheCommand(commands.Command): aliases = ('makecache', 'mc') summary = _('generate the metadata cache') @staticmethod def set_argparser(parser): parser.add_argument('--timer', action='store_true', dest="timer_opt") # compatibility with dnf < 2.0 parser.add_argument('timer', nargs='?', choices=['timer'], metavar='timer', help=argparse.SUPPRESS) def run(self): timer = self.opts.timer is not None or self.opts.timer_opt msg = _("Making cache files for all metadata files.") logger.debug(msg) return self.base.update_cache(timer) PK!!B cli/commands/mark.pynu[# mark.py # Mark CLI command. # # Copyright (C) 2015-2016 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. 
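#
# Usage sketch for the mark command defined below (assumed invocations; <pkg>
# is a hypothetical placeholder for an installed package spec):
#
#   dnf mark install <pkg>      # record the package as installed by the user
#   dnf mark remove <pkg>       # record it as a dependency again
#   dnf mark group <pkg>        # record it as installed by a group
#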
# from __future__ import print_function from __future__ import unicode_literals import libdnf.transaction from dnf.i18n import _ from dnf.cli import commands import dnf import functools import logging logger = logging.getLogger("dnf") class MarkCommand(commands.Command): aliases = ('mark',) summary = _('mark or unmark installed packages as installed by user.') @staticmethod def set_argparser(parser): parser.add_argument('mark', nargs=1, choices=['install', 'remove', 'group'], help=_("install: mark as installed by user\n" "remove: unmark as installed by user\n" "group: mark as installed by group")) parser.add_argument('package', nargs='+', metavar="PACKAGE", help=_("Package specification")) def _mark_install(self, pkg): self.base.history.set_reason(pkg, libdnf.transaction.TransactionItemReason_USER) logger.info(_('%s marked as user installed.'), str(pkg)) def _mark_remove(self, pkg): self.base.history.set_reason(pkg, libdnf.transaction.TransactionItemReason_DEPENDENCY) logger.info(_('%s unmarked as user installed.'), str(pkg)) def _mark_group(self, pkg): self.base.history.set_reason(pkg, libdnf.transaction.TransactionItemReason_GROUP) logger.info(_('%s marked as group installed.'), str(pkg)) def configure(self): demands = self.cli.demands demands.sack_activation = True demands.root_user = True demands.available_repos = False demands.resolving = False def run(self): cmd = self.opts.mark[0] pkgs = self.opts.package mark_func = functools.partial(getattr(self, '_mark_' + cmd)) notfound = [] for pkg in pkgs: subj = dnf.subject.Subject(pkg) q = subj.get_best_query(self.base.sack) for pkg in q: mark_func(pkg) if len(q) == 0: notfound.append(pkg) if notfound: logger.error(_('Error:')) for pkg in notfound: logger.error(_('Package %s is not installed.'), pkg) raise dnf.cli.CliError old = self.base.history.last() if old is None: rpmdb_version = self.sack._rpmdb_version() else: rpmdb_version = old.end_rpmdb_version self.base.history.beg(rpmdb_version, [], []) self.base.history.end(rpmdb_version) PK!nAAcli/commands/module.pynu[# supplies the 'module' command. # # Copyright (C) 2014-2017 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. 
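#
# Usage sketch for the module command defined below (assumed invocations;
# <name>, <stream> and <profile> are hypothetical placeholders for the parts
# of a module spec):
#
#   dnf module list --enabled
#   dnf module info <name>
#   dnf module enable <name>:<stream>
#   dnf module install <name>:<stream>/<profile>
#   dnf module switch-to <name>:<stream>
#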
# from __future__ import print_function from dnf.cli import commands, CliError from dnf.i18n import _ from dnf.module.exceptions import NoModuleException from dnf.util import logger import dnf.util import sys import os import hawkey import libdnf import dnf.module.module_base import dnf.exceptions class ModuleCommand(commands.Command): class SubCommand(commands.Command): def __init__(self, cli): super(ModuleCommand.SubCommand, self).__init__(cli) self.module_base = dnf.module.module_base.ModuleBase(self.base) def _get_modules_from_name_stream_specs(self): modules_from_specs = set() for module_spec in self.opts.module_spec: __, nsvcap = self.module_base._get_modules(module_spec) # When there is no match, the problem was already reported by module_base.remove() if nsvcap is None: continue name = nsvcap.name if nsvcap.name else "" stream = nsvcap.stream if nsvcap.stream else "" if (nsvcap.version and nsvcap.version != -1) or nsvcap.context: logger.info(_("Only module name, stream, architecture or profile is used. " "Ignoring unneeded information in argument: '{}'").format( module_spec)) arch = nsvcap.arch if nsvcap.arch else "" modules = self.base._moduleContainer.query(name, stream, "", "", arch) modules_from_specs.update(modules) return modules_from_specs def _get_module_artifact_names(self, use_modules, skip_modules): artifacts = set() pkg_names = set() for module in use_modules: if module not in skip_modules: if self.base._moduleContainer.isModuleActive(module): artifacts.update(module.getArtifacts()) for artifact in artifacts: subj = hawkey.Subject(artifact) for nevra_obj in subj.get_nevra_possibilities( forms=[hawkey.FORM_NEVRA]): if nevra_obj.name: pkg_names.add(nevra_obj.name) return pkg_names, artifacts class ListSubCommand(SubCommand): aliases = ('list',) summary = _('list all module streams, profiles and states') def configure(self): demands = self.cli.demands demands.available_repos = True demands.sack_activation = True def run_on_module(self): mods = self.module_base if self.opts.enabled: output = mods._get_brief_description( self.opts.module_spec, libdnf.module.ModulePackageContainer.ModuleState_ENABLED) elif self.opts.disabled: output = mods._get_brief_description( self.opts.module_spec, libdnf.module.ModulePackageContainer.ModuleState_DISABLED) elif self.opts.installed: output = mods._get_brief_description( self.opts.module_spec, libdnf.module.ModulePackageContainer.ModuleState_INSTALLED) else: output = mods._get_brief_description( self.opts.module_spec, libdnf.module.ModulePackageContainer.ModuleState_UNKNOWN) if output: print(output) return if self.opts.module_spec: msg = _('No matching Modules to list') raise dnf.exceptions.Error(msg) class InfoSubCommand(SubCommand): aliases = ('info',) summary = _('print detailed information about a module') def configure(self): demands = self.cli.demands demands.available_repos = True demands.sack_activation = True def run_on_module(self): if self.opts.verbose: output = self.module_base._get_full_info(self.opts.module_spec) elif self.opts.profile: output = self.module_base._get_info_profiles(self.opts.module_spec) else: output = self.module_base._get_info(self.opts.module_spec) if output: print(output) else: raise dnf.exceptions.Error(_('No matching Modules to list')) class EnableSubCommand(SubCommand): aliases = ('enable',) summary = _('enable a module stream') def configure(self): demands = self.cli.demands demands.available_repos = True demands.sack_activation = True demands.resolving = True demands.root_user = True def 
run_on_module(self): try: self.module_base.enable(self.opts.module_spec) except dnf.exceptions.MarkingErrors as e: if self.base.conf.strict: if e.no_match_group_specs or e.error_group_specs: raise e if e.module_depsolv_errors and e.module_depsolv_errors[1] != \ libdnf.module.ModulePackageContainer.ModuleErrorType_ERROR_IN_DEFAULTS: raise e logger.error(str(e)) class DisableSubCommand(SubCommand): aliases = ('disable',) summary = _('disable a module with all its streams') def configure(self): demands = self.cli.demands demands.available_repos = True demands.sack_activation = True demands.resolving = True demands.root_user = True def run_on_module(self): try: self.module_base.disable(self.opts.module_spec) except dnf.exceptions.MarkingErrors as e: if self.base.conf.strict: if e.no_match_group_specs or e.error_group_specs: raise e if e.module_depsolv_errors and e.module_depsolv_errors[1] != \ libdnf.module.ModulePackageContainer.ModuleErrorType_ERROR_IN_DEFAULTS: raise e logger.error(str(e)) class ResetSubCommand(SubCommand): aliases = ('reset',) summary = _('reset a module') def configure(self): demands = self.cli.demands demands.available_repos = True demands.sack_activation = True demands.resolving = True demands.root_user = True def run_on_module(self): try: self.module_base.reset(self.opts.module_spec) except dnf.exceptions.MarkingErrors as e: if self.base.conf.strict: if e.no_match_group_specs: raise e logger.error(str(e)) class InstallSubCommand(SubCommand): aliases = ('install',) summary = _('install a module profile including its packages') def configure(self): demands = self.cli.demands demands.available_repos = True demands.sack_activation = True demands.resolving = True demands.root_user = True def run_on_module(self): try: self.module_base.install(self.opts.module_spec, self.base.conf.strict) except dnf.exceptions.MarkingErrors as e: if self.base.conf.strict: if e.no_match_group_specs or e.error_group_specs: raise e logger.error(str(e)) class UpdateSubCommand(SubCommand): aliases = ('update',) summary = _('update packages associated with an active stream') def configure(self): demands = self.cli.demands demands.available_repos = True demands.sack_activation = True demands.resolving = True demands.root_user = True def run_on_module(self): module_specs = self.module_base.upgrade(self.opts.module_spec) if module_specs: raise NoModuleException(", ".join(module_specs)) class RemoveSubCommand(SubCommand): aliases = ('remove', 'erase',) summary = _('remove installed module profiles and their packages') def configure(self): demands = self.cli.demands demands.allow_erasing = True demands.available_repos = True demands.fresh_metadata = False demands.resolving = True demands.root_user = True demands.sack_activation = True def run_on_module(self): skipped_groups = self.module_base.remove(self.opts.module_spec) if self.opts.all: modules_from_specs = self._get_modules_from_name_stream_specs() remove_names_from_spec, __ = self._get_module_artifact_names( modules_from_specs, set()) keep_names, __ = self._get_module_artifact_names( self.base._moduleContainer.getModulePackages(), modules_from_specs) remove_query = self.base.sack.query().installed().filterm( name=remove_names_from_spec) keep_query = self.base.sack.query().installed().filterm(name=keep_names) for pkg in remove_query: if pkg in keep_query: msg = _("Package {} belongs to multiple modules, skipping").format(pkg) logger.info(msg) else: self.base.goal.erase( pkg, clean_deps=self.base.conf.clean_requirements_on_remove) if not 
skipped_groups: return logger.error(dnf.exceptions.MarkingErrors(no_match_group_specs=skipped_groups)) class SwitchToSubCommand(SubCommand): aliases = ('switch-to',) summary = _('switch a module to a stream and distrosync rpm packages') def configure(self): demands = self.cli.demands demands.available_repos = True demands.sack_activation = True demands.resolving = True demands.root_user = True self.base.conf.module_stream_switch = True def run_on_module(self): try: self.module_base.switch_to(self.opts.module_spec, strict=self.base.conf.strict) except dnf.exceptions.MarkingErrors as e: if self.base.conf.strict: if e.no_match_group_specs or e.error_group_specs: raise e logger.error(str(e)) class ProvidesSubCommand(SubCommand): aliases = ("provides", ) summary = _('list modular packages') def configure(self): demands = self.cli.demands demands.available_repos = True demands.sack_activation = True def run_on_module(self): output = self.module_base._what_provides(self.opts.module_spec) if output: print(output) class RepoquerySubCommand(SubCommand): aliases = ("repoquery", ) summary = _('list packages belonging to a module') def configure(self): demands = self.cli.demands demands.available_repos = True demands.sack_activation = True def run_on_module(self): modules_from_specs = set() for module_spec in self.opts.module_spec: modules, __ = self.module_base._get_modules(module_spec) modules_from_specs.update(modules) names_from_spec, spec_artifacts = self._get_module_artifact_names( modules_from_specs, set()) package_strings = set() if self.opts.available or not self.opts.installed: query = self.base.sack.query().available().filterm(nevra_strict=spec_artifacts) for pkg in query: package_strings.add(str(pkg)) if self.opts.installed: query = self.base.sack.query().installed().filterm(name=names_from_spec) for pkg in query: package_strings.add(str(pkg)) output = "\n".join(sorted(package_strings)) print(output) SUBCMDS = {ListSubCommand, InfoSubCommand, EnableSubCommand, DisableSubCommand, ResetSubCommand, InstallSubCommand, UpdateSubCommand, RemoveSubCommand, SwitchToSubCommand, ProvidesSubCommand, RepoquerySubCommand} SUBCMDS_NOT_REQUIRED_ARG = {ListSubCommand} aliases = ("module",) summary = _("Interact with Modules.") def __init__(self, cli): super(ModuleCommand, self).__init__(cli) subcmd_objs = (subcmd(cli) for subcmd in self.SUBCMDS) self.subcmd = None self._subcmd_name2obj = { alias: subcmd for subcmd in subcmd_objs for alias in subcmd.aliases} def set_argparser(self, parser): narrows = parser.add_mutually_exclusive_group() narrows.add_argument('--enabled', dest='enabled', action='store_true', help=_("show only enabled modules")) narrows.add_argument('--disabled', dest='disabled', action='store_true', help=_("show only disabled modules")) narrows.add_argument('--installed', dest='installed', action='store_true', help=_("show only installed modules or packages")) narrows.add_argument('--profile', dest='profile', action='store_true', help=_("show profile content")) parser.add_argument('--available', dest='available', action='store_true', help=_("show only available packages")) narrows.add_argument('--all', dest='all', action='store_true', help=_("remove all modular packages")) subcommand_choices = [] subcommand_help = [] for subcmd in sorted(self.SUBCMDS, key=lambda x: x.aliases[0]): subcommand_choices.append(subcmd.aliases[0]) subcommand_help.append('{}: {}'.format(subcmd.aliases[0], subcmd.summary or '')) parser.add_argument('subcmd', nargs=1, choices=subcommand_choices, metavar='', 
                            help='\n'.join(subcommand_help))
        parser.add_argument('module_spec', metavar='module-spec', nargs='*',
                            help=_("Module specification"))

    def configure(self):
        try:
            self.subcmd = self._subcmd_name2obj[self.opts.subcmd[0]]
        except (CliError, KeyError):
            self.cli.optparser.print_usage()
            raise CliError
        self.subcmd.opts = self.opts
        self.subcmd.configure()

    def run(self):
        self.check_required_argument()
        self.subcmd.run_on_module()

    def check_required_argument(self):
        not_required_argument = [alias for subcmd in self.SUBCMDS_NOT_REQUIRED_ARG
                                 for alias in subcmd.aliases]
        if self.opts.subcmd[0] not in not_required_argument:
            if not self.opts.module_spec:
                raise CliError(
                    _("{} {} {}: too few arguments").format(dnf.util.MAIN_PROG,
                                                            self.opts.command,
                                                            self.opts.subcmd[0]))


# ---- archive member: cli/commands/reinstall.py ----

# reinstall.py
# Reinstall CLI command.
#
# Copyright (C) 2014-2016 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#

from __future__ import absolute_import
from __future__ import unicode_literals

from dnf.cli import commands
from dnf.cli.option_parser import OptionParser
from dnf.i18n import _

import dnf.exceptions
import logging

logger = logging.getLogger('dnf')


class ReinstallCommand(commands.Command):
    """A class containing methods needed by the cli to execute the reinstall
    command.
    """

    aliases = ('reinstall', 'rei')
    summary = _('reinstall a package')

    @staticmethod
    def set_argparser(parser):
        parser.add_argument('packages', nargs='+', help=_('Package to reinstall'),
                            action=OptionParser.ParseSpecGroupFileCallback,
                            metavar=_('PACKAGE'))

    def configure(self):
        """Verify that conditions are met so that this command can run.

        These include that the program is being run by the root user,
        that there are enabled repositories with gpg keys, and that
        this command is called with appropriate arguments.
        """
        demands = self.cli.demands
        demands.sack_activation = True
        demands.available_repos = True
        demands.resolving = True
        demands.root_user = True
        commands._checkGPGKey(self.base, self.cli)
        if not self.opts.filenames:
            commands._checkEnabledRepo(self.base)

    def run(self):
        # Reinstall files.
        done = False
        for pkg in self.base.add_remote_rpms(self.opts.filenames, strict=False,
                                             progress=self.base.output.progress):
            try:
                self.base.package_reinstall(pkg)
            except dnf.exceptions.MarkingError:
                logger.info(_('No match for argument: %s'),
                            self.base.output.term.bold(pkg.location))
            else:
                done = True

        # Reinstall packages.
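        # Editorial note: the ParseSpecGroupFileCallback action used in
        # set_argparser() has already split the positional arguments into
        # local filenames, plain package specs and group specs (stored
        # without their leading '@'); the '@' is re-added below so group
        # specs and package specs can be resolved through the same
        # self.base.reinstall() call.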
        for pkg_spec in self.opts.pkg_specs + ['@' + x for x in self.opts.grp_specs]:
            try:
                self.base.reinstall(pkg_spec)
            except dnf.exceptions.PackagesNotInstalledError as err:
                for pkg in err.packages:
                    logger.info(_('Package %s available, but not installed.'),
                                self.output.term.bold(pkg.name))
                    break
                logger.info(_('No match for argument: %s'),
                            self.base.output.term.bold(pkg_spec))
            except dnf.exceptions.PackagesNotAvailableError as err:
                for pkg in err.packages:
                    xmsg = ''
                    pkgrepo = self.base.history.repo(pkg)
                    if pkgrepo:
                        xmsg = _(' (from %s)') % pkgrepo
                    msg = _('Installed package %s%s not available.')
                    logger.info(msg, self.base.output.term.bold(pkg), xmsg)
            except dnf.exceptions.MarkingError:
                assert False, 'Only the above marking errors are expected.'
            else:
                done = True

        if not done:
            raise dnf.exceptions.Error(_('No packages marked for reinstall.'))


# ---- archive member: cli/commands/remove.py ----

# remove_command.py
# Remove CLI command.
#
# Copyright (C) 2012-2016 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#

from __future__ import absolute_import
from __future__ import unicode_literals

from dnf.cli import commands
from dnf.i18n import _
from dnf.cli.option_parser import OptionParser

import dnf.base
import argparse
import hawkey
import dnf.exceptions
import logging

logger = logging.getLogger("dnf")


class RemoveCommand(commands.Command):
    """Remove command."""

    nevra_forms = {'remove-n': hawkey.FORM_NAME,
                   'remove-na': hawkey.FORM_NA,
                   'remove-nevra': hawkey.FORM_NEVRA,
                   'erase-n': hawkey.FORM_NAME,
                   'erase-na': hawkey.FORM_NA,
                   'erase-nevra': hawkey.FORM_NEVRA}

    aliases = ('remove', 'erase', 'rm') + tuple(nevra_forms.keys())
    summary = _('remove a package or packages from your system')

    @staticmethod
    def set_argparser(parser):
        mgroup = parser.add_mutually_exclusive_group()
        mgroup.add_argument('--duplicates', action='store_true',
                            dest='duplicated',
                            help=_('remove duplicated packages'))
        mgroup.add_argument('--duplicated', action='store_true',
                            help=argparse.SUPPRESS)
        mgroup.add_argument('--oldinstallonly', action='store_true',
                            help=_(
                                'remove installonly packages over the limit'))
        parser.add_argument('packages', nargs='*', help=_('Package to remove'),
                            action=OptionParser.ParseSpecGroupFileCallback,
                            metavar=_('PACKAGE'))

    def configure(self):
        demands = self.cli.demands
        # disable all available repos to delete whole dependency tree
        # instead of replacing removable package with available packages
        demands.resolving = True
        demands.root_user = True
        demands.sack_activation = True
        if self.opts.duplicated:
            demands.available_repos = True
        elif dnf.base.WITH_MODULES and self.opts.grp_specs:
            demands.available_repos = True
            demands.fresh_metadata = False
            demands.allow_erasing = True
        else:
            demands.allow_erasing = True
            demands.available_repos = False

    def run(self):
        forms = []
        if self.opts.command in self.nevra_forms:
            forms = [self.nevra_forms[self.opts.command]]

        # local pkgs not supported in erase command
        self.opts.pkg_specs += self.opts.filenames
        done = False

        if self.opts.duplicated:
            q = self.base.sack.query()
            instonly = self.base._get_installonly_query(q.installed())
            dups = q.duplicated().difference(instonly)
            if not dups:
                raise dnf.exceptions.Error(_('No duplicated packages found for removal.'))

            for (name, arch), pkgs_list in dups._na_dict().items():
                if len(pkgs_list) < 2:
                    continue
                pkgs_list.sort(reverse=True)
                try:
                    self.base.reinstall(str(pkgs_list[0]))
                except dnf.exceptions.PackagesNotAvailableError:
                    xmsg = ''
                    msg = _('Installed package %s%s not available.')
                    logger.warning(msg, self.base.output.term.bold(str(pkgs_list[0])), xmsg)

                for pkg in pkgs_list[1:]:
                    self.base.package_remove(pkg)
            return

        if self.opts.oldinstallonly:
            q = self.base.sack.query()
            instonly = self.base._get_installonly_query(q.installed()).latest(-1)
            # also remove running kernel from the set
            kernel = self.base.sack.get_running_kernel()
            if kernel is not None:
                running_installonly = instonly.filter(
                    epoch=kernel.epoch, version=kernel.version, release=kernel.release)
                if running_installonly:
                    instonly = instonly.difference(running_installonly)
            if instonly:
                for pkg in instonly:
                    self.base.package_remove(pkg)
            else:
                raise dnf.exceptions.Error(
                    _('No old installonly packages found for removal.'))
            return

        # Remove groups.
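        # Editorial note: group specs are rejected when an explicit NEVRA
        # form (remove-n, remove-na, remove-nevra and the erase-* aliases)
        # was requested; otherwise they are removed as modules when module
        # support is built in, and any spec not matched by a module is
        # retried as a comps environment/group via env_group_remove().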
if self.opts.grp_specs and forms: for grp_spec in self.opts.grp_specs: msg = _('Not a valid form: %s') logger.warning(msg, self.base.output.term.bold(grp_spec)) elif self.opts.grp_specs: if dnf.base.WITH_MODULES: module_base = dnf.module.module_base.ModuleBase(self.base) skipped_grps = module_base.remove(self.opts.grp_specs) if len(self.opts.grp_specs) != len(skipped_grps): done = True else: skipped_grps = self.opts.grp_specs if skipped_grps: for group in skipped_grps: try: if self.base.env_group_remove([group]): done = True except dnf.exceptions.Error: pass for pkg_spec in self.opts.pkg_specs: try: self.base.remove(pkg_spec, forms=forms) except dnf.exceptions.MarkingError as e: msg = '{}: {}'.format(e.value, self.base.output.term.bold(pkg_spec)) logger.info(msg) else: done = True if not done: logger.warning(_('No packages marked for removal.')) PK!3z2z2cli/commands/repolist.pynu[# repolist.py # repolist CLI command. # # Copyright (C) 2014-2016 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # from __future__ import absolute_import from __future__ import unicode_literals from dnf.cli import commands from dnf.i18n import _, ucd, fill_exact_width, exact_width from dnf.cli.option_parser import OptionParser import dnf.cli.format import dnf.pycomp import dnf.util import fnmatch import hawkey import logging import operator logger = logging.getLogger('dnf') def _expire_str(repo, md): last = dnf.util.normalize_time(repo._repo.getTimestamp()) if md else _("unknown") if repo.metadata_expire <= -1: return _("Never (last: %s)") % last elif not repo.metadata_expire: return _("Instant (last: %s)") % last else: num = _num2ui_num(repo.metadata_expire) return _("%s second(s) (last: %s)") % (num, last) def _num2ui_num(num): return ucd(dnf.pycomp.format("%d", num, True)) def _repo_match(repo, patterns): rid = repo.id.lower() rnm = repo.name.lower() for pat in patterns: if fnmatch.fnmatch(rid, pat): return True if fnmatch.fnmatch(rnm, pat): return True return False def _repo_size(sack, repo): ret = 0 for pkg in sack.query(flags=hawkey.IGNORE_EXCLUDES).filterm(reponame__eq=repo.id): ret += pkg._size return dnf.cli.format.format_number(ret) class RepoListCommand(commands.Command): """A class containing methods needed by the cli to execute the repolist command. 
""" aliases = ('repolist', 'repoinfo') summary = _('display the configured software repositories') @staticmethod def set_argparser(parser): repolimit = parser.add_mutually_exclusive_group() repolimit.add_argument('--all', dest='_repos_action', action='store_const', const='all', default=None, help=_("show all repos")) repolimit.add_argument('--enabled', dest='_repos_action', action='store_const', const='enabled', help=_("show enabled repos (default)")) repolimit.add_argument('--disabled', dest='_repos_action', action='store_const', const='disabled', help=_("show disabled repos")) parser.add_argument('repos', nargs='*', default='enabled-default', metavar="REPOSITORY", choices=['all', 'enabled', 'disabled'], action=OptionParser.PkgNarrowCallback, help=_("Repository specification")) def pre_configure(self): if not self.opts.quiet: self.cli.redirect_logger(stdout=logging.WARNING, stderr=logging.INFO) def configure(self): if not self.opts.quiet: self.cli.redirect_repo_progress() demands = self.cli.demands if self.base.conf.verbose or self.opts.command == 'repoinfo': demands.available_repos = True demands.sack_activation = True if self.opts._repos_action: self.opts.repos_action = self.opts._repos_action def run(self): arg = self.opts.repos_action extcmds = [x.lower() for x in self.opts.repos] verbose = self.base.conf.verbose repos = list(self.base.repos.values()) repos.sort(key=operator.attrgetter('id')) term = self.output.term on_ehibeg = term.FG_COLOR['green'] + term.MODE['bold'] on_dhibeg = term.FG_COLOR['red'] on_hiend = term.MODE['normal'] tot_num = 0 cols = [] if not repos: logger.warning(_('No repositories available')) return include_status = arg == 'all' or (arg == 'enabled-default' and extcmds) repoinfo_output = [] for repo in repos: if len(extcmds) and not _repo_match(repo, extcmds): continue (ehibeg, dhibeg, hiend) = '', '', '' ui_enabled = '' ui_endis_wid = 0 ui_excludes_num = '' if include_status: (ehibeg, dhibeg, hiend) = (on_ehibeg, on_dhibeg, on_hiend) if repo.enabled: enabled = True if arg == 'disabled': continue if include_status or verbose or self.opts.command == 'repoinfo': ui_enabled = ehibeg + _('enabled') + hiend ui_endis_wid = exact_width(_('enabled')) if verbose or self.opts.command == 'repoinfo': ui_size = _repo_size(self.base.sack, repo) else: enabled = False if arg == 'enabled' or (arg == 'enabled-default' and not extcmds): continue ui_enabled = dhibeg + _('disabled') + hiend ui_endis_wid = exact_width(_('disabled')) if not (verbose or self.opts.command == 'repoinfo'): rid = ucd(repo.id) cols.append((rid, repo.name, (ui_enabled, ui_endis_wid))) else: if enabled: md = repo.metadata else: md = None out = [self.output.fmtKeyValFill(_("Repo-id : "), repo.id), self.output.fmtKeyValFill(_("Repo-name : "), repo.name)] if include_status: out += [self.output.fmtKeyValFill(_("Repo-status : "), ui_enabled)] if md and repo._repo.getRevision(): out += [self.output.fmtKeyValFill(_("Repo-revision : "), repo._repo.getRevision())] if md and repo._repo.getContentTags(): tags = repo._repo.getContentTags() out += [self.output.fmtKeyValFill(_("Repo-tags : "), ", ".join(sorted(tags)))] if md and repo._repo.getDistroTags(): distroTagsDict = {k: v for (k, v) in repo._repo.getDistroTags()} for (distro, tags) in distroTagsDict.items(): out += [self.output.fmtKeyValFill( _("Repo-distro-tags : "), "[%s]: %s" % (distro, ", ".join(sorted(tags))))] if md: num = len(self.base.sack.query(flags=hawkey.IGNORE_EXCLUDES).filterm( reponame__eq=repo.id)) num_available = 
len(self.base.sack.query().filterm(reponame__eq=repo.id)) ui_num = _num2ui_num(num) ui_num_available = _num2ui_num(num_available) tot_num += num out += [ self.output.fmtKeyValFill( _("Repo-updated : "), dnf.util.normalize_time(repo._repo.getMaxTimestamp())), self.output.fmtKeyValFill(_("Repo-pkgs : "), ui_num), self.output.fmtKeyValFill(_("Repo-available-pkgs: "), ui_num_available), self.output.fmtKeyValFill(_("Repo-size : "), ui_size)] if repo.metalink: out += [self.output.fmtKeyValFill(_("Repo-metalink : "), repo.metalink)] if enabled: ts = repo._repo.getTimestamp() out += [self.output.fmtKeyValFill( _(" Updated : "), dnf.util.normalize_time(ts))] elif repo.mirrorlist: out += [self.output.fmtKeyValFill(_("Repo-mirrors : "), repo.mirrorlist)] baseurls = repo.baseurl if baseurls: out += [self.output.fmtKeyValFill(_("Repo-baseurl : "), ", ".join(baseurls))] elif enabled: mirrors = repo._repo.getMirrors() if mirrors: url = "%s (%d more)" % (mirrors[0], len(mirrors) - 1) out += [self.output.fmtKeyValFill(_("Repo-baseurl : "), url)] expire = _expire_str(repo, md) out += [self.output.fmtKeyValFill(_("Repo-expire : "), expire)] if repo.excludepkgs: # TRANSLATORS: Packages that are excluded - their names like (dnf systemd) out += [self.output.fmtKeyValFill(_("Repo-exclude : "), ", ".join(repo.excludepkgs))] if repo.includepkgs: out += [self.output.fmtKeyValFill(_("Repo-include : "), ", ".join(repo.includepkgs))] if ui_excludes_num: # TRANSLATORS: Number of packages that where excluded (5) out += [self.output.fmtKeyValFill(_("Repo-excluded : "), ui_excludes_num)] if repo.repofile: out += [self.output.fmtKeyValFill(_("Repo-filename : "), repo.repofile)] repoinfo_output.append("\n".join(map(ucd, out))) if repoinfo_output: print("\n\n".join(repoinfo_output)) if not verbose and cols: # Work out the first (id) and last (enabled/disabled/count), # then chop the middle (name)... id_len = exact_width(_('repo id')) nm_len = 0 st_len = 0 for (rid, rname, (ui_enabled, ui_endis_wid)) in cols: if id_len < exact_width(rid): id_len = exact_width(rid) if nm_len < exact_width(rname): nm_len = exact_width(rname) if st_len < ui_endis_wid: st_len = ui_endis_wid # Need this as well as above for: fill_exact_width() if include_status: if exact_width(_('status')) > st_len: left = term.columns - (id_len + len(_('status')) + 2) else: left = term.columns - (id_len + st_len + 2) else: # Don't output a status column. left = term.columns - (id_len + 1) if left < nm_len: # Name gets chopped nm_len = left else: # Share the extra... left -= nm_len id_len += left // 2 nm_len += left - (left // 2) txt_rid = fill_exact_width(_('repo id'), id_len) if include_status: txt_rnam = fill_exact_width(_('repo name'), nm_len, nm_len) else: txt_rnam = _('repo name') if not include_status: # Don't output a status column. print("%s %s" % (txt_rid, txt_rnam)) else: print("%s %s %s" % (txt_rid, txt_rnam, _('status'))) for (rid, rname, (ui_enabled, ui_endis_wid)) in cols: if not include_status: # Don't output a status column. print("%s %s" % (fill_exact_width(rid, id_len), rname)) continue print("%s %s %s" % (fill_exact_width(rid, id_len), fill_exact_width(rname, nm_len, nm_len), ui_enabled)) if verbose or self.opts.command == 'repoinfo': msg = _('Total packages: {}') print(msg.format(_num2ui_num(tot_num))) PK!yننcli/commands/repoquery.pynu[# # Copyright (C) 2014 Red Hat, Inc. 
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#

from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals

from dnf.i18n import _
from dnf.cli import commands
from dnf.cli.option_parser import OptionParser

import argparse
import datetime
import logging
import re
import sys

import dnf
import dnf.cli
import dnf.exceptions
import dnf.subject
import dnf.util
import hawkey

logger = logging.getLogger('dnf')


QFORMAT_DEFAULT = '%{name}-%{epoch}:%{version}-%{release}.%{arch}'
# matches %[-][dd]{attr}
QFORMAT_MATCH = re.compile(r'%(-?\d*?){([:.\w]+?)}')

QUERY_TAGS = """\
name, arch, epoch, version, release, reponame (repoid), from_repo, evr,
debug_name, source_name, source_debug_name,
installtime, buildtime, size, downloadsize, installsize,
provides, requires, obsoletes, conflicts, sourcerpm,
description, summary, license, url, reason"""

OPTS_MAPPING = {
    'conflicts': 'conflicts',
    'enhances': 'enhances',
    'obsoletes': 'obsoletes',
    'provides': 'provides',
    'recommends': 'recommends',
    'requires': 'requires',
    'requires-pre': 'requires_pre',
    'suggests': 'suggests',
    'supplements': 'supplements'
}


def rpm2py_format(queryformat):
    """Convert a rpm like QUERYFMT to an python .format() string."""
    def fmt_repl(matchobj):
        fill = matchobj.groups()[0]
        key = matchobj.groups()[1]
        if fill:
            if fill[0] == '-':
                fill = '>' + fill[1:]
            else:
                fill = '<' + fill
            fill = ':' + fill
        return '{0.' + key.lower() + fill + "}"

    def brackets(txt):
        return txt.replace('{', '{{').replace('}', '}}')

    queryformat = queryformat.replace("\\n", "\n").replace("\\t", "\t")
    for key, value in OPTS_MAPPING.items():
        queryformat = queryformat.replace(key, value)
    fmt = ""
    spos = 0
    for item in QFORMAT_MATCH.finditer(queryformat):
        fmt += brackets(queryformat[spos:item.start()])
        fmt += fmt_repl(item)
        spos = item.end()
    fmt += brackets(queryformat[spos:])
    return fmt


class _CommaSplitCallback(OptionParser._SplitCallback):
    SPLITTER = r'\s*,\s*'


class RepoQueryCommand(commands.Command):
    """A class containing methods needed by the cli to execute the repoquery
    command.
""" nevra_forms = {'repoquery-n': hawkey.FORM_NAME, 'repoquery-na': hawkey.FORM_NA, 'repoquery-nevra': hawkey.FORM_NEVRA} aliases = ('repoquery', 'rq') + tuple(nevra_forms.keys()) summary = _('search for packages matching keyword') @staticmethod def filter_repo_arch(opts, query): """Filter query by repoid and arch options""" if opts.repo: query.filterm(reponame=opts.repo) if opts.arches: query.filterm(arch=opts.arches) return query @staticmethod def set_argparser(parser): parser.add_argument('-a', '--all', dest='queryall', action='store_true', help=_("Query all packages (shorthand for repoquery '*' " "or repoquery without argument)")) parser.add_argument('--show-duplicates', action='store_true', help=_("Query all versions of packages (default)")) parser.add_argument('--arch', '--archlist', dest='arches', default=[], action=_CommaSplitCallback, metavar='[arch]', help=_('show only results from this ARCH')) parser.add_argument('-f', '--file', metavar='FILE', nargs='+', help=_('show only results that owns FILE')) parser.add_argument('--whatconflicts', default=[], action=_CommaSplitCallback, metavar='REQ', help=_('show only results that conflict REQ')) parser.add_argument('--whatdepends', default=[], action=_CommaSplitCallback, metavar='REQ', help=_('shows results that requires, suggests, supplements, enhances,' 'or recommends package provides and files REQ')) parser.add_argument('--whatobsoletes', default=[], action=_CommaSplitCallback, metavar='REQ', help=_('show only results that obsolete REQ')) parser.add_argument('--whatprovides', default=[], action=_CommaSplitCallback, metavar='REQ', help=_('show only results that provide REQ')) parser.add_argument('--whatrequires', default=[], action=_CommaSplitCallback, metavar='REQ', help=_('shows results that requires package provides and files REQ')) parser.add_argument('--whatrecommends', default=[], action=_CommaSplitCallback, metavar='REQ', help=_('show only results that recommend REQ')) parser.add_argument('--whatenhances', default=[], action=_CommaSplitCallback, metavar='REQ', help=_('show only results that enhance REQ')) parser.add_argument('--whatsuggests', default=[], action=_CommaSplitCallback, metavar='REQ', help=_('show only results that suggest REQ')) parser.add_argument('--whatsupplements', default=[], action=_CommaSplitCallback, metavar='REQ', help=_('show only results that supplement REQ')) whatrequiresform = parser.add_mutually_exclusive_group() whatrequiresform.add_argument("--alldeps", action="store_true", help=_("check non-explicit dependencies (files and Provides); default")) whatrequiresform.add_argument("--exactdeps", action="store_true", help=_('check dependencies exactly as given, opposite of --alldeps')) parser.add_argument("--recursive", action="store_true", help=_( 'used with --whatrequires, and --requires --resolve, query packages recursively.')) parser.add_argument('--deplist', action='store_true', help=_( "show a list of all dependencies and what packages provide them")) parser.add_argument('--resolve', action='store_true', help=_('resolve capabilities to originating package(s)')) parser.add_argument("--tree", action="store_true", help=_('show recursive tree for package(s)')) parser.add_argument('--srpm', action='store_true', help=_('operate on corresponding source RPM')) parser.add_argument("--latest-limit", dest='latest_limit', type=int, help=_('show N latest packages for a given name.arch' ' (or latest but N if N is negative)')) parser.add_argument("--disable-modular-filtering", action="store_true", help=_("list also 
packages of inactive module streams")) outform = parser.add_mutually_exclusive_group() outform.add_argument('-i', "--info", dest='queryinfo', default=False, action='store_true', help=_('show detailed information about the package')) outform.add_argument('-l', "--list", dest='queryfilelist', default=False, action='store_true', help=_('show list of files in the package')) outform.add_argument('-s', "--source", dest='querysourcerpm', default=False, action='store_true', help=_('show package source RPM name')) outform.add_argument('--changelogs', dest='querychangelogs', default=False, action='store_true', help=_('show changelogs of the package')) outform.add_argument('--qf', "--queryformat", dest='queryformat', default=QFORMAT_DEFAULT, help=_('display format for listing packages: ' '"%%{name} %%{version} ...", ' 'use --querytags to view full tag list')) parser.add_argument('--querytags', action='store_true', help=_('show available tags to use with ' '--queryformat')) outform.add_argument("--nevra", dest='queryformat', const=QFORMAT_DEFAULT, action='store_const', help=_('use name-epoch:version-release.architecture format for ' 'displaying found packages (default)')) outform.add_argument("--nvr", dest='queryformat', const='%{name}-%{version}-%{release}', action='store_const', help=_('use name-version-release format for ' 'displaying found packages ' '(rpm query default)')) outform.add_argument("--envra", dest='queryformat', const='%{epoch}:%{name}-%{version}-%{release}.%{arch}', action='store_const', help=_('use epoch:name-version-release.architecture format for ' 'displaying found packages')) outform.add_argument('--groupmember', action="store_true", help=_( 'Display in which comps groups are presented selected packages')) pkgfilter = parser.add_mutually_exclusive_group() pkgfilter.add_argument("--duplicates", dest='pkgfilter', const='duplicated', action='store_const', help=_('limit the query to installed duplicate ' 'packages')) pkgfilter.add_argument("--duplicated", dest='pkgfilter', const='duplicated', action='store_const', help=argparse.SUPPRESS) pkgfilter.add_argument("--installonly", dest='pkgfilter', const='installonly', action='store_const', help=_('limit the query to installed installonly packages')) pkgfilter.add_argument("--unsatisfied", dest='pkgfilter', const='unsatisfied', action='store_const', help=_('limit the query to installed packages with unsatisfied dependencies')) parser.add_argument('--location', action='store_true', help=_('show a location from where packages can be downloaded')) package_attribute = parser.add_mutually_exclusive_group() help_msgs = { 'conflicts': _('Display capabilities that the package conflicts with.'), 'depends': _('Display capabilities that the package can depend on, enhance, recommend,' ' suggest, and supplement.'), 'enhances': _('Display capabilities that the package can enhance.'), 'provides': _('Display capabilities provided by the package.'), 'recommends': _('Display capabilities that the package recommends.'), 'requires': _('Display capabilities that the package depends on.'), 'requires-pre': _('If the package is not installed display capabilities that it depends on for ' 'running %%pre and %%post scriptlets. 
If the package is installed display ' 'capabilities that is depends for %%pre, %%post, %%preun and %%postun.'), 'suggests': _('Display capabilities that the package suggests.'), 'supplements': _('Display capabilities that the package can supplement.') } for arg, help_msg in help_msgs.items(): name = '--%s' % arg package_attribute.add_argument(name, dest='packageatr', action='store_const', const=arg, help=help_msg) parser.add_argument('--available', action="store_true", help=_('Display only available packages.')) help_list = { 'installed': _('Display only installed packages.'), 'extras': _('Display only packages that are not present in any of available repositories.'), 'upgrades': _('Display only packages that provide an upgrade for some already installed package.'), 'unneeded': _('Display only packages that can be removed by "{prog} autoremove" ' 'command.').format(prog=dnf.util.MAIN_PROG), 'userinstalled': _('Display only packages that were installed by user.') } list_group = parser.add_mutually_exclusive_group() for list_arg, help_arg in help_list.items(): switch = '--%s' % list_arg list_group.add_argument(switch, dest='list', action='store_const', const=list_arg, help=help_arg) # make --autoremove hidden compatibility alias for --unneeded list_group.add_argument( '--autoremove', dest='list', action='store_const', const="unneeded", help=argparse.SUPPRESS) parser.add_argument('--recent', action="store_true", help=_('Display only recently edited packages')) parser.add_argument('key', nargs='*', metavar="KEY", help=_('the key to search for')) def pre_configure(self): if not self.opts.quiet: self.cli.redirect_logger(stdout=logging.WARNING, stderr=logging.INFO) def configure(self): if not self.opts.quiet: self.cli.redirect_repo_progress() demands = self.cli.demands if self.opts.obsoletes: if self.opts.packageatr: self.cli._option_conflict("--obsoletes", "--" + self.opts.packageatr) else: self.opts.packageatr = "obsoletes" if self.opts.querytags: return if self.opts.resolve and not self.opts.packageatr: raise dnf.cli.CliError( _("Option '--resolve' has to be used together with one of the " "'--conflicts', '--depends', '--enhances', '--provides', '--recommends', " "'--requires', '--requires-pre', '--suggests' or '--supplements' options")) if self.opts.recursive: if self.opts.exactdeps: self.cli._option_conflict("--recursive", "--exactdeps") if not any([self.opts.whatrequires, (self.opts.packageatr == "requires" and self.opts.resolve)]): raise dnf.cli.CliError( _("Option '--recursive' has to be used with '--whatrequires ' " "(optionally with '--alldeps', but not with '--exactdeps'), or with " "'--requires --resolve'")) if self.opts.alldeps or self.opts.exactdeps: if not (self.opts.whatrequires or self.opts.whatdepends): raise dnf.cli.CliError( _("argument {} requires --whatrequires or --whatdepends option".format( '--alldeps' if self.opts.alldeps else '--exactdeps'))) if self.opts.srpm: self.base.repos.enable_source_repos() if (self.opts.list not in ["installed", "userinstalled"] and self.opts.pkgfilter != "installonly") or self.opts.available: demands.available_repos = True demands.sack_activation = True if self.opts.querychangelogs: demands.changelogs = True def build_format_fn(self, opts, pkg): if opts.querychangelogs: out = [] out.append('Changelog for %s' % str(pkg)) for chlog in pkg.changelogs: dt = chlog['timestamp'] out.append('* %s %s\n%s\n' % (dt.strftime("%a %b %d %Y"), dnf.i18n.ucd(chlog['author']), dnf.i18n.ucd(chlog['text']))) return '\n'.join(out) try: po = PackageWrapper(pkg) if 
opts.queryinfo: return self.base.output.infoOutput(pkg) elif opts.queryfilelist: filelist = po.files if not filelist: print(_('Package {} contains no files').format(pkg), file=sys.stderr) return filelist elif opts.querysourcerpm: return po.sourcerpm else: return rpm2py_format(opts.queryformat).format(po) except AttributeError as e: # catch that the user has specified attributes # there don't exist on the dnf Package object. raise dnf.exceptions.Error(str(e)) def _resolve_nevras(self, nevras, base_query): resolved_nevras_query = self.base.sack.query().filterm(empty=True) for nevra in nevras: resolved_nevras_query = resolved_nevras_query.union(base_query.intersection( dnf.subject.Subject(nevra).get_best_query( self.base.sack, with_provides=False, with_filenames=False ) )) return resolved_nevras_query def _do_recursive_deps(self, query_in, query_select, done=None): done = done if done else query_select query_required = query_in.filter(requires=query_select) query_select = query_required.difference(done) done = query_required.union(done) if query_select: done = self._do_recursive_deps(query_in, query_select, done=done) return done def by_all_deps(self, names, query, all_dep_types=False): # in case of arguments being NEVRAs, resolve them to packages resolved_nevras_query = self._resolve_nevras(names, query) # filter the arguments directly as reldeps depquery = query.filter(requires__glob=names) # filter the resolved NEVRAs as packages depquery = depquery.union(query.filter(requires=resolved_nevras_query)) if all_dep_types: # TODO this is very inefficient, as it resolves the `names` glob to # reldeps four more times, which in a reasonably wide glob like # `dnf repoquery --whatdepends "libdnf*"` can take roughly 50% of # the total execution time. depquery = depquery.union(query.filter(recommends__glob=names)) depquery = depquery.union(query.filter(enhances__glob=names)) depquery = depquery.union(query.filter(supplements__glob=names)) depquery = depquery.union(query.filter(suggests__glob=names)) depquery = depquery.union(query.filter(recommends=resolved_nevras_query)) depquery = depquery.union(query.filter(enhances=resolved_nevras_query)) depquery = depquery.union(query.filter(supplements=resolved_nevras_query)) depquery = depquery.union(query.filter(suggests=resolved_nevras_query)) if self.opts.recursive: depquery = self._do_recursive_deps(query, depquery) return depquery def _get_recursive_providers_query(self, query_in, providers, done=None): done = done if done else self.base.sack.query().filterm(empty=True) t = self.base.sack.query().filterm(empty=True) for pkg in providers.run(): t = t.union(query_in.filter(provides=pkg.requires)) query_select = t.difference(done) if query_select: done = self._get_recursive_providers_query(query_in, query_select, done=t.union(done)) return t.union(done) def _add_add_remote_packages(self): rpmnames = [] remote_packages = [] for key in self.opts.key: schemes = dnf.pycomp.urlparse.urlparse(key)[0] if key.endswith('.rpm'): rpmnames.append(key) elif schemes and schemes in ('http', 'ftp', 'file', 'https'): rpmnames.append(key) if rpmnames: remote_packages = self.base.add_remote_rpms( rpmnames, strict=False, progress=self.base.output.progress) return remote_packages def run(self): if self.opts.querytags: print(QUERY_TAGS) return self.cli._populate_update_security_filter(self.opts) q = self.base.sack.query( flags=hawkey.IGNORE_MODULAR_EXCLUDES if self.opts.disable_modular_filtering else hawkey.APPLY_EXCLUDES ) if self.opts.key: remote_packages = 
self._add_add_remote_packages() kwark = {} if self.opts.command in self.nevra_forms: kwark["forms"] = [self.nevra_forms[self.opts.command]] pkgs = [] query_results = q.filter(empty=True) if remote_packages: query_results = query_results.union( self.base.sack.query().filterm(pkg=remote_packages)) for key in self.opts.key: query_results = query_results.union( dnf.subject.Subject(key, ignore_case=True).get_best_query( self.base.sack, with_provides=False, query=q, **kwark)) q = query_results if self.opts.recent: q = q._recent(self.base.conf.recent) if self.opts.available: if self.opts.list and self.opts.list != "installed": print(self.cli.optparser.print_usage()) raise dnf.exceptions.Error(_("argument {}: not allowed with argument {}".format( "--available", "--" + self.opts.list))) elif self.opts.list == "unneeded": q = q._unneeded(self.base.history.swdb) elif self.opts.list and self.opts.list != 'userinstalled': q = getattr(q, self.opts.list)() if self.opts.pkgfilter == "duplicated": installonly = self.base._get_installonly_query(q) q = q.difference(installonly).duplicated() elif self.opts.pkgfilter == "installonly": q = self.base._get_installonly_query(q) elif self.opts.pkgfilter == "unsatisfied": rpmdb = dnf.sack.rpmdb_sack(self.base) rpmdb._configure(self.base.conf.installonlypkgs, self.base.conf.installonly_limit) goal = dnf.goal.Goal(rpmdb) goal.protect_running_kernel = False solved = goal.run(verify=True) if not solved: print(dnf.util._format_resolve_problems(goal.problem_rules())) return elif not self.opts.list: # do not show packages from @System repo q = q.available() # filter repo and arch q = self.filter_repo_arch(self.opts, q) orquery = q if self.opts.file: q.filterm(file__glob=self.opts.file) if self.opts.whatconflicts: rels = q.filter(conflicts__glob=self.opts.whatconflicts) q = rels.union(q.filter(conflicts=self._resolve_nevras(self.opts.whatconflicts, q))) if self.opts.whatobsoletes: q.filterm(obsoletes=self.opts.whatobsoletes) if self.opts.whatprovides: query_for_provide = q.filter(provides__glob=self.opts.whatprovides) if query_for_provide: q = query_for_provide else: q.filterm(file__glob=self.opts.whatprovides) if self.opts.whatrequires: if (self.opts.exactdeps): q.filterm(requires__glob=self.opts.whatrequires) else: q = self.by_all_deps(self.opts.whatrequires, q) if self.opts.whatdepends: if (self.opts.exactdeps): dependsquery = q.filter(requires__glob=self.opts.whatdepends) dependsquery = dependsquery.union(q.filter(recommends__glob=self.opts.whatdepends)) dependsquery = dependsquery.union(q.filter(enhances__glob=self.opts.whatdepends)) dependsquery = dependsquery.union(q.filter(supplements__glob=self.opts.whatdepends)) q = dependsquery.union(q.filter(suggests__glob=self.opts.whatdepends)) else: q = self.by_all_deps(self.opts.whatdepends, q, True) if self.opts.whatrecommends: rels = q.filter(recommends__glob=self.opts.whatrecommends) q = rels.union(q.filter(recommends=self._resolve_nevras(self.opts.whatrecommends, q))) if self.opts.whatenhances: rels = q.filter(enhances__glob=self.opts.whatenhances) q = rels.union(q.filter(enhances=self._resolve_nevras(self.opts.whatenhances, q))) if self.opts.whatsupplements: rels = q.filter(supplements__glob=self.opts.whatsupplements) q = rels.union(q.filter(supplements=self._resolve_nevras(self.opts.whatsupplements, q))) if self.opts.whatsuggests: rels = q.filter(suggests__glob=self.opts.whatsuggests) q = rels.union(q.filter(suggests=self._resolve_nevras(self.opts.whatsuggests, q))) if self.opts.latest_limit: q = 
q.latest(self.opts.latest_limit) # reduce a query to security upgrades if they are specified q = self.base._merge_update_filters(q, warning=False) if self.opts.srpm: pkg_list = [] for pkg in q: srcname = pkg.source_name if srcname is not None: tmp_query = self.base.sack.query().filterm(name=srcname, evr=pkg.evr, arch='src') pkg_list += tmp_query.run() q = self.base.sack.query().filterm(pkg=pkg_list) if self.opts.tree: if not self.opts.whatrequires and self.opts.packageatr not in ( 'conflicts', 'enhances', 'obsoletes', 'provides', 'recommends', 'requires', 'suggests', 'supplements'): raise dnf.exceptions.Error( _("No valid switch specified\nusage: {prog} repoquery [--conflicts|" "--enhances|--obsoletes|--provides|--recommends|--requires|" "--suggest|--supplements|--whatrequires] [key] [--tree]\n\n" "description:\n For the given packages print a tree of the" "packages.").format(prog=dnf.util.MAIN_PROG)) self.tree_seed(q, orquery, self.opts) return pkgs = set() if self.opts.packageatr: rels = set() for pkg in q.run(): if self.opts.list != 'userinstalled' or self.base.history.user_installed(pkg): if self.opts.packageatr == 'depends': rels.update(pkg.requires + pkg.enhances + pkg.suggests + pkg.supplements + pkg.recommends) else: rels.update(getattr(pkg, OPTS_MAPPING[self.opts.packageatr])) if self.opts.resolve: # find the providing packages and show them if self.opts.list == "installed": query = self.filter_repo_arch(self.opts, self.base.sack.query()) else: query = self.filter_repo_arch(self.opts, self.base.sack.query().available()) providers = query.filter(provides=rels) if self.opts.recursive: providers = providers.union( self._get_recursive_providers_query(query, providers)) pkgs = set() for pkg in providers.latest().run(): pkgs.add(self.build_format_fn(self.opts, pkg)) else: pkgs.update(str(rel) for rel in rels) elif self.opts.location: for pkg in q.run(): location = pkg.remote_location() if location is not None: pkgs.add(location) elif self.opts.deplist: pkgs = [] for pkg in sorted(set(q.run())): if self.opts.list != 'userinstalled' or self.base.history.user_installed(pkg): deplist_output = [] deplist_output.append('package: ' + str(pkg)) for req in sorted([str(req) for req in pkg.requires]): deplist_output.append(' dependency: ' + req) subject = dnf.subject.Subject(req) query = subject.get_best_query(self.base.sack) query = self.filter_repo_arch( self.opts, query.available()) if not self.opts.verbose: query = query.latest() for provider in query.run(): deplist_output.append(' provider: ' + str(provider)) pkgs.append('\n'.join(deplist_output)) if pkgs: print('\n\n'.join(pkgs)) return elif self.opts.groupmember: self._group_member_report(q) return else: for pkg in q.run(): if self.opts.list != 'userinstalled' or self.base.history.user_installed(pkg): pkgs.add(self.build_format_fn(self.opts, pkg)) if pkgs: if self.opts.queryinfo: print("\n\n".join(sorted(pkgs))) else: print("\n".join(sorted(pkgs))) def _group_member_report(self, query): package_conf_dict = {} for group in self.base.comps.groups: package_conf_dict[group.id] = set([pkg.name for pkg in group.packages_iter()]) group_package_dict = {} pkg_not_in_group = [] for pkg in query.run(): group_id_list = [] for group_id, package_name_set in package_conf_dict.items(): if pkg.name in package_name_set: group_id_list.append(group_id) if group_id_list: group_package_dict.setdefault( '$'.join(sorted(group_id_list)), []).append(str(pkg)) else: pkg_not_in_group.append(str(pkg)) output = [] for key, package_list in 
sorted(group_package_dict.items()): output.append( '\n'.join(sorted(package_list) + sorted([' @' + id for id in key.split('$')]))) output.append('\n'.join(sorted(pkg_not_in_group))) if output: print('\n'.join(output)) def grow_tree(self, level, pkg, opts): pkg_string = self.build_format_fn(opts, pkg) if level == -1: print(pkg_string) return spacing = " " for x in range(0, level): spacing += "| " requires = [] for requirepkg in pkg.requires: requires.append(str(requirepkg)) reqstr = "[" + str(len(requires)) + ": " + ", ".join(requires) + "]" print(spacing + r"\_ " + pkg_string + " " + reqstr) def tree_seed(self, query, aquery, opts, level=-1, usedpkgs=None): for pkg in sorted(set(query.run()), key=lambda p: p.name): usedpkgs = set() if usedpkgs is None or level == -1 else usedpkgs if pkg.name.startswith("rpmlib") or pkg.name.startswith("solvable"): return self.grow_tree(level, pkg, opts) if pkg not in usedpkgs: usedpkgs.add(pkg) if opts.packageatr: strpkg = getattr(pkg, opts.packageatr) ar = {} for name in set(strpkg): pkgquery = self.base.sack.query().filterm(provides=name) for querypkg in pkgquery: ar[querypkg.name + "." + querypkg.arch] = querypkg pkgquery = self.base.sack.query().filterm(pkg=list(ar.values())) else: pkgquery = self.by_all_deps((pkg.name, ), aquery) if opts.alldeps \ else aquery.filter(requires__glob=pkg.name) self.tree_seed(pkgquery, aquery, opts, level + 1, usedpkgs) class PackageWrapper(object): """Wrapper for dnf.package.Package, so we can control formatting.""" def __init__(self, pkg): self._pkg = pkg def __getattr__(self, attr): atr = getattr(self._pkg, attr) if atr is None: return "(none)" if isinstance(atr, list): return '\n'.join(sorted({dnf.i18n.ucd(reldep) for reldep in atr})) return dnf.i18n.ucd(atr) @staticmethod def _get_timestamp(timestamp): if timestamp > 0: dt = datetime.datetime.utcfromtimestamp(timestamp) return dt.strftime("%Y-%m-%d %H:%M") else: return '' @property def buildtime(self): return self._get_timestamp(self._pkg.buildtime) @property def installtime(self): return self._get_timestamp(self._pkg.installtime) PK!31cli/commands/search.pynu[# search.py # Search CLI command. # # Copyright (C) 2012-2016 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. 
# from __future__ import absolute_import from __future__ import print_function from __future__ import unicode_literals import collections from dnf.cli import commands from dnf.cli.option_parser import OptionParser from dnf.i18n import ucd, _, C_ import dnf.i18n import dnf.match_counter import dnf.util import hawkey import logging logger = logging.getLogger('dnf') class SearchCommand(commands.Command): """A class containing methods needed by the cli to execute the search command. """ aliases = ('search', 'se') summary = _('search package details for the given string') @staticmethod def set_argparser(parser): parser.add_argument('--all', action='store_true', help=_("search also package description and URL")) parser.add_argument('query_string', nargs='+', metavar=_('KEYWORD'), choices=['all'], default=None, action=OptionParser.PkgNarrowCallback, help=_("Keyword to search for")) def _search(self, args): """Search for simple text tags in a package object.""" TRANS_TBL = collections.OrderedDict(( ('name', C_('long', 'Name')), ('summary', C_('long', 'Summary')), ('description', C_('long', 'Description')), ('url', _('URL')), )) def _translate_attr(attr): try: return TRANS_TBL[attr] except: return attr def _print_section_header(exact_match, attrs, keys): trans_attrs = map(_translate_attr, attrs) # TRANSLATORS: separator used between package attributes (eg. Name & Summary & URL) trans_attrs_str = _(' & ').join(trans_attrs) if exact_match: # TRANSLATORS: %s - translated package attributes, # %%s - found keys (in listed attributes) section_text = _('%s Exactly Matched: %%s') % trans_attrs_str else: # TRANSLATORS: %s - translated package attributes, # %%s - found keys (in listed attributes) section_text = _('%s Matched: %%s') % trans_attrs_str formatted = self.base.output.fmtSection(section_text % ", ".join(keys)) print(ucd(formatted)) counter = dnf.match_counter.MatchCounter() for arg in args: self._search_counted(counter, 'name', arg) self._search_counted(counter, 'summary', arg) if self.opts.all: for arg in args: self._search_counted(counter, 'description', arg) self._search_counted(counter, 'url', arg) else: needles = len(args) pkgs = list(counter.keys()) for pkg in pkgs: if len(counter.matched_needles(pkg)) != needles: del counter[pkg] used_attrs = None matched_needles = None exact_match = False print_section_header = False limit = None if not self.base.conf.showdupesfromrepos: limit = self.base.sack.query().filterm(pkg=counter.keys()).latest() seen = set() for pkg in counter.sorted(reverse=True, limit_to=limit): if not self.base.conf.showdupesfromrepos: if pkg.name + pkg.arch in seen: continue seen.add(pkg.name + pkg.arch) if used_attrs != counter.matched_keys(pkg): used_attrs = counter.matched_keys(pkg) print_section_header = True if matched_needles != counter.matched_needles(pkg): matched_needles = counter.matched_needles(pkg) print_section_header = True if exact_match != (counter.matched_haystacks(pkg) == matched_needles): exact_match = counter.matched_haystacks(pkg) == matched_needles print_section_header = True if print_section_header: _print_section_header(exact_match, used_attrs, matched_needles) print_section_header = False self.base.output.matchcallback(pkg, counter.matched_haystacks(pkg), args) if len(counter) == 0: logger.info(_('No matches found.')) def _search_counted(self, counter, attr, needle): fdict = {'%s__substr' % attr : needle} if dnf.util.is_glob_pattern(needle): fdict = {'%s__glob' % attr : needle} q = self.base.sack.query().filterm(hawkey.ICASE, **fdict) for pkg in 
q.run(): counter.add(pkg, attr, needle) return counter def pre_configure(self): if not self.opts.quiet: self.cli.redirect_logger(stdout=logging.WARNING, stderr=logging.INFO) def configure(self): if not self.opts.quiet: self.cli.redirect_repo_progress() demands = self.cli.demands demands.available_repos = True demands.fresh_metadata = False demands.sack_activation = True self.opts.all = self.opts.all or self.opts.query_string_action def run(self): logger.debug(_('Searching Packages: ')) return self._search(self.opts.query_string) PK!-?kl&l&cli/commands/shell.pynu[# shell.py # Shell CLI command. # # Copyright (C) 2016 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # from dnf.cli import commands from dnf.i18n import _, ucd import dnf.util import cmd import copy import dnf import logging import shlex import sys logger = logging.getLogger('dnf') # only demands we'd like to override class ShellDemandSheet(object): available_repos = True resolving = True root_user = True sack_activation = True class ShellCommand(commands.Command, cmd.Cmd): aliases = ('shell', 'sh') summary = _('run an interactive {prog} shell').format(prog=dnf.util.MAIN_PROG_UPPER) MAPPING = {'repo': 'repo', 'repository': 'repo', 'exit': 'quit', 'quit': 'quit', 'run': 'ts_run', 'ts': 'transaction', 'transaction': 'transaction', 'config': 'config', 'resolvedep': 'resolve', 'help': 'help' } def __init__(self, cli): commands.Command.__init__(self, cli) cmd.Cmd.__init__(self) self.prompt = '> ' @staticmethod def set_argparser(parser): parser.add_argument('script', nargs='?', metavar=_('SCRIPT'), help=_('Script to run in {prog} shell').format( prog=dnf.util.MAIN_PROG_UPPER)) def configure(self): # append to ShellDemandSheet missing demands from # dnf.cli.demand.DemandSheet with their default values. default_demands = self.cli.demands self.cli.demands = ShellDemandSheet() for attr in dir(default_demands): if attr.startswith('__'): continue try: getattr(self.cli.demands, attr) except AttributeError: setattr(self.cli.demands, attr, getattr(default_demands, attr)) def run(self): if self.opts.script: self._run_script(self.opts.script) else: self.cmdloop() def _clean(self): self.base._finalize_base() self.base._transaction = None self.base.fill_sack() def onecmd(self, line): if not line or line == '\n': return if line == 'EOF': line = 'quit' try: s_line = shlex.split(line) except: self._help() return # reset option parser before each command, keep usage information self.cli.optparser.__init__(reset_usage=False) opts = self.cli.optparser.parse_main_args(s_line) # Disable shell recursion. 
if opts.command == 'shell': return if opts.command in self.MAPPING: getattr(self, '_' + self.MAPPING[opts.command])(s_line[1::]) else: cmd_cls = self.cli.cli_commands.get(opts.command) if cmd_cls is not None: cmd = cmd_cls(self.cli) try: opts = self.cli.optparser.parse_command_args(cmd, s_line) except SystemExit: # argparse.ArgumentParser prints usage information and executes # sys.exit() on problems with parsing command line arguments return try: cmd.cli.demands = copy.deepcopy(self.cli.demands) cmd.configure() cmd.run() except dnf.exceptions.Error as e: logger.error(_("Error:") + " " + ucd(e)) return else: self._help() def _config(self, args=None): def print_or_set(key, val, conf): if val: setattr(conf, key, val) else: try: print('{}: {}'.format(key, getattr(conf, str(key)))) except: logger.warning(_('Unsupported key value.')) if not args or len(args) > 2: self._help('config') return key = args[0] val = args[1] if len(args) == 2 else None period = key.find('.') if period != -1: repo_name = key[:period] key = key[period+1:] repos = self.base.repos.get_matching(repo_name) for repo in repos: print_or_set(key, val, repo) if not repos: logger.warning(_('Could not find repository: %s'), repo_name) else: print_or_set(key, val, self.base.conf) def _help(self, args=None): """Output help information. :param args: the command to output help information about. If *args* is an empty, general help will be output. """ arg = args[0] if isinstance(args, list) and len(args) > 0 else args msg = None if arg: if arg == 'config': msg = _("""{} arg [value] arg: debuglevel, errorlevel, obsoletes, gpgcheck, assumeyes, exclude, repo_id.gpgcheck, repo_id.exclude If no value is given it prints the current value. If value is given it sets that value.""").format(arg) elif arg == 'help': msg = _("""{} [command] print help""").format(arg) elif arg in ['repo', 'repository']: msg = _("""{} arg [option] list: lists repositories and their status. option = [all | id | glob] enable: enable repositories. option = repository id disable: disable repositories. 
option = repository id""").format(arg) elif arg == 'resolvedep': msg = _("""{} resolve the transaction set""").format(arg) elif arg in ['transaction', 'ts']: msg = _("""{} arg list: lists the contents of the transaction reset: reset (zero-out) the transaction run: run the transaction""").format(arg) elif arg == 'run': msg = _("""{} run the transaction""").format(arg) elif arg in ['exit', 'quit']: msg = _("""{} exit the shell""").format(arg) if not msg: self.cli.optparser.print_help() msg = _("""Shell specific arguments: config set config options help print help repository (or repo) enable, disable or list repositories resolvedep resolve the transaction set transaction (or ts) list, reset or run the transaction set run resolve and run the transaction set exit (or quit) exit the shell""") print('\n' + msg) def _repo(self, args=None): cmd = args[0] if args else None if cmd in ['list', None]: self.onecmd('repolist ' + ' '.join(args[1:])) elif cmd in ['enable', 'disable']: repos = self.cli.base.repos fill_sack = False for repo in args[1::]: r = repos.get_matching(repo) if r: getattr(r, cmd)() fill_sack = True else: logger.critical(_("Error:") + " " + _("Unknown repo: '%s'"), self.base.output.term.bold(repo)) if fill_sack: self.base.fill_sack() # reset base._comps, as it has changed due to changing the repos self.base._comps = None else: self._help('repo') def _resolve(self, args=None): try: self.cli.base.resolve(self.cli.demands.allow_erasing) except dnf.exceptions.DepsolveError as e: print(e) def _run_script(self, file): try: with open(file, 'r') as fd: lines = fd.readlines() for line in lines: if not line.startswith('#'): self.onecmd(line) except IOError: logger.info(_('Error: Cannot open %s for reading'), self.base.output.term.bold(file)) sys.exit(1) def _transaction(self, args=None): cmd = args[0] if args else None if cmd == 'reset': self._clean() return self._resolve() if cmd in ['list', None]: if self.base._transaction: out = self.base.output.list_transaction(self.base._transaction) logger.info(out) elif cmd == 'run': try: self.base.do_transaction() except dnf.exceptions.Error as e: logger.error(_("Error:") + " " + ucd(e)) else: logger.info(_("Complete!")) self._clean() else: self._help('transaction') def _ts_run(self, args=None): self._transaction(['run']) def _quit(self, args=None): logger.info(_('Leaving Shell')) sys.exit(0) PK!s s cli/commands/swap.pynu[# # Copyright (C) 2016 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. 
# from __future__ import absolute_import from __future__ import unicode_literals from dnf.i18n import _ from dnf.cli import commands import dnf.util import logging logger = logging.getLogger("dnf") class SwapCommand(commands.Command): """A class containing methods needed by the cli to execute the swap command. """ aliases = ('swap',) summary = _('run an interactive {prog} mod for remove and install one spec').format( prog=dnf.util.MAIN_PROG_UPPER) @staticmethod def set_argparser(parser): parser.add_argument('remove_spec', action="store", help=_('The specs that will be removed')) parser.add_argument('install_spec', action="store", help=_( 'The specs that will be installed')) def configure(self): demands = self.cli.demands demands.sack_activation = True demands.available_repos = True demands.resolving = True demands.root_user = True commands._checkGPGKey(self.base, self.cli) commands._checkEnabledRepo(self.base, [self.opts.install_spec]) def _perform(self, cmd_str, spec): cmd_cls = self.cli.cli_commands.get(cmd_str) if cmd_cls is not None: cmd = cmd_cls(self.cli) self.cli.optparser.parse_command_args(cmd, [cmd_str, spec]) cmd.run() def run(self): self._perform('remove', self.opts.remove_spec) self._perform('install', self.opts.install_spec) PK!H2J2Jcli/commands/updateinfo.pynu[# updateinfo.py # UpdateInfo CLI command. # # Copyright (C) 2014-2016 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. 
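# Illustrative use of the SwapCommand defined above (hedged sketch; 'old-pkg' and
# 'new-pkg' are placeholder spec names, not taken from the original source):
#
#     dnf swap old-pkg new-pkg
#
# run() only calls _perform('remove', 'old-pkg') followed by
# _perform('install', 'new-pkg'); both marks end up in the same base goal, and the
# single resolving step requested through demands.resolving then computes one
# combined transaction.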
# """UpdateInfo CLI command.""" from __future__ import absolute_import from __future__ import print_function from __future__ import unicode_literals import collections import fnmatch import hawkey from dnf.cli import commands from dnf.cli.option_parser import OptionParser from dnf.i18n import _, exact_width from dnf.pycomp import unicode def _maxlen(iterable): """Return maximum length of items in a non-empty iterable.""" return max(exact_width(item) for item in iterable) class UpdateInfoCommand(commands.Command): """Implementation of the UpdateInfo command.""" TYPE2LABEL = {hawkey.ADVISORY_BUGFIX: _('bugfix'), hawkey.ADVISORY_ENHANCEMENT: _('enhancement'), hawkey.ADVISORY_SECURITY: _('security'), hawkey.ADVISORY_UNKNOWN: _('unknown'), hawkey.ADVISORY_NEWPACKAGE: _('newpackage')} SECURITY2LABEL = {'Critical': _('Critical/Sec.'), 'Important': _('Important/Sec.'), 'Moderate': _('Moderate/Sec.'), 'Low': _('Low/Sec.')} direct_commands = {'list-updateinfo' : 'list', 'list-security' : 'list', 'list-sec' : 'list', 'info-updateinfo' : 'info', 'info-security' : 'info', 'info-sec' : 'info', 'summary-updateinfo' : 'summary'} aliases = ['updateinfo'] + list(direct_commands.keys()) summary = _('display advisories about packages') availability_default = 'available' availabilities = ['installed', 'updates', 'all', availability_default] def __init__(self, cli): """Initialize the command.""" super(UpdateInfoCommand, self).__init__(cli) self._installed_query = None @staticmethod def set_argparser(parser): availability = parser.add_mutually_exclusive_group() availability.add_argument( "--available", dest='_availability', const='available', action='store_const', help=_("advisories about newer versions of installed packages (default)")) availability.add_argument( "--installed", dest='_availability', const='installed', action='store_const', help=_("advisories about equal and older versions of installed packages")) availability.add_argument( "--updates", dest='_availability', const='updates', action='store_const', help=_("advisories about newer versions of those installed packages " "for which a newer version is available")) availability.add_argument( "--all", dest='_availability', const='all', action='store_const', help=_("advisories about any versions of installed packages")) cmds = ['summary', 'list', 'info'] output_format = parser.add_mutually_exclusive_group() output_format.add_argument("--summary", dest='_spec_action', const='summary', action='store_const', help=_('show summary of advisories (default)')) output_format.add_argument("--list", dest='_spec_action', const='list', action='store_const', help=_('show list of advisories')) output_format.add_argument("--info", dest='_spec_action', const='info', action='store_const', help=_('show info of advisories')) parser.add_argument("--with-cve", dest='with_cve', default=False, action='store_true', help=_('show only advisories with CVE reference')) parser.add_argument("--with-bz", dest='with_bz', default=False, action='store_true', help=_('show only advisories with bugzilla reference')) parser.add_argument('spec', nargs='*', metavar='SPEC', choices=cmds, default=cmds[0], action=OptionParser.PkgNarrowCallback, help=_("Package specification")) def configure(self): """Do any command-specific configuration based on command arguments.""" self.cli.demands.available_repos = True self.cli.demands.sack_activation = True if self.opts.command in self.direct_commands: # we were called with direct command self.opts.spec_action = self.direct_commands[self.opts.command] else: 
if self.opts._spec_action: self.opts.spec_action = self.opts._spec_action if self.opts._availability: self.opts.availability = self.opts._availability else: # yum compatibility - search for all|available|installed|updates in spec[0] if not self.opts.spec or self.opts.spec[0] not in self.availabilities: self.opts.availability = self.availability_default else: self.opts.availability = self.opts.spec.pop(0) # filtering by advisory types (security/bugfix/enhancement/newpackage) self.opts._advisory_types = set() if self.opts.bugfix: self.opts._advisory_types.add(hawkey.ADVISORY_BUGFIX) if self.opts.enhancement: self.opts._advisory_types.add(hawkey.ADVISORY_ENHANCEMENT) if self.opts.newpackage: self.opts._advisory_types.add(hawkey.ADVISORY_NEWPACKAGE) if self.opts.security: self.opts._advisory_types.add(hawkey.ADVISORY_SECURITY) # yum compatibility - yum accepts types also as positional arguments if self.opts.spec: spec = self.opts.spec.pop(0) if spec == 'bugfix': self.opts._advisory_types.add(hawkey.ADVISORY_BUGFIX) elif spec == 'enhancement': self.opts._advisory_types.add(hawkey.ADVISORY_ENHANCEMENT) elif spec in ('security', 'sec'): self.opts._advisory_types.add(hawkey.ADVISORY_SECURITY) elif spec == 'newpackage': self.opts._advisory_types.add(hawkey.ADVISORY_NEWPACKAGE) elif spec in ('bugzillas', 'bzs'): self.opts.with_bz = True elif spec == 'cves': self.opts.with_cve = True else: self.opts.spec.insert(0, spec) if self.opts.advisory: self.opts.spec.extend(self.opts.advisory) def run(self): """Execute the command with arguments.""" if self.opts.availability == 'installed': apkg_adv_insts = self.installed_apkg_adv_insts(self.opts.spec) description = _('installed') elif self.opts.availability == 'updates': apkg_adv_insts = self.updating_apkg_adv_insts(self.opts.spec) description = _('updates') elif self.opts.availability == 'all': apkg_adv_insts = self.all_apkg_adv_insts(self.opts.spec) description = _('all') else: apkg_adv_insts = self.available_apkg_adv_insts(self.opts.spec) description = _('available') if self.opts.spec_action == 'list': self.display_list(apkg_adv_insts) elif self.opts.spec_action == 'info': self.display_info(apkg_adv_insts) else: self.display_summary(apkg_adv_insts, description) def _newer_equal_installed(self, apackage): if self._installed_query is None: self._installed_query = self.base.sack.query().installed().apply() q = self._installed_query.filter(name=apackage.name, evr__gte=apackage.evr) return len(q) > 0 def _advisory_matcher(self, advisory): if not self.opts._advisory_types \ and not self.opts.spec \ and not self.opts.severity \ and not self.opts.bugzilla \ and not self.opts.cves \ and not self.opts.with_cve \ and not self.opts.with_bz: return True if advisory.type in self.opts._advisory_types: return True if any(fnmatch.fnmatchcase(advisory.id, pat) for pat in self.opts.spec): return True if self.opts.severity and advisory.severity in self.opts.severity: return True if self.opts.bugzilla and any([advisory.match_bug(bug) for bug in self.opts.bugzilla]): return True if self.opts.cves and any([advisory.match_cve(cve) for cve in self.opts.cves]): return True if self.opts.with_cve: if any([ref.type == hawkey.REFERENCE_CVE for ref in advisory.references]): return True if self.opts.with_bz: if any([ref.type == hawkey.REFERENCE_BUGZILLA for ref in advisory.references]): return True return False def _apackage_advisory_installed(self, pkgs_query, cmptype, specs): """Return (adv. 
package, advisory, installed) triplets.""" for apackage in pkgs_query.get_advisory_pkgs(cmptype): advisory = apackage.get_advisory(self.base.sack) advisory_match = self._advisory_matcher(advisory) apackage_match = any(fnmatch.fnmatchcase(apackage.name, pat) for pat in self.opts.spec) if advisory_match or apackage_match: installed = self._newer_equal_installed(apackage) yield apackage, advisory, installed def running_kernel_pkgs(self): """Return query containing packages of currently running kernel""" sack = self.base.sack q = sack.query().filterm(empty=True) kernel = sack.get_running_kernel() if kernel: q = q.union(sack.query().filterm(sourcerpm=kernel.sourcerpm)) return q def available_apkg_adv_insts(self, specs): """Return available (adv. package, adv., inst.) triplets""" # check advisories for the latest installed packages q = self.base.sack.query().installed().latest(1) # plus packages of the running kernel q = q.union(self.running_kernel_pkgs().installed()) return self._apackage_advisory_installed(q, hawkey.GT, specs) def installed_apkg_adv_insts(self, specs): """Return installed (adv. package, adv., inst.) triplets""" return self._apackage_advisory_installed( self.base.sack.query().installed(), hawkey.LT | hawkey.EQ, specs) def updating_apkg_adv_insts(self, specs): """Return updating (adv. package, adv., inst.) triplets""" return self._apackage_advisory_installed( self.base.sack.query().filterm(upgradable=True), hawkey.GT, specs) def all_apkg_adv_insts(self, specs): """Return installed (adv. package, adv., inst.) triplets""" return self._apackage_advisory_installed( self.base.sack.query().installed(), hawkey.LT | hawkey.EQ | hawkey.GT, specs) def _summary(self, apkg_adv_insts): """Make the summary of advisories.""" # Remove duplicate advisory IDs. We assume that the ID is unique within # a repository and two advisories with the same IDs in different # repositories must have the same type. id2type = {} for (apkg, advisory, installed) in apkg_adv_insts: id2type[advisory.id] = advisory.type if advisory.type == hawkey.ADVISORY_SECURITY: id2type[(advisory.id, advisory.severity)] = (advisory.type, advisory.severity) return collections.Counter(id2type.values()) def display_summary(self, apkg_adv_insts, description): """Display the summary of advisories.""" typ2cnt = self._summary(apkg_adv_insts) if typ2cnt: print(_('Updates Information Summary: ') + description) # Convert types to strings and order the entries. 
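            # Illustrative shape of typ2cnt (assumed example values, not real output):
            #   Counter({hawkey.ADVISORY_BUGFIX: 3,
            #            hawkey.ADVISORY_SECURITY: 2,
            #            (hawkey.ADVISORY_SECURITY, 'Important'): 1,
            #            (hawkey.ADVISORY_SECURITY, 'Moderate'): 1})
            # Plain type keys count every advisory of that type; the (type, severity)
            # tuple keys added by _summary() break the security count down per severity.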
label_counts = [ (0, _('New Package notice(s)'), typ2cnt[hawkey.ADVISORY_NEWPACKAGE]), (0, _('Security notice(s)'), typ2cnt[hawkey.ADVISORY_SECURITY]), (1, _('Critical Security notice(s)'), typ2cnt[(hawkey.ADVISORY_SECURITY, 'Critical')]), (1, _('Important Security notice(s)'), typ2cnt[(hawkey.ADVISORY_SECURITY, 'Important')]), (1, _('Moderate Security notice(s)'), typ2cnt[(hawkey.ADVISORY_SECURITY, 'Moderate')]), (1, _('Low Security notice(s)'), typ2cnt[(hawkey.ADVISORY_SECURITY, 'Low')]), (1, _('Unknown Security notice(s)'), typ2cnt[(hawkey.ADVISORY_SECURITY, None)]), (0, _('Bugfix notice(s)'), typ2cnt[hawkey.ADVISORY_BUGFIX]), (0, _('Enhancement notice(s)'), typ2cnt[hawkey.ADVISORY_ENHANCEMENT]), (0, _('other notice(s)'), typ2cnt[hawkey.ADVISORY_UNKNOWN])] width = _maxlen(unicode(v[2]) for v in label_counts if v[2]) for indent, label, count in label_counts: if not count: continue print(' %*s %s' % (width + 4 * indent, unicode(count), label)) if self.base.conf.autocheck_running_kernel: self.cli._check_running_kernel() def display_list(self, apkg_adv_insts): """Display the list of advisories.""" def inst2mark(inst): if not self.opts.availability == 'all': return '' elif inst: return 'i ' else: return ' ' def type2label(typ, sev): if typ == hawkey.ADVISORY_SECURITY: return self.SECURITY2LABEL.get(sev, _('Unknown/Sec.')) else: return self.TYPE2LABEL.get(typ, _('unknown')) nevra_inst_dict = dict() for apkg, advisory, installed in apkg_adv_insts: nevra = '%s-%s.%s' % (apkg.name, apkg.evr, apkg.arch) if self.opts.with_cve or self.opts.with_bz: for ref in advisory.references: if ref.type == hawkey.REFERENCE_BUGZILLA and not self.opts.with_bz: continue elif ref.type == hawkey.REFERENCE_CVE and not self.opts.with_cve: continue nevra_inst_dict.setdefault((nevra, installed, advisory.updated), dict())[ref.id] = ( advisory.type, advisory.severity) else: nevra_inst_dict.setdefault((nevra, installed, advisory.updated), dict())[advisory.id] = ( advisory.type, advisory.severity) advlist = [] # convert types to labels, find max len of advisory IDs and types idw = tlw = nw = 0 for (nevra, inst, aupdated), id2type in sorted(nevra_inst_dict.items(), key=lambda x: x[0]): nw = max(nw, len(nevra)) for aid, atypesev in id2type.items(): idw = max(idw, len(aid)) label = type2label(*atypesev) tlw = max(tlw, len(label)) advlist.append((inst2mark(inst), aid, label, nevra, aupdated)) for (inst, aid, label, nevra, aupdated) in advlist: if self.base.conf.verbose: print('%s%-*s %-*s %-*s %s' % (inst, idw, aid, tlw, label, nw, nevra, aupdated)) else: print('%s%-*s %-*s %s' % (inst, idw, aid, tlw, label, nevra)) def display_info(self, apkg_adv_insts): """Display the details about available advisories.""" arches = self.base.sack.list_arches() verbose = self.base.conf.verbose labels = (_('Update ID'), _('Type'), _('Updated'), _('Bugs'), _('CVEs'), _('Description'), _('Severity'), _('Rights'), _('Files'), _('Installed')) def advisory2info(advisory, installed): attributes = [ [advisory.id], [self.TYPE2LABEL.get(advisory.type, _('unknown'))], [unicode(advisory.updated)], [], [], (advisory.description or '').splitlines(), [advisory.severity], (advisory.rights or '').splitlines(), sorted(set(pkg.filename for pkg in advisory.packages if pkg.arch in arches)), None] for ref in advisory.references: if ref.type == hawkey.REFERENCE_BUGZILLA: attributes[3].append('{} - {}'.format(ref.id, ref.title or '')) elif ref.type == hawkey.REFERENCE_CVE: attributes[4].append(ref.id) attributes[3].sort() attributes[4].sort() if not verbose: 
attributes[7] = None attributes[8] = None if self.opts.availability == 'all': attributes[9] = [_('true') if installed else _('false')] width = _maxlen(labels) lines = [] lines.append('=' * 79) lines.append(' ' + advisory.title) lines.append('=' * 79) for label, atr_lines in zip(labels, attributes): if atr_lines in (None, [None]): continue for i, line in enumerate(atr_lines): key = label if i == 0 else '' key_padding = width - exact_width(key) lines.append('%*s%s: %s' % (key_padding, "", key, line)) return '\n'.join(lines) advisories = set() for apkg, advisory, installed in apkg_adv_insts: advisories.add(advisory2info(advisory, installed)) print("\n\n".join(sorted(advisories, key=lambda x: x.lower()))) PK!}gO~~cli/commands/upgrade.pynu[# upgrade.py # Upgrade CLI command. # # Copyright (C) 2014-2016 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # from __future__ import absolute_import from __future__ import unicode_literals import logging import dnf.exceptions import dnf.base from dnf.cli import commands from dnf.cli.option_parser import OptionParser from dnf.i18n import _ logger = logging.getLogger('dnf') class UpgradeCommand(commands.Command): """A class containing methods needed by the cli to execute the update command. """ aliases = ('upgrade', 'update', 'upgrade-to', 'update-to', 'localupdate', 'up') summary = _('upgrade a package or packages on your system') @staticmethod def set_argparser(parser): parser.add_argument('packages', nargs='*', help=_('Package to upgrade'), action=OptionParser.ParseSpecGroupFileCallback, metavar=_('PACKAGE')) def configure(self): """Verify that conditions are met so that this command can run. These include that there are enabled repositories with gpg keys, and that this command is being run by the root user. 
""" demands = self.cli.demands demands.sack_activation = True demands.available_repos = True demands.resolving = True demands.root_user = True commands._checkGPGKey(self.base, self.cli) if not self.opts.filenames: commands._checkEnabledRepo(self.base) self.upgrade_minimal = None self.all_security = None self.skipped_grp_specs = None def run(self): cmp_type = "eq" if self.upgrade_minimal else "gte" self.cli._populate_update_security_filter(self.opts, cmp_type=cmp_type, all=self.all_security) if self.opts.filenames or self.opts.pkg_specs or self.opts.grp_specs: result = False result |= self._update_modules() result |= self._update_files() result |= self._update_packages() result |= self._update_groups() if result: return else: self.base.upgrade_all() return raise dnf.exceptions.Error(_('No packages marked for upgrade.')) def _update_modules(self): group_specs_num = len(self.opts.grp_specs) if dnf.base.WITH_MODULES: module_base = dnf.module.module_base.ModuleBase(self.base) self.skipped_grp_specs = module_base.upgrade(self.opts.grp_specs) else: self.skipped_grp_specs = self.opts.grp_specs return len(self.skipped_grp_specs) != group_specs_num def _update_files(self): success = False if self.opts.filenames: for pkg in self.base.add_remote_rpms(self.opts.filenames, strict=False, progress=self.base.output.progress): try: self.base.package_upgrade(pkg) success = True except dnf.exceptions.MarkingError as e: logger.info(_('No match for argument: %s'), self.base.output.term.bold(pkg.location)) return success def _update_packages(self): success = False for pkg_spec in self.opts.pkg_specs: try: self.base.upgrade(pkg_spec) success = True except dnf.exceptions.MarkingError as e: logger.info(_('No match for argument: %s'), self.base.output.term.bold(pkg_spec)) return success def _update_groups(self): if self.skipped_grp_specs: self.base.env_group_upgrade(self.skipped_grp_specs) return True return False PK!;{cli/commands/upgrademinimal.pynu[# # Copyright (C) 2016 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # from __future__ import absolute_import from __future__ import unicode_literals from dnf.i18n import _ from dnf.cli.commands.upgrade import UpgradeCommand class UpgradeMinimalCommand(UpgradeCommand): """A class containing methods needed by the cli to execute the check command. 
""" aliases = ('upgrade-minimal', 'update-minimal', 'up-min') summary = _("upgrade, but only 'newest' package match which fixes a problem" " that affects your system") def configure(self): UpgradeCommand.configure(self) self.upgrade_minimal = True if not any([self.opts.bugfix, self.opts.enhancement, self.opts.newpackage, self.opts.security, self.opts.advisory, self.opts.bugzilla, self.opts.cves, self.opts.severity]): self.all_security = True PK!; acli/__init__.pynu[# __init__.py # DNF cli subpackage. # # Copyright (C) 2012-2016 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # from __future__ import absolute_import import dnf.exceptions class CliError(dnf.exceptions.Error): """CLI Exception. :api""" pass from dnf.cli.cli import Cli # :api from dnf.cli.commands import Command # :api PK!6D}cli/aliases.pynu[# aliases.py # Resolving aliases in CLI arguments. # # Copyright (C) 2018 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. 
# from __future__ import absolute_import from __future__ import unicode_literals from dnf.i18n import _ import collections import dnf.cli from dnf.conf.config import PRIO_DEFAULT import dnf.exceptions import libdnf.conf import logging import os import os.path logger = logging.getLogger('dnf') ALIASES_DROPIN_DIR = '/etc/dnf/aliases.d/' ALIASES_CONF_PATH = os.path.join(ALIASES_DROPIN_DIR, 'ALIASES.conf') ALIASES_USER_PATH = os.path.join(ALIASES_DROPIN_DIR, 'USER.conf') class AliasesConfig(object): def __init__(self, path): self._path = path self._parser = libdnf.conf.ConfigParser() self._parser.read(self._path) @property def enabled(self): option = libdnf.conf.OptionBool(True) try: option.set(PRIO_DEFAULT, self._parser.getData()["main"]["enabled"]) except IndexError: pass return option.getValue() @property def aliases(self): result = collections.OrderedDict() section = "aliases" if not self._parser.hasSection(section): return result for key in self._parser.options(section): value = self._parser.getValue(section, key) if not value: continue result[key] = value.split() return result class Aliases(object): def __init__(self): self.aliases = collections.OrderedDict() self.conf = None self.enabled = True if self._disabled_by_environ(): self.enabled = False return self._load_main() if not self.enabled: return self._load_aliases() def _disabled_by_environ(self): option = libdnf.conf.OptionBool(True) try: option.set(PRIO_DEFAULT, os.environ['DNF_DISABLE_ALIASES']) return option.getValue() except KeyError: return False except RuntimeError: logger.warning( _('Unexpected value of environment variable: ' 'DNF_DISABLE_ALIASES=%s'), os.environ['DNF_DISABLE_ALIASES']) return True def _load_conf(self, path): try: return AliasesConfig(path) except RuntimeError as e: raise dnf.exceptions.ConfigError( _('Parsing file "%s" failed: %s') % (path, e)) except IOError as e: raise dnf.exceptions.ConfigError( _('Cannot read file "%s": %s') % (path, e)) def _load_main(self): try: self.conf = self._load_conf(ALIASES_CONF_PATH) self.enabled = self.conf.enabled except dnf.exceptions.ConfigError as e: logger.debug(_('Config error: %s'), e) def _load_aliases(self, filenames=None): if filenames is None: try: filenames = self._dropin_dir_filenames() except dnf.exceptions.ConfigError: return for filename in filenames: try: conf = self._load_conf(filename) if conf.enabled: self.aliases.update(conf.aliases) except dnf.exceptions.ConfigError as e: logger.warning(_('Config error: %s'), e) def _dropin_dir_filenames(self): # Get default aliases config filenames: # all files from ALIASES_DROPIN_DIR, # and ALIASES_USER_PATH as the last one (-> override all others) ignored_filenames = [os.path.basename(ALIASES_CONF_PATH), os.path.basename(ALIASES_USER_PATH)] def _ignore_filename(filename): return filename in ignored_filenames or\ filename.startswith('.') or\ not filename.endswith(('.conf', '.CONF')) filenames = [] try: if not os.path.exists(ALIASES_DROPIN_DIR): os.mkdir(ALIASES_DROPIN_DIR) for fn in sorted(os.listdir(ALIASES_DROPIN_DIR)): if _ignore_filename(fn): continue filenames.append(os.path.join(ALIASES_DROPIN_DIR, fn)) except (IOError, OSError) as e: raise dnf.exceptions.ConfigError(e) if os.path.exists(ALIASES_USER_PATH): filenames.append(ALIASES_USER_PATH) return filenames def _resolve(self, args): stack = [] self.prefix_options = [] def store_prefix(args): num = 0 for arg in args: if arg and arg[0] != '-': break num += 1 self.prefix_options += args[:num] return args[num:] def subresolve(args): suffix = store_prefix(args) if 
(not suffix or # Current alias on stack is resolved suffix[0] not in self.aliases or # End resolving suffix[0].startswith('\\')): # End resolving try: stack.pop() # strip the '\' if it exists if suffix[0].startswith('\\'): suffix[0] = suffix[0][1:] except IndexError: pass return suffix if suffix[0] in stack: # Infinite recursion detected raise dnf.exceptions.Error( _('Aliases contain infinite recursion')) # Next word must be an alias stack.append(suffix[0]) current_alias_result = subresolve(self.aliases[suffix[0]]) if current_alias_result: # We reached non-alias or '\' return current_alias_result + suffix[1:] else: # Need to resolve aliases in the rest return subresolve(suffix[1:]) suffix = subresolve(args) return self.prefix_options + suffix def resolve(self, args): if self.enabled: try: args = self._resolve(args) except dnf.exceptions.Error as e: logger.error(_('%s, using original arguments.'), e) return args PK!4땱 cli/cli.pynu[# Copyright 2005 Duke University # Copyright (C) 2012-2016 Red Hat, Inc. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Library General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Written by Seth Vidal """ Command line interface yum class and related. """ from __future__ import print_function from __future__ import absolute_import from __future__ import unicode_literals try: from collections.abc import Sequence except ImportError: from collections import Sequence import datetime import logging import operator import os import random import rpm import sys import time import hawkey import libdnf.transaction from . import output from dnf.cli import CliError from dnf.i18n import ucd, _ import dnf import dnf.cli.aliases import dnf.cli.commands import dnf.cli.commands.alias import dnf.cli.commands.autoremove import dnf.cli.commands.check import dnf.cli.commands.clean import dnf.cli.commands.deplist import dnf.cli.commands.distrosync import dnf.cli.commands.downgrade import dnf.cli.commands.group import dnf.cli.commands.history import dnf.cli.commands.install import dnf.cli.commands.makecache import dnf.cli.commands.mark import dnf.cli.commands.module import dnf.cli.commands.reinstall import dnf.cli.commands.remove import dnf.cli.commands.repolist import dnf.cli.commands.repoquery import dnf.cli.commands.search import dnf.cli.commands.shell import dnf.cli.commands.swap import dnf.cli.commands.updateinfo import dnf.cli.commands.upgrade import dnf.cli.commands.upgrademinimal import dnf.cli.demand import dnf.cli.format import dnf.cli.option_parser import dnf.conf import dnf.conf.substitutions import dnf.const import dnf.db.history import dnf.exceptions import dnf.logging import dnf.persistor import dnf.plugin import dnf.rpm import dnf.sack import dnf.transaction import dnf.util import dnf.yum.misc logger = logging.getLogger('dnf') def _add_pkg_simple_list_lens(data, pkg, indent=''): """ Get the length of each pkg's column. Add that to data. 
This "knows" about simpleList and printVer. """ na = len(pkg.name) + 1 + len(pkg.arch) + len(indent) ver = len(pkg.evr) rid = len(pkg._from_repo) for (d, v) in (('na', na), ('ver', ver), ('rid', rid)): data[d].setdefault(v, 0) data[d][v] += 1 def _list_cmd_calc_columns(output, ypl): """ Work out the dynamic size of the columns to pass to fmtColumns. """ data = {'na' : {}, 'ver' : {}, 'rid' : {}} for lst in (ypl.installed, ypl.available, ypl.extras, ypl.autoremove, ypl.updates, ypl.recent): for pkg in lst: _add_pkg_simple_list_lens(data, pkg) if len(ypl.obsoletes) > 0: for (npkg, opkg) in ypl.obsoletesTuples: _add_pkg_simple_list_lens(data, npkg) _add_pkg_simple_list_lens(data, opkg, indent=" " * 4) data = [data['na'], data['ver'], data['rid']] columns = output.calcColumns(data, remainder_column=1) return (-columns[0], -columns[1], -columns[2]) def print_versions(pkgs, base, output): def sm_ui_time(x): return time.strftime("%c", time.gmtime(x)) rpmdb_sack = dnf.sack.rpmdb_sack(base) done = False for pkg in rpmdb_sack.query().installed().filterm(name=pkgs): if done: print("") done = True if pkg.epoch == '0': ver = '%s-%s.%s' % (pkg.version, pkg.release, pkg.arch) else: ver = '%s:%s-%s.%s' % (pkg.epoch, pkg.version, pkg.release, pkg.arch) name = output.term.bold(pkg.name) print(_(" Installed: %s-%s at %s") %(name, ver, sm_ui_time(pkg.installtime))) print(_(" Built : %s at %s") % (pkg.packager if pkg.packager else "", sm_ui_time(pkg.buildtime))) # :hawkey, no changelist information yet # print(_(" Committed: %s at %s") % (pkg.committer, # sm_ui_date(pkg.committime))) def report_module_switch(switchedModules): msg1 = _("The operation would result in switching of module '{0}' stream '{1}' to " "stream '{2}'") for moduleName, streams in switchedModules.items(): logger.warning(msg1.format(moduleName, streams[0], streams[1])) class BaseCli(dnf.Base): """This is the base class for yum cli.""" def __init__(self, conf=None): conf = conf or dnf.conf.Conf() super(BaseCli, self).__init__(conf=conf) self.output = output.Output(self, self.conf) def do_transaction(self, display=()): """Take care of package downloading, checking, user confirmation and actually running the transaction. :param display: `rpm.callback.TransactionProgress` object(s) :return: history database transaction ID or None """ if dnf.base.WITH_MODULES: if not self.conf.module_stream_switch: switchedModules = dict(self._moduleContainer.getSwitchedStreams()) if switchedModules: report_module_switch(switchedModules) msg = _("It is not possible to switch enabled streams of a module unless explicitly " "enabled via configuration option module_stream_switch.\n" "It is recommended to rather remove all installed content from the module, and " "reset the module using '{prog} module reset ' command. After " "you reset the module, you can install the other stream.").format( prog=dnf.util.MAIN_PROG) raise dnf.exceptions.Error(msg) trans = self.transaction pkg_str = self.output.list_transaction(trans) if pkg_str: logger.info(pkg_str) if trans: # Check which packages have to be downloaded install_pkgs = [] rmpkgs = [] install_only = True for tsi in trans: if tsi.action in dnf.transaction.FORWARD_ACTIONS: install_pkgs.append(tsi.pkg) elif tsi.action in dnf.transaction.BACKWARD_ACTIONS: install_only = False rmpkgs.append(tsi.pkg) # Close the connection to the rpmdb so that rpm doesn't hold the # SIGINT handler during the downloads. 
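            # NOTE (assumption, based on dnf.Base rather than code in this file): the
            # del below goes through the Base._ts property deleter, which closes the
            # underlying rpm transaction set; a new one is created lazily on next access.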
del self._ts # report the total download size to the user if not install_pkgs: self.output.reportRemoveSize(rmpkgs) else: self.output.reportDownloadSize(install_pkgs, install_only) if trans or self._moduleContainer.isChanged() or \ (self._history and (self._history.group or self._history.env)): # confirm with user if self.conf.downloadonly: logger.info(_("{prog} will only download packages for the transaction.").format( prog=dnf.util.MAIN_PROG_UPPER)) elif 'test' in self.conf.tsflags: logger.info(_("{prog} will only download packages, install gpg keys, and check the " "transaction.").format(prog=dnf.util.MAIN_PROG_UPPER)) if self._promptWanted(): if self.conf.assumeno or not self.output.userconfirm(): raise CliError(_("Operation aborted.")) else: logger.info(_('Nothing to do.')) return if trans: if install_pkgs: logger.info(_('Downloading Packages:')) try: total_cb = self.output.download_callback_total_cb self.download_packages(install_pkgs, self.output.progress, total_cb) except dnf.exceptions.DownloadError as e: specific = dnf.cli.format.indent_block(ucd(e)) errstr = _('Error downloading packages:') + '\n%s' % specific # setting the new line to prevent next chars being eaten up # by carriage returns print() raise dnf.exceptions.Error(errstr) # Check GPG signatures self.gpgsigcheck(install_pkgs) if self.conf.downloadonly: return if not isinstance(display, Sequence): display = [display] display = [output.CliTransactionDisplay()] + list(display) tid = super(BaseCli, self).do_transaction(display) # display last transaction (which was closed during do_transaction()) if tid is not None: trans = self.history.old([tid])[0] trans = dnf.db.group.RPMTransaction(self.history, trans._trans) else: trans = None if trans: # the post transaction summary is already written to log during # Base.do_transaction() so here only print the messages to the # user arranged in columns print() print('\n'.join(self.output.post_transaction_output(trans))) print() for tsi in trans: if tsi.state == libdnf.transaction.TransactionItemState_ERROR: raise dnf.exceptions.Error(_('Transaction failed')) return tid def gpgsigcheck(self, pkgs): """Perform GPG signature verification on the given packages, installing keys if possible. :param pkgs: a list of package objects to verify the GPG signatures of :raises: Will raise :class:`Error` if there's a problem """ error_messages = [] for po in pkgs: result, errmsg = self._sig_check_pkg(po) if result == 0: # Verified ok, or verify not req'd continue elif result == 1: ay = self.conf.assumeyes and not self.conf.assumeno if (not sys.stdin or not sys.stdin.isatty()) and not ay: raise dnf.exceptions.Error(_('Refusing to automatically import keys when running ' \ 'unattended.\nUse "-y" to override.')) # the callback here expects to be able to take options which # userconfirm really doesn't... 
so fake it fn = lambda x, y, z: self.output.userconfirm() try: self._get_key_for_package(po, fn) except (dnf.exceptions.Error, ValueError) as e: error_messages.append(str(e)) else: # Fatal error error_messages.append(errmsg) if error_messages: for msg in error_messages: logger.critical(msg) raise dnf.exceptions.Error(_("GPG check FAILED")) def latest_changelogs(self, package): """Return list of changelogs for package newer then installed version""" newest = None # find the date of the newest changelog for installed version of package # stored in rpmdb for mi in self._rpmconn.readonly_ts.dbMatch('name', package.name): changelogtimes = mi[rpm.RPMTAG_CHANGELOGTIME] if changelogtimes: newest = datetime.date.fromtimestamp(changelogtimes[0]) break chlogs = [chlog for chlog in package.changelogs if newest is None or chlog['timestamp'] > newest] return chlogs def format_changelog(self, changelog): """Return changelog formatted as in spec file""" chlog_str = '* %s %s\n%s\n' % ( changelog['timestamp'].strftime("%a %b %d %X %Y"), dnf.i18n.ucd(changelog['author']), dnf.i18n.ucd(changelog['text'])) return chlog_str def print_changelogs(self, packages): # group packages by src.rpm to avoid showing duplicate changelogs bysrpm = dict() for p in packages: # there are packages without source_name, use name then. bysrpm.setdefault(p.source_name or p.name, []).append(p) for source_name in sorted(bysrpm.keys()): bin_packages = bysrpm[source_name] print(_("Changelogs for {}").format(', '.join([str(pkg) for pkg in bin_packages]))) for chl in self.latest_changelogs(bin_packages[0]): print(self.format_changelog(chl)) def check_updates(self, patterns=(), reponame=None, print_=True, changelogs=False): """Check updates matching given *patterns* in selected repository.""" ypl = self.returnPkgLists('upgrades', patterns, reponame=reponame) if self.conf.obsoletes or self.conf.verbose: typl = self.returnPkgLists('obsoletes', patterns, reponame=reponame) ypl.obsoletes = typl.obsoletes ypl.obsoletesTuples = typl.obsoletesTuples if print_: columns = _list_cmd_calc_columns(self.output, ypl) if len(ypl.updates) > 0: local_pkgs = {} highlight = self.output.term.MODE['bold'] if highlight: # Do the local/remote split we get in "yum updates" for po in sorted(ypl.updates): local = po.localPkg() if os.path.exists(local) and po.verifyLocalPkg(): local_pkgs[(po.name, po.arch)] = po cul = self.conf.color_update_local cur = self.conf.color_update_remote self.output.listPkgs(ypl.updates, '', outputType='list', highlight_na=local_pkgs, columns=columns, highlight_modes={'=' : cul, 'not in' : cur}) if changelogs: self.print_changelogs(ypl.updates) if len(ypl.obsoletes) > 0: print(_('Obsoleting Packages')) # The tuple is (newPkg, oldPkg) ... so sort by new for obtup in sorted(ypl.obsoletesTuples, key=operator.itemgetter(0)): self.output.updatesObsoletesList(obtup, 'obsoletes', columns=columns) return ypl.updates or ypl.obsoletes def distro_sync_userlist(self, userlist): """ Upgrade or downgrade packages to match the latest versions available in the enabled repositories. 
:return: (exit_code, [ errors ]) exit_code is:: 0 = we're done, exit 1 = we've errored, exit with error string 2 = we've got work yet to do, onto the next stage """ oldcount = self._goal.req_length() if len(userlist) == 0: self.distro_sync() else: for pkg_spec in userlist: self.distro_sync(pkg_spec) cnt = self._goal.req_length() - oldcount if cnt <= 0 and not self._goal.req_has_distupgrade_all(): msg = _('No packages marked for distribution synchronization.') raise dnf.exceptions.Error(msg) def downgradePkgs(self, specs=[], file_pkgs=[], strict=False): """Attempt to take the user specified list of packages or wildcards and downgrade them. If a complete version number is specified, attempt to downgrade them to the specified version :param specs: a list of names or wildcards specifying packages to downgrade :param file_pkgs: a list of pkg objects from local files """ result = False for pkg in file_pkgs: try: self.package_downgrade(pkg, strict=strict) result = True except dnf.exceptions.MarkingError as e: logger.info(_('No match for argument: %s'), self.output.term.bold(pkg.location)) for arg in specs: try: self.downgrade_to(arg, strict=strict) result = True except dnf.exceptions.PackageNotFoundError as err: msg = _('No package %s available.') logger.info(msg, self.output.term.bold(arg)) except dnf.exceptions.PackagesNotInstalledError as err: logger.info(_('Packages for argument %s available, but not installed.'), self.output.term.bold(err.pkg_spec)) except dnf.exceptions.MarkingError: assert False if not result: raise dnf.exceptions.Error(_('No packages marked for downgrade.')) def output_packages(self, basecmd, pkgnarrow='all', patterns=(), reponame=None): """Output selection *pkgnarrow* of packages matching *patterns* and *repoid*.""" try: highlight = self.output.term.MODE['bold'] ypl = self.returnPkgLists( pkgnarrow, patterns, installed_available=highlight, reponame=reponame) except dnf.exceptions.Error as e: return 1, [str(e)] else: update_pkgs = {} inst_pkgs = {} local_pkgs = {} columns = None if basecmd == 'list': # Dynamically size the columns columns = _list_cmd_calc_columns(self.output, ypl) if highlight and ypl.installed: # If we have installed and available lists, then do the # highlighting for the installed packages so you can see what's # available to update, an extra, or newer than what we have. for pkg in (ypl.hidden_available + ypl.reinstall_available + ypl.old_available): key = (pkg.name, pkg.arch) if key not in update_pkgs or pkg > update_pkgs[key]: update_pkgs[key] = pkg if highlight and ypl.available: # If we have installed and available lists, then do the # highlighting for the available packages so you can see what's # available to install vs. update vs. old. 
for pkg in ypl.hidden_installed: key = (pkg.name, pkg.arch) if key not in inst_pkgs or pkg > inst_pkgs[key]: inst_pkgs[key] = pkg if highlight and ypl.updates: # Do the local/remote split we get in "yum updates" for po in sorted(ypl.updates): if po.reponame != hawkey.SYSTEM_REPO_NAME: local_pkgs[(po.name, po.arch)] = po # Output the packages: clio = self.conf.color_list_installed_older clin = self.conf.color_list_installed_newer clir = self.conf.color_list_installed_reinstall clie = self.conf.color_list_installed_extra rip = self.output.listPkgs(ypl.installed, _('Installed Packages'), basecmd, highlight_na=update_pkgs, columns=columns, highlight_modes={'>' : clio, '<' : clin, '=' : clir, 'not in' : clie}) clau = self.conf.color_list_available_upgrade clad = self.conf.color_list_available_downgrade clar = self.conf.color_list_available_reinstall clai = self.conf.color_list_available_install rap = self.output.listPkgs(ypl.available, _('Available Packages'), basecmd, highlight_na=inst_pkgs, columns=columns, highlight_modes={'<' : clau, '>' : clad, '=' : clar, 'not in' : clai}) raep = self.output.listPkgs(ypl.autoremove, _('Autoremove Packages'), basecmd, columns=columns) rep = self.output.listPkgs(ypl.extras, _('Extra Packages'), basecmd, columns=columns) cul = self.conf.color_update_local cur = self.conf.color_update_remote rup = self.output.listPkgs(ypl.updates, _('Available Upgrades'), basecmd, highlight_na=local_pkgs, columns=columns, highlight_modes={'=' : cul, 'not in' : cur}) # XXX put this into the ListCommand at some point if len(ypl.obsoletes) > 0 and basecmd == 'list': # if we've looked up obsolete lists and it's a list request rop = len(ypl.obsoletes) print(_('Obsoleting Packages')) for obtup in sorted(ypl.obsoletesTuples, key=operator.itemgetter(0)): self.output.updatesObsoletesList(obtup, 'obsoletes', columns=columns) else: rop = self.output.listPkgs(ypl.obsoletes, _('Obsoleting Packages'), basecmd, columns=columns) rrap = self.output.listPkgs(ypl.recent, _('Recently Added Packages'), basecmd, columns=columns) if len(patterns) and \ rrap == 0 and rop == 0 and rup == 0 and rep == 0 and rap == 0 and raep == 0 and rip == 0: raise dnf.exceptions.Error(_('No matching Packages to list')) def returnPkgLists(self, pkgnarrow='all', patterns=None, installed_available=False, reponame=None): """Return a :class:`dnf.yum.misc.GenericHolder` object containing lists of package objects that match the given names or wildcards. :param pkgnarrow: a string specifying which types of packages lists to produce, such as updates, installed, available, etc. 
:param patterns: a list of names or wildcards specifying packages to list :param installed_available: whether the available package list is present as .hidden_available when doing all, available, or installed :param reponame: limit packages list to the given repository :return: a :class:`dnf.yum.misc.GenericHolder` instance with the following lists defined:: available = list of packageObjects installed = list of packageObjects upgrades = tuples of packageObjects (updating, installed) extras = list of packageObjects obsoletes = tuples of packageObjects (obsoleting, installed) recent = list of packageObjects """ done_hidden_available = False done_hidden_installed = False if installed_available and pkgnarrow == 'installed': done_hidden_available = True pkgnarrow = 'all' elif installed_available and pkgnarrow == 'available': done_hidden_installed = True pkgnarrow = 'all' ypl = self._do_package_lists( pkgnarrow, patterns, ignore_case=True, reponame=reponame) if self.conf.showdupesfromrepos: for pkg in ypl.reinstall_available: if not pkg.installed and not done_hidden_available: ypl.available.append(pkg) if installed_available: ypl.hidden_available = ypl.available ypl.hidden_installed = ypl.installed if done_hidden_available: ypl.available = [] if done_hidden_installed: ypl.installed = [] return ypl def provides(self, args): """Print out a list of packages that provide the given file or feature. This a cli wrapper to the provides methods in the rpmdb and pkgsack. :param args: the name of a file or feature to search for :return: (exit_code, [ errors ]) exit_code is:: 0 = we're done, exit 1 = we've errored, exit with error string 2 = we've got work yet to do, onto the next stage """ # always in showdups mode old_sdup = self.conf.showdupesfromrepos self.conf.showdupesfromrepos = True matches = [] used_search_strings = [] for spec in args: query, used_search_string = super(BaseCli, self).provides(spec) matches.extend(query) used_search_strings.extend(used_search_string) for pkg in sorted(matches): self.output.matchcallback_verbose(pkg, used_search_strings, args) self.conf.showdupesfromrepos = old_sdup if not matches: raise dnf.exceptions.Error(_('No Matches found')) def _promptWanted(self): # shortcut for the always-off/always-on options if self.conf.assumeyes and not self.conf.assumeno: return False return True class Cli(object): def __init__(self, base): self.base = base self.cli_commands = {} self.command = None self.demands = dnf.cli.demand.DemandSheet() # :api self.register_command(dnf.cli.commands.alias.AliasCommand) self.register_command(dnf.cli.commands.autoremove.AutoremoveCommand) self.register_command(dnf.cli.commands.check.CheckCommand) self.register_command(dnf.cli.commands.clean.CleanCommand) self.register_command(dnf.cli.commands.distrosync.DistroSyncCommand) self.register_command(dnf.cli.commands.deplist.DeplistCommand) self.register_command(dnf.cli.commands.downgrade.DowngradeCommand) self.register_command(dnf.cli.commands.group.GroupCommand) self.register_command(dnf.cli.commands.history.HistoryCommand) self.register_command(dnf.cli.commands.install.InstallCommand) self.register_command(dnf.cli.commands.makecache.MakeCacheCommand) self.register_command(dnf.cli.commands.mark.MarkCommand) self.register_command(dnf.cli.commands.module.ModuleCommand) self.register_command(dnf.cli.commands.reinstall.ReinstallCommand) self.register_command(dnf.cli.commands.remove.RemoveCommand) self.register_command(dnf.cli.commands.repolist.RepoListCommand) 
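        # Illustrative extension point (hedged sketch): plugins may add their own
        # commands through the public Cli.register_command() API defined further
        # below; FooCommand here is a hypothetical class, not part of dnf:
        #
        #     class FooCommand(dnf.cli.commands.Command):
        #         aliases = ('foo',)
        #         summary = 'example summary'
        #
        #     cli.register_command(FooCommand)  # alias must not clash with an
        #                                       # already registered command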
self.register_command(dnf.cli.commands.repoquery.RepoQueryCommand) self.register_command(dnf.cli.commands.search.SearchCommand) self.register_command(dnf.cli.commands.shell.ShellCommand) self.register_command(dnf.cli.commands.swap.SwapCommand) self.register_command(dnf.cli.commands.updateinfo.UpdateInfoCommand) self.register_command(dnf.cli.commands.upgrade.UpgradeCommand) self.register_command(dnf.cli.commands.upgrademinimal.UpgradeMinimalCommand) self.register_command(dnf.cli.commands.InfoCommand) self.register_command(dnf.cli.commands.ListCommand) self.register_command(dnf.cli.commands.ProvidesCommand) self.register_command(dnf.cli.commands.CheckUpdateCommand) self.register_command(dnf.cli.commands.RepoPkgsCommand) self.register_command(dnf.cli.commands.HelpCommand) def _configure_repos(self, opts): self.base.read_all_repos(opts) if opts.repofrompath: for label, path in opts.repofrompath.items(): this_repo = self.base.repos.add_new_repo(label, self.base.conf, baseurl=[path]) this_repo._configure_from_options(opts) # do not let this repo to be disabled opts.repos_ed.append((label, "enable")) if opts.repo: opts.repos_ed.insert(0, ("*", "disable")) opts.repos_ed.extend([(r, "enable") for r in opts.repo]) notmatch = set() # Process repo enables and disables in order try: for (repo, operation) in opts.repos_ed: repolist = self.base.repos.get_matching(repo) if not repolist: if self.base.conf.strict and operation == "enable": msg = _("Unknown repo: '%s'") raise dnf.exceptions.RepoError(msg % repo) notmatch.add(repo) if operation == "enable": repolist.enable() else: repolist.disable() except dnf.exceptions.ConfigError as e: logger.critical(e) self.optparser.print_help() sys.exit(1) for repo in notmatch: logger.warning(_("No repository match: %s"), repo) expired_repos = self.base._repo_persistor.get_expired_repos() if expired_repos is None: expired_repos = self.base.repos.keys() for rid in expired_repos: repo = self.base.repos.get(rid) if repo: repo._repo.expire() # setup the progress bars/callbacks (bar, self.base._ds_callback) = self.base.output.setup_progress_callbacks() self.base.repos.all().set_progress_bar(bar) key_import = output.CliKeyImport(self.base, self.base.output) self.base.repos.all()._set_key_import(key_import) def _log_essentials(self): logger.debug('{prog} version: %s'.format(prog=dnf.util.MAIN_PROG_UPPER), dnf.const.VERSION) logger.log(dnf.logging.DDEBUG, 'Command: %s', self.cmdstring) logger.log(dnf.logging.DDEBUG, 'Installroot: %s', self.base.conf.installroot) logger.log(dnf.logging.DDEBUG, 'Releasever: %s', self.base.conf.releasever) logger.debug("cachedir: %s", self.base.conf.cachedir) def _process_demands(self): demands = self.demands repos = self.base.repos if demands.root_user: if not dnf.util.am_i_root(): raise dnf.exceptions.Error( _('This command has to be run with superuser privileges ' '(under the root user on most systems).')) if demands.changelogs: for repo in repos.iter_enabled(): repo.load_metadata_other = True if demands.cacheonly or self.base.conf.cacheonly: self.base.conf.cacheonly = True for repo in repos.values(): repo._repo.setSyncStrategy(dnf.repo.SYNC_ONLY_CACHE) else: if demands.freshest_metadata: for repo in repos.iter_enabled(): repo._repo.expire() elif not demands.fresh_metadata: for repo in repos.values(): repo._repo.setSyncStrategy(dnf.repo.SYNC_LAZY) if demands.sack_activation: self.base.fill_sack( load_system_repo='auto' if self.demands.load_system_repo else False, load_available_repos=self.demands.available_repos) def _parse_commands(self, 
opts, args): """Check that the requested CLI command exists.""" basecmd = opts.command command_cls = self.cli_commands.get(basecmd) if command_cls is None: logger.critical(_('No such command: %s. Please use %s --help'), basecmd, sys.argv[0]) if self.base.conf.plugins: logger.critical(_("It could be a {PROG} plugin command, " "try: \"{prog} install 'dnf-command(%s)'\"").format( prog=dnf.util.MAIN_PROG, PROG=dnf.util.MAIN_PROG_UPPER), basecmd) else: logger.critical(_("It could be a {prog} plugin command, " "but loading of plugins is currently disabled.").format( prog=dnf.util.MAIN_PROG_UPPER)) raise CliError self.command = command_cls(self) logger.log(dnf.logging.DDEBUG, 'Base command: %s', basecmd) logger.log(dnf.logging.DDEBUG, 'Extra commands: %s', args) def configure(self, args, option_parser=None): """Parse command line arguments, and set up :attr:`self.base.conf` and :attr:`self.cmds`, as well as logger objects in base instance. :param args: a list of command line arguments :param option_parser: a class for parsing cli options """ aliases = dnf.cli.aliases.Aliases() args = aliases.resolve(args) self.optparser = dnf.cli.option_parser.OptionParser() \ if option_parser is None else option_parser opts = self.optparser.parse_main_args(args) # Just print out the version if that's what the user wanted if opts.version: print(dnf.const.VERSION) print_versions(self.base.conf.history_record_packages, self.base, self.base.output) sys.exit(0) if opts.quiet: opts.debuglevel = 0 opts.errorlevel = 2 if opts.verbose: opts.debuglevel = opts.errorlevel = dnf.const.VERBOSE_LEVEL # Read up configuration options and initialize plugins try: if opts.cacheonly: self.base.conf._set_value("cachedir", self.base.conf.system_cachedir, dnf.conf.PRIO_DEFAULT) self.demands.cacheonly = True self.base.conf._configure_from_options(opts) self._read_conf_file(opts.releasever) if 'arch' in opts: self.base.conf.arch = opts.arch self.base.conf._adjust_conf_options() except (dnf.exceptions.ConfigError, ValueError) as e: logger.critical(_('Config error: %s'), e) sys.exit(1) except IOError as e: e = '%s: %s' % (ucd(str(e)), repr(e.filename)) logger.critical(_('Config error: %s'), e) sys.exit(1) if opts.destdir is not None: self.base.conf.destdir = opts.destdir if not self.base.conf.downloadonly and opts.command not in ( 'download', 'system-upgrade', 'reposync', 'modulesync'): logger.critical(_('--destdir or --downloaddir must be used with --downloadonly ' 'or download or system-upgrade command.') ) sys.exit(1) if (opts.set_enabled or opts.set_disabled) and opts.command != 'config-manager': logger.critical( _('--enable, --set-enabled and --disable, --set-disabled ' 'must be used with config-manager command.')) sys.exit(1) if opts.sleeptime is not None: time.sleep(random.randrange(opts.sleeptime * 60)) # store the main commands & summaries, before plugins are loaded self.optparser.add_commands(self.cli_commands, 'main') # store the plugin commands & summaries self.base.init_plugins(opts.disableplugin, opts.enableplugin, self) self.optparser.add_commands(self.cli_commands,'plugin') # show help if no command specified # this is done here, because we first have the full # usage info after the plugins are loaded. 
if not opts.command: self.optparser.print_help() sys.exit(0) # save our original args out self.base.args = args # save out as a nice command string self.cmdstring = self.optparser.prog + ' ' for arg in self.base.args: self.cmdstring += '%s ' % arg self._log_essentials() try: self._parse_commands(opts, args) except CliError: sys.exit(1) # show help for dnf --help / --help-cmd if opts.help: self.optparser.print_help(self.command) sys.exit(0) opts = self.optparser.parse_command_args(self.command, args) if opts.allowerasing: self.demands.allow_erasing = opts.allowerasing self.base._allow_erasing = True if opts.freshest_metadata: self.demands.freshest_metadata = opts.freshest_metadata if opts.debugsolver: self.base.conf.debug_solver = True if opts.obsoletes: self.base.conf.obsoletes = True self.command.pre_configure() self.base.pre_configure_plugins() # with cachedir in place we can configure stuff depending on it: self.base._activate_persistor() self._configure_repos(opts) self.base.configure_plugins() self.base.conf._configure_from_options(opts) self.command.configure() if self.base.conf.destdir: dnf.util.ensure_dir(self.base.conf.destdir) self.base.repos.all().pkgdir = self.base.conf.destdir if self.base.conf.color != 'auto': self.base.output.term.reinit(color=self.base.conf.color) if rpm.expandMacro('%_pkgverify_level') in ('signature', 'all'): forcing = False for repo in self.base.repos.iter_enabled(): if repo.gpgcheck: continue repo.gpgcheck = True forcing = True if not self.base.conf.localpkg_gpgcheck: self.base.conf.localpkg_gpgcheck = True forcing = True if forcing: logger.warning( _("Warning: Enforcing GPG signature check globally " "as per active RPM security policy (see 'gpgcheck' in " "dnf.conf(5) for how to squelch this message)" ) ) def _read_conf_file(self, releasever=None): timer = dnf.logging.Timer('config') conf = self.base.conf # replace remote config path with downloaded file conf._check_remote_file('config_file_path') # search config file inside the installroot first conf._search_inside_installroot('config_file_path') # check whether a config file is requested from command line and the file exists filename = conf._get_value('config_file_path') if (conf._get_priority('config_file_path') == dnf.conf.PRIO_COMMANDLINE) and \ not os.path.isfile(filename): raise dnf.exceptions.ConfigError(_('Config file "{}" does not exist').format(filename)) # read config conf.read(priority=dnf.conf.PRIO_MAINCONFIG) # search reposdir file inside the installroot first from_root = conf._search_inside_installroot('reposdir') # Update vars from same root like repos were taken if conf._get_priority('varsdir') == dnf.conf.PRIO_COMMANDLINE: from_root = "/" subst = conf.substitutions subst.update_from_etc(from_root, varsdir=conf._get_value('varsdir')) # cachedir, logs, releasever, and gpgkey are taken from or stored in installroot if releasever is None and conf.releasever is None: releasever = dnf.rpm.detect_releasever(conf.installroot) elif releasever == '/': releasever = dnf.rpm.detect_releasever(releasever) if releasever is not None: conf.releasever = releasever if conf.releasever is None: logger.warning(_("Unable to detect release version (use '--releasever' to specify " "release version)")) for opt in ('cachedir', 'logdir', 'persistdir'): conf.prepend_installroot(opt) self.base._logging._setup_from_dnf_conf(conf) timer() return conf def _populate_update_security_filter(self, opts, cmp_type='eq', all=None): """ :param opts: :param cmp_type: string supported "eq", "gte" :param all: :return: """ if 
(opts is None) and (all is None): return types = [] if opts.bugfix or all: types.append('bugfix') if opts.enhancement or all: types.append('enhancement') if opts.newpackage or all: types.append('newpackage') if opts.security or all: types.append('security') self.base.add_security_filters(cmp_type, types=types, advisory=opts.advisory, bugzilla=opts.bugzilla, cves=opts.cves, severity=opts.severity) def redirect_logger(self, stdout=None, stderr=None): # :api """ Change minimal logger level for terminal output to stdout and stderr according to specific command requirements @param stdout: logging.INFO, logging.WARNING, ... @param stderr:logging.INFO, logging.WARNING, ... """ if stdout is not None: self.base._logging.stdout_handler.setLevel(stdout) if stderr is not None: self.base._logging.stderr_handler.setLevel(stderr) def redirect_repo_progress(self, fo=sys.stderr): progress = dnf.cli.progress.MultiFileProgressMeter(fo) self.base.output.progress = progress self.base.repos.all().set_progress_bar(progress) def _check_running_kernel(self): kernel = self.base.sack.get_running_kernel() if kernel is None: return q = self.base.sack.query().filterm(provides=kernel.name) q = q.installed() q.filterm(advisory_type='security') ikpkg = kernel for pkg in q: if pkg > ikpkg: ikpkg = pkg if ikpkg > kernel: print('Security: %s is an installed security update' % ikpkg) print('Security: %s is the currently running version' % kernel) def _option_conflict(self, option_string_1, option_string_2): print(self.optparser.print_usage()) raise dnf.exceptions.Error(_("argument {}: not allowed with argument {}".format( option_string_1, option_string_2))) def register_command(self, command_cls): """Register a Command. :api""" for name in command_cls.aliases: if name in self.cli_commands: raise dnf.exceptions.ConfigError(_('Command "%s" already defined') % name) self.cli_commands[name] = command_cls def run(self): """Call the base command, and pass it the extended commands or arguments. :return: (exit_code, [ errors ]) exit_code is:: 0 = we're done, exit 1 = we've errored, exit with error string 2 = we've got work yet to do, onto the next stage """ self._process_demands() # Reports about excludes and includes (but not from plugins) if self.base.conf.excludepkgs: logger.debug( _('Excludes in dnf.conf: ') + ", ".join(sorted(set(self.base.conf.excludepkgs)))) if self.base.conf.includepkgs: logger.debug( _('Includes in dnf.conf: ') + ", ".join(sorted(set(self.base.conf.includepkgs)))) for repo in self.base.repos.iter_enabled(): if repo.excludepkgs: logger.debug(_('Excludes in repo ') + repo.id + ": " + ", ".join(sorted(set(repo.excludepkgs)))) if repo.includepkgs: logger.debug(_('Includes in repo ') + repo.id + ": " + ", ".join(sorted(set(repo.includepkgs)))) return self.command.run() PK!,8M//cli/completion_helper.pynu[#!/usr/libexec/platform-python # # This file is part of dnf. # # Copyright 2015 (C) Igor Gnatenko # Copyright 2016 (C) Red Hat, Inc. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. 
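# --- Illustrative sketch (not part of the upstream file) ---
# How Cli.register_command() above is typically used: any class exposing an
# `aliases` tuple can be hooked into Cli.cli_commands, which is exactly how
# the completion commands below register themselves in main().  HelloCommand
# and the surrounding names are hypothetical, for illustration only.
import dnf.cli

class HelloCommand(dnf.cli.Command):
    aliases = ('hello',)
    summary = 'print a greeting (demo only)'

    def configure(self):
        # this demo needs neither repo metadata nor root privileges
        self.cli.demands.root_user = False

    def run(self):
        print('hello from a registered command')

# base = dnf.cli.cli.BaseCli()
# cli = dnf.cli.Cli(base)
# cli.register_command(HelloCommand)  # raises ConfigError if 'hello' is already defined
# cli.configure(['hello'])
# cli.run()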
# # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301 USA import dnf.exceptions import dnf.cli import dnf.cli.commands.clean import sys def filter_list_by_kw(kw, lst): return filter(lambda k: str(k).startswith(kw), lst) def listpkg_to_setstr(pkgs): return set([str(x) for x in pkgs]) class RemoveCompletionCommand(dnf.cli.commands.remove.RemoveCommand): def __init__(self, args): super(RemoveCompletionCommand, self).__init__(args) def configure(self): self.cli.demands.root_user = False self.cli.demands.sack_activation = True def run(self): for pkg in ListCompletionCommand.installed(self.base, self.opts.pkg_specs): print(str(pkg)) class InstallCompletionCommand(dnf.cli.commands.install.InstallCommand): def __init__(self, args): super(InstallCompletionCommand, self).__init__(args) def configure(self): self.cli.demands.root_user = False self.cli.demands.available_repos = True self.cli.demands.sack_activation = True def run(self): installed = listpkg_to_setstr(ListCompletionCommand.installed(self.base, self.opts.pkg_specs)) available = listpkg_to_setstr(ListCompletionCommand.available(self.base, self.opts.pkg_specs)) for pkg in (available - installed): print(str(pkg)) class ReinstallCompletionCommand(dnf.cli.commands.reinstall.ReinstallCommand): def __init__(self, args): super(ReinstallCompletionCommand, self).__init__(args) def configure(self): self.cli.demands.root_user = False self.cli.demands.available_repos = True self.cli.demands.sack_activation = True def run(self): installed = listpkg_to_setstr(ListCompletionCommand.installed(self.base, self.opts.pkg_specs)) available = listpkg_to_setstr(ListCompletionCommand.available(self.base, self.opts.pkg_specs)) for pkg in (installed & available): print(str(pkg)) class ListCompletionCommand(dnf.cli.commands.ListCommand): def __init__(self, args): super(ListCompletionCommand, self).__init__(args) def run(self): subcmds = self.pkgnarrows args = self.opts.packages action = self.opts.packages_action if len(args) > 1 and args[1] not in subcmds: print("\n".join(filter_list_by_kw(args[1], subcmds))) else: if action == "installed": pkgs = self.installed(self.base, args) elif action == "available": pkgs = self.available(self.base, args) elif action == "updates": pkgs = self.updates(self.base, args) else: available = listpkg_to_setstr(self.available(self.base, args)) installed = listpkg_to_setstr(self.installed(self.base, args)) pkgs = (available | installed) if not pkgs: print("\n".join(filter_list_by_kw(args[0], subcmds))) return for pkg in pkgs: print(str(pkg)) @staticmethod def installed(base, arg): return base.sack.query().installed().filterm(name__glob="{}*".format(arg[0])) @staticmethod def available(base, arg): return base.sack.query().available().filterm(name__glob="{}*".format(arg[0])) @staticmethod def updates(base, arg): return base.check_updates(["{}*".format(arg[0])], print_=False) class RepoListCompletionCommand(dnf.cli.commands.repolist.RepoListCommand): def __init__(self, args): super(RepoListCompletionCommand, self).__init__(args) def run(self): args = self.opts if args.repos_action == "enabled": print("\n".join(filter_list_by_kw(args.repos[0], [r.id for r in self.base.repos.iter_enabled()]))) elif args.repos_action == "disabled": print("\n".join(filter_list_by_kw(args.repos[0], [r.id for r in self.base.repos.all() if not r.enabled]))) elif args.repos_action == "all": 
print("\n".join(filter_list_by_kw(args.repos[0], [r.id for r in self.base.repos.all()]))) class UpgradeCompletionCommand(dnf.cli.commands.upgrade.UpgradeCommand): def __init__(self, args): super(UpgradeCompletionCommand, self).__init__(args) def configure(self): self.cli.demands.root_user = False self.cli.demands.available_repos = True self.cli.demands.sack_activation = True def run(self): for pkg in ListCompletionCommand.updates(self.base, self.opts.pkg_specs): print(str(pkg)) class DowngradeCompletionCommand(dnf.cli.commands.downgrade.DowngradeCommand): def __init__(self, args): super(DowngradeCompletionCommand, self).__init__(args) def configure(self): self.cli.demands.root_user = False self.cli.demands.available_repos = True self.cli.demands.sack_activation = True def run(self): for pkg in ListCompletionCommand.available(self.base, self.opts.pkg_specs).downgrades(): print(str(pkg)) class CleanCompletionCommand(dnf.cli.commands.clean.CleanCommand): def __init__(self, args): super(CleanCompletionCommand, self).__init__(args) def run(self): subcmds = dnf.cli.commands.clean._CACHE_TYPES.keys() print("\n".join(filter_list_by_kw(self.opts.type[1], subcmds))) def main(args): base = dnf.cli.cli.BaseCli() cli = dnf.cli.Cli(base) if args[0] == "_cmds": base.init_plugins([], [], cli) print("\n".join(filter_list_by_kw(args[1], cli.cli_commands))) return cli.cli_commands.clear() cli.register_command(RemoveCompletionCommand) cli.register_command(InstallCompletionCommand) cli.register_command(ReinstallCompletionCommand) cli.register_command(ListCompletionCommand) cli.register_command(RepoListCompletionCommand) cli.register_command(UpgradeCompletionCommand) cli.register_command(DowngradeCompletionCommand) cli.register_command(CleanCompletionCommand) cli.configure(args) try: cli.run() except (OSError, dnf.exceptions.Error): sys.exit(0) if __name__ == "__main__": try: main(sys.argv[1:]) except KeyboardInterrupt: sys.exit(1) PK! cli/demand.pynu[# demand.py # Demand sheet and related classes. # # Copyright (C) 2014-2015 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. 
# from __future__ import unicode_literals class _BoolDefault(object): def __init__(self, default): self.default = default self._storing_name = '__%s%x' % (self.__class__.__name__, id(self)) def __get__(self, obj, objtype=None): objdict = obj.__dict__ if self._storing_name in objdict: return objdict[self._storing_name] return self.default def __set__(self, obj, val): objdict = obj.__dict__ if self._storing_name in objdict: current_val = objdict[self._storing_name] if current_val != val: raise AttributeError('Demand already set.') objdict[self._storing_name] = val class DemandSheet(object): """Collection of demands that different CLI parts have on other parts. :api""" # :api... allow_erasing = _BoolDefault(False) available_repos = _BoolDefault(False) resolving = _BoolDefault(False) root_user = _BoolDefault(False) sack_activation = _BoolDefault(False) load_system_repo = _BoolDefault(True) success_exit_status = 0 cacheonly = _BoolDefault(False) fresh_metadata = _BoolDefault(True) freshest_metadata = _BoolDefault(False) changelogs = _BoolDefault(False) transaction_display = None # This demand controlls applicability of the plugins that could filter # repositories packages (e.g. versionlock). # If it stays None, the demands.resolving is used as a fallback. plugin_filtering_enabled = _BoolDefault(None) PK! 3 cli/format.pynu[# Copyright (C) 2013-2016 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. from __future__ import unicode_literals from dnf.pycomp import long def format_number(number, SI=0, space=' '): """Return a human-readable metric-like string representation of a number. :param number: the number to be converted to a human-readable form :param SI: If is 0, this function will use the convention that 1 kilobyte = 1024 bytes, otherwise, the convention that 1 kilobyte = 1000 bytes will be used :param space: string that will be placed between the number and the SI prefix :return: a human-readable metric-like string representation of *number* """ # copied from from urlgrabber.progress symbols = [ ' ', # (none) 'k', # kilo 'M', # mega 'G', # giga 'T', # tera 'P', # peta 'E', # exa 'Z', # zetta 'Y'] # yotta if SI: step = 1000.0 else: step = 1024.0 thresh = 999 depth = 0 max_depth = len(symbols) - 1 if number is None: number = 0.0 # we want numbers between 0 and thresh, but don't exceed the length # of our list. In that event, the formatting will be screwed up, # but it'll still show the right number. while number > thresh and depth < max_depth: depth = depth + 1 number = number / step if isinstance(number, int) or isinstance(number, long): format = '%i%s%s' elif number < 9.95: # must use 9.95 for proper sizing. 
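# --- Illustrative sketch (not part of the upstream file) ---
# Behaviour of the DemandSheet defined above: each demand is a write-once
# boolean descriptor.  Re-setting it to the *same* value is accepted, but
# flipping an already-set demand raises AttributeError ('Demand already set.').
# _sketch_demands is an illustrative name.
from dnf.cli.demand import DemandSheet

def _sketch_demands():
    demands = DemandSheet()
    assert demands.sack_activation is False   # class default
    demands.sack_activation = True            # first explicit set wins
    demands.sack_activation = True            # same value: accepted
    try:
        demands.sack_activation = False       # conflicting value
    except AttributeError:
        pass                                  # 'Demand already set.'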
For example, 9.99 will be # rounded to 10.0 with the .1f format string (which is too long) format = '%.1f%s%s' else: format = '%.0f%s%s' return(format % (float(number or 0), space, symbols[depth])) def format_time(seconds, use_hours=0): """Return a human-readable string representation of a number of seconds. The string will show seconds, minutes, and optionally hours. :param seconds: the number of seconds to convert to a human-readable form :param use_hours: If use_hours is 0, the representation will be in minutes and seconds. Otherwise, it will be in hours, minutes, and seconds :return: a human-readable string representation of *seconds* """ # copied from from urlgrabber.progress if seconds is None or seconds < 0: if use_hours: return '--:--:--' else: return '--:--' elif seconds == float('inf'): return 'Infinite' else: seconds = int(seconds) minutes = seconds // 60 seconds = seconds % 60 if use_hours: hours = minutes // 60 minutes = minutes % 60 return '%02i:%02i:%02i' % (hours, minutes, seconds) else: return '%02i:%02i' % (minutes, seconds) def indent_block(s): return '\n'.join(' ' + s for s in s.splitlines()) PK!.) ff cli/main.pynu[# Copyright 2005 Duke University # Copyright (C) 2012-2016 Red Hat, Inc. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Library General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. """ Entrance point for the yum command line interface. 
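# --- Illustrative sketch (not part of the upstream file) ---
# Minimal usage of the cli/format.py helpers above; the expected values
# follow from the implementation shown (1024-based units unless SI=1).
# _sketch_format is an illustrative name.
from dnf.cli.format import format_number, format_time, indent_block

def _sketch_format():
    assert format_number(1024) == '1.0 k'         # 1 kilobyte = 1024 bytes by default
    assert format_number(1000, SI=1) == '1.0 k'   # SI=1 switches to 1000-based units
    assert format_time(125) == '02:05'            # minutes:seconds
    assert format_time(3700, use_hours=1) == '01:01:40'
    assert indent_block('a\nb') == '  a\n  b'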
""" from __future__ import print_function from __future__ import absolute_import from __future__ import unicode_literals from dnf.conf import Conf from dnf.cli.cli import Cli from dnf.cli.option_parser import OptionParser from dnf.i18n import ucd from dnf.cli.utils import show_lock_owner from dnf.i18n import _ import dnf.cli import dnf.cli.cli import dnf.cli.option_parser import dnf.exceptions import dnf.i18n import dnf.logging import dnf.util import errno import hawkey import libdnf.error import logging import os import os.path import sys logger = logging.getLogger("dnf") def ex_IOError(e): logger.log(dnf.logging.SUBDEBUG, '', exc_info=True) logger.critical(ucd(e)) return 1 def ex_Error(e): logger.log(dnf.logging.SUBDEBUG, '', exc_info=True) if e.value is not None: logger.critical(_('Error: %s'), ucd(e)) return 1 def main(args, conf_class=Conf, cli_class=Cli, option_parser_class=OptionParser): try: dnf.i18n.setup_stdout() with dnf.cli.cli.BaseCli(conf_class()) as base: return _main(base, args, cli_class, option_parser_class) except dnf.exceptions.ProcessLockError as e: logger.critical(e.value) show_lock_owner(e.pid) return 200 except dnf.exceptions.LockError as e: logger.critical(e.value) return 200 except dnf.exceptions.DepsolveError as e: return 1 except dnf.exceptions.Error as e: return ex_Error(e) except hawkey.Exception as e: logger.critical(_('Error: %s'), ucd(e)) return 1 except libdnf.error.Error as e: logger.critical(_('Error: %s'), ucd(e)) return 1 except IOError as e: return ex_IOError(e) except KeyboardInterrupt as e: logger.critical('{}: {}'.format(type(e).__name__, _("Terminated."))) return 1 def _main(base, args, cli_class, option_parser): """Run the dnf program from a command line interface.""" # our core object for the cli base._logging._presetup() cli = cli_class(base) # do our cli parsing and config file setup # also sanity check the things being passed on the cli try: cli.configure(list(map(ucd, args)), option_parser()) except (IOError, OSError) as e: return ex_IOError(e) return cli_run(cli, base) def cli_run(cli, base): # Try to open the current directory to see if we have # read and execute access. 
If not, chdir to / try: f = open(".") except IOError as e: if e.errno == errno.EACCES: logger.critical(_('No read/execute access in current directory, moving to /')) os.chdir("/") else: f.close() try: cli.run() except dnf.exceptions.LockError: raise except (IOError, OSError) as e: return ex_IOError(e) if cli.demands.resolving: try: ret = resolving(cli, base) except dnf.exceptions.DepsolveError as e: ex_Error(e) msg = "" if not cli.demands.allow_erasing and base._goal.problem_conflicts(available=True): msg += _("try to add '{}' to command line to replace conflicting " "packages").format("--allowerasing") if cli.base.conf.strict: if not msg: msg += _("try to add '{}' to skip uninstallable packages").format( "--skip-broken") else: msg += _(" or '{}' to skip uninstallable packages").format("--skip-broken") if cli.base.conf.best: prio = cli.base.conf._get_priority("best") if prio <= dnf.conf.PRIO_MAINCONFIG: if not msg: msg += _("try to add '{}' to use not only best candidate packages").format( "--nobest") else: msg += _(" or '{}' to use not only best candidate packages").format( "--nobest") if msg: logger.info("({})".format(msg)) raise if ret: return ret cli.command.run_transaction() return cli.demands.success_exit_status def resolving(cli, base): """Perform the depsolve, download and RPM transaction stage.""" if base.transaction is None: base.resolve(cli.demands.allow_erasing) logger.info(_('Dependencies resolved.')) cli.command.run_resolved() # Run the transaction displays = [] if cli.demands.transaction_display is not None: displays.append(cli.demands.transaction_display) try: base.do_transaction(display=displays) except dnf.cli.CliError as exc: logger.error(ucd(exc)) return 1 except dnf.exceptions.TransactionCheckError as err: for msg in cli.command.get_error_output(err): logger.critical(msg) return 1 except IOError as e: return ex_IOError(e) else: logger.info(_('Complete!')) return 0 def user_main(args, exit_code=False): """Call one of the multiple main() functions based on environment variables. :param args: command line arguments passed into yum :param exit_code: if *exit_code* is True, this function will exit python with its exit code when it has finished executing. Otherwise, it will return its exit code. :return: the exit code from dnf.yum execution """ errcode = main(args) if exit_code: sys.exit(errcode) return errcode if __name__ == "__main__": user_main(sys.argv[1:], exit_code=True) PK!4B4]4]cli/option_parser.pynu[# optparse.py # CLI options parser. # # Copyright (C) 2014-2016 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. 
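# --- Illustrative sketch (not part of the upstream file) ---
# user_main() above is the console entry point, but it can also be invoked
# from Python.  The return value is the exit code that main() maps from the
# raised exception (for example 200 when the lock cannot be obtained).
# _sketch_entry_point is an illustrative name.
import sys
import dnf.cli.main

def _sketch_entry_point(argv=None):
    argv = argv if argv is not None else sys.argv[1:]
    # exit_code=False returns the numeric code instead of calling sys.exit()
    return dnf.cli.main.user_main(argv, exit_code=False)

# _sketch_entry_point(['repolist'])  # -> 0 on success, non-zero on error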
# from __future__ import unicode_literals from dnf.i18n import _ from dnf.util import _parse_specs import argparse import dnf.exceptions import dnf.util import dnf.rpm import dnf.yum.misc import logging import os.path import re import sys logger = logging.getLogger("dnf") class MultilineHelpFormatter(argparse.HelpFormatter): def _split_lines(self, text, width): if '\n' in text: return text.splitlines() return super(MultilineHelpFormatter, self)._split_lines(text, width) class OptionParser(argparse.ArgumentParser): """ArgumentParser like class to do things the "yum way".""" def __init__(self, reset_usage=True): super(OptionParser, self).__init__(add_help=False, formatter_class=MultilineHelpFormatter) self.command_positional_parser = None self.command_group = None self._add_general_options() if reset_usage: self._cmd_usage = {} # names, summary for dnf commands, to build usage self._cmd_groups = set() # cmd groups added (main, plugin) def error(self, msg): """Output an error message, and exit the program. This method overrides standard argparser's error so that error output goes to the logger. :param msg: the error message to output """ self.print_usage() logger.critical(_("Command line error: %s"), msg) sys.exit(1) class _RepoCallback(argparse.Action): def __call__(self, parser, namespace, values, opt_str): operation = 'disable' if opt_str == '--disablerepo' else 'enable' l = getattr(namespace, self.dest) l.extend((x, operation) for x in re.split(r'\s*[,\s]\s*', values)) class _RepoCallbackEnable(argparse.Action): def __call__(self, parser, namespace, values, opt_str): namespace.repos_ed.append((values[0], 'enable')) setattr(namespace, 'reponame', values) class _SplitCallback(argparse._AppendAction): """ Split all strings in seq, at "," and whitespace. Returns a new list. """ SPLITTER = r'\s*[,\s]\s*' def __call__(self, parser, namespace, values, opt_str): first = True for val in re.split(self.SPLITTER, values): if first or val: # Empty values are sometimes used to clear existing content of the option. # Only the first value in the parsed string can be empty. Other empty values # are ignored. super(OptionParser._SplitCallback, self).__call__(parser, namespace, val, opt_str) first = False class _SplitExtendDictCallback(argparse.Action): """ Split string at "," or whitespace to (key, value). 
Extends dict with {key: value}.""" def __call__(self, parser, namespace, values, opt_str): try: key, val = values.split(',') if not key or not val: raise ValueError except ValueError: msg = _('bad format: %s') % values raise argparse.ArgumentError(self, msg) dct = getattr(namespace, self.dest) dct[key] = val class _SetoptsCallback(argparse.Action): """ Parse setopts arguments and put them into main_ and repo_.""" def __call__(self, parser, namespace, values, opt_str): vals = values.split('=') if len(vals) > 2: logger.warning(_("Setopt argument has multiple values: %s"), values) return if len(vals) < 2: logger.warning(_("Setopt argument has no value: %s"), values) return k, v = vals period = k.rfind('.') if period != -1: repo = k[:period] k = k[period+1:] if hasattr(namespace, 'repo_setopts'): repoopts = namespace.repo_setopts else: repoopts = {} repoopts.setdefault(repo, {}).setdefault(k, []).append(v) setattr(namespace, 'repo_' + self.dest, repoopts) else: if hasattr(namespace, 'main_setopts'): mainopts = namespace.main_setopts else: mainopts = {} mainopts.setdefault(k, []).append(v) setattr(namespace, 'main_' + self.dest, mainopts) class ParseSpecGroupFileCallback(argparse.Action): def __call__(self, parser, namespace, values, opt_str): _parse_specs(namespace, values) class PkgNarrowCallback(argparse.Action): def __init__(self, *args, **kwargs): self.pkgnarrow = {} try: for k in ['choices', 'default']: self.pkgnarrow[k] = kwargs[k] del kwargs[k] except KeyError as e: raise TypeError("%s() missing mandatory argument %s" % (self.__class__.__name__, e)) kwargs['default'] = [] super(OptionParser.PkgNarrowCallback, self).__init__(*args, **kwargs) def __call__(self, parser, namespace, values, opt_str): dest_action = self.dest + '_action' if not values or values[0] not in self.pkgnarrow['choices']: narrow = self.pkgnarrow['default'] else: narrow = values.pop(0) setattr(namespace, dest_action, narrow) setattr(namespace, self.dest, values) class ForceArchAction(argparse.Action): def __call__(self, parser, namespace, values, opt_str): namespace.ignorearch = True namespace.arch = values def _add_general_options(self): """ Standard options known to all dnf subcommands. """ # All defaults need to be a None, so we can always tell whether the user # has set something or whether we are getting a default. 
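# --- Illustrative sketch (not part of the upstream file) ---
# Two small behaviours of the callbacks above, shown with plain Python:
# the list-style options split on commas and/or whitespace, and a --setopt
# key containing a period is routed to the per-repo option dict.
# _sketch_callbacks is an illustrative name.
import re

def _sketch_callbacks():
    # same pattern as OptionParser._SplitCallback.SPLITTER
    assert re.split(r'\s*[,\s]\s*', 'a, b  c') == ['a', 'b', 'c']

    # --setopt=fedora.gpgcheck=0  ->  repo 'fedora', key 'gpgcheck', value '0'
    k, v = 'fedora.gpgcheck=0'.split('=')
    repo, key = k[:k.rfind('.')], k[k.rfind('.') + 1:]
    assert (repo, key, v) == ('fedora', 'gpgcheck', '0')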
general_grp = self.add_argument_group(_('General {prog} options'.format( prog=dnf.util.MAIN_PROG_UPPER))) general_grp.add_argument("-c", "--config", dest="config_file_path", default=None, metavar='[config file]', help=_("config file location")) general_grp.add_argument("-q", "--quiet", dest="quiet", action="store_true", default=None, help=_("quiet operation")) general_grp.add_argument("-v", "--verbose", action="store_true", default=None, help=_("verbose operation")) general_grp.add_argument("--version", action="store_true", default=None, help=_("show {prog} version and exit").format( prog=dnf.util.MAIN_PROG_UPPER)) general_grp.add_argument("--installroot", help=_("set install root"), metavar='[path]') general_grp.add_argument("--nodocs", action="store_const", const=['nodocs'], dest='tsflags', help=_("do not install documentations")) general_grp.add_argument("--noplugins", action="store_false", default=None, dest='plugins', help=_("disable all plugins")) general_grp.add_argument("--enableplugin", dest="enableplugin", default=[], action=self._SplitCallback, help=_("enable plugins by name"), metavar='[plugin]') general_grp.add_argument("--disableplugin", dest="disableplugin", default=[], action=self._SplitCallback, help=_("disable plugins by name"), metavar='[plugin]') general_grp.add_argument("--releasever", default=None, help=_("override the value of $releasever" " in config and repo files")) general_grp.add_argument("--setopt", dest="setopts", default=[], action=self._SetoptsCallback, help=_("set arbitrary config and repo options")) general_grp.add_argument("--skip-broken", dest="skip_broken", action="store_true", default=None, help=_("resolve depsolve problems by skipping packages")) general_grp.add_argument('-h', '--help', '--help-cmd', action="store_true", dest='help', help=_("show command help")) general_grp.add_argument('--allowerasing', action='store_true', default=None, help=_('allow erasing of installed packages to ' 'resolve dependencies')) best_group = general_grp.add_mutually_exclusive_group() best_group.add_argument("-b", "--best", action="store_true", dest='best', default=None, help=_("try the best available package versions in transactions.")) best_group.add_argument("--nobest", action="store_false", dest='best', help=_("do not limit the transaction to the best candidate")) general_grp.add_argument("-C", "--cacheonly", dest="cacheonly", action="store_true", default=None, help=_("run entirely from system cache, " "don't update cache")) general_grp.add_argument("-R", "--randomwait", dest="sleeptime", type=int, default=None, metavar='[minutes]', help=_("maximum command wait time")) general_grp.add_argument("-d", "--debuglevel", dest="debuglevel", metavar='[debug level]', default=None, help=_("debugging output level"), type=int) general_grp.add_argument("--debugsolver", action="store_true", default=None, help=_("dumps detailed solving results into" " files")) general_grp.add_argument("--showduplicates", dest="showdupesfromrepos", action="store_true", default=None, help=_("show duplicates, in repos, " "in list/search commands")) general_grp.add_argument("-e", "--errorlevel", default=None, type=int, help=_("error output level")) general_grp.add_argument("--obsoletes", default=None, dest="obsoletes", action="store_true", help=_("enables {prog}'s obsoletes processing logic " "for upgrade or display capabilities that " "the package obsoletes for info, list and " "repoquery").format(prog=dnf.util.MAIN_PROG)) general_grp.add_argument("--rpmverbosity", default=None, help=_("debugging 
output level for rpm"), metavar='[debug level name]') general_grp.add_argument("-y", "--assumeyes", action="store_true", default=None, help=_("automatically answer yes" " for all questions")) general_grp.add_argument("--assumeno", action="store_true", default=None, help=_("automatically answer no" " for all questions")) general_grp.add_argument("--enablerepo", action=self._RepoCallback, dest='repos_ed', default=[], metavar='[repo]', help=_("Enable additional repositories. List option. " "Supports globs, can be specified multiple times.")) repo_group = general_grp.add_mutually_exclusive_group() repo_group.add_argument("--disablerepo", action=self._RepoCallback, dest='repos_ed', default=[], metavar='[repo]', help=_("Disable repositories. List option. " "Supports globs, can be specified multiple times.")) repo_group.add_argument('--repo', '--repoid', metavar='[repo]', dest='repo', action=self._SplitCallback, default=[], help=_('enable just specific repositories by an id or a glob, ' 'can be specified multiple times')) enable_group = general_grp.add_mutually_exclusive_group() enable_group.add_argument("--enable", default=False, dest="set_enabled", action="store_true", help=_("enable repos with config-manager " "command (automatically saves)")) enable_group.add_argument("--disable", default=False, dest="set_disabled", action="store_true", help=_("disable repos with config-manager " "command (automatically saves)")) general_grp.add_argument("-x", "--exclude", "--excludepkgs", default=[], dest='excludepkgs', action=self._SplitCallback, help=_("exclude packages by name or glob"), metavar='[package]') general_grp.add_argument("--disableexcludes", "--disableexcludepkgs", default=[], dest="disable_excludes", action=self._SplitCallback, help=_("disable excludepkgs"), metavar='[repo]') general_grp.add_argument("--repofrompath", default={}, action=self._SplitExtendDictCallback, metavar='[repo,path]', help=_("label and path to an additional repository to use (same " "path as in a baseurl), can be specified multiple times.")) general_grp.add_argument("--noautoremove", action="store_false", default=None, dest='clean_requirements_on_remove', help=_("disable removal of dependencies that are no longer used")) general_grp.add_argument("--nogpgcheck", action="store_false", default=None, dest='gpgcheck', help=_("disable gpg signature checking (if RPM policy allows)")) general_grp.add_argument("--color", dest="color", default=None, help=_("control whether color is used")) general_grp.add_argument("--refresh", dest="freshest_metadata", action="store_true", help=_("set metadata as expired before running" " the command")) general_grp.add_argument("-4", dest="ip_resolve", default=None, help=_("resolve to IPv4 addresses only"), action="store_const", const='ipv4') general_grp.add_argument("-6", dest="ip_resolve", default=None, help=_("resolve to IPv6 addresses only"), action="store_const", const='ipv6') general_grp.add_argument("--destdir", "--downloaddir", dest="destdir", default=None, help=_("set directory to copy packages to")) general_grp.add_argument("--downloadonly", dest="downloadonly", action="store_true", default=False, help=_("only download packages")) general_grp.add_argument("--comment", dest="comment", default=None, help=_("add a comment to transaction")) # Updateinfo options... 
general_grp.add_argument("--bugfix", action="store_true", help=_("Include bugfix relevant packages, " "in updates")) general_grp.add_argument("--enhancement", action="store_true", help=_("Include enhancement relevant packages," " in updates")) general_grp.add_argument("--newpackage", action="store_true", help=_("Include newpackage relevant packages," " in updates")) general_grp.add_argument("--security", action="store_true", help=_("Include security relevant packages, " "in updates")) general_grp.add_argument("--advisory", "--advisories", dest="advisory", default=[], action=self._SplitCallback, help=_("Include packages needed to fix the " "given advisory, in updates")) general_grp.add_argument("--bz", "--bzs", default=[], dest="bugzilla", action=self._SplitCallback, help=_( "Include packages needed to fix the given BZ, in updates")) general_grp.add_argument("--cve", "--cves", default=[], dest="cves", action=self._SplitCallback, help=_("Include packages needed to fix the given CVE, in updates")) general_grp.add_argument( "--sec-severity", "--secseverity", choices=['Critical', 'Important', 'Moderate', 'Low'], default=[], dest="severity", action=self._SplitCallback, help=_( "Include security relevant packages matching the severity, " "in updates")) general_grp.add_argument("--forcearch", metavar="ARCH", dest=argparse.SUPPRESS, action=self.ForceArchAction, choices=sorted(dnf.rpm._BASEARCH_MAP.keys()), help=_("Force the use of an architecture")) general_grp.add_argument('command', nargs='?', help=argparse.SUPPRESS) def _add_cmd_usage(self, cmd, group): """ store usage info about a single dnf command.""" summary = dnf.i18n.ucd(cmd.summary) name = dnf.i18n.ucd(cmd.aliases[0]) if not name in self._cmd_usage: self._cmd_usage[name] = (group, summary) self._cmd_groups.add(group) def add_commands(self, cli_cmds, group): """ store name & summary for dnf commands The stored information is used build usage information grouped by build-in & plugin commands. """ for cmd in set(cli_cmds.values()): self._add_cmd_usage(cmd, group) def get_usage(self): """ get the usage information to show the user. 
""" desc = {'main': _('List of Main Commands:'), 'plugin': _('List of Plugin Commands:')} usage = '%s [options] COMMAND\n' % dnf.util.MAIN_PROG for grp in ['main', 'plugin']: if not grp in self._cmd_groups: # dont add plugin usage, if we dont have plugins continue usage += "\n%s\n\n" % desc[grp] for name in sorted(self._cmd_usage.keys()): group, summary = self._cmd_usage[name] if group == grp: usage += "%-25s %s\n" % (name, summary) return usage def _add_command_options(self, command): self.prog = "%s %s" % (dnf.util.MAIN_PROG, command._basecmd) self.description = command.summary self.command_positional_parser = argparse.ArgumentParser(self.prog, add_help=False) self.command_positional_parser.print_usage = self.print_usage self.command_positional_parser._positionals.title = None self.command_group = self.add_argument_group( '{} command-specific options'.format(command._basecmd.capitalize())) self.command_group.add_argument = self.cmd_add_argument self.command_group._command = command._basecmd command.set_argparser(self.command_group) def cmd_add_argument(self, *args, **kwargs): if all([(arg[0] in self.prefix_chars) for arg in args]): return type(self.command_group).add_argument(self.command_group, *args, **kwargs) else: return self.command_positional_parser.add_argument(*args, **kwargs) def _check_encoding(self, args): for arg in args: try: arg.encode('utf-8') except UnicodeEncodeError as e: raise dnf.exceptions.ConfigError( _("Cannot encode argument '%s': %s") % (arg, str(e))) def parse_main_args(self, args): self._check_encoding(args) namespace, _unused_args = self.parse_known_args(args) return namespace def parse_command_args(self, command, args): self._add_command_options(command) namespace, unused_args = self.parse_known_args(args) namespace = self.command_positional_parser.parse_args(unused_args, namespace) command.opts = namespace return command.opts def print_usage(self, file_=None): if self.command_positional_parser: self._actions += self.command_positional_parser._actions super(OptionParser, self).print_usage(file_) def print_help(self, command=None): # pylint: disable=W0212 if command: if not self.command_group or self.command_group._command != command._basecmd: self._add_command_options(command) self._actions += self.command_positional_parser._actions self._action_groups.append(self.command_positional_parser._positionals) else: self.usage = self.get_usage() super(OptionParser, self).print_help() PK!NaBZZ cli/output.pynu[# Copyright 2005 Duke University # Copyright (C) 2012-2016 Red Hat, Inc. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Library General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
"""Handle actual output from the cli.""" from __future__ import absolute_import from __future__ import print_function from __future__ import unicode_literals import fnmatch import hawkey import itertools import libdnf.transaction import logging import operator import pwd import re import sys import time from dnf.cli.format import format_number, format_time from dnf.i18n import _, C_, P_, ucd, fill_exact_width, textwrap_fill, exact_width, select_short_long from dnf.pycomp import xrange, basestring, long, unicode, sys_maxsize from dnf.yum.rpmtrans import TransactionDisplay from dnf.db.history import MergedTransactionWrapper import dnf.base import dnf.callback import dnf.cli.progress import dnf.cli.term import dnf.conf import dnf.crypto import dnf.i18n import dnf.transaction import dnf.util import dnf.yum.misc logger = logging.getLogger('dnf') def _spread_in_columns(cols_count, label, lst): left = itertools.chain((label,), itertools.repeat('')) lst_length = len(lst) right_count = cols_count - 1 missing_items = -lst_length % right_count if not lst_length: lst = itertools.repeat('', right_count) elif missing_items: lst.extend(('',) * missing_items) lst_iter = iter(lst) return list(zip(left, *[lst_iter] * right_count)) class Output(object): """Main output class for the yum command line.""" GRP_PACKAGE_INDENT = ' ' * 3 FILE_PROVIDE_RE = re.compile(r'^\*{0,2}/') def __init__(self, base, conf): self.conf = conf self.base = base self.term = dnf.cli.term.Term() self.progress = None def _banner(self, col_data, row): term_width = self.term.columns rule = '%s' % '=' * term_width header = self.fmtColumns(zip(row, col_data), ' ') return rule, header, rule def _col_widths(self, rows): col_data = [dict() for _ in rows[0]] for row in rows: for (i, val) in enumerate(row): col_dct = col_data[i] length = len(val) col_dct[length] = col_dct.get(length, 0) + 1 cols = self.calcColumns(col_data, None, indent=' ') # align to the left return list(map(operator.neg, cols)) def _highlight(self, highlight): hibeg = '' hiend = '' if not highlight: pass elif not isinstance(highlight, basestring) or highlight == 'bold': hibeg = self.term.MODE['bold'] elif highlight == 'normal': pass # Minor opt. else: # Turn a string into a specific output: colour, bold, etc. for high in highlight.replace(',', ' ').split(): if high == 'normal': hibeg = '' elif high in self.term.MODE: hibeg += self.term.MODE[high] elif high in self.term.FG_COLOR: hibeg += self.term.FG_COLOR[high] elif (high.startswith('fg:') and high[3:] in self.term.FG_COLOR): hibeg += self.term.FG_COLOR[high[3:]] elif (high.startswith('bg:') and high[3:] in self.term.BG_COLOR): hibeg += self.term.BG_COLOR[high[3:]] if hibeg: hiend = self.term.MODE['normal'] return (hibeg, hiend) def _sub_highlight(self, haystack, highlight, needles, **kwds): hibeg, hiend = self._highlight(highlight) return self.term.sub(haystack, hibeg, hiend, needles, **kwds) @staticmethod def _calc_columns_spaces_helps(current, data_tups, left): """ Spaces left on the current field will help how many pkgs? """ ret = 0 for tup in data_tups: if left < (tup[0] - current): break ret += tup[1] return ret @property def history(self): return self.base.history @property def sack(self): return self.base.sack def calcColumns(self, data, columns=None, remainder_column=0, total_width=None, indent=''): """Dynamically calculate the widths of the columns that the fields in data should be placed into for output. :param data: a list of dictionaries that represent the data to be output. 
Each dictionary in the list corresponds to a column of output. The keys of the dictionary are the lengths of the items to be output, and the value associated with a key is the number of items of that length. :param columns: a list containing the minimum amount of space that must be allocated for each row. This can be used to ensure that there is space available in a column if, for example, the actual lengths of the items being output cannot be given in *data* :param remainder_column: number of the column to receive a few extra spaces that may remain after other allocation has taken place :param total_width: the total width of the output. self.term.real_columns is used by default :param indent: string that will be prefixed to a line of output to create e.g. an indent :return: a list of the widths of the columns that the fields in data should be placed into for output """ cols = len(data) # Convert the data to ascending list of tuples, (field_length, pkgs) pdata = data data = [None] * cols # Don't modify the passed in data for d in range(0, cols): data[d] = sorted(pdata[d].items()) if total_width is None: total_width = self.term.real_columns # We start allocating 1 char to everything but the last column, and a # space between each (again, except for the last column). Because # at worst we are better with: # |one two three| # | four | # ...than: # |one two three| # | f| # |our | # ...the later being what we get if we pre-allocate the last column, and # thus. the space, due to "three" overflowing it's column by 2 chars. if columns is None: columns = [1] * (cols - 1) columns.append(0) # i'm not able to get real terminal width so i'm probably # running in non interactive terminal (pipe to grep, redirect to file...) # avoid splitting lines to enable filtering output if not total_width: full_columns = [] for d in xrange(0, cols): col = data[d] if col: full_columns.append(col[-1][0]) else: full_columns.append(columns[d] + 1) full_columns[0] += len(indent) # if possible, try to keep default width (usually 80 columns) default_width = self.term.columns if sum(full_columns) > default_width: return full_columns total_width = default_width total_width -= (sum(columns) + (cols - 1) + exact_width(indent)) if not columns[-1]: total_width += 1 while total_width > 0: # Find which field all the spaces left will help best helps = 0 val = 0 for d in xrange(0, cols): thelps = self._calc_columns_spaces_helps(columns[d], data[d], total_width) if not thelps: continue # We prefer to overflow: the last column, and then earlier # columns. This is so that in the best case (just overflow the # last) ... grep still "works", and then we make it prettier. if helps and (d == (cols - 1)) and (thelps / 2) < helps: continue if thelps < helps: continue helps = thelps val = d # If we found a column to expand, move up to the next level with # that column and start again with any remaining space. if helps: diff = data[val].pop(0)[0] - columns[val] if not columns[val] and (val == (cols - 1)): # If we are going from 0 => N on the last column, take 1 # for the space before the column. 
total_width -= 1 columns[val] += diff total_width -= diff continue overflowed_columns = 0 for d in xrange(0, cols): if not data[d]: continue overflowed_columns += 1 if overflowed_columns: # Split the remaining spaces among each overflowed column # equally norm = total_width // overflowed_columns for d in xrange(0, cols): if not data[d]: continue columns[d] += norm total_width -= norm # Split the remaining spaces among each column equally, except the # last one. And put the rest into the remainder column cols -= 1 norm = total_width // cols for d in xrange(0, cols): columns[d] += norm columns[remainder_column] += total_width - (cols * norm) total_width = 0 return columns @staticmethod def _fmt_column_align_width(width): """Returns tuple of (align_left, width)""" if width < 0: return (True, -width) return (False, width) def _col_data(self, col_data): assert len(col_data) == 2 or len(col_data) == 3 if len(col_data) == 2: (val, width) = col_data hibeg = hiend = '' if len(col_data) == 3: (val, width, highlight) = col_data (hibeg, hiend) = self._highlight(highlight) return (ucd(val), width, hibeg, hiend) def fmtColumns(self, columns, msg=u'', end=u''): """Return a row of data formatted into a string for output. Items can overflow their columns. :param columns: a list of tuples containing the data to output. Each tuple contains first the item to be output, then the amount of space allocated for the column, and then optionally a type of highlighting for the item :param msg: a string to begin the line of output with :param end: a string to end the line of output with :return: a row of data formatted into a string for output """ columns = list(columns) total_width = len(msg) data = [] for col_data in columns[:-1]: (val, width, hibeg, hiend) = self._col_data(col_data) if not width: # Don't count this column, invisible text msg += u"%s" data.append(val) continue (align_left, width) = self._fmt_column_align_width(width) val_width = exact_width(val) if val_width <= width: # Don't use fill_exact_width() because it sucks performance # wise for 1,000s of rows. Also allows us to use len(), when # we can. msg += u"%s%s%s%s " if align_left: data.extend([hibeg, val, " " * (width - val_width), hiend]) else: data.extend([hibeg, " " * (width - val_width), val, hiend]) else: msg += u"%s%s%s\n" + " " * (total_width + width + 1) data.extend([hibeg, val, hiend]) total_width += width total_width += 1 (val, width, hibeg, hiend) = self._col_data(columns[-1]) (align_left, width) = self._fmt_column_align_width(width) val = fill_exact_width(val, width, left=align_left, prefix=hibeg, suffix=hiend) msg += u"%%s%s" % end data.append(val) return msg % tuple(data) def simpleList(self, pkg, ui_overflow=False, indent='', highlight=False, columns=None): """Print a package as a line. :param pkg: the package to be printed :param ui_overflow: unused :param indent: string to be prefixed onto the line to provide e.g. an indent :param highlight: highlighting options for the name of the package :param columns: tuple containing the space allocated for each column of output. 
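# --- Illustrative sketch (not part of the upstream file) ---
# Shape of the `data` argument consumed by calcColumns() above: one dict per
# output column, mapping an item width to how many rows contain an item of
# that width (the same structure _col_widths() builds before calling
# calcColumns()).  _sketch_col_data is an illustrative name.
def _sketch_col_data(rows):
    # rows: list of tuples of already-rendered cell strings
    col_data = [dict() for _ in rows[0]]
    for row in rows:
        for i, val in enumerate(row):
            length = len(val)
            col_data[i][length] = col_data[i].get(length, 0) + 1
    return col_data

# _sketch_col_data([('vim-enhanced.x86_64', '2:8.0-1', 'updates'),
#                   ('bash.x86_64', '4.4-1', 'fedora')])
# -> [{19: 1, 11: 1}, {7: 1, 5: 1}, {7: 1, 6: 1}]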
The columns are the package name, version, and repository """ if columns is None: columns = (-40, -22, -16) # Old default na = '%s%s.%s' % (indent, pkg.name, pkg.arch) hi_cols = [highlight, 'normal', 'normal'] columns = zip((na, pkg.evr, pkg._from_repo), columns, hi_cols) print(self.fmtColumns(columns)) def simpleEnvraList(self, pkg, ui_overflow=False, indent='', highlight=False, columns=None): """Print a package as a line, with the package itself in envra format so it can be passed to list/install/etc. :param pkg: the package to be printed :param ui_overflow: unused :param indent: string to be prefixed onto the line to provide e.g. an indent :param highlight: highlighting options for the name of the package :param columns: tuple containing the space allocated for each column of output. The columns the are the package envra and repository """ if columns is None: columns = (-63, -16) # Old default envra = '%s%s' % (indent, ucd(pkg)) hi_cols = [highlight, 'normal', 'normal'] rid = pkg.ui_from_repo columns = zip((envra, rid), columns, hi_cols) print(self.fmtColumns(columns)) def simple_name_list(self, pkg): """Print a package as a line containing its name.""" print(ucd(pkg.name)) def simple_nevra_list(self, pkg): """Print a package as a line containing its NEVRA.""" print(ucd(pkg)) def fmtKeyValFill(self, key, val): """Return a key value pair in the common two column output format. :param key: the key to be formatted :param val: the value associated with *key* :return: the key value pair formatted in two columns for output """ keylen = exact_width(key) cols = self.term.real_columns if not cols: cols = sys_maxsize elif cols < 20: cols = 20 nxt = ' ' * (keylen - 2) + ': ' if not val: # textwrap.fill in case of empty val returns empty string return key val = ucd(val) ret = textwrap_fill(val, width=cols, initial_indent=key, subsequent_indent=nxt) if ret.count("\n") > 1 and keylen > (cols // 3): # If it's big, redo it again with a smaller subsequent off ret = textwrap_fill(val, width=cols, initial_indent=key, subsequent_indent=' ...: ') return ret def fmtSection(self, name, fill='='): """Format and return a section header. The format of the header is a line with *name* centered, and *fill* repeated on either side to fill an entire line on the terminal. :param name: the name of the section :param fill: the character to repeat on either side of *name* to fill an entire line. *fill* must be a single character. :return: a string formatted to be a section header """ name = ucd(name) cols = self.term.columns - 2 name_len = exact_width(name) if name_len >= (cols - 4): beg = end = fill * 2 else: beg = fill * ((cols - name_len) // 2) end = fill * (cols - name_len - len(beg)) return "%s %s %s" % (beg, name, end) def infoOutput(self, pkg, highlight=False): """Print information about the given package. :param pkg: the package to print information about :param highlight: highlighting options for the name of the package """ def format_key_val(key, val): return " ".join([fill_exact_width(key, 12, 12), ":", str(val)]) def format_key_val_fill(key, val): return self.fmtKeyValFill(fill_exact_width(key, 12, 12) + " : ", val or "") output_list = [] (hibeg, hiend) = self._highlight(highlight) # Translators: This is abbreviated 'Name'. Should be no longer # than 12 characters. You can use the full version if it is short # enough in your language. 
key = select_short_long(12, C_("short", "Name"), C_("long", "Name")) output_list.append(format_key_val(key, "%s%s%s" % (hibeg, pkg.name, hiend))) if pkg.epoch: # Translators: This message should be no longer than 12 characters. output_list.append(format_key_val(_("Epoch"), pkg.epoch)) key = select_short_long(12, C_("short", "Version"), C_("long", "Version")) output_list.append(format_key_val(key, pkg.version)) # Translators: This message should be no longer than 12 characters. output_list.append(format_key_val(_("Release"), pkg.release)) key = select_short_long(12, C_("short", "Arch"), C_("long", "Architecture")) output_list.append(format_key_val(key, pkg.arch)) key = select_short_long(12, C_("short", "Size"), C_("long", "Size")) output_list.append(format_key_val(key, format_number(float(pkg._size)))) # Translators: This message should be no longer than 12 characters. output_list.append(format_key_val(_("Source"), pkg.sourcerpm)) key = select_short_long(12, C_("short", "Repo"), C_("long", "Repository")) output_list.append(format_key_val(key, pkg.repoid)) if pkg._from_system: history_repo = self.history.repo(pkg) if history_repo: # Translators: This message should be no longer than 12 chars. output_list.append(format_key_val(_("From repo"), history_repo)) if self.conf.verbose: # :hawkey does not support changelog information # print(_("Committer : %s") % ucd(pkg.committer)) # print(_("Committime : %s") % time.ctime(pkg.committime)) # Translators: This message should be no longer than 12 characters. output_list.append(format_key_val(_("Packager"), pkg.packager)) # Translators: This message should be no longer than 12 characters. output_list.append(format_key_val(_("Buildtime"), dnf.util.normalize_time(pkg.buildtime))) if pkg.installtime: # Translators: This message should be no longer than 12 characters. output_list.append(format_key_val(_("Install time"), dnf.util.normalize_time(pkg.installtime))) history_pkg = self.history.package_data(pkg) if history_pkg: try: uid = int(history_pkg._item.getInstalledBy()) except ValueError: # In case int() fails uid = None # Translators: This message should be no longer than 12 chars. output_list.append(format_key_val(_("Installed by"), self._pwd_ui_username(uid))) # Translators: This is abbreviated 'Summary'. Should be no longer # than 12 characters. You can use the full version if it is short # enough in your language. key = select_short_long(12, C_("short", "Summary"), C_("long", "Summary")) output_list.append(format_key_val_fill(key, pkg.summary)) if pkg.url: output_list.append(format_key_val(_("URL"), ucd(pkg.url))) # Translators: This message should be no longer than 12 characters. output_list.append(format_key_val_fill(_("License"), pkg.license)) # Translators: This is abbreviated 'Description'. Should be no longer # than 12 characters. You can use the full version if it is short # enough in your language. key = select_short_long(12, C_("short", "Description"), C_("long", "Description")) output_list.append(format_key_val_fill(key, pkg.description)) return "\n".join(output_list) def updatesObsoletesList(self, uotup, changetype, columns=None): """Print a simple string that explains the relationship between the members of an update or obsoletes tuple. :param uotup: an update or obsoletes tuple. The first member is the new package, and the second member is the old package :param changetype: a string indicating what the change between the packages is, e.g. 
'updates' or 'obsoletes' :param columns: a tuple containing information about how to format the columns of output. The absolute value of each number in the tuple indicates how much space has been allocated for the corresponding column. If the number is negative, the text in the column will be left justified, and if it is positive, the text will be right justified. The columns of output are the package name, version, and repository """ (changePkg, instPkg) = uotup if columns is not None: # New style, output all info. for both old/new with old indented chi = self.conf.color_update_remote if changePkg.reponame != hawkey.SYSTEM_REPO_NAME: chi = self.conf.color_update_local self.simpleList(changePkg, columns=columns, highlight=chi) self.simpleList(instPkg, columns=columns, indent=' ' * 4, highlight=self.conf.color_update_installed) return # Old style c_compact = changePkg.compactPrint() i_compact = '%s.%s' % (instPkg.name, instPkg.arch) c_repo = changePkg.repoid print('%-35.35s [%.12s] %.10s %-20.20s' % (c_compact, c_repo, changetype, i_compact)) def listPkgs(self, lst, description, outputType, highlight_na={}, columns=None, highlight_modes={}): """Prints information about the given list of packages. :param lst: a list of packages to print information about :param description: string describing what the list of packages contains, e.g. 'Available Packages' :param outputType: The type of information to be printed. Current options:: 'list' - simple pkg list 'info' - similar to rpm -qi output 'name' - simple name list 'nevra' - simple nevra list :param highlight_na: a dictionary containing information about packages that should be highlighted in the output. The dictionary keys are (name, arch) tuples for the package, and the associated values are the package objects themselves. :param columns: a tuple containing information about how to format the columns of output. The absolute value of each number in the tuple indicates how much space has been allocated for the corresponding column. If the number is negative, the text in the column will be left justified, and if it is positive, the text will be right justified. The columns of output are the package name, version, and repository :param highlight_modes: dictionary containing information about to highlight the packages in *highlight_na*. 
*highlight_modes* should contain the following keys:: 'not_in' - highlighting used for packages not in *highlight_na* '=' - highlighting used when the package versions are equal '<' - highlighting used when the package has a lower version number '>' - highlighting used when the package has a higher version number :return: number of packages listed """ if outputType in ['list', 'info', 'name', 'nevra']: if len(lst) > 0: print('%s' % description) info_set = set() if outputType == 'list': unique_item_dict = {} for pkg in lst: unique_item_dict[str(pkg) + str(pkg._from_repo)] = pkg lst = unique_item_dict.values() for pkg in sorted(lst): key = (pkg.name, pkg.arch) highlight = False if key not in highlight_na: highlight = highlight_modes.get('not in', 'normal') elif pkg.evr_eq(highlight_na[key]): highlight = highlight_modes.get('=', 'normal') elif pkg.evr_lt(highlight_na[key]): highlight = highlight_modes.get('>', 'bold') else: highlight = highlight_modes.get('<', 'normal') if outputType == 'list': self.simpleList(pkg, ui_overflow=True, highlight=highlight, columns=columns) elif outputType == 'info': info_set.add(self.infoOutput(pkg, highlight=highlight) + "\n") elif outputType == 'name': self.simple_name_list(pkg) elif outputType == 'nevra': self.simple_nevra_list(pkg) else: pass if info_set: print("\n".join(sorted(info_set))) return len(lst) def userconfirm(self, msg=None, defaultyes_msg=None): """Get a yes or no from the user, and default to No :msg: String for case with [y/N] :defaultyes_msg: String for case with [Y/n] :return: True if the user selects yes, and False if the user selects no """ yui = (ucd(_('y')), ucd(_('yes'))) nui = (ucd(_('n')), ucd(_('no'))) aui = yui + nui while True: if msg is None: msg = _('Is this ok [y/N]: ') choice = '' if self.conf.defaultyes: if defaultyes_msg is None: msg = _('Is this ok [Y/n]: ') else: msg = defaultyes_msg try: choice = dnf.i18n.ucd_input(msg) except EOFError: pass except KeyboardInterrupt: choice = nui[0] choice = ucd(choice).lower() if len(choice) == 0: choice = yui[0] if self.conf.defaultyes else nui[0] if choice in aui: break # If the English one letter names don't mix with the translated # letters, allow them too: if u'y' == choice and u'y' not in aui: choice = yui[0] break if u'n' == choice and u'n' not in aui: choice = nui[0] break if choice in yui: return True return False def _pkgs2name_dict(self, sections): installed = self.sack.query().installed()._name_dict() available = self.sack.query().available()._name_dict() d = {} for pkg_name in itertools.chain(*list(zip(*sections))[1]): if pkg_name in installed: d[pkg_name] = installed[pkg_name][0] elif pkg_name in available: d[pkg_name] = available[pkg_name][0] return d def _pkgs2col_lengths(self, sections, name_dict): nevra_lengths = {} repo_lengths = {} for pkg_name in itertools.chain(*list(zip(*sections))[1]): pkg = name_dict.get(pkg_name) if pkg is None: continue nevra_l = exact_width(ucd(pkg)) + exact_width(self.GRP_PACKAGE_INDENT) repo_l = exact_width(ucd(pkg.reponame)) nevra_lengths[nevra_l] = nevra_lengths.get(nevra_l, 0) + 1 repo_lengths[repo_l] = repo_lengths.get(repo_l, 0) + 1 return (nevra_lengths, repo_lengths) def _display_packages(self, pkg_names): for name in pkg_names: print('%s%s' % (self.GRP_PACKAGE_INDENT, name)) def _display_packages_verbose(self, pkg_names, name_dict, columns): for name in pkg_names: try: pkg = name_dict[name] except KeyError: # package not in any repo -> print only package name print('%s%s' % (self.GRP_PACKAGE_INDENT, name)) continue highlight = False 
if not pkg._from_system: highlight = self.conf.color_list_available_install self.simpleEnvraList(pkg, ui_overflow=True, indent=self.GRP_PACKAGE_INDENT, highlight=highlight, columns=columns) def display_pkgs_in_groups(self, group): """Output information about the packages in a given group :param group: a Group object to output information about """ def names(packages): return sorted(pkg.name for pkg in packages) print('\n' + _('Group: %s') % group.ui_name) verbose = self.conf.verbose if verbose: print(_(' Group-Id: %s') % ucd(group.id)) if group.ui_description: print(_(' Description: %s') % ucd(group.ui_description) or "") if group.lang_only: print(_(' Language: %s') % group.lang_only) sections = ( (_(' Mandatory Packages:'), names(group.mandatory_packages)), (_(' Default Packages:'), names(group.default_packages)), (_(' Optional Packages:'), names(group.optional_packages)), (_(' Conditional Packages:'), names(group.conditional_packages))) if verbose: name_dict = self._pkgs2name_dict(sections) col_lengths = self._pkgs2col_lengths(sections, name_dict) columns = self.calcColumns(col_lengths) columns = (-columns[0], -columns[1]) for (section_name, packages) in sections: if len(packages) < 1: continue print(section_name) self._display_packages_verbose(packages, name_dict, columns) else: for (section_name, packages) in sections: if len(packages) < 1: continue print(section_name) self._display_packages(packages) def display_groups_in_environment(self, environment): """Output information about the packages in a given environment :param environment: an Environment object to output information about """ def names(groups): return sorted(group.name for group in groups) print(_('Environment Group: %s') % environment.ui_name) if self.conf.verbose: print(_(' Environment-Id: %s') % ucd(environment.id)) if environment.ui_description: description = ucd(environment.ui_description) or "" print(_(' Description: %s') % description) sections = ( (_(' Mandatory Groups:'), names(environment.mandatory_groups)), (_(' Optional Groups:'), names(environment.optional_groups))) for (section_name, packages) in sections: if len(packages) < 1: continue print(section_name) self._display_packages(packages) def matchcallback(self, po, values, matchfor=None, verbose=None, highlight=None): """Output search/provides type callback matches. 
:param po: the package object that matched the search :param values: the information associated with *po* that matched the search :param matchfor: a list of strings to be highlighted in the output :param verbose: whether to output extra verbose information :param highlight: highlighting options for the highlighted matches """ def print_highlighted_key_item(key, item, printed_headline, can_overflow=False): if not printed_headline: print(_('Matched from:')) item = ucd(item) or "" if item == "": return if matchfor: item = self._sub_highlight(item, highlight, matchfor, ignore_case=True) if can_overflow: print(self.fmtKeyValFill(key, item)) else: print(key % item) def print_file_provides(item, printed_match): if not self.FILE_PROVIDE_RE.match(item): return False key = _("Filename : %s") file_match = False for filename in po.files: if fnmatch.fnmatch(filename, item): print_highlighted_key_item( key, filename, file_match or printed_match, can_overflow=False) file_match = True return file_match if self.conf.showdupesfromrepos: msg = '%s : ' % po else: msg = '%s.%s : ' % (po.name, po.arch) msg = self.fmtKeyValFill(msg, po.summary or "") if matchfor: if highlight is None: highlight = self.conf.color_search_match msg = self._sub_highlight(msg, highlight, matchfor, ignore_case=True) print(msg) if verbose is None: verbose = self.conf.verbose if not verbose: return print(_("Repo : %s") % po.ui_from_repo) printed_match = False name_match = False for item in set(values): if po.summary == item: name_match = True continue # Skip double name/summary printing if po.description == item: key = _("Description : ") print_highlighted_key_item(key, item, printed_match, can_overflow=True) printed_match = True elif po.url == item: key = _("URL : %s") print_highlighted_key_item(key, item, printed_match, can_overflow=False) printed_match = True elif po.license == item: key = _("License : %s") print_highlighted_key_item(key, item, printed_match, can_overflow=False) printed_match = True elif print_file_provides(item, printed_match): printed_match = True else: key = _("Provide : %s") for provide in po.provides: provide = str(provide) if fnmatch.fnmatch(provide, item): print_highlighted_key_item(key, provide, printed_match, can_overflow=False) printed_match = True else: first_provide = provide.split()[0] possible = set('=<>') if any((char in possible) for char in item): item_new = item.split()[0] else: item_new = item if fnmatch.fnmatch(first_provide, item_new): print_highlighted_key_item( key, provide, printed_match, can_overflow=False) printed_match = True if not any([printed_match, name_match]): for item in set(values): key = _("Other : %s") print_highlighted_key_item(key, item, printed_match, can_overflow=False) print() def matchcallback_verbose(self, po, values, matchfor=None): """Output search/provides type callback matches. This will output more information than :func:`matchcallback`. 
:param po: the package object that matched the search :param values: the information associated with *po* that matched the search :param matchfor: a list of strings to be highlighted in the output """ return self.matchcallback(po, values, matchfor, verbose=True) def reportDownloadSize(self, packages, installonly=False): """Report the total download size for a set of packages :param packages: a list of package objects :param installonly: whether the transaction consists only of installations """ totsize = 0 locsize = 0 insize = 0 error = False for pkg in packages: # Just to be on the safe side, if for some reason getting # the package size fails, log the error and don't report download # size try: size = int(pkg._size) totsize += size try: if pkg.verifyLocalPkg(): locsize += size except Exception: pass if not installonly: continue try: size = int(pkg.installsize) except Exception: pass insize += size except Exception: error = True msg = _('There was an error calculating total download size') logger.error(msg) break if not error: if locsize: logger.info(_("Total size: %s"), format_number(totsize)) if locsize != totsize: logger.info(_("Total download size: %s"), format_number(totsize - locsize)) if installonly: logger.info(_("Installed size: %s"), format_number(insize)) def reportRemoveSize(self, packages): """Report the total size of packages being removed. :param packages: a list of package objects """ totsize = 0 error = False for pkg in packages: # Just to be on the safe side, if for some reason getting # the package size fails, log the error and don't report download # size try: size = pkg._size totsize += size except Exception: error = True msg = _('There was an error calculating installed size') logger.error(msg) break if not error: logger.info(_("Freed space: %s"), format_number(totsize)) def list_group_transaction(self, comps, history, diff): if not diff: return None out = [] rows = [] if diff.new_groups: out.append(_('Marking packages as installed by the group:')) for grp_id in diff.new_groups: pkgs = list(diff.added_packages(grp_id)) group_object = comps._group_by_id(grp_id) grp_name = group_object.ui_name if group_object else grp_id rows.extend(_spread_in_columns(4, "@" + grp_name, pkgs)) if diff.removed_groups: out.append(_('Marking packages as removed by the group:')) for grp_id in diff.removed_groups: pkgs = list(diff.removed_packages(grp_id)) grp_name = history.group.get(grp_id).ui_name rows.extend(_spread_in_columns(4, "@" + grp_name, pkgs)) if rows: col_data = self._col_widths(rows) for row in rows: out.append(self.fmtColumns(zip(row, col_data), ' ')) out[0:0] = self._banner(col_data, (_('Group'), _('Packages'), '', '')) return '\n'.join(out) def list_transaction(self, transaction, total_width=None): """Return a string representation of the transaction in an easy-to-read format. """ forward_actions = hawkey.UPGRADE | hawkey.UPGRADE_ALL | hawkey.DISTUPGRADE | \ hawkey.DISTUPGRADE_ALL | hawkey.DOWNGRADE | hawkey.INSTALL skipped_conflicts = set() skipped_broken = set() if transaction is None: # set empty transaction list instead of returning None # in order to display module changes when RPM transaction is empty transaction = [] list_bunch = dnf.util._make_lists(transaction) pkglist_lines = [] data = {'n' : {}, 'v' : {}, 'r' : {}} a_wid = 0 # Arch can't get "that big" ... so always use the max. 
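        # Illustrative note (added, not in the original source): 'data' is a
        # per-column histogram of string widths that the nested _add_line()
        # helper below fills in and self.calcColumns() later consumes. For
        # example, after three packages whose names are 4, 4 and 11 characters
        # wide, data['n'] would hypothetically be {4: 2, 11: 1}; 'v' and 'r'
        # hold the same kind of mapping for the EVR and repository columns,
        # while a_wid tracks the single widest architecture string seen so far.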
def _add_line(lines, data, a_wid, po, obsoletes=[]): (n, a, e, v, r) = po.pkgtup evr = po.evr repoid = po._from_repo size = format_number(po._size) if a is None: # gpgkeys are weird a = 'noarch' # none, partial, full? if po._from_system: hi = self.conf.color_update_installed elif po._from_cmdline: hi = self.conf.color_update_local else: hi = self.conf.color_update_remote lines.append((n, a, evr, repoid, size, obsoletes, hi)) # Create a dict of field_length => number of packages, for # each field. for (d, v) in (("n", len(n)), ("v", len(evr)), ("r", len(repoid))): data[d].setdefault(v, 0) data[d][v] += 1 a_wid = max(a_wid, len(a)) return a_wid ins_group_msg = _('Installing group/module packages') if dnf.base.WITH_MODULES \ else _('Installing group packages') for (action, pkglist) in [ # TRANSLATORS: This is for a list of packages to be installed. (C_('summary', 'Installing'), list_bunch.installed), # TRANSLATORS: This is for a list of packages to be upgraded. (C_('summary', 'Upgrading'), list_bunch.upgraded), # TRANSLATORS: This is for a list of packages to be reinstalled. (C_('summary', 'Reinstalling'), list_bunch.reinstalled), (ins_group_msg, list_bunch.installed_group), (_('Installing dependencies'), list_bunch.installed_dep), (_('Installing weak dependencies'), list_bunch.installed_weak), # TRANSLATORS: This is for a list of packages to be removed. (_('Removing'), list_bunch.erased), (_('Removing dependent packages'), list_bunch.erased_dep), (_('Removing unused dependencies'), list_bunch.erased_clean), # TRANSLATORS: This is for a list of packages to be downgraded. (C_('summary', 'Downgrading'), list_bunch.downgraded)]: lines = [] # build a reverse mapping to 'replaced_by' # this is required to achieve reasonable speed replaces = {} for tsi in transaction: if tsi.action != libdnf.transaction.TransactionItemAction_OBSOLETED: continue for i in tsi._item.getReplacedBy(): replaces.setdefault(i, set()).add(tsi) for tsi in sorted(pkglist, key=lambda x: x.pkg): if tsi.action not in dnf.transaction.FORWARD_ACTIONS + [libdnf.transaction.TransactionItemAction_REMOVE]: continue # get TransactionItems obsoleted by tsi obsoleted = sorted(replaces.get(tsi._item, [])) a_wid = _add_line(lines, data, a_wid, tsi.pkg, obsoleted) pkglist_lines.append((action, lines)) installedProfiles = sorted(dict(self.base._moduleContainer.getInstalledProfiles()).items()) if installedProfiles: action = _("Installing module profiles") lines = [] for name, profiles in installedProfiles: for profile in list(profiles): lines.append(("%s/%s" % (name, profile), "", "", "", "", "", "")) pkglist_lines.append((action, lines)) removedProfiles = sorted(dict(self.base._moduleContainer.getRemovedProfiles()).items()) if removedProfiles: action = _("Disabling module profiles") lines = [] for name, profiles in removedProfiles: for profile in list(profiles): lines.append(("%s/%s" % (name, profile), "", "", "", "", "", "")) pkglist_lines.append((action, lines)) enabledStreams = sorted(dict(self.base._moduleContainer.getEnabledStreams()).items()) if enabledStreams: action = _("Enabling module streams") lines = [] for name, stream in enabledStreams: lines.append((name, "", stream, "", "", "", "")) pkglist_lines.append((action, lines)) switchedStreams = sorted(dict(self.base._moduleContainer.getSwitchedStreams()).items()) if switchedStreams: action = _("Switching module streams") lines = [] for name, stream in switchedStreams: lines.append((name, "", "%s -> %s" % (stream[0], stream[1]), "", "", "", "")) pkglist_lines.append((action, lines)) 
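        # Illustrative note (added, not in the original source): every row
        # appended to 'lines' uses the same 7-tuple layout that _add_line()
        # produces, i.e. (name, arch, evr, repoid, size, obsoletes, highlight);
        # the module profile/stream rows above simply leave the columns they do
        # not need as empty strings so they can be rendered by the same
        # fmtColumns() code path as ordinary package rows.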
disabledModules = sorted(list(self.base._moduleContainer.getDisabledModules())) if disabledModules: action = _("Disabling modules") lines = [] for name in disabledModules: lines.append((name, "", "", "", "", "", "")) pkglist_lines.append((action, lines)) resetModules = sorted(list(self.base._moduleContainer.getResetModules())) if resetModules: action = _("Resetting modules") lines = [] for name in resetModules: lines.append((name, "", "", "", "", "", "")) pkglist_lines.append((action, lines)) if self.base._history: def format_line(group): name = group.getName() return (name if name else _(""), "", "", "", "", "", "") install_env_group = self.base._history.env._installed if install_env_group: action = _("Installing Environment Groups") lines = [] for group in install_env_group.values(): lines.append(format_line(group)) pkglist_lines.append((action, lines)) upgrade_env_group = self.base._history.env._upgraded if upgrade_env_group: action = _("Upgrading Environment Groups") lines = [] for group in upgrade_env_group.values(): lines.append(format_line(group)) pkglist_lines.append((action, lines)) remove_env_group = self.base._history.env._removed if remove_env_group: action = _("Removing Environment Groups") lines = [] for group in remove_env_group.values(): lines.append(format_line(group)) pkglist_lines.append((action, lines)) install_group = self.base._history.group._installed if install_group: action = _("Installing Groups") lines = [] for group in install_group.values(): lines.append(format_line(group)) pkglist_lines.append((action, lines)) upgrade_group = self.base._history.group._upgraded if upgrade_group: action = _("Upgrading Groups") lines = [] for group in upgrade_group.values(): lines.append(format_line(group)) pkglist_lines.append((action, lines)) remove_group = self.base._history.group._removed if remove_group: action = _("Removing Groups") lines = [] for group in remove_group.values(): lines.append(format_line(group)) pkglist_lines.append((action, lines)) # show skipped conflicting packages if not self.conf.best and self.base._goal.actions & forward_actions: lines = [] skipped_conflicts, skipped_broken = self.base._skipped_packages( report_problems=True, transaction=transaction) skipped_broken = dict((str(pkg), pkg) for pkg in skipped_broken) for pkg in sorted(skipped_conflicts): a_wid = _add_line(lines, data, a_wid, pkg, []) recommendations = ["--best"] if not self.base._allow_erasing: recommendations.append("--allowerasing") skip_str = _("Skipping packages with conflicts:\n" "(add '%s' to command line " "to force their upgrade)") % " ".join(recommendations) # remove misleading green color from the "packages with conflicts" lines lines = [i[:-1] + ("", ) for i in lines] pkglist_lines.append((skip_str, lines)) lines = [] for nevra, pkg in sorted(skipped_broken.items()): a_wid = _add_line(lines, data, a_wid, pkg, []) skip_str = _("Skipping packages with broken dependencies%s") if self.base.conf.upgrade_group_objects_upgrade: skip_str = skip_str % "" else: skip_str = skip_str % _(" or part of a group") # remove misleading green color from the "broken dependencies" lines lines = [i[:-1] + ("", ) for i in lines] pkglist_lines.append((skip_str, lines)) output_width = self.term.columns if not data['n'] and not self.base._moduleContainer.isChanged() and not \ (self.base._history and (self.base._history.group or self.base._history.env)): return u'' else: data = [data['n'], {}, data['v'], data['r'], {}] columns = [1, a_wid, 1, 1, 5] columns = self.calcColumns(data, indent=" ", 
columns=columns, remainder_column=2, total_width=total_width) (n_wid, a_wid, v_wid, r_wid, s_wid) = columns real_width = sum(columns) + 5 output_width = output_width if output_width >= real_width else real_width # Do not use 'Package' without context. Using context resolves # RhBug 1302935 as a side effect. msg_package = select_short_long(n_wid, # Translators: This is the short version of 'Package'. You can # use the full (unabbreviated) term 'Package' if you think that # the translation to your language is not too long and will # always fit to limited space. C_('short', 'Package'), # Translators: This is the full (unabbreviated) term 'Package'. C_('long', 'Package')) msg_arch = select_short_long(a_wid, # Translators: This is abbreviated 'Architecture', used when # we have not enough space to display the full word. C_('short', 'Arch'), # Translators: This is the full word 'Architecture', used when # we have enough space. C_('long', 'Architecture')) msg_version = select_short_long(v_wid, # Translators: This is the short version of 'Version'. You can # use the full (unabbreviated) term 'Version' if you think that # the translation to your language is not too long and will # always fit to limited space. C_('short', 'Version'), # Translators: This is the full (unabbreviated) term 'Version'. C_('long', 'Version')) msg_repository = select_short_long(r_wid, # Translators: This is abbreviated 'Repository', used when # we have not enough space to display the full word. C_('short', 'Repo'), # Translators: This is the full word 'Repository', used when # we have enough space. C_('long', 'Repository')) msg_size = select_short_long(s_wid, # Translators: This is the short version of 'Size'. It should # not be longer than 5 characters. If the term 'Size' in your # language is not longer than 5 characters then you can use it # unabbreviated. C_('short', 'Size'), # Translators: This is the full (unabbreviated) term 'Size'. 
C_('long', 'Size')) out = [u"%s\n%s\n%s\n" % ('=' * output_width, self.fmtColumns(((msg_package, -n_wid), (msg_arch, -a_wid), (msg_version, -v_wid), (msg_repository, -r_wid), (msg_size, s_wid)), u" "), '=' * output_width)] for (action, lines) in pkglist_lines: if lines: totalmsg = u"%s:\n" % action for (n, a, evr, repoid, size, obsoletes, hi) in lines: columns = ((n, -n_wid, hi), (a, -a_wid), (evr, -v_wid), (repoid, -r_wid), (size, s_wid)) msg = self.fmtColumns(columns, u" ", u"\n") hibeg, hiend = self._highlight(self.conf.color_update_installed) for obspo in sorted(obsoletes): appended = ' ' + _('replacing') + ' %s%s%s.%s %s\n' appended %= (hibeg, obspo.name, hiend, obspo.arch, obspo.evr) msg += appended totalmsg = totalmsg + msg if lines: out.append(totalmsg) out.append(_(""" Transaction Summary %s """) % ('=' * output_width)) summary_data = ( (_('Install'), len(list_bunch.installed) + len(list_bunch.installed_group) + len(list_bunch.installed_weak) + len(list_bunch.installed_dep), 0), (_('Upgrade'), len(list_bunch.upgraded), 0), (_('Remove'), len(list_bunch.erased) + len(list_bunch.erased_dep) + len(list_bunch.erased_clean), 0), (_('Downgrade'), len(list_bunch.downgraded), 0), (_('Skip'), len(skipped_conflicts) + len(skipped_broken), 0)) max_msg_action = 0 max_msg_count = 0 max_msg_pkgs = 0 max_msg_depcount = 0 for action, count, depcount in summary_data: if not count and not depcount: continue msg_pkgs = P_('Package', 'Packages', count) len_msg_action = exact_width(action) len_msg_count = exact_width(unicode(count)) len_msg_pkgs = exact_width(msg_pkgs) if depcount: len_msg_depcount = exact_width(unicode(depcount)) else: len_msg_depcount = 0 max_msg_action = max(len_msg_action, max_msg_action) max_msg_count = max(len_msg_count, max_msg_count) max_msg_pkgs = max(len_msg_pkgs, max_msg_pkgs) max_msg_depcount = max(len_msg_depcount, max_msg_depcount) for action, count, depcount in summary_data: msg_pkgs = P_('Package', 'Packages', count) if depcount: msg_deppkgs = P_('Dependent package', 'Dependent packages', depcount) action_msg = fill_exact_width(action, max_msg_action) if count: msg = '%s %*d %s (+%*d %s)\n' out.append(msg % (action_msg, max_msg_count, count, "%-*s" % (max_msg_pkgs, msg_pkgs), max_msg_depcount, depcount, msg_deppkgs)) else: msg = '%s %s ( %*d %s)\n' out.append(msg % (action_msg, (max_msg_count + max_msg_pkgs) * ' ', max_msg_depcount, depcount, msg_deppkgs)) elif count: msg = '%s %*d %s\n' out.append(msg % (fill_exact_width(action, max_msg_action), max_msg_count, count, msg_pkgs)) return ''.join(out) def _pto_callback(self, action, tsis): # Works a bit like calcColumns, but we never overflow a column we just # have a dynamic number of columns. def _fits_in_cols(msgs, num): """ Work out how many columns we can use to display stuff, in the post trans output. 
""" if len(msgs) < num: return [] left = self.term.columns - ((num - 1) + 2) if left <= 0: return [] col_lens = [0] * num col = 0 for msg in msgs: if len(msg) > col_lens[col]: diff = (len(msg) - col_lens[col]) if left <= diff: return [] left -= diff col_lens[col] = len(msg) col += 1 col %= len(col_lens) for col in range(len(col_lens)): col_lens[col] += left // num col_lens[col] *= -1 return col_lens if not tsis: return '' out = [] msgs = [] out.append('{}:'.format(action)) for tsi in tsis: msgs.append(str(tsi)) for num in (8, 7, 6, 5, 4, 3, 2): cols = _fits_in_cols(msgs, num) if cols: break if not cols: cols = [-(self.term.columns - 2)] while msgs: current_msgs = msgs[:len(cols)] out.append(' {}'.format(self.fmtColumns(zip(current_msgs, cols)))) msgs = msgs[len(cols):] return out def post_transaction_output(self, transaction): """ Return a human-readable summary of the transaction. Packages in sections are arranged to columns. """ return dnf.util._post_transaction_output(self.base, transaction, self._pto_callback) def setup_progress_callbacks(self): """Set up the progress callbacks and various output bars based on debug level. """ progressbar = None if self.conf.debuglevel >= 2: progressbar = dnf.cli.progress.MultiFileProgressMeter(fo=sys.stdout) self.progress = dnf.cli.progress.MultiFileProgressMeter(fo=sys.stdout) # setup our depsolve progress callback return (progressbar, DepSolveProgressCallBack()) def download_callback_total_cb(self, remote_size, download_start_timestamp): """Outputs summary information about the download process. :param remote_size: the total amount of information that was downloaded, in bytes :param download_start_timestamp: the time when the download process started, in seconds since the epoch """ if remote_size <= 0: return width = self.term.columns logger.info("-" * width) dl_time = max(0.01, time.time() - download_start_timestamp) msg = ' %5sB/s | %5sB %9s ' % ( format_number(remote_size // dl_time), format_number(remote_size), format_time(dl_time)) msg = fill_exact_width(_("Total"), width - len(msg)) + msg logger.info(msg) def _history_uiactions(self, hpkgs): actions = set() actions_short = set() count = 0 for pkg in hpkgs: if pkg.action in (libdnf.transaction.TransactionItemAction_UPGRADED, libdnf.transaction.TransactionItemAction_DOWNGRADED): # skip states we don't want to display in user input continue actions.add(pkg.action_name) actions_short.add(pkg.action_short) count += 1 if len(actions) > 1: return count, ", ".join(sorted(actions_short)) # So empty transactions work, although that "shouldn't" really happen return count, "".join(list(actions)) def _pwd_ui_username(self, uid, limit=None): if isinstance(uid, list): return [self._pwd_ui_username(u, limit) for u in uid] # loginuid is set to -1 (0xFFFF_FFFF) on init, in newer kernels. # loginuid is set to INT_MAX (0x7FFF_FFFF) on init, in older kernels. if uid is None or uid in (0xFFFFFFFF, 0x7FFFFFFF): loginid = _("") name = _("System") + " " + loginid if limit is not None and len(name) > limit: name = loginid return ucd(name) def _safe_split_0(text, *args): """ Split gives us a [0] for everything _but_ '', this function returns '' in that case. """ ret = text.split(*args) if not ret: return '' return ret[0] try: user = pwd.getpwuid(int(uid)) fullname = _safe_split_0(ucd(user.pw_gecos), ';', 2) user_name = ucd(user.pw_name) name = "%s <%s>" % (fullname, user_name) if limit is not None and len(name) > limit: name = "%s ... 
<%s>" % (_safe_split_0(fullname), user_name) if len(name) > limit: name = "<%s>" % user_name return name except KeyError: return ucd(uid) def historyListCmd(self, tids, reverse=False): """Output a list of information about the history of yum transactions. :param tids: transaction Ids; lists all transactions if empty """ transactions = self.history.old(tids) if self.conf.history_list_view == 'users': uids = [1, 2] elif self.conf.history_list_view == 'commands': uids = [1] else: assert self.conf.history_list_view == 'single-user-commands' uids = set() done = 0 blanks = 0 for transaction in transactions: done += 1 if transaction.cmdline is None: blanks += 1 uids.add(transaction.loginuid) fmt = "%s | %s | %s | %s | %s" if len(uids) == 1: name = _("Command line") real_cols = self.term.real_columns if real_cols is None: # if output is redirected in `less` the columns # detected are None value, to detect terminal size # use stdin file descriptor real_cols = dnf.cli.term._real_term_width(0) if real_cols is None: # if even stdin fd fails use 24 to fit to 80 cols real_cols = 24 name_width = real_cols - 55 if real_cols > 79 else 24 else: # TRANSLATORS: user names who executed transaction in history command output name = _("User name") name_width = 24 print(fmt % (fill_exact_width(_("ID"), 6, 6), fill_exact_width(name, name_width, name_width), fill_exact_width(_("Date and time"), 16, 16), fill_exact_width(_("Action(s)"), 14, 14), fill_exact_width(_("Altered"), 7, 7))) # total table width: each column length +3 (padding and separator between columns) table_width = 6 + 3 + name_width + 3 + 16 + 3 + 14 + 3 + 7 print("-" * table_width) fmt = "%6u | %s | %-16.16s | %s | %4u" if reverse is True: transactions = reversed(transactions) for transaction in transactions: if len(uids) == 1: name = transaction.cmdline or '' else: name = self._pwd_ui_username(transaction.loginuid, 24) name = ucd(name) tm = time.strftime("%Y-%m-%d %H:%M", time.localtime(transaction.beg_timestamp)) num, uiacts = self._history_uiactions(transaction.data()) name = fill_exact_width(name, name_width, name_width) uiacts = fill_exact_width(uiacts, 14, 14) rmark = lmark = ' ' if transaction.return_code is None: rmark = lmark = '*' elif transaction.return_code: rmark = lmark = '#' # We don't check .errors, because return_code will be non-0 elif transaction.is_output: rmark = lmark = 'E' if transaction.altered_lt_rpmdb: rmark = '<' if transaction.altered_gt_rpmdb: lmark = '>' print(fmt % (transaction.tid, name, tm, uiacts, num), "%s%s" % (lmark, rmark)) def historyInfoCmd(self, tids, pats=[], mtids=set()): """Output information about a transaction in history :param tids: transaction Ids; prints info for the last transaction if empty :raises dnf.exceptions.Error in case no transactions were found """ tids = set(tids) last = self.history.last() if last is None: logger.critical(_('No transactions')) raise dnf.exceptions.Error(_('Failed history info')) lasttid = last.tid lastdbv = last.end_rpmdb_version transactions = [] if not tids: last = self.history.last(complete_transactions_only=False) if last is not None: tids.add(last.tid) transactions.append(last) else: transactions = self.history.old(tids) if not tids: logger.critical(_('No transaction ID, or package, given')) raise dnf.exceptions.Error(_('Failed history info')) bmtid, emtid = -1, -1 mobj = None done = False if mtids: mtids = sorted(mtids) bmtid, emtid = mtids.pop() for trans in transactions: if lastdbv is not None and trans.tid == lasttid: # If this is the last transaction, is good and 
it doesn't # match the current rpmdb ... then mark it as bad. rpmdbv = self.sack._rpmdb_version() trans.compare_rpmdbv(str(rpmdbv)) lastdbv = None merged = False if trans.tid >= bmtid and trans.tid <= emtid: if mobj is None: mobj = MergedTransactionWrapper(trans) else: mobj.merge(trans) merged = True elif mobj is not None: if done: print("-" * 79) done = True self._historyInfoCmd(mobj) mobj = None if mtids: bmtid, emtid = mtids.pop() if trans.tid >= bmtid and trans.tid <= emtid: mobj = trans merged = True if not merged: if done: print("-" * 79) done = True self._historyInfoCmd(trans, pats) if mobj is not None: if done: print("-" * 79) self._historyInfoCmd(mobj) def _historyInfoCmd(self, old, pats=[]): loginuid = old.loginuid if isinstance(loginuid, int): loginuid = [loginuid] name = [self._pwd_ui_username(uid) for uid in loginuid] _pkg_states_installed = {'i' : _('Installed'), 'e' : _('Erased'), 'o' : _('Upgraded'), 'n' : _('Downgraded')} _pkg_states_available = {'i' : _('Installed'), 'e' : _('Not installed'), 'o' : _('Older'), 'n' : _('Newer')} maxlen = max([len(x) for x in (list(_pkg_states_installed.values()) + list(_pkg_states_available.values()))]) _pkg_states_installed['maxlen'] = maxlen _pkg_states_available['maxlen'] = maxlen def _simple_pkg(pkg, prefix_len, was_installed=False, highlight=False, pkg_max_len=0, show_repo=True): prefix = " " * prefix_len if was_installed: _pkg_states = _pkg_states_installed else: _pkg_states = _pkg_states_available state = _pkg_states['i'] # get installed packages with name = pkg.name ipkgs = self.sack.query().installed().filterm(name=pkg.name).run() if not ipkgs: state = _pkg_states['e'] else: # get latest installed package from software database inst_pkg = self.history.package(ipkgs[0]) if inst_pkg: res = pkg.compare(inst_pkg) # res is: # 0 if inst_pkg == pkg # > 0 when inst_pkg > pkg # < 0 when inst_pkg < pkg if res == 0: pass # installed elif res > 0: state = _pkg_states['o'] # updated else: state = _pkg_states['n'] # downgraded if highlight: (hibeg, hiend) = self._highlight('bold') else: (hibeg, hiend) = self._highlight('normal') state = fill_exact_width(state, _pkg_states['maxlen']) ui_repo = '' if show_repo: ui_repo = pkg.ui_from_repo() print("%s%s%s%s %-*s %s" % (prefix, hibeg, state, hiend, pkg_max_len, str(pkg), ui_repo)) tids = old.tids() if len(tids) > 1: print(_("Transaction ID :"), "%u..%u" % (tids[0], tids[-1])) else: print(_("Transaction ID :"), tids[0]) begt = float(old.beg_timestamp) begtm = time.strftime("%c", time.localtime(begt)) print(_("Begin time :"), begtm) if old.beg_rpmdb_version is not None: if old.altered_lt_rpmdb: print(_("Begin rpmdb :"), old.beg_rpmdb_version, "**") else: print(_("Begin rpmdb :"), old.beg_rpmdb_version) if old.end_timestamp is not None: endt = old.end_timestamp endtm = time.strftime("%c", time.localtime(endt)) diff = endt - begt if diff < 5 * 60: diff = _("(%u seconds)") % diff elif diff < 5 * 60 * 60: diff = _("(%u minutes)") % (diff // 60) elif diff < 5 * 60 * 60 * 24: diff = _("(%u hours)") % (diff // (60 * 60)) else: diff = _("(%u days)") % (diff // (60 * 60 * 24)) print(_("End time :"), endtm, diff) if old.end_rpmdb_version is not None: if old.altered_gt_rpmdb: print(_("End rpmdb :"), old.end_rpmdb_version, "**") else: print(_("End rpmdb :"), old.end_rpmdb_version) if isinstance(name, (list, tuple)): seen = set() for i in name: if i in seen: continue seen.add(i) print(_("User :"), i) else: print(_("User :"), name) if isinstance(old.return_code, (list, tuple)): codes = old.return_code if codes[0] 
is None: print(_("Return-Code :"), "**", _("Aborted"), "**") codes = codes[1:] elif not all(codes): print(_("Return-Code :"), _("Success")) elif codes: print(_("Return-Code :"), _("Failures:"), ", ".join([str(i) for i in codes])) elif old.return_code is None: print(_("Return-Code :"), "**", _("Aborted"), "**") elif old.return_code: print(_("Return-Code :"), _("Failure:"), old.return_code) else: print(_("Return-Code :"), _("Success")) if isinstance(old.releasever, (list, tuple)): seen = set() for i in old.releasever: if i in seen: continue seen.add(i) print(_("Releasever :"), i) else: print(_("Releasever :"), old.releasever) if old.cmdline is not None: if isinstance(old.cmdline, (list, tuple)): for cmdline in old.cmdline: print(_("Command Line :"), cmdline) else: print(_("Command Line :"), old.cmdline) if old.comment is not None: if isinstance(old.comment, (list, tuple)): for comment in old.comment: print(_("Comment :"), comment) else: print(_("Comment :"), old.comment) perf_with = old.performed_with() if perf_with: print(_("Transaction performed with:")) max_len = 0 for with_pkg in perf_with: str_len = len(str(with_pkg)) if str_len > max_len: max_len = str_len for with_pkg in perf_with: _simple_pkg(with_pkg, 4, was_installed=True, pkg_max_len=max_len) print(_("Packages Altered:")) self.historyInfoCmdPkgsAltered(old, pats) t_out = old.output() if t_out: print(_("Scriptlet output:")) num = 0 for line in t_out: num += 1 print("%4d" % num, line) t_err = old.error() if t_err: print(_("Errors:")) num = 0 for line in t_err: num += 1 print("%4d" % num, line) # TODO: remove _history_state2uistate = {'True-Install' : _('Install'), 'Install' : _('Install'), 'Dep-Install' : _('Dep-Install'), 'Obsoleted' : _('Obsoleted'), 'Obsoleting' : _('Obsoleting'), 'Erase' : _('Erase'), 'Reinstall' : _('Reinstall'), 'Downgrade' : _('Downgrade'), 'Downgraded' : _('Downgraded'), 'Update' : _('Upgrade'), 'Updated' : _('Upgraded'), } def historyInfoCmdPkgsAltered(self, old, pats=[]): """Print information about how packages are altered in a transaction. :param old: the :class:`DnfSwdbTrans` to print information about :param pats: a list of patterns. Packages that match a patten in *pats* will be highlighted in the output """ # Note that these don't use _simple_pkg() because we are showing what # happened to them in the transaction ... not the difference between the # version in the transaction and now. all_uistates = self._history_state2uistate maxlen = 0 pkg_max_len = 0 packages = old.packages() for pkg in packages: uistate = all_uistates.get(pkg.action_name, pkg.action_name) if maxlen < len(uistate): maxlen = len(uistate) pkg_len = len(str(pkg)) if pkg_max_len < pkg_len: pkg_max_len = pkg_len for pkg in packages: prefix = " " * 4 if pkg.state != libdnf.transaction.TransactionItemState_DONE: prefix = " ** " highlight = 'normal' if pats: if any([pkg.match(pat) for pat in pats]): highlight = 'bold' (hibeg, hiend) = self._highlight(highlight) uistate = all_uistates.get(pkg.action_name, pkg.action_name) uistate = fill_exact_width(ucd(uistate), maxlen) print("%s%s%s%s %-*s %s" % (prefix, hibeg, uistate, hiend, pkg_max_len, str(pkg), pkg.ui_from_repo())) class DepSolveProgressCallBack(dnf.callback.Depsolve): """Provides text output callback functions for Dependency Solver callback.""" def pkg_added(self, pkg, mode): """Print information about a package being added to the transaction set. 
:param pkgtup: tuple containing the package name, arch, version, and repository :param mode: a short string indicating why the package is being added to the transaction set. Valid current values for *mode* are:: i = the package will be installed u = the package will be an update e = the package will be erased r = the package will be reinstalled d = the package will be a downgrade o = the package will be obsoleting another package ud = the package will be updated od = the package will be obsoleted """ output = None if mode == 'i': output = _('---> Package %s.%s %s will be installed') elif mode == 'u': output = _('---> Package %s.%s %s will be an upgrade') elif mode == 'e': output = _('---> Package %s.%s %s will be erased') elif mode == 'r': output = _('---> Package %s.%s %s will be reinstalled') elif mode == 'd': output = _('---> Package %s.%s %s will be a downgrade') elif mode == 'o': output = _('---> Package %s.%s %s will be obsoleting') elif mode == 'ud': output = _('---> Package %s.%s %s will be upgraded') elif mode == 'od': output = _('---> Package %s.%s %s will be obsoleted') if output: logger.debug(output, pkg.name, pkg.arch, pkg.evr) def start(self): """Perform setup at the beginning of the dependency solving process. """ logger.debug(_('--> Starting dependency resolution')) def end(self): """Output a message stating that dependency resolution has finished.""" logger.debug(_('--> Finished dependency resolution')) class CliKeyImport(dnf.callback.KeyImport): def __init__(self, base, output): self.base = base self.output = output def _confirm(self, id, userid, fingerprint, url, timestamp): def short_id(id): rj = '0' if dnf.pycomp.PY3 else b'0' return id[-8:].rjust(8, rj) msg = (_('Importing GPG key 0x%s:\n' ' Userid : "%s"\n' ' Fingerprint: %s\n' ' From : %s') % (short_id(id), userid, dnf.crypto._printable_fingerprint(fingerprint), url.replace("file://", ""))) logger.critical("%s", msg) if self.base.conf.assumeyes: return True if self.base.conf.assumeno: return False return self.output.userconfirm() class CliTransactionDisplay(TransactionDisplay): """A YUM specific callback class for RPM operations.""" width = property(lambda self: dnf.cli.term._term_width()) def __init__(self): super(CliTransactionDisplay, self).__init__() self.lastmsg = "" self.lastpackage = None # name of last package we looked at self.output = True # for a progress bar self.mark = "=" self.marks = 22 def progress(self, package, action, ti_done, ti_total, ts_done, ts_total): """Output information about an rpm operation. This may include a text progress bar. :param package: the package involved in the event :param action: the type of action that is taking place. 
Valid values are given by :func:`rpmtrans.TransactionDisplay.action.keys()` :param ti_done: a number representing the amount of work already done in the current transaction :param ti_total: a number representing the total amount of work to be done in the current transaction :param ts_done: the number of the current transaction in transaction set :param ts_total: the total number of transactions in the transaction set """ action_str = dnf.transaction.ACTIONS.get(action) if action_str is None: return wid1 = self._max_action_width() pkgname = ucd(package) self.lastpackage = package if ti_total == 0: percent = 0 else: percent = (ti_done*long(100))//ti_total self._out_progress(ti_done, ti_total, ts_done, ts_total, percent, action_str, pkgname, wid1) def _max_action_width(self): if not hasattr(self, '_max_action_wid_cache'): wid1 = 0 for val in dnf.transaction.ACTIONS.values(): wid_val = exact_width(val) if wid1 < wid_val: wid1 = wid_val self._max_action_wid_cache = wid1 wid1 = self._max_action_wid_cache return wid1 def _out_progress(self, ti_done, ti_total, ts_done, ts_total, percent, process, pkgname, wid1): if self.output and (sys.stdout.isatty() or ti_done == ti_total): (fmt, wid1, wid2) = self._makefmt(percent, ts_done, ts_total, progress=sys.stdout.isatty(), pkgname=pkgname, wid1=wid1) pkgname = ucd(pkgname) msg = fmt % (fill_exact_width(process, wid1, wid1), fill_exact_width(pkgname, wid2, wid2)) if msg != self.lastmsg: dnf.util._terminal_messenger('write_flush', msg, sys.stdout) self.lastmsg = msg if ti_done == ti_total: print(" ") def _makefmt(self, percent, ts_done, ts_total, progress=True, pkgname=None, wid1=15): l = len(str(ts_total)) size = "%s.%s" % (l, l) fmt_done = "%" + size + "s/%" + size + "s" done = fmt_done % (ts_done, ts_total) # This should probably use TerminLine, but we don't want to dep. on # that. So we kind do an ok job by hand ... at least it's dynamic now. if pkgname is None: pnl = 22 else: pnl = exact_width(pkgname) overhead = (2 * l) + 2 # Length of done, above overhead += 2 + wid1 +2 # Length of beginning (" " action " :") overhead += 1 # Space between pn and done overhead += 2 # Ends for progress overhead += 1 # Space for end width = self.width if width < overhead: width = overhead # Give up width -= overhead if pnl > width // 2: pnl = width // 2 marks = self.width - (overhead + pnl) width = "%s.%s" % (marks, marks) fmt_bar = "[%-" + width + "s]" # pnl = str(28 + marks + 1) full_pnl = pnl + marks + 1 if progress and percent == 100: # Don't chop pkg name on 100% fmt = "\r %s: %s " + done wid2 = full_pnl elif progress: if marks > 5: bar = fmt_bar % (self.mark * int(marks * (percent / 100.0)), ) else: bar = "" fmt = "\r %s: %s " + bar + " " + done wid2 = pnl elif percent == 100: fmt = " %s: %s " + done wid2 = full_pnl else: if marks > 5: bar = fmt_bar % (self.mark * marks, ) else: bar = "" fmt = " %s: %s " + bar + " " + done wid2 = pnl return fmt, wid1, wid2 def progressbar(current, total, name=None): """Output the current status to the terminal using a simple text progress bar consisting of 50 # marks. 
    :param current: a number representing the amount of work
        already done
    :param total: a number representing the total amount of work
        to be done
    :param name: a name to label the progress bar with
    """
    mark = '#'
    if not sys.stdout.isatty():
        return

    if current == 0:
        percent = 0
    else:
        if total != 0:
            percent = float(current) / total
        else:
            percent = 0

    width = dnf.cli.term._term_width()

    if name is None and current == total:
        name = '-'

    end = ' %d/%d' % (current, total)
    width -= len(end) + 1
    if width < 0:
        width = 0
    if name is None:
        width -= 2
        if width < 0:
            width = 0
        hashbar = mark * int(width * percent)
        output = '\r[%-*s]%s' % (width, hashbar, end)
    elif current == total:  # Don't chop name on 100%
        output = '\r%s%s' % (fill_exact_width(name, width, width), end)
    else:
        width -= 4
        if width < 0:
            width = 0
        nwid = width // 2
        if nwid > exact_width(name):
            nwid = exact_width(name)
        width -= nwid
        hashbar = mark * int(width * percent)
        output = '\r%s: [%-*s]%s' % (fill_exact_width(name, nwid, nwid),
                                     width, hashbar, end)

    if current <= total:
        dnf.util._terminal_messenger('write', output, sys.stdout)

    if current == total:
        dnf.util._terminal_messenger('write', '\n', sys.stdout)

    dnf.util._terminal_messenger('flush', out=sys.stdout)

PK!r:cli/progress.pynu[
# Copyright (C) 2013-2016 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
from __future__ import unicode_literals from dnf.cli.format import format_number, format_time from dnf.cli.term import _term_width from dnf.pycomp import unicode from time import time import sys import dnf.callback import dnf.util class MultiFileProgressMeter(dnf.callback.DownloadProgress): """Multi-file download progress meter""" STATUS_2_STR = { dnf.callback.STATUS_FAILED: 'FAILED', dnf.callback.STATUS_ALREADY_EXISTS: 'SKIPPED', dnf.callback.STATUS_MIRROR: 'MIRROR', dnf.callback.STATUS_DRPM: 'DRPM', } def __init__(self, fo=sys.stderr, update_period=0.3, tick_period=1.0, rate_average=5.0): """Creates a new progress meter instance update_period -- how often to update the progress bar tick_period -- how fast to cycle through concurrent downloads rate_average -- time constant for average speed calculation """ self.fo = fo self.update_period = update_period self.tick_period = tick_period self.rate_average = rate_average self.unknown_progres = 0 self.total_drpm = 0 self.isatty = sys.stdout.isatty() self.done_drpm = 0 self.done_files = 0 self.done_size = 0 self.active = [] self.state = {} self.last_time = 0 self.last_size = 0 self.rate = None self.total_files = 0 self.total_size = 0 def message(self, msg): dnf.util._terminal_messenger('write_flush', msg, self.fo) def start(self, total_files, total_size, total_drpms=0): self.total_files = total_files self.total_size = total_size self.total_drpm = total_drpms # download state self.done_drpm = 0 self.done_files = 0 self.done_size = 0 self.active = [] self.state = {} # rate averaging self.last_time = 0 self.last_size = 0 self.rate = None def progress(self, payload, done): now = time() text = unicode(payload) total = int(payload.download_size) done = int(done) # update done_size if text not in self.state: self.state[text] = now, 0 self.active.append(text) start, old = self.state[text] self.state[text] = start, done self.done_size += done - old # update screen if enough time has elapsed if now - self.last_time > self.update_period: if total > self.total_size: self.total_size = total self._update(now) def _update(self, now): if self.last_time: delta_time = now - self.last_time delta_size = self.done_size - self.last_size if delta_time > 0 and delta_size > 0: # update the average rate rate = delta_size / delta_time if self.rate is not None: weight = min(delta_time/self.rate_average, 1) rate = rate*weight + self.rate*(1 - weight) self.rate = rate self.last_time = now self.last_size = self.done_size if not self.isatty: return # pick one of the active downloads text = self.active[int(now/self.tick_period) % len(self.active)] if self.total_files > 1: n = '%d' % (self.done_files + 1) if len(self.active) > 1: n += '-%d' % (self.done_files + len(self.active)) text = '(%s/%d): %s' % (n, self.total_files, text) # average rate, total done size, estimated remaining time if self.rate and self.total_size: time_eta = format_time((self.total_size - self.done_size) / self.rate) else: time_eta = '--:--' msg = ' %5sB/s | %5sB %9s ETA\r' % ( format_number(self.rate) if self.rate else '--- ', format_number(self.done_size), time_eta) left = _term_width() - len(msg) bl = (left - 7)//2 if bl > 8: # use part of the remaining space for progress bar if self.total_size: pct = self.done_size * 100 // self.total_size n, p = divmod(self.done_size * bl * 2 // self.total_size, 2) bar = '=' * n + '-' * p msg = '%3d%% [%-*s]%s' % (pct, bl, bar, msg) left -= bl + 7 else: n = self.unknown_progres - 3 p = 3 n = 0 if n < 0 else n bar = ' ' * n + '=' * p msg = ' [%-*s]%s' % (bl, bar, msg) 
left -= bl + 7 self.unknown_progres = self.unknown_progres + 3 if self.unknown_progres + 3 < bl \ else 0 self.message('%-*.*s%s' % (left, left, text, msg)) def end(self, payload, status, err_msg): start = now = time() text = unicode(payload) size = int(payload.download_size) done = 0 # update state if status == dnf.callback.STATUS_MIRROR: pass elif status == dnf.callback.STATUS_DRPM: self.done_drpm += 1 elif text in self.state: start, done = self.state.pop(text) self.active.remove(text) size -= done self.done_files += 1 self.done_size += size elif status == dnf.callback.STATUS_ALREADY_EXISTS: self.done_files += 1 self.done_size += size if status: # the error message, no trimming if status is dnf.callback.STATUS_DRPM and self.total_drpm > 1: msg = '[%s %d/%d] %s: ' % (self.STATUS_2_STR[status], self.done_drpm, self.total_drpm, text) else: msg = '[%s] %s: ' % (self.STATUS_2_STR[status], text) left = _term_width() - len(msg) - 1 msg = '%s%-*s\n' % (msg, left, err_msg) else: if self.total_files > 1: text = '(%d/%d): %s' % (self.done_files, self.total_files, text) # average rate, file size, download time tm = max(now - start, 0.001) msg = ' %5sB/s | %5sB %9s \n' % ( format_number(float(done) / tm), format_number(done), format_time(tm)) left = _term_width() - len(msg) msg = '%-*.*s%s' % (left, left, text, msg) self.message(msg) # now there's a blank line. fill it if possible. if self.active: self._update(now) PK!ckf9f9 cli/term.pynu[# Copyright (C) 2013-2014 Red Hat, Inc. # Terminal routines. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # from __future__ import absolute_import from __future__ import unicode_literals import curses import dnf.pycomp import fcntl import re import struct import sys import termios def _real_term_width(fd=1): """ Get the real terminal width """ try: buf = 'abcdefgh' buf = fcntl.ioctl(fd, termios.TIOCGWINSZ, buf) ret = struct.unpack(b'hhhh', buf)[1] return ret except IOError: return None def _term_width(fd=1): """ Compute terminal width falling to default 80 in case of trouble""" tw = _real_term_width(fd=1) if not tw: return 80 elif tw < 20: return 20 else: return tw class Term(object): """A class to provide some terminal "UI" helpers based on curses.""" # From initial search for "terminfo and python" got: # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/475116 # ...it's probably not copyrightable, but if so ASPN says: # # Except where otherwise noted, recipes in the Python Cookbook are # published under the Python license. 
__enabled = True real_columns = property(lambda self: _real_term_width()) columns = property(lambda self: _term_width()) __cap_names = { 'underline' : 'smul', 'reverse' : 'rev', 'normal' : 'sgr0', } __colors = { 'black' : 0, 'blue' : 1, 'green' : 2, 'cyan' : 3, 'red' : 4, 'magenta' : 5, 'yellow' : 6, 'white' : 7 } __ansi_colors = { 'black' : 0, 'red' : 1, 'green' : 2, 'yellow' : 3, 'blue' : 4, 'magenta' : 5, 'cyan' : 6, 'white' : 7 } __ansi_forced_MODE = { 'bold' : '\x1b[1m', 'blink' : '\x1b[5m', 'dim' : '', 'reverse' : '\x1b[7m', 'underline' : '\x1b[4m', 'normal' : '\x1b(B\x1b[m' } __ansi_forced_FG_COLOR = { 'black' : '\x1b[30m', 'red' : '\x1b[31m', 'green' : '\x1b[32m', 'yellow' : '\x1b[33m', 'blue' : '\x1b[34m', 'magenta' : '\x1b[35m', 'cyan' : '\x1b[36m', 'white' : '\x1b[37m' } __ansi_forced_BG_COLOR = { 'black' : '\x1b[40m', 'red' : '\x1b[41m', 'green' : '\x1b[42m', 'yellow' : '\x1b[43m', 'blue' : '\x1b[44m', 'magenta' : '\x1b[45m', 'cyan' : '\x1b[46m', 'white' : '\x1b[47m' } def __forced_init(self): self.MODE = self.__ansi_forced_MODE self.FG_COLOR = self.__ansi_forced_FG_COLOR self.BG_COLOR = self.__ansi_forced_BG_COLOR def reinit(self, term_stream=None, color='auto'): """Reinitializes the :class:`Term`. :param term_stream: the terminal stream that the :class:`Term` should be initialized to use. If *term_stream* is not given, :attr:`sys.stdout` is used. :param color: when to colorize output. Valid values are 'always', 'auto', and 'never'. 'always' will use ANSI codes to always colorize output, 'auto' will decide whether do colorize depending on the terminal, and 'never' will never colorize. """ self.__enabled = True self.lines = 24 if color == 'always': self.__forced_init() return # Output modes: self.MODE = { 'bold' : '', 'blink' : '', 'dim' : '', 'reverse' : '', 'underline' : '', 'normal' : '' } # Colours self.FG_COLOR = { 'black' : '', 'blue' : '', 'green' : '', 'cyan' : '', 'red' : '', 'magenta' : '', 'yellow' : '', 'white' : '' } self.BG_COLOR = { 'black' : '', 'blue' : '', 'green' : '', 'cyan' : '', 'red' : '', 'magenta' : '', 'yellow' : '', 'white' : '' } if color == 'never': self.__enabled = False return assert color == 'auto' # If the stream isn't a tty, then assume it has no capabilities. if not term_stream: term_stream = sys.stdout if not term_stream.isatty(): self.__enabled = False return # Check the terminal type. If we fail, then assume that the # terminal has no capabilities. try: curses.setupterm(fd=term_stream.fileno()) except Exception: self.__enabled = False return self._ctigetstr = curses.tigetstr self.lines = curses.tigetnum('lines') # Look up string capabilities. 
for cap_name in self.MODE: mode = cap_name if cap_name in self.__cap_names: cap_name = self.__cap_names[cap_name] self.MODE[mode] = self._tigetstr(cap_name) # Colors set_fg = self._tigetstr('setf').encode('utf-8') if set_fg: for (color, val) in self.__colors.items(): self.FG_COLOR[color] = curses.tparm(set_fg, val).decode() or '' set_fg_ansi = self._tigetstr('setaf').encode('utf-8') if set_fg_ansi: for (color, val) in self.__ansi_colors.items(): fg_color = curses.tparm(set_fg_ansi, val).decode() or '' self.FG_COLOR[color] = fg_color set_bg = self._tigetstr('setb').encode('utf-8') if set_bg: for (color, val) in self.__colors.items(): self.BG_COLOR[color] = curses.tparm(set_bg, val).decode() or '' set_bg_ansi = self._tigetstr('setab').encode('utf-8') if set_bg_ansi: for (color, val) in self.__ansi_colors.items(): bg_color = curses.tparm(set_bg_ansi, val).decode() or '' self.BG_COLOR[color] = bg_color def __init__(self, term_stream=None, color='auto'): self.reinit(term_stream, color) def _tigetstr(self, cap_name): # String capabilities can include "delays" of the form "$<2>". # For any modern terminal, we should be able to just ignore # these, so strip them out. cap = self._ctigetstr(cap_name) or '' if dnf.pycomp.is_py3bytes(cap): cap = cap.decode() return re.sub(r'\$<\d+>[/*]?', '', cap) def color(self, color, s): """Colorize string with color""" return (self.MODE[color] + str(s) + self.MODE['normal']) def bold(self, s): """Make string bold.""" return self.color('bold', s) def sub(self, haystack, beg, end, needles, escape=None, ignore_case=False): """Search the string *haystack* for all occurrences of any string in the list *needles*. Prefix each occurrence with *beg*, and postfix each occurrence with *end*, then return the modified string. For example:: >>> yt = Term() >>> yt.sub('spam and eggs', 'x', 'z', ['and']) 'spam xandz eggs' This is particularly useful for emphasizing certain words in output: for example, calling :func:`sub` with *beg* = MODE['bold'] and *end* = MODE['normal'] will return a string that when printed to the terminal will appear to be *haystack* with each occurrence of the strings in *needles* in bold face. Note, however, that the :func:`sub_mode`, :func:`sub_bold`, :func:`sub_fg`, and :func:`sub_bg` methods provide convenient ways to access this same emphasizing functionality. :param haystack: the string to be modified :param beg: the string to be prefixed onto matches :param end: the string to be postfixed onto matches :param needles: a list of strings to add the prefixes and postfixes to :param escape: a function that accepts a string and returns the same string with problematic characters escaped. By default, :func:`re.escape` is used. :param ignore_case: whether case should be ignored when searching for matches :return: *haystack* with *beg* prefixing, and *end* postfixing, occurrences of the strings in *needles* """ if not self.__enabled: return haystack if not escape: escape = re.escape render = lambda match: beg + match.group() + end for needle in needles: pat = escape(needle) if ignore_case: pat = re.template(pat, re.I) haystack = re.sub(pat, render, haystack) return haystack def sub_norm(self, haystack, beg, needles, **kwds): """Search the string *haystack* for all occurrences of any string in the list *needles*. Prefix each occurrence with *beg*, and postfix each occurrence with self.MODE['normal'], then return the modified string. 
If *beg* is an ANSI escape code, such as given by self.MODE['bold'], this method will return *haystack* with the formatting given by the code only applied to the strings in *needles*. :param haystack: the string to be modified :param beg: the string to be prefixed onto matches :param end: the string to be postfixed onto matches :param needles: a list of strings to add the prefixes and postfixes to :return: *haystack* with *beg* prefixing, and self.MODE['normal'] postfixing, occurrences of the strings in *needles* """ return self.sub(haystack, beg, self.MODE['normal'], needles, **kwds) def sub_mode(self, haystack, mode, needles, **kwds): """Search the string *haystack* for all occurrences of any string in the list *needles*. Prefix each occurrence with self.MODE[*mode*], and postfix each occurrence with self.MODE['normal'], then return the modified string. This will return a string that when printed to the terminal will appear to be *haystack* with each occurrence of the strings in *needles* in the given *mode*. :param haystack: the string to be modified :param mode: the mode to set the matches to be in. Valid values are given by self.MODE.keys(). :param needles: a list of strings to add the prefixes and postfixes to :return: *haystack* with self.MODE[*mode*] prefixing, and self.MODE['normal'] postfixing, occurrences of the strings in *needles* """ return self.sub_norm(haystack, self.MODE[mode], needles, **kwds) def sub_bold(self, haystack, needles, **kwds): """Search the string *haystack* for all occurrences of any string in the list *needles*. Prefix each occurrence with self.MODE['bold'], and postfix each occurrence with self.MODE['normal'], then return the modified string. This will return a string that when printed to the terminal will appear to be *haystack* with each occurrence of the strings in *needles* in bold face. :param haystack: the string to be modified :param needles: a list of strings to add the prefixes and postfixes to :return: *haystack* with self.MODE['bold'] prefixing, and self.MODE['normal'] postfixing, occurrences of the strings in *needles* """ return self.sub_mode(haystack, 'bold', needles, **kwds) def sub_fg(self, haystack, color, needles, **kwds): """Search the string *haystack* for all occurrences of any string in the list *needles*. Prefix each occurrence with self.FG_COLOR[*color*], and postfix each occurrence with self.MODE['normal'], then return the modified string. This will return a string that when printed to the terminal will appear to be *haystack* with each occurrence of the strings in *needles* in the given color. :param haystack: the string to be modified :param color: the color to set the matches to be in. Valid values are given by self.FG_COLOR.keys(). :param needles: a list of strings to add the prefixes and postfixes to :return: *haystack* with self.FG_COLOR[*color*] prefixing, and self.MODE['normal'] postfixing, occurrences of the strings in *needles* """ return self.sub_norm(haystack, self.FG_COLOR[color], needles, **kwds) def sub_bg(self, haystack, color, needles, **kwds): """Search the string *haystack* for all occurrences of any string in the list *needles*. Prefix each occurrence with self.BG_COLOR[*color*], and postfix each occurrence with self.MODE['normal'], then return the modified string. This will return a string that when printed to the terminal will appear to be *haystack* with each occurrence of the strings in *needles* highlighted in the given background color. 
:param haystack: the string to be modified :param color: the background color to set the matches to be in. Valid values are given by self.BG_COLOR.keys(). :param needles: a list of strings to add the prefixes and postfixes to :return: *haystack* with self.BG_COLOR[*color*] prefixing, and self.MODE['normal'] postfixing, occurrences of the strings in *needles* """ return self.sub_norm(haystack, self.BG_COLOR[color], needles, **kwds) PK!*,ϰ cli/utils.pynu[# Copyright (C) 2016 Red Hat, Inc. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Library General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. """Various utility functions, and a utility class.""" from __future__ import absolute_import from __future__ import unicode_literals from dnf.cli.format import format_number from dnf.i18n import _ import dnf.util import logging import os import time _USER_HZ = os.sysconf(os.sysconf_names['SC_CLK_TCK']) logger = logging.getLogger('dnf') def jiffies_to_seconds(jiffies): """Convert a number of jiffies to seconds. How many jiffies are in a second is system-dependent, e.g. 100 jiffies = 1 second is common. :param jiffies: a number of jiffies :return: the equivalent number of seconds """ return int(jiffies) / _USER_HZ def seconds_to_ui_time(seconds): """Return a human-readable string representation of the length of a time interval given in seconds. :param seconds: the length of the time interval in seconds :return: a human-readable string representation of the length of the time interval """ if seconds >= 60 * 60 * 24: return "%d day(s) %d:%02d:%02d" % (seconds // (60 * 60 * 24), (seconds // (60 * 60)) % 24, (seconds // 60) % 60, seconds % 60) if seconds >= 60 * 60: return "%d:%02d:%02d" % (seconds // (60 * 60), (seconds // 60) % 60, (seconds % 60)) return "%02d:%02d" % ((seconds // 60), seconds % 60) def get_process_info(pid): """Return info dict about a process.""" pid = int(pid) # Maybe true if /proc isn't mounted, or not Linux ... or something. 
if (not os.path.exists("/proc/%d/status" % pid) or not os.path.exists("/proc/stat") or not os.path.exists("/proc/%d/stat" % pid)): return ps = {} with open("/proc/%d/status" % pid) as status_file: for line in status_file: if line[-1] != '\n': continue data = line[:-1].split(':\t', 1) if len(data) < 2: continue data[1] = dnf.util.rtrim(data[1], ' kB') ps[data[0].strip().lower()] = data[1].strip() if 'vmrss' not in ps: return if 'vmsize' not in ps: return boot_time = None with open("/proc/stat") as stat_file: for line in stat_file: if line.startswith("btime "): boot_time = int(line[len("btime "):-1]) break if boot_time is None: return with open('/proc/%d/stat' % pid) as stat_file: ps_stat = stat_file.read().split() ps['start_time'] = boot_time + jiffies_to_seconds(ps_stat[21]) ps['state'] = {'R' : _('Running'), 'S' : _('Sleeping'), 'D' : _('Uninterruptible'), 'Z' : _('Zombie'), 'T' : _('Traced/Stopped') }.get(ps_stat[2], _('Unknown')) return ps def show_lock_owner(pid): """Output information about process holding a lock.""" ps = get_process_info(pid) if not ps: msg = _('Unable to find information about the locking process (PID %d)') logger.critical(msg, pid) return msg = _(' The application with PID %d is: %s') % (pid, ps['name']) logger.critical("%s", msg) logger.critical(_(" Memory : %5s RSS (%5sB VSZ)"), format_number(int(ps['vmrss']) * 1024), format_number(int(ps['vmsize']) * 1024)) ago = seconds_to_ui_time(int(time.time()) - ps['start_time']) logger.critical(_(' Started: %s - %s ago'), dnf.util.normalize_time(ps['start_time']), ago) logger.critical(_(' State : %s'), ps['state']) return PK!Q?.conf/__pycache__/__init__.cpython-36.opt-1.pycnu[3 ft`@spdZddlmZddlmZddlmZmZmZddlmZm Z m Z ddlm Z m Z ddlm Z mZmZeZdS) aL The configuration classes and routines in yum are splattered over too many places, hard to change and debug. The new structure here will replace that. Its goal is to: * accept configuration options from all three sources (the main config file, repo config files, command line switches) * handle all the logic of storing those and producing related values. * returning configuration values. * optionally: asserting no value is overridden once it has been applied somewhere (e.g. do not let a new repo be initialized with different global cache path than an already existing one). )absolute_import)unicode_literals) PRIO_DEFAULTPRIO_MAINCONFIGPRIO_AUTOMATICCONFIG)PRIO_REPOCONFIGPRIO_PLUGINDEFAULTPRIO_PLUGINCONFIG)PRIO_COMMANDLINE PRIO_RUNTIME) BaseConfigMainConfRepoConfN)__doc__Z __future__rrZdnf.conf.configrrrrrr r r r r rZConfrr/usr/lib/python3.6/__init__.py#s  PK!Q?(conf/__pycache__/__init__.cpython-36.pycnu[3 ft`@spdZddlmZddlmZddlmZmZmZddlmZm Z m Z ddlm Z m Z ddlm Z mZmZeZdS) aL The configuration classes and routines in yum are splattered over too many places, hard to change and debug. The new structure here will replace that. Its goal is to: * accept configuration options from all three sources (the main config file, repo config files, command line switches) * handle all the logic of storing those and producing related values. * returning configuration values. * optionally: asserting no value is overridden once it has been applied somewhere (e.g. do not let a new repo be initialized with different global cache path than an already existing one). 
[compiled bytecode omitted: conf/__pycache__/__init__.cpython-36.opt-1.pyc, __init__.cpython-36.pyc, config.cpython-36.opt-1.pyc, config.cpython-36.pyc, read.cpython-36.opt-1.pyc, read.cpython-36.pyc, substitutions.cpython-36.opt-1.pyc and substitutions.cpython-36.pyc -- unreadable byte-compiled duplicates of the conf/*.py sources that follow]
PK! conf/__init__.pynu[# conf.py # dnf configuration classes. # # Copyright (C) 2012-2016 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA.
Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # """ The configuration classes and routines in yum are splattered over too many places, hard to change and debug. The new structure here will replace that. Its goal is to: * accept configuration options from all three sources (the main config file, repo config files, command line switches) * handle all the logic of storing those and producing related values. * returning configuration values. * optionally: asserting no value is overridden once it has been applied somewhere (e.g. do not let a new repo be initialized with different global cache path than an already existing one). """ from __future__ import absolute_import from __future__ import unicode_literals from dnf.conf.config import PRIO_DEFAULT, PRIO_MAINCONFIG, PRIO_AUTOMATICCONFIG from dnf.conf.config import PRIO_REPOCONFIG, PRIO_PLUGINDEFAULT, PRIO_PLUGINCONFIG from dnf.conf.config import PRIO_COMMANDLINE, PRIO_RUNTIME from dnf.conf.config import BaseConfig, MainConf, RepoConf Conf = MainConf PK!EOOconf/config.pynu[# dnf configuration classes. # # Copyright (C) 2016-2017 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # from __future__ import absolute_import from __future__ import unicode_literals from dnf.yum import misc from dnf.i18n import ucd, _ from dnf.pycomp import basestring, urlparse import fnmatch import dnf.conf.substitutions import dnf.const import dnf.exceptions import dnf.pycomp import dnf.util import hawkey import logging import os import libdnf.conf import libdnf.repo import tempfile PRIO_EMPTY = libdnf.conf.Option.Priority_EMPTY PRIO_DEFAULT = libdnf.conf.Option.Priority_DEFAULT PRIO_MAINCONFIG = libdnf.conf.Option.Priority_MAINCONFIG PRIO_AUTOMATICCONFIG = libdnf.conf.Option.Priority_AUTOMATICCONFIG PRIO_REPOCONFIG = libdnf.conf.Option.Priority_REPOCONFIG PRIO_PLUGINDEFAULT = libdnf.conf.Option.Priority_PLUGINDEFAULT PRIO_PLUGINCONFIG = libdnf.conf.Option.Priority_PLUGINCONFIG PRIO_COMMANDLINE = libdnf.conf.Option.Priority_COMMANDLINE PRIO_RUNTIME = libdnf.conf.Option.Priority_RUNTIME logger = logging.getLogger('dnf') class BaseConfig(object): """Base class for storing configuration definitions. Subclass when creating your own definitions. 
""" def __init__(self, config=None, section=None, parser=None): self.__dict__["_config"] = config self._section = section def __getattr__(self, name): if "_config" not in self.__dict__: raise AttributeError("'{}' object has no attribute '{}'".format(self.__class__, name)) option = getattr(self._config, name) if option is None: return None try: value = option().getValue() except Exception as ex: return None if isinstance(value, str): return ucd(value) return value def __setattr__(self, name, value): option = getattr(self._config, name, None) if option is None: # unknown config option, store to BaseConfig only return super(BaseConfig, self).__setattr__(name, value) self._set_value(name, value, PRIO_RUNTIME) def __str__(self): out = [] out.append('[%s]' % self._section) if self._config: for optBind in self._config.optBinds(): try: value = optBind.second.getValueString() except RuntimeError: value = "" out.append('%s: %s' % (optBind.first, value)) return '\n'.join(out) def _has_option(self, name): method = getattr(self._config, name, None) return method is not None def _get_value(self, name): method = getattr(self._config, name, None) if method is None: return None return method().getValue() def _get_priority(self, name): method = getattr(self._config, name, None) if method is None: return None return method().getPriority() def _set_value(self, name, value, priority=PRIO_RUNTIME): """Set option's value if priority is equal or higher than current priority.""" method = getattr(self._config, name, None) if method is None: raise Exception("Option \"" + name + "\" does not exists") option = method() if value is None: try: option.set(priority, value) except Exception: pass else: try: if isinstance(value, list) or isinstance(value, tuple): option.set(priority, libdnf.conf.VectorString(value)) elif (isinstance(option, libdnf.conf.OptionBool) or isinstance(option, libdnf.conf.OptionChildBool) ) and isinstance(value, int): option.set(priority, bool(value)) else: option.set(priority, value) except RuntimeError as e: raise dnf.exceptions.ConfigError(_("Error parsing '%s': %s") % (value, str(e)), raw_error=str(e)) def _populate(self, parser, section, filename, priority=PRIO_DEFAULT): """Set option values from an INI file section.""" if parser.hasSection(section): for name in parser.options(section): value = parser.getSubstitutedValue(section, name) if not value or value == 'None': value = '' if hasattr(self._config, name): try: self._config.optBinds().at(name).newString(priority, value) except RuntimeError as e: logger.error(_('Invalid configuration value: %s=%s in %s; %s'), ucd(name), ucd(value), ucd(filename), str(e)) else: if name == 'arch' and hasattr(self, name): setattr(self, name, value) else: logger.debug( _('Unknown configuration option: %s = %s in %s'), ucd(name), ucd(value), ucd(filename)) def dump(self): # :api """Return a string representing the values of all the configuration options. """ output = ['[%s]' % self._section] if self._config: for optBind in self._config.optBinds(): # if not opt._is_runtimeonly(): try: output.append('%s = %s' % (optBind.first, optBind.second.getValueString())) except RuntimeError: pass return '\n'.join(output) + '\n' @staticmethod def write_raw_configfile(filename, section_id, substitutions, modify): # :api """ filename - name of config file (.conf or .repo) section_id - id of modified section (e.g. 
main, fedora, updates) substitutions - instance of base.conf.substitutions modify - dict of modified options """ parser = libdnf.conf.ConfigParser() parser.read(filename) # b/c repoids can have $values in them we need to map both ways to figure # out which one is which if not parser.hasSection(section_id): for sect in parser.getData(): if libdnf.conf.ConfigParser.substitute(sect, substitutions) == section_id: section_id = sect for name, value in modify.items(): if isinstance(value, list): value = ' '.join(value) parser.setValue(section_id, name, value) parser.write(filename, False) class MainConf(BaseConfig): # :api """Configuration option definitions for dnf.conf's [main] section.""" def __init__(self, section='main', parser=None): # pylint: disable=R0915 config = libdnf.conf.ConfigMain() super(MainConf, self).__init__(config, section, parser) self._set_value('pluginpath', [dnf.const.PLUGINPATH], PRIO_DEFAULT) self._set_value('pluginconfpath', [dnf.const.PLUGINCONFPATH], PRIO_DEFAULT) self.substitutions = dnf.conf.substitutions.Substitutions() self.arch = hawkey.detect_arch() self._config.system_cachedir().set(PRIO_DEFAULT, dnf.const.SYSTEM_CACHEDIR) # setup different cache and log for non-privileged users if dnf.util.am_i_root(): cachedir = dnf.const.SYSTEM_CACHEDIR logdir = '/var/log' else: try: cachedir = logdir = misc.getCacheDir() except (IOError, OSError) as e: msg = _('Could not set cachedir: {}').format(ucd(e)) raise dnf.exceptions.Error(msg) self._config.cachedir().set(PRIO_DEFAULT, cachedir) self._config.logdir().set(PRIO_DEFAULT, logdir) # track list of temporary files created self.tempfiles = [] def __del__(self): for file_name in self.tempfiles: os.unlink(file_name) @property def get_reposdir(self): # :api """Returns the value of reposdir""" myrepodir = None # put repo file into first reposdir which exists or create it for rdir in self._get_value('reposdir'): if os.path.exists(rdir): myrepodir = rdir break if not myrepodir: myrepodir = self._get_value('reposdir')[0] dnf.util.ensure_dir(myrepodir) return myrepodir def _check_remote_file(self, optname): """ In case the option value is a remote URL, download it to the temporary location and use this temporary file instead. """ prio = self._get_priority(optname) val = self._get_value(optname) if isinstance(val, basestring): location = urlparse.urlparse(val) if location[0] in ('file', ''): # just strip the file:// prefix self._set_value(optname, location.path, prio) else: downloader = libdnf.repo.Downloader() temp_fd, temp_path = tempfile.mkstemp(prefix='dnf-downloaded-config-') self.tempfiles.append(temp_path) try: downloader.downloadURL(None, val, temp_fd) except RuntimeError as e: raise dnf.exceptions.ConfigError( _('Configuration file URL "{}" could not be downloaded:\n' ' {}').format(val, str(e))) else: self._set_value(optname, temp_path, prio) finally: os.close(temp_fd) def _search_inside_installroot(self, optname): """ Return root used as prefix for option (installroot or "/"). When specified from commandline it returns value from conf.installroot """ installroot = self._get_value('installroot') if installroot == "/": return installroot prio = self._get_priority(optname) # don't modify paths specified on commandline if prio >= PRIO_COMMANDLINE: return installroot val = self._get_value(optname) # if it exists inside installroot use it (i.e. 
adjust configuration) # for lists any component counts if not isinstance(val, str): if any(os.path.exists(os.path.join(installroot, p.lstrip('/'))) for p in val): self._set_value( optname, libdnf.conf.VectorString([self._prepend_installroot_path(p) for p in val]), prio ) return installroot elif os.path.exists(os.path.join(installroot, val.lstrip('/'))): self._set_value(optname, self._prepend_installroot_path(val), prio) return installroot return "/" def prepend_installroot(self, optname): # :api prio = self._get_priority(optname) new_path = self._prepend_installroot_path(self._get_value(optname)) self._set_value(optname, new_path, prio) def _prepend_installroot_path(self, path): root_path = os.path.join(self._get_value('installroot'), path.lstrip('/')) return libdnf.conf.ConfigParser.substitute(root_path, self.substitutions) def _configure_from_options(self, opts): """Configure parts of CLI from the opts """ config_args = ['plugins', 'version', 'config_file_path', 'debuglevel', 'errorlevel', 'installroot', 'best', 'assumeyes', 'assumeno', 'clean_requirements_on_remove', 'gpgcheck', 'showdupesfromrepos', 'plugins', 'ip_resolve', 'rpmverbosity', 'disable_excludes', 'color', 'downloadonly', 'exclude', 'excludepkgs', 'skip_broken', 'tsflags', 'arch', 'basearch', 'ignorearch', 'cacheonly', 'comment'] for name in config_args: value = getattr(opts, name, None) if value is not None and value != []: if self._has_option(name): appendValue = False if self._config: try: appendValue = self._config.optBinds().at(name).getAddValue() except RuntimeError: # fails if option with "name" does not exist in _config (libdnf) pass if appendValue: add_priority = dnf.conf.PRIO_COMMANDLINE if add_priority < self._get_priority(name): add_priority = self._get_priority(name) for item in value: if item: self._set_value(name, self._get_value(name) + [item], add_priority) else: self._set_value(name, [], dnf.conf.PRIO_COMMANDLINE) else: self._set_value(name, value, dnf.conf.PRIO_COMMANDLINE) elif hasattr(self, name): setattr(self, name, value) else: logger.warning(_('Unknown configuration option: %s = %s'), ucd(name), ucd(value)) if getattr(opts, 'gpgcheck', None) is False: self._set_value("localpkg_gpgcheck", False, dnf.conf.PRIO_COMMANDLINE) if hasattr(opts, 'main_setopts'): # now set all the non-first-start opts from main from our setopts # pylint: disable=W0212 for name, values in opts.main_setopts.items(): for val in values: if hasattr(self._config, name): try: # values in main_setopts are strings, try to parse it using newString() self._config.optBinds().at(name).newString(PRIO_COMMANDLINE, val) except RuntimeError as e: raise dnf.exceptions.ConfigError( _("Error parsing --setopt with key '%s', value '%s': %s") % (name, val, str(e)), raw_error=str(e)) else: # if config option with "name" doesn't exist in _config, it could be defined # only in Python layer if hasattr(self, name): setattr(self, name, val) else: msg = _("Main config did not have a %s attr. 
before setopt") logger.warning(msg, name) def exclude_pkgs(self, pkgs): # :api name = "excludepkgs" if pkgs is not None and pkgs != []: if self._has_option(name): self._set_value(name, pkgs, dnf.conf.PRIO_COMMANDLINE) else: logger.warning(_('Unknown configuration option: %s = %s'), ucd(name), ucd(pkgs)) def _adjust_conf_options(self): """Adjust conf options interactions""" skip_broken_val = self._get_value('skip_broken') if skip_broken_val: self._set_value('strict', not skip_broken_val, self._get_priority('skip_broken')) @property def releasever(self): # :api return self.substitutions.get('releasever') @releasever.setter def releasever(self, val): # :api if val is None: self.substitutions.pop('releasever', None) return self.substitutions['releasever'] = str(val) @property def arch(self): # :api return self.substitutions.get('arch') @arch.setter def arch(self, val): # :api if val is None: self.substitutions.pop('arch', None) return if val not in dnf.rpm._BASEARCH_MAP.keys(): msg = _('Incorrect or unknown "{}": {}') raise dnf.exceptions.Error(msg.format("arch", val)) self.substitutions['arch'] = val self.basearch = dnf.rpm.basearch(val) @property def basearch(self): # :api return self.substitutions.get('basearch') @basearch.setter def basearch(self, val): # :api if val is None: self.substitutions.pop('basearch', None) return if val not in dnf.rpm._BASEARCH_MAP.values(): msg = _('Incorrect or unknown "{}": {}') raise dnf.exceptions.Error(msg.format("basearch", val)) self.substitutions['basearch'] = val def read(self, filename=None, priority=PRIO_DEFAULT): # :api if filename is None: filename = self._get_value('config_file_path') parser = libdnf.conf.ConfigParser() try: parser.read(filename) except RuntimeError as e: raise dnf.exceptions.ConfigError(_('Parsing file "%s" failed: %s') % (filename, e)) except IOError as e: logger.warning(e) self._populate(parser, self._section, filename, priority) # update to where we read the file from self._set_value('config_file_path', filename, priority) @property def verbose(self): return self._get_value('debuglevel') >= dnf.const.VERBOSE_LEVEL class RepoConf(BaseConfig): """Option definitions for repository INI file sections.""" def __init__(self, parent, section=None, parser=None): mainConfig = parent._config if parent else libdnf.conf.ConfigMain() super(RepoConf, self).__init__(libdnf.conf.ConfigRepo(mainConfig), section, parser) # Do not remove! Attribute is a reference holder. # Prevents premature removal of the mainConfig. The libdnf ConfigRepo points to it. self._mainConfigRefHolder = mainConfig if section: self._config.name().set(PRIO_DEFAULT, section) def _configure_from_options(self, opts): """Configure repos from the opts. """ if getattr(opts, 'gpgcheck', None) is False: for optname in ['gpgcheck', 'repo_gpgcheck']: self._set_value(optname, False, dnf.conf.PRIO_COMMANDLINE) repo_setopts = getattr(opts, 'repo_setopts', {}) for repoid, setopts in repo_setopts.items(): if not fnmatch.fnmatch(self._section, repoid): continue for name, values in setopts.items(): for val in values: if hasattr(self._config, name): try: # values in repo_setopts are strings, try to parse it using newString() self._config.optBinds().at(name).newString(PRIO_COMMANDLINE, val) except RuntimeError as e: raise dnf.exceptions.ConfigError( _("Error parsing --setopt with key '%s.%s', value '%s': %s") % (self._section, name, val, str(e)), raw_error=str(e)) else: msg = _("Repo %s did not have a %s attr. 
before setopt") logger.warning(msg, self._section, name) PK!9 conf/read.pynu[# read.py # Reading configuration from files. # # Copyright (C) 2014-2017 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # from __future__ import absolute_import from __future__ import unicode_literals from dnf.i18n import _, ucd import dnf.conf import libdnf.conf import dnf.exceptions import dnf.repo import glob import logging import os logger = logging.getLogger('dnf') class RepoReader(object): def __init__(self, conf, opts): self.conf = conf self.opts = opts def __iter__(self): # get the repos from the main yum.conf file for r in self._get_repos(self.conf.config_file_path): yield r # read .repo files from directories specified by conf.reposdir repo_configs = [] for reposdir in self.conf.reposdir: for path in glob.glob(os.path.join(reposdir, "*.repo")): repo_configs.append(path) # remove .conf suffix before calling the sort function # also split the path so the separators are not treated as ordinary characters repo_configs.sort(key=lambda x: dnf.util.split_path(x[:-5])) for repofn in repo_configs: try: for r in self._get_repos(repofn): yield r except dnf.exceptions.ConfigError: logger.warning(_("Warning: failed loading '%s', skipping."), repofn) def _build_repo(self, parser, id_, repofn): """Build a repository using the parsed data.""" substituted_id = libdnf.conf.ConfigParser.substitute(id_, self.conf.substitutions) # Check the repo.id against the valid chars invalid = dnf.repo.repo_id_invalid(substituted_id) if invalid is not None: if substituted_id != id_: msg = _("Bad id for repo: {} ({}), byte = {} {}").format(substituted_id, id_, substituted_id[invalid], invalid) else: msg = _("Bad id for repo: {}, byte = {} {}").format(id_, id_[invalid], invalid) raise dnf.exceptions.ConfigError(msg) repo = dnf.repo.Repo(substituted_id, self.conf) try: repo._populate(parser, id_, repofn, dnf.conf.PRIO_REPOCONFIG) except ValueError as e: if substituted_id != id_: msg = _("Repository '{}' ({}): Error parsing config: {}").format(substituted_id, id_, e) else: msg = _("Repository '{}': Error parsing config: {}").format(id_, e) raise dnf.exceptions.ConfigError(msg) # Ensure that the repo name is set if repo._get_priority('name') == dnf.conf.PRIO_DEFAULT: if substituted_id != id_: msg = _("Repository '{}' ({}) is missing name in configuration, using id.").format( substituted_id, id_) else: msg = _("Repository '{}' is missing name in configuration, using id.").format(id_) logger.warning(msg) repo.name = ucd(repo.name) repo._substitutions.update(self.conf.substitutions) repo.cfg = parser return repo def _get_repos(self, repofn): """Parse and yield all repositories 
from a config file.""" substs = self.conf.substitutions parser = libdnf.conf.ConfigParser() parser.setSubstitutions(substs) try: parser.read(repofn) except RuntimeError as e: raise dnf.exceptions.ConfigError(_('Parsing file "{}" failed: {}').format(repofn, e)) except IOError as e: logger.warning(e) # Check sections in the .repo file that was just slurped up for section in parser.getData(): if section == 'main': continue try: thisrepo = self._build_repo(parser, ucd(section), repofn) except (dnf.exceptions.RepoError, dnf.exceptions.ConfigError) as e: logger.warning(e) continue else: thisrepo.repofile = repofn thisrepo._configure_from_options(self.opts) yield thisrepo PK!"Fj j conf/substitutions.pynu[# substitutions.py # Config file substitutions. # # Copyright (C) 2012-2016 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # import logging import os import re from dnf.i18n import _ ENVIRONMENT_VARS_RE = re.compile(r'^DNF_VAR_[A-Za-z0-9_]+$') logger = logging.getLogger('dnf') class Substitutions(dict): # :api def __init__(self): super(Substitutions, self).__init__() self._update_from_env() def _update_from_env(self): numericvars = ['DNF%d' % num for num in range(0, 10)] for key, val in os.environ.items(): if ENVIRONMENT_VARS_RE.match(key): self[key[8:]] = val # remove "DNF_VAR_" prefix elif key in numericvars: self[key] = val def update_from_etc(self, installroot, varsdir=("/etc/yum/vars/", "/etc/dnf/vars/")): # :api for vars_path in varsdir: fsvars = [] try: dir_fsvars = os.path.join(installroot, vars_path.lstrip('/')) fsvars = os.listdir(dir_fsvars) except OSError: continue for fsvar in fsvars: filepath = os.path.join(dir_fsvars, fsvar) val = None if os.path.isfile(filepath): try: with open(filepath) as fp: val = fp.readline() if val and val[-1] == '\n': val = val[:-1] except (OSError, IOError, UnicodeDecodeError) as e: logger.warning(_("Error when parsing a variable from file '{0}': {1}").format(filepath, e)) continue if val is not None: self[fsvar] = val PK!} qq,db/__pycache__/__init__.cpython-36.opt-1.pycnu[3 ft` @sdS)Nrrr/usr/lib/python3.6/__init__.pysPK!} qq&db/__pycache__/__init__.cpython-36.pycnu[3 ft` @sdS)Nrrr/usr/lib/python3.6/__init__.pysPK!#G77)db/__pycache__/group.cpython-36.opt-1.pycnu[3 f<@sddlZddlZddlZddlZddlmZddlm Z ddl Z Gddde Z Gddde Z Gdd d e ZGd d d e ZdS) N)_)loggerc@sleZdZddZddZddZddZd d Zd d Zd dZ ddZ ddZ ddZ ddZ ddZdS) PersistorBasecCs"||_i|_i|_i|_i|_dS)N)history _installed_removed _upgraded _downgraded)selfrr /usr/lib/python3.6/group.py__init__ s zPersistorBase.__init__cCs(t|jt|jt|jt|jS)N)lenrrrr )r r r r __len__(szPersistorBase.__len__cCsi|_i|_i|_i|_dS)N)rrrr )r r r r 
[compiled bytecode omitted: the db/__pycache__/group.cpython-36.opt-1.pyc and group.cpython-36.pyc members that begin at the end of the previous line (PersistorBase, GroupPersistor, EnvironmentPersistor and RPMTransaction wrappers); the archive is truncated at this point]
cSsg|]}|jdkr|qS)src)r^)r9rPrrrr;sz3RPMTransaction._rpm_limitations..z+Will not install a source rpm package (%s).rN)rrr)rZ src_installsrrr_rpm_limitationss  zRPMTransaction._rpm_limitationscsfdd|DS)Ncsg|]}|jkr|qSr)r!)r9r)r!rrr;sz-RPMTransaction._get_items..r)rr!r)r!r _get_itemsszRPMTransaction._get_items)N)NN)N)N)N)NN)N)N)N)r4r5r6rr?rr`r0rarbrirlrqrsrtrwrrryr}rpropertyrrrrrrrrrXs*          2  rX)Zlibdnf.transactionrZdnf.db.historyrZdnf.transactionZdnf.exceptionsZdnf.i18nrZdnf.utilrrobjectrr7rRrXrrrrs  3<d?Z"d@S)ARPMTransactionItemWrappercCs||_||_dS)N)_swdb_item)selfswdbitemr/usr/lib/python3.6/history.py__init__#sz"RPMTransactionItemWrapper.__init__cCs|jjjS)N)r ZgetItemZtoStr)r rrr__str__(sz!RPMTransactionItemWrapper.__str__cCs |j|jkS)N)r )r otherrrr__lt__+sz RPMTransactionItemWrapper.__lt__cCs |j|jkS)N)r )r rrrr__eq__.sz RPMTransactionItemWrapper.__eq__cCs |jjS)N)r __hash__)r rrrr1sz"RPMTransactionItemWrapper.__hash__cCsdS)NTr)r patternrrrmatch4szRPMTransactionItemWrapper.matchcCs|jjdk S)N)r getRPMItem)r rrr is_package7sz$RPMTransactionItemWrapper.is_packagecCs|jjdk S)N)r getCompsGroupItem)r rrris_group:sz"RPMTransactionItemWrapper.is_groupcCs|jjdk S)N)r getCompsEnvironmentItem)r rrris_environment=sz(RPMTransactionItemWrapper.is_environmentcCs |jjS)N)r r)r rrr get_group@sz#RPMTransactionItemWrapper.get_groupcCs |jjS)N)r r)r rrrget_environmentCsz)RPMTransactionItemWrapper.get_environmentcCs|jjjS)N)r rZgetName)r rrrnameFszRPMTransactionItemWrapper.namecCs|jjjS)N)r rZgetEpoch)r rrrepochJszRPMTransactionItemWrapper.epochcCs|jjjS)N)r rZ getVersion)r rrrversionNsz!RPMTransactionItemWrapper.versioncCs|jjjS)N)r rZ getRelease)r rrrreleaseRsz!RPMTransactionItemWrapper.releasecCs|jjjS)N)r rZgetArch)r rrrarchVszRPMTransactionItemWrapper.archcCs*|jrdj|j|j|jSdj|j|jS)Nz{}:{}-{}z{}-{})r"formatr#r$)r rrrevrZszRPMTransactionItemWrapper.evrcCs|jjjS)N)r rZgetNEVRA)r rrrnevra`szRPMTransactionItemWrapper.nevracCs |jjS)N)r Z getAction)r rrractiondsz RPMTransactionItemWrapper.actioncCs|jj|dS)N)r Z setAction)r valuerrrr)hscCs |jjS)N)r Z getReason)r rrrreasonlsz RPMTransactionItemWrapper.reasoncCs |jj|S)N)r Z setReason)r r*rrrr+psc Cs$y |jjStk rdSXdS)N)r Z getActionNameAttributeError)r rrr action_namets z%RPMTransactionItemWrapper.action_namec Cs$y |jjStk rdSXdS)Nr,)r ZgetActionShortr-)r rrr action_short{s z&RPMTransactionItemWrapper.action_shortcCs |jjS)N)r getState)r rrrstateszRPMTransactionItemWrapper.statecCs|jj|dS)N)r setState)r r*rrrr1scCs |jjS)N)r getRepoid)r rrr from_reposz#RPMTransactionItemWrapper.from_repocCs|jjsdSd|jjS)Nr,@)r r3)r rrr ui_from_repos z&RPMTransactionItemWrapper.ui_from_repocCsdS)Nr)r rrr obsoletingsz$RPMTransactionItemWrapper.obsoletingcCs|jjj|S)N)r rpm get_reason)r rrrr9sz$RPMTransactionItemWrapper.get_reasoncCs|jjj|jS)N)r r8Z _swdb_ti_pkgr )r rrrpkgszRPMTransactionItemWrapper.pkgcCs|jjS)N)r:files)r rrrr;szRPMTransactionItemWrapper.filescCs|jS)N)r:)r rrr_activesz!RPMTransactionItemWrapper._activeN)#__name__ __module__ __qualname__rrrrrrrrrrr propertyr!r"r#r$r%r'r(r)setterr+r.r/r1r4r6r7r9r:r;r<rrrrr "sB                r c@seZdZdZdZddZeddZeddZedd Z ed d Z ed d Z eddZ eddZ eddZeddZeddZeddZeddZddZddZd d!Zd"d#Zd$d%Zd&d'Zd(S))TransactionWrapperFcCs ||_dS)N)_trans)r transrrrrszTransactionWrapper.__init__cCs |jjS)N)rCgetId)r rrrtidszTransactionWrapper.tidcCs |jjS)N)rCZ getCmdline)r rrrcmdlineszTransactionWrapper.cmdlinecCs |jjS)N)rCZ getReleasever)r rrr releaseverszTransactionWrapper.releasevercCs |jjS)N)rCZ getDtBegin)r rrr beg_timestampsz TransactionWrapper.beg_timestampcCs 
|jjS)N)rCZgetDtEnd)r rrr end_timestampsz TransactionWrapper.end_timestampcCs |jjS)N)rCgetRpmdbVersionBegin)r rrrbeg_rpmdb_versionsz$TransactionWrapper.beg_rpmdb_versioncCs |jjS)N)rCgetRpmdbVersionEnd)r rrrend_rpmdb_versionsz$TransactionWrapper.end_rpmdb_versioncCst|jjtjjkS)N)intrCr0libdnf transactionTransactionItemState_DONE)r rrr return_codeszTransactionWrapper.return_codecCs |jjS)N)rCZ getUserId)r rrrloginuidszTransactionWrapper.loginuidcCs|jS)N)packages)r rrrdataszTransactionWrapper.datacCs|jj}t|S)N)rCgetConsoleOutputbool)r outputrrr is_outputs zTransactionWrapper.is_outputcCs |jjS)N)rCZ getComment)r rrrcommentszTransactionWrapper.commentcCs |jjgS)N)rCrE)r rrrtidsszTransactionWrapper.tidscCsgS)Nr)r rrrperformed_withsz!TransactionWrapper.performed_withcsjj}fdd|DS)Ncsg|]}t|qSr)r ).0i)r rr sz/TransactionWrapper.packages..)rCZgetItems)r resultr)r rrUs zTransactionWrapper.packagescCsdd|jjDS)NcSsg|] }|dqS)rr)r^r_rrrr`sz-TransactionWrapper.output..)rCrW)r rrrrYszTransactionWrapper.outputcCsgS)Nr)r rrrerrorszTransactionWrapper.errorcCs|jj|k|_dS)N)rCrMaltered_gt_rpmdb)r Zrpmdbvrrrcompare_rpmdbvsz!TransactionWrapper.compare_rpmdbvN)r=r>r?altered_lt_rpmdbrcrr@rFrGrHrIrJrLrNrSrTrVrZr[r\r]rUrYrbrdrrrrrBs*            rBc@sheZdZddZddZeddZddZed d Zed d Z ed dZ eddZ ddZ dS)MergedTransactionWrappercCstjj|j|_dS)N)rPrQZMergedTransactionrC)r rDrrrrsz!MergedTransactionWrapper.__init__cCs|jj|jdS)N)rCmerge)r rDrrrrgszMergedTransactionWrapper.mergecCs |jjS)N)rCZ listUserIds)r rrrrTsz!MergedTransactionWrapper.loginuidcCs |jjS)N)rCZlistIds)r rrrr\szMergedTransactionWrapper.tidscCsdd|jjDS)NcSsg|]}t|tjjkqSr)rOrPrQrR)r^r_rrrr`sz8MergedTransactionWrapper.return_code..)rCZ listStates)r rrrrSsz$MergedTransactionWrapper.return_codecCs |jjS)N)rCZ listCmdlines)r rrrrGsz MergedTransactionWrapper.cmdlinecCs |jjS)N)rCZlistReleasevers)r rrrrH sz#MergedTransactionWrapper.releasevercCs |jjS)N)rCZ listComments)r rrrr[sz MergedTransactionWrapper.commentcCsdd|jjDS)NcSsg|] }|dqS)rr)r^r_rrrr`sz3MergedTransactionWrapper.output..)rCrW)r rrrrYszMergedTransactionWrapper.outputN) r=r>r?rrgr@rTr\rSrGrHr[rYrrrrrfs     rfc@seZdZd6ddZddZeddZedd Zed d Zed d Z eddZ ddZ ddZ eddZ ddZd7ddZd8ddZd d!Zd"d#Zd$d%Zd&d'Zd9d(d)Zd*d+Zd,d-Zd:d.d/Zd;d0d1Zd2d3Zd4d5ZdS)< SwdbInterfacer,cCs8t||_d|_d|_d|_d|_d|_||_g|_dS)N) strrH_rpm_group_envZ _addon_datar _db_dir_output)r Zdb_dirrHrrrrs zSwdbInterface.__init__cCs |jdS)N)close)r rrr__del__%szSwdbInterface.__del__cCs|jdkrt||_|jS)N)rjr)r rrrr8(s  zSwdbInterface.rpmcCs|jdkrt||_|jS)N)rkr)r rrrgroup.s  zSwdbInterface.groupcCs|jdkrt||_|jS)N)rlr)r rrrenv4s  zSwdbInterface.envcCstjj|jtjjjS)N)ospathjoinrmrPrQSwdbZdefaultDatabaseName)r rrrdbpath:szSwdbInterface.dbpathcCsZ|jsTytjj|j|_Wn.tk rH}ztt|WYdd}~XnX|jj|jS)z Lazy initialize Swdb object N) r rPrQrvrw RuntimeErrorrriinitTransaction)r Zexrrrr >s zSwdbInterface.swdbcCstjj||j}|jdS)N)rPrQZ Transformerrw transform)r Z input_dirZ transformerrrrrzKszSwdbInterface.transformc CsZy|`Wntk rYnXd|_d|_d|_|jrJ|jj|jjd|_g|_dS)N) _tidr-rjrkrlr ZcloseTransactionZ closeDatabasern)r rrrroOs  zSwdbInterface.closecCs |jjS)N)r ZgetPath)r rrrrt]szSwdbInterface.pathcCs |jjS)N)r Z resetDatabase)r rrrreset_dbaszSwdbInterface.reset_dbTcCs|jj}|sdSt|S)N)r ZgetLastTransactionrB)r complete_transactions_onlytrrrlastes zSwdbInterface.lastNrFcspgddD|jj}dd|D}rDfdd|D}xJt|D]>\}}|dkr`qN||d}|jj|jjkrNd|_d|_qNW|dddS) NcSsg|] }t|qSr)rO)r^r_rrrr`osz%SwdbInterface.old..cSsg|] }t|qSr)rB)r^r_rrrr`qscsg|]}|jkr|qSr)rF)r^r_)r\rrr`tsrrT)r ZlistTransactions enumeraterCrKrMrerc)r r\limitr}rar_rDZ prev_transr)r\roldms   
zSwdbInterface.oldcCst|jjS)N)rBr Z getCurrent)r rrr get_currentszSwdbInterface.get_currentcCsB|jj|}|j|}tjj}|jj||||}|jtjj |S)zSet reason for package) r8Z_pkg_to_swdb_rpm_itemreporPrQZ#TransactionItemAction_REASON_CHANGEr ZaddItemr2rR)r r:r+rpm_itemZrepoidr)Ztirrr set_reasons   zSwdbInterface.set_reasoncCs|jjt|S)zGet repository of package)r Z getRPMRepori)r r:rrrrszSwdbInterface.repocCs*|jjt|}|dkr|St||}|S)zGet package data for packageN)r ZgetRPMTransactionItemrir )r r:rarrr package_datas  zSwdbInterface.package_datac Csfy|jjWn YnX|jjttjtjt||p>dtt j |}|jj |j ||_ |S)Nr,)r ryZbeginTransactionrOcalendarZtimegmtimeZgmtimerirZ getloginuidZ setReleaseverrHr{)r Z rpmdb_versionZ using_pkgsZtsisrGr[rFrrrbegs zSwdbInterface.begcCsN|jj}|j|j|j|jp"d|j|j|j|j |j |j |S)Nr) r Z createRPMItemZsetNamer!ZsetEpochr"Z setVersionr#Z setReleaser$ZsetArchr%)r Zporrrrpkg_to_swdb_rpm_items     z"SwdbInterface.pkg_to_swdb_rpm_itemcCsDt|dsdS|sdSx(|jD]}t|}|jjd|fq WdS)Nr{r)hasattr splitlinesrrnappend)r msglinerrrlog_scriptlet_outputs z"SwdbInterface.log_scriptlet_outputcCst|dsdS|dkrFtjj}x&|jD]}|jtjjkr&tjj}Pq&Wx |jD]\}}|j j ||qNWg|_|j j t t j t||dS)Nr{)rrPrQZTransactionState_DONEr8r1ZTransactionItemState_ERRORZTransactionState_ERRORrnr ZaddConsoleOutputLineZendTransactionrOrri)r rNrSerrorsZtsiZfile_descriptorrrrrends   zSwdbInterface.endcCs |jj|S)z{ Search for history transactions which contain specified packages al. la. "yum list". Returns transaction ids. )r ZsearchTransactionsByRPM)r ZpatternsZ ignore_caserrrsearchszSwdbInterface.searchcCs8|jj|j|jd}|tjjkr$dS|tjjkr4dSdS)z)Returns True if package is user installedrTFr)r resolveRPMTransactionItemReasonr!r%rPrQTransactionItemReason_USERTransactionItemReason_UNKNOWN)r r:r+rrruser_installeds   zSwdbInterface.user_installedcCsF|r|jj|j|j|}n|jj|j|jd}|tjjkrBtjj}|S)a2Get reason of package before transaction being undone. If package is already installed in the system, keep his reason. 
:param pkg: package being installed :param first_trans: id of first transaction being undone :param rollback: True if transaction is performing a rollbackrr)r rr!r%rPrQrr)r r:Z first_transZrollbackrarrrget_erased_reasons  zSwdbInterface.get_erased_reason)r,)T)NrF)Nr,)r,NN)T)r=r>r?rrpr@r8rqrrrwr rzrortr|rrrrrrrrrrrrrrrrrrhs0              rh)rrsrZlibdnf.transactionrPZ libdnf.utilsZdnf.i18nrZdnf.yumrZdnf.exceptionsrrqrrrobjectr rBrfrhrrrrs   M"PK!9wMFF%db/__pycache__/history.cpython-36.pycnu[3 ft`%:@sddlZddlZddlZddlZddlZddlmZddlm Z ddl m Z ddl m Z mZmZGdddeZGd d d eZGd d d eZGd ddeZdS)N)ucd)misc) DatabaseError)GroupPersistorEnvironmentPersistorRPMTransactionc@sjeZdZddZddZddZddZd d Zd d Zd dZ ddZ ddZ ddZ ddZ eddZeddZeddZeddZedd Zed!d"Zed#d$Zed%d&Zejd'd&Zed(d)Zejd*d)Zed+d,Zed-d.Zed/d0Zejd1d0Zed2d3Zd4d5Zed6d7Zd8d9Zed:d;Z edd?Z"d@S)ARPMTransactionItemWrappercCs|dk s t||_||_dS)N)AssertionError_swdb_item)selfswdbitemr/usr/lib/python3.6/history.py__init__#s z"RPMTransactionItemWrapper.__init__cCs|jjjS)N)r ZgetItemZtoStr)r rrr__str__(sz!RPMTransactionItemWrapper.__str__cCs |j|jkS)N)r )r otherrrr__lt__+sz RPMTransactionItemWrapper.__lt__cCs |j|jkS)N)r )r rrrr__eq__.sz RPMTransactionItemWrapper.__eq__cCs |jjS)N)r __hash__)r rrrr1sz"RPMTransactionItemWrapper.__hash__cCsdS)NTr)r patternrrrmatch4szRPMTransactionItemWrapper.matchcCs|jjdk S)N)r getRPMItem)r rrr is_package7sz$RPMTransactionItemWrapper.is_packagecCs|jjdk S)N)r getCompsGroupItem)r rrris_group:sz"RPMTransactionItemWrapper.is_groupcCs|jjdk S)N)r getCompsEnvironmentItem)r rrris_environment=sz(RPMTransactionItemWrapper.is_environmentcCs |jjS)N)r r)r rrr get_group@sz#RPMTransactionItemWrapper.get_groupcCs |jjS)N)r r)r rrrget_environmentCsz)RPMTransactionItemWrapper.get_environmentcCs|jjjS)N)r rZgetName)r rrrnameFszRPMTransactionItemWrapper.namecCs|jjjS)N)r rZgetEpoch)r rrrepochJszRPMTransactionItemWrapper.epochcCs|jjjS)N)r rZ getVersion)r rrrversionNsz!RPMTransactionItemWrapper.versioncCs|jjjS)N)r rZ getRelease)r rrrreleaseRsz!RPMTransactionItemWrapper.releasecCs|jjjS)N)r rZgetArch)r rrrarchVszRPMTransactionItemWrapper.archcCs*|jrdj|j|j|jSdj|j|jS)Nz{}:{}-{}z{}-{})r#formatr$r%)r rrrevrZszRPMTransactionItemWrapper.evrcCs|jjjS)N)r rZgetNEVRA)r rrrnevra`szRPMTransactionItemWrapper.nevracCs |jjS)N)r Z getAction)r rrractiondsz RPMTransactionItemWrapper.actioncCs|jj|dS)N)r Z setAction)r valuerrrr*hscCs |jjS)N)r Z getReason)r rrrreasonlsz RPMTransactionItemWrapper.reasoncCs |jj|S)N)r Z setReason)r r+rrrr,psc Cs$y |jjStk rdSXdS)N)r Z getActionNameAttributeError)r rrr action_namets z%RPMTransactionItemWrapper.action_namec Cs$y |jjStk rdSXdS)Nr-)r ZgetActionShortr.)r rrr action_short{s z&RPMTransactionItemWrapper.action_shortcCs |jjS)N)r getState)r rrrstateszRPMTransactionItemWrapper.statecCs|jj|dS)N)r setState)r r+rrrr2scCs |jjS)N)r getRepoid)r rrr from_reposz#RPMTransactionItemWrapper.from_repocCs|jjsdSd|jjS)Nr-@)r r4)r rrr ui_from_repos z&RPMTransactionItemWrapper.ui_from_repocCsdS)Nr)r rrr obsoletingsz$RPMTransactionItemWrapper.obsoletingcCs|jjj|S)N)r rpm get_reason)r rrrr:sz$RPMTransactionItemWrapper.get_reasoncCs|jjj|jS)N)r r9Z _swdb_ti_pkgr )r rrrpkgszRPMTransactionItemWrapper.pkgcCs|jjS)N)r;files)r rrrr<szRPMTransactionItemWrapper.filescCs|jS)N)r;)r rrr_activesz!RPMTransactionItemWrapper._activeN)#__name__ __module__ __qualname__rrrrrrrrrr r!propertyr"r#r$r%r&r(r)r*setterr,r/r0r2r5r7r8r:r;r<r=rrrrr "sB                r c@seZdZdZdZddZeddZeddZedd Z ed d Z ed d Z eddZ eddZ eddZeddZeddZeddZeddZddZddZd d!Zd"d#Zd$d%Zd&d'Zd(S))TransactionWrapperFcCs 
||_dS)N)_trans)r transrrrrszTransactionWrapper.__init__cCs |jjS)N)rDgetId)r rrrtidszTransactionWrapper.tidcCs |jjS)N)rDZ getCmdline)r rrrcmdlineszTransactionWrapper.cmdlinecCs |jjS)N)rDZ getReleasever)r rrr releaseverszTransactionWrapper.releasevercCs |jjS)N)rDZ getDtBegin)r rrr beg_timestampsz TransactionWrapper.beg_timestampcCs |jjS)N)rDZgetDtEnd)r rrr end_timestampsz TransactionWrapper.end_timestampcCs |jjS)N)rDgetRpmdbVersionBegin)r rrrbeg_rpmdb_versionsz$TransactionWrapper.beg_rpmdb_versioncCs |jjS)N)rDgetRpmdbVersionEnd)r rrrend_rpmdb_versionsz$TransactionWrapper.end_rpmdb_versioncCst|jjtjjkS)N)intrDr1libdnf transactionTransactionItemState_DONE)r rrr return_codeszTransactionWrapper.return_codecCs |jjS)N)rDZ getUserId)r rrrloginuidszTransactionWrapper.loginuidcCs|jS)N)packages)r rrrdataszTransactionWrapper.datacCs|jj}t|S)N)rDgetConsoleOutputbool)r outputrrr is_outputs zTransactionWrapper.is_outputcCs |jjS)N)rDZ getComment)r rrrcommentszTransactionWrapper.commentcCs |jjgS)N)rDrF)r rrrtidsszTransactionWrapper.tidscCsgS)Nr)r rrrperformed_withsz!TransactionWrapper.performed_withcsjj}fdd|DS)Ncsg|]}t|qSr)r ).0i)r rr sz/TransactionWrapper.packages..)rDZgetItems)r resultr)r rrVs zTransactionWrapper.packagescCsdd|jjDS)NcSsg|] }|dqS)rr)r_r`rrrrasz-TransactionWrapper.output..)rDrX)r rrrrZszTransactionWrapper.outputcCsgS)Nr)r rrrerrorszTransactionWrapper.errorcCs|jj|k|_dS)N)rDrNaltered_gt_rpmdb)r Zrpmdbvrrrcompare_rpmdbvsz!TransactionWrapper.compare_rpmdbvN)r>r?r@altered_lt_rpmdbrdrrArGrHrIrJrKrMrOrTrUrWr[r\r]r^rVrZrcrerrrrrCs*            rCc@sheZdZddZddZeddZddZed d Zed d Z ed dZ eddZ ddZ dS)MergedTransactionWrappercCstjj|j|_dS)N)rQrRZMergedTransactionrD)r rErrrrsz!MergedTransactionWrapper.__init__cCs|jj|jdS)N)rDmerge)r rErrrrhszMergedTransactionWrapper.mergecCs |jjS)N)rDZ listUserIds)r rrrrUsz!MergedTransactionWrapper.loginuidcCs |jjS)N)rDZlistIds)r rrrr]szMergedTransactionWrapper.tidscCsdd|jjDS)NcSsg|]}t|tjjkqSr)rPrQrRrS)r_r`rrrrasz8MergedTransactionWrapper.return_code..)rDZ listStates)r rrrrTsz$MergedTransactionWrapper.return_codecCs |jjS)N)rDZ listCmdlines)r rrrrHsz MergedTransactionWrapper.cmdlinecCs |jjS)N)rDZlistReleasevers)r rrrrI sz#MergedTransactionWrapper.releasevercCs |jjS)N)rDZ listComments)r rrrr\sz MergedTransactionWrapper.commentcCsdd|jjDS)NcSsg|] }|dqS)rr)r_r`rrrrasz3MergedTransactionWrapper.output..)rDrX)r rrrrZszMergedTransactionWrapper.outputN) r>r?r@rrhrArUr]rTrHrIr\rZrrrrrgs     rgc@seZdZd6ddZddZeddZedd Zed d Zed d Z eddZ ddZ ddZ eddZ ddZd7ddZd8ddZd d!Zd"d#Zd$d%Zd&d'Zd9d(d)Zd*d+Zd,d-Zd:d.d/Zd;d0d1Zd2d3Zd4d5ZdS)< SwdbInterfacer-cCs8t||_d|_d|_d|_d|_d|_||_g|_dS)N) strrI_rpm_group_envZ _addon_datar _db_dir_output)r Zdb_dirrIrrrrs zSwdbInterface.__init__cCs |jdS)N)close)r rrr__del__%szSwdbInterface.__del__cCs|jdkrt||_|jS)N)rkr)r rrrr9(s  zSwdbInterface.rpmcCs|jdkrt||_|jS)N)rlr)r rrrgroup.s  zSwdbInterface.groupcCs|jdkrt||_|jS)N)rmr)r rrrenv4s  zSwdbInterface.envcCstjj|jtjjjS)N)ospathjoinrnrQrRSwdbZdefaultDatabaseName)r rrrdbpath:szSwdbInterface.dbpathcCsZ|jsTytjj|j|_Wn.tk rH}ztt|WYdd}~XnX|jj|jS)z Lazy initialize Swdb object N) r rQrRrwrx RuntimeErrorrrjinitTransaction)r Zexrrrr>s zSwdbInterface.swdbcCstjj||j}|jdS)N)rQrRZ Transformerrx transform)r Z input_dirZ transformerrrrr{KszSwdbInterface.transformc CsZy|`Wntk rYnXd|_d|_d|_|jrJ|jj|jjd|_g|_dS)N) _tidr.rkrlrmr ZcloseTransactionZ closeDatabasero)r rrrrpOs  zSwdbInterface.closecCs |jjS)N)rZgetPath)r rrrru]szSwdbInterface.pathcCs |jjS)N)rZ resetDatabase)r 
rrrreset_dbaszSwdbInterface.reset_dbTcCs|jj}|sdSt|S)N)rZgetLastTransactionrC)r complete_transactions_onlytrrrlastes zSwdbInterface.lastNrFcspgddD|jj}dd|D}rDfdd|D}xJt|D]>\}}|dkr`qN||d}|jj|jjkrNd|_d|_qNW|dddS) NcSsg|] }t|qSr)rP)r_r`rrrraosz%SwdbInterface.old..cSsg|] }t|qSr)rC)r_r`rrrraqscsg|]}|jkr|qSr)rG)r_r`)r]rrratsrrT)rZlistTransactions enumeraterDrLrNrfrd)r r]limitr~rbr`rEZ prev_transr)r]roldms   zSwdbInterface.oldcCst|jjS)N)rCrZ getCurrent)r rrr get_currentszSwdbInterface.get_currentcCsB|jj|}|j|}tjj}|jj||||}|jtjj |S)zSet reason for package) r9Z_pkg_to_swdb_rpm_itemreporQrRZ#TransactionItemAction_REASON_CHANGErZaddItemr3rS)r r;r,rpm_itemZrepoidr*Ztirrr set_reasons   zSwdbInterface.set_reasoncCs|jjt|S)zGet repository of package)rZ getRPMReporj)r r;rrrrszSwdbInterface.repocCs*|jjt|}|dkr|St||}|S)zGet package data for packageN)rZgetRPMTransactionItemrjr )r r;rbrrr package_datas  zSwdbInterface.package_datac Csfy|jjWn YnX|jjttjtjt||p>dtt j |}|jj |j ||_ |S)Nr-)rrzZbeginTransactionrPcalendarZtimegmtimeZgmtimerjrZ getloginuidZ setReleaseverrIr|)r Z rpmdb_versionZ using_pkgsZtsisrHr\rGrrrbegs zSwdbInterface.begcCsN|jj}|j|j|j|jp"d|j|j|j|j |j |j |S)Nr) rZ createRPMItemZsetNamer"ZsetEpochr#Z setVersionr$Z setReleaser%ZsetArchr&)r Zporrrrpkg_to_swdb_rpm_items     z"SwdbInterface.pkg_to_swdb_rpm_itemcCsDt|dsdS|sdSx(|jD]}t|}|jjd|fq WdS)Nr|r)hasattr splitlinesrroappend)r msglinerrrlog_scriptlet_outputs z"SwdbInterface.log_scriptlet_outputcCst|dsdS|dkrFtjj}x&|jD]}|jtjjkr&tjj}Pq&Wx |jD]\}}|j j ||qNWg|_|j j t t j t||dS)Nr|)rrQrRZTransactionState_DONEr9r2ZTransactionItemState_ERRORZTransactionState_ERRORrorZaddConsoleOutputLineZendTransactionrPrrj)r rOrTerrorsZtsiZfile_descriptorrrrrends   zSwdbInterface.endcCs |jj|S)z{ Search for history transactions which contain specified packages al. la. "yum list". Returns transaction ids. )rZsearchTransactionsByRPM)r ZpatternsZ ignore_caserrrsearchszSwdbInterface.searchcCs8|jj|j|jd}|tjjkr$dS|tjjkr4dSdS)z)Returns True if package is user installedrTFr)rresolveRPMTransactionItemReasonr"r&rQrRTransactionItemReason_USERTransactionItemReason_UNKNOWN)r r;r,rrruser_installeds   zSwdbInterface.user_installedcCsF|r|jj|j|j|}n|jj|j|jd}|tjjkrBtjj}|S)a2Get reason of package before transaction being undone. If package is already installed in the system, keep his reason. :param pkg: package being installed :param first_trans: id of first transaction being undone :param rollback: True if transaction is performing a rollbackrr)rrr"r&rQrRrr)r r;Z first_transZrollbackrbrrrget_erased_reasons  zSwdbInterface.get_erased_reason)r-)T)NrF)Nr-)r-NN)T)r>r?r@rrqrAr9rrrsrxrr{rprur}rrrrrrrrrrrrrrrrrris0              ri)rrtrZlibdnf.transactionrQZ libdnf.utilsZdnf.i18nrZdnf.yumrZdnf.exceptionsrrrrrrobjectr rCrgrirrrrs   M"PK!㦆  db/__init__.pynu[# Copyright (C) 2017 Red Hat, Inc. # # DNF database subpackage # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Library General Public License for more details. 
# # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. PK!'<< db/group.pynu[# -*- coding: utf-8 -*- # Copyright (C) 2017-2018 Red Hat, Inc. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Library General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # import libdnf.transaction import dnf.db.history import dnf.transaction import dnf.exceptions from dnf.i18n import _ from dnf.util import logger import rpm class PersistorBase(object): def __init__(self, history): assert isinstance(history, dnf.db.history.SwdbInterface), str(type(history)) self.history = history self._installed = {} self._removed = {} self._upgraded = {} self._downgraded = {} def __len__(self): return len(self._installed) + len(self._removed) + len(self._upgraded) + len(self._downgraded) def clean(self): self._installed = {} self._removed = {} self._upgraded = {} self._downgraded = {} def _get_obj_id(self, obj): raise NotImplementedError def _add_to_history(self, item, action): ti = self.history.swdb.addItem(item, "", action, libdnf.transaction.TransactionItemReason_USER) ti.setState(libdnf.transaction.TransactionItemState_DONE) def install(self, obj): self._installed[self._get_obj_id(obj)] = obj self._add_to_history(obj, libdnf.transaction.TransactionItemAction_INSTALL) def remove(self, obj): self._removed[self._get_obj_id(obj)] = obj self._add_to_history(obj, libdnf.transaction.TransactionItemAction_REMOVE) def upgrade(self, obj): self._upgraded[self._get_obj_id(obj)] = obj self._add_to_history(obj, libdnf.transaction.TransactionItemAction_UPGRADE) def downgrade(self, obj): self._downgraded[self._get_obj_id(obj)] = obj self._add_to_history(obj, libdnf.transaction.TransactionItemAction_DOWNGRADE) def new(self, obj_id, name, translated_name, pkg_types): raise NotImplementedError def get(self, obj_id): raise NotImplementedError def search_by_pattern(self, pattern): raise NotImplementedError class GroupPersistor(PersistorBase): def __iter__(self): items = self.history.swdb.getItems() items = [i for i in items if i.getCompsGroupItem()] return iter(items) def _get_obj_id(self, obj): return obj.getGroupId() def new(self, obj_id, name, translated_name, pkg_types): swdb_group = self.history.swdb.createCompsGroupItem() swdb_group.setGroupId(obj_id) if name is not None: swdb_group.setName(name) if translated_name is not None: swdb_group.setTranslatedName(translated_name) swdb_group.setPackageTypes(pkg_types) return swdb_group def get(self, obj_id): swdb_group = self.history.swdb.getCompsGroupItem(obj_id) if not swdb_group: return None swdb_group = swdb_group.getCompsGroupItem() return swdb_group def search_by_pattern(self, pattern): return self.history.swdb.getCompsGroupItemsByPattern(pattern) def get_package_groups(self, pkg_name): return self.history.swdb.getPackageCompsGroups(pkg_name) 
def is_removable_pkg(self, pkg_name): # for group removal and autoremove reason = self.history.swdb.resolveRPMTransactionItemReason(pkg_name, "", -2) if reason != libdnf.transaction.TransactionItemReason_GROUP: return False # TODO: implement lastTransId == -2 in libdnf package_groups = set(self.get_package_groups(pkg_name)) for group_id, group in self._removed.items(): for pkg in group.getPackages(): if pkg.getName() != pkg_name: continue if not pkg.getInstalled(): continue package_groups.remove(group_id) for group_id, group in self._installed.items(): for pkg in group.getPackages(): if pkg.getName() != pkg_name: continue if not pkg.getInstalled(): continue package_groups.add(group_id) if package_groups: return False return True class EnvironmentPersistor(PersistorBase): def __iter__(self): items = self.history.swdb.getItems() items = [i for i in items if i.getCompsEnvironmentItem()] return iter(items) def _get_obj_id(self, obj): return obj.getEnvironmentId() def new(self, obj_id, name, translated_name, pkg_types): swdb_env = self.history.swdb.createCompsEnvironmentItem() swdb_env.setEnvironmentId(obj_id) if name is not None: swdb_env.setName(name) if translated_name is not None: swdb_env.setTranslatedName(translated_name) swdb_env.setPackageTypes(pkg_types) return swdb_env def get(self, obj_id): swdb_env = self.history.swdb.getCompsEnvironmentItem(obj_id) if not swdb_env: return None swdb_env = swdb_env.getCompsEnvironmentItem() return swdb_env def search_by_pattern(self, pattern): return self.history.swdb.getCompsEnvironmentItemsByPattern(pattern) def get_group_environments(self, group_id): return self.history.swdb.getCompsGroupEnvironments(group_id) def is_removable_group(self, group_id): # for environment removal swdb_group = self.history.group.get(group_id) if not swdb_group: return False # TODO: implement lastTransId == -2 in libdnf group_environments = set(self.get_group_environments(group_id)) for env_id, env in self._removed.items(): for group in env.getGroups(): if group.getGroupId() != group_id: continue if not group.getInstalled(): continue group_environments.remove(env_id) for env_id, env in self._installed.items(): for group in env.getGroups(): if group.getGroupId() != group_id: continue if not group.getInstalled(): continue group_environments.add(env_id) if group_environments: return False return True class RPMTransaction(object): def __init__(self, history, transaction=None): self.history = history self.transaction = transaction if not self.transaction: try: self.history.swdb.initTransaction() except: pass self._swdb_ti_pkg = {} # TODO: close trans if needed def __iter__(self): # :api if self.transaction: items = self.transaction.getItems() else: items = self.history.swdb.getItems() items = [dnf.db.history.RPMTransactionItemWrapper(self.history, i) for i in items if i.getRPMItem()] return iter(items) def __len__(self): if self.transaction: items = self.transaction.getItems() else: items = self.history.swdb.getItems() items = [dnf.db.history.RPMTransactionItemWrapper(self.history, i) for i in items if i.getRPMItem()] return len(items) def _pkg_to_swdb_rpm_item(self, pkg): rpm_item = self.history.swdb.createRPMItem() rpm_item.setName(pkg.name) rpm_item.setEpoch(pkg.epoch or 0) rpm_item.setVersion(pkg.version) rpm_item.setRelease(pkg.release) rpm_item.setArch(pkg.arch) return rpm_item def new(self, pkg, action, reason=None, replaced_by=None): rpm_item = self._pkg_to_swdb_rpm_item(pkg) repoid = self.get_repoid(pkg) if reason is None: reason = self.get_reason(pkg) result = 
self.history.swdb.addItem(rpm_item, repoid, action, reason) if replaced_by: result.addReplacedBy(replaced_by) self._swdb_ti_pkg[result] = pkg return result def get_repoid(self, pkg): result = getattr(pkg, "_force_swdb_repoid", None) if result: return result return pkg.reponame def get_reason(self, pkg): """Get reason for package""" return self.history.swdb.resolveRPMTransactionItemReason(pkg.name, pkg.arch, -1) def get_reason_name(self, pkg): """Get reason for package""" return libdnf.transaction.TransactionItemReasonToString(self.get_reason(pkg)) def _add_obsoleted(self, obsoleted, replaced_by=None): obsoleted = obsoleted or [] for obs in obsoleted: ti = self.new(obs, libdnf.transaction.TransactionItemAction_OBSOLETED) if replaced_by: ti.addReplacedBy(replaced_by) def add_downgrade(self, new, old, obsoleted=None): ti_new = self.new(new, libdnf.transaction.TransactionItemAction_DOWNGRADE) ti_old = self.new(old, libdnf.transaction.TransactionItemAction_DOWNGRADED, replaced_by=ti_new) self._add_obsoleted(obsoleted, replaced_by=ti_new) def add_erase(self, old, reason=None): self.add_remove(old, reason) def add_install(self, new, obsoleted=None, reason=None): if reason is None: reason = libdnf.transaction.TransactionItemReason_USER ti_new = self.new(new, libdnf.transaction.TransactionItemAction_INSTALL, reason) self._add_obsoleted(obsoleted, replaced_by=ti_new) def add_reinstall(self, new, old, obsoleted=None): ti_new = self.new(new, libdnf.transaction.TransactionItemAction_REINSTALL) ti_old = self.new(old, libdnf.transaction.TransactionItemAction_REINSTALLED, replaced_by=ti_new) self._add_obsoleted(obsoleted, replaced_by=ti_new) def add_remove(self, old, reason=None): reason = reason or libdnf.transaction.TransactionItemReason_USER ti_old = self.new(old, libdnf.transaction.TransactionItemAction_REMOVE, reason) def add_upgrade(self, new, old, obsoleted=None): ti_new = self.new(new, libdnf.transaction.TransactionItemAction_UPGRADE) ti_old = self.new(old, libdnf.transaction.TransactionItemAction_UPGRADED, replaced_by=ti_new) self._add_obsoleted(obsoleted, replaced_by=ti_new) def _test_fail_safe(self, hdr, pkg): if pkg._from_cmdline: return 0 if pkg.repo.module_hotfixes: return 0 try: if hdr['modularitylabel'] and not pkg._is_in_active_module(): logger.critical(_("No available modular metadata for modular package '{}', " "it cannot be installed on the system").format(pkg)) return 1 except ValueError: return 0 return 0 def _populate_rpm_ts(self, ts): """Populate the RPM transaction set.""" modular_problems = 0 for tsi in self: try: if tsi.action == libdnf.transaction.TransactionItemAction_DOWNGRADE: hdr = tsi.pkg._header modular_problems += self._test_fail_safe(hdr, tsi.pkg) ts.addInstall(hdr, tsi, 'u') elif tsi.action == libdnf.transaction.TransactionItemAction_DOWNGRADED: ts.addErase(tsi.pkg.idx) elif tsi.action == libdnf.transaction.TransactionItemAction_INSTALL: hdr = tsi.pkg._header modular_problems += self._test_fail_safe(hdr, tsi.pkg) ts.addInstall(hdr, tsi, 'i') elif tsi.action == libdnf.transaction.TransactionItemAction_OBSOLETE: hdr = tsi.pkg._header modular_problems += self._test_fail_safe(hdr, tsi.pkg) ts.addInstall(hdr, tsi, 'u') elif tsi.action == libdnf.transaction.TransactionItemAction_OBSOLETED: ts.addErase(tsi.pkg.idx) elif tsi.action == libdnf.transaction.TransactionItemAction_REINSTALL: # note: in rpm 4.12 there should not be set # rpm.RPMPROB_FILTER_REPLACEPKG to work hdr = tsi.pkg._header modular_problems += self._test_fail_safe(hdr, tsi.pkg) ts.addReinstall(hdr, tsi) elif 
tsi.action == libdnf.transaction.TransactionItemAction_REINSTALLED: # Required when multiple packages with the same NEVRA marked as installed ts.addErase(tsi.pkg.idx) elif tsi.action == libdnf.transaction.TransactionItemAction_REMOVE: ts.addErase(tsi.pkg.idx) elif tsi.action == libdnf.transaction.TransactionItemAction_UPGRADE: hdr = tsi.pkg._header modular_problems += self._test_fail_safe(hdr, tsi.pkg) ts.addInstall(hdr, tsi, 'u') elif tsi.action == libdnf.transaction.TransactionItemAction_UPGRADED: ts.addErase(tsi.pkg.idx) elif tsi.action == libdnf.transaction.TransactionItemAction_REASON_CHANGE: pass else: raise RuntimeError("TransactionItemAction not handled: %s" % tsi.action) except rpm.error as e: raise dnf.exceptions.Error(_("An rpm exception occurred: %s" % e)) if modular_problems: raise dnf.exceptions.Error(_("No available modular metadata for modular package")) return ts @property def install_set(self): # :api result = set() for tsi in self: if tsi.action in dnf.transaction.FORWARD_ACTIONS: try: result.add(tsi.pkg) except KeyError: raise RuntimeError("TransactionItem is has no RPM attached: %s" % tsi) return result @property def remove_set(self): # :api result = set() for tsi in self: if tsi.action in dnf.transaction.BACKWARD_ACTIONS + [libdnf.transaction.TransactionItemAction_REINSTALLED]: try: result.add(tsi.pkg) except KeyError: raise RuntimeError("TransactionItem is has no RPM attached: %s" % tsi) return result def _rpm_limitations(self): """ Ensures all the members can be passed to rpm as they are to perform the transaction. """ src_installs = [pkg for pkg in self.install_set if pkg.arch == 'src'] if len(src_installs): return _("Will not install a source rpm package (%s).") % \ src_installs[0] return None def _get_items(self, action): return [tsi for tsi in self if tsi.action == action] PK!CW"%:%: db/history.pynu[# -*- coding: utf-8 -*- # Copyright (C) 2009, 2012-2018 Red Hat, Inc. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Library General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# import calendar import os import time import libdnf.transaction import libdnf.utils from dnf.i18n import ucd from dnf.yum import misc from dnf.exceptions import DatabaseError from .group import GroupPersistor, EnvironmentPersistor, RPMTransaction class RPMTransactionItemWrapper(object): def __init__(self, swdb, item): assert item is not None self._swdb = swdb self._item = item def __str__(self): return self._item.getItem().toStr() def __lt__(self, other): return self._item < other._item def __eq__(self, other): return self._item == other._item def __hash__(self): return self._item.__hash__() def match(self, pattern): return True def is_package(self): return self._item.getRPMItem() is not None def is_group(self): return self._item.getCompsGroupItem() is not None def is_environment(self): return self._item.getCompsEnvironmentItem() is not None def get_group(self): return self._item.getCompsGroupItem() def get_environment(self): return self._item.getCompsEnvironmentItem() @property def name(self): return self._item.getRPMItem().getName() @property def epoch(self): return self._item.getRPMItem().getEpoch() @property def version(self): return self._item.getRPMItem().getVersion() @property def release(self): return self._item.getRPMItem().getRelease() @property def arch(self): return self._item.getRPMItem().getArch() @property def evr(self): if self.epoch: return "{}:{}-{}".format(self.epoch, self.version, self.release) return "{}-{}".format(self.version, self.release) @property def nevra(self): return self._item.getRPMItem().getNEVRA() @property def action(self): return self._item.getAction() @action.setter def action(self, value): self._item.setAction(value) @property def reason(self): return self._item.getReason() @reason.setter def reason(self, value): return self._item.setReason(value) @property def action_name(self): try: return self._item.getActionName() except AttributeError: return "" @property def action_short(self): try: return self._item.getActionShort() except AttributeError: return "" @property def state(self): return self._item.getState() @state.setter def state(self, value): self._item.setState(value) @property def from_repo(self): return self._item.getRepoid() def ui_from_repo(self): if not self._item.getRepoid(): return "" return "@" + self._item.getRepoid() @property def obsoleting(self): return None def get_reason(self): # TODO: get_history_reason return self._swdb.rpm.get_reason(self) @property def pkg(self): return self._swdb.rpm._swdb_ti_pkg[self._item] @property def files(self): return self.pkg.files @property def _active(self): return self.pkg class TransactionWrapper(object): altered_lt_rpmdb = False altered_gt_rpmdb = False def __init__(self, trans): self._trans = trans @property def tid(self): return self._trans.getId() @property def cmdline(self): return self._trans.getCmdline() @property def releasever(self): return self._trans.getReleasever() @property def beg_timestamp(self): return self._trans.getDtBegin() @property def end_timestamp(self): return self._trans.getDtEnd() @property def beg_rpmdb_version(self): return self._trans.getRpmdbVersionBegin() @property def end_rpmdb_version(self): return self._trans.getRpmdbVersionEnd() @property def return_code(self): return int(self._trans.getState() != libdnf.transaction.TransactionItemState_DONE) @property def loginuid(self): return self._trans.getUserId() @property def data(self): return self.packages @property def is_output(self): output = self._trans.getConsoleOutput() return bool(output) @property def 
comment(self): return self._trans.getComment() def tids(self): return [self._trans.getId()] def performed_with(self): return [] def packages(self): result = self._trans.getItems() return [RPMTransactionItemWrapper(self, i) for i in result] def output(self): return [i[1] for i in self._trans.getConsoleOutput()] def error(self): return [] def compare_rpmdbv(self, rpmdbv): self.altered_gt_rpmdb = self._trans.getRpmdbVersionEnd() != rpmdbv class MergedTransactionWrapper(TransactionWrapper): def __init__(self, trans): self._trans = libdnf.transaction.MergedTransaction(trans._trans) def merge(self, trans): self._trans.merge(trans._trans) @property def loginuid(self): return self._trans.listUserIds() def tids(self): return self._trans.listIds() @property def return_code(self): return [int(i != libdnf.transaction.TransactionItemState_DONE) for i in self._trans.listStates()] @property def cmdline(self): return self._trans.listCmdlines() @property def releasever(self): return self._trans.listReleasevers() @property def comment(self): return self._trans.listComments() def output(self): return [i[1] for i in self._trans.getConsoleOutput()] class SwdbInterface(object): def __init__(self, db_dir, releasever=""): # TODO: record all vars # TODO: remove relreasever from options self.releasever = str(releasever) self._rpm = None self._group = None self._env = None self._addon_data = None self._swdb = None self._db_dir = db_dir self._output = [] def __del__(self): self.close() @property def rpm(self): if self._rpm is None: self._rpm = RPMTransaction(self) return self._rpm @property def group(self): if self._group is None: self._group = GroupPersistor(self) return self._group @property def env(self): if self._env is None: self._env = EnvironmentPersistor(self) return self._env @property def dbpath(self): return os.path.join(self._db_dir, libdnf.transaction.Swdb.defaultDatabaseName) @property def swdb(self): """ Lazy initialize Swdb object """ if not self._swdb: # _db_dir == persistdir which is prepended with installroot already try: self._swdb = libdnf.transaction.Swdb(self.dbpath) except RuntimeError as ex: raise DatabaseError(str(ex)) self._swdb.initTransaction() # TODO: vars -> libdnf return self._swdb def transform(self, input_dir): transformer = libdnf.transaction.Transformer(input_dir, self.dbpath) transformer.transform() def close(self): try: del self._tid except AttributeError: pass self._rpm = None self._group = None self._env = None if self._swdb: self._swdb.closeTransaction() self._swdb.closeDatabase() self._swdb = None self._output = [] @property def path(self): return self.swdb.getPath() def reset_db(self): return self.swdb.resetDatabase() # TODO: rename to get_last_transaction? def last(self, complete_transactions_only=True): # TODO: complete_transactions_only t = self.swdb.getLastTransaction() if not t: return None return TransactionWrapper(t) # TODO: rename to: list_transactions? 
def old(self, tids=None, limit=0, complete_transactions_only=False): tids = tids or [] tids = [int(i) for i in tids] result = self.swdb.listTransactions() result = [TransactionWrapper(i) for i in result] # TODO: move to libdnf if tids: result = [i for i in result if i.tid in tids] # populate altered_lt_rpmdb and altered_gt_rpmdb for i, trans in enumerate(result): if i == 0: continue prev_trans = result[i-1] if trans._trans.getRpmdbVersionBegin() != prev_trans._trans.getRpmdbVersionEnd(): trans.altered_lt_rpmdb = True prev_trans.altered_gt_rpmdb = True return result[::-1] def get_current(self): return TransactionWrapper(self.swdb.getCurrent()) def set_reason(self, pkg, reason): """Set reason for package""" rpm_item = self.rpm._pkg_to_swdb_rpm_item(pkg) repoid = self.repo(pkg) action = libdnf.transaction.TransactionItemAction_REASON_CHANGE ti = self.swdb.addItem(rpm_item, repoid, action, reason) ti.setState(libdnf.transaction.TransactionItemState_DONE) return ti ''' def package(self, pkg): """Get SwdbPackage from package""" return self.swdb.package(str(pkg)) ''' def repo(self, pkg): """Get repository of package""" return self.swdb.getRPMRepo(str(pkg)) def package_data(self, pkg): """Get package data for package""" # trans item is returned result = self.swdb.getRPMTransactionItem(str(pkg)) if result is None: return result result = RPMTransactionItemWrapper(self, result) return result # def reason(self, pkg): # """Get reason for package""" # result = self.swdb.resolveRPMTransactionItemReason(pkg.name, pkg.arch, -1) # return result # TODO: rename to begin_transaction? def beg(self, rpmdb_version, using_pkgs, tsis, cmdline=None, comment=""): try: self.swdb.initTransaction() except: pass tid = self.swdb.beginTransaction( int(calendar.timegm(time.gmtime())), str(rpmdb_version), cmdline or "", int(misc.getloginuid()), comment) self.swdb.setReleasever(self.releasever) self._tid = tid return tid def pkg_to_swdb_rpm_item(self, po): rpm_item = self.swdb.createRPMItem() rpm_item.setName(po.name) rpm_item.setEpoch(po.epoch or 0) rpm_item.setVersion(po.version) rpm_item.setRelease(po.release) rpm_item.setArch(po.arch) return rpm_item def log_scriptlet_output(self, msg): if not hasattr(self, '_tid'): return if not msg: return for line in msg.splitlines(): line = ucd(line) # logging directly to database fails if transaction runs in a background process self._output.append((1, line)) ''' def _log_errors(self, errors): for error in errors: error = ucd(error) self.swdb.log_error(self._tid, error) ''' def end(self, end_rpmdb_version="", return_code=None, errors=None): if not hasattr(self, '_tid'): return # Failed at beg() time if return_code is None: # return_code/state auto-detection return_code = libdnf.transaction.TransactionState_DONE for tsi in self.rpm: if tsi.state == libdnf.transaction.TransactionItemState_ERROR: return_code = libdnf.transaction.TransactionState_ERROR break for file_descriptor, line in self._output: self.swdb.addConsoleOutputLine(file_descriptor, line) self._output = [] self.swdb.endTransaction( int(time.time()), str(end_rpmdb_version), return_code, ) # Closing and cleanup is done in the close() method. # It is important to keep data around after the transaction ends # because it's needed by plugins to report installed packages etc. # TODO: ignore_case, more patterns def search(self, patterns, ignore_case=True): """ Search for history transactions which contain specified packages al. la. "yum list". Returns transaction ids. 
""" return self.swdb.searchTransactionsByRPM(patterns) def user_installed(self, pkg): """Returns True if package is user installed""" reason = self.swdb.resolveRPMTransactionItemReason(pkg.name, pkg.arch, -1) if reason == libdnf.transaction.TransactionItemReason_USER: return True # if reason is not known, consider a package user-installed # because it was most likely installed via rpm if reason == libdnf.transaction.TransactionItemReason_UNKNOWN: return True return False def get_erased_reason(self, pkg, first_trans, rollback): """Get reason of package before transaction being undone. If package is already installed in the system, keep his reason. :param pkg: package being installed :param first_trans: id of first transaction being undone :param rollback: True if transaction is performing a rollback""" if rollback: # return the reason at the point of rollback; we're setting that reason result = self.swdb.resolveRPMTransactionItemReason(pkg.name, pkg.arch, first_trans) else: result = self.swdb.resolveRPMTransactionItemReason(pkg.name, pkg.arch, -1) # consider unknown reason as user-installed if result == libdnf.transaction.TransactionItemReason_UNKNOWN: result = libdnf.transaction.TransactionItemReason_USER return result PK!P<<0module/__pycache__/__init__.cpython-36.opt-1.pycnu[3 ft` @sPddlmZdZdZdZdZdZeedeedeed eed eed iZd S) )_z#Enabling different stream for '{}'.zNothing to show.z;Installing newer version of '{}' than specified. Reason: {}zEnabled modules: {}.z6No profile specified for '{}', please specify profile.N)Zdnf.i18nrZDIFFERENT_STREAM_INFOZNOTHING_TO_SHOWZINSTALLING_NEWER_VERSIONZENABLED_MODULESZNO_PROFILE_SPECIFIEDZmodule_messagesrr/usr/lib/python3.6/__init__.pys PK!P<<*module/__pycache__/__init__.cpython-36.pycnu[3 ft` @sPddlmZdZdZdZdZdZeedeedeed eed eed iZd S) )_z#Enabling different stream for '{}'.zNothing to show.z;Installing newer version of '{}' than specified. Reason: {}zEnabled modules: {}.z6No profile specified for '{}', please specify profile.N)Zdnf.i18nrZDIFFERENT_STREAM_INFOZNOTHING_TO_SHOWZINSTALLING_NEWER_VERSIONZENABLED_MODULESZNO_PROFILE_SPECIFIEDZmodule_messagesrr/usr/lib/python3.6/__init__.pys PK!2module/__pycache__/exceptions.cpython-36.opt-1.pycnu[3 ft`F@sddlZddlmZmZddlmZGdddejjZGdddejjZ Gdd d ejjZ Gd d d ejjZ Gd d d ejjZ GdddejjZ GdddejjZGdddejjZGdddejjZGdddejjZGdddejjZdS)N)module_messagesNO_PROFILE_SPECIFIED)_cseZdZfddZZS)NoModuleExceptioncs"tdj|}tt|j|dS)NzNo such module: {})rformatsuperr__init__)self module_specvalue) __class__ /usr/lib/python3.6/exceptions.pyrszNoModuleException.__init__)__name__ __module__ __qualname__r __classcell__r r )r rrsrcseZdZfddZZS)NoStreamExceptioncs"tdj|}tt|j|dS)NzNo such stream: {})rrrrr)r streamr )r r rr szNoStreamException.__init__)rrrrrr r )r rrsrcseZdZfddZZS)EnabledStreamExceptioncs"tdj|}tt|j|dS)Nz No enabled stream for module: {})rrrrr)r r r )r r rr&szEnabledStreamException.__init__)rrrrrr r )r rr%srcseZdZdfdd ZZS)EnableMultipleStreamsExceptionNcs*|dkrtdj|}tt|j|dS)NzsrcseZdZfddZZS)NoStreamSpecifiedExceptioncs"tdj|}tt|j|dS)Nz3No stream specified for '{}', please specify stream)rrrrr)r r r )r r rrEsz#NoStreamSpecifiedException.__init__)rrrrrr r )r rrDsrcseZdZfddZZS)NoProfileSpecifiedExceptioncs"ttj|}tt|j|dS)N)rrrrrr)r r r )r r rrKsz$NoProfileSpecifiedException.__init__)rrrrrr r )r rrJsrcseZdZfddZZS)NoProfilesExceptioncs"tdj|}tt|j|dS)Nz*No such profile: {}. 
No profiles available)rrrrr)r r r )r r rrQszNoProfilesException.__init__)rrrrrr r )r rrPsrcseZdZfddZZS)NoProfileToRemoveExceptioncs"tdj|}tt|j|dS)NzNo profile to remove for '{}')rrrrr)r r r )r r rrWsz#NoProfileToRemoveException.__init__)rrrrrr r )r rrVsr)ZdnfZ dnf.modulerrZdnf.i18nr exceptionsErrorrrrrrrrrrrrr r r rs PK!,module/__pycache__/exceptions.cpython-36.pycnu[3 ft`F@sddlZddlmZmZddlmZGdddejjZGdddejjZ Gdd d ejjZ Gd d d ejjZ Gd d d ejjZ GdddejjZ GdddejjZGdddejjZGdddejjZGdddejjZGdddejjZdS)N)module_messagesNO_PROFILE_SPECIFIED)_cseZdZfddZZS)NoModuleExceptioncs"tdj|}tt|j|dS)NzNo such module: {})rformatsuperr__init__)self module_specvalue) __class__ /usr/lib/python3.6/exceptions.pyrszNoModuleException.__init__)__name__ __module__ __qualname__r __classcell__r r )r rrsrcseZdZfddZZS)NoStreamExceptioncs"tdj|}tt|j|dS)NzNo such stream: {})rrrrr)r streamr )r r rr szNoStreamException.__init__)rrrrrr r )r rrsrcseZdZfddZZS)EnabledStreamExceptioncs"tdj|}tt|j|dS)Nz No enabled stream for module: {})rrrrr)r r r )r r rr&szEnabledStreamException.__init__)rrrrrr r )r rr%srcseZdZdfdd ZZS)EnableMultipleStreamsExceptionNcs*|dkrtdj|}tt|j|dS)NzsrcseZdZfddZZS)NoStreamSpecifiedExceptioncs"tdj|}tt|j|dS)Nz3No stream specified for '{}', please specify stream)rrrrr)r r r )r r rrEsz#NoStreamSpecifiedException.__init__)rrrrrr r )r rrDsrcseZdZfddZZS)NoProfileSpecifiedExceptioncs"ttj|}tt|j|dS)N)rrrrrr)r r r )r r rrKsz$NoProfileSpecifiedException.__init__)rrrrrr r )r rrJsrcseZdZfddZZS)NoProfilesExceptioncs"tdj|}tt|j|dS)Nz*No such profile: {}. No profiles available)rrrrr)r r r )r r rrQszNoProfilesException.__init__)rrrrrr r )r rrPsrcseZdZfddZZS)NoProfileToRemoveExceptioncs"tdj|}tt|j|dS)NzNo profile to remove for '{}')rrrrr)r r r )r r rrWsz#NoProfileToRemoveException.__init__)rrrrrr r )r rrVsr)ZdnfZ dnf.modulerrZdnf.i18nr exceptionsErrorrrrrrrrrrrrr r r rs PK!" 
[module/__pycache__/module_base.cpython-36.opt-1.pyc: binary compiled bytecode entry, contents omitted]
[module/__pycache__/module_base.cpython-36.pyc: binary compiled bytecode entry, contents omitted]
module/__init__.py:

# Copyright (C) 2017 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

from dnf.i18n import _

DIFFERENT_STREAM_INFO = 1
NOTHING_TO_SHOW = 2
INSTALLING_NEWER_VERSION = 4
ENABLED_MODULES = 5
NO_PROFILE_SPECIFIED = 6

module_messages = {
    DIFFERENT_STREAM_INFO: _("Enabling different stream for '{}'."),
    NOTHING_TO_SHOW: _("Nothing to show."),
    INSTALLING_NEWER_VERSION: _("Installing newer version of '{}' than specified. Reason: {}"),
    ENABLED_MODULES: _("Enabled modules: {}."),
    NO_PROFILE_SPECIFIED: _("No profile specified for '{}', please specify profile."),
}
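The constants above are only integer keys; module_messages maps each key to a translatable format string. A minimal usage sketch follows (editorial illustration, not part of the packaged sources; the module spec strings are made-up example input):

# Illustrative sketch: formatting one of the messages defined in dnf.module.
from dnf.module import module_messages, ENABLED_MODULES

def report_enabled(module_specs):
    # module_specs: iterable of "name:stream" strings, e.g. ["nodejs:12"]
    return module_messages[ENABLED_MODULES].format(", ".join(module_specs))

print(report_enabled(["nodejs:12", "postgresql:10"]))
# prints: Enabled modules: nodejs:12, postgresql:10.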
module/exceptions.py:

# supplies the 'module' command.
#
# Copyright (C) 2014-2017 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General
# Public License for more details.  You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.  Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#

import dnf
from dnf.module import module_messages, NO_PROFILE_SPECIFIED
from dnf.i18n import _


class NoModuleException(dnf.exceptions.Error):
    def __init__(self, module_spec):
        value = _("No such module: {}").format(module_spec)
        super(NoModuleException, self).__init__(value)


class NoStreamException(dnf.exceptions.Error):
    def __init__(self, stream):
        value = _("No such stream: {}").format(stream)
        super(NoStreamException, self).__init__(value)


class EnabledStreamException(dnf.exceptions.Error):
    def __init__(self, module_spec):
        value = _("No enabled stream for module: {}").format(module_spec)
        super(EnabledStreamException, self).__init__(value)


class EnableMultipleStreamsException(dnf.exceptions.Error):
    def __init__(self, module_spec, value=None):
        if value is None:
            value = _("Cannot enable more streams from module '{}' at the same time").format(module_spec)
        super(EnableMultipleStreamsException, self).__init__(value)


class DifferentStreamEnabledException(dnf.exceptions.Error):
    def __init__(self, module_spec):
        value = _("Different stream enabled for module: {}").format(module_spec)
        super(DifferentStreamEnabledException, self).__init__(value)


class NoProfileException(dnf.exceptions.Error):
    def __init__(self, profile):
        value = _("No such profile: {}").format(profile)
        super(NoProfileException, self).__init__(value)


class ProfileNotInstalledException(dnf.exceptions.Error):
    def __init__(self, module_spec):
        value = _("Specified profile not installed for {}").format(module_spec)
        super(ProfileNotInstalledException, self).__init__(value)


class NoStreamSpecifiedException(dnf.exceptions.Error):
    def __init__(self, module_spec):
        value = _("No stream specified for '{}', please specify stream").format(module_spec)
        super(NoStreamSpecifiedException, self).__init__(value)


class NoProfileSpecifiedException(dnf.exceptions.Error):
    def __init__(self, module_spec):
        value = module_messages[NO_PROFILE_SPECIFIED].format(module_spec)
        super(NoProfileSpecifiedException, self).__init__(value)


class NoProfilesException(dnf.exceptions.Error):
    def __init__(self, module_spec):
        value = _("No such profile: {}. No profiles available").format(module_spec)
        super(NoProfilesException, self).__init__(value)


class NoProfileToRemoveException(dnf.exceptions.Error):
    def __init__(self, module_spec):
        value = _("No profile to remove for '{}'").format(module_spec)
        super(NoProfileToRemoveException, self).__init__(value)


module/module_base.py:

# Copyright (C) 2017-2018 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
from collections import OrderedDict import hawkey import libdnf.smartcols import libdnf.module import dnf.selector import dnf.exceptions from dnf.module.exceptions import EnableMultipleStreamsException from dnf.util import logger from dnf.i18n import _, P_, ucd import functools STATE_DEFAULT = libdnf.module.ModulePackageContainer.ModuleState_DEFAULT STATE_ENABLED = libdnf.module.ModulePackageContainer.ModuleState_ENABLED STATE_DISABLED = libdnf.module.ModulePackageContainer.ModuleState_DISABLED STATE_UNKNOWN = libdnf.module.ModulePackageContainer.ModuleState_UNKNOWN MODULE_TABLE_HINT = _("\n\nHint: [d]efault, [e]nabled, [x]disabled, [i]nstalled") MODULE_INFO_TABLE_HINT = _("\n\nHint: [d]efault, [e]nabled, [x]disabled, [i]nstalled, [a]ctive") def _profile_comparison_key(profile): return profile.getName() class ModuleBase(object): # :api def __init__(self, base): # :api self.base = base def enable(self, module_specs): # :api no_match_specs, error_specs, solver_errors, module_dicts = \ self._resolve_specs_enable_update_sack(module_specs) for spec, (nsvcap, module_dict) in module_dicts.items(): if nsvcap.profile: logger.info(_("Ignoring unnecessary profile: '{}/{}'").format( nsvcap.name, nsvcap.profile)) if no_match_specs or error_specs or solver_errors: raise dnf.exceptions.MarkingErrors(no_match_group_specs=no_match_specs, error_group_specs=error_specs, module_depsolv_errors=solver_errors) def disable(self, module_specs): # :api no_match_specs, solver_errors = self._modules_reset_or_disable(module_specs, STATE_DISABLED) if no_match_specs or solver_errors: raise dnf.exceptions.MarkingErrors(no_match_group_specs=no_match_specs, module_depsolv_errors=solver_errors) def install(self, module_specs, strict=True): # :api no_match_specs, error_specs, solver_errors, module_dicts = \ self._resolve_specs_enable_update_sack(module_specs) # fail_safe_repo = hawkey.MODULE_FAIL_SAFE_REPO_NAME install_dict = {} install_set_artifacts = set() fail_safe_repo_used = False for spec, (nsvcap, moduledict) in module_dicts.items(): for name, streamdict in moduledict.items(): for stream, module_list in streamdict.items(): install_module_list = [x for x in module_list if self.base._moduleContainer.isModuleActive(x.getId())] if not install_module_list: logger.error(_("All matches for argument '{0}' in module '{1}:{2}' are not " "active").format(spec, name, stream)) error_specs.append(spec) continue profiles = [] latest_module = self._get_latest(install_module_list) if latest_module.getRepoID() == fail_safe_repo: msg = _( "Installing module '{0}' from Fail-Safe repository {1} is not allowed") logger.critical(msg.format(latest_module.getNameStream(), fail_safe_repo)) fail_safe_repo_used = True if nsvcap.profile: profiles.extend(latest_module.getProfiles(nsvcap.profile)) if not profiles: available_profiles = latest_module.getProfiles() if available_profiles: profile_names = ", ".join(sorted( [profile.getName() for profile in available_profiles])) msg = _("Unable to match profile for argument {}. Available " "profiles for '{}:{}': {}").format( spec, name, stream, profile_names) else: msg = _("Unable to match profile for argument {}").format(spec) logger.error(msg) no_match_specs.append(spec) continue else: profiles_strings = self.base._moduleContainer.getDefaultProfiles( name, stream) if not profiles_strings: available_profiles = latest_module.getProfiles() if available_profiles: profile_names = ", ".join(sorted( [profile.getName() for profile in available_profiles])) msg = _("No default profiles for module {}:{}. 
Available profiles" ": {}").format( name, stream, profile_names) else: msg = _("No profiles for module {}:{}").format(name, stream) logger.error(msg) error_specs.append(spec) for profile in set(profiles_strings): module_profiles = latest_module.getProfiles(profile) if not module_profiles: logger.error( _("Default profile {} not available in module {}:{}").format( profile, name, stream)) error_specs.append(spec) profiles.extend(module_profiles) for profile in profiles: self.base._moduleContainer.install(latest_module ,profile.getName()) for pkg_name in profile.getContent(): install_dict.setdefault(pkg_name, set()).add(spec) for module in install_module_list: install_set_artifacts.update(module.getArtifacts()) if fail_safe_repo_used: raise dnf.exceptions.Error(_( "Installing module from Fail-Safe repository is not allowed")) __, profiles_errors = self._install_profiles_internal( install_set_artifacts, install_dict, strict) if profiles_errors: error_specs.extend(profiles_errors) if no_match_specs or error_specs or solver_errors: raise dnf.exceptions.MarkingErrors(no_match_group_specs=no_match_specs, error_group_specs=error_specs, module_depsolv_errors=solver_errors) def switch_to(self, module_specs, strict=True): # :api no_match_specs, error_specs, module_dicts = self._resolve_specs_enable(module_specs) # collect name of artifacts from new modules for distrosync new_artifacts_names = set() # collect name of artifacts from active modules for distrosync before sack update active_artifacts_names = set() src_arches = {"nosrc", "src"} for spec, (nsvcap, moduledict) in module_dicts.items(): for name in moduledict.keys(): for module in self.base._moduleContainer.query(name, "", "", "", ""): if self.base._moduleContainer.isModuleActive(module): for artifact in module.getArtifacts(): arch = artifact.rsplit(".", 1)[1] if arch in src_arches: continue pkg_name = artifact.rsplit("-", 2)[0] active_artifacts_names.add(pkg_name) solver_errors = self._update_sack() dependency_error_spec = self._enable_dependencies(module_dicts) if dependency_error_spec: error_specs.extend(dependency_error_spec) # fail_safe_repo = hawkey.MODULE_FAIL_SAFE_REPO_NAME install_dict = {} install_set_artifacts = set() fail_safe_repo_used = False # list of name: [profiles] for module profiles being removed removed_profiles = self.base._moduleContainer.getRemovedProfiles() for spec, (nsvcap, moduledict) in module_dicts.items(): for name, streamdict in moduledict.items(): for stream, module_list in streamdict.items(): install_module_list = [x for x in module_list if self.base._moduleContainer.isModuleActive(x.getId())] if not install_module_list: "No active matches for argument '{0}' in module '{1}:{2}'" logger.error(_("No active matches for argument '{0}' in module " "'{1}:{2}'").format(spec, name, stream)) error_specs.append(spec) continue profiles = [] latest_module = self._get_latest(install_module_list) if latest_module.getRepoID() == fail_safe_repo: msg = _( "Installing module '{0}' from Fail-Safe repository {1} is not allowed") logger.critical(msg.format(latest_module.getNameStream(), fail_safe_repo)) fail_safe_repo_used = True if nsvcap.profile: profiles.extend(latest_module.getProfiles(nsvcap.profile)) if not profiles: available_profiles = latest_module.getProfiles() if available_profiles: profile_names = ", ".join(sorted( [profile.getName() for profile in available_profiles])) msg = _("Unable to match profile for argument {}. 
Available " "profiles for '{}:{}': {}").format( spec, name, stream, profile_names) else: msg = _("Unable to match profile for argument {}").format(spec) logger.error(msg) no_match_specs.append(spec) continue elif name in removed_profiles: for profile in removed_profiles[name]: module_profiles = latest_module.getProfiles(profile) if not module_profiles: logger.warning( _("Installed profile '{0}' is not available in module " "'{1}' stream '{2}'").format(profile, name, stream)) continue profiles.extend(module_profiles) for profile in profiles: self.base._moduleContainer.install(latest_module, profile.getName()) for pkg_name in profile.getContent(): install_dict.setdefault(pkg_name, set()).add(spec) for module in install_module_list: artifacts = module.getArtifacts() install_set_artifacts.update(artifacts) for artifact in artifacts: arch = artifact.rsplit(".", 1)[1] if arch in src_arches: continue pkg_name = artifact.rsplit("-", 2)[0] new_artifacts_names.add(pkg_name) if fail_safe_repo_used: raise dnf.exceptions.Error(_( "Installing module from Fail-Safe repository is not allowed")) install_base_query, profiles_errors = self._install_profiles_internal( install_set_artifacts, install_dict, strict) if profiles_errors: error_specs.extend(profiles_errors) # distrosync module name all_names = set() all_names.update(new_artifacts_names) all_names.update(active_artifacts_names) remove_query = self.base.sack.query().filterm(empty=True) base_no_source_query = self.base.sack.query().filterm(arch__neq=['src', 'nosrc']).apply() for pkg_name in all_names: query = base_no_source_query.filter(name=pkg_name) installed = query.installed() if not installed: continue available = query.available() if not available: logger.warning(_("No packages available to distrosync for package name " "'{}'").format(pkg_name)) if pkg_name not in new_artifacts_names: remove_query = remove_query.union(query) continue only_new_module = query.intersection(install_base_query) if only_new_module: query = only_new_module sltr = dnf.selector.Selector(self.base.sack) sltr.set(pkg=query) self.base._goal.distupgrade(select=sltr) self.base._remove_if_unneeded(remove_query) if no_match_specs or error_specs or solver_errors: raise dnf.exceptions.MarkingErrors(no_match_group_specs=no_match_specs, error_group_specs=error_specs, module_depsolv_errors=solver_errors) def reset(self, module_specs): # :api no_match_specs, solver_errors = self._modules_reset_or_disable(module_specs, STATE_UNKNOWN) if no_match_specs: raise dnf.exceptions.MarkingErrors(no_match_group_specs=no_match_specs, module_depsolv_errors=solver_errors) def upgrade(self, module_specs): # :api no_match_specs = [] fail_safe_repo = hawkey.MODULE_FAIL_SAFE_REPO_NAME fail_safe_repo_used = False # Remove source packages because they cannot be installed or upgraded base_no_source_query = self.base.sack.query().filterm(arch__neq=['src', 'nosrc']).apply() for spec in module_specs: module_list, nsvcap = self._get_modules(spec) if not module_list: no_match_specs.append(spec) continue update_module_list = [x for x in module_list if self.base._moduleContainer.isModuleActive(x.getId())] if not update_module_list: logger.error(_("Unable to resolve argument {}").format(spec)) continue module_dict = self._create_module_dict_and_enable(update_module_list, spec, False) upgrade_package_set = set() for name, streamdict in module_dict.items(): for stream, module_list_from_dict in streamdict.items(): upgrade_package_set.update(self._get_package_name_set_and_remove_profiles( module_list_from_dict, 
nsvcap)) latest_module = self._get_latest(module_list_from_dict) if latest_module.getRepoID() == fail_safe_repo: msg = _( "Upgrading module '{0}' from Fail-Safe repository {1} is not allowed") logger.critical(msg.format(latest_module.getNameStream(), fail_safe_repo)) fail_safe_repo_used = True if nsvcap.profile: profiles_set = latest_module.getProfiles(nsvcap.profile) if not profiles_set: continue for profile in profiles_set: upgrade_package_set.update(profile.getContent()) else: for profile in latest_module.getProfiles(): upgrade_package_set.update(profile.getContent()) for artifact in latest_module.getArtifacts(): subj = hawkey.Subject(artifact) for nevra_obj in subj.get_nevra_possibilities( forms=[hawkey.FORM_NEVRA]): upgrade_package_set.add(nevra_obj.name) if not upgrade_package_set: logger.error(_("Unable to match profile in argument {}").format(spec)) query = base_no_source_query.filter(name=upgrade_package_set) if query: sltr = dnf.selector.Selector(self.base.sack) sltr.set(pkg=query) self.base._goal.upgrade(select=sltr) if fail_safe_repo_used: raise dnf.exceptions.Error(_( "Upgrading module from Fail-Safe repository is not allowed")) return no_match_specs def remove(self, module_specs): # :api no_match_specs = [] remove_package_set = set() for spec in module_specs: module_list, nsvcap = self._get_modules(spec) if not module_list: no_match_specs.append(spec) continue module_dict = self._create_module_dict_and_enable(module_list, spec, False) remove_packages_names = [] for name, streamdict in module_dict.items(): for stream, module_list_from_dict in streamdict.items(): remove_packages_names.extend(self._get_package_name_set_and_remove_profiles( module_list_from_dict, nsvcap, True)) if not remove_packages_names: logger.error(_("Unable to match profile in argument {}").format(spec)) remove_package_set.update(remove_packages_names) if remove_package_set: keep_pkg_names = self.base._moduleContainer.getInstalledPkgNames() remove_package_set = remove_package_set.difference(keep_pkg_names) if remove_package_set: query = self.base.sack.query().installed().filterm(name=remove_package_set) if query: self.base._remove_if_unneeded(query) return no_match_specs def get_modules(self, module_spec): # :api return self._get_modules(module_spec) def _get_modules(self, module_spec): # used by ansible (lib/ansible/modules/packaging/os/dnf.py) subj = hawkey.Subject(module_spec) for nsvcap in subj.nsvcap_possibilities(): name = nsvcap.name if nsvcap.name else "" stream = nsvcap.stream if nsvcap.stream else "" version = "" context = nsvcap.context if nsvcap.context else "" arch = nsvcap.arch if nsvcap.arch else "" if nsvcap.version and nsvcap.version != -1: version = str(nsvcap.version) modules = self.base._moduleContainer.query(name, stream, version, context, arch) if modules: return modules, nsvcap return (), None def _get_latest(self, module_list): latest = None if module_list: latest = module_list[0] for module in module_list[1:]: if module.getVersionNum() > latest.getVersionNum(): latest = module return latest def _create_module_dict_and_enable(self, module_list, spec, enable=True): moduleDict = {} for module in module_list: moduleDict.setdefault( module.getName(), {}).setdefault(module.getStream(), []).append(module) for moduleName, streamDict in moduleDict.items(): moduleState = self.base._moduleContainer.getModuleState(moduleName) if len(streamDict) > 1: if moduleState != STATE_DEFAULT and moduleState != STATE_ENABLED \ and moduleState != STATE_DISABLED: streams_str = "', '".join( 
sorted(streamDict.keys(), key=functools.cmp_to_key(self.base.sack.evr_cmp))) msg = _("Argument '{argument}' matches {stream_count} streams ('{streams}') of " "module '{module}', but none of the streams are enabled or " "default").format( argument=spec, stream_count=len(streamDict), streams=streams_str, module=moduleName) raise EnableMultipleStreamsException(moduleName, msg) if moduleState == STATE_ENABLED: stream = self.base._moduleContainer.getEnabledStream(moduleName) else: stream = self.base._moduleContainer.getDefaultStream(moduleName) if not stream or stream not in streamDict: raise EnableMultipleStreamsException(moduleName) for key in sorted(streamDict.keys()): if key == stream: if enable: self.base._moduleContainer.enable(moduleName, key) continue del streamDict[key] elif enable: for key in streamDict.keys(): self.base._moduleContainer.enable(moduleName, key) assert len(streamDict) == 1 return moduleDict def _resolve_specs_enable(self, module_specs): no_match_specs = [] error_spec = [] module_dicts = {} for spec in module_specs: module_list, nsvcap = self._get_modules(spec) if not module_list: no_match_specs.append(spec) continue try: module_dict = self._create_module_dict_and_enable(module_list, spec, True) module_dicts[spec] = (nsvcap, module_dict) except (RuntimeError, EnableMultipleStreamsException) as e: error_spec.append(spec) logger.error(ucd(e)) logger.error(_("Unable to resolve argument {}").format(spec)) return no_match_specs, error_spec, module_dicts def _update_sack(self): hot_fix_repos = [i.id for i in self.base.repos.iter_enabled() if i.module_hotfixes] try: solver_errors = self.base.sack.filter_modules( self.base._moduleContainer, hot_fix_repos, self.base.conf.installroot, self.base.conf.module_platform_id, update_only=True, debugsolver=self.base.conf.debug_solver) except hawkey.Exception as e: raise dnf.exceptions.Error(ucd(e)) return solver_errors def _enable_dependencies(self, module_dicts): error_spec = [] for spec, (nsvcap, moduleDict) in module_dicts.items(): for streamDict in moduleDict.values(): for modules in streamDict.values(): try: self.base._moduleContainer.enableDependencyTree( libdnf.module.VectorModulePackagePtr(modules)) except RuntimeError as e: error_spec.append(spec) logger.error(ucd(e)) logger.error(_("Unable to resolve argument {}").format(spec)) return error_spec def _resolve_specs_enable_update_sack(self, module_specs): no_match_specs, error_spec, module_dicts = self._resolve_specs_enable(module_specs) solver_errors = self._update_sack() dependency_error_spec = self._enable_dependencies(module_dicts) if dependency_error_spec: error_spec.extend(dependency_error_spec) return no_match_specs, error_spec, solver_errors, module_dicts def _modules_reset_or_disable(self, module_specs, to_state): no_match_specs = [] for spec in module_specs: module_list, nsvcap = self._get_modules(spec) if not module_list: logger.error(_("Unable to resolve argument {}").format(spec)) no_match_specs.append(spec) continue if nsvcap.stream or nsvcap.version or nsvcap.context or nsvcap.arch or nsvcap.profile: logger.info(_("Only module name is required. 
" "Ignoring unneeded information in argument: '{}'").format(spec)) module_names = set() for module in module_list: module_names.add(module.getName()) for name in module_names: if to_state == STATE_UNKNOWN: self.base._moduleContainer.reset(name) if to_state == STATE_DISABLED: self.base._moduleContainer.disable(name) solver_errors = self._update_sack() return no_match_specs, solver_errors def _get_package_name_set_and_remove_profiles(self, module_list, nsvcap, remove=False): package_name_set = set() latest_module = self._get_latest(module_list) installed_profiles_strings = set(self.base._moduleContainer.getInstalledProfiles( latest_module.getName())) if not installed_profiles_strings: return set() if nsvcap.profile: profiles_set = latest_module.getProfiles(nsvcap.profile) if not profiles_set: return set() for profile in profiles_set: if profile.getName() in installed_profiles_strings: if remove: self.base._moduleContainer.uninstall(latest_module, profile.getName()) package_name_set.update(profile.getContent()) else: for profile_string in installed_profiles_strings: if remove: self.base._moduleContainer.uninstall(latest_module, profile_string) for profile in latest_module.getProfiles(profile_string): package_name_set.update(profile.getContent()) return package_name_set def _get_info_profiles(self, module_specs): output = set() for module_spec in module_specs: module_list, nsvcap = self._get_modules(module_spec) if not module_list: logger.info(_("Unable to resolve argument {}").format(module_spec)) continue if nsvcap.profile: logger.info(_("Ignoring unnecessary profile: '{}/{}'").format( nsvcap.name, nsvcap.profile)) for module in module_list: lines = OrderedDict() lines["Name"] = module.getFullIdentifier() for profile in sorted(module.getProfiles(), key=_profile_comparison_key): lines[profile.getName()] = "\n".join( [pkgName for pkgName in profile.getContent()]) output.add(self._create_simple_table(lines).toString()) return "\n\n".join(sorted(output)) def _profile_report_formatter(self, modulePackage, default_profiles, enabled_str): installed_profiles = self.base._moduleContainer.getInstalledProfiles( modulePackage.getName()) available_profiles = modulePackage.getProfiles() profiles_str = "" for profile in sorted(available_profiles, key=_profile_comparison_key): profiles_str += "{}{}".format( profile.getName(), " [d]" if profile.getName() in default_profiles else "") profiles_str += " [i], " if profile.getName() in installed_profiles and enabled_str \ else ", " return profiles_str[:-2] def _summary_report_formatter(self, summary): return summary.strip().replace("\n", " ") def _module_strs_formatter(self, modulePackage, markActive=False): default_str = "" enabled_str = "" disabled_str = "" if modulePackage.getStream() == self.base._moduleContainer.getDefaultStream( modulePackage.getName()): default_str = " [d]" if self.base._moduleContainer.isEnabled(modulePackage): if not default_str: enabled_str = " " enabled_str += "[e]" elif self.base._moduleContainer.isDisabled(modulePackage): if not default_str: disabled_str = " " disabled_str += "[x]" if markActive and self.base._moduleContainer.isModuleActive(modulePackage): if not default_str: disabled_str = " " disabled_str += "[a]" return default_str, enabled_str, disabled_str def _get_info(self, module_specs): output = set() for module_spec in module_specs: module_list, nsvcap = self._get_modules(module_spec) if not module_list: logger.info(_("Unable to resolve argument {}").format(module_spec)) continue if nsvcap.profile: logger.info(_("Ignoring 
unnecessary profile: '{}/{}'").format( nsvcap.name, nsvcap.profile)) for modulePackage in module_list: default_str, enabled_str, disabled_str = self._module_strs_formatter( modulePackage, markActive=True) default_profiles = self.base._moduleContainer.getDefaultProfiles( modulePackage.getName(), modulePackage.getStream()) profiles_str = self._profile_report_formatter( modulePackage, default_profiles, enabled_str) lines = OrderedDict() lines["Name"] = modulePackage.getName() lines["Stream"] = modulePackage.getStream() + default_str + enabled_str + \ disabled_str lines["Version"] = modulePackage.getVersion() lines["Context"] = modulePackage.getContext() lines["Architecture"] = modulePackage.getArch() lines["Profiles"] = profiles_str lines["Default profiles"] = " ".join(default_profiles) lines["Repo"] = modulePackage.getRepoID() lines["Summary"] = modulePackage.getSummary() lines["Description"] = modulePackage.getDescription() req_set = set() for req in modulePackage.getModuleDependencies(): for require_dict in req.getRequires(): for mod_require, stream in require_dict.items(): req_set.add("{}:[{}]".format(mod_require, ",".join(stream))) lines["Requires"] = "\n".join(sorted(req_set)) lines["Artifacts"] = "\n".join(sorted(modulePackage.getArtifacts())) output.add(self._create_simple_table(lines).toString()) str_table = "\n\n".join(sorted(output)) if str_table: str_table += MODULE_INFO_TABLE_HINT return str_table @staticmethod def _create_simple_table(lines): table = libdnf.smartcols.Table() table.enableNoheadings(True) table.setColumnSeparator(" : ") column_name = table.newColumn("Name") column_value = table.newColumn("Value") column_value.setWrap(True) column_value.setSafechars("\n") column_value.setNewlineWrapFunction() for line_name, value in lines.items(): if value is None: value = "" line = table.newLine() line.getColumnCell(column_name).setData(line_name) line.getColumnCell(column_value).setData(str(value)) return table def _get_full_info(self, module_specs): output = set() for module_spec in module_specs: module_list, nsvcap = self._get_modules(module_spec) if not module_list: logger.info(_("Unable to resolve argument {}").format(module_spec)) continue if nsvcap.profile: logger.info(_("Ignoring unnecessary profile: '{}/{}'").format( nsvcap.name, nsvcap.profile)) for modulePackage in module_list: info = modulePackage.getYaml() if info: output.add(info) output_string = "\n\n".join(sorted(output)) return output_string def _what_provides(self, rpm_specs): output = set() modulePackages = self.base._moduleContainer.getModulePackages() baseQuery = self.base.sack.query().filterm(empty=True).apply() getBestInitQuery = self.base.sack.query(flags=hawkey.IGNORE_MODULAR_EXCLUDES) for spec in rpm_specs: subj = dnf.subject.Subject(spec) baseQuery = baseQuery.union(subj.get_best_query( self.base.sack, with_nevra=True, with_provides=False, with_filenames=False, query=getBestInitQuery)) baseQuery.apply() for modulePackage in modulePackages: artifacts = modulePackage.getArtifacts() if not artifacts: continue query = baseQuery.filter(nevra_strict=artifacts) if query: for pkg in query: string_output = "" profiles = [] for profile in sorted(modulePackage.getProfiles(), key=_profile_comparison_key): if pkg.name in profile.getContent(): profiles.append(profile.getName()) lines = OrderedDict() lines["Module"] = modulePackage.getFullIdentifier() lines["Profiles"] = " ".join(sorted(profiles)) lines["Repo"] = modulePackage.getRepoID() lines["Summary"] = modulePackage.getSummary() table = 
self._create_simple_table(lines) string_output += "{}\n".format(self.base.output.term.bold(str(pkg))) string_output += "{}".format(table.toString()) output.add(string_output) return "\n\n".join(sorted(output)) def _create_and_fill_table(self, latest): table = libdnf.smartcols.Table() table.setTermforce(libdnf.smartcols.Table.TermForce_AUTO) table.enableMaxout(True) column_name = table.newColumn("Name") column_stream = table.newColumn("Stream") column_profiles = table.newColumn("Profiles") column_profiles.setWrap(True) column_info = table.newColumn("Summary") column_info.setWrap(True) if not self.base.conf.verbose: column_info.hidden = True for latest_per_repo in latest: for nameStreamArch in latest_per_repo: if len(nameStreamArch) == 1: modulePackage = nameStreamArch[0] else: active = [module for module in nameStreamArch if self.base._moduleContainer.isModuleActive(module)] if active: modulePackage = active[0] else: modulePackage = nameStreamArch[0] line = table.newLine() default_str, enabled_str, disabled_str = self._module_strs_formatter( modulePackage, markActive=False) default_profiles = self.base._moduleContainer.getDefaultProfiles( modulePackage.getName(), modulePackage.getStream()) profiles_str = self._profile_report_formatter(modulePackage, default_profiles, enabled_str) line.getColumnCell(column_name).setData(modulePackage.getName()) line.getColumnCell( column_stream).setData( modulePackage.getStream() + default_str + enabled_str + disabled_str) line.getColumnCell(column_profiles).setData(profiles_str) summary_str = self._summary_report_formatter(modulePackage.getSummary()) line.getColumnCell(column_info).setData(summary_str) return table def _get_brief_description(self, module_specs, module_state): modules = [] if module_specs: for spec in module_specs: module_list, nsvcap = self._get_modules(spec) modules.extend(module_list) else: modules = self.base._moduleContainer.getModulePackages() latest = self.base._moduleContainer.getLatestModulesPerRepo(module_state, modules) if not latest: return "" table = self._create_and_fill_table(latest) current_repo_id_index = 0 already_printed_lines = 0 try: repo_name = self.base.repos[latest[0][0][0].getRepoID()].name except KeyError: repo_name = latest[0][0][0].getRepoID() versions = len(latest[0]) header = self._format_header(table) str_table = self._format_repoid(repo_name) str_table += header for i in range(0, table.getNumberOfLines()): if versions + already_printed_lines <= i: already_printed_lines += versions current_repo_id_index += 1 # Fail-Safe repository is not in self.base.repos try: repo_name = self.base.repos[ latest[current_repo_id_index][0][0].getRepoID()].name except KeyError: repo_name = latest[current_repo_id_index][0][0].getRepoID() versions = len(latest[current_repo_id_index]) str_table += "\n" str_table += self._format_repoid(repo_name) str_table += header line = table.getLine(i) str_table += table.toString(line, line) return str_table + MODULE_TABLE_HINT def _format_header(self, table): line = table.getLine(0) return table.toString(line, line).split('\n', 1)[0] + '\n' def _format_repoid(self, repo_name): return "{}\n".format(self.base.output.term.bold(repo_name)) def _install_profiles_internal(self, install_set_artifacts, install_dict, strict): # Remove source packages because they cannot be installed or upgraded base_no_source_query = self.base.sack.query().filterm(arch__neq=['src', 'nosrc']).apply() install_base_query = base_no_source_query.filter(nevra_strict=install_set_artifacts) error_specs = [] # add hot-fix 
packages hot_fix_repos = [i.id for i in self.base.repos.iter_enabled() if i.module_hotfixes] hotfix_packages = base_no_source_query.filter( reponame=hot_fix_repos, name=install_dict.keys()) install_base_query = install_base_query.union(hotfix_packages) for pkg_name, set_specs in install_dict.items(): query = install_base_query.filter(name=pkg_name) if not query: # package can also be non-modular or part of another stream query = base_no_source_query.filter(name=pkg_name) if not query: for spec in set_specs: logger.error(_("Unable to resolve argument {}").format(spec)) logger.error(_("No match for package {}").format(pkg_name)) error_specs.extend(set_specs) continue self.base._goal.group_members.add(pkg_name) sltr = dnf.selector.Selector(self.base.sack) sltr.set(pkg=query) self.base._goal.install(select=sltr, optional=(not strict)) return install_base_query, error_specs def format_modular_solver_errors(errors): msg = dnf.util._format_resolve_problems(errors) return "\n".join( [P_('Modular dependency problem:', 'Modular dependency problems:', len(errors)), msg])
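End of module/module_base.py. For orientation, a minimal sketch of how the "# :api" entry points above (enable, install, remove, upgrade) are typically driven from a dnf.Base session (editorial illustration, not part of the packaged sources; "nodejs:12/default" is a made-up name:stream/profile spec and the flow assumes a configured system with modular repositories):

# Illustrative sketch only; assumes repositories are configured on the host.
import dnf
import dnf.exceptions
import dnf.module.module_base

with dnf.Base() as base:
    base.read_all_repos()
    base.fill_sack()

    module_base = dnf.module.module_base.ModuleBase(base)
    try:
        # Hypothetical example spec; replace with a real module on your system.
        module_base.install(["nodejs:12/default"], strict=True)
    except dnf.exceptions.MarkingErrors as e:
        print("Module marking failed:", e)
        raise

    base.resolve()
    base.download_packages(base.transaction.install_set)
    base.do_transaction()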
zutf-8)rr)Zrpm_hdrkeyvaluer#r#r$ getheaderzs rn)r2) r3r4r5r6r7r8r9r:r;r<)r=r>r?r@rArB)rCrDrErF)rGrHrIrGrJrKrL)rM)rN)rO)rP)rQ)rR)rS)rTrUrVrW)rX)rY)rZ)r[)r\)r])r^)r_r`)rarbrcrdrerf)rgrhri)Z __future__rrrZ dnf.pycomprZ dnf.constr Zdnf.exceptionsr r%r+r1rjrkrnr#r#r#r$sJ    ,  PK!edA'rpm/__pycache__/__init__.cpython-36.pycnu[3 ft`@sddlmZddlmZddlmZddlmZddlZddl Zddl Z ddZ d d Z d d Z e dJdKdLdMdNdOdPdQdRdSdTdUdVdWdXdYdZd[d\d]d^d_d`dEZdFdGZdHdIZdS)a)absolute_import)unicode_literals) transaction) is_py3bytesNc )Cstj|d}|jtjtjBxztjjD]l}tj j rDt |d}y|j d|}Wn<t tjfk r}ztjjdt|WYdd}~XnXt|sq,y t|}Wn$tk rd}tjj|YnX|d}yy|tjj|}Wn,tk r|tjj|jd}YnX|tj|} |tj|} | tjkrd| rd|d ||jdfkrd| }Wntttfk rYnXt|rt|d}|SWdS) z-Calculate the release version for the system.)rootzutf-8Zprovidesz Error: %sNz:Error: rpmdb failed to list provides. Try: rpm --rebuilddbversionutf8name)rinitReadOnlyTransactionZ pushVSFlagsrpmZ_RPMVSF_NOSIGNATURESZ_RPMVSF_NODIGESTSdnfconstZ DISTROVERPKGZpycompZPY3bytesZdbMatch TypeErrorerror exceptionsErrorstrlennext StopIterationZRPMTAG_PROVIDENAMEindex ValueErrordecodeZRPMTAG_PROVIDEFLAGSZRPMTAG_PROVIDEVERSIONZRPMSENSE_EQUALKeyError IndexErrorr) Z installroottsZ distroverpkgidxehdrmsgZ releaseverZoffflagZverr#/usr/lib/python3.6/__init__.pydetect_releaseversB  &   r%cCsptj}t|V}|j}y|j|}Wn8tjk r`}ztjj dj ||WYdd}~XnX|SQRXdS)zReturn RPM header of the file.z {0}: '{1}'N) rr openfilenoZ hdrFromFdnor rr rrformat)pathrpackageZfdnor rr#r#r$_headerIs &r+csfddDS)Ncs i|]}|D] }||qqSr#r#).0kv)dctr#r$ Vsz_invert..r#)r/r#)r/r$_invertUsr1aarch64alphaalphaev4 alphaev45alphaev5 alphaev56alphaev6 alphaev67 alphaev68alphaev7 alphapca56 armv5tejlarmv5telarmv5tlarmv6larmv7larmv8larmv6hlarmv7hlarmv7hnlarmv8hli386athlongeodei486i586i686ia64mipsmipselmips64mips64elnoarchppcppc64 ppc64iseriesppc64p7 ppc64pseriesppc64leriscv32riscv64riscv128s390s390xsh3sh4sh4asparcsparc64sparc64vsparcv8sparcv9sparcv9vx86_64amd64ia32e)r2r3ZarmZarmhfprGrMrNrOrPrQrRrSrTrXrYrZr[r\r]r^r_rargcCst|S)N) _BASEARCH_MAP)Zarchr#r#r$basearchusrkcCs||}t|rt|d}|S)z Returns value of rpm_hdr[key] as a string. Rpm has switched from bytes to str and we need to handle both properly. 
zutf-8)rr)Zrpm_hdrkeyvaluer#r#r$ getheaderzs rn)r2) r3r4r5r6r7r8r9r:r;r<)r=r>r?r@rArB)rCrDrErF)rGrHrIrGrJrKrL)rM)rN)rO)rP)rQ)rR)rS)rTrUrVrW)rX)rY)rZ)r[)r\)r])r^)r_r`)rarbrcrdrerf)rgrhri)Z __future__rrrZ dnf.pycomprZ dnf.constr Zdnf.exceptionsr r%r+r1rjrkrnr#r#r#r$sJ    ,  PK!ǣ$/rpm/__pycache__/connection.cpython-36.opt-1.pycnu[3 ft`Y@s@ddlmZddlmZddlmZddlZGdddeZdS))absolute_import)unicode_literals)initReadOnlyTransactionNc@s,eZdZddZeejjdddZdS) RpmConnectioncCs ||_dS)N)root)selfrr /usr/lib/python3.6/connection.py__init__szRpmConnection.__init__Z _readonly_tscCs t|jS)N)rr)rr r r readonly_tsszRpmConnection.readonly_tsN) __name__ __module__ __qualname__r propertydnfutilZlazyattrr r r r r rsr) Z __future__rrZ transactionrZdnf.utilrobjectrr r r r s   PK!ǣ$)rpm/__pycache__/connection.cpython-36.pycnu[3 ft`Y@s@ddlmZddlmZddlmZddlZGdddeZdS))absolute_import)unicode_literals)initReadOnlyTransactionNc@s,eZdZddZeejjdddZdS) RpmConnectioncCs ||_dS)N)root)selfrr /usr/lib/python3.6/connection.py__init__szRpmConnection.__init__Z _readonly_tscCs t|jS)N)rr)rr r r readonly_tsszRpmConnection.readonly_tsN) __name__ __module__ __qualname__r propertydnfutilZlazyattrr r r r r rsr) Z __future__rrZ transactionrZdnf.utilrobjectrr r r r s   PK!j*rpm/__pycache__/error.cpython-36.opt-1.pycnu[3 ft`@sGdddeZdS)c@s eZdZdS) RpmUtilsErrorN)__name__ __module__ __qualname__rr/usr/lib/python3.6/error.pyrsrN) ExceptionrrrrrsPK!j$rpm/__pycache__/error.cpython-36.pycnu[3 ft`@sGdddeZdS)c@s eZdZdS) RpmUtilsErrorN)__name__ __module__ __qualname__rr/usr/lib/python3.6/error.pyrsrN) ExceptionrrrrrsPK!7 .rpm/__pycache__/miscutils.cpython-36.opt-1.pycnu[3 f@svddlmZmZmZddlZddlZddlZddlmZddl m Z ej dZ da ddZdd Zd d Zd d ZdS))print_functionabsolute_importunicode_literalsN)which)_ZdnfcCs$tdkr tdatjtdttS)Nrpmkeysz3Using rpmkeys executable at %s to verify signatures)_rpmkeys_binaryr_loggerdebugrr r /usr/lib/python3.6/miscutils.py_find_rpmkeys_binarys  r cCst|dks |ddks |dr$dSd\}}}}x^|ddD]N}d|krNdS|jdr^d }q>|jd rnd }q>|jd r~d }q>|jd s>dSq>W|rdS|rdS|rd SdS)Nrs-:Fs: BADs: NOKEYTs : NOTTRUSTEDs : NOTFOUNDs: OK)FFFFr)lenendswith)dataZseen_sigZ missing_keyZ not_trustedZ not_signedir r r _process_rpm_output$s*      rc Cst}|dkstjj| r.tjtddSddd|dddd f}tj||d d itj d |d }|j d}WdQRX|j }t |t k rtd|dks|dkrdSt|jd}|r|S|rdSdS)Nz4Cannot find rpmkeys executable to verify signatures.rrz --checksigz--rootz --verbosez#--define=_pkgverify_level signaturez--define=_pkgverify_flags 0x0-LC_ALLC/)args executableenvstdoutcwdstdinrz Popen set return code to non-int )r ospathisfiler Zcriticalr subprocessPopenPIPEZ communicate returncodetypeintAssertionErrorrsplit)packageZ installrootZrpmkeys_binaryrprr)retr r r _verifyPackageUsingRpmkeys?s0   r1c Cs>tj|tjtjBtjB}zt||jj}Wdtj|X|S)a Takes a transaction set and a package, check it's sigs, return 0 if they are all fine return 1 if the gpg key can't be found return 2 if the header is in someway damaged return 3 if the key is not trusted return 4 if the pkg is not gpg or pgp signedN) r#openO_RDONLYO_NOCTTY O_CLOEXECr1tsZrootDirclose)r6r.Zfdnovaluer r r checkSig^s  r9)Z __future__rrrr#r&ZloggingZshutilrZdnf.i18nrZ getLoggerr rr rr1r9r r r r s   PK!7 (rpm/__pycache__/miscutils.cpython-36.pycnu[3 f@svddlmZmZmZddlZddlZddlZddlmZddl m Z ej dZ da ddZdd Zd d Zd d ZdS))print_functionabsolute_importunicode_literalsN)which)_ZdnfcCs$tdkr tdatjtdttS)Nrpmkeysz3Using rpmkeys executable at %s to verify signatures)_rpmkeys_binaryr_loggerdebugrr r /usr/lib/python3.6/miscutils.py_find_rpmkeys_binarys  r cCst|dks |ddks |dr$dSd\}}}}x^|ddD]N}d|krNdS|jdr^d 
}q>|jd rnd }q>|jd r~d }q>|jd s>dSq>W|rdS|rdS|rd SdS)Nrs-:Fs: BADs: NOKEYTs : NOTTRUSTEDs : NOTFOUNDs: OK)FFFFr)lenendswith)dataZseen_sigZ missing_keyZ not_trustedZ not_signedir r r _process_rpm_output$s*      rc Cst}|dkstjj| r.tjtddSddd|dddd f}tj||d d itj d |d }|j d}WdQRX|j }t |t k rtd|dks|dkrdSt|jd}|r|S|rdSdS)Nz4Cannot find rpmkeys executable to verify signatures.rrz --checksigz--rootz --verbosez#--define=_pkgverify_level signaturez--define=_pkgverify_flags 0x0-LC_ALLC/)args executableenvstdoutcwdstdinrz Popen set return code to non-int )r ospathisfiler Zcriticalr subprocessPopenPIPEZ communicate returncodetypeintAssertionErrorrsplit)packageZ installrootZrpmkeys_binaryrprr)retr r r _verifyPackageUsingRpmkeys?s0   r1c Cs>tj|tjtjBtjB}zt||jj}Wdtj|X|S)a Takes a transaction set and a package, check it's sigs, return 0 if they are all fine return 1 if the gpg key can't be found return 2 if the header is in someway damaged return 3 if the key is not trusted return 4 if the pkg is not gpg or pgp signedN) r#openO_RDONLYO_NOCTTY O_CLOEXECr1tsZrootDirclose)r6r.Zfdnovaluer r r checkSig^s  r9)Z __future__rrrr#r&ZloggingZshutilrZdnf.i18nrZ getLoggerr rr rr1r9r r r r s   PK!Jhh0rpm/__pycache__/transaction.cpython-36.opt-1.pycnu[3 ft`@sRddlmZddlmZddlmZddlZdZdZGdddeZ d dd Z dS) )absolute_import)unicode_literals)_Nc@szeZdZdddZddZddZdd Zd d Zd d ZddZ ddZ ddZ ddZ ddZ ddZifddZdS)TransactionWrapper/cCs@tj||_ddddddddd d d d d ddg|_g|_d|_dS)NZcheckorderZaddEraseZ addInstallZ addReinstallrunZpgpImportPubkeyZ pgpPrtPktsZproblemssetFlags setVSFlags setProbFilterZ hdrFromFdnonextZcleanT)rpmZTransactionSetts_methodstsflagsopen)selfrootr!/usr/lib/python3.6/transaction.py__init__s$ zTransactionWrapper.__init__cCs |jdS)N)close)rrrr__del__+szTransactionWrapper.__del__cCs |jr|jjd|_d|_dS)NF)rrZcloseDB)rrrrr/s zTransactionWrapper.closecOsLd|kr|jd}ng}|jj||}x |D]\}}}|j|||q,W|S)Npatterns)poprdbMatchpattern)rargskwdsrZmitagtpZpatrrrr5s zTransactionWrapper.dbMatchcCs ||jkr|j|St|dS)N)r getMethodAttributeError)rattrrrr __getattr__@s  zTransactionWrapper.__getattr__cCs|jS)N)r)rrrr__iter__FszTransactionWrapper.__iter__cCs t|j|S)N)getattrr)rmethodrrrr!IszTransactionWrapper.getMethodcCs"|jj||jj|jddS)N)rappendrr )rflagsrrr pushVSFlagsQs zTransactionWrapper.pushVSFlagscCs |jjd}|jj||BdS)Nr)rr )rflagcurflagsrrr addTsFlagUs zTransactionWrapper.addTsFlagcCs|jjd}|jj||S)Nr)rr )rr.rrr getTsFlagsYs  zTransactionWrapper.getTsFlagscCs|j}t||@S)N)r0bool)rr-valrrr isTsFlagSet^szTransactionWrapper.isTsFlagSetcCs|j|j_dS)N)filenorZscriptFd)rfdrrr setScriptFdbszTransactionWrapper.setScriptFdc Cs|j}|jtj|jddkr0|jjtj|jj|j d}|jj |g}|dk rx |D]\}\}}} |j |q^W|s|j t d|S)zetests the ts we've setup, takes a callback function and a conf dict for flags and what notZdiskspacecheckrNz(Errors occurred during test transaction.) 
r0r/r ZRPMTRANS_FLAG_TESTgetrr ZRPMPROB_FILTER_DISKSPACErcallbackr r*r) rcbZconfZ origflagsZtserrorsZ reserrorsZdescretypeZmountZneedrrrtestes  zTransactionWrapper.testN)r)__name__ __module__ __qualname__rrrrr$r%r!r,r/r0r3r6r<rrrrrs  rrcCs t|d}|jtjtjB|S)N)r)rr,r Z_RPMVSF_NOSIGNATURESZ_RPMVSF_NODIGESTS)rread_tsrrrinitReadOnlyTransaction{s rA)r) Z __future__rrZdnf.i18nrr r@robjectrrArrrr s   ePK!Jhh*rpm/__pycache__/transaction.cpython-36.pycnu[3 ft`@sRddlmZddlmZddlmZddlZdZdZGdddeZ d dd Z dS) )absolute_import)unicode_literals)_Nc@szeZdZdddZddZddZdd Zd d Zd d ZddZ ddZ ddZ ddZ ddZ ddZifddZdS)TransactionWrapper/cCs@tj||_ddddddddd d d d d ddg|_g|_d|_dS)NZcheckorderZaddEraseZ addInstallZ addReinstallrunZpgpImportPubkeyZ pgpPrtPktsZproblemssetFlags setVSFlags setProbFilterZ hdrFromFdnonextZcleanT)rpmZTransactionSetts_methodstsflagsopen)selfrootr!/usr/lib/python3.6/transaction.py__init__s$ zTransactionWrapper.__init__cCs |jdS)N)close)rrrr__del__+szTransactionWrapper.__del__cCs |jr|jjd|_d|_dS)NF)rrZcloseDB)rrrrr/s zTransactionWrapper.closecOsLd|kr|jd}ng}|jj||}x |D]\}}}|j|||q,W|S)Npatterns)poprdbMatchpattern)rargskwdsrZmitagtpZpatrrrr5s zTransactionWrapper.dbMatchcCs ||jkr|j|St|dS)N)r getMethodAttributeError)rattrrrr __getattr__@s  zTransactionWrapper.__getattr__cCs|jS)N)r)rrrr__iter__FszTransactionWrapper.__iter__cCs t|j|S)N)getattrr)rmethodrrrr!IszTransactionWrapper.getMethodcCs"|jj||jj|jddS)N)rappendrr )rflagsrrr pushVSFlagsQs zTransactionWrapper.pushVSFlagscCs |jjd}|jj||BdS)Nr)rr )rflagcurflagsrrr addTsFlagUs zTransactionWrapper.addTsFlagcCs|jjd}|jj||S)Nr)rr )rr.rrr getTsFlagsYs  zTransactionWrapper.getTsFlagscCs|j}t||@S)N)r0bool)rr-valrrr isTsFlagSet^szTransactionWrapper.isTsFlagSetcCs|j|j_dS)N)filenorZscriptFd)rfdrrr setScriptFdbszTransactionWrapper.setScriptFdc Cs|j}|jtj|jddkr0|jjtj|jj|j d}|jj |g}|dk rx |D]\}\}}} |j |q^W|s|j t d|S)zetests the ts we've setup, takes a callback function and a conf dict for flags and what notZdiskspacecheckrNz(Errors occurred during test transaction.) r0r/r ZRPMTRANS_FLAG_TESTgetrr ZRPMPROB_FILTER_DISKSPACErcallbackr r*r) rcbZconfZ origflagsZtserrorsZ reserrorsZdescretypeZmountZneedrrrtestes  zTransactionWrapper.testN)r)__name__ __module__ __qualname__rrrrr$r%r!r,r/r0r3r6r<rrrrrs  rrcCs t|d}|jtjtjB|S)N)r)rr,r Z_RPMVSF_NOSIGNATURESZ_RPMVSF_NODIGESTS)rread_tsrrrinitReadOnlyTransaction{s rA)r) Z __future__rrZdnf.i18nrr r@robjectrrArrrr s   ePK!BOfrpm/__init__.pynu[# __init__.py # # Copyright (C) 2012-2016 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # from __future__ import absolute_import from __future__ import unicode_literals from . 
import transaction from dnf.pycomp import is_py3bytes import dnf.const import dnf.exceptions import rpm # used by ansible (dnf.rpm.rpm.labelCompare in lib/ansible/modules/packaging/os/dnf.py) def detect_releasever(installroot): # :api """Calculate the release version for the system.""" ts = transaction.initReadOnlyTransaction(root=installroot) ts.pushVSFlags(~(rpm._RPMVSF_NOSIGNATURES | rpm._RPMVSF_NODIGESTS)) for distroverpkg in dnf.const.DISTROVERPKG: if dnf.pycomp.PY3: distroverpkg = bytes(distroverpkg, 'utf-8') try: idx = ts.dbMatch('provides', distroverpkg) except (TypeError, rpm.error) as e: raise dnf.exceptions.Error('Error: %s' % str(e)) if not len(idx): continue try: hdr = next(idx) except StopIteration: msg = 'Error: rpmdb failed to list provides. Try: rpm --rebuilddb' raise dnf.exceptions.Error(msg) releasever = hdr['version'] try: try: # header returns bytes -> look for bytes # it may fail because rpm returns a decoded string since 10 Apr 2019 off = hdr[rpm.RPMTAG_PROVIDENAME].index(distroverpkg) except ValueError: # header returns a string -> look for a string off = hdr[rpm.RPMTAG_PROVIDENAME].index(distroverpkg.decode("utf8")) flag = hdr[rpm.RPMTAG_PROVIDEFLAGS][off] ver = hdr[rpm.RPMTAG_PROVIDEVERSION][off] if flag == rpm.RPMSENSE_EQUAL and ver: if hdr['name'] not in (distroverpkg, distroverpkg.decode("utf8")): # override the package version releasever = ver except (ValueError, KeyError, IndexError): pass if is_py3bytes(releasever): releasever = str(releasever, "utf-8") return releasever return None def _header(path): """Return RPM header of the file.""" ts = transaction.initReadOnlyTransaction() with open(path) as package: fdno = package.fileno() try: hdr = ts.hdrFromFdno(fdno) except rpm.error as e: raise dnf.exceptions.Error("{0}: '{1}'".format(e, path)) return hdr def _invert(dct): return {v: k for k in dct for v in dct[k]} _BASEARCH_MAP = _invert({ 'aarch64': ('aarch64',), 'alpha': ('alpha', 'alphaev4', 'alphaev45', 'alphaev5', 'alphaev56', 'alphaev6', 'alphaev67', 'alphaev68', 'alphaev7', 'alphapca56'), 'arm': ('armv5tejl', 'armv5tel', 'armv5tl', 'armv6l', 'armv7l', 'armv8l'), 'armhfp': ('armv6hl', 'armv7hl', 'armv7hnl', 'armv8hl'), 'i386': ('i386', 'athlon', 'geode', 'i386', 'i486', 'i586', 'i686'), 'ia64': ('ia64',), 'mips': ('mips',), 'mipsel': ('mipsel',), 'mips64': ('mips64',), 'mips64el': ('mips64el',), 'noarch': ('noarch',), 'ppc': ('ppc',), 'ppc64': ('ppc64', 'ppc64iseries', 'ppc64p7', 'ppc64pseries'), 'ppc64le': ('ppc64le',), 'riscv32' : ('riscv32',), 'riscv64' : ('riscv64',), 'riscv128' : ('riscv128',), 's390': ('s390',), 's390x': ('s390x',), 'sh3': ('sh3',), 'sh4': ('sh4', 'sh4a'), 'sparc': ('sparc', 'sparc64', 'sparc64v', 'sparcv8', 'sparcv9', 'sparcv9v'), 'x86_64': ('x86_64', 'amd64', 'ia32e'), }) def basearch(arch): # :api return _BASEARCH_MAP[arch] def getheader(rpm_hdr, key): ''' Returns value of rpm_hdr[key] as a string. Rpm has switched from bytes to str and we need to handle both properly. ''' value = rpm_hdr[key] if is_py3bytes(value): value = str(value, "utf-8") return value PK!5\YYrpm/connection.pynu[# connection.py # Maintain RPMDB connections. # # Copyright (C) 2012-2016 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. 
# This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # from __future__ import absolute_import from __future__ import unicode_literals from .transaction import initReadOnlyTransaction import dnf.util class RpmConnection(object): def __init__(self, root): self.root = root @property @dnf.util.lazyattr("_readonly_ts") def readonly_ts(self): return initReadOnlyTransaction(self.root) PK!# rpm/error.pynu[# error.py # RpmUtilsError # # Copyright (C) 2012-2016 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # class RpmUtilsError(Exception): pass PK!k?rpm/miscutils.pynu[# This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Library General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # Copyright 2003 Duke University from __future__ import print_function, absolute_import, unicode_literals import os import subprocess import logging from shutil import which from dnf.i18n import _ _logger = logging.getLogger('dnf') _rpmkeys_binary = None def _find_rpmkeys_binary(): global _rpmkeys_binary if _rpmkeys_binary is None: _rpmkeys_binary = which("rpmkeys") _logger.debug(_('Using rpmkeys executable at %s to verify signatures'), _rpmkeys_binary) return _rpmkeys_binary def _process_rpm_output(data): # No signatures or digests = corrupt package. # There is at least one line for -: and another (empty) entry after the # last newline. 
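    # For illustration (hypothetical output, the exact wording is not
    # guaranteed): with the package fed to rpmkeys on stdin as "-", a verbose
    # run on a signed and trusted package might split into entries such as
    #   [b'-:', b'    Header V4 RSA/SHA512 Signature, key ID 0123abcd: OK',
    #    b'    Payload SHA256 digest: OK', b'']
    # while b': NOKEY', b': NOTTRUSTED' and b': NOTFOUND' suffixes mark the
    # other outcomes handled below; only these suffixes matter here.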
if len(data) < 3 or data[0] != b'-:' or data[-1]: return 2 seen_sig, missing_key, not_trusted, not_signed = False, False, False, False for i in data[1:-1]: if b': BAD' in i: return 2 elif i.endswith(b': NOKEY'): missing_key = True elif i.endswith(b': NOTTRUSTED'): not_trusted = True elif i.endswith(b': NOTFOUND'): not_signed = True elif not i.endswith(b': OK'): return 2 if not_trusted: return 3 elif missing_key: return 1 elif not_signed: return 4 # we still check return code, so this is safe return 0 def _verifyPackageUsingRpmkeys(package, installroot): rpmkeys_binary = _find_rpmkeys_binary() if rpmkeys_binary is None or not os.path.isfile(rpmkeys_binary): _logger.critical(_('Cannot find rpmkeys executable to verify signatures.')) return 2 # "--define=_pkgverify_level signature" enforces signature checking; # "--define=_pkgverify_flags 0x0" ensures that all signatures are checked. args = ('rpmkeys', '--checksig', '--root', installroot, '--verbose', '--define=_pkgverify_level signature', '--define=_pkgverify_flags 0x0', '-') with subprocess.Popen( args=args, executable=rpmkeys_binary, env={'LC_ALL': 'C'}, stdout=subprocess.PIPE, cwd='/', stdin=package) as p: data = p.communicate()[0] returncode = p.returncode if type(returncode) is not int: raise AssertionError('Popen set return code to non-int') # rpmkeys can return something other than 0 or 1 in the case of a # fatal error (OOM, abort() called, SIGSEGV, etc) if returncode >= 2 or returncode < 0: return 2 ret = _process_rpm_output(data.split(b'\n')) if ret: return ret return 2 if returncode else 0 def checkSig(ts, package): """Takes a transaction set and a package, check it's sigs, return 0 if they are all fine return 1 if the gpg key can't be found return 2 if the header is in someway damaged return 3 if the key is not trusted return 4 if the pkg is not gpg or pgp signed""" fdno = os.open(package, os.O_RDONLY|os.O_NOCTTY|os.O_CLOEXEC) try: value = _verifyPackageUsingRpmkeys(fdno, ts.ts.rootDir) finally: os.close(fdno) return value PK!rpm/transaction.pynu[# # Client code for Update Agent # Copyright (c) 1999-2002 Red Hat, Inc. Distributed under GPL. 
#
# Adrian Likins
# Some Edits by Seth Vidal
#
# a couple of classes wrapping up transactions so that we
# can share transactions instead of creating new ones all over
#

from __future__ import absolute_import
from __future__ import unicode_literals
from dnf.i18n import _
import rpm

read_ts = None
ts = None


# wrapper/proxy class for rpm.Transaction so we can
# instrument it, etc easily
class TransactionWrapper(object):

    def __init__(self, root='/'):
        self.ts = rpm.TransactionSet(root)
        self._methods = ['check',
                         'order',
                         'addErase',
                         'addInstall',
                         'addReinstall',
                         'run',
                         'pgpImportPubkey',
                         'pgpPrtPkts',
                         'problems',
                         'setFlags',
                         'setVSFlags',
                         'setProbFilter',
                         'hdrFromFdno',
                         'next',
                         'clean']
        self.tsflags = []
        self.open = True

    def __del__(self):
        # Automatically close the rpm transaction when the reference is lost
        self.close()

    def close(self):
        if self.open:
            self.ts.closeDB()
            self.ts = None
            self.open = False

    def dbMatch(self, *args, **kwds):
        if 'patterns' in kwds:
            patterns = kwds.pop('patterns')
        else:
            patterns = []

        mi = self.ts.dbMatch(*args, **kwds)
        for (tag, tp, pat) in patterns:
            mi.pattern(tag, tp, pat)
        return mi

    def __getattr__(self, attr):
        if attr in self._methods:
            return self.getMethod(attr)
        else:
            raise AttributeError(attr)

    def __iter__(self):
        return self.ts

    def getMethod(self, method):
        # in theory, we can override this with
        # profile/etc info
        return getattr(self.ts, method)

    # push/pop methods so we dont lose the previous
    # set value, and we can potentiall debug a bit
    # easier
    def pushVSFlags(self, flags):
        self.tsflags.append(flags)
        self.ts.setVSFlags(self.tsflags[-1])

    def addTsFlag(self, flag):
        curflags = self.ts.setFlags(0)
        self.ts.setFlags(curflags | flag)

    def getTsFlags(self):
        curflags = self.ts.setFlags(0)
        self.ts.setFlags(curflags)
        return curflags

    def isTsFlagSet(self, flag):
        val = self.getTsFlags()
        return bool(flag & val)

    def setScriptFd(self, fd):
        self.ts.scriptFd = fd.fileno()

    def test(self, cb, conf={}):
        """tests the ts we've setup, takes a callback function and a conf dict for flags and what not"""

        origflags = self.getTsFlags()
        self.addTsFlag(rpm.RPMTRANS_FLAG_TEST)

        # FIXME GARBAGE - remove once this is reimplemented elsewhere
        # KEEPING FOR API COMPLIANCE ONLY
        if conf.get('diskspacecheck') == 0:
            self.ts.setProbFilter(rpm.RPMPROB_FILTER_DISKSPACE)
        tserrors = self.ts.run(cb.callback, '')
        self.ts.setFlags(origflags)

        reserrors = []
        if tserrors is not None:
            for (descr, (etype, mount, need)) in tserrors:
                reserrors.append(descr)
        if not reserrors:
            reserrors.append(_('Errors occurred during test transaction.'))

        return reserrors


def initReadOnlyTransaction(root='/'):
    read_ts = TransactionWrapper(root=root)
    read_ts.pushVSFlags((rpm._RPMVSF_NOSIGNATURES|rpm._RPMVSF_NODIGESTS))
    return read_ts


yum/__init__.py

# __init__.py
# The legacy YUM subpackage.
#
# Copyright (C) 2013 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # PK!ͩfW.W. yum/misc.pynu[# misc.py # Copyright (C) 2012-2016 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # """ Assorted utility functions for yum. """ from __future__ import print_function, absolute_import from __future__ import unicode_literals from dnf.pycomp import base64_decodebytes, basestring, unicode from stat import * import libdnf.utils import dnf.const import dnf.crypto import dnf.exceptions import dnf.i18n import errno import glob import io import os import os.path import pwd import re import shutil import tempfile _default_checksums = ['sha256'] _re_compiled_glob_match = None def re_glob(s): """ Tests if a string is a shell wildcard. """ global _re_compiled_glob_match if _re_compiled_glob_match is None: _re_compiled_glob_match = re.compile(r'[*?]|\[.+\]').search return _re_compiled_glob_match(s) _re_compiled_full_match = None def re_full_search_needed(s): """ Tests if a string needs a full nevra match, instead of just name. """ global _re_compiled_full_match if _re_compiled_full_match is None: # A glob, or a "." or "-" separator, followed by something (the ".") one = re.compile(r'.*([-.*?]|\[.+\]).').match # Any epoch, for envra two = re.compile('[0-9]+:').match _re_compiled_full_match = (one, two) for rec in _re_compiled_full_match: if rec(s): return True return False def get_default_chksum_type(): return _default_checksums[0] class GenericHolder(object): """Generic Holder class used to hold other objects of known types It exists purely to be able to do object.somestuff, object.someotherstuff or object[key] and pass object to another function that will understand it""" def __init__(self, iter=None): self.__iter = iter def __iter__(self): if self.__iter is not None: return iter(self[self.__iter]) def __getitem__(self, item): if hasattr(self, item): return getattr(self, item) else: raise KeyError(item) def all_lists(self): """Return a dictionary of all lists.""" return {key: list_ for key, list_ in vars(self).items() if type(list_) is list} def merge_lists(self, other): """ Concatenate the list attributes from 'other' to ours. 
""" for (key, val) in other.all_lists().items(): vars(self).setdefault(key, []).extend(val) return self def procgpgkey(rawkey): '''Convert ASCII-armored GPG key to binary ''' # Normalise newlines rawkey = re.sub(b'\r\n?', b'\n', rawkey) # Extract block block = io.BytesIO() inblock = 0 pastheaders = 0 for line in rawkey.split(b'\n'): if line.startswith(b'-----BEGIN PGP PUBLIC KEY BLOCK-----'): inblock = 1 elif inblock and line.strip() == b'': pastheaders = 1 elif inblock and line.startswith(b'-----END PGP PUBLIC KEY BLOCK-----'): # Hit the end of the block, get out break elif pastheaders and line.startswith(b'='): # Hit the CRC line, don't include this and stop break elif pastheaders: block.write(line + b'\n') # Decode and return return base64_decodebytes(block.getvalue()) def keyInstalled(ts, keyid, timestamp): ''' Return if the GPG key described by the given keyid and timestamp are installed in the rpmdb. The keyid and timestamp should both be passed as integers. The ts is an rpm transaction set object Return values: - -1 key is not installed - 0 key with matching ID and timestamp is installed - 1 key with matching ID is installed but has an older timestamp - 2 key with matching ID is installed but has a newer timestamp No effort is made to handle duplicates. The first matching keyid is used to calculate the return result. ''' # Search for hdr in ts.dbMatch('name', 'gpg-pubkey'): if hdr['version'] == keyid: installedts = int(hdr['release'], 16) if installedts == timestamp: return 0 elif installedts < timestamp: return 1 else: return 2 return -1 def import_key_to_pubring(rawkey, keyid, gpgdir=None, make_ro_copy=True): if not os.path.exists(gpgdir): os.makedirs(gpgdir) with dnf.crypto.pubring_dir(gpgdir), dnf.crypto.Context() as ctx: # import the key with open(os.path.join(gpgdir, 'gpg.conf'), 'wb') as fp: fp.write(b'') ctx.op_import(rawkey) if make_ro_copy: rodir = gpgdir + '-ro' if not os.path.exists(rodir): os.makedirs(rodir, mode=0o755) for f in glob.glob(gpgdir + '/*'): basename = os.path.basename(f) ro_f = rodir + '/' + basename shutil.copy(f, ro_f) os.chmod(ro_f, 0o755) # yes it is this stupid, why do you ask? opts = """lock-never no-auto-check-trustdb trust-model direct no-expensive-trust-checks no-permission-warning preserve-permissions """ with open(os.path.join(rodir, 'gpg.conf'), 'w', 0o755) as fp: fp.write(opts) return True def getCacheDir(): """return a path to a valid and safe cachedir - only used when not running as root or when --tempcache is set""" uid = os.geteuid() try: usertup = pwd.getpwuid(uid) username = dnf.i18n.ucd(usertup[0]) prefix = '%s-%s-' % (dnf.const.PREFIX, username) except KeyError: prefix = '%s-%s-' % (dnf.const.PREFIX, uid) # check for /var/tmp/prefix-* - dirpath = '%s/%s*' % (dnf.const.TMPDIR, prefix) cachedirs = sorted(glob.glob(dirpath)) for thisdir in cachedirs: stats = os.lstat(thisdir) if S_ISDIR(stats[0]) and S_IMODE(stats[0]) == 448 and stats[4] == uid: return thisdir # make the dir (tempfile.mkdtemp()) cachedir = tempfile.mkdtemp(prefix=prefix, dir=dnf.const.TMPDIR) return cachedir def seq_max_split(seq, max_entries): """ Given a seq, split into a list of lists of length max_entries each. """ ret = [] num = len(seq) seq = list(seq) # Trying to use a set/etc. here is bad beg = 0 while num > max_entries: end = beg + max_entries ret.append(seq[beg:end]) beg += max_entries num -= max_entries ret.append(seq[beg:]) return ret def unlink_f(filename): """ Call os.unlink, but don't die if the file isn't there. 
This is the main difference between "rm -f" and plain "rm". """ try: os.unlink(filename) except OSError as e: if e.errno != errno.ENOENT: raise def stat_f(filename, ignore_EACCES=False): """ Call os.stat(), but don't die if the file isn't there. Returns None. """ try: return os.stat(filename) except OSError as e: if e.errno in (errno.ENOENT, errno.ENOTDIR): return None if ignore_EACCES and e.errno == errno.EACCES: return None raise def _getloginuid(): """ Get the audit-uid/login-uid, if available. os.getuid() is returned instead if there was a problem. Note that no caching is done here. """ # We might normally call audit.audit_getloginuid(), except that requires # importing all of the audit module. And it doesn't work anyway: BZ 518721 try: with open("/proc/self/loginuid") as fo: data = fo.read() return int(data) except (IOError, ValueError): return os.getuid() _cached_getloginuid = None def getloginuid(): """ Get the audit-uid/login-uid, if available. os.getuid() is returned instead if there was a problem. The value is cached, so you don't have to save it. """ global _cached_getloginuid if _cached_getloginuid is None: _cached_getloginuid = _getloginuid() return _cached_getloginuid def decompress(filename, dest=None, check_timestamps=False): """take a filename and decompress it into the same relative location. When the compression type is not recognized (or file is not compressed), the content of the file is copied to the destination""" if dest: out = dest else: out = None dot_pos = filename.rfind('.') if dot_pos > 0: ext = filename[dot_pos:] if ext in ('.zck', '.xz', '.bz2', '.gz', '.lzma', '.zst'): out = filename[:dot_pos] if out is None: raise dnf.exceptions.MiscError("Could not determine destination filename") if check_timestamps: fi = stat_f(filename) fo = stat_f(out) if fi and fo and fo.st_mtime == fi.st_mtime: return out try: # libdnf.utils.decompress either decompress file to the destination or # copy the content if the compression type is not recognized libdnf.utils.decompress(filename, out, 0o644) except RuntimeError as e: raise dnf.exceptions.MiscError(str(e)) if check_timestamps and fi: os.utime(out, (fi.st_mtime, fi.st_mtime)) return out def calculate_repo_gen_dest(filename, generated_name): dest = os.path.dirname(filename) dest += '/gen' if not os.path.exists(dest): os.makedirs(dest, mode=0o755) return dest + '/' + generated_name def repo_gen_decompress(filename, generated_name): """ This is a wrapper around decompress, where we work out a cached generated name, and use check_timestamps. filename _must_ be from a repo. and generated_name is the type of the file. """ dest = calculate_repo_gen_dest(filename, generated_name) return decompress(filename, dest=dest, check_timestamps=True) def read_in_items_from_dot_dir(thisglob, line_as_list=True): """ Takes a glob of a dir (like /etc/foo.d/\\*.foo) returns a list of all the lines in all the files matching that glob, ignores comments and blank lines, optional paramater 'line_as_list tells whether to treat each line as a space or comma-separated list, defaults to True. 
""" results = [] for fname in glob.glob(thisglob): with open(fname) as f: for line in f: if re.match(r'\s*(#|$)', line): continue line = line.rstrip() # no more trailing \n's line = line.lstrip() # be nice if not line: continue if line_as_list: line = line.replace('\n', ' ') line = line.replace(',', ' ') results.extend(line.split()) continue results.append(line) return results PK!`Õ>>yum/rpmtrans.pynu[# This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Library General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # Copyright 2005 Duke University # Parts Copyright 2007 Red Hat, Inc from __future__ import print_function, absolute_import from __future__ import unicode_literals import libdnf.transaction from dnf.i18n import _, ucd import dnf.callback import dnf.transaction import dnf.util import rpm import os import logging import sys import tempfile import traceback import warnings # TODO: merge w/ libdnf # transaction set states TS_UPDATE = 10 TS_INSTALL = 20 TS_ERASE = 40 TS_OBSOLETED = 50 TS_OBSOLETING = 60 TS_AVAILABLE = 70 TS_UPDATED = 90 TS_FAILED = 100 TS_INSTALL_STATES = [TS_INSTALL, TS_UPDATE, TS_OBSOLETING] TS_REMOVE_STATES = [TS_ERASE, TS_OBSOLETED, TS_UPDATED] RPM_ACTIONS_SET = {libdnf.transaction.TransactionItemAction_INSTALL, libdnf.transaction.TransactionItemAction_DOWNGRADE, libdnf.transaction.TransactionItemAction_DOWNGRADED, libdnf.transaction.TransactionItemAction_OBSOLETE, libdnf.transaction.TransactionItemAction_OBSOLETED, libdnf.transaction.TransactionItemAction_UPGRADE, libdnf.transaction.TransactionItemAction_UPGRADED, libdnf.transaction.TransactionItemAction_REMOVE, libdnf.transaction.TransactionItemAction_REINSTALLED} logger = logging.getLogger('dnf') def _add_deprecated_action(name): """ Wrapper to return a deprecated action constant while printing a deprecation warning. """ @property def _func(self): msg = "%s.%s is deprecated. Use dnf.callback.%s instead." \ % (self.__class__.__name__, name, name) warnings.warn(msg, DeprecationWarning, stacklevel=2) value = getattr(dnf.callback, name) return value return _func class TransactionDisplay(object): # :api def __init__(self): # :api pass # use constants from dnf.callback which are the official API PKG_CLEANUP = _add_deprecated_action("PKG_CLEANUP") PKG_DOWNGRADE = _add_deprecated_action("PKG_DOWNGRADE") PKG_REMOVE = _add_deprecated_action("PKG_REMOVE") PKG_ERASE = PKG_REMOVE PKG_INSTALL = _add_deprecated_action("PKG_INSTALL") PKG_OBSOLETE = _add_deprecated_action("PKG_OBSOLETE") PKG_REINSTALL = _add_deprecated_action("PKG_REINSTALL") PKG_UPGRADE = _add_deprecated_action("PKG_UPGRADE") PKG_VERIFY = _add_deprecated_action("PKG_VERIFY") TRANS_PREPARATION = _add_deprecated_action("TRANS_PREPARATION") PKG_SCRIPTLET = _add_deprecated_action("PKG_SCRIPTLET") TRANS_POST = _add_deprecated_action("TRANS_POST") def progress(self, package, action, ti_done, ti_total, ts_done, ts_total): """Report ongoing progress on a transaction item. 
:api :param package: a package being processed :param action: the action being performed :param ti_done: number of processed bytes of the transaction item being processed :param ti_total: total number of bytes of the transaction item being processed :param ts_done: number of actions processed in the whole transaction :param ts_total: total number of actions in the whole transaction """ pass def scriptout(self, msgs): """Hook for reporting an rpm scriptlet output. :param msgs: the scriptlet output """ pass def error(self, message): """Report an error that occurred during the transaction. :api""" pass def filelog(self, package, action): # check package object type - if it is a string - just output it """package is the same as in progress() - a package object or simple string action is also the same as in progress()""" pass def verify_tsi_package(self, pkg, count, total): # TODO: replace with verify_tsi? self.progress(pkg, dnf.transaction.PKG_VERIFY, 100, 100, count, total) class ErrorTransactionDisplay(TransactionDisplay): """An RPMTransaction display that prints errors to standard output.""" def error(self, message): super(ErrorTransactionDisplay, self).error(message) dnf.util._terminal_messenger('print', message, sys.stderr) class LoggingTransactionDisplay(TransactionDisplay): ''' Base class for a RPMTransaction display callback class ''' def __init__(self): super(LoggingTransactionDisplay, self).__init__() self.rpm_logger = logging.getLogger('dnf.rpm') def error(self, message): self.rpm_logger.error(message) def filelog(self, package, action): action_str = dnf.transaction.FILE_ACTIONS[action] msg = '%s: %s' % (action_str, package) self.rpm_logger.log(dnf.logging.SUBDEBUG, msg) def scriptout(self, msgs): if msgs: self.rpm_logger.info(ucd(msgs)) class RPMTransaction(object): def __init__(self, base, test=False, displays=()): if not displays: displays = [ErrorTransactionDisplay()] self.displays = displays self.base = base self.test = test # are we a test? self.trans_running = False self.fd = None self.total_actions = 0 self.total_installed = 0 self.complete_actions = 0 self.installed_pkg_names = set() self.total_removed = 0 self._setupOutputLogging(base.conf.rpmverbosity) self._te_list = [] # Index in _te_list of the transaction element being processed (for use # in callbacks) self._te_index = 0 self._tsi_cache = None def _setupOutputLogging(self, rpmverbosity="info"): # UGLY... set up the transaction to record output from scriptlets io_r = tempfile.NamedTemporaryFile() self._readpipe = io_r self._writepipe = open(io_r.name, 'w+b') self.base._ts.setScriptFd(self._writepipe) rpmverbosity = {'critical' : 'crit', 'emergency' : 'emerg', 'error' : 'err', 'information' : 'info', 'warn' : 'warning'}.get(rpmverbosity, rpmverbosity) rpmverbosity = 'RPMLOG_' + rpmverbosity.upper() if not hasattr(rpm, rpmverbosity): rpmverbosity = 'RPMLOG_INFO' rpm.setVerbosity(getattr(rpm, rpmverbosity)) rpm.setLogFile(self._writepipe) def _shutdownOutputLogging(self): # reset rpm bits from recording output rpm.setVerbosity(rpm.RPMLOG_NOTICE) rpm.setLogFile(sys.stderr) try: self._writepipe.close() except: pass def _scriptOutput(self): try: # XXX ugly workaround of problem which started after upgrading glibc # from glibc-2.27-32.fc28.x86_64 to glibc-2.28-9.fc29.x86_64 # After this upgrade nothing is read from _readpipe, so every # posttrans and postun scriptlet output is lost. The problem # only occurs when using dnf-2, dnf-3 is OK. # I did not find the root cause of this error yet. 
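            # Hedged note on the workaround below: seeking to the current offset
            # appears to discard the reader's stale buffer, so the following
            # read() picks up scriptlet output appended through the separate
            # write handle (self._writepipe) on the same file.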
self._readpipe.seek(self._readpipe.tell()) out = self._readpipe.read() if not out: return None return out except IOError: pass def messages(self): messages = self._scriptOutput() if messages: for line in messages.splitlines(): yield ucd(line) def _scriptout(self): msgs = self._scriptOutput() for display in self.displays: display.scriptout(msgs) self.base.history.log_scriptlet_output(msgs) def __del__(self): self._shutdownOutputLogging() def _extract_cbkey(self, cbkey): """Obtain the package related to the calling callback.""" if hasattr(cbkey, "pkg"): tsi = cbkey return [tsi] te = self._te_list[self._te_index] te_nevra = dnf.util._te_nevra(te) if self._tsi_cache: if str(self._tsi_cache[0]) == te_nevra: return self._tsi_cache items = [] for tsi in self.base.transaction: if tsi.action not in RPM_ACTIONS_SET: # skip REINSTALL in order to return REINSTALLED, or REASON_CHANGE to avoid crash continue if str(tsi) == te_nevra: items.append(tsi) if items: self._tsi_cache = items return items raise RuntimeError("TransactionItem not found for key: %s" % cbkey) def callback(self, what, amount, total, key, client_data): try: if isinstance(key, str): key = ucd(key) if what == rpm.RPMCALLBACK_TRANS_START: self._transStart(total) elif what == rpm.RPMCALLBACK_TRANS_STOP: pass elif what == rpm.RPMCALLBACK_TRANS_PROGRESS: self._trans_progress(amount, total) elif what == rpm.RPMCALLBACK_ELEM_PROGRESS: # This callback type is issued every time the next transaction # element is about to be processed by RPM, before any other # callbacks are issued. "amount" carries the index of the element. self._elemProgress(key, amount) elif what == rpm.RPMCALLBACK_INST_OPEN_FILE: return self._instOpenFile(key) elif what == rpm.RPMCALLBACK_INST_CLOSE_FILE: self._instCloseFile(key) elif what == rpm.RPMCALLBACK_INST_START: self._inst_start(key) elif what == rpm.RPMCALLBACK_INST_STOP: self._inst_stop(key) elif what == rpm.RPMCALLBACK_INST_PROGRESS: self._instProgress(amount, total, key) elif what == rpm.RPMCALLBACK_UNINST_START: self._uninst_start(key) elif what == rpm.RPMCALLBACK_UNINST_STOP: self._unInstStop(key) elif what == rpm.RPMCALLBACK_UNINST_PROGRESS: self._uninst_progress(amount, total, key) elif what == rpm.RPMCALLBACK_CPIO_ERROR: self._cpioError(key) elif what == rpm.RPMCALLBACK_UNPACK_ERROR: self._unpackError(key) elif what == rpm.RPMCALLBACK_SCRIPT_ERROR: self._scriptError(amount, total, key) elif what == rpm.RPMCALLBACK_SCRIPT_START: self._script_start(key) elif what == rpm.RPMCALLBACK_SCRIPT_STOP: self._scriptStop() except Exception: exc_type, exc_value, exc_traceback = sys.exc_info() except_list = traceback.format_exception(exc_type, exc_value, exc_traceback) logger.critical(''.join(except_list)) def _transStart(self, total): self.total_actions = total if self.test: return self.trans_running = True self._te_list = list(self.base._ts) def _trans_progress(self, amount, total): action = dnf.transaction.TRANS_PREPARATION for display in self.displays: display.progress('', action, amount + 1, total, 1, 1) def _elemProgress(self, key, index): self._te_index = index self.complete_actions += 1 if not self.test: transaction_list = self._extract_cbkey(key) for display in self.displays: display.filelog(transaction_list[0].pkg, transaction_list[0].action) def _instOpenFile(self, key): self.lastmsg = None transaction_list = self._extract_cbkey(key) pkg = transaction_list[0].pkg rpmloc = pkg.localPkg() try: self.fd = open(rpmloc) except IOError as e: for display in self.displays: display.error("Error: Cannot open file %s: %s" % 
(rpmloc, e)) else: if self.trans_running: self.total_installed += 1 self.installed_pkg_names.add(pkg.name) return self.fd.fileno() def _instCloseFile(self, key): self.fd.close() self.fd = None def _inst_start(self, key): pass def _inst_stop(self, key): if self.test or not self.trans_running: return self._scriptout() if self.complete_actions == self.total_actions: # RPM doesn't explicitly report when post-trans phase starts action = dnf.transaction.TRANS_POST for display in self.displays: display.progress(None, action, None, None, None, None) def _instProgress(self, amount, total, key): transaction_list = self._extract_cbkey(key) pkg = transaction_list[0].pkg action = transaction_list[0].action for display in self.displays: display.progress(pkg, action, amount, total, self.complete_actions, self.total_actions) def _uninst_start(self, key): self.total_removed += 1 def _uninst_progress(self, amount, total, key): transaction_list = self._extract_cbkey(key) pkg = transaction_list[0].pkg action = transaction_list[0].action for display in self.displays: display.progress(pkg, action, amount, total, self.complete_actions, self.total_actions) def _unInstStop(self, key): if self.test: return self._scriptout() def _cpioError(self, key): transaction_list = self._extract_cbkey(key) msg = "Error in cpio payload of rpm package %s" % transaction_list[0].pkg for display in self.displays: display.error(msg) def _unpackError(self, key): transaction_list = self._extract_cbkey(key) msg = "Error unpacking rpm package %s" % transaction_list[0].pkg for display in self.displays: display.error(msg) def _scriptError(self, amount, total, key): # "amount" carries the failed scriptlet tag, # "total" carries fatal/non-fatal status scriptlet_name = rpm.tagnames.get(amount, "") transaction_list = self._extract_cbkey(key) name = transaction_list[0].pkg.name msg = ("Error in %s scriptlet in rpm package %s" % (scriptlet_name, name)) for display in self.displays: display.error(msg) def _script_start(self, key): # TODO: this doesn't fit into libdnf TransactionItem use cases action = dnf.transaction.PKG_SCRIPTLET if key is None and self._te_list == []: pkg = 'None' else: transaction_list = self._extract_cbkey(key) pkg = transaction_list[0].pkg complete = self.complete_actions if self.total_actions != 0 and self.complete_actions != 0 \ else 1 total = self.total_actions if self.total_actions != 0 and self.complete_actions != 0 else 1 for display in self.displays: display.progress(pkg, action, 100, 100, complete, total) def _scriptStop(self): self._scriptout() def verify_tsi_package(self, pkg, count, total): for display in self.displays: display.verify_tsi_package(pkg, count, total) PK!4?mm __init__.pynu[# __init__.py # The toplevel DNF package. # # Copyright (C) 2012-2016 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. 
Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # from __future__ import unicode_literals import warnings import dnf.pycomp warnings.filterwarnings('once', category=DeprecationWarning, module=r'^dnf\..*$') from dnf.const import VERSION __version__ = VERSION # :api import dnf.base Base = dnf.base.Base # :api import dnf.plugin Plugin = dnf.plugin.Plugin # :api # setup libraries dnf.pycomp.urlparse.uses_fragment.append("media") PK!U+base.pynu[# Copyright 2005 Duke University # Copyright (C) 2012-2018 Red Hat, Inc. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Library General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. """ Supplies the Base class. """ from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import argparse import dnf import libdnf.transaction from copy import deepcopy from dnf.comps import CompsQuery from dnf.i18n import _, P_, ucd from dnf.util import _parse_specs from dnf.db.history import SwdbInterface from dnf.yum import misc try: from collections.abc import Sequence except ImportError: from collections import Sequence import datetime import dnf.callback import dnf.comps import dnf.conf import dnf.conf.read import dnf.crypto import dnf.dnssec import dnf.drpm import dnf.exceptions import dnf.goal import dnf.history import dnf.lock import dnf.logging # WITH_MODULES is used by ansible (lib/ansible/modules/packaging/os/dnf.py) try: import dnf.module.module_base WITH_MODULES = True except ImportError: WITH_MODULES = False import dnf.persistor import dnf.plugin import dnf.query import dnf.repo import dnf.repodict import dnf.rpm.connection import dnf.rpm.miscutils import dnf.rpm.transaction import dnf.sack import dnf.selector import dnf.subject import dnf.transaction import dnf.util import dnf.yum.rpmtrans import functools import gc import hawkey import itertools import logging import math import os import operator import re import rpm import time import shutil logger = logging.getLogger("dnf") class Base(object): def __init__(self, conf=None): # :api self._closed = False self._conf = conf or self._setup_default_conf() self._goal = None self._repo_persistor = None self._sack = None self._transaction = None self._priv_ts = None self._comps = None self._comps_trans = dnf.comps.TransactionBunch() self._history = None self._tempfiles = set() self._trans_tempfiles = set() self._ds_callback = dnf.callback.Depsolve() self._logging = dnf.logging.Logging() self._repos = dnf.repodict.RepoDict() self._rpm_probfilter = set([rpm.RPMPROB_FILTER_OLDPACKAGE]) self._plugins = dnf.plugin.Plugins() self._trans_success = False self._trans_install_set = False self._tempfile_persistor = None # self._update_security_filters is used by ansible 
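        # Assumption (not stated here): these two attributes collect the
        # advisory/security filter queries and options supplied by callers and
        # are later consulted when upgrade queries are reduced to
        # security-relevant updates (see _merge_update_filters()).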
self._update_security_filters = [] self._update_security_options = {} self._allow_erasing = False self._repo_set_imported_gpg_keys = set() self.output = None def __enter__(self): return self def __exit__(self, *exc_args): self.close() def __del__(self): self.close() def _add_tempfiles(self, files): if self._transaction: self._trans_tempfiles.update(files) elif self.conf.destdir: pass else: self._tempfiles.update(files) def _add_repo_to_sack(self, repo): repo.load() mdload_flags = dict(load_filelists=True, load_presto=repo.deltarpm, load_updateinfo=True) if repo.load_metadata_other: mdload_flags["load_other"] = True try: self._sack.load_repo(repo._repo, build_cache=True, **mdload_flags) except hawkey.Exception as e: logger.debug(_("loading repo '{}' failure: {}").format(repo.id, e)) raise dnf.exceptions.RepoError( _("Loading repository '{}' has failed").format(repo.id)) @staticmethod def _setup_default_conf(): conf = dnf.conf.Conf() subst = conf.substitutions if 'releasever' not in subst: subst['releasever'] = \ dnf.rpm.detect_releasever(conf.installroot) return conf def _setup_modular_excludes(self): hot_fix_repos = [i.id for i in self.repos.iter_enabled() if i.module_hotfixes] try: solver_errors = self.sack.filter_modules( self._moduleContainer, hot_fix_repos, self.conf.installroot, self.conf.module_platform_id, update_only=False, debugsolver=self.conf.debug_solver, module_obsoletes=self.conf.module_obsoletes) except hawkey.Exception as e: raise dnf.exceptions.Error(ucd(e)) if solver_errors: logger.warning( dnf.module.module_base.format_modular_solver_errors(solver_errors[0])) def _setup_excludes_includes(self, only_main=False): disabled = set(self.conf.disable_excludes) if 'all' in disabled and WITH_MODULES: self._setup_modular_excludes() return repo_includes = [] repo_excludes = [] # first evaluate repo specific includes/excludes if not only_main: for r in self.repos.iter_enabled(): if r.id in disabled: continue if len(r.includepkgs) > 0: incl_query = self.sack.query().filterm(empty=True) for incl in set(r.includepkgs): subj = dnf.subject.Subject(incl) incl_query = incl_query.union(subj.get_best_query( self.sack, with_nevra=True, with_provides=False, with_filenames=False)) incl_query.filterm(reponame=r.id) repo_includes.append((incl_query.apply(), r.id)) excl_query = self.sack.query().filterm(empty=True) for excl in set(r.excludepkgs): subj = dnf.subject.Subject(excl) excl_query = excl_query.union(subj.get_best_query( self.sack, with_nevra=True, with_provides=False, with_filenames=False)) excl_query.filterm(reponame=r.id) if excl_query: repo_excludes.append((excl_query, r.id)) # then main (global) includes/excludes because they can mask # repo specific settings if 'main' not in disabled: include_query = self.sack.query().filterm(empty=True) if len(self.conf.includepkgs) > 0: for incl in set(self.conf.includepkgs): subj = dnf.subject.Subject(incl) include_query = include_query.union(subj.get_best_query( self.sack, with_nevra=True, with_provides=False, with_filenames=False)) exclude_query = self.sack.query().filterm(empty=True) for excl in set(self.conf.excludepkgs): subj = dnf.subject.Subject(excl) exclude_query = exclude_query.union(subj.get_best_query( self.sack, with_nevra=True, with_provides=False, with_filenames=False)) if len(self.conf.includepkgs) > 0: self.sack.add_includes(include_query) self.sack.set_use_includes(True) if exclude_query: self.sack.add_excludes(exclude_query) if repo_includes: for query, repoid in repo_includes: self.sack.add_includes(query) 
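                # Note: use_includes is switched on per repoid here, so a repo's
                # includepkgs list only constrains packages coming from that
                # repository, not from every enabled repository.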
self.sack.set_use_includes(True, repoid) if repo_excludes: for query, repoid in repo_excludes: self.sack.add_excludes(query) if not only_main and WITH_MODULES: self._setup_modular_excludes() def _store_persistent_data(self): if self._repo_persistor and not self.conf.cacheonly: expired = [r.id for r in self.repos.iter_enabled() if (r.metadata and r._repo.isExpired())] self._repo_persistor.expired_to_add.update(expired) self._repo_persistor.save() if self._tempfile_persistor: self._tempfile_persistor.save() @property def comps(self): # :api if self._comps is None: self.read_comps(arch_filter=True) return self._comps @property def conf(self): # :api return self._conf @property def repos(self): # :api return self._repos @repos.deleter def repos(self): # :api self._repos = None @property @dnf.util.lazyattr("_priv_rpmconn") def _rpmconn(self): return dnf.rpm.connection.RpmConnection(self.conf.installroot) @property def sack(self): # :api return self._sack @property def _moduleContainer(self): if self.sack is None: raise dnf.exceptions.Error("Sack was not initialized") if self.sack._moduleContainer is None: self.sack._moduleContainer = libdnf.module.ModulePackageContainer( False, self.conf.installroot, self.conf.substitutions["arch"], self.conf.persistdir) return self.sack._moduleContainer @property def transaction(self): # :api return self._transaction @transaction.setter def transaction(self, value): # :api if self._transaction: raise ValueError('transaction already set') self._transaction = value def _activate_persistor(self): self._repo_persistor = dnf.persistor.RepoPersistor(self.conf.cachedir) def init_plugins(self, disabled_glob=(), enable_plugins=(), cli=None): # :api """Load plugins and run their __init__().""" if self.conf.plugins: self._plugins._load(self.conf, disabled_glob, enable_plugins) self._plugins._run_init(self, cli) def pre_configure_plugins(self): # :api """Run plugins pre_configure() method.""" self._plugins._run_pre_config() def configure_plugins(self): # :api """Run plugins configure() method.""" self._plugins._run_config() def unload_plugins(self): # :api """Run plugins unload() method.""" self._plugins._unload() def update_cache(self, timer=False): # :api period = self.conf.metadata_timer_sync if self._repo_persistor is None: self._activate_persistor() persistor = self._repo_persistor if timer: if dnf.util.on_metered_connection(): msg = _('Metadata timer caching disabled ' 'when running on metered connection.') logger.info(msg) return False if dnf.util.on_ac_power() is False: msg = _('Metadata timer caching disabled ' 'when running on a battery.') logger.info(msg) return False if period <= 0: msg = _('Metadata timer caching disabled.') logger.info(msg) return False since_last_makecache = persistor.since_last_makecache() if since_last_makecache is not None and since_last_makecache < period: logger.info(_('Metadata cache refreshed recently.')) return False for repo in self.repos.values(): repo._repo.setMaxMirrorTries(1) if not self.repos._any_enabled(): logger.info(_('There are no enabled repositories in "{}".').format( '", "'.join(self.conf.reposdir))) return False for r in self.repos.iter_enabled(): (is_cache, expires_in) = r._metadata_expire_in() if expires_in is None: logger.info(_('%s: will never be expired and will not be refreshed.'), r.id) elif not is_cache or expires_in <= 0: logger.debug(_('%s: has expired and will be refreshed.'), r.id) r._repo.expire() elif timer and expires_in < period: # expires within the checking period: msg = _("%s: metadata will expire 
after %d seconds and will be refreshed now") logger.debug(msg, r.id, expires_in) r._repo.expire() else: logger.debug(_('%s: will expire after %d seconds.'), r.id, expires_in) if timer: persistor.reset_last_makecache = True self.fill_sack(load_system_repo=False, load_available_repos=True) # performs the md sync logger.info(_('Metadata cache created.')) return True def fill_sack(self, load_system_repo=True, load_available_repos=True): # :api """Prepare the Sack and the Goal objects. """ timer = dnf.logging.Timer('sack setup') self.reset(sack=True, goal=True) self._sack = dnf.sack._build_sack(self) lock = dnf.lock.build_metadata_lock(self.conf.cachedir, self.conf.exit_on_lock) with lock: if load_system_repo is not False: try: # FIXME: If build_cache=True, @System.solv is incorrectly updated in install- # remove loops self._sack.load_system_repo(build_cache=False) except IOError: if load_system_repo != 'auto': raise if load_available_repos: error_repos = [] mts = 0 age = time.time() # Iterate over installed GPG keys and check their validity using DNSSEC if self.conf.gpgkey_dns_verification: dnf.dnssec.RpmImportedKeys.check_imported_keys_validity() for r in self.repos.iter_enabled(): try: self._add_repo_to_sack(r) if r._repo.getTimestamp() > mts: mts = r._repo.getTimestamp() if r._repo.getAge() < age: age = r._repo.getAge() logger.debug(_("%s: using metadata from %s."), r.id, dnf.util.normalize_time( r._repo.getMaxTimestamp())) except dnf.exceptions.RepoError as e: r._repo.expire() if r.skip_if_unavailable is False: raise logger.warning("Error: %s", e) error_repos.append(r.id) r.disable() if error_repos: logger.warning( _("Ignoring repositories: %s"), ', '.join(error_repos)) if self.repos._any_enabled(): if age != 0 and mts != 0: logger.info(_("Last metadata expiration check: %s ago on %s."), datetime.timedelta(seconds=int(age)), dnf.util.normalize_time(mts)) else: self.repos.all().disable() conf = self.conf self._sack._configure(conf.installonlypkgs, conf.installonly_limit, conf.allow_vendor_change) self._setup_excludes_includes() timer() self._goal = dnf.goal.Goal(self._sack) self._goal.protect_running_kernel = conf.protect_running_kernel self._plugins.run_sack() return self._sack def fill_sack_from_repos_in_cache(self, load_system_repo=True): # :api """ Prepare Sack and Goal objects and also load all enabled repositories from cache only, it doesn't download anything and it doesn't check if metadata are expired. If there is not enough metadata present (repond.xml or both primary.xml and solv file are missing) given repo is either skipped or it throws a RepoError exception depending on skip_if_unavailable configuration. 
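        A minimal usage sketch (assumes repository metadata has already been
        downloaded, e.g. by an earlier "dnf makecache" run):

            with dnf.Base() as base:
                base.read_all_repos()
                base.fill_sack_from_repos_in_cache(load_system_repo=True)
                available = base.sack.query().available().run()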
""" timer = dnf.logging.Timer('sack setup') self.reset(sack=True, goal=True) self._sack = dnf.sack._build_sack(self) lock = dnf.lock.build_metadata_lock(self.conf.cachedir, self.conf.exit_on_lock) with lock: if load_system_repo is not False: try: # FIXME: If build_cache=True, @System.solv is incorrectly updated in install- # remove loops self._sack.load_system_repo(build_cache=False) except IOError: if load_system_repo != 'auto': raise error_repos = [] # Iterate over installed GPG keys and check their validity using DNSSEC if self.conf.gpgkey_dns_verification: dnf.dnssec.RpmImportedKeys.check_imported_keys_validity() for repo in self.repos.iter_enabled(): try: repo._repo.loadCache(throwExcept=True, ignoreMissing=True) mdload_flags = dict(load_filelists=True, load_presto=repo.deltarpm, load_updateinfo=True) if repo.load_metadata_other: mdload_flags["load_other"] = True self._sack.load_repo(repo._repo, **mdload_flags) logger.debug(_("%s: using metadata from %s."), repo.id, dnf.util.normalize_time( repo._repo.getMaxTimestamp())) except (RuntimeError, hawkey.Exception) as e: if repo.skip_if_unavailable is False: raise dnf.exceptions.RepoError( _("loading repo '{}' failure: {}").format(repo.id, e)) else: logger.debug(_("loading repo '{}' failure: {}").format(repo.id, e)) error_repos.append(repo.id) repo.disable() if error_repos: logger.warning( _("Ignoring repositories: %s"), ', '.join(error_repos)) conf = self.conf self._sack._configure(conf.installonlypkgs, conf.installonly_limit, conf.allow_vendor_change) self._setup_excludes_includes() timer() self._goal = dnf.goal.Goal(self._sack) self._goal.protect_running_kernel = conf.protect_running_kernel self._plugins.run_sack() return self._sack def _finalize_base(self): self._tempfile_persistor = dnf.persistor.TempfilePersistor( self.conf.cachedir) if not self.conf.keepcache: self._clean_packages(self._tempfiles) if self._trans_success: self._trans_tempfiles.update( self._tempfile_persistor.get_saved_tempfiles()) self._tempfile_persistor.empty() if self._trans_install_set: self._clean_packages(self._trans_tempfiles) else: self._tempfile_persistor.tempfiles_to_add.update( self._trans_tempfiles) if self._tempfile_persistor.tempfiles_to_add: logger.info(_("The downloaded packages were saved in cache " "until the next successful transaction.")) logger.info(_("You can remove cached packages by executing " "'%s'."), "{prog} clean packages".format(prog=dnf.util.MAIN_PROG)) # Do not trigger the lazy creation: if self._history is not None: self.history.close() self._store_persistent_data() self._closeRpmDB() self._trans_success = False def close(self): # :api """Close all potential handles and clean cache. Typically the handles are to data sources and sinks. 
""" if self._closed: return logger.log(dnf.logging.DDEBUG, 'Cleaning up.') self._closed = True self._finalize_base() self.reset(sack=True, repos=True, goal=True) self._plugins = None def read_all_repos(self, opts=None): # :api """Read repositories from the main conf file and from .repo files.""" reader = dnf.conf.read.RepoReader(self.conf, opts) for repo in reader: try: self.repos.add(repo) except dnf.exceptions.ConfigError as e: logger.warning(e) def reset(self, sack=False, repos=False, goal=False): # :api """Make the Base object forget about various things.""" if sack: self._sack = None if repos: self._repos = dnf.repodict.RepoDict() if goal: self._goal = None if self._sack is not None: self._goal = dnf.goal.Goal(self._sack) self._goal.protect_running_kernel = self.conf.protect_running_kernel if self._sack and self._moduleContainer: # sack must be set to enable operations on moduleContainer self._moduleContainer.rollback() if self._history is not None: self.history.close() self._comps_trans = dnf.comps.TransactionBunch() self._transaction = None self._update_security_filters = [] if sack and goal: # We've just done this, above: # # _sack _goal # | | # -- [CUT] -- -- [CUT] -- # | | # v | v # +----------------+ [C] +-------------+ # | DnfSack object | <-[U]- | Goal object | # +----------------+ [T] +-------------+ # |^ |^ |^ | # || || || # || || || | # +--||----||----||---+ [C] # | v| v| v| | <--[U]-- _transaction # | Pkg1 Pkg2 PkgN | [T] # | | | # | Transaction oject | # +-------------------+ # # At this point, the DnfSack object would be released only # eventually, by Python's generational garbage collector, due to the # cyclic references DnfSack<->Pkg1 ... DnfSack<->PkgN. # # The delayed release is a problem: the DnfSack object may # (indirectly) own "page file" file descriptors in libsolv, via # libdnf. For example, # # sack->priv->pool->repos[1]->repodata[1]->store.pagefd = 7 # sack->priv->pool->repos[1]->repodata[2]->store.pagefd = 8 # # These file descriptors are closed when the DnfSack object is # eventually released, that is, when dnf_sack_finalize() (in libdnf) # calls pool_free() (in libsolv). # # We need that to happen right now, as callers may want to unmount # the filesystems which those file descriptors refer to immediately # after reset() returns. Therefore, force a garbage collection here. gc.collect() def _closeRpmDB(self): """Closes down the instances of rpmdb that could be open.""" del self._ts _TS_FLAGS_TO_RPM = {'noscripts': rpm.RPMTRANS_FLAG_NOSCRIPTS, 'notriggers': rpm.RPMTRANS_FLAG_NOTRIGGERS, 'nodocs': rpm.RPMTRANS_FLAG_NODOCS, 'test': rpm.RPMTRANS_FLAG_TEST, 'justdb': rpm.RPMTRANS_FLAG_JUSTDB, 'nocontexts': rpm.RPMTRANS_FLAG_NOCONTEXTS, 'nocrypto': rpm.RPMTRANS_FLAG_NOFILEDIGEST} if hasattr(rpm, 'RPMTRANS_FLAG_NOCAPS'): # Introduced in rpm-4.14 _TS_FLAGS_TO_RPM['nocaps'] = rpm.RPMTRANS_FLAG_NOCAPS _TS_VSFLAGS_TO_RPM = {'nocrypto': rpm._RPMVSF_NOSIGNATURES | rpm._RPMVSF_NODIGESTS} @property def goal(self): return self._goal @property def _ts(self): """Set up the RPM transaction set that will be used for all the work.""" if self._priv_ts is not None: return self._priv_ts self._priv_ts = dnf.rpm.transaction.TransactionWrapper( self.conf.installroot) self._priv_ts.setFlags(0) # reset everything. 
for flag in self.conf.tsflags: rpm_flag = self._TS_FLAGS_TO_RPM.get(flag) if rpm_flag is None: logger.critical(_('Invalid tsflag in config file: %s'), flag) continue self._priv_ts.addTsFlag(rpm_flag) vs_flag = self._TS_VSFLAGS_TO_RPM.get(flag) if vs_flag is not None: self._priv_ts.pushVSFlags(vs_flag) if not self.conf.diskspacecheck: self._rpm_probfilter.add(rpm.RPMPROB_FILTER_DISKSPACE) if self.conf.ignorearch: self._rpm_probfilter.add(rpm.RPMPROB_FILTER_IGNOREARCH) probfilter = functools.reduce(operator.or_, self._rpm_probfilter, 0) self._priv_ts.setProbFilter(probfilter) return self._priv_ts @_ts.deleter def _ts(self): """Releases the RPM transaction set. """ if self._priv_ts is None: return self._priv_ts.close() del self._priv_ts self._priv_ts = None def read_comps(self, arch_filter=False): # :api """Create the groups object to access the comps metadata.""" timer = dnf.logging.Timer('loading comps') self._comps = dnf.comps.Comps() logger.log(dnf.logging.DDEBUG, 'Getting group metadata') for repo in self.repos.iter_enabled(): if not repo.enablegroups: continue if not repo.metadata: continue comps_fn = repo._repo.getCompsFn() if not comps_fn: continue logger.log(dnf.logging.DDEBUG, 'Adding group file from repository: %s', repo.id) if repo._repo.getSyncStrategy() == dnf.repo.SYNC_ONLY_CACHE: decompressed = misc.calculate_repo_gen_dest(comps_fn, 'groups.xml') if not os.path.exists(decompressed): # root privileges are needed for comps decompression continue else: decompressed = misc.repo_gen_decompress(comps_fn, 'groups.xml') try: self._comps._add_from_xml_filename(decompressed) except dnf.exceptions.CompsError as e: msg = _('Failed to add groups file for repository: %s - %s') logger.critical(msg, repo.id, e) if arch_filter: self._comps._i.arch_filter( [self._conf.substitutions['basearch']]) timer() return self._comps def _getHistory(self): """auto create the history object that to access/append the transaction history information. """ if self._history is None: releasever = self.conf.releasever self._history = SwdbInterface(self.conf.persistdir, releasever=releasever) return self._history history = property(fget=lambda self: self._getHistory(), fset=lambda self, value: setattr( self, "_history", value), fdel=lambda self: setattr(self, "_history", None), doc="DNF SWDB Interface Object") def _goal2transaction(self, goal): ts = self.history.rpm all_obsoleted = set(goal.list_obsoleted()) installonly_query = self._get_installonly_query() installonly_query.apply() installonly_query_installed = installonly_query.installed().apply() for pkg in goal.list_downgrades(): obs = goal.obsoleted_by_package(pkg) downgraded = obs[0] self._ds_callback.pkg_added(downgraded, 'dd') self._ds_callback.pkg_added(pkg, 'd') ts.add_downgrade(pkg, downgraded, obs[1:]) for pkg in goal.list_reinstalls(): self._ds_callback.pkg_added(pkg, 'r') obs = goal.obsoleted_by_package(pkg) nevra_pkg = str(pkg) # reinstall could obsolete multiple packages with the same NEVRA or different NEVRA # Set the package with the same NEVRA as reinstalled obsoletes = [] for obs_pkg in obs: if str(obs_pkg) == nevra_pkg: obsoletes.insert(0, obs_pkg) else: obsoletes.append(obs_pkg) reinstalled = obsoletes[0] ts.add_reinstall(pkg, reinstalled, obsoletes[1:]) for pkg in goal.list_installs(): self._ds_callback.pkg_added(pkg, 'i') obs = goal.obsoleted_by_package(pkg) # Skip obsoleted packages that are not part of all_obsoleted, # they are handled as upgrades/downgrades. # Also keep RPMs with the same name - they're not always in all_obsoleted. 
obs = [i for i in obs if i in all_obsoleted or i.name == pkg.name] reason = goal.get_reason(pkg) # Inherit reason if package is installonly an package with same name is installed # Use the same logic like upgrade # Upgrade of installonly packages result in install or install and remove step if pkg in installonly_query and installonly_query_installed.filter(name=pkg.name): reason = ts.get_reason(pkg) # inherit the best reason from obsoleted packages for obsolete in obs: reason_obsolete = ts.get_reason(obsolete) if libdnf.transaction.TransactionItemReasonCompare(reason, reason_obsolete) == -1: reason = reason_obsolete ts.add_install(pkg, obs, reason) cb = lambda pkg: self._ds_callback.pkg_added(pkg, 'od') dnf.util.mapall(cb, obs) for pkg in goal.list_upgrades(): obs = goal.obsoleted_by_package(pkg) upgraded = None for i in obs: # try to find a package with matching name as the upgrade if i.name == pkg.name: upgraded = i break if upgraded is None: # no matching name -> pick the first one upgraded = obs.pop(0) else: obs.remove(upgraded) # Skip obsoleted packages that are not part of all_obsoleted, # they are handled as upgrades/downgrades. # Also keep RPMs with the same name - they're not always in all_obsoleted. obs = [i for i in obs if i in all_obsoleted or i.name == pkg.name] cb = lambda pkg: self._ds_callback.pkg_added(pkg, 'od') dnf.util.mapall(cb, obs) if pkg in installonly_query: ts.add_install(pkg, obs) else: ts.add_upgrade(pkg, upgraded, obs) self._ds_callback.pkg_added(upgraded, 'ud') self._ds_callback.pkg_added(pkg, 'u') erasures = goal.list_erasures() if erasures: remaining_installed_query = self.sack.query(flags=hawkey.IGNORE_EXCLUDES).installed() remaining_installed_query.filterm(pkg__neq=erasures) for pkg in erasures: if remaining_installed_query.filter(name=pkg.name): remaining = remaining_installed_query[0] ts.get_reason(remaining) self.history.set_reason(remaining, ts.get_reason(remaining)) self._ds_callback.pkg_added(pkg, 'e') reason = goal.get_reason(pkg) ts.add_erase(pkg, reason) return ts def _query_matches_installed(self, q): """ See what packages in the query match packages (also in older versions, but always same architecture) that are already installed. Unlike in case of _sltr_matches_installed(), it is practical here to know even the packages in the original query that can still be installed. """ inst = q.installed() inst_per_arch = inst._na_dict() avail_per_arch = q.available()._na_dict() avail_l = [] inst_l = [] for na in avail_per_arch: if na in inst_per_arch: inst_l.append(inst_per_arch[na][0]) else: avail_l.append(avail_per_arch[na]) return inst_l, avail_l def _sltr_matches_installed(self, sltr): """ See if sltr matches a patches that is (in older version or different architecture perhaps) already installed. 
""" inst = self.sack.query().installed().filterm(pkg=sltr.matches()) return list(inst) def iter_userinstalled(self): """Get iterator over the packages installed by the user.""" return (pkg for pkg in self.sack.query().installed() if self.history.user_installed(pkg)) def _run_hawkey_goal(self, goal, allow_erasing): ret = goal.run( allow_uninstall=allow_erasing, force_best=self.conf.best, ignore_weak_deps=(not self.conf.install_weak_deps)) if self.conf.debug_solver: goal.write_debugdata('./debugdata/rpms') return ret def resolve(self, allow_erasing=False): # :api """Build the transaction set.""" exc = None self._finalize_comps_trans() timer = dnf.logging.Timer('depsolve') self._ds_callback.start() goal = self._goal if goal.req_has_erase(): goal.push_userinstalled(self.sack.query().installed(), self.history) elif not self.conf.upgrade_group_objects_upgrade: # exclude packages installed from groups # these packages will be marked to installation # which could prevent them from upgrade, downgrade # to prevent "conflicting job" error it's not applied # to "remove" and "reinstall" commands solver = self._build_comps_solver() solver._exclude_packages_from_installed_groups(self) goal.add_protected(self.sack.query().filterm( name=self.conf.protected_packages)) if not self._run_hawkey_goal(goal, allow_erasing): if self.conf.debuglevel >= 6: goal.log_decisions() msg = dnf.util._format_resolve_problems(goal.problem_rules()) exc = dnf.exceptions.DepsolveError(msg) else: self._transaction = self._goal2transaction(goal) self._ds_callback.end() timer() got_transaction = self._transaction is not None and \ len(self._transaction) > 0 if got_transaction: msg = self._transaction._rpm_limitations() if msg: exc = dnf.exceptions.Error(msg) if exc is not None: raise exc self._plugins.run_resolved() # auto-enable module streams based on installed RPMs new_pkgs = self._goal.list_installs() new_pkgs += self._goal.list_upgrades() new_pkgs += self._goal.list_downgrades() new_pkgs += self._goal.list_reinstalls() self.sack.set_modules_enabled_by_pkgset(self._moduleContainer, new_pkgs) return got_transaction def do_transaction(self, display=()): # :api if not isinstance(display, Sequence): display = [display] display = \ [dnf.yum.rpmtrans.LoggingTransactionDisplay()] + list(display) if not self.transaction: # packages are not changed, but comps and modules changes need to be committed self._moduleContainer.save() self._moduleContainer.updateFailSafeData() if self._history and (self._history.group or self._history.env): cmdline = None if hasattr(self, 'args') and self.args: cmdline = ' '.join(self.args) elif hasattr(self, 'cmds') and self.cmds: cmdline = ' '.join(self.cmds) old = self.history.last() if old is None: rpmdb_version = self.sack._rpmdb_version() else: rpmdb_version = old.end_rpmdb_version self.history.beg(rpmdb_version, [], [], cmdline) self.history.end(rpmdb_version) self._plugins.run_pre_transaction() self._plugins.run_transaction() self._trans_success = True return tid = None logger.info(_('Running transaction check')) lock = dnf.lock.build_rpmdb_lock(self.conf.persistdir, self.conf.exit_on_lock) with lock: self.transaction._populate_rpm_ts(self._ts) msgs = self._run_rpm_check() if msgs: msg = _('Error: transaction check vs depsolve:') logger.error(msg) for msg in msgs: logger.error(msg) raise dnf.exceptions.TransactionCheckError(msg) logger.info(_('Transaction check succeeded.')) timer = dnf.logging.Timer('transaction test') logger.info(_('Running transaction test')) self._ts.order() # order the transaction 
self._ts.clean() # release memory not needed beyond this point testcb = dnf.yum.rpmtrans.RPMTransaction(self, test=True) tserrors = self._ts.test(testcb) if len(tserrors) > 0: for msg in testcb.messages(): logger.critical(_('RPM: {}').format(msg)) errstring = _('Transaction test error:') + '\n' for descr in tserrors: errstring += ' %s\n' % ucd(descr) summary = self._trans_error_summary(errstring) if summary: errstring += '\n' + summary raise dnf.exceptions.Error(errstring) del testcb logger.info(_('Transaction test succeeded.')) # With RPMTRANS_FLAG_TEST return just before anything is stored permanently if self._ts.isTsFlagSet(rpm.RPMTRANS_FLAG_TEST): return timer() # save module states on disk right before entering rpm transaction, # because we want system in recoverable state if transaction gets interrupted self._moduleContainer.save() self._moduleContainer.updateFailSafeData() # unset the sigquit handler timer = dnf.logging.Timer('transaction') # setup our rpm ts callback cb = dnf.yum.rpmtrans.RPMTransaction(self, displays=display) if self.conf.debuglevel < 2: for display_ in cb.displays: display_.output = False self._plugins.run_pre_transaction() logger.info(_('Running transaction')) tid = self._run_transaction(cb=cb) timer() self._plugins.unload_removed_plugins(self.transaction) self._plugins.run_transaction() # log post transaction summary def _pto_callback(action, tsis): msgs = [] for tsi in tsis: msgs.append('{}: {}'.format(action, str(tsi))) return msgs for msg in dnf.util._post_transaction_output(self, self.transaction, _pto_callback): logger.debug(msg) return tid def _trans_error_summary(self, errstring): """Parse the error string for 'interesting' errors which can be grouped, such as disk space issues. :param errstring: the error string :return: a string containing a summary of the errors """ summary = '' # do disk space report first p = re.compile(r'needs (\d+)(K|M)B(?: more space)? on the (\S+) filesystem') disk = {} for m in p.finditer(errstring): size_in_mb = int(m.group(1)) if m.group(2) == 'M' else math.ceil( int(m.group(1)) / 1024.0) if m.group(3) not in disk: disk[m.group(3)] = size_in_mb if disk[m.group(3)] < size_in_mb: disk[m.group(3)] = size_in_mb if disk: summary += _('Disk Requirements:') + "\n" for k in disk: summary += " " + P_( 'At least {0}MB more space needed on the {1} filesystem.', 'At least {0}MB more space needed on the {1} filesystem.', disk[k]).format(disk[k], k) + '\n' if not summary: return None summary = _('Error Summary') + '\n-------------\n' + summary return summary def _record_history(self): return self.conf.history_record and \ not self._ts.isTsFlagSet(rpm.RPMTRANS_FLAG_TEST) def _run_transaction(self, cb): """ Perform the RPM transaction. 
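        Called from do_transaction() once the transaction test has succeeded;
        it optionally opens a history database transaction, runs the librpm
        transaction with an RPMTransaction callback, and reports any errors.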
:return: history database transaction ID or None """ tid = None if self._record_history(): using_pkgs_pats = list(self.conf.history_record_packages) installed_query = self.sack.query().installed() using_pkgs = installed_query.filter(name=using_pkgs_pats).run() rpmdbv = self.sack._rpmdb_version() lastdbv = self.history.last() if lastdbv is not None: lastdbv = lastdbv.end_rpmdb_version if lastdbv is None or rpmdbv != lastdbv: logger.debug(_("RPMDB altered outside of {prog}.").format( prog=dnf.util.MAIN_PROG_UPPER)) cmdline = None if hasattr(self, 'args') and self.args: cmdline = ' '.join(self.args) elif hasattr(self, 'cmds') and self.cmds: cmdline = ' '.join(self.cmds) comment = self.conf.comment if self.conf.comment else "" tid = self.history.beg(rpmdbv, using_pkgs, [], cmdline, comment) if self.conf.reset_nice: onice = os.nice(0) if onice: try: os.nice(-onice) except: onice = 0 logger.log(dnf.logging.DDEBUG, 'RPM transaction start.') errors = self._ts.run(cb.callback, '') logger.log(dnf.logging.DDEBUG, 'RPM transaction over.') # ts.run() exit codes are, hmm, "creative": None means all ok, empty # list means some errors happened in the transaction and non-empty # list that there were errors preventing the ts from starting... if self.conf.reset_nice: try: os.nice(onice) except: pass dnf.util._sync_rpm_trans_with_swdb(self._ts, self._transaction) if errors is None: pass elif len(errors) == 0: # If there is no failing element it means that some "global" error # occurred (like rpm failed to obtain the transaction lock). Just pass # the rpm logs on to the user and raise an Error. # If there are failing elements the problem is related to those # elements and the Error is raised later, after saving the failure # to the history and printing out the transaction table to user. 
failed = [el for el in self._ts if el.Failed()] if not failed: for msg in cb.messages(): logger.critical(_('RPM: {}').format(msg)) msg = _('Could not run transaction.') raise dnf.exceptions.Error(msg) else: logger.critical(_("Transaction couldn't start:")) for e in errors: logger.critical(ucd(e[0])) if self._record_history() and not self._ts.isTsFlagSet(rpm.RPMTRANS_FLAG_TEST): self.history.end(rpmdbv) msg = _("Could not run transaction.") raise dnf.exceptions.Error(msg) for i in ('ts_all_fn', 'ts_done_fn'): if hasattr(cb, i): fn = getattr(cb, i) try: misc.unlink_f(fn) except (IOError, OSError): msg = _('Failed to remove transaction file %s') logger.critical(msg, fn) # keep install_set status because _verify_transaction will clean it self._trans_install_set = bool(self._transaction.install_set) # sync up what just happened versus what is in the rpmdb if not self._ts.isTsFlagSet(rpm.RPMTRANS_FLAG_TEST): self._verify_transaction(cb.verify_tsi_package) return tid def _verify_transaction(self, verify_pkg_cb=None): transaction_items = [ tsi for tsi in self.transaction if tsi.action != libdnf.transaction.TransactionItemAction_REASON_CHANGE] total = len(transaction_items) def display_banner(pkg, count): count += 1 if verify_pkg_cb is not None: verify_pkg_cb(pkg, count, total) return count timer = dnf.logging.Timer('verify transaction') count = 0 rpmdb_sack = dnf.sack.rpmdb_sack(self) # mark group packages that are installed on the system as installed in the db q = rpmdb_sack.query().installed() names = set([i.name for i in q]) for ti in self.history.group: g = ti.getCompsGroupItem() for p in g.getPackages(): if p.getName() in names: p.setInstalled(True) p.save() # TODO: installed groups in environments # Post-transaction verification is no longer needed, # because DNF trusts error codes returned by RPM. # Verification banner is displayed to preserve UX. # TODO: drop in future DNF for tsi in transaction_items: count = display_banner(tsi.pkg, count) rpmdbv = rpmdb_sack._rpmdb_version() self.history.end(rpmdbv) timer() self._trans_success = True def _download_remote_payloads(self, payloads, drpm, progress, callback_total, fail_fast=True): lock = dnf.lock.build_download_lock(self.conf.cachedir, self.conf.exit_on_lock) with lock: beg_download = time.time() est_remote_size = sum(pload.download_size for pload in payloads) total_drpm = len( [payload for payload in payloads if isinstance(payload, dnf.drpm.DeltaPayload)]) # compatibility part for tools that do not accept total_drpms keyword if progress.start.__code__.co_argcount == 4: progress.start(len(payloads), est_remote_size, total_drpms=total_drpm) else: progress.start(len(payloads), est_remote_size) errors = dnf.repo._download_payloads(payloads, drpm, fail_fast) if errors._irrecoverable(): raise dnf.exceptions.DownloadError(errors._irrecoverable()) remote_size = sum(errors._bandwidth_used(pload) for pload in payloads) saving = dnf.repo._update_saving((0, 0), payloads, errors._recoverable) retries = self.conf.retries forever = retries == 0 while errors._recoverable and (forever or retries > 0): if retries > 0: retries -= 1 msg = _("Some packages were not downloaded. 
Retrying.") logger.info(msg) remaining_pkgs = [pkg for pkg in errors._recoverable] payloads = \ [dnf.repo._pkg2payload(pkg, progress, dnf.repo.RPMPayload) for pkg in remaining_pkgs] est_remote_size = sum(pload.download_size for pload in payloads) progress.start(len(payloads), est_remote_size) errors = dnf.repo._download_payloads(payloads, drpm, fail_fast) if errors._irrecoverable(): raise dnf.exceptions.DownloadError(errors._irrecoverable()) remote_size += \ sum(errors._bandwidth_used(pload) for pload in payloads) saving = dnf.repo._update_saving(saving, payloads, {}) if errors._recoverable: msg = dnf.exceptions.DownloadError.errmap2str( errors._recoverable) logger.info(msg) if callback_total is not None: callback_total(remote_size, beg_download) (real, full) = saving if real != full: if real < full: msg = _("Delta RPMs reduced %.1f MB of updates to %.1f MB " "(%d.1%% saved)") elif real > full: msg = _("Failed Delta RPMs increased %.1f MB of updates to %.1f MB " "(%d.1%% wasted)") percent = 100 - real / full * 100 logger.info(msg, full / 1024 ** 2, real / 1024 ** 2, percent) def download_packages(self, pkglist, progress=None, callback_total=None): # :api """Download the packages specified by the given list of packages. `pkglist` is a list of packages to download, `progress` is an optional DownloadProgress instance, `callback_total` an optional callback to output messages about the download operation. """ remote_pkgs, local_pkgs = self._select_remote_pkgs(pkglist) if remote_pkgs: if progress is None: progress = dnf.callback.NullDownloadProgress() drpm = dnf.drpm.DeltaInfo(self.sack.query().installed(), progress, self.conf.deltarpm_percentage) self._add_tempfiles([pkg.localPkg() for pkg in remote_pkgs]) payloads = [dnf.repo._pkg2payload(pkg, progress, drpm.delta_factory, dnf.repo.RPMPayload) for pkg in remote_pkgs] self._download_remote_payloads(payloads, drpm, progress, callback_total) if self.conf.destdir: for pkg in local_pkgs: if pkg.baseurl: location = os.path.join(pkg.get_local_baseurl(), pkg.location.lstrip("/")) else: location = os.path.join(pkg.repo.pkgdir, pkg.location.lstrip("/")) shutil.copy(location, self.conf.destdir) def add_remote_rpms(self, path_list, strict=True, progress=None): # :api pkgs = [] if not path_list: return pkgs if self._goal.req_length(): raise dnf.exceptions.Error( _("Cannot add local packages, because transaction job already exists")) pkgs_error = [] for path in path_list: if not os.path.exists(path) and '://' in path: # download remote rpm to a tempfile path = dnf.util._urlopen_progress(path, self.conf, progress) self._add_tempfiles([path]) try: pkgs.append(self.sack.add_cmdline_package(path)) except IOError as e: logger.warning(e) pkgs_error.append(path) self._setup_excludes_includes(only_main=True) if pkgs_error and strict: raise IOError(_("Could not open: {}").format(' '.join(pkgs_error))) return pkgs def _sig_check_pkg(self, po): """Verify the GPG signature of the given package object. :param po: the package object to verify the signature of :return: (result, error_string) where result is:: 0 = GPG signature verifies ok or verification is not required. 1 = GPG verification failed but installation of the right GPG key might help. 2 = Fatal GPG verification error, give up. 
""" if po._from_cmdline: check = self.conf.localpkg_gpgcheck hasgpgkey = 0 else: repo = self.repos[po.repoid] check = repo.gpgcheck hasgpgkey = not not repo.gpgkey if check: root = self.conf.installroot ts = dnf.rpm.transaction.initReadOnlyTransaction(root) sigresult = dnf.rpm.miscutils.checkSig(ts, po.localPkg()) localfn = os.path.basename(po.localPkg()) del ts if sigresult == 0: result = 0 msg = '' elif sigresult == 1: if hasgpgkey: result = 1 else: result = 2 msg = _('Public key for %s is not installed') % localfn elif sigresult == 2: result = 2 msg = _('Problem opening package %s') % localfn elif sigresult == 3: if hasgpgkey: result = 1 else: result = 2 result = 1 msg = _('Public key for %s is not trusted') % localfn elif sigresult == 4: result = 2 msg = _('Package %s is not signed') % localfn else: result = 0 msg = '' return result, msg def package_signature_check(self, pkg): # :api """Verify the GPG signature of the given package object. :param pkg: the package object to verify the signature of :return: (result, error_string) where result is:: 0 = GPG signature verifies ok or verification is not required. 1 = GPG verification failed but installation of the right GPG key might help. 2 = Fatal GPG verification error, give up. """ return self._sig_check_pkg(pkg) def _clean_packages(self, packages): for fn in packages: if not os.path.exists(fn): continue try: misc.unlink_f(fn) except OSError: logger.warning(_('Cannot remove %s'), fn) continue else: logger.log(dnf.logging.DDEBUG, _('%s removed'), fn) def _do_package_lists(self, pkgnarrow='all', patterns=None, showdups=None, ignore_case=False, reponame=None): """Return a :class:`misc.GenericHolder` containing lists of package objects. The contents of the lists are specified in various ways by the arguments. :param pkgnarrow: a string specifying which types of packages lists to produces, such as updates, installed, available, etc. 
:param patterns: a list of names or wildcards specifying packages to list :param showdups: whether to include duplicate packages in the lists :param ignore_case: whether to ignore case when searching by package names :param reponame: limit packages list to the given repository :return: a :class:`misc.GenericHolder` instance with the following lists defined:: available = list of packageObjects installed = list of packageObjects upgrades = tuples of packageObjects (updating, installed) extras = list of packageObjects obsoletes = tuples of packageObjects (obsoleting, installed) recent = list of packageObjects """ if showdups is None: showdups = self.conf.showdupesfromrepos if patterns is None: return self._list_pattern( pkgnarrow, patterns, showdups, ignore_case, reponame) assert not dnf.util.is_string_type(patterns) list_fn = functools.partial( self._list_pattern, pkgnarrow, showdups=showdups, ignore_case=ignore_case, reponame=reponame) if patterns is None or len(patterns) == 0: return list_fn(None) yghs = map(list_fn, patterns) return functools.reduce(lambda a, b: a.merge_lists(b), yghs) def _list_pattern(self, pkgnarrow, pattern, showdups, ignore_case, reponame=None): def is_from_repo(package): """Test whether given package originates from the repository.""" if reponame is None: return True return self.history.repo(package) == reponame def pkgs_from_repo(packages): """Filter out the packages which do not originate from the repo.""" return (package for package in packages if is_from_repo(package)) def query_for_repo(query): """Filter out the packages which do not originate from the repo.""" if reponame is None: return query return query.filter(reponame=reponame) ygh = misc.GenericHolder(iter=pkgnarrow) installed = [] available = [] reinstall_available = [] old_available = [] updates = [] obsoletes = [] obsoletesTuples = [] recent = [] extras = [] autoremove = [] # do the initial pre-selection ic = ignore_case q = self.sack.query() if pattern is not None: subj = dnf.subject.Subject(pattern, ignore_case=ic) q = subj.get_best_query(self.sack, with_provides=False) # list all packages - those installed and available: if pkgnarrow == 'all': dinst = {} ndinst = {} # Newest versions by name.arch for po in q.installed(): dinst[po.pkgtup] = po if showdups: continue key = (po.name, po.arch) if key not in ndinst or po > ndinst[key]: ndinst[key] = po installed = list(pkgs_from_repo(dinst.values())) avail = query_for_repo(q.available()) if not showdups: avail = avail.filterm(latest_per_arch_by_priority=True) for pkg in avail: if showdups: if pkg.pkgtup in dinst: reinstall_available.append(pkg) else: available.append(pkg) else: key = (pkg.name, pkg.arch) if pkg.pkgtup in dinst: reinstall_available.append(pkg) elif key not in ndinst or pkg.evr_gt(ndinst[key]): available.append(pkg) else: old_available.append(pkg) # produce the updates list of tuples elif pkgnarrow == 'upgrades': updates = query_for_repo(q).filterm(upgrades_by_priority=True) # reduce a query to security upgrades if they are specified updates = self._merge_update_filters(updates, upgrade=True) # reduce a query to remove src RPMs updates.filterm(arch__neq=['src', 'nosrc']) # reduce a query to latest packages updates = updates.latest().run() # installed only elif pkgnarrow == 'installed': installed = list(pkgs_from_repo(q.installed())) # available in a repository elif pkgnarrow == 'available': if showdups: avail = query_for_repo(q).available() installed_dict = q.installed()._na_dict() for avail_pkg in avail: key = (avail_pkg.name, 
avail_pkg.arch) installed_pkgs = installed_dict.get(key, []) same_ver = [pkg for pkg in installed_pkgs if pkg.evr == avail_pkg.evr] if len(same_ver) > 0: reinstall_available.append(avail_pkg) else: available.append(avail_pkg) else: # we will only look at the latest versions of packages: available_dict = query_for_repo( q).available().filterm(latest_per_arch_by_priority=True)._na_dict() installed_dict = q.installed().latest()._na_dict() for (name, arch) in available_dict: avail_pkg = available_dict[(name, arch)][0] inst_pkg = installed_dict.get((name, arch), [None])[0] if not inst_pkg or avail_pkg.evr_gt(inst_pkg): available.append(avail_pkg) elif avail_pkg.evr_eq(inst_pkg): reinstall_available.append(avail_pkg) else: old_available.append(avail_pkg) # packages to be removed by autoremove elif pkgnarrow == 'autoremove': autoremove_q = query_for_repo(q)._unneeded(self.history.swdb) autoremove = autoremove_q.run() # not in a repo but installed elif pkgnarrow == 'extras': extras = [pkg for pkg in q.extras() if is_from_repo(pkg)] # obsoleting packages (and what they obsolete) elif pkgnarrow == 'obsoletes': inst = q.installed() obsoletes = query_for_repo( self.sack.query()).filter(obsoletes_by_priority=inst) # reduce a query to security upgrades if they are specified obsoletes = self._merge_update_filters(obsoletes, warning=False, upgrade=True) # reduce a query to remove src RPMs obsoletes.filterm(arch__neq=['src', 'nosrc']) obsoletesTuples = [] for new in obsoletes: obsoleted_reldeps = new.obsoletes obsoletesTuples.extend( [(new, old) for old in inst.filter(provides=obsoleted_reldeps)]) # packages recently added to the repositories elif pkgnarrow == 'recent': avail = q.available() if not showdups: avail = avail.filterm(latest_per_arch_by_priority=True) recent = query_for_repo(avail)._recent(self.conf.recent) ygh.installed = installed ygh.available = available ygh.reinstall_available = reinstall_available ygh.old_available = old_available ygh.updates = updates ygh.obsoletes = obsoletes ygh.obsoletesTuples = obsoletesTuples ygh.recent = recent ygh.extras = extras ygh.autoremove = autoremove return ygh def _add_comps_trans(self, trans): self._comps_trans += trans return len(trans) def _remove_if_unneeded(self, query): """ Mark to remove packages that are not required by any user installed package (reason group or user) :param query: dnf.query.Query() object """ query = query.installed() if not query: return unneeded_pkgs = query._safe_to_remove(self.history.swdb, debug_solver=False) unneeded_pkgs_history = query.filter( pkg=[i for i in query if self.history.group.is_removable_pkg(i.name)]) pkg_with_dependent_pkgs = unneeded_pkgs_history.difference(unneeded_pkgs) # mark packages with dependent packages as a dependency to allow removal with dependent # package for pkg in pkg_with_dependent_pkgs: self.history.set_reason(pkg, libdnf.transaction.TransactionItemReason_DEPENDENCY) unneeded_pkgs = unneeded_pkgs.intersection(unneeded_pkgs_history) remove_packages = query.intersection(unneeded_pkgs) if remove_packages: for pkg in remove_packages: self._goal.erase(pkg, clean_deps=self.conf.clean_requirements_on_remove) def _finalize_comps_trans(self): trans = self._comps_trans basearch = self.conf.substitutions['basearch'] def trans_upgrade(query, remove_query, comps_pkg): sltr = dnf.selector.Selector(self.sack) sltr.set(pkg=query) self._goal.upgrade(select=sltr) return remove_query def trans_install(query, remove_query, comps_pkg, strict): if self.conf.multilib_policy == "all": if not comps_pkg.requires: 
self._install_multiarch(query, strict=strict) else: # it installs only one arch for conditional packages installed_query = query.installed().apply() self._report_already_installed(installed_query) sltr = dnf.selector.Selector(self.sack) sltr.set(provides="({} if {})".format(comps_pkg.name, comps_pkg.requires)) self._goal.install(select=sltr, optional=not strict) else: sltr = dnf.selector.Selector(self.sack) if comps_pkg.requires: sltr.set(provides="({} if {})".format(comps_pkg.name, comps_pkg.requires)) else: if self.conf.obsoletes: query = query.union(self.sack.query().filterm(obsoletes=query)) sltr.set(pkg=query) self._goal.install(select=sltr, optional=not strict) return remove_query def trans_remove(query, remove_query, comps_pkg): remove_query = remove_query.union(query) return remove_query remove_query = self.sack.query().filterm(empty=True) attr_fn = ((trans.install, functools.partial(trans_install, strict=True)), (trans.install_opt, functools.partial(trans_install, strict=False)), (trans.upgrade, trans_upgrade), (trans.remove, trans_remove)) for (attr, fn) in attr_fn: for comps_pkg in attr: query_args = {'name': comps_pkg.name} if (comps_pkg.basearchonly): query_args.update({'arch': basearch}) q = self.sack.query().filterm(**query_args).apply() q.filterm(arch__neq=["src", "nosrc"]) if not q: package_string = comps_pkg.name if comps_pkg.basearchonly: package_string += '.' + basearch logger.warning(_('No match for group package "{}"').format(package_string)) continue remove_query = fn(q, remove_query, comps_pkg) self._goal.group_members.add(comps_pkg.name) self._remove_if_unneeded(remove_query) def _build_comps_solver(self): def reason_fn(pkgname): q = self.sack.query().installed().filterm(name=pkgname) if not q: return None try: return self.history.rpm.get_reason(q[0]) except AttributeError: return libdnf.transaction.TransactionItemReason_UNKNOWN return dnf.comps.Solver(self.history, self._comps, reason_fn) def environment_install(self, env_id, types, exclude=None, strict=True, exclude_groups=None): # :api """Installs packages of environment group identified by env_id. :param types: Types of packages to install. Either an integer as a logical conjunction of CompsPackageType ids or a list of string package type ids (conditional, default, mandatory, optional). """ assert dnf.util.is_string_type(env_id) solver = self._build_comps_solver() if not isinstance(types, int): types = libdnf.transaction.listToCompsPackageType(types) trans = solver._environment_install(env_id, types, exclude or set(), strict, exclude_groups) if not trans: return 0 return self._add_comps_trans(trans) def environment_remove(self, env_id): # :api assert dnf.util.is_string_type(env_id) solver = self._build_comps_solver() trans = solver._environment_remove(env_id) return self._add_comps_trans(trans) def group_install(self, grp_id, pkg_types, exclude=None, strict=True): # :api """Installs packages of selected group :param pkg_types: Types of packages to install. Either an integer as a logical conjunction of CompsPackageType ids or a list of string package type ids (conditional, default, mandatory, optional). :param exclude: list of package name glob patterns that will be excluded from install set :param strict: boolean indicating whether group packages that exist but are non-installable due to e.g. 
dependency issues should be skipped (False) or cause transaction to fail to resolve (True) """ def _pattern_to_pkgname(pattern): if dnf.util.is_glob_pattern(pattern): q = self.sack.query().filterm(name__glob=pattern) return map(lambda p: p.name, q) else: return (pattern,) assert dnf.util.is_string_type(grp_id) exclude_pkgnames = None if exclude: nested_excludes = [_pattern_to_pkgname(p) for p in exclude] exclude_pkgnames = itertools.chain.from_iterable(nested_excludes) solver = self._build_comps_solver() if not isinstance(pkg_types, int): pkg_types = libdnf.transaction.listToCompsPackageType(pkg_types) trans = solver._group_install(grp_id, pkg_types, exclude_pkgnames, strict) if not trans: return 0 if strict: instlog = trans.install else: instlog = trans.install_opt logger.debug(_("Adding packages from group '%s': %s"), grp_id, instlog) return self._add_comps_trans(trans) def env_group_install(self, patterns, types, strict=True, exclude=None, exclude_groups=None): q = CompsQuery(self.comps, self.history, CompsQuery.ENVIRONMENTS | CompsQuery.GROUPS, CompsQuery.AVAILABLE) cnt = 0 done = True for pattern in patterns: try: res = q.get(pattern) except dnf.exceptions.CompsError as err: logger.error(ucd(err)) done = False continue for group_id in res.groups: if not exclude_groups or group_id not in exclude_groups: cnt += self.group_install(group_id, types, exclude=exclude, strict=strict) for env_id in res.environments: cnt += self.environment_install(env_id, types, exclude=exclude, strict=strict, exclude_groups=exclude_groups) if not done and strict: raise dnf.exceptions.Error(_('Nothing to do.')) return cnt def group_remove(self, grp_id): # :api assert dnf.util.is_string_type(grp_id) solver = self._build_comps_solver() trans = solver._group_remove(grp_id) return self._add_comps_trans(trans) def env_group_remove(self, patterns): q = CompsQuery(self.comps, self.history, CompsQuery.ENVIRONMENTS | CompsQuery.GROUPS, CompsQuery.INSTALLED) try: res = q.get(*patterns) except dnf.exceptions.CompsError as err: logger.error("Warning: %s", ucd(err)) raise dnf.exceptions.Error(_('No groups marked for removal.')) cnt = 0 for env in res.environments: cnt += self.environment_remove(env) for grp in res.groups: cnt += self.group_remove(grp) return cnt def env_group_upgrade(self, patterns): q = CompsQuery(self.comps, self.history, CompsQuery.GROUPS | CompsQuery.ENVIRONMENTS, CompsQuery.INSTALLED) group_upgraded = False for pattern in patterns: try: res = q.get(pattern) except dnf.exceptions.CompsError as err: logger.error(ucd(err)) continue for env in res.environments: try: self.environment_upgrade(env) group_upgraded = True except dnf.exceptions.CompsError as err: logger.error(ucd(err)) continue for grp in res.groups: try: self.group_upgrade(grp) group_upgraded = True except dnf.exceptions.CompsError as err: logger.error(ucd(err)) continue if not group_upgraded: msg = _('No group marked for upgrade.') raise dnf.cli.CliError(msg) def environment_upgrade(self, env_id): # :api assert dnf.util.is_string_type(env_id) solver = self._build_comps_solver() trans = solver._environment_upgrade(env_id) return self._add_comps_trans(trans) def group_upgrade(self, grp_id): # :api assert dnf.util.is_string_type(grp_id) solver = self._build_comps_solver() trans = solver._group_upgrade(grp_id) return self._add_comps_trans(trans) def _gpg_key_check(self): """Checks for the presence of GPG keys in the rpmdb. 
:return: 0 if there are no GPG keys in the rpmdb, and 1 if there are keys """ gpgkeyschecked = self.conf.cachedir + '/.gpgkeyschecked.yum' if os.path.exists(gpgkeyschecked): return 1 installroot = self.conf.installroot myts = dnf.rpm.transaction.initReadOnlyTransaction(root=installroot) myts.pushVSFlags(~(rpm._RPMVSF_NOSIGNATURES | rpm._RPMVSF_NODIGESTS)) idx = myts.dbMatch('name', 'gpg-pubkey') keys = len(idx) del idx del myts if keys == 0: return 0 else: mydir = os.path.dirname(gpgkeyschecked) if not os.path.exists(mydir): os.makedirs(mydir) fo = open(gpgkeyschecked, 'w') fo.close() del fo return 1 def _install_multiarch(self, query, reponame=None, strict=True): already_inst, available = self._query_matches_installed(query) self._report_already_installed(already_inst) for packages in available: sltr = dnf.selector.Selector(self.sack) q = self.sack.query().filterm(pkg=packages) if self.conf.obsoletes: q = q.union(self.sack.query().filterm(obsoletes=q)) sltr = sltr.set(pkg=q) if reponame is not None: sltr = sltr.set(reponame=reponame) self._goal.install(select=sltr, optional=(not strict)) return len(available) def _categorize_specs(self, install, exclude): """ Categorize :param install and :param exclude list into two groups each (packages and groups) :param install: list of specs, whether packages ('foo') or groups/modules ('@bar') :param exclude: list of specs, whether packages ('foo') or groups/modules ('@bar') :return: categorized install and exclude specs (stored in argparse.Namespace class) To access packages use: specs.pkg_specs, to access groups use: specs.grp_specs """ install_specs = argparse.Namespace() exclude_specs = argparse.Namespace() _parse_specs(install_specs, install) _parse_specs(exclude_specs, exclude) return install_specs, exclude_specs def _exclude_package_specs(self, exclude_specs): glob_excludes = [exclude for exclude in exclude_specs.pkg_specs if dnf.util.is_glob_pattern(exclude)] excludes = [exclude for exclude in exclude_specs.pkg_specs if exclude not in glob_excludes] exclude_query = self.sack.query().filter(name=excludes) glob_exclude_query = self.sack.query().filter(name__glob=glob_excludes) self.sack.add_excludes(exclude_query) self.sack.add_excludes(glob_exclude_query) def _expand_groups(self, group_specs): groups = set() q = CompsQuery(self.comps, self.history, CompsQuery.ENVIRONMENTS | CompsQuery.GROUPS, CompsQuery.AVAILABLE | CompsQuery.INSTALLED) for pattern in group_specs: try: res = q.get(pattern) except dnf.exceptions.CompsError as err: logger.error("Warning: Module or %s", ucd(err)) continue groups.update(res.groups) groups.update(res.environments) for environment_id in res.environments: environment = self.comps._environment_by_id(environment_id) for group in environment.groups_iter(): groups.add(group.id) return list(groups) def _install_groups(self, group_specs, excludes, skipped, strict=True): for group_spec in group_specs: try: types = self.conf.group_package_types if '/' in group_spec: split = group_spec.split('/') group_spec = split[0] types = split[1].split(',') self.env_group_install([group_spec], types, strict, excludes.pkg_specs, excludes.grp_specs) except dnf.exceptions.Error: skipped.append("@" + group_spec) def install_specs(self, install, exclude=None, reponame=None, strict=True, forms=None): # :api if exclude is None: exclude = [] no_match_group_specs = [] error_group_specs = [] no_match_pkg_specs = [] error_pkg_specs = [] install_specs, exclude_specs = self._categorize_specs(install, exclude) 
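# --- Illustrative caller-side sketch (not part of this module): how install_specs()
# is typically driven with a mix of plain package specs and "@group" specs.  The spec
# values, the function name install_mixed and the Base session helpers used here
# (read_all_repos, fill_sack, resolve) are assumptions based on the documented dnf API.
import dnf
import dnf.exceptions

def install_mixed(specs=("vim-enhanced", "@development-tools"), excludes=("emacs",)):
    with dnf.Base() as base:
        base.read_all_repos()
        base.fill_sack()
        try:
            # strict=False skips uninstallable candidates instead of failing the marking
            base.install_specs(list(specs), exclude=list(excludes), strict=False)
        except dnf.exceptions.MarkingErrors as e:
            # MarkingErrors aggregates unmatched or broken package, group and module specs
            print("some specs could not be marked:", e)
        base.resolve(allow_erasing=True)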
self._exclude_package_specs(exclude_specs) for spec in install_specs.pkg_specs: try: self.install(spec, reponame=reponame, strict=strict, forms=forms) except dnf.exceptions.MarkingError as e: logger.error(str(e)) no_match_pkg_specs.append(spec) no_match_module_specs = [] module_depsolv_errors = () if WITH_MODULES and install_specs.grp_specs: try: module_base = dnf.module.module_base.ModuleBase(self) module_base.install(install_specs.grp_specs, strict) except dnf.exceptions.MarkingErrors as e: if e.no_match_group_specs: for e_spec in e.no_match_group_specs: no_match_module_specs.append(e_spec) if e.error_group_specs: for e_spec in e.error_group_specs: error_group_specs.append("@" + e_spec) module_depsolv_errors = e.module_depsolv_errors else: no_match_module_specs = install_specs.grp_specs if no_match_module_specs: exclude_specs.grp_specs = self._expand_groups(exclude_specs.grp_specs) self._install_groups(no_match_module_specs, exclude_specs, no_match_group_specs, strict) if no_match_group_specs or error_group_specs or no_match_pkg_specs or error_pkg_specs \ or module_depsolv_errors: raise dnf.exceptions.MarkingErrors(no_match_group_specs=no_match_group_specs, error_group_specs=error_group_specs, no_match_pkg_specs=no_match_pkg_specs, error_pkg_specs=error_pkg_specs, module_depsolv_errors=module_depsolv_errors) def install(self, pkg_spec, reponame=None, strict=True, forms=None): # :api """Mark package(s) given by pkg_spec and reponame for installation.""" subj = dnf.subject.Subject(pkg_spec) solution = subj.get_best_solution(self.sack, forms=forms, with_src=False) if self.conf.multilib_policy == "all" or subj._is_arch_specified(solution): q = solution['query'] if reponame is not None: q.filterm(reponame=reponame) if not q: self._raise_package_not_found_error(pkg_spec, forms, reponame) return self._install_multiarch(q, reponame=reponame, strict=strict) elif self.conf.multilib_policy == "best": sltrs = subj._get_best_selectors(self, forms=forms, obsoletes=self.conf.obsoletes, reponame=reponame, reports=True, solution=solution) if not sltrs: self._raise_package_not_found_error(pkg_spec, forms, reponame) for sltr in sltrs: self._goal.install(select=sltr, optional=(not strict)) return 1 return 0 def package_downgrade(self, pkg, strict=False): # :api if pkg._from_system: msg = 'downgrade_package() for an installed package.' 
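# --- Illustrative sketch (an assumption about typical caller code, not dnf itself):
# the package_* methods nearby (package_install, package_upgrade, package_downgrade)
# take Package objects obtained from sack queries rather than spec strings.  The
# package name "bash" and the helper name mark_exact_package are only examples.
import dnf

def mark_exact_package(name="bash"):
    with dnf.Base() as base:
        base.read_all_repos()
        base.fill_sack()
        candidates = base.sack.query().available().filter(name=name).latest()
        if candidates:
            # strict=False turns dependency problems into skips instead of hard errors
            base.package_install(candidates[0], strict=False)
            base.resolve()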
raise NotImplementedError(msg) q = self.sack.query().installed().filterm(name=pkg.name, arch=[pkg.arch, "noarch"]) if not q: msg = _("Package %s not installed, cannot downgrade it.") logger.warning(msg, pkg.name) raise dnf.exceptions.MarkingError(_('No match for argument: %s') % pkg.location, pkg.name) elif sorted(q)[0] > pkg: sltr = dnf.selector.Selector(self.sack) sltr.set(pkg=[pkg]) self._goal.install(select=sltr, optional=(not strict)) return 1 else: msg = _("Package %s of lower version already installed, " "cannot downgrade it.") logger.warning(msg, pkg.name) return 0 def package_install(self, pkg, strict=True): # :api q = self.sack.query()._nevra(pkg.name, pkg.evr, pkg.arch) already_inst, available = self._query_matches_installed(q) if pkg in already_inst: self._report_already_installed([pkg]) elif pkg not in itertools.chain.from_iterable(available): raise dnf.exceptions.PackageNotFoundError(_('No match for argument: %s'), pkg.location) else: sltr = dnf.selector.Selector(self.sack) sltr.set(pkg=[pkg]) self._goal.install(select=sltr, optional=(not strict)) return 1 def package_reinstall(self, pkg): if self.sack.query().installed().filterm(name=pkg.name, evr=pkg.evr, arch=pkg.arch): self._goal.install(pkg) return 1 msg = _("Package %s not installed, cannot reinstall it.") logger.warning(msg, str(pkg)) raise dnf.exceptions.MarkingError(_('No match for argument: %s') % pkg.location, pkg.name) def package_remove(self, pkg): self._goal.erase(pkg) return 1 def package_upgrade(self, pkg): # :api if pkg._from_system: msg = 'upgrade_package() for an installed package.' raise NotImplementedError(msg) if pkg.arch == 'src': msg = _("File %s is a source package and cannot be updated, ignoring.") logger.info(msg, pkg.location) return 0 installed = self.sack.query().installed().apply() if self.conf.obsoletes and self.sack.query().filterm(pkg=[pkg]).filterm(obsoletes=installed): sltr = dnf.selector.Selector(self.sack) sltr.set(pkg=[pkg]) self._goal.upgrade(select=sltr) return 1 # do not filter by arch if the package is noarch if pkg.arch == "noarch": q = installed.filter(name=pkg.name) else: q = installed.filter(name=pkg.name, arch=[pkg.arch, "noarch"]) if not q: msg = _("Package %s not installed, cannot update it.") logger.warning(msg, pkg.name) raise dnf.exceptions.MarkingError( _('No match for argument: %s') % pkg.location, pkg.name) elif sorted(q)[-1] < pkg: sltr = dnf.selector.Selector(self.sack) sltr.set(pkg=[pkg]) self._goal.upgrade(select=sltr) return 1 else: msg = _("The same or higher version of %s is already installed, " "cannot update it.") logger.warning(msg, pkg.name) return 0 def _upgrade_internal(self, query, obsoletes, reponame, pkg_spec=None): installed_all = self.sack.query().installed() # Add only relevant obsoletes to transaction => installed, upgrades q = query.intersection(self.sack.query().filterm(name=[pkg.name for pkg in installed_all])) installed_query = q.installed() if obsoletes: obsoletes = self.sack.query().available().filterm( obsoletes=installed_query.union(q.upgrades())) # add obsoletes into transaction query = query.union(obsoletes) if reponame is not None: query.filterm(reponame=reponame) query = self._merge_update_filters(query, pkg_spec=pkg_spec, upgrade=True) if query: # Given that we use libsolv's targeted transactions, we need to ensure that the transaction contains both # the new targeted version and also the current installed version (for the upgraded package). 
This is # because if it only contained the new version, libsolv would decide to reinstall the package even if it # had just a different buildtime or vendor but the same version # (https://github.com/openSUSE/libsolv/issues/287) # - In general, the query already contains both the new and installed versions but not always. # If repository-packages command is used, the installed packages are filtered out because they are from # the @system repo. We need to add them back in. # - However we need to add installed versions of just the packages that are being upgraded. We don't want # to add all installed packages because it could increase the number of solutions for the transaction # (especially without --best) and since libsolv prefers the smallest possible upgrade it could result # in no upgrade even if there is one available. This is a problem in general but its critical with # --security transactions (https://bugzilla.redhat.com/show_bug.cgi?id=2097757) # - We want to add only the latest versions of installed packages, this is specifically for installonly # packages. Otherwise if for example kernel-1 and kernel-3 were installed and present in the # transaction libsolv could decide to install kernel-2 because it is an upgrade for kernel-1 even # though we don't want it because there already is a newer version present. query = query.union(installed_all.latest().filter(name=[pkg.name for pkg in query])) sltr = dnf.selector.Selector(self.sack) sltr.set(pkg=query) self._goal.upgrade(select=sltr) return 1 def upgrade(self, pkg_spec, reponame=None): # :api subj = dnf.subject.Subject(pkg_spec) solution = subj.get_best_solution(self.sack) q = solution["query"] if q: wildcard = dnf.util.is_glob_pattern(pkg_spec) # wildcard shouldn't print not installed packages # only solution with nevra.name provide packages with same name if not wildcard and solution['nevra'] and solution['nevra'].name: pkg_name = solution['nevra'].name installed = self.sack.query().installed().apply() obsoleters = q.filter(obsoletes=installed) \ if self.conf.obsoletes else self.sack.query().filterm(empty=True) if not obsoleters: installed_name = installed.filter(name=pkg_name).apply() if not installed_name: msg = _('Package %s available, but not installed.') logger.warning(msg, pkg_name) raise dnf.exceptions.PackagesNotInstalledError( _('No match for argument: %s') % pkg_spec, pkg_spec) elif solution['nevra'].arch and not dnf.util.is_glob_pattern(solution['nevra'].arch): if not installed_name.filterm(arch=solution['nevra'].arch): msg = _('Package %s available, but installed for different architecture.') logger.warning(msg, "{}.{}".format(pkg_name, solution['nevra'].arch)) obsoletes = self.conf.obsoletes and solution['nevra'] \ and solution['nevra'].has_just_name() return self._upgrade_internal(q, obsoletes, reponame, pkg_spec) raise dnf.exceptions.MarkingError(_('No match for argument: %s') % pkg_spec, pkg_spec) def upgrade_all(self, reponame=None): # :api # provide only available packages to solver to trigger targeted upgrade # possibilities will be ignored # usage of selected packages will unify dnf behavior with other upgrade functions return self._upgrade_internal( self.sack.query(), self.conf.obsoletes, reponame, pkg_spec=None) def distro_sync(self, pkg_spec=None): if pkg_spec is None: self._goal.distupgrade_all() else: subject = dnf.subject.Subject(pkg_spec) solution = subject.get_best_solution(self.sack, with_src=False) solution["query"].filterm(reponame__neq=hawkey.SYSTEM_REPO_NAME) sltrs = subject._get_best_selectors(self, 
solution=solution, obsoletes=self.conf.obsoletes, reports=True) if not sltrs: logger.info(_('No package %s installed.'), pkg_spec) return 0 for sltr in sltrs: self._goal.distupgrade(select=sltr) return 1 def autoremove(self, forms=None, pkg_specs=None, grp_specs=None, filenames=None): # :api """Removes all 'leaf' packages from the system that were originally installed as dependencies of user-installed packages but which are no longer required by any such package.""" if any([grp_specs, pkg_specs, filenames]): pkg_specs += filenames done = False # Remove groups. if grp_specs and forms: for grp_spec in grp_specs: msg = _('Not a valid form: %s') logger.warning(msg, grp_spec) elif grp_specs: if self.env_group_remove(grp_specs): done = True for pkg_spec in pkg_specs: try: self.remove(pkg_spec, forms=forms) except dnf.exceptions.MarkingError as e: logger.info(str(e)) else: done = True if not done: logger.warning(_('No packages marked for removal.')) else: pkgs = self.sack.query()._unneeded(self.history.swdb, debug_solver=self.conf.debug_solver) for pkg in pkgs: self.package_remove(pkg) def remove(self, pkg_spec, reponame=None, forms=None): # :api """Mark the specified package for removal.""" matches = dnf.subject.Subject(pkg_spec).get_best_query(self.sack, forms=forms) installed = [ pkg for pkg in matches.installed() if reponame is None or self.history.repo(pkg) == reponame] if not installed: self._raise_package_not_installed_error(pkg_spec, forms, reponame) clean_deps = self.conf.clean_requirements_on_remove for pkg in installed: self._goal.erase(pkg, clean_deps=clean_deps) return len(installed) def reinstall(self, pkg_spec, old_reponame=None, new_reponame=None, new_reponame_neq=None, remove_na=False): subj = dnf.subject.Subject(pkg_spec) q = subj.get_best_query(self.sack) installed_pkgs = [ pkg for pkg in q.installed() if old_reponame is None or self.history.repo(pkg) == old_reponame] available_q = q.available() if new_reponame is not None: available_q.filterm(reponame=new_reponame) if new_reponame_neq is not None: available_q.filterm(reponame__neq=new_reponame_neq) available_nevra2pkg = dnf.query._per_nevra_dict(available_q) if not installed_pkgs: raise dnf.exceptions.PackagesNotInstalledError( 'no package matched', pkg_spec, available_nevra2pkg.values()) cnt = 0 clean_deps = self.conf.clean_requirements_on_remove for installed_pkg in installed_pkgs: try: available_pkg = available_nevra2pkg[ucd(installed_pkg)] except KeyError: if not remove_na: continue self._goal.erase(installed_pkg, clean_deps=clean_deps) else: self._goal.install(available_pkg) cnt += 1 if cnt == 0: raise dnf.exceptions.PackagesNotAvailableError( 'no package matched', pkg_spec, installed_pkgs) return cnt def downgrade(self, pkg_spec): # :api """Mark a package to be downgraded. This is equivalent to first removing the currently installed package, and then installing an older version. """ return self.downgrade_to(pkg_spec) def downgrade_to(self, pkg_spec, strict=False): """Downgrade to specific version if specified otherwise downgrades to one version lower than the package installed. 
""" subj = dnf.subject.Subject(pkg_spec) q = subj.get_best_query(self.sack) if not q: msg = _('No match for argument: %s') % pkg_spec raise dnf.exceptions.PackageNotFoundError(msg, pkg_spec) done = 0 available_pkgs = q.available() available_pkg_names = list(available_pkgs._name_dict().keys()) q_installed = self.sack.query().installed().filterm(name=available_pkg_names) if len(q_installed) == 0: msg = _('Packages for argument %s available, but not installed.') % pkg_spec raise dnf.exceptions.PackagesNotInstalledError(msg, pkg_spec, available_pkgs) for pkg_name in q_installed._name_dict().keys(): downgrade_pkgs = available_pkgs.downgrades().filter(name=pkg_name) if not downgrade_pkgs: msg = _("Package %s of lowest version already installed, cannot downgrade it.") logger.warning(msg, pkg_name) continue sltr = dnf.selector.Selector(self.sack) sltr.set(pkg=downgrade_pkgs) self._goal.install(select=sltr, optional=(not strict)) done = 1 return done def provides(self, provides_spec): providers = self.sack.query().filterm(file__glob=provides_spec) if providers: return providers, [provides_spec] providers = dnf.query._by_provides(self.sack, provides_spec) if providers: return providers, [provides_spec] if provides_spec.startswith('/bin/') or provides_spec.startswith('/sbin/'): # compatibility for packages that didn't do UsrMove binary_provides = ['/usr' + provides_spec] elif provides_spec.startswith('/'): # provides_spec is a file path return providers, [provides_spec] else: # suppose that provides_spec is a command, search in /usr/sbin/ binary_provides = [prefix + provides_spec for prefix in ['/bin/', '/sbin/', '/usr/bin/', '/usr/sbin/']] return self.sack.query().filterm(file__glob=binary_provides), binary_provides def add_security_filters(self, cmp_type, types=(), advisory=(), bugzilla=(), cves=(), severity=()): # :api """ It modifies results of install, upgrade, and distrosync methods according to provided filters. :param cmp_type: only 'eq' or 'gte' allowed :param types: List or tuple with strings. E.g. 'bugfix', 'enhancement', 'newpackage', 'security' :param advisory: List or tuple with strings. E.g.Eg. FEDORA-2201-123 :param bugzilla: List or tuple with strings. Include packages that fix a Bugzilla ID, Eg. 123123. :param cves: List or tuple with strings. Include packages that fix a CVE (Common Vulnerabilities and Exposures) ID. Eg. CVE-2201-0123 :param severity: List or tuple with strings. Includes packages that provide a fix for an issue of the specified severity. 
""" cmp_dict = {'eq': '__eqg', 'gte': '__eqg__gt'} if cmp_type not in cmp_dict: raise ValueError("Unsupported value for `cmp_type`") cmp = cmp_dict[cmp_type] if types: key = 'advisory_type' + cmp self._update_security_options.setdefault(key, set()).update(types) if advisory: key = 'advisory' + cmp self._update_security_options.setdefault(key, set()).update(advisory) if bugzilla: key = 'advisory_bug' + cmp self._update_security_options.setdefault(key, set()).update(bugzilla) if cves: key = 'advisory_cve' + cmp self._update_security_options.setdefault(key, set()).update(cves) if severity: key = 'advisory_severity' + cmp self._update_security_options.setdefault(key, set()).update(severity) def reset_security_filters(self): # :api """ Reset all security filters """ self._update_security_options = {} def _merge_update_filters(self, q, pkg_spec=None, warning=True, upgrade=False): """ Merge Queries in _update_filters and return intersection with q Query @param q: Query @return: Query """ if not (self._update_security_options or self._update_security_filters) or not q: return q merged_queries = self.sack.query().filterm(empty=True) if self._update_security_filters: for query in self._update_security_filters: merged_queries = merged_queries.union(query) self._update_security_filters = [merged_queries] if self._update_security_options: for filter_name, values in self._update_security_options.items(): if upgrade: filter_name = filter_name + '__upgrade' kwargs = {filter_name: values} merged_queries = merged_queries.union(q.filter(**kwargs)) merged_queries = q.intersection(merged_queries) if not merged_queries: if warning: q = q.upgrades() count = len(q._name_dict().keys()) if count > 0: if pkg_spec is None: msg1 = _("No security updates needed, but {} update " "available").format(count) msg2 = _("No security updates needed, but {} updates " "available").format(count) logger.warning(P_(msg1, msg2, count)) else: msg1 = _('No security updates needed for "{}", but {} ' 'update available').format(pkg_spec, count) msg2 = _('No security updates needed for "{}", but {} ' 'updates available').format(pkg_spec, count) logger.warning(P_(msg1, msg2, count)) return merged_queries def _get_key_for_package(self, po, askcb=None, fullaskcb=None): """Retrieve a key for a package. If needed, use the given callback to prompt whether the key should be imported. :param po: the package object to retrieve the key of :param askcb: Callback function to use to ask permission to import a key. The arguments *askcb* should take are the package object, the userid of the key, and the keyid :param fullaskcb: Callback function to use to ask permission to import a key. This differs from *askcb* in that it gets passed a dictionary so that we can expand the values passed. :raises: :class:`dnf.exceptions.Error` if there are errors retrieving the keys """ if po._from_cmdline: # raise an exception, because po.repoid is not in self.repos msg = _('Unable to retrieve a key for a commandline package: %s') raise ValueError(msg % po) repo = self.repos[po.repoid] key_installed = repo.id in self._repo_set_imported_gpg_keys keyurls = [] if key_installed else repo.gpgkey def _prov_key_data(msg): msg += _('. 
Failing package is: %s') % (po) + '\n ' msg += _('GPG Keys are configured as: %s') % \ (', '.join(repo.gpgkey)) return msg user_cb_fail = False self._repo_set_imported_gpg_keys.add(repo.id) for keyurl in keyurls: keys = dnf.crypto.retrieve(keyurl, repo) for info in keys: # Check if key is already installed if misc.keyInstalled(self._ts, info.rpm_id, info.timestamp) >= 0: msg = _('GPG key at %s (0x%s) is already installed') logger.info(msg, keyurl, info.short_id) continue # DNS Extension: create a key object, pass it to the verification class # and print its result as an advice to the user. if self.conf.gpgkey_dns_verification: dns_input_key = dnf.dnssec.KeyInfo.from_rpm_key_object(info.userid, info.raw_key) dns_result = dnf.dnssec.DNSSECKeyVerification.verify(dns_input_key) logger.info(dnf.dnssec.nice_user_msg(dns_input_key, dns_result)) # Try installing/updating GPG key info.url = keyurl if self.conf.gpgkey_dns_verification: dnf.crypto.log_dns_key_import(info, dns_result) else: dnf.crypto.log_key_import(info) rc = False if self.conf.assumeno: rc = False elif self.conf.assumeyes: # DNS Extension: We assume, that the key is trusted in case it is valid, # its existence is explicitly denied or in case the domain is not signed # and therefore there is no way to know for sure (this is mainly for # backward compatibility) # FAQ: # * What is PROVEN_NONEXISTENCE? # In DNSSEC, your domain does not need to be signed, but this state # (not signed) has to be proven by the upper domain. e.g. when example.com. # is not signed, com. servers have to sign the message, that example.com. # does not have any signing key (KSK to be more precise). if self.conf.gpgkey_dns_verification: if dns_result in (dnf.dnssec.Validity.VALID, dnf.dnssec.Validity.PROVEN_NONEXISTENCE): rc = True logger.info(dnf.dnssec.any_msg(_("The key has been approved."))) else: rc = False logger.info(dnf.dnssec.any_msg(_("The key has been rejected."))) else: rc = True # grab the .sig/.asc for the keyurl, if it exists if it # does check the signature on the key if it is signed by # one of our ca-keys for this repo or the global one then # rc = True else ask as normal. 
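# --- Illustrative sketch of the confirmation callbacks consumed above (an assumption
# about typical caller code, not an excerpt from dnf): askcb receives (package, userid,
# keyid); fullaskcb receives a dict with "po", "userid", "hexkeyid", "keyurl",
# "fingerprint" and "timestamp".  Returning True imports the key, False rejects it.
# The repo ids listed below are hypothetical.
def _ask_key_import(po, userid, hexkeyid):
    trusted_repos = {"fedora", "updates"}   # hypothetical policy: trust only these repos
    return po.reponame in trusted_repos

def _ask_key_import_full(keyinfo):
    print("importing key 0x%s (%s) from %s" % (
        keyinfo["hexkeyid"], keyinfo["userid"], keyinfo["keyurl"]))
    return _ask_key_import(keyinfo["po"], keyinfo["userid"], keyinfo["hexkeyid"])

# A caller would hand these to the public wrapper defined below:
#     base.package_import_key(pkg, askcb=_ask_key_import, fullaskcb=_ask_key_import_full)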
elif fullaskcb: rc = fullaskcb({"po": po, "userid": info.userid, "hexkeyid": info.short_id, "keyurl": keyurl, "fingerprint": info.fingerprint, "timestamp": info.timestamp}) elif askcb: rc = askcb(po, info.userid, info.short_id) if not rc: user_cb_fail = True continue # Import the key # If rpm.RPMTRANS_FLAG_TEST in self._ts, gpg keys cannot be imported successfully # therefore the flag was removed for import operation test_flag = self._ts.isTsFlagSet(rpm.RPMTRANS_FLAG_TEST) if test_flag: orig_flags = self._ts.getTsFlags() self._ts.setFlags(orig_flags - rpm.RPMTRANS_FLAG_TEST) result = self._ts.pgpImportPubkey(misc.procgpgkey(info.raw_key)) if test_flag: self._ts.setFlags(orig_flags) if result != 0: msg = _('Key import failed (code %d)') % result raise dnf.exceptions.Error(_prov_key_data(msg)) logger.info(_('Key imported successfully')) key_installed = True if not key_installed and user_cb_fail: raise dnf.exceptions.Error(_("Didn't install any keys")) if not key_installed: msg = _('The GPG keys listed for the "%s" repository are ' 'already installed but they are not correct for this ' 'package.\n' 'Check that the correct key URLs are configured for ' 'this repository.') % repo.name raise dnf.exceptions.Error(_prov_key_data(msg)) # Check if the newly installed keys helped result, errmsg = self._sig_check_pkg(po) if result != 0: if keyurls: msg = _("Import of key(s) didn't help, wrong key(s)?") logger.info(msg) errmsg = ucd(errmsg) raise dnf.exceptions.Error(_prov_key_data(errmsg)) def package_import_key(self, pkg, askcb=None, fullaskcb=None): # :api """Retrieve a key for a package. If needed, use the given callback to prompt whether the key should be imported. :param pkg: the package object to retrieve the key of :param askcb: Callback function to use to ask permission to import a key. The arguments *askcb* should take are the package object, the userid of the key, and the keyid :param fullaskcb: Callback function to use to ask permission to import a key. This differs from *askcb* in that it gets passed a dictionary so that we can expand the values passed. :raises: :class:`dnf.exceptions.Error` if there are errors retrieving the keys """ self._get_key_for_package(pkg, askcb, fullaskcb) def _run_rpm_check(self): results = [] self._ts.check() for prob in self._ts.problems(): # Newer rpm (4.8.0+) has problem objects, older have just strings. # Should probably move to using the new objects, when we can. For # now just be compatible. results.append(ucd(prob)) return results def urlopen(self, url, repo=None, mode='w+b', **kwargs): # :api """ Open the specified absolute url, return a file object which respects proxy setting even for non-repo downloads """ return dnf.util._urlopen(url, self.conf, repo, mode, **kwargs) def _get_installonly_query(self, q=None): if q is None: q = self._sack.query(flags=hawkey.IGNORE_EXCLUDES) installonly = q.filter(provides=self.conf.installonlypkgs) return installonly def _report_icase_hint(self, pkg_spec): subj = dnf.subject.Subject(pkg_spec, ignore_case=True) solution = subj.get_best_solution(self.sack, with_nevra=True, with_provides=False, with_filenames=False) if solution['query'] and solution['nevra'] and solution['nevra'].name and \ pkg_spec != solution['query'][0].name: logger.info(_(" * Maybe you meant: {}").format(solution['query'][0].name)) def _select_remote_pkgs(self, install_pkgs): """ Check checksum of packages from local repositories and returns list packages from remote repositories that will be downloaded. Packages from commandline are skipped. 
:param install_pkgs: list of packages :return: list of remote pkgs """ def _verification_of_packages(pkg_list, logger_msg): all_packages_verified = True for pkg in pkg_list: pkg_successfully_verified = False try: pkg_successfully_verified = pkg.verifyLocalPkg() except Exception as e: logger.critical(str(e)) if pkg_successfully_verified is not True: logger.critical(logger_msg.format(pkg, pkg.reponame)) all_packages_verified = False return all_packages_verified remote_pkgs = [] local_repository_pkgs = [] for pkg in install_pkgs: if pkg._is_local_pkg(): if pkg.reponame != hawkey.CMDLINE_REPO_NAME: local_repository_pkgs.append(pkg) else: remote_pkgs.append(pkg) msg = _('Package "{}" from local repository "{}" has incorrect checksum') if not _verification_of_packages(local_repository_pkgs, msg): raise dnf.exceptions.Error( _("Some packages from local repository have incorrect checksum")) if self.conf.cacheonly: msg = _('Package "{}" from repository "{}" has incorrect checksum') if not _verification_of_packages(remote_pkgs, msg): raise dnf.exceptions.Error( _('Some packages have invalid cache, but cannot be downloaded due to ' '"--cacheonly" option')) remote_pkgs = [] return remote_pkgs, local_repository_pkgs def _report_already_installed(self, packages): for pkg in packages: _msg_installed(pkg) def _raise_package_not_found_error(self, pkg_spec, forms, reponame): all_query = self.sack.query(flags=hawkey.IGNORE_EXCLUDES) subject = dnf.subject.Subject(pkg_spec) solution = subject.get_best_solution( self.sack, forms=forms, with_src=False, query=all_query) if reponame is not None: solution['query'].filterm(reponame=reponame) if not solution['query']: raise dnf.exceptions.PackageNotFoundError(_('No match for argument'), pkg_spec) else: with_regular_query = self.sack.query(flags=hawkey.IGNORE_REGULAR_EXCLUDES) with_regular_query = solution['query'].intersection(with_regular_query) # Modular filtering is applied on a package set that already has regular excludes # filtered out. So if a package wasn't filtered out by regular excludes, it must have # been filtered out by modularity. if with_regular_query: msg = _('All matches were filtered out by exclude filtering for argument') else: msg = _('All matches were filtered out by modular filtering for argument') raise dnf.exceptions.PackageNotFoundError(msg, pkg_spec) def _raise_package_not_installed_error(self, pkg_spec, forms, reponame): all_query = self.sack.query(flags=hawkey.IGNORE_EXCLUDES).installed() subject = dnf.subject.Subject(pkg_spec) solution = subject.get_best_solution( self.sack, forms=forms, with_src=False, query=all_query) if not solution['query']: raise dnf.exceptions.PackagesNotInstalledError(_('No match for argument'), pkg_spec) if reponame is not None: installed = [pkg for pkg in solution['query'] if self.history.repo(pkg) == reponame] else: installed = solution['query'] if not installed: msg = _('All matches were installed from a different repository for argument') else: msg = _('All matches were filtered out by exclude filtering for argument') raise dnf.exceptions.PackagesNotInstalledError(msg, pkg_spec) def setup_loggers(self): # :api """ Setup DNF file loggers based on given configuration file. The loggers are set the same way as if DNF was run from CLI. 
""" self._logging._setup_from_dnf_conf(self.conf, file_loggers_only=True) def _skipped_packages(self, report_problems, transaction): """returns set of conflicting packages and set of packages with broken dependency that would be additionally installed when --best and --allowerasing""" if self._goal.actions & (hawkey.INSTALL | hawkey.UPGRADE | hawkey.UPGRADE_ALL): best = True else: best = False ng = deepcopy(self._goal) params = {"allow_uninstall": self._allow_erasing, "force_best": best, "ignore_weak": True} ret = ng.run(**params) if not ret and report_problems: msg = dnf.util._format_resolve_problems(ng.problem_rules()) logger.warning(msg) problem_conflicts = set(ng.problem_conflicts(available=True)) problem_dependency = set(ng.problem_broken_dependency(available=True)) - problem_conflicts def _nevra(item): return hawkey.NEVRA(name=item.name, epoch=item.epoch, version=item.version, release=item.release, arch=item.arch) # Sometimes, pkg is not in transaction item, therefore, comparing by nevra transaction_nevras = [_nevra(tsi) for tsi in transaction] skipped_conflicts = set( [pkg for pkg in problem_conflicts if _nevra(pkg) not in transaction_nevras]) skipped_dependency = set( [pkg for pkg in problem_dependency if _nevra(pkg) not in transaction_nevras]) return skipped_conflicts, skipped_dependency def _msg_installed(pkg): name = ucd(pkg) msg = _('Package %s is already installed.') logger.info(msg, name) PK!dٌ callback.pynu[# callbacks.py # Abstract interfaces to communicate progress on tasks. # # Copyright (C) 2014-2015 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. 
# from __future__ import unicode_literals import dnf.yum.rpmtrans import dnf.transaction PKG_DOWNGRADE = dnf.transaction.PKG_DOWNGRADE # :api PKG_DOWNGRADED = dnf.transaction.PKG_DOWNGRADED # :api PKG_INSTALL = dnf.transaction.PKG_INSTALL # :api PKG_OBSOLETE = dnf.transaction.PKG_OBSOLETE # :api PKG_OBSOLETED = dnf.transaction.PKG_OBSOLETED # :api PKG_REINSTALL = dnf.transaction.PKG_REINSTALL # :api PKG_REINSTALLED = dnf.transaction.PKG_REINSTALLED # :api PKG_REMOVE = dnf.transaction.PKG_ERASE # :api PKG_ERASE = PKG_REMOVE # deprecated, use PKG_REMOVE instead PKG_UPGRADE = dnf.transaction.PKG_UPGRADE # :api PKG_UPGRADED = dnf.transaction.PKG_UPGRADED # :api PKG_CLEANUP = dnf.transaction.PKG_CLEANUP # :api PKG_VERIFY = dnf.transaction.PKG_VERIFY # :api PKG_SCRIPTLET = dnf.transaction.PKG_SCRIPTLET # :api TRANS_PREPARATION = dnf.transaction.TRANS_PREPARATION # :api TRANS_POST = dnf.transaction.TRANS_POST # :api STATUS_OK = None # :api STATUS_FAILED = 1 # :api STATUS_ALREADY_EXISTS = 2 # :api STATUS_MIRROR = 3 # :api STATUS_DRPM = 4 # :api class KeyImport(object): def _confirm(self, id, userid, fingerprint, url, timestamp): """Ask the user if the key should be imported.""" return False class Payload(object): # :api def __init__(self, progress): self.progress = progress def __str__(self): """Nice, human-readable representation. :api""" pass @property def download_size(self): """Total size of the download. :api""" pass class DownloadProgress(object): # :api def end(self, payload, status, msg): """Communicate the information that `payload` has finished downloading. :api, `status` is a constant denoting the type of outcome, `err_msg` is an error message in case the outcome was an error. """ pass def message(self, msg): pass def progress(self, payload, done): """Update the progress display. :api `payload` is the payload this call reports progress for, `done` is how many bytes of this payload are already downloaded. """ pass def start(self, total_files, total_size, total_drpms=0): """Start new progress metering. :api `total_files` the number of files that will be downloaded, `total_size` total size of all files. """ pass class NullDownloadProgress(DownloadProgress): pass class Depsolve(object): def start(self): pass def pkg_added(self, pkg, mode): pass def end(self): pass TransactionProgress = dnf.yum.rpmtrans.TransactionDisplay # :api PK!GJc``comps.pynu[# comps.py # Interface to libcomps. # # Copyright (C) 2013-2018 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. 
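# --- Illustrative sketch (assumption: standard use of the callback API defined in
# dnf/callback.py above): a minimal DownloadProgress that logs per-payload completion.
# A caller would pass an instance via Base.download_packages(pkgs, progress=...).
import dnf.callback

class LoggingProgress(dnf.callback.DownloadProgress):
    def start(self, total_files, total_size, total_drpms=0):
        print("downloading %d files (%s bytes total)" % (total_files, total_size))

    def progress(self, payload, done):
        # called repeatedly with the number of bytes of `payload` downloaded so far
        pass

    def end(self, payload, status, msg):
        if status == dnf.callback.STATUS_OK:
            print("finished %s" % payload)
        else:
            print("failed %s: %s" % (payload, msg))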
# from __future__ import absolute_import from __future__ import print_function from __future__ import unicode_literals import libdnf.transaction from dnf.exceptions import CompsError from dnf.i18n import _, ucd from functools import reduce import dnf.i18n import dnf.util import fnmatch import gettext import itertools import libcomps import locale import logging import operator import re import sys logger = logging.getLogger("dnf") # :api :binformat CONDITIONAL = libdnf.transaction.CompsPackageType_CONDITIONAL DEFAULT = libdnf.transaction.CompsPackageType_DEFAULT MANDATORY = libdnf.transaction.CompsPackageType_MANDATORY OPTIONAL = libdnf.transaction.CompsPackageType_OPTIONAL ALL_TYPES = CONDITIONAL | DEFAULT | MANDATORY | OPTIONAL def _internal_comps_length(comps): collections = (comps.categories, comps.groups, comps.environments) return reduce(operator.__add__, map(len, collections)) def _first_if_iterable(seq): if seq is None: return None return dnf.util.first(seq) def _by_pattern(pattern, case_sensitive, sqn): """Return items from sqn matching either exactly or glob-wise.""" pattern = dnf.i18n.ucd(pattern) exact = {g for g in sqn if g.name == pattern or g.id == pattern} if exact: return exact if case_sensitive: match = re.compile(fnmatch.translate(pattern)).match else: match = re.compile(fnmatch.translate(pattern), flags=re.I).match ret = set() for g in sqn: if match(g.id): ret.add(g) elif g.name is not None and match(g.name): ret.add(g) elif g.ui_name is not None and match(g.ui_name): ret.add(g) return ret def _fn_display_order(group): return sys.maxsize if group.display_order is None else group.display_order def install_or_skip(install_fnc, grp_or_env_id, types, exclude=None, strict=True, exclude_groups=None): """ Installs a group or an environment identified by grp_or_env_id. This method is preserved for API compatibility. It used to catch an exception thrown when a gorup or env was already installed, which is no longer thrown. `install_fnc` has to be Solver._group_install or Solver._environment_install. 
""" return install_fnc(grp_or_env_id, types, exclude, strict, exclude_groups) class _Langs(object): """Get all usable abbreviations for the current language.""" def __init__(self): self.last_locale = None self.cache = None @staticmethod def _dotted_locale_str(): lcl = locale.getlocale(locale.LC_MESSAGES) if lcl == (None, None): return 'C' return '.'.join(lcl) def get(self): current_locale = self._dotted_locale_str() if self.last_locale == current_locale: return self.cache self.cache = [] locales = [current_locale] if current_locale != 'C': locales.append('C') for l in locales: for nlang in gettext._expand_lang(l): if nlang not in self.cache: self.cache.append(nlang) self.last_locale = current_locale return self.cache class CompsQuery(object): AVAILABLE = 1 INSTALLED = 2 ENVIRONMENTS = 1 GROUPS = 2 def __init__(self, comps, history, kinds, status): self.comps = comps self.history = history self.kinds = kinds self.status = status def _get_groups(self, available, installed): result = set() if self.status & self.AVAILABLE: result.update({i.id for i in available}) if self.status & self.INSTALLED: for i in installed: group = i.getCompsGroupItem() if not group: continue result.add(group.getGroupId()) return result def _get_envs(self, available, installed): result = set() if self.status & self.AVAILABLE: result.update({i.id for i in available}) if self.status & self.INSTALLED: for i in installed: env = i.getCompsEnvironmentItem() if not env: continue result.add(env.getEnvironmentId()) return result def get(self, *patterns): res = dnf.util.Bunch() res.environments = [] res.groups = [] for pat in patterns: envs = grps = [] if self.kinds & self.ENVIRONMENTS: available = self.comps.environments_by_pattern(pat) installed = self.history.env.search_by_pattern(pat) envs = self._get_envs(available, installed) res.environments.extend(envs) if self.kinds & self.GROUPS: available = self.comps.groups_by_pattern(pat) installed = self.history.group.search_by_pattern(pat) grps = self._get_groups(available, installed) res.groups.extend(grps) if not envs and not grps: if self.status == self.INSTALLED: msg = _("Module or Group '%s' is not installed.") % ucd(pat) elif self.status == self.AVAILABLE: msg = _("Module or Group '%s' is not available.") % ucd(pat) else: msg = _("Module or Group '%s' does not exist.") % ucd(pat) raise CompsError(msg) return res class Forwarder(object): def __init__(self, iobj, langs): self._i = iobj self._langs = langs def __getattr__(self, name): return getattr(self._i, name) def _ui_text(self, default, dct): for l in self._langs.get(): t = dct.get(l) if t is not None: return t return default @property def ui_description(self): return self._ui_text(self.desc, self.desc_by_lang) @property def ui_name(self): return self._ui_text(self.name, self.name_by_lang) class Category(Forwarder): # :api def __init__(self, iobj, langs, group_factory): super(Category, self).__init__(iobj, langs) self._group_factory = group_factory def _build_group(self, grp_id): grp = self._group_factory(grp_id.name) if grp is None: msg = "no group '%s' from category '%s'" raise ValueError(msg % (grp_id.name, self.id)) return grp def groups_iter(self): for grp_id in self.group_ids: yield self._build_group(grp_id) @property def groups(self): return list(self.groups_iter()) class Environment(Forwarder): # :api def __init__(self, iobj, langs, group_factory): super(Environment, self).__init__(iobj, langs) self._group_factory = group_factory def _build_group(self, grp_id): grp = self._group_factory(grp_id.name) if grp is None: 
msg = "no group '%s' from environment '%s'" raise ValueError(msg % (grp_id.name, self.id)) return grp def _build_groups(self, ids): groups = [] for gi in ids: try: groups.append(self._build_group(gi)) except ValueError as e: logger.error(e) return groups def groups_iter(self): for grp_id in itertools.chain(self.group_ids, self.option_ids): try: yield self._build_group(grp_id) except ValueError as e: logger.error(e) @property def mandatory_groups(self): return self._build_groups(self.group_ids) @property def optional_groups(self): return self._build_groups(self.option_ids) class Group(Forwarder): # :api def __init__(self, iobj, langs, pkg_factory): super(Group, self).__init__(iobj, langs) self._pkg_factory = pkg_factory self.selected = iobj.default def _packages_of_type(self, type_): return [pkg for pkg in self.packages if pkg.type == type_] @property def conditional_packages(self): return self._packages_of_type(libcomps.PACKAGE_TYPE_CONDITIONAL) @property def default_packages(self): return self._packages_of_type(libcomps.PACKAGE_TYPE_DEFAULT) def packages_iter(self): # :api return map(self._pkg_factory, self.packages) @property def mandatory_packages(self): return self._packages_of_type(libcomps.PACKAGE_TYPE_MANDATORY) @property def optional_packages(self): return self._packages_of_type(libcomps.PACKAGE_TYPE_OPTIONAL) @property def visible(self): return self._i.uservisible class Package(Forwarder): """Represents comps package data. :api""" _OPT_MAP = { libcomps.PACKAGE_TYPE_CONDITIONAL : CONDITIONAL, libcomps.PACKAGE_TYPE_DEFAULT : DEFAULT, libcomps.PACKAGE_TYPE_MANDATORY : MANDATORY, libcomps.PACKAGE_TYPE_OPTIONAL : OPTIONAL, } def __init__(self, ipkg): self._i = ipkg @property def name(self): # :api return self._i.name @property def option_type(self): # :api return self._OPT_MAP[self.type] class Comps(object): # :api def __init__(self): self._i = libcomps.Comps() self._langs = _Langs() def __len__(self): return _internal_comps_length(self._i) def _build_category(self, icategory): return Category(icategory, self._langs, self._group_by_id) def _build_environment(self, ienvironment): return Environment(ienvironment, self._langs, self._group_by_id) def _build_group(self, igroup): return Group(igroup, self._langs, self._build_package) def _build_package(self, ipkg): return Package(ipkg) def _add_from_xml_filename(self, fn): comps = libcomps.Comps() try: comps.fromxml_f(fn) except libcomps.ParserError: errors = comps.get_last_errors() raise CompsError(' '.join(errors)) self._i += comps @property def categories(self): # :api return list(self.categories_iter()) def category_by_pattern(self, pattern, case_sensitive=False): # :api assert dnf.util.is_string_type(pattern) cats = self.categories_by_pattern(pattern, case_sensitive) return _first_if_iterable(cats) def categories_by_pattern(self, pattern, case_sensitive=False): # :api assert dnf.util.is_string_type(pattern) return _by_pattern(pattern, case_sensitive, self.categories) def categories_iter(self): # :api return (self._build_category(c) for c in self._i.categories) @property def environments(self): # :api return sorted(self.environments_iter(), key=_fn_display_order) def _environment_by_id(self, id): assert dnf.util.is_string_type(id) return dnf.util.first(g for g in self.environments_iter() if g.id == id) def environment_by_pattern(self, pattern, case_sensitive=False): # :api assert dnf.util.is_string_type(pattern) envs = self.environments_by_pattern(pattern, case_sensitive) return _first_if_iterable(envs) def environments_by_pattern(self, 
pattern, case_sensitive=False): # :api assert dnf.util.is_string_type(pattern) envs = list(self.environments_iter()) found_envs = _by_pattern(pattern, case_sensitive, envs) return sorted(found_envs, key=_fn_display_order) def environments_iter(self): # :api return (self._build_environment(e) for e in self._i.environments) @property def groups(self): # :api return sorted(self.groups_iter(), key=_fn_display_order) def _group_by_id(self, id_): assert dnf.util.is_string_type(id_) return dnf.util.first(g for g in self.groups_iter() if g.id == id_) def group_by_pattern(self, pattern, case_sensitive=False): # :api assert dnf.util.is_string_type(pattern) grps = self.groups_by_pattern(pattern, case_sensitive) return _first_if_iterable(grps) def groups_by_pattern(self, pattern, case_sensitive=False): # :api assert dnf.util.is_string_type(pattern) grps = _by_pattern(pattern, case_sensitive, list(self.groups_iter())) return sorted(grps, key=_fn_display_order) def groups_iter(self): # :api return (self._build_group(g) for g in self._i.groups) class CompsTransPkg(object): def __init__(self, pkg_or_name): if dnf.util.is_string_type(pkg_or_name): # from package name self.basearchonly = False self.name = pkg_or_name self.optional = True self.requires = None elif isinstance(pkg_or_name, libdnf.transaction.CompsGroupPackage): # from swdb package # TODO: self.basearchonly = False # self.basearchonly = pkg_or_name.basearchonly self.name = pkg_or_name.getName() self.optional = pkg_or_name.getPackageType() & libcomps.PACKAGE_TYPE_OPTIONAL # TODO: self.requires = None # self.requires = pkg_or_name.requires else: # from comps package self.basearchonly = pkg_or_name.basearchonly self.name = pkg_or_name.name self.optional = pkg_or_name.type & libcomps.PACKAGE_TYPE_OPTIONAL self.requires = pkg_or_name.requires def __eq__(self, other): return (self.name == other.name and self.basearchonly == self.basearchonly and self.optional == self.optional and self.requires == self.requires) def __str__(self): return self.name def __hash__(self): return hash((self.name, self.basearchonly, self.optional, self.requires)) class TransactionBunch(object): def __init__(self): self._install = set() self._install_opt = set() self._remove = set() self._upgrade = set() def __iadd__(self, other): self._install.update(other._install) self._install_opt.update(other._install_opt) self._upgrade.update(other._upgrade) self._remove = (self._remove | other._remove) - \ self._install - self._install_opt - self._upgrade return self def __len__(self): return len(self.install) + len(self.install_opt) + len(self.upgrade) + len(self.remove) @staticmethod def _set_value(param, val): for item in val: if isinstance(item, CompsTransPkg): param.add(item) else: param.add(CompsTransPkg(item)) @property def install(self): """ Packages to be installed with strict=True - transaction will fail if they cannot be installed due to dependency errors etc. 
""" return self._install @install.setter def install(self, value): self._set_value(self._install, value) @property def install_opt(self): """ Packages to be installed with strict=False - they will be skipped if they cannot be installed """ return self._install_opt @install_opt.setter def install_opt(self, value): self._set_value(self._install_opt, value) @property def remove(self): return self._remove @remove.setter def remove(self, value): self._set_value(self._remove, value) @property def upgrade(self): return self._upgrade @upgrade.setter def upgrade(self, value): self._set_value(self._upgrade, value) class Solver(object): def __init__(self, history, comps, reason_fn): self.history = history self.comps = comps self._reason_fn = reason_fn @staticmethod def _mandatory_group_set(env): return {grp.id for grp in env.mandatory_groups} @staticmethod def _full_package_set(grp): return {pkg.getName() for pkg in grp.mandatory_packages + grp.default_packages + grp.optional_packages + grp.conditional_packages} @staticmethod def _pkgs_of_type(group, pkg_types, exclude=[]): def filter(pkgs): return [pkg for pkg in pkgs if pkg.name not in exclude] pkgs = set() if pkg_types & MANDATORY: pkgs.update(filter(group.mandatory_packages)) if pkg_types & DEFAULT: pkgs.update(filter(group.default_packages)) if pkg_types & OPTIONAL: pkgs.update(filter(group.optional_packages)) if pkg_types & CONDITIONAL: pkgs.update(filter(group.conditional_packages)) return pkgs def _removable_pkg(self, pkg_name): assert dnf.util.is_string_type(pkg_name) return self.history.group.is_removable_pkg(pkg_name) def _removable_grp(self, group_id): assert dnf.util.is_string_type(group_id) return self.history.env.is_removable_group(group_id) def _environment_install(self, env_id, pkg_types, exclude=None, strict=True, exclude_groups=None): assert dnf.util.is_string_type(env_id) comps_env = self.comps._environment_by_id(env_id) if not comps_env: raise CompsError(_("Environment id '%s' does not exist.") % ucd(env_id)) swdb_env = self.history.env.new(env_id, comps_env.name, comps_env.ui_name, pkg_types) self.history.env.install(swdb_env) trans = TransactionBunch() for comps_group in comps_env.mandatory_groups: if exclude_groups and comps_group.id in exclude_groups: continue trans += self._group_install(comps_group.id, pkg_types, exclude, strict) swdb_env.addGroup(comps_group.id, True, MANDATORY) for comps_group in comps_env.optional_groups: if exclude_groups and comps_group.id in exclude_groups: continue swdb_env.addGroup(comps_group.id, False, OPTIONAL) # TODO: if a group is already installed, mark it as installed? 
return trans def _environment_remove(self, env_id): assert dnf.util.is_string_type(env_id) is True swdb_env = self.history.env.get(env_id) if not swdb_env: raise CompsError(_("Environment id '%s' is not installed.") % env_id) self.history.env.remove(swdb_env) trans = TransactionBunch() group_ids = set([i.getGroupId() for i in swdb_env.getGroups()]) for group_id in group_ids: if not self._removable_grp(group_id): continue trans += self._group_remove(group_id) return trans def _environment_upgrade(self, env_id): assert dnf.util.is_string_type(env_id) comps_env = self.comps._environment_by_id(env_id) swdb_env = self.history.env.get(env_id) if not swdb_env: raise CompsError(_("Environment '%s' is not installed.") % env_id) if not comps_env: raise CompsError(_("Environment '%s' is not available.") % env_id) old_set = set([i.getGroupId() for i in swdb_env.getGroups()]) pkg_types = swdb_env.getPackageTypes() # create a new record for current transaction swdb_env = self.history.env.new(comps_env.id, comps_env.name, comps_env.ui_name, pkg_types) trans = TransactionBunch() for comps_group in comps_env.mandatory_groups: if comps_group.id in old_set: if self.history.group.get(comps_group.id): # upgrade installed group trans += self._group_upgrade(comps_group.id) else: # install new group trans += self._group_install(comps_group.id, pkg_types) swdb_env.addGroup(comps_group.id, True, MANDATORY) for comps_group in comps_env.optional_groups: if comps_group.id in old_set and self.history.group.get(comps_group.id): # upgrade installed group trans += self._group_upgrade(comps_group.id) swdb_env.addGroup(comps_group.id, False, OPTIONAL) # TODO: if a group is already installed, mark it as installed? self.history.env.upgrade(swdb_env) return trans def _group_install(self, group_id, pkg_types, exclude=None, strict=True, exclude_groups=None): assert dnf.util.is_string_type(group_id) comps_group = self.comps._group_by_id(group_id) if not comps_group: raise CompsError(_("Group id '%s' does not exist.") % ucd(group_id)) swdb_group = self.history.group.new(group_id, comps_group.name, comps_group.ui_name, pkg_types) for i in comps_group.packages_iter(): swdb_group.addPackage(i.name, False, Package._OPT_MAP[i.type]) self.history.group.install(swdb_group) trans = TransactionBunch() # TODO: remove exclude if strict: trans.install.update(self._pkgs_of_type(comps_group, pkg_types, exclude=[])) else: trans.install_opt.update(self._pkgs_of_type(comps_group, pkg_types, exclude=[])) return trans def _group_remove(self, group_id): assert dnf.util.is_string_type(group_id) swdb_group = self.history.group.get(group_id) if not swdb_group: raise CompsError(_("Module or Group '%s' is not installed.") % group_id) self.history.group.remove(swdb_group) trans = TransactionBunch() trans.remove = {pkg for pkg in swdb_group.getPackages() if self._removable_pkg(pkg.getName())} return trans def _group_upgrade(self, group_id): assert dnf.util.is_string_type(group_id) comps_group = self.comps._group_by_id(group_id) swdb_group = self.history.group.get(group_id) exclude = [] if not swdb_group: argument = comps_group.ui_name if comps_group else group_id raise CompsError(_("Module or Group '%s' is not installed.") % argument) if not comps_group: raise CompsError(_("Module or Group '%s' is not available.") % group_id) pkg_types = swdb_group.getPackageTypes() old_set = set([i.getName() for i in swdb_group.getPackages()]) new_set = self._pkgs_of_type(comps_group, pkg_types, exclude) # create a new record for current transaction swdb_group = 
self.history.group.new(group_id, comps_group.name, comps_group.ui_name, pkg_types) for i in comps_group.packages_iter(): swdb_group.addPackage(i.name, False, Package._OPT_MAP[i.type]) self.history.group.upgrade(swdb_group) trans = TransactionBunch() trans.install = {pkg for pkg in new_set if pkg.name not in old_set} trans.remove = {name for name in old_set if name not in [pkg.name for pkg in new_set]} trans.upgrade = {pkg for pkg in new_set if pkg.name in old_set} return trans def _exclude_packages_from_installed_groups(self, base): for group in self.persistor.groups: p_grp = self.persistor.group(group) if p_grp.installed: installed_pkg_names = \ set(p_grp.full_list) - set(p_grp.pkg_exclude) installed_pkgs = base.sack.query().installed().filterm(name=installed_pkg_names) for pkg in installed_pkgs: base._goal.install(pkg) PK!zC@ @ const.pynu[# const.py # dnf constants. # # Copyright (C) 2012-2015 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # from __future__ import unicode_literals import distutils.sysconfig CONF_FILENAME='/etc/dnf/dnf.conf' # :api CONF_AUTOMATIC_FILENAME='/etc/dnf/automatic.conf' DISTROVERPKG=('system-release(releasever)', 'system-release', 'distribution-release(releasever)', 'distribution-release', 'redhat-release', 'suse-release') GROUP_PACKAGE_TYPES = ('mandatory', 'default', 'conditional') # :api INSTALLONLYPKGS=['kernel', 'kernel-PAE', 'installonlypkg(kernel)', 'installonlypkg(kernel-module)', 'installonlypkg(vm)', 'multiversion(kernel)'] LOG='dnf.log' LOG_HAWKEY='hawkey.log' LOG_LIBREPO='dnf.librepo.log' LOG_MARKER='--- logging initialized ---' LOG_RPM='dnf.rpm.log' NAME='DNF' PERSISTDIR='/var/lib/dnf' # :api PID_FILENAME = '/var/run/dnf.pid' RUNDIR='/run' USER_RUNDIR='/run/user' SYSTEM_CACHEDIR='/var/cache/dnf' TMPDIR='/var/tmp/' # CLI verbose values greater or equal to this are considered "verbose": VERBOSE_LEVEL=6 PREFIX=NAME.lower() PROGRAM_NAME=NAME.lower() # Deprecated - no longer used, Argparser prints program name based on sys.argv PLUGINCONFPATH = '/etc/dnf/plugins' # :api PLUGINPATH = '%s/dnf-plugins' % distutils.sysconfig.get_python_lib() VERSION='4.7.0' USER_AGENT = "dnf/%s" % VERSION BUGTRACKER_COMPONENT=NAME.lower() BUGTRACKER='https://bugs.almalinux.org/' PK!%OE crypto.pynu[# crypto.py # Keys and signatures. # # Copyright (C) 2014 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. 
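# --- editor's annotation (illustrative example, not part of the original module) ---
# The Solver methods above take pkg_types as a bitmask built from the
# MANDATORY/DEFAULT/OPTIONAL/CONDITIONAL constants defined earlier in dnf.comps.
# The GROUP_PACKAGE_TYPES default in const.py above ('mandatory', 'default',
# 'conditional') corresponds to MANDATORY | DEFAULT | CONDITIONAL, so
# _pkgs_of_type() skips optional packages unless the caller opts in. A hedged sketch:
#
#     >>> from dnf.comps import CONDITIONAL, DEFAULT, MANDATORY, OPTIONAL
#     >>> pkg_types = MANDATORY | DEFAULT | CONDITIONAL
#     >>> bool(pkg_types & OPTIONAL)
#     False
# -----------------------------------------------------------------------------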
# This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # from __future__ import print_function from __future__ import absolute_import from __future__ import unicode_literals from dnf.i18n import _ import contextlib import dnf.pycomp import dnf.util import dnf.yum.misc import io import logging import os import tempfile try: from gpg import Context from gpg import Data except ImportError: import gpgme class Context(object): def __init__(self): self.__dict__["ctx"] = gpgme.Context() def __enter__(self): return self def __exit__(self, type, value, tb): pass @property def armor(self): return self.ctx.armor @armor.setter def armor(self, value): self.ctx.armor = value def op_import(self, key_fo): if isinstance(key_fo, basestring): key_fo = io.BytesIO(key_fo) self.ctx.import_(key_fo) def op_export(self, pattern, mode, keydata): self.ctx.export(pattern, keydata) def __getattr__(self, name): return getattr(self.ctx, name) class Data(object): def __init__(self): self.__dict__["buf"] = io.BytesIO() def __enter__(self): return self def __exit__(self, type, value, tb): pass def read(self): return self.buf.getvalue() def __getattr__(self, name): return getattr(self.buf, name) GPG_HOME_ENV = 'GNUPGHOME' logger = logging.getLogger('dnf') def _extract_signing_subkey(key): return dnf.util.first(subkey for subkey in key.subkeys if subkey.can_sign) def _printable_fingerprint(fpr_hex): segments = (fpr_hex[i:i + 4] for i in range(0, len(fpr_hex), 4)) return " ".join(segments) def import_repo_keys(repo): gpgdir = repo._pubring_dir known_keys = keyids_from_pubring(gpgdir) for keyurl in repo.gpgkey: for keyinfo in retrieve(keyurl, repo): keyid = keyinfo.id_ if keyid in known_keys: logger.debug(_('repo %s: 0x%s already imported'), repo.id, keyid) continue if not repo._key_import._confirm(keyinfo): continue dnf.yum.misc.import_key_to_pubring( keyinfo.raw_key, keyinfo.short_id, gpgdir=gpgdir, make_ro_copy=False) logger.debug(_('repo %s: imported key 0x%s.'), repo.id, keyid) def keyids_from_pubring(gpgdir): if not os.path.exists(gpgdir): return [] with pubring_dir(gpgdir), Context() as ctx: keyids = [] for k in ctx.keylist(): subkey = _extract_signing_subkey(k) if subkey is not None: keyids.append(subkey.keyid) return keyids def log_key_import(keyinfo): msg = (_('Importing GPG key 0x%s:\n' ' Userid : "%s"\n' ' Fingerprint: %s\n' ' From : %s') % (keyinfo.short_id, keyinfo.userid, _printable_fingerprint(keyinfo.fingerprint), keyinfo.url.replace("file://", ""))) logger.critical("%s", msg) def log_dns_key_import(keyinfo, dns_result): log_key_import(keyinfo) if dns_result == dnf.dnssec.Validity.VALID: logger.critical(_('Verified using DNS record with DNSSEC signature.')) else: logger.critical(_('NOT verified using DNS record.')) @contextlib.contextmanager def pubring_dir(pubring_dir): orig = os.environ.get(GPG_HOME_ENV, None) os.environ[GPG_HOME_ENV] = pubring_dir try: yield finally: if orig is None: 
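# --- editor's annotation (illustrative example, not part of the original module) ---
# _printable_fingerprint() above only regroups the hex fingerprint into
# 4-character blocks for the key-import prompt, e.g.:
#
#     >>> _printable_fingerprint('0123456789ABCDEF0123456789ABCDEF01234567')
#     '0123 4567 89AB CDEF 0123 4567 89AB CDEF 0123 4567'
#
# keyids_from_pubring() above returns the key id of every signing-capable subkey
# found in the given GPG home directory, or [] if the directory does not exist.
# -----------------------------------------------------------------------------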
del os.environ[GPG_HOME_ENV] else: os.environ[GPG_HOME_ENV] = orig def rawkey2infos(key_fo): pb_dir = tempfile.mkdtemp() keyinfos = [] with pubring_dir(pb_dir), Context() as ctx: ctx.op_import(key_fo) for key in ctx.keylist(): subkey = _extract_signing_subkey(key) if subkey is None: continue keyinfos.append(Key(key, subkey)) ctx.armor = True for info in keyinfos: with Data() as sink: ctx.op_export(info.id_, 0, sink) sink.seek(0, os.SEEK_SET) info.raw_key = sink.read() dnf.util.rm_rf(pb_dir) return keyinfos def retrieve(keyurl, repo=None): if keyurl.startswith('http:'): logger.warning(_("retrieving repo key for %s unencrypted from %s"), repo.id, keyurl) with dnf.util._urlopen(keyurl, repo=repo) as handle: keyinfos = rawkey2infos(handle) for keyinfo in keyinfos: keyinfo.url = keyurl return keyinfos class Key(object): def __init__(self, key, subkey): self.id_ = subkey.keyid self.fingerprint = subkey.fpr self.raw_key = None self.timestamp = subkey.timestamp self.url = None self.userid = key.uids[0].uid @property def short_id(self): rj = '0' if dnf.pycomp.PY3 else b'0' return self.id_[-8:].rjust(8, rj) @property def rpm_id(self): return self.short_id.lower() PK!wg;,;, dnssec.pynu[# dnssec.py # DNS extension for automatic GPG key verification # # Copyright (C) 2012-2018 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # from __future__ import print_function from __future__ import absolute_import from __future__ import unicode_literals from enum import Enum import base64 import hashlib import logging import re from dnf.i18n import _ import dnf.rpm import dnf.exceptions logger = logging.getLogger("dnf") RR_TYPE_OPENPGPKEY = 61 class DnssecError(dnf.exceptions.Error): """ Exception used in the dnssec module """ def __repr__(self): return ""\ .format(self.value if self.value is not None else "Not specified") def email2location(email_address, tag="_openpgpkey"): # type: (str, str) -> str """ Implements RFC 7929, section 3 https://tools.ietf.org/html/rfc7929#section-3 :param email_address: :param tag: :return: """ split = email_address.split("@") if len(split) != 2: msg = "Email address must contain exactly one '@' sign." raise DnssecError(msg) local = split[0] domain = split[1] hash = hashlib.sha256() hash.update(local.encode('utf-8')) digest = base64.b16encode(hash.digest()[0:28])\ .decode("utf-8")\ .lower() return digest + "." + tag + "." + domain class Validity(Enum): """ Output of the verification algorithm. TODO: this type might be simplified in order to less reflect the underlying DNS layer. 
TODO: more specifically the variants from 3 to 5 should have more understandable names """ VALID = 1 REVOKED = 2 PROVEN_NONEXISTENCE = 3 RESULT_NOT_SECURE = 4 BOGUS_RESULT = 5 ERROR = 9 class NoKey: """ This class represents an absence of a key in the cache. It is an expression of non-existence using the Python's type system. """ pass class KeyInfo: """ Wrapper class for email and associated verification key, where both are represented in form of a string. """ def __init__(self, email=None, key=None): self.email = email self.key = key @staticmethod def from_rpm_key_object(userid, raw_key): # type: (str, bytes) -> KeyInfo """ Since dnf uses different format of the key than the one used in DNS RR, I need to convert the former one into the new one. """ input_email = re.search('<(.*@.*)>', userid) if input_email is None: raise DnssecError email = input_email.group(1) key = raw_key.decode('ascii').split('\n') start = 0 stop = 0 for i in range(0, len(key)): if key[i] == '-----BEGIN PGP PUBLIC KEY BLOCK-----': start = i if key[i] == '-----END PGP PUBLIC KEY BLOCK-----': stop = i cat_key = ''.join(key[start + 2:stop - 1]).encode('ascii') return KeyInfo(email, cat_key) class DNSSECKeyVerification: """ The main class when it comes to verification itself. It wraps Unbound context and a cache with already obtained results. """ # Mapping from email address to b64 encoded public key or NoKey in case of proven nonexistence _cache = {} # type: Dict[str, Union[str, NoKey]] @staticmethod def _cache_hit(key_union, input_key_string): # type: (Union[str, NoKey], str) -> Validity """ Compare the key in case it was found in the cache. """ if key_union == input_key_string: logger.debug("Cache hit, valid key") return Validity.VALID elif key_union is NoKey: logger.debug("Cache hit, proven non-existence") return Validity.PROVEN_NONEXISTENCE else: logger.debug("Key in cache: {}".format(key_union)) logger.debug("Input key : {}".format(input_key_string)) return Validity.REVOKED @staticmethod def _cache_miss(input_key): # type: (KeyInfo) -> Validity """ In case the key was not found in the cache, create an Unbound context and contact the DNS system """ try: import unbound except ImportError as e: msg = _("Configuration option 'gpgkey_dns_verification' requires " "python3-unbound ({})".format(e)) raise dnf.exceptions.Error(msg) ctx = unbound.ub_ctx() if ctx.set_option("verbosity:", "0") != 0: logger.debug("Unbound context: Failed to set verbosity") if ctx.set_option("qname-minimisation:", "yes") != 0: logger.debug("Unbound context: Failed to set qname minimisation") if ctx.resolvconf() != 0: logger.debug("Unbound context: Failed to read resolv.conf") if ctx.add_ta_file("/var/lib/unbound/root.key") != 0: logger.debug("Unbound context: Failed to add trust anchor file") status, result = ctx.resolve(email2location(input_key.email), RR_TYPE_OPENPGPKEY, unbound.RR_CLASS_IN) if status != 0: logger.debug("Communication with DNS servers failed") return Validity.ERROR if result.bogus: logger.debug("DNSSEC signatures are wrong") return Validity.BOGUS_RESULT if not result.secure: logger.debug("Result is not secured with DNSSEC") return Validity.RESULT_NOT_SECURE if result.nxdomain: logger.debug("Non-existence of this record was proven by DNSSEC") return Validity.PROVEN_NONEXISTENCE if not result.havedata: # TODO: This is weird result, but there is no way to perform validation, so just return # an error logger.debug("Unknown error in DNS communication") return Validity.ERROR else: data = result.data.as_raw_data()[0] dns_data_b64 = 
base64.b64encode(data) if dns_data_b64 == input_key.key: return Validity.VALID else: # In case it is different, print the keys for further examination in debug mode logger.debug("Key from DNS: {}".format(dns_data_b64)) logger.debug("Input key : {}".format(input_key.key)) return Validity.REVOKED @staticmethod def verify(input_key): # type: (KeyInfo) -> Validity """ Public API. Use this method to verify a KeyInfo object. """ logger.debug("Running verification for key with id: {}".format(input_key.email)) key_union = DNSSECKeyVerification._cache.get(input_key.email) if key_union is not None: return DNSSECKeyVerification._cache_hit(key_union, input_key.key) else: result = DNSSECKeyVerification._cache_miss(input_key) if result == Validity.VALID: DNSSECKeyVerification._cache[input_key.email] = input_key.key elif result == Validity.PROVEN_NONEXISTENCE: DNSSECKeyVerification._cache[input_key.email] = NoKey() return result def nice_user_msg(ki, v): # type: (KeyInfo, Validity) -> str """ Inform the user about key validity in a human readable way. """ prefix = _("DNSSEC extension: Key for user ") + ki.email + " " if v == Validity.VALID: return prefix + _("is valid.") else: return prefix + _("has unknown status.") def any_msg(m): # type: (str) -> str """ Label any given message with DNSSEC extension tag """ return _("DNSSEC extension: ") + m class RpmImportedKeys: """ Wrapper around keys, that are imported in the RPM database. The keys are stored in packages with name gpg-pubkey, where the version and release is different for each of them. The key content itself is stored as an ASCII armored string in the package description, so it needs to be parsed before it can be used. """ @staticmethod def _query_db_for_gpg_keys(): # type: () -> List[KeyInfo] # TODO: base.conf.installroot ?? -----------------------\ transaction_set = dnf.rpm.transaction.TransactionWrapper() packages = transaction_set.dbMatch("name", "gpg-pubkey") return_list = [] for pkg in packages: packager = dnf.rpm.getheader(pkg, 'packager') email = re.search('<(.*@.*)>', packager).group(1) description = dnf.rpm.getheader(pkg, 'description') key_lines = description.split('\n')[3:-3] key_str = ''.join(key_lines) return_list += [KeyInfo(email, key_str.encode('ascii'))] return return_list @staticmethod def check_imported_keys_validity(): keys = RpmImportedKeys._query_db_for_gpg_keys() logger.info(any_msg(_("Testing already imported keys for their validity."))) for key in keys: try: result = DNSSECKeyVerification.verify(key) except DnssecError as e: # Errors in this exception should not be fatal, print it and just continue logger.warning("DNSSEC extension error (email={}): {}" .format(key.email, e.value)) continue # TODO: remove revoked keys automatically and possibly ask user to confirm if result == Validity.VALID: logger.debug(any_msg("GPG Key {} is valid".format(key.email))) pass elif result == Validity.PROVEN_NONEXISTENCE: logger.debug(any_msg("GPG Key {} does not support DNS" " verification".format(key.email))) elif result == Validity.BOGUS_RESULT: logger.info(any_msg("GPG Key {} could not be verified, because DNSSEC signatures" " are bogus. Possible causes: wrong configuration of the DNS" " server, MITM attack".format(key.email))) elif result == Validity.REVOKED: logger.info(any_msg("GPG Key {} has been revoked and should" " be removed immediately".format(key.email))) else: logger.debug(any_msg("GPG Key {} could not be tested".format(key.email))) PK!Mvdrpm.pynu[# drpm.py # Delta RPM support # # Copyright (C) 2012-2016 Red Hat, Inc. 
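# --- editor's annotation (illustrative example, not part of the original module) ---
# email2location() above implements the RFC 7929 owner-name scheme: the local part
# of the address is SHA-256 hashed, the digest truncated to 28 bytes, hex-encoded
# in lower case, and prefixed to "_openpgpkey.<domain>". Shape of the result
# (digest elided here, not a real value):
#
#     >>> email2location('security@example.com')
#     '<56 lowercase hex characters>._openpgpkey.example.com'
#
# DNSSECKeyVerification.verify() above memoizes one answer per email address: a
# VALID lookup caches the key string and a PROVEN_NONEXISTENCE lookup caches a
# NoKey instance, so re-checking the same key causes no further DNS traffic.
# -----------------------------------------------------------------------------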
# # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # from __future__ import absolute_import from __future__ import unicode_literals from binascii import hexlify from dnf.yum.misc import unlink_f from dnf.i18n import _ import dnf.callback import dnf.logging import dnf.repo import hawkey import logging import libdnf.repo import os APPLYDELTA = '/usr/bin/applydeltarpm' logger = logging.getLogger("dnf") class DeltaPayload(dnf.repo.PackagePayload): def __init__(self, delta_info, delta, pkg, progress): super(DeltaPayload, self).__init__(pkg, progress) self.delta_info = delta_info self.delta = delta def __str__(self): return os.path.basename(self.delta.location) def _end_cb(self, cbdata, lr_status, msg): super(DeltaPayload, self)._end_cb(cbdata, lr_status, msg) if lr_status != libdnf.repo.PackageTargetCB.TransferStatus_ERROR: self.delta_info.enqueue(self) def _target_params(self): delta = self.delta ctype, csum = delta.chksum ctype = hawkey.chksum_name(ctype) chksum = hexlify(csum).decode() ctype_code = libdnf.repo.PackageTarget.checksumType(ctype) if ctype_code == libdnf.repo.PackageTarget.ChecksumType_UNKNOWN: logger.warning(_("unsupported checksum type: %s"), ctype) return { 'relative_url' : delta.location, 'checksum_type' : ctype_code, 'checksum' : chksum, 'expectedsize' : delta.downloadsize, 'base_url' : delta.baseurl, } @property def download_size(self): return self.delta.downloadsize @property def _full_size(self): return self.pkg.downloadsize def localPkg(self): location = self.delta.location return os.path.join(self.pkg.repo.pkgdir, os.path.basename(location)) class DeltaInfo(object): def __init__(self, query, progress, deltarpm_percentage=None): '''A delta lookup and rebuild context query -- installed packages to use when looking up deltas progress -- progress obj to display finished delta rebuilds ''' self.deltarpm_installed = False if os.access(APPLYDELTA, os.X_OK): self.deltarpm_installed = True try: self.deltarpm_jobs = os.sysconf('SC_NPROCESSORS_ONLN') except (TypeError, ValueError): self.deltarpm_jobs = 4 if deltarpm_percentage is None: self.deltarpm_percentage = dnf.conf.Conf().deltarpm_percentage else: self.deltarpm_percentage = deltarpm_percentage self.query = query self.progress = progress self.queue = [] self.jobs = {} self.err = {} def delta_factory(self, po, progress): '''Turn a po to Delta RPM po, if possible''' if not self.deltarpm_installed: # deltarpm is not installed return None if not po.repo.deltarpm or not self.deltarpm_percentage: # drpm disabled return None if po._is_local_pkg(): # drpm disabled for local return None if os.path.exists(po.localPkg()): # already there return None best = po._size * 
self.deltarpm_percentage / 100 best_delta = None for ipo in self.query.filter(name=po.name, arch=po.arch): delta = po.get_delta_from_evr(ipo.evr) if delta and delta.downloadsize < best: best = delta.downloadsize best_delta = delta if best_delta: return DeltaPayload(self, best_delta, po, progress) return None def job_done(self, pid, code): # handle a finished delta rebuild logger.log(dnf.logging.SUBDEBUG, 'drpm: %d: return code: %d, %d', pid, code >> 8, code & 0xff) pload = self.jobs.pop(pid) pkg = pload.pkg if code != 0: unlink_f(pload.pkg.localPkg()) self.err[pkg] = [_('Delta RPM rebuild failed')] elif not pload.pkg.verifyLocalPkg(): self.err[pkg] = [_('Checksum of the delta-rebuilt RPM failed')] else: os.unlink(pload.localPkg()) self.progress.end(pload, dnf.callback.STATUS_DRPM, _('done')) def start_job(self, pload): # spawn a delta rebuild job spawn_args = [APPLYDELTA, APPLYDELTA, '-a', pload.pkg.arch, pload.localPkg(), pload.pkg.localPkg()] pid = os.spawnl(os.P_NOWAIT, *spawn_args) logger.log(dnf.logging.SUBDEBUG, 'drpm: spawned %d: %s', pid, ' '.join(spawn_args[1:])) self.jobs[pid] = pload def enqueue(self, pload): # process finished jobs, start new ones while self.jobs: pid, code = os.waitpid(-1, os.WNOHANG) if not pid: break self.job_done(pid, code) self.queue.append(pload) while len(self.jobs) < self.deltarpm_jobs: self.start_job(self.queue.pop(0)) if not self.queue: break def wait(self): '''Wait until all jobs have finished''' while self.jobs: pid, code = os.wait() self.job_done(pid, code) if self.queue: self.start_job(self.queue.pop(0)) PK!xw exceptions.pynu[# This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Library General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # Copyright 2004 Duke University """ Core DNF Errors. """ from __future__ import unicode_literals from dnf.i18n import ucd, _, P_ import dnf.util import libdnf import warnings class DeprecationWarning(DeprecationWarning): # :api pass class Error(Exception): # :api """Base Error. All other Errors thrown by DNF should inherit from this. 
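# --- editor's annotation (illustrative example, not part of the original module) ---
# delta_factory() in drpm.py above only keeps a delta when it is actually cheaper
# than the full package: the cut-off is pkg_size * deltarpm_percentage / 100.
# E.g. with deltarpm_percentage=75, a 100 MB rpm is rebuilt from a drpm only if
# that drpm downloads in under 75 MB, and among several applicable deltas the one
# with the smallest downloadsize wins. Deltas are also skipped for local packages
# and for packages already present in the package cache.
# -----------------------------------------------------------------------------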
:api """ def __init__(self, value=None): super(Error, self).__init__() self.value = None if value is None else ucd(value) def __str__(self): return "{}".format(self.value) def __unicode__(self): return ucd(self.__str__()) class CompsError(Error): # :api pass class ConfigError(Error): def __init__(self, value=None, raw_error=None): super(ConfigError, self).__init__(value) self.raw_error = ucd(raw_error) if raw_error is not None else None class DatabaseError(Error): pass class DepsolveError(Error): # :api pass class DownloadError(Error): # :api def __init__(self, errmap): super(DownloadError, self).__init__() self.errmap = errmap @staticmethod def errmap2str(errmap): errstrings = [] for key in errmap: for error in errmap[key]: msg = '%s: %s' % (key, error) if key else '%s' % error errstrings.append(msg) return '\n'.join(errstrings) def __str__(self): return self.errmap2str(self.errmap) class LockError(Error): pass class MarkingError(Error): # :api def __init__(self, value=None, pkg_spec=None): """Initialize the marking error instance.""" super(MarkingError, self).__init__(value) self.pkg_spec = None if pkg_spec is None else ucd(pkg_spec) def __str__(self): string = super(MarkingError, self).__str__() if self.pkg_spec: string += ': ' + self.pkg_spec return string class MarkingErrors(Error): # :api def __init__(self, no_match_group_specs=(), error_group_specs=(), no_match_pkg_specs=(), error_pkg_specs=(), module_depsolv_errors=()): """Initialize the marking error instance.""" msg = _("Problems in request:") if (no_match_pkg_specs): msg += "\n" + _("missing packages: ") + ", ".join(no_match_pkg_specs) if (error_pkg_specs): msg += "\n" + _("broken packages: ") + ", ".join(error_pkg_specs) if (no_match_group_specs): msg += "\n" + _("missing groups or modules: ") + ", ".join(no_match_group_specs) if (error_group_specs): msg += "\n" + _("broken groups or modules: ") + ", ".join(error_group_specs) if (module_depsolv_errors): msg_mod = dnf.util._format_resolve_problems(module_depsolv_errors[0]) if module_depsolv_errors[1] == \ libdnf.module.ModulePackageContainer.ModuleErrorType_ERROR_IN_DEFAULTS: msg += "\n" + "\n".join([P_('Modular dependency problem with Defaults:', 'Modular dependency problems with Defaults:', len(module_depsolv_errors)), msg_mod]) else: msg += "\n" + "\n".join([P_('Modular dependency problem:', 'Modular dependency problems:', len(module_depsolv_errors)), msg_mod]) super(MarkingErrors, self).__init__(msg) self.no_match_group_specs = no_match_group_specs self.error_group_specs = error_group_specs self.no_match_pkg_specs = no_match_pkg_specs self.error_pkg_specs = error_pkg_specs self.module_depsolv_errors = module_depsolv_errors @property def module_debsolv_errors(self): msg = "Attribute module_debsolv_errors is deprecated. Use module_depsolv_errors " \ "attribute instead." 
warnings.warn(msg, DeprecationWarning, stacklevel=2) return self.module_depsolv_errors class MetadataError(Error): pass class MiscError(Error): pass class PackagesNotAvailableError(MarkingError): def __init__(self, value=None, pkg_spec=None, packages=None): super(PackagesNotAvailableError, self).__init__(value, pkg_spec) self.packages = packages or [] class PackageNotFoundError(MarkingError): pass class PackagesNotInstalledError(MarkingError): def __init__(self, value=None, pkg_spec=None, packages=None): super(PackagesNotInstalledError, self).__init__(value, pkg_spec) self.packages = packages or [] class ProcessLockError(LockError): def __init__(self, value, pid): super(ProcessLockError, self).__init__(value) self.pid = pid def __reduce__(self): """Pickling support.""" return (ProcessLockError, (self.value, self.pid)) class RepoError(Error): # :api pass class ThreadLockError(LockError): pass class TransactionCheckError(Error): pass PK!olc MMgoal.pynu[# goal.py # Customized hawkey.Goal # # Copyright (C) 2014-2016 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # from __future__ import absolute_import from __future__ import unicode_literals from hawkey import Goal PK!8A~~ history.pynu[# history.py # Interfaces to the history of transactions. # # Copyright (C) 2013-2018 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # """Interfaces to the history of transactions.""" from __future__ import absolute_import from __future__ import unicode_literals PK!g@!0!0i18n.pynu[# i18n.py # # Copyright (C) 2012-2016 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. 
# This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # from __future__ import print_function from __future__ import unicode_literals from dnf.pycomp import unicode import dnf import locale import os import signal import sys import unicodedata """ Centralize i18n stuff here. Must be unittested. """ class UnicodeStream(object): def __init__(self, stream, encoding): self.stream = stream self.encoding = encoding def write(self, s): if not isinstance(s, str): s = (s.decode(self.encoding, 'replace') if dnf.pycomp.PY3 else s.encode(self.encoding, 'replace')) try: self.stream.write(s) except UnicodeEncodeError: s_bytes = s.encode(self.stream.encoding, 'backslashreplace') if hasattr(self.stream, 'buffer'): self.stream.buffer.write(s_bytes) else: s = s_bytes.decode(self.stream.encoding, 'ignore') self.stream.write(s) def __getattr__(self, name): return getattr(self.stream, name) def _full_ucd_support(encoding): """Return true if encoding can express any Unicode character. Even if an encoding can express all accented letters in the given language, we can't generally settle for it in DNF since sometimes we output special characters like the registered trademark symbol (U+00AE) and surprisingly many national non-unicode encodings, including e.g. ASCII and ISO-8859-2, don't contain it. """ if encoding is None: return False lower = encoding.lower() if lower.startswith('utf-') or lower.startswith('utf_'): return True return False def _guess_encoding(): """ Take the best shot at the current system's string encoding. """ encoding = locale.getpreferredencoding(False) return 'utf-8' if encoding.startswith("ANSI") else encoding def setup_locale(): try: dnf.pycomp.setlocale(locale.LC_ALL, '') except locale.Error: # default to C.UTF-8 or C locale if we got a failure. try: dnf.pycomp.setlocale(locale.LC_ALL, 'C.UTF-8') os.environ['LC_ALL'] = 'C.UTF-8' except locale.Error: dnf.pycomp.setlocale(locale.LC_ALL, 'C') os.environ['LC_ALL'] = 'C' print('Failed to set locale, defaulting to {}'.format(os.environ['LC_ALL']), file=sys.stderr) def setup_stdout(): """ Check that stdout is of suitable encoding and handle the situation if not. Returns True if stdout was of suitable encoding already and no changes were needed. """ stdout = sys.stdout if not stdout.isatty(): signal.signal(signal.SIGPIPE, signal.SIG_DFL) try: encoding = stdout.encoding except AttributeError: encoding = None if not _full_ucd_support(encoding): sys.stdout = UnicodeStream(stdout, _guess_encoding()) return False return True def ucd_input(ucstring): # :api, deprecated in 2.0.0, will be erased when python2 is abandoned """ It uses print instead of passing the prompt to raw_input. 
raw_input doesn't encode the passed string and the output goes into stderr """ print(ucstring, end='') return dnf.pycomp.raw_input() def ucd(obj): # :api, deprecated in 2.0.0, will be erased when python2 is abandoned """ Like the builtin unicode() but tries to use a reasonable encoding. """ if dnf.pycomp.PY3: if dnf.pycomp.is_py3bytes(obj): return str(obj, _guess_encoding(), errors='ignore') elif isinstance(obj, str): return obj return str(obj) else: if isinstance(obj, dnf.pycomp.unicode): return obj if hasattr(obj, '__unicode__'): # see the doc for the unicode() built-in. The logic here is: if obj # implements __unicode__, let it take a crack at it, but handle the # situation if it fails: try: return dnf.pycomp.unicode(obj) except UnicodeError: pass return dnf.pycomp.unicode(str(obj), _guess_encoding(), errors='ignore') # functions for formatting output according to terminal width, # They should be used instead of build-in functions to count on different # widths of Unicode characters def _exact_width_char(uchar): return 2 if unicodedata.east_asian_width(uchar) in ('W', 'F') else 1 def chop_str(msg, chop=None): """ Return the textual width of a Unicode string, chopping it to a specified value. This is what you want to use instead of %.*s, as it does the "right" thing with regard to different Unicode character width Eg. "%.*s" % (10, msg) <= becomes => "%s" % (chop_str(msg, 10)) """ if chop is None: return exact_width(msg), msg width = 0 chopped_msg = "" for char in msg: char_width = _exact_width_char(char) if width + char_width > chop: break chopped_msg += char width += char_width return width, chopped_msg def exact_width(msg): """ Calculates width of char at terminal screen (Asian char counts for two) """ return sum(_exact_width_char(c) for c in msg) def fill_exact_width(msg, fill, chop=None, left=True, prefix='', suffix=''): """ Expand a msg to a specified "width" or chop to same. Expansion can be left or right. This is what you want to use instead of %*.*s, as it does the "right" thing with regard to different Unicode character width. prefix and suffix should be used for "invisible" bytes, like highlighting. Examples: ``"%-*.*s" % (10, 20, msg)`` becomes ``"%s" % (fill_exact_width(msg, 10, 20))``. ``"%20.10s" % (msg)`` becomes ``"%s" % (fill_exact_width(msg, 20, 10, left=False))``. ``"%s%.10s%s" % (pre, msg, suf)`` becomes ``"%s" % (fill_exact_width(msg, 0, 10, prefix=pre, suffix=suf))``. """ width, msg = chop_str(msg, chop) if width >= fill: if prefix or suffix: msg = ''.join([prefix, msg, suffix]) else: extra = " " * (fill - width) if left: msg = ''.join([prefix, msg, suffix, extra]) else: msg = ''.join([extra, prefix, msg, suffix]) return msg def textwrap_fill(text, width=70, initial_indent='', subsequent_indent=''): """ Works like we want textwrap.wrap() to work, uses Unicode strings and doesn't screw up lists/blocks/etc. 
""" def _indent_at_beg(line): count = 0 byte = 'X' for byte in line: if byte != ' ': break count += 1 if byte not in ("-", "*", ".", "o", '\xe2'): return count, 0 list_chr = chop_str(line[count:], 1)[1] if list_chr in ("-", "*", ".", "o", "\u2022", "\u2023", "\u2218"): nxt = _indent_at_beg(line[count+len(list_chr):]) nxt = nxt[1] or nxt[0] if nxt: return count, count + 1 + nxt return count, 0 text = text.rstrip('\n') lines = text.replace('\t', ' ' * 8).split('\n') ret = [] indent = initial_indent wrap_last = False csab = 0 cspc_indent = 0 for line in lines: line = line.rstrip(' ') (lsab, lspc_indent) = (csab, cspc_indent) (csab, cspc_indent) = _indent_at_beg(line) force_nl = False # We want to stop wrapping under "certain" conditions: if wrap_last and cspc_indent: # if line starts a list or force_nl = True if wrap_last and csab == len(line): # is empty line force_nl = True # if line doesn't continue a list and is "block indented" if wrap_last and not lspc_indent: if csab >= 4 and csab != lsab: force_nl = True if force_nl: ret.append(indent.rstrip(' ')) indent = subsequent_indent wrap_last = False if csab == len(line): # empty line, remove spaces to make it easier. line = '' if wrap_last: line = line.lstrip(' ') cspc_indent = lspc_indent if exact_width(indent + line) <= width: wrap_last = False ret.append(indent + line) indent = subsequent_indent continue wrap_last = True words = line.split(' ') line = indent spcs = cspc_indent if not spcs and csab >= 4: spcs = csab for word in words: if (width < exact_width(line + word)) and \ (exact_width(line) > exact_width(subsequent_indent)): ret.append(line.rstrip(' ')) line = subsequent_indent + ' ' * spcs line += word line += ' ' indent = line.rstrip(' ') + ' ' if wrap_last: ret.append(indent.rstrip(' ')) return '\n'.join(ret) def select_short_long(width, msg_short, msg_long): """ Automatically selects the short (abbreviated) or long (full) message depending on whether we have enough screen space to display the full message or not. If a caller by mistake passes a long string as msg_short and a short string as a msg_long this function recognizes the mistake and swaps the arguments. This function is especially useful in the i18n context when you cannot predict how long are the translated messages. Limitations: 1. If msg_short is longer than width you will still get an overflow. This function does not abbreviate the string. 2. You are not obliged to provide an actually abbreviated string, it is perfectly correct to pass the same string twice if you don't want any abbreviation. However, if you provide two different strings but having the same width this function is unable to recognize which one is correct and you should assume that it is unpredictable which one is returned. Example: ``select_short_long (10, _("Repo"), _("Repository"))`` will return "Repository" in English but the results in other languages may be different. 
""" width_short = exact_width(msg_short) width_long = exact_width(msg_long) # If we have two strings of the same width: if width_short == width_long: return msg_long # If the short string is wider than the long string: elif width_short > width_long: return msg_short if width_short <= width else msg_long # The regular case: else: return msg_long if width_long <= width else msg_short def translation(name): # :api, deprecated in 2.0.0, will be erased when python2 is abandoned """ Easy gettext translations setup based on given domain name """ setup_locale() def ucd_wrapper(fnc): return lambda *w: ucd(fnc(*w)) t = dnf.pycomp.gettext.translation(name, fallback=True) return map(ucd_wrapper, dnf.pycomp.gettext_setup(t)) def pgettext(context, message): result = _(context + chr(4) + message) if "\004" in result: return message else: return result # setup translations _, P_ = translation("dnf") C_ = pgettext PK!4) lock.pynu[# lock.py # DNF Locking Subsystem. # # Copyright (C) 2013-2016 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # from __future__ import absolute_import from __future__ import unicode_literals from dnf.exceptions import ProcessLockError, ThreadLockError, LockError from dnf.i18n import _ from dnf.yum import misc import dnf.logging import dnf.util import errno import fcntl import hashlib import logging import os import threading import time logger = logging.getLogger("dnf") def _fit_lock_dir(dir_): if not dnf.util.am_i_root(): # for regular users the best we currently do is not to clash with # another DNF process of the same user. 
Since dir_ is quite definitely # not writable for us, yet significant, use its hash: hexdir = hashlib.sha1(dir_.encode('utf-8')).hexdigest() dir_ = os.path.join(misc.getCacheDir(), 'locks', hexdir) return dir_ def build_download_lock(cachedir, exit_on_lock): return ProcessLock(os.path.join(_fit_lock_dir(cachedir), 'download_lock.pid'), 'cachedir', not exit_on_lock) def build_metadata_lock(cachedir, exit_on_lock): return ProcessLock(os.path.join(_fit_lock_dir(cachedir), 'metadata_lock.pid'), 'metadata', not exit_on_lock) def build_rpmdb_lock(persistdir, exit_on_lock): return ProcessLock(os.path.join(_fit_lock_dir(persistdir), 'rpmdb_lock.pid'), 'RPMDB', not exit_on_lock) def build_log_lock(logdir, exit_on_lock): return ProcessLock(os.path.join(_fit_lock_dir(logdir), 'log_lock.pid'), 'log', not exit_on_lock) class ProcessLock(object): def __init__(self, target, description, blocking=False): self.blocking = blocking self.count = 0 self.description = description self.target = target self.thread_lock = threading.RLock() def _lock_thread(self): if not self.thread_lock.acquire(blocking=False): msg = '%s already locked by a different thread' % self.description raise ThreadLockError(msg) self.count += 1 def _try_lock(self, pid): fd = os.open(self.target, os.O_CREAT | os.O_RDWR, 0o644) try: try: fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB) except OSError as e: if e.errno == errno.EWOULDBLOCK: return -1 raise old_pid = os.read(fd, 20) if len(old_pid) == 0: # empty file, write our pid os.write(fd, str(pid).encode('utf-8')) return pid try: old_pid = int(old_pid) except ValueError: msg = _('Malformed lock file found: %s.\n' 'Ensure no other dnf/yum process is running and ' 'remove the lock file manually or run ' 'systemd-tmpfiles --remove dnf.conf.') % (self.target) raise LockError(msg) if old_pid == pid: # already locked by this process return pid if not os.access('/proc/%d/stat' % old_pid, os.F_OK): # locked by a dead process, write our pid os.lseek(fd, 0, os.SEEK_SET) os.ftruncate(fd, 0) os.write(fd, str(pid).encode('utf-8')) return pid return old_pid finally: os.close(fd) def _unlock_thread(self): self.count -= 1 self.thread_lock.release() def __enter__(self): dnf.util.ensure_dir(os.path.dirname(self.target)) self._lock_thread() prev_pid = -1 my_pid = os.getpid() pid = self._try_lock(my_pid) while pid != my_pid: if pid != -1: if not self.blocking: self._unlock_thread() msg = '%s already locked by %d' % (self.description, pid) raise ProcessLockError(msg, pid) if prev_pid != pid: msg = _('Waiting for process with pid %d to finish.') % (pid) logger.info(msg) prev_pid = pid time.sleep(1) pid = self._try_lock(my_pid) def __exit__(self, *exc_args): if self.count == 1: os.unlink(self.target) self._unlock_thread() PK!*sr(r( logging.pynu[# logging.py # DNF Logging Subsystem. # # Copyright (C) 2013-2016 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. 
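# --- editor's annotation (illustrative example, not part of the original module) ---
# ProcessLock in lock.py above is used as a context manager; the build_*_lock()
# helpers only choose the lock file location. A minimal sketch (path illustrative):
#
#     >>> import dnf.lock
#     >>> with dnf.lock.build_metadata_lock('/var/cache/dnf', exit_on_lock=True):
#     ...     pass   # the lock file now holds this process's pid
#
# With exit_on_lock=True the lock is non-blocking and a concurrent holder makes it
# raise ProcessLockError; with exit_on_lock=False it polls once per second until
# the other process finishes.
# -----------------------------------------------------------------------------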
Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # from __future__ import absolute_import from __future__ import unicode_literals import dnf.exceptions import dnf.const import dnf.lock import dnf.util import libdnf.repo import logging import logging.handlers import os import sys import time import warnings import gzip # :api loggers are: 'dnf', 'dnf.plugin', 'dnf.rpm' SUPERCRITICAL = 100 # do not use this for logging CRITICAL = logging.CRITICAL ERROR = logging.ERROR WARNING = logging.WARNING INFO = logging.INFO DEBUG = logging.DEBUG DDEBUG = 8 # used by anaconda (pyanaconda/payload/dnfpayload.py) SUBDEBUG = 6 TRACE = 4 ALL = 2 def only_once(func): """Method decorator turning the method into noop on second or later calls.""" def noop(*_args, **_kwargs): pass def swan_song(self, *args, **kwargs): func(self, *args, **kwargs) setattr(self, func.__name__, noop) return swan_song class _MaxLevelFilter(object): def __init__(self, max_level): self.max_level = max_level def filter(self, record): if record.levelno >= self.max_level: return 0 return 1 _VERBOSE_VAL_MAPPING = { 0 : SUPERCRITICAL, 1 : logging.INFO, 2 : logging.INFO, # the default 3 : logging.DEBUG, 4 : logging.DEBUG, 5 : logging.DEBUG, 6 : logging.DEBUG, # verbose value 7 : DDEBUG, 8 : SUBDEBUG, 9 : TRACE, 10: ALL, # more verbous librepo and hawkey } def _cfg_verbose_val2level(cfg_errval): assert 0 <= cfg_errval <= 10 return _VERBOSE_VAL_MAPPING.get(cfg_errval, TRACE) # Both the DNF default and the verbose default are WARNING. Note that ERROR has # no specific level. _ERR_VAL_MAPPING = { 0: SUPERCRITICAL, 1: logging.CRITICAL, 2: logging.ERROR } def _cfg_err_val2level(cfg_errval): assert 0 <= cfg_errval <= 10 return _ERR_VAL_MAPPING.get(cfg_errval, logging.WARNING) def compression_namer(name): return name + ".gz" CHUNK_SIZE = 128 * 1024 # 128 KB def compression_rotator(source, dest): with open(source, "rb") as sf: with gzip.open(dest, 'wb') as wf: while True: data = sf.read(CHUNK_SIZE) if not data: break wf.write(data) os.remove(source) class MultiprocessRotatingFileHandler(logging.handlers.RotatingFileHandler): def __init__(self, filename, mode='a', maxBytes=0, backupCount=0, encoding=None, delay=False): super(MultiprocessRotatingFileHandler, self).__init__( filename, mode, maxBytes, backupCount, encoding, delay) self.rotate_lock = dnf.lock.build_log_lock("/var/log/", True) def emit(self, record): while True: try: if self.shouldRollover(record): with self.rotate_lock: # Do rollover while preserving the mode of the new log file mode = os.stat(self.baseFilename).st_mode self.doRollover() os.chmod(self.baseFilename, mode) logging.FileHandler.emit(self, record) return except (dnf.exceptions.ProcessLockError, dnf.exceptions.ThreadLockError): time.sleep(0.01) except Exception: self.handleError(record) return def _create_filehandler(logfile, log_size, log_rotate, log_compress): if not os.path.exists(logfile): dnf.util.ensure_dir(os.path.dirname(logfile)) dnf.util.touch(logfile) handler = MultiprocessRotatingFileHandler(logfile, maxBytes=log_size, backupCount=log_rotate) formatter = logging.Formatter("%(asctime)s %(levelname)s %(message)s", "%Y-%m-%dT%H:%M:%S%z") formatter.converter = time.localtime handler.setFormatter(formatter) if log_compress: handler.rotator = compression_rotator handler.namer = compression_namer return handler def _paint_mark(logger): logger.log(INFO, 
dnf.const.LOG_MARKER) class Logging(object): def __init__(self): self.stdout_handler = self.stderr_handler = None logging.addLevelName(DDEBUG, "DDEBUG") logging.addLevelName(SUBDEBUG, "SUBDEBUG") logging.addLevelName(TRACE, "TRACE") logging.addLevelName(ALL, "ALL") logging.captureWarnings(True) logging.raiseExceptions = False @only_once def _presetup(self): logger_dnf = logging.getLogger("dnf") logger_dnf.setLevel(TRACE) # setup stdout stdout = logging.StreamHandler(sys.stdout) stdout.setLevel(INFO) stdout.addFilter(_MaxLevelFilter(logging.WARNING)) logger_dnf.addHandler(stdout) self.stdout_handler = stdout # setup stderr stderr = logging.StreamHandler(sys.stderr) stderr.setLevel(WARNING) logger_dnf.addHandler(stderr) self.stderr_handler = stderr @only_once def _setup_file_loggers(self, logfile_level, logdir, log_size, log_rotate, log_compress): logger_dnf = logging.getLogger("dnf") logger_dnf.setLevel(TRACE) # setup file logger logfile = os.path.join(logdir, dnf.const.LOG) handler = _create_filehandler(logfile, log_size, log_rotate, log_compress) handler.setLevel(logfile_level) logger_dnf.addHandler(handler) # setup Python warnings logger_warnings = logging.getLogger("py.warnings") logger_warnings.addHandler(handler) logger_librepo = logging.getLogger("librepo") logger_librepo.setLevel(TRACE) logfile = os.path.join(logdir, dnf.const.LOG_LIBREPO) handler = _create_filehandler(logfile, log_size, log_rotate, log_compress) logger_librepo.addHandler(handler) libdnf.repo.LibrepoLog.addHandler(logfile, logfile_level <= ALL) # setup RPM callbacks logger logger_rpm = logging.getLogger("dnf.rpm") logger_rpm.propagate = False logger_rpm.setLevel(SUBDEBUG) logfile = os.path.join(logdir, dnf.const.LOG_RPM) handler = _create_filehandler(logfile, log_size, log_rotate, log_compress) logger_rpm.addHandler(handler) @only_once def _setup(self, verbose_level, error_level, logfile_level, logdir, log_size, log_rotate, log_compress): self._presetup() self._setup_file_loggers(logfile_level, logdir, log_size, log_rotate, log_compress) logger_warnings = logging.getLogger("py.warnings") logger_warnings.addHandler(self.stderr_handler) # setup RPM callbacks logger logger_rpm = logging.getLogger("dnf.rpm") logger_rpm.addHandler(self.stdout_handler) logger_rpm.addHandler(self.stderr_handler) logger_dnf = logging.getLogger("dnf") # temporarily turn off stdout/stderr handlers: self.stdout_handler.setLevel(WARNING) self.stderr_handler.setLevel(WARNING) _paint_mark(logger_dnf) _paint_mark(logger_rpm) # bring std handlers to the preferred level self.stdout_handler.setLevel(verbose_level) self.stderr_handler.setLevel(error_level) def _setup_from_dnf_conf(self, conf, file_loggers_only=False): verbose_level_r = _cfg_verbose_val2level(conf.debuglevel) error_level_r = _cfg_err_val2level(conf.errorlevel) logfile_level_r = _cfg_verbose_val2level(conf.logfilelevel) logdir = conf.logdir log_size = conf.log_size log_rotate = conf.log_rotate log_compress = conf.log_compress if file_loggers_only: return self._setup_file_loggers(logfile_level_r, logdir, log_size, log_rotate, log_compress) else: return self._setup( verbose_level_r, error_level_r, logfile_level_r, logdir, log_size, log_rotate, log_compress) class Timer(object): def __init__(self, what): self.what = what self.start = time.time() def __call__(self): diff = time.time() - self.start msg = 'timer: %s: %d ms' % (self.what, diff * 1000) logging.getLogger("dnf").log(DDEBUG, msg) _LIBDNF_TO_DNF_LOGLEVEL_MAPPING = { libdnf.utils.Logger.Level_CRITICAL: CRITICAL, 
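# --- editor's annotation (illustrative example, not part of the original module) ---
# _setup_from_dnf_conf() above turns the dnf.conf integers into logging levels via
# _cfg_verbose_val2level()/_cfg_err_val2level(): debuglevel=2 (the default) keeps
# stdout at INFO, 6 switches to DEBUG, 7-10 enable progressively finer levels up
# to ALL (extra librepo/hawkey chatter), and errorlevel=0 silences stderr. Timer
# is a small helper for DDEBUG profiling:
#
#     >>> t = Timer('sack setup')
#     >>> t()      # logs "timer: sack setup: <n> ms" at DDEBUG on the "dnf" logger
# -----------------------------------------------------------------------------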
libdnf.utils.Logger.Level_ERROR: ERROR, libdnf.utils.Logger.Level_WARNING: WARNING, libdnf.utils.Logger.Level_NOTICE: INFO, libdnf.utils.Logger.Level_INFO: INFO, libdnf.utils.Logger.Level_DEBUG: DEBUG, libdnf.utils.Logger.Level_TRACE: TRACE } class LibdnfLoggerCB(libdnf.utils.Logger): def __init__(self): super(LibdnfLoggerCB, self).__init__() self._dnf_logger = logging.getLogger("dnf") self._librepo_logger = logging.getLogger("librepo") def write(self, source, *args): """Log message. source -- integer, defines origin (libdnf, librepo, ...) of message, 0 - unknown """ if len(args) == 2: level, message = args elif len(args) == 4: time, pid, level, message = args if source == libdnf.utils.Logger.LOG_SOURCE_LIBREPO: self._librepo_logger.log(_LIBDNF_TO_DNF_LOGLEVEL_MAPPING[level], message) else: self._dnf_logger.log(_LIBDNF_TO_DNF_LOGLEVEL_MAPPING[level], message) libdnfLoggerCB = LibdnfLoggerCB() libdnf.utils.Log.setLogger(libdnfLoggerCB) PK!]'TTmatch_counter.pynu[# match_counter.py # Implements class MatchCounter. # # Copyright (C) 2012-2016 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # from __future__ import absolute_import from __future__ import print_function from __future__ import unicode_literals from functools import reduce WEIGHTS = { 'name' : 7, 'summary' : 4, 'description' : 2, 'url' : 1, } def _canonize_string_set(sset, length): """ Ordered sset with empty strings prepended. """ current = len(sset) l = [''] * (length - current) + sorted(sset) return l class MatchCounter(dict): """Map packages to which of their attributes matched in a search against what values. The mapping is: ``package -> [(key, needle), ... ]``. """ @staticmethod def _eval_weights(pkg, matches): # how much is each match worth and return their sum: def weight(match): key = match[0] needle = match[1] haystack = getattr(pkg, key) if key == "name" and haystack == needle: # if package matches exactly by name, increase weight return 2 * WEIGHTS[key] return WEIGHTS[key] return sum(map(weight, matches)) def _key_func(self): """Get the key function used for sorting matches. It is not enough to only look at the matches and order them by the sum of their weighted hits. In case this number is the same we have to ensure that the same matched needles are next to each other in the result. 
Returned function is: pkg -> (-weights_sum, package_name); the negated weight sum puts the best matches first, and the package name keeps the ordering deterministic when the sums are equal. """ def get_key(pkg): return ( # use negative value to make sure packages with the highest weight come first - self._eval_weights(pkg, self[pkg]), # then order packages alphabetically pkg.name, ) return get_key def _max_needles(self): """Return the max count of needles of all packages.""" if self: return max(len(self.matched_needles(pkg)) for pkg in self) return 0 def add(self, pkg, key, needle): self.setdefault(pkg, []).append((key, needle)) def dump(self): for pkg in self: print('%s\t%s' % (pkg, self[pkg])) def matched_haystacks(self, pkg): return set(getattr(pkg, m[0]) for m in self[pkg]) def matched_keys(self, pkg): # return keys in the same order they appear in the list result = [] for i in self[pkg]: if i[0] in result: continue result.append(i[0]) return result def matched_needles(self, pkg): return set(m[1] for m in self[pkg]) def sorted(self, reverse=False, limit_to=None): keys = limit_to if limit_to else self.keys() return sorted(keys, key=self._key_func()) def total(self): return reduce(lambda total, pkg: total + len(self[pkg]), self, 0)
# package.py # Module defining the dnf.Package class. # # Copyright (C) 2012-2016 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # """ Contains the dnf.Package class. """ from __future__ import absolute_import from __future__ import unicode_literals from dnf.i18n import _ import binascii import dnf.exceptions import dnf.rpm import dnf.yum.misc import hawkey import libdnf.error import libdnf.utils import logging import os import rpm logger = logging.getLogger("dnf") class Package(hawkey.Package): """ Represents a package.
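    Instances are normally obtained from a sack query rather than constructed
    directly; a minimal usage sketch (assuming a configured dnf.Base instance
    named base):

        for pkg in base.sack.query().installed().filter(name="bash"):
            print(pkg.name, pkg.evr, pkg.reponame)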
#:api """ DEBUGINFO_SUFFIX = "-debuginfo" # :api DEBUGSOURCE_SUFFIX = "-debugsource" # :api def __init__(self, initobject, base): super(Package, self).__init__(initobject) self.base = base self._priv_chksum = None self._repo = None self._priv_size = None @property def _chksum(self): if self._priv_chksum: return self._priv_chksum if self._from_cmdline: chksum_type = dnf.yum.misc.get_default_chksum_type() try: chksum_val = libdnf.utils.checksum_value(chksum_type, self.location) except libdnf.error.Error as e: raise dnf.exceptions.MiscError(str(e)) return (hawkey.chksum_type(chksum_type), binascii.unhexlify(chksum_val)) return super(Package, self).chksum @_chksum.setter def _chksum(self, val): self._priv_chksum = val @property def _from_cmdline(self): return self.reponame == hawkey.CMDLINE_REPO_NAME @property def _from_system(self): return self.reponame == hawkey.SYSTEM_REPO_NAME @property def _from_repo(self): """ For installed packages returns id of repository from which the package was installed prefixed with '@' (if such information is available in the history database). Otherwise returns id of repository the package belongs to (@System for installed packages of unknown origin) """ pkgrepo = None if self._from_system: pkgrepo = self.base.history.repo(self) if pkgrepo: return '@' + pkgrepo return self.reponame @property def from_repo(self): # :api if self._from_system: return self.base.history.repo(self) return "" @property def _header(self): """ Returns the header of a locally present rpm package file. As opposed to self.get_header(), which retrieves the header of an installed package from rpmdb. """ return dnf.rpm._header(self.localPkg()) @property def _size(self): if self._priv_size: return self._priv_size return super(Package, self).size @_size.setter def _size(self, val): self._priv_size = val @property def _pkgid(self): if self.hdr_chksum is None: return None (_, chksum) = self.hdr_chksum return binascii.hexlify(chksum) @property def source_name(self): # :api """ returns name of source package e.g. krb5-libs -> krb5 """ if self.sourcerpm is not None: # trim suffix first srcname = dnf.util.rtrim(self.sourcerpm, ".src.rpm") # sourcerpm should be in form of name-version-release now, so we # will strip the two rightmost parts separated by dash. # Using rtrim with version and release of self is not sufficient # because the package can have different version to the source # package. srcname = srcname.rsplit('-', 2)[0] else: srcname = None return srcname @property def debug_name(self): # :api """ Returns name of the debuginfo package for this package. If this package is a debuginfo package, returns its name. If this package is a debugsource package, returns the debuginfo package for the base package. e.g. kernel-PAE -> kernel-PAE-debuginfo """ if self.name.endswith(self.DEBUGINFO_SUFFIX): return self.name name = self.name if self.name.endswith(self.DEBUGSOURCE_SUFFIX): name = name[:-len(self.DEBUGSOURCE_SUFFIX)] return name + self.DEBUGINFO_SUFFIX @property def debugsource_name(self): # :api """ Returns name of the debugsource package for this package. e.g. krb5-libs -> krb5-debugsource """ # assuming self.source_name is None only for a source package src_name = self.source_name if self.source_name is not None else self.name return src_name + self.DEBUGSOURCE_SUFFIX def get_header(self): """ Returns the rpm header of the package if it is installed. If not installed, returns None. The header is not cached, it is retrieved from rpmdb on every call. In case of a failure (e.g. 
when the rpmdb changes between loading the data and calling this method), raises an instance of PackageNotFoundError. """ if not self._from_system: return None try: # RPMDBI_PACKAGES stands for the header of the package return next(self.base._ts.dbMatch(rpm.RPMDBI_PACKAGES, self.rpmdbid)) except StopIteration: raise dnf.exceptions.PackageNotFoundError("Package not found when attempting to retrieve header", str(self)) @property def source_debug_name(self): # :api """ returns name of debuginfo package for source package of given package e.g. krb5-libs -> krb5-debuginfo """ # assuming self.source_name is None only for a source package src_name = self.source_name if self.source_name is not None else self.name return src_name + self.DEBUGINFO_SUFFIX @property # yum compatibility attribute def idx(self): """ Always type it to int, rpm bindings expect it like that. """ return int(self.rpmdbid) @property # yum compatibility attribute def repoid(self): return self.reponame @property # yum compatibility attribute def pkgtup(self): return (self.name, self.arch, str(self.e), self.v, self.r) @property # yum compatibility attribute def repo(self): if self._repo: return self._repo return self.base.repos[self.reponame] @repo.setter def repo(self, val): self._repo = val @property def reason(self): if self.repoid != hawkey.SYSTEM_REPO_NAME: return None return self.base.history.rpm.get_reason_name(self) @property # yum compatibility attribute def relativepath(self): return self.location @property # yum compatibility attribute def a(self): return self.arch @property # yum compatibility attribute def e(self): return self.epoch @property # yum compatibility attribute def v(self): return self.version @property # yum compatibility attribute def r(self): return self.release @property # yum compatibility attribute def ui_from_repo(self): return self.reponame # yum compatibility method def evr_eq(self, pkg): return self.evr_cmp(pkg) == 0 # yum compatibility method def evr_gt(self, pkg): return self.evr_cmp(pkg) > 0 # yum compatibility method def evr_lt(self, pkg): return self.evr_cmp(pkg) < 0 # yum compatibility method def getDiscNum(self): return self.medianr # yum compatibility method def localPkg(self): """ Package's location in the filesystem. For packages in remote repo returns where the package will be/has been downloaded. """ if self._from_cmdline: return self.location loc = self.location if self.repo._repo.isLocal() and self.baseurl and self.baseurl.startswith('file://'): return os.path.join(self.get_local_baseurl(), loc.lstrip("/")) if not self._is_local_pkg(): loc = os.path.basename(loc) return os.path.join(self.pkgdir, loc.lstrip("/")) def remote_location(self, schemes=('http', 'ftp', 'file', 'https')): # :api """ The location from where the package can be downloaded from. Returns None for installed and commandline packages. :param schemes: list of allowed protocols. 
Default is ('http', 'ftp', 'file', 'https') :return: location (string) or None """ if self._from_system or self._from_cmdline: return None return self.repo.remote_location(self.location, schemes) def _is_local_pkg(self): if self._from_system: return True if '://' in self.location and not self.location.startswith('file://'): # the package has a remote URL as its location return False return self._from_cmdline or \ (self.repo._repo.isLocal() and (not self.baseurl or self.baseurl.startswith('file://'))) @property def pkgdir(self): if (self.repo._repo.isLocal() and not self._is_local_pkg()): return self.repo.cache_pkgdir() else: return self.repo.pkgdir # yum compatibility method def returnIdSum(self): """ Return the chksum type and chksum string how the legacy yum expects it. """ if self._chksum is None: return (None, None) (chksum_type, chksum) = self._chksum return (hawkey.chksum_name(chksum_type), binascii.hexlify(chksum).decode()) # yum compatibility method def verifyLocalPkg(self): if self._from_system: raise ValueError("Can not verify an installed package.") if self._from_cmdline: return True # local package always verifies against itself (chksum_type, chksum) = self.returnIdSum() try: return libdnf.utils.checksum_check(chksum_type, self.localPkg(), chksum) except libdnf.error.Error as e: raise dnf.exceptions.MiscError(str(e)) PK!Y'too persistor.pynu[# persistor.py # Persistence data container. # # Copyright (C) 2013-2016 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # # The current implementation is storing to files in persistdir. Do not depend on # specific files existing, instead use the persistor API. The underlying # implementation can change, e.g. for one general file with a serialized dict of # data etc. 
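# As a concrete illustration of the current layout (an implementation detail,
# not part of the API): "expired_repos.json" holds a plain JSON array of repo
# IDs, e.g. ["fedora", "updates"], and "last_makecache" is an empty file whose
# mtime records when 'dnf makecache' last succeeded.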
from __future__ import absolute_import from __future__ import unicode_literals from dnf.i18n import _ import distutils.version import dnf.util import errno import fnmatch import json import logging import os import re logger = logging.getLogger("dnf") class JSONDB(object): def _check_json_db(self, json_path): if not os.path.isfile(json_path): # initialize new db dnf.util.ensure_dir(os.path.dirname(json_path)) self._write_json_db(json_path, []) def _get_json_db(self, json_path, default=[]): with open(json_path, 'r') as f: content = f.read() if content == "": # empty file is invalid json format logger.warning(_("%s is empty file"), json_path) self._write_json_db(json_path, default) else: try: default = json.loads(content) except ValueError as e: logger.warning(e) return default @staticmethod def _write_json_db(json_path, content): with open(json_path, 'w') as f: json.dump(content, f) class RepoPersistor(JSONDB): """Persistent data kept for repositories. Is arch/releasever specific and stores to cachedir. """ def __init__(self, cachedir): self.cachedir = cachedir self.db_path = os.path.join(self.cachedir, "expired_repos.json") self.expired_to_add = set() self.reset_last_makecache = False @property def _last_makecache_path(self): return os.path.join(self.cachedir, "last_makecache") def get_expired_repos(self): try: self._check_json_db(self.db_path) return set(self._get_json_db(self.db_path)) except OSError as e: logger.warning(_("Failed to load expired repos cache: %s"), e) return None def save(self): try: self._check_json_db(self.db_path) self._write_json_db(self.db_path, list(self.expired_to_add)) except OSError as e: logger.warning(_("Failed to store expired repos cache: %s"), e) return False if self.reset_last_makecache: try: dnf.util.touch(self._last_makecache_path) return True except IOError: logger.warning(_("Failed storing last makecache time.")) return False def since_last_makecache(self): try: return int(dnf.util.file_age(self._last_makecache_path)) except OSError: logger.warning(_("Failed determining last makecache time.")) return None class TempfilePersistor(JSONDB): def __init__(self, cachedir): self.db_path = os.path.join(cachedir, "tempfiles.json") self.tempfiles_to_add = set() self._empty = False def get_saved_tempfiles(self): self._check_json_db(self.db_path) return self._get_json_db(self.db_path) def save(self): if not self._empty and not self.tempfiles_to_add: return self._check_json_db(self.db_path) if self._empty: self._write_json_db(self.db_path, []) return if self.tempfiles_to_add: data = set(self._get_json_db(self.db_path)) data.update(self.tempfiles_to_add) self._write_json_db(self.db_path, list(data)) def empty(self): self._empty = True PK!;G@V%V% plugin.pynu[# plugin.py # The interface for building DNF plugins. # # Copyright (C) 2012-2016 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. 
Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # from __future__ import absolute_import from __future__ import print_function from __future__ import unicode_literals import fnmatch import glob import importlib import inspect import logging import operator import os import sys import traceback import libdnf import dnf.logging import dnf.pycomp import dnf.util from dnf.i18n import _ logger = logging.getLogger('dnf') DYNAMIC_PACKAGE = 'dnf.plugin.dynamic' class Plugin(object): """The base class custom plugins must derive from. #:api""" name = '' config_name = None @classmethod def read_config(cls, conf): # :api parser = libdnf.conf.ConfigParser() name = cls.config_name if cls.config_name else cls.name files = ['%s/%s.conf' % (path, name) for path in conf.pluginconfpath] for file in files: if os.path.isfile(file): try: parser.read(file) except Exception as e: raise dnf.exceptions.ConfigError(_("Parsing file failed: %s") % str(e)) return parser def __init__(self, base, cli): # :api self.base = base self.cli = cli def pre_config(self): # :api pass def config(self): # :api pass def resolved(self): # :api pass def sack(self): # :api pass def pre_transaction(self): # :api pass def transaction(self): # :api pass class Plugins(object): def __init__(self): self.plugin_cls = [] self.plugins = [] def __del__(self): self._unload() def _caller(self, method): for plugin in self.plugins: try: getattr(plugin, method)() except dnf.exceptions.Error: raise except Exception: exc_type, exc_value, exc_traceback = sys.exc_info() except_list = traceback.format_exception(exc_type, exc_value, exc_traceback) logger.critical(''.join(except_list)) def _check_enabled(self, conf, enable_plugins): """Checks whether plugins are enabled or disabled in configuration files and removes disabled plugins from list""" for plug_cls in self.plugin_cls[:]: name = plug_cls.name if any(fnmatch.fnmatch(name, pattern) for pattern in enable_plugins): continue parser = plug_cls.read_config(conf) # has it enabled = False? 
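            # i.e. a per-plugin configuration file containing something like
            # (illustrative):
            #   [main]
            #   enabled = 0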
disabled = (parser.has_section('main') and parser.has_option('main', 'enabled') and not parser.getboolean('main', 'enabled')) if disabled: self.plugin_cls.remove(plug_cls) def _load(self, conf, skips, enable_plugins): """Dynamically load relevant plugin modules.""" if DYNAMIC_PACKAGE in sys.modules: raise RuntimeError("load_plugins() called twice") sys.modules[DYNAMIC_PACKAGE] = package = dnf.pycomp.ModuleType(DYNAMIC_PACKAGE) package.__path__ = [] files = _get_plugins_files(conf.pluginpath, skips, enable_plugins) _import_modules(package, files) self.plugin_cls = _plugin_classes()[:] self._check_enabled(conf, enable_plugins) if len(self.plugin_cls) > 0: names = sorted(plugin.name for plugin in self.plugin_cls) logger.debug(_('Loaded plugins: %s'), ', '.join(names)) def _run_pre_config(self): self._caller('pre_config') def _run_config(self): self._caller('config') def _run_init(self, base, cli=None): for p_cls in self.plugin_cls: plugin = p_cls(base, cli) self.plugins.append(plugin) def run_sack(self): self._caller('sack') def run_resolved(self): self._caller('resolved') def run_pre_transaction(self): self._caller('pre_transaction') def run_transaction(self): self._caller('transaction') def _unload(self): if DYNAMIC_PACKAGE in sys.modules: logger.log(dnf.logging.DDEBUG, 'Plugins were unloaded.') del sys.modules[DYNAMIC_PACKAGE] def unload_removed_plugins(self, transaction): """ Unload plugins that were removed in the `transaction`. """ if not transaction.remove_set: return # gather all installed plugins and their files plugins = dict() for plugin in self.plugins: plugins[inspect.getfile(plugin.__class__)] = plugin # gather all removed files that are plugin files plugin_files = set(plugins.keys()) erased_plugin_files = set() for pkg in transaction.remove_set: erased_plugin_files.update(plugin_files.intersection(pkg.files)) if not erased_plugin_files: return # check whether removed plugin file is added at the same time (upgrade of a plugin) for pkg in transaction.install_set: erased_plugin_files.difference_update(pkg.files) # unload plugins that were removed in transaction for plugin_file in erased_plugin_files: self.plugins.remove(plugins[plugin_file]) def _plugin_classes(): return Plugin.__subclasses__() def _import_modules(package, py_files): for fn in py_files: path, module = os.path.split(fn) package.__path__.append(path) (module, ext) = os.path.splitext(module) name = '%s.%s' % (package.__name__, module) try: module = importlib.import_module(name) except Exception as e: logger.error(_('Failed loading plugin "%s": %s'), module, e) logger.log(dnf.logging.SUBDEBUG, '', exc_info=True) def _get_plugins_files(paths, disable_plugins, enable_plugins): plugins = [] disable_plugins = set(disable_plugins) enable_plugins = set(enable_plugins) pattern_enable_found = set() pattern_disable_found = set() for p in paths: for fn in glob.glob('%s/*.py' % p): (plugin_name, dummy) = os.path.splitext(os.path.basename(fn)) matched = True enable_pattern_tested = False for pattern_skip in disable_plugins: if _plugin_name_matches_pattern(plugin_name, pattern_skip): pattern_disable_found.add(pattern_skip) matched = False for pattern_enable in enable_plugins: if _plugin_name_matches_pattern(plugin_name, pattern_enable): matched = True pattern_enable_found.add(pattern_enable) enable_pattern_tested = True if not enable_pattern_tested: for pattern_enable in enable_plugins: if _plugin_name_matches_pattern(plugin_name, pattern_enable): pattern_enable_found.add(pattern_enable) if matched: plugins.append(fn) 
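        # Note the precedence encoded above: a plugin matched by a disable
        # pattern is still loaded when an enable pattern matches it as well.
        # For example (illustrative), disable_plugins=['*'] combined with
        # enable_plugins=['versionlock'] ends up loading only the versionlock
        # plugin.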
enable_not_found = enable_plugins.difference(pattern_enable_found) if enable_not_found: logger.warning(_("No matches found for the following enable plugin patterns: {}").format( ", ".join(sorted(enable_not_found)))) disable_not_found = disable_plugins.difference(pattern_disable_found) if disable_not_found: logger.warning(_("No matches found for the following disable plugin patterns: {}").format( ", ".join(sorted(disable_not_found)))) return plugins def _plugin_name_matches_pattern(plugin_name, pattern): """ Checks plugin name matches the pattern. The alternative plugin name using dashes instead of underscores is tried in case of original name is not matched. (see https://bugzilla.redhat.com/show_bug.cgi?id=1980712) """ try_names = set((plugin_name, plugin_name.replace('_', '-'))) return any(fnmatch.fnmatch(name, pattern) for name in try_names) def register_command(command_class): # :api """A class decorator for automatic command registration.""" def __init__(self, base, cli): if cli: cli.register_command(command_class) plugin_class = type(str(command_class.__name__ + 'Plugin'), (dnf.Plugin,), {"__init__": __init__, "name": command_class.aliases[0]}) command_class._plugin = plugin_class return command_class PK!6% pycomp.pynu[# pycomp.py # Python 2 and Python 3 compatibility module # # Copyright (C) 2013-2016 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. 
# from gettext import NullTranslations from sys import version_info import base64 import email.mime.text import gettext import itertools import locale import sys import types PY3 = version_info.major >= 3 if PY3: from io import StringIO from configparser import ConfigParser import queue import urllib.parse import shlex # functions renamed in py3 Queue = queue.Queue basestring = unicode = str filterfalse = itertools.filterfalse long = int NullTranslations.ugettext = NullTranslations.gettext NullTranslations.ungettext = NullTranslations.ngettext xrange = range raw_input = input base64_decodebytes = base64.decodebytes urlparse = urllib.parse urllib_quote = urlparse.quote shlex_quote = shlex.quote sys_maxsize = sys.maxsize def gettext_setup(t): _ = t.gettext P_ = t.ngettext return (_, P_) # string helpers def is_py2str_py3bytes(o): return isinstance(o, bytes) def is_py3bytes(o): return isinstance(o, bytes) # functions that don't take unicode arguments in py2 ModuleType = lambda m: types.ModuleType(m) format = locale.format_string def setlocale(category, loc=None): locale.setlocale(category, loc) def write_to_file(f, content): f.write(content) def email_mime(body): return email.mime.text.MIMEText(body) else: # functions renamed in py3 from __builtin__ import unicode, basestring, long, xrange, raw_input from StringIO import StringIO from ConfigParser import ConfigParser import Queue import urllib import urlparse import pipes Queue = Queue.Queue filterfalse = itertools.ifilterfalse base64_decodebytes = base64.decodestring urllib_quote = urllib.quote shlex_quote = pipes.quote sys_maxsize = sys.maxint def gettext_setup(t): _ = t.ugettext P_ = t.ungettext return (_, P_) # string helpers def is_py2str_py3bytes(o): return isinstance(o, str) def is_py3bytes(o): return False # functions that don't take unicode arguments in py2 ModuleType = lambda m: types.ModuleType(m.encode('utf-8')) def format(percent, *args, **kwargs): return locale.format(percent.encode('utf-8'), *args, **kwargs) def setlocale(category, loc=None): locale.setlocale(category, loc.encode('utf-8')) def write_to_file(f, content): f.write(content.encode('utf-8')) def email_mime(body): return email.mime.text.MIMEText(body.encode('utf-8')) PK!"33query.pynu[# query.py # Implements Query. # # Copyright (C) 2012-2016 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. 
# from __future__ import absolute_import from __future__ import unicode_literals import hawkey from hawkey import Query from dnf.i18n import ucd from dnf.pycomp import basestring def _by_provides(sack, patterns, ignore_case=False, get_query=False): if isinstance(patterns, basestring): patterns = [patterns] q = sack.query() flags = [] if ignore_case: flags.append(hawkey.ICASE) q.filterm(*flags, provides__glob=patterns) if get_query: return q return q.run() def _per_nevra_dict(pkg_list): return {ucd(pkg):pkg for pkg in pkg_list}
# repo.py # DNF Repository objects. # # Copyright (C) 2013-2016 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # from __future__ import absolute_import from __future__ import unicode_literals from dnf.i18n import ucd, _ import dnf.callback import dnf.conf import dnf.conf.substitutions import dnf.const import dnf.crypto import dnf.exceptions import dnf.logging import dnf.pycomp import dnf.util import dnf.yum.misc import libdnf.error import libdnf.repo import functools import hashlib import hawkey import logging import operator import os import re import shutil import string import sys import time import traceback _PACKAGES_RELATIVE_DIR = "packages" _MIRRORLIST_FILENAME = "mirrorlist" # Chars allowed in a repo ID _REPOID_CHARS = string.ascii_letters + string.digits + '-_.:' # Regex pattern that matches a repo cachedir and captures the repo ID _CACHEDIR_RE = r'(?P<repoid>[%s]+)\-[%s]{16}' % (re.escape(_REPOID_CHARS), string.hexdigits) # Regex patterns matching any filename that is repo-specific cache data of a # particular type. The filename is expected to not contain the base cachedir # path components.
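# For example (illustrative paths, relative to the base cachedir):
#   fedora-2d95c80a1fa3a1d5/repodata/primary.xml.gz -> 'metadata'
#   fedora-2d95c80a1fa3a1d5/packages/bash-5.2-1.x86_64.rpm -> 'packages'
#   fedora.solv -> 'dbcache'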
CACHE_FILES = { 'metadata': r'^%s\/.*((xml|yaml)(\.gz|\.xz|\.bz2|.zck)?|asc|cachecookie|%s)$' % (_CACHEDIR_RE, _MIRRORLIST_FILENAME), 'packages': r'^%s\/%s\/.+rpm$' % (_CACHEDIR_RE, _PACKAGES_RELATIVE_DIR), 'dbcache': r'^.+(solv|solvx)$', } logger = logging.getLogger("dnf") def repo_id_invalid(repo_id): # :api """Return index of an invalid character in the repo ID (if present).""" first_invalid = libdnf.repo.Repo.verifyId(repo_id) return None if first_invalid < 0 else first_invalid def _pkg2payload(pkg, progress, *factories): for fn in factories: pload = fn(pkg, progress) if pload is not None: return pload raise ValueError(_('no matching payload factory for %s') % pkg) def _download_payloads(payloads, drpm, fail_fast=True): # download packages def _download_sort_key(payload): return not hasattr(payload, 'delta') drpm.err.clear() targets = [pload._librepo_target() for pload in sorted(payloads, key=_download_sort_key)] errs = _DownloadErrors() try: libdnf.repo.PackageTarget.downloadPackages(libdnf.repo.VectorPPackageTarget(targets), fail_fast) except RuntimeError as e: errs._fatal = str(e) drpm.wait() # process downloading errors errs._recoverable = drpm.err.copy() for tgt in targets: err = tgt.getErr() if err is None or err.startswith('Not finished'): continue callbacks = tgt.getCallbacks() payload = callbacks.package_pload pkg = payload.pkg if err == 'Already downloaded': errs._skipped.add(pkg) continue pkg.repo._repo.expire() errs._pkg_irrecoverable[pkg] = [err] return errs def _update_saving(saving, payloads, errs): real, full = saving for pload in payloads: pkg = pload.pkg if pkg in errs: real += pload.download_size continue real += pload.download_size full += pload._full_size return real, full class _DownloadErrors(object): def __init__(self): self._pkg_irrecoverable = {} self._val_recoverable = {} self._fatal = None self._skipped = set() def _irrecoverable(self): if self._pkg_irrecoverable: return self._pkg_irrecoverable if self._fatal: return {'': [self._fatal]} return {} @property def _recoverable(self): return self._val_recoverable @_recoverable.setter def _recoverable(self, new_dct): self._val_recoverable = new_dct def _bandwidth_used(self, pload): if pload.pkg in self._skipped: return 0 return pload.download_size class _DetailedLibrepoError(Exception): def __init__(self, librepo_err, source_url): Exception.__init__(self) self.librepo_code = librepo_err.args[0] self.librepo_msg = librepo_err.args[1] self.source_url = source_url class _NullKeyImport(dnf.callback.KeyImport): def _confirm(self, id, userid, fingerprint, url, timestamp): return True class Metadata(object): def __init__(self, repo): self._repo = repo @property def fresh(self): # :api return self._repo.fresh() class PackageTargetCallbacks(libdnf.repo.PackageTargetCB): def __init__(self, package_pload): super(PackageTargetCallbacks, self).__init__() self.package_pload = package_pload def end(self, status, msg): self.package_pload._end_cb(None, status, msg) return 0 def progress(self, totalToDownload, downloaded): self.package_pload._progress_cb(None, totalToDownload, downloaded) return 0 def mirrorFailure(self, msg, url): self.package_pload._mirrorfail_cb(None, msg, url) return 0 class PackagePayload(dnf.callback.Payload): def __init__(self, pkg, progress): super(PackagePayload, self).__init__(progress) self.callbacks = PackageTargetCallbacks(self) self.pkg = pkg def _end_cb(self, cbdata, lr_status, msg): """End callback to librepo operation.""" status = dnf.callback.STATUS_FAILED if msg is None: status = 
dnf.callback.STATUS_OK elif msg.startswith('Not finished'): return elif lr_status == libdnf.repo.PackageTargetCB.TransferStatus_ALREADYEXISTS: status = dnf.callback.STATUS_ALREADY_EXISTS self.progress.end(self, status, msg) def _mirrorfail_cb(self, cbdata, err, url): self.progress.end(self, dnf.callback.STATUS_MIRROR, err) def _progress_cb(self, cbdata, total, done): try: self.progress.progress(self, done) except Exception: exc_type, exc_value, exc_traceback = sys.exc_info() except_list = traceback.format_exception(exc_type, exc_value, exc_traceback) logger.critical(''.join(except_list)) @property def _full_size(self): return self.download_size def _librepo_target(self): pkg = self.pkg pkgdir = pkg.pkgdir dnf.util.ensure_dir(pkgdir) target_dct = { 'dest': pkgdir, 'resume': True, 'cbdata': self, 'progresscb': self._progress_cb, 'endcb': self._end_cb, 'mirrorfailurecb': self._mirrorfail_cb, } target_dct.update(self._target_params()) return libdnf.repo.PackageTarget( pkg.repo._repo, target_dct['relative_url'], target_dct['dest'], target_dct['checksum_type'], target_dct['checksum'], target_dct['expectedsize'], target_dct['base_url'], target_dct['resume'], 0, 0, self.callbacks) class RPMPayload(PackagePayload): def __str__(self): return os.path.basename(self.pkg.location) def _target_params(self): pkg = self.pkg ctype, csum = pkg.returnIdSum() ctype_code = libdnf.repo.PackageTarget.checksumType(ctype) if ctype_code == libdnf.repo.PackageTarget.ChecksumType_UNKNOWN: logger.warning(_("unsupported checksum type: %s"), ctype) return { 'relative_url': pkg.location, 'checksum_type': ctype_code, 'checksum': csum, 'expectedsize': pkg.downloadsize, 'base_url': pkg.baseurl, } @property def download_size(self): """Total size of the download.""" return self.pkg.downloadsize class RemoteRPMPayload(PackagePayload): def __init__(self, remote_location, conf, progress): super(RemoteRPMPayload, self).__init__("unused_object", progress) self.remote_location = remote_location self.remote_size = 0 self.conf = conf s = (self.conf.releasever or "") + self.conf.substitutions.get('basearch') digest = hashlib.sha256(s.encode('utf8')).hexdigest()[:16] repodir = "commandline-" + digest self.pkgdir = os.path.join(self.conf.cachedir, repodir, "packages") dnf.util.ensure_dir(self.pkgdir) self.local_path = os.path.join(self.pkgdir, self.__str__().lstrip("/")) def __str__(self): return os.path.basename(self.remote_location) def _progress_cb(self, cbdata, total, done): self.remote_size = total try: self.progress.progress(self, done) except Exception: exc_type, exc_value, exc_traceback = sys.exc_info() except_list = traceback.format_exception(exc_type, exc_value, exc_traceback) logger.critical(''.join(except_list)) def _librepo_target(self): return libdnf.repo.PackageTarget( self.conf._config, os.path.basename(self.remote_location), self.pkgdir, 0, None, 0, os.path.dirname(self.remote_location), True, 0, 0, self.callbacks) @property def download_size(self): """Total size of the download.""" return self.remote_size class MDPayload(dnf.callback.Payload): def __init__(self, progress): super(MDPayload, self).__init__(progress) self._text = "" self._download_size = 0 self.fastest_mirror_running = False self.mirror_failures = set() def __str__(self): if dnf.pycomp.PY3: return self._text else: return self._text.encode('utf-8') def __unicode__(self): return self._text def _progress_cb(self, cbdata, total, done): self._download_size = total self.progress.progress(self, done) def _fastestmirror_cb(self, cbdata, stage, data): if stage == 
libdnf.repo.RepoCB.FastestMirrorStage_DETECTION: # pinging mirrors, this might take a while msg = _('determining the fastest mirror (%s hosts).. ') % data self.fastest_mirror_running = True elif stage == libdnf.repo.RepoCB.FastestMirrorStage_STATUS and self.fastest_mirror_running: # done.. report but ignore any errors msg = 'error: %s\n' % data if data else 'done.\n' else: return self.progress.message(msg) def _mirror_failure_cb(self, cbdata, msg, url, metadata): self.mirror_failures.add(msg) msg = 'error: %s (%s).' % (msg, url) logger.debug(msg) @property def download_size(self): return self._download_size @property def progress(self): return self._progress @progress.setter def progress(self, progress): if progress is None: progress = dnf.callback.NullDownloadProgress() self._progress = progress def start(self, text): self._text = text self.progress.start(1, 0) def end(self): self._download_size = 0 self.progress.end(self, None, None) # use the local cache even if it's expired. download if there's no cache. SYNC_LAZY = libdnf.repo.Repo.SyncStrategy_LAZY # use the local cache, even if it's expired, never download. SYNC_ONLY_CACHE = libdnf.repo.Repo.SyncStrategy_ONLY_CACHE # try the cache, if it is expired download new md. SYNC_TRY_CACHE = libdnf.repo.Repo.SyncStrategy_TRY_CACHE class RepoCallbacks(libdnf.repo.RepoCB): def __init__(self, repo): super(RepoCallbacks, self).__init__() self._repo = repo self._md_pload = repo._md_pload def start(self, what): self._md_pload.start(what) def end(self): self._md_pload.end() def progress(self, totalToDownload, downloaded): self._md_pload._progress_cb(None, totalToDownload, downloaded) return 0 def fastestMirror(self, stage, ptr): self._md_pload._fastestmirror_cb(None, stage, ptr) def handleMirrorFailure(self, msg, url, metadata): self._md_pload._mirror_failure_cb(None, msg, url, metadata) return 0 def repokeyImport(self, id, userid, fingerprint, url, timestamp): return self._repo._key_import._confirm(id, userid, fingerprint, url, timestamp) class Repo(dnf.conf.RepoConf): # :api DEFAULT_SYNC = SYNC_TRY_CACHE def __init__(self, name=None, parent_conf=None): # :api super(Repo, self).__init__(section=name, parent=parent_conf) self._config.this.disown() # _repo will be the owner of _config self._repo = libdnf.repo.Repo(name if name else "", self._config) self._md_pload = MDPayload(dnf.callback.NullDownloadProgress()) self._callbacks = RepoCallbacks(self) self._callbacks.this.disown() # _repo will be the owner of callbacks self._repo.setCallbacks(self._callbacks) self._pkgdir = None self._key_import = _NullKeyImport() self.metadata = None # :api self._repo.setSyncStrategy(SYNC_ONLY_CACHE if parent_conf and parent_conf.cacheonly else self.DEFAULT_SYNC) if parent_conf: self._repo.setSubstitutions(parent_conf.substitutions) self._substitutions = dnf.conf.substitutions.Substitutions() self._check_config_file_age = parent_conf.check_config_file_age \ if parent_conf is not None else True @property def id(self): # :api return self._repo.getId() @property def repofile(self): # :api return self._repo.getRepoFilePath() @repofile.setter def repofile(self, value): self._repo.setRepoFilePath(value) @property def pkgdir(self): # :api if self._repo.isLocal(): return self._repo.getLocalBaseurl() return self.cache_pkgdir() def cache_pkgdir(self): if self._pkgdir is not None: return self._pkgdir return os.path.join(self._repo.getCachedir(), _PACKAGES_RELATIVE_DIR) @pkgdir.setter def pkgdir(self, val): # :api self._pkgdir = val @property def _pubring_dir(self): return 
os.path.join(self._repo.getCachedir(), 'pubring') @property def load_metadata_other(self): return self._repo.getLoadMetadataOther() @load_metadata_other.setter def load_metadata_other(self, val): self._repo.setLoadMetadataOther(val) def __lt__(self, other): return self.id < other.id def __repr__(self): return "<%s %s>" % (self.__class__.__name__, self.id) def __setattr__(self, name, value): super(Repo, self).__setattr__(name, value) def disable(self): # :api self._repo.disable() def enable(self): # :api self._repo.enable() def add_metadata_type_to_download(self, metadata_type): # :api """Ask for additional repository metadata type to download. Given metadata_type is appended to the default metadata set when repository is downloaded. Parameters ---------- metadata_type: string Example: add_metadata_type_to_download("productid") """ self._repo.addMetadataTypeToDownload(metadata_type) def remove_metadata_type_from_download(self, metadata_type): # :api """Stop asking for this additional repository metadata type in download. Given metadata_type is no longer downloaded by default when this repository is downloaded. Parameters ---------- metadata_type: string Example: remove_metadata_type_from_download("productid") """ self._repo.removeMetadataTypeFromDownload(metadata_type) def get_metadata_path(self, metadata_type): # :api """Return path to the file with downloaded repository metadata of given type. Parameters ---------- metadata_type: string """ return self._repo.getMetadataPath(metadata_type) def get_metadata_content(self, metadata_type): # :api """Return content of the file with downloaded repository metadata of given type. Content of compressed metadata file is returned uncompressed. Parameters ---------- metadata_type: string """ return self._repo.getMetadataContent(metadata_type) def load(self): # :api """Load the metadata for this repo. Depending on the configuration and the age and consistence of data available on the disk cache, either loads the metadata from the cache or downloads them from the mirror, baseurl or metalink. This method will by default not try to refresh already loaded data if called repeatedly. Returns True if this call to load() caused a fresh metadata download. """ ret = False try: ret = self._repo.load() except (libdnf.error.Error, RuntimeError) as e: if self._md_pload.mirror_failures: msg = "Errors during downloading metadata for repository '%s':" % self.id for failure in self._md_pload.mirror_failures: msg += "\n - %s" % failure logger.warning(msg) raise dnf.exceptions.RepoError(str(e)) finally: self._md_pload.mirror_failures = set() self.metadata = Metadata(self._repo) return ret def _metadata_expire_in(self): """Get the number of seconds after which the cached metadata will expire. Returns a tuple, boolean whether there even is cached metadata and the number of seconds it will expire in. Negative number means the metadata has expired already, None that it never expires. """ if not self.metadata: self._repo.loadCache(False) if self.metadata: if self.metadata_expire == -1: return True, None expiration = self._repo.getExpiresIn() if self._repo.isExpired(): expiration = min(0, expiration) return True, expiration return False, 0 def _set_key_import(self, key_import): self._key_import = key_import def set_progress_bar(self, progress): # :api self._md_pload.progress = progress def get_http_headers(self): # :api """Returns user defined http headers. 
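        Only headers previously installed with set_http_headers() are reported;
        a minimal sketch (the header values are made up):

            repo.set_http_headers(["User-Agent: Agent007", "MyFieldName: MyFieldValue"])
            headers = repo.get_http_headers()  # tuple with the two strings above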
Returns ------- headers : tuple of strings """ return self._repo.getHttpHeaders() def set_http_headers(self, headers): # :api """Sets http headers. Sets new http headers and rewrites existing ones. Parameters ---------- headers : tuple or list of strings Example: set_http_headers(["User-Agent: Agent007", "MyFieldName: MyFieldValue"]) """ self._repo.setHttpHeaders(headers) def remote_location(self, location, schemes=('http', 'ftp', 'file', 'https')): """ :param location: relative location inside the repo :param schemes: list of allowed protocols. Default is ('http', 'ftp', 'file', 'https') :return: absolute url (string) or None """ def schemes_filter(url_list): for url in url_list: if schemes: s = dnf.pycomp.urlparse.urlparse(url)[0] if s in schemes: return os.path.join(url, location.lstrip('/')) else: return os.path.join(url, location.lstrip('/')) return None if not location: return None mirrors = self._repo.getMirrors() if mirrors: return schemes_filter(mirrors) elif self.baseurl: return schemes_filter(self.baseurl) PK!uTV&& repodict.pynu[# repodict.py # Managing repo configuration in DNF. # # Copyright (C) 2013-2016 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # from __future__ import unicode_literals from dnf.exceptions import ConfigError from dnf.i18n import _ import dnf.util import libdnf.conf import fnmatch import os logger = dnf.util.logger class RepoDict(dict): # :api def add(self, repo): # :api id_ = repo.id if id_ in self: msg = 'Repository %s is listed more than once in the configuration' raise ConfigError(msg % id_) try: repo._repo.verify() except RuntimeError as e: raise ConfigError("{0}".format(e)) self[id_] = repo def all(self): # :api return dnf.util.MultiCallList(self.values()) def _any_enabled(self): return not dnf.util.empty(self.iter_enabled()) def _enable_sub_repos(self, sub_name_fn): for repo in self.iter_enabled(): for found in self.get_matching(sub_name_fn(repo.id)): if not found.enabled: logger.info(_('enabling %s repository'), found.id) found.enable() def add_new_repo(self, repoid, conf, baseurl=(), **kwargs): # :api """ Creates new repo object and add it into RepoDict. Variables in provided values will be automatically substituted using conf.substitutions (like $releasever, ...) 
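        Example (illustrative values; any writable dnf.repo.Repo attribute may
        be passed as a keyword argument):

            base.repos.add_new_repo(
                'rawhide-source', base.conf,
                baseurl=['https://example.com/linux/$releasever/source/'],
                skip_if_unavailable=True)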
@param repoid: Repo ID - string @param conf: dnf Base().conf object @param baseurl: List of strings @param kwargs: keys and values that will be used to setattr on dnf.repo.Repo() object @return: dnf.repo.Repo() object """ def substitute(values): if isinstance(values, str): return libdnf.conf.ConfigParser.substitute(values, conf.substitutions) elif isinstance(values, list) or isinstance(values, tuple): substituted = [] for value in values: if isinstance(value, str): substituted.append( libdnf.conf.ConfigParser.substitute(value, conf.substitutions)) if substituted: return substituted return values repo = dnf.repo.Repo(repoid, conf) for path in baseurl: if '://' not in path: path = 'file://{}'.format(os.path.abspath(path)) repo.baseurl += [substitute(path)] for (key, value) in kwargs.items(): setattr(repo, key, substitute(value)) self.add(repo) logger.info(_("Added %s repo from %s"), repoid, ', '.join(baseurl)) return repo def enable_debug_repos(self): # :api """enable debug repos corresponding to already enabled binary repos""" def debug_name(name): return ("{}-debug-rpms".format(name[:-5]) if name.endswith("-rpms") else "{}-debuginfo".format(name)) self._enable_sub_repos(debug_name) def enable_source_repos(self): # :api """enable source repos corresponding to already enabled binary repos""" def source_name(name): return ("{}-source-rpms".format(name[:-5]) if name.endswith("-rpms") else "{}-source".format(name)) self._enable_sub_repos(source_name) def get_matching(self, key): # :api if dnf.util.is_glob_pattern(key): l = [self[k] for k in self if fnmatch.fnmatch(k, key)] return dnf.util.MultiCallList(l) repo = self.get(key, None) if repo is None: return dnf.util.MultiCallList([]) return dnf.util.MultiCallList([repo]) def iter_enabled(self): # :api return (r for r in self.values() if r.enabled) def items(self): """return repos sorted by priority""" return (item for item in sorted(super(RepoDict, self).items(), key=lambda x: (x[1].priority, x[1].cost))) def __iter__(self): return self.keys() def keys(self): return (k for k, v in self.items()) def values(self): return (v for k, v in self.items()) PK!o4 sack.pynu[# sack.py # The dnf.Sack class, derived from hawkey.Sack # # Copyright (C) 2012-2016 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. 
# from __future__ import absolute_import from __future__ import unicode_literals import dnf.util import dnf.package import dnf.query import logging import hawkey import os from dnf.pycomp import basestring from dnf.i18n import _ logger = logging.getLogger("dnf") class Sack(hawkey.Sack): # :api def __init__(self, *args, **kwargs): super(Sack, self).__init__(*args, **kwargs) def _configure(self, installonly=None, installonly_limit=0, allow_vendor_change=None): if installonly: self.installonly = installonly self.installonly_limit = installonly_limit if allow_vendor_change is not None: self.allow_vendor_change = allow_vendor_change if allow_vendor_change is False: logger.warning(_("allow_vendor_change is disabled. This option is currently not supported for downgrade and distro-sync commands")) def query(self, flags=0): # :api """Factory function returning a DNF Query.""" return dnf.query.Query(self, flags) def _build_sack(base): cachedir = base.conf.cachedir # create the dir ourselves so we have the permissions under control: dnf.util.ensure_dir(cachedir) return Sack(pkgcls=dnf.package.Package, pkginitval=base, arch=base.conf.substitutions["arch"], cachedir=cachedir, rootdir=base.conf.installroot, logfile=os.path.join(base.conf.logdir, dnf.const.LOG_HAWKEY), logdebug=base.conf.logfilelevel > 9) def _rpmdb_sack(base): # used by subscription-manager (src/dnf-plugins/product-id.py) sack = _build_sack(base) try: # It can fail if rpmDB is not present sack.load_system_repo(build_cache=False) except IOError: pass return sack def rpmdb_sack(base): # :api """ Returns a new instance of sack containing only installed packages (@System repo) Useful to get list of the installed RPMs after transaction. """ return _rpmdb_sack(base) PK!eee selector.pynu[# selector.py # DNF specific hawkey.Selector handling. # # Copyright (C) 2012-2016 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # from __future__ import absolute_import from __future__ import unicode_literals from hawkey import Selector PK!ū~~ subject.pynu[# subject.py # Implements Subject. # # Copyright (C) 2012-2016 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. 
You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # from __future__ import absolute_import from __future__ import print_function from __future__ import unicode_literals from hawkey import Subject # :api PK!S--transaction.pynu[# -*- coding: utf-8 -*- # transaction.py # Managing the transaction to be passed to RPM. # # Copyright (C) 2013-2018 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # from __future__ import absolute_import from __future__ import unicode_literals import libdnf.transaction from dnf.i18n import _, C_ # :api - all action constants are considered an API # per-package actions - from libdnf PKG_DOWNGRADE = libdnf.transaction.TransactionItemAction_DOWNGRADE PKG_DOWNGRADED = libdnf.transaction.TransactionItemAction_DOWNGRADED PKG_INSTALL = libdnf.transaction.TransactionItemAction_INSTALL PKG_OBSOLETE = libdnf.transaction.TransactionItemAction_OBSOLETE PKG_OBSOLETED = libdnf.transaction.TransactionItemAction_OBSOLETED PKG_REINSTALL = libdnf.transaction.TransactionItemAction_REINSTALL PKG_REINSTALLED = libdnf.transaction.TransactionItemAction_REINSTALLED PKG_REMOVE = libdnf.transaction.TransactionItemAction_REMOVE PKG_UPGRADE = libdnf.transaction.TransactionItemAction_UPGRADE PKG_UPGRADED = libdnf.transaction.TransactionItemAction_UPGRADED # compatibility PKG_ERASE = PKG_REMOVE # per-package actions - additional PKG_CLEANUP = 101 PKG_VERIFY = 102 PKG_SCRIPTLET = 103 # transaction-wide actions TRANS_PREPARATION = 201 TRANS_POST = 202 # packages that appeared on the system FORWARD_ACTIONS = [ libdnf.transaction.TransactionItemAction_INSTALL, libdnf.transaction.TransactionItemAction_DOWNGRADE, libdnf.transaction.TransactionItemAction_OBSOLETE, libdnf.transaction.TransactionItemAction_UPGRADE, libdnf.transaction.TransactionItemAction_REINSTALL, ] # packages that got removed from the system BACKWARD_ACTIONS = [ libdnf.transaction.TransactionItemAction_DOWNGRADED, libdnf.transaction.TransactionItemAction_OBSOLETED, libdnf.transaction.TransactionItemAction_UPGRADED, libdnf.transaction.TransactionItemAction_REMOVE, # TODO: REINSTALLED may and may not belong here; the same NEVRA is in FORWARD_ACTIONS already # libdnf.transaction.TransactionItemAction_REINSTALLED, ] ACTIONS = { # TRANSLATORS: This is for a single package currently being downgraded. 
PKG_DOWNGRADE: C_('currently', 'Downgrading'), PKG_DOWNGRADED: _('Cleanup'), # TRANSLATORS: This is for a single package currently being installed. PKG_INSTALL: C_('currently', 'Installing'), PKG_OBSOLETE: _('Obsoleting'), PKG_OBSOLETED: _('Obsoleting'), # TRANSLATORS: This is for a single package currently being reinstalled. PKG_REINSTALL: C_('currently', 'Reinstalling'), PKG_REINSTALLED: _('Cleanup'), # TODO: 'Removing'? PKG_REMOVE: _('Erasing'), # TRANSLATORS: This is for a single package currently being upgraded. PKG_UPGRADE: C_('currently', 'Upgrading'), PKG_UPGRADED: _('Cleanup'), PKG_CLEANUP: _('Cleanup'), PKG_VERIFY: _('Verifying'), PKG_SCRIPTLET: _('Running scriptlet'), TRANS_PREPARATION: _('Preparing'), # TODO: TRANS_POST } # untranslated strings, logging to /var/log/dnf/dnf.rpm.log FILE_ACTIONS = { PKG_DOWNGRADE: 'Downgrade', PKG_DOWNGRADED: 'Downgraded', PKG_INSTALL: 'Installed', PKG_OBSOLETE: 'Obsolete', PKG_OBSOLETED: 'Obsoleted', PKG_REINSTALL: 'Reinstall', PKG_REINSTALLED: 'Reinstalled', # TODO: 'Removed'? PKG_REMOVE: 'Erase', PKG_UPGRADE: 'Upgrade', PKG_UPGRADED: 'Upgraded', PKG_CLEANUP: 'Cleanup', PKG_VERIFY: 'Verified', PKG_SCRIPTLET: 'Running scriptlet', TRANS_PREPARATION: 'Preparing', # TODO: TRANS_POST } PK!mqafaftransaction_sr.pynu[# Copyright (C) 2020 Red Hat, Inc. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Library General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. from __future__ import absolute_import from __future__ import print_function from __future__ import unicode_literals import libdnf import hawkey from dnf.i18n import _ import dnf.exceptions import json VERSION_MAJOR = 0 VERSION_MINOR = 0 VERSION = "%s.%s" % (VERSION_MAJOR, VERSION_MINOR) """ The version of the stored transaction. MAJOR version denotes backwards incompatible changes (old dnf won't work with new transaction JSON). MINOR version denotes extending the format without breaking backwards compatibility (old dnf can work with new transaction JSON). Forwards compatibility needs to be handled by being able to process the old format as well as the new one. 
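For example, a stored transaction declaring "version": "0.1" is still accepted
here (only the MINOR part is newer), while one declaring "1.0" is rejected as
incompatible.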
""" class TransactionError(dnf.exceptions.Error): def __init__(self, msg): super(TransactionError, self).__init__(msg) class TransactionReplayError(dnf.exceptions.Error): def __init__(self, filename, errors): """ :param filename: The name of the transaction file being replayed :param errors: a list of error classes or a string with an error description """ # store args in case someone wants to read them from a caught exception self.filename = filename if isinstance(errors, (list, tuple)): self.errors = errors else: self.errors = [errors] if filename: msg = _('The following problems occurred while replaying the transaction from file "{filename}":').format(filename=filename) else: msg = _('The following problems occurred while running a transaction:') for error in self.errors: msg += "\n " + str(error) super(TransactionReplayError, self).__init__(msg) class IncompatibleTransactionVersionError(TransactionReplayError): def __init__(self, filename, msg): super(IncompatibleTransactionVersionError, self).__init__(filename, msg) def _check_version(version, filename): major, minor = version.split('.') try: major = int(major) except ValueError as e: raise TransactionReplayError( filename, _('Invalid major version "{major}", number expected.').format(major=major) ) try: int(minor) # minor is unused, just check it's a number except ValueError as e: raise TransactionReplayError( filename, _('Invalid minor version "{minor}", number expected.').format(minor=minor) ) if major != VERSION_MAJOR: raise IncompatibleTransactionVersionError( filename, _('Incompatible major version "{major}", supported major version is "{major_supp}".') .format(major=major, major_supp=VERSION_MAJOR) ) def serialize_transaction(transaction): """ Serializes a transaction to a data structure that is equivalent to the stored JSON format. :param transaction: the transaction to serialize (an instance of dnf.db.history.TransactionWrapper) """ data = { "version": VERSION, } rpms = [] groups = [] environments = [] if transaction is None: return data for tsi in transaction.packages(): if tsi.is_package(): rpms.append({ "action": tsi.action_name, "nevra": tsi.nevra, "reason": libdnf.transaction.TransactionItemReasonToString(tsi.reason), "repo_id": tsi.from_repo }) elif tsi.is_group(): group = tsi.get_group() group_data = { "action": tsi.action_name, "id": group.getGroupId(), "packages": [], "package_types": libdnf.transaction.compsPackageTypeToString(group.getPackageTypes()) } for pkg in group.getPackages(): group_data["packages"].append({ "name": pkg.getName(), "installed": pkg.getInstalled(), "package_type": libdnf.transaction.compsPackageTypeToString(pkg.getPackageType()) }) groups.append(group_data) elif tsi.is_environment(): env = tsi.get_environment() env_data = { "action": tsi.action_name, "id": env.getEnvironmentId(), "groups": [], "package_types": libdnf.transaction.compsPackageTypeToString(env.getPackageTypes()) } for grp in env.getGroups(): env_data["groups"].append({ "id": grp.getGroupId(), "installed": grp.getInstalled(), "group_type": libdnf.transaction.compsPackageTypeToString(grp.getGroupType()) }) environments.append(env_data) if rpms: data["rpms"] = rpms if groups: data["groups"] = groups if environments: data["environments"] = environments return data class TransactionReplay(object): """ A class that encapsulates replaying a transaction. The transaction data are loaded and stored when the class is initialized. 
The transaction is run by calling the `run()` method, after the transaction is created (but before it is performed), the `post_transaction()` method needs to be called to verify no extra packages were pulled in and also to fix the reasons. """ def __init__( self, base, filename="", data=None, ignore_extras=False, ignore_installed=False, skip_unavailable=False ): """ :param base: the dnf base :param filename: the filename to load the transaction from (conflicts with the 'data' argument) :param data: the dictionary to load the transaction from (conflicts with the 'filename' argument) :param ignore_extras: whether to ignore extra package pulled into the transaction :param ignore_installed: whether to ignore installed versions of packages :param skip_unavailable: whether to skip transaction packages that aren't available """ self._base = base self._filename = filename self._ignore_installed = ignore_installed self._ignore_extras = ignore_extras self._skip_unavailable = skip_unavailable if not self._base.conf.strict: self._skip_unavailable = True self._nevra_cache = set() self._nevra_reason_cache = {} self._warnings = [] if filename and data: raise ValueError(_("Conflicting TransactionReplay arguments have been specified: filename, data")) elif filename: self._load_from_file(filename) else: self._load_from_data(data) def _load_from_file(self, fn): self._filename = fn with open(fn, "r") as f: try: replay_data = json.load(f) except json.decoder.JSONDecodeError as e: raise TransactionReplayError(fn, str(e) + ".") try: self._load_from_data(replay_data) except TransactionError as e: raise TransactionReplayError(fn, e) def _load_from_data(self, data): self._replay_data = data self._verify_toplevel_json(self._replay_data) self._rpms = self._replay_data.get("rpms", []) self._assert_type(self._rpms, list, "rpms", "array") self._groups = self._replay_data.get("groups", []) self._assert_type(self._groups, list, "groups", "array") self._environments = self._replay_data.get("environments", []) self._assert_type(self._environments, list, "environments", "array") def _raise_or_warn(self, warn_only, msg): if warn_only: self._warnings.append(msg) else: raise TransactionError(msg) def _assert_type(self, value, t, id, expected): if not isinstance(value, t): raise TransactionError(_('Unexpected type of "{id}", {exp} expected.').format(id=id, exp=expected)) def _verify_toplevel_json(self, replay_data): fn = self._filename if "version" not in replay_data: raise TransactionReplayError(fn, _('Missing key "{key}".'.format(key="version"))) self._assert_type(replay_data["version"], str, "version", "string") _check_version(replay_data["version"], fn) def _replay_pkg_action(self, pkg_data): try: action = pkg_data["action"] nevra = pkg_data["nevra"] repo_id = pkg_data["repo_id"] reason = libdnf.transaction.StringToTransactionItemReason(pkg_data["reason"]) except KeyError as e: raise TransactionError( _('Missing object key "{key}" in an rpm.').format(key=e.args[0]) ) except IndexError as e: raise TransactionError( _('Unexpected value of package reason "{reason}" for rpm nevra "{nevra}".') .format(reason=pkg_data["reason"], nevra=nevra) ) subj = hawkey.Subject(nevra) parsed_nevras = subj.get_nevra_possibilities(forms=[hawkey.FORM_NEVRA]) if len(parsed_nevras) != 1: raise TransactionError(_('Cannot parse NEVRA for package "{nevra}".').format(nevra=nevra)) parsed_nevra = parsed_nevras[0] na = "%s.%s" % (parsed_nevra.name, parsed_nevra.arch) query_na = self._base.sack.query().filter(name=parsed_nevra.name, 
arch=parsed_nevra.arch) epoch = parsed_nevra.epoch if parsed_nevra.epoch is not None else 0 query = query_na.filter(epoch=epoch, version=parsed_nevra.version, release=parsed_nevra.release) # In case the package is found in the same repo as in the original # transaction, limit the query to that plus installed packages. IOW # remove packages with the same NEVRA in case they are found in # multiple repos and the repo the package came from originally is one # of them. # This can e.g. make a difference in the system-upgrade plugin, in case # the same NEVRA is in two repos, this makes sure the same repo is used # for both download and upgrade steps of the plugin. if repo_id: query_repo = query.filter(reponame=repo_id) if query_repo: query = query_repo.union(query.installed()) if not query: self._raise_or_warn(self._skip_unavailable, _('Cannot find rpm nevra "{nevra}".').format(nevra=nevra)) return # a cache to check no extra packages were pulled into the transaction if action != "Reason Change": self._nevra_cache.add(nevra) # store reasons for forward actions and "Removed", the rest of the # actions reasons should stay as they were determined by the transaction if action in ("Install", "Upgrade", "Downgrade", "Reinstall", "Removed"): self._nevra_reason_cache[nevra] = reason if action in ("Install", "Upgrade", "Downgrade"): if action == "Install" and query_na.installed() and not self._base._get_installonly_query(query_na): self._raise_or_warn(self._ignore_installed, _('Package "{na}" is already installed for action "{action}".').format(na=na, action=action)) sltr = dnf.selector.Selector(self._base.sack).set(pkg=query) self._base.goal.install(select=sltr, optional=not self._base.conf.strict) elif action == "Reinstall": query = query.available() if not query: self._raise_or_warn(self._skip_unavailable, _('Package nevra "{nevra}" not available in repositories for action "{action}".') .format(nevra=nevra, action=action)) return sltr = dnf.selector.Selector(self._base.sack).set(pkg=query) self._base.goal.install(select=sltr, optional=not self._base.conf.strict) elif action in ("Upgraded", "Downgraded", "Reinstalled", "Removed", "Obsoleted"): query = query.installed() if not query: self._raise_or_warn(self._ignore_installed, _('Package nevra "{nevra}" not installed for action "{action}".').format(nevra=nevra, action=action)) return # erasing the original version (the reverse part of an action like # e.g. 
upgrade) is more robust, but we can't do it if # skip_unavailable is True, because if the forward part of the # action is skipped, we would simply remove the package here if not self._skip_unavailable or action == "Removed": for pkg in query: self._base.goal.erase(pkg, clean_deps=False) elif action == "Reason Change": self._base.history.set_reason(query[0], reason) else: raise TransactionError( _('Unexpected value of package action "{action}" for rpm nevra "{nevra}".') .format(action=action, nevra=nevra) ) def _create_swdb_group(self, group_id, pkg_types, pkgs): comps_group = self._base.comps._group_by_id(group_id) if not comps_group: self._raise_or_warn(self._skip_unavailable, _("Group id '%s' is not available.") % group_id) return None swdb_group = self._base.history.group.new(group_id, comps_group.name, comps_group.ui_name, pkg_types) try: for pkg in pkgs: name = pkg["name"] self._assert_type(name, str, "groups.packages.name", "string") installed = pkg["installed"] self._assert_type(installed, bool, "groups.packages.installed", "boolean") package_type = pkg["package_type"] self._assert_type(package_type, str, "groups.packages.package_type", "string") try: swdb_group.addPackage(name, installed, libdnf.transaction.stringToCompsPackageType(package_type)) except libdnf.error.Error as e: raise TransactionError(str(e)) except KeyError as e: raise TransactionError( _('Missing object key "{key}" in groups.packages.').format(key=e.args[0]) ) return swdb_group def _swdb_group_install(self, group_id, pkg_types, pkgs): swdb_group = self._create_swdb_group(group_id, pkg_types, pkgs) if swdb_group is not None: self._base.history.group.install(swdb_group) def _swdb_group_upgrade(self, group_id, pkg_types, pkgs): if not self._base.history.group.get(group_id): self._raise_or_warn( self._ignore_installed, _("Group id '%s' is not installed.") % group_id) return swdb_group = self._create_swdb_group(group_id, pkg_types, pkgs) if swdb_group is not None: self._base.history.group.upgrade(swdb_group) def _swdb_group_downgrade(self, group_id, pkg_types, pkgs): if not self._base.history.group.get(group_id): self._raise_or_warn(self._ignore_installed, _("Group id '%s' is not installed.") % group_id) return swdb_group = self._create_swdb_group(group_id, pkg_types, pkgs) if swdb_group is not None: self._base.history.group.downgrade(swdb_group) def _swdb_group_remove(self, group_id, pkg_types, pkgs): if not self._base.history.group.get(group_id): self._raise_or_warn(self._ignore_installed, _("Group id '%s' is not installed.") % group_id) return swdb_group = self._create_swdb_group(group_id, pkg_types, pkgs) if swdb_group is not None: self._base.history.group.remove(swdb_group) def _create_swdb_environment(self, env_id, pkg_types, groups): comps_env = self._base.comps._environment_by_id(env_id) if not comps_env: self._raise_or_warn(self._skip_unavailable, _("Environment id '%s' is not available.") % env_id) return None swdb_env = self._base.history.env.new(env_id, comps_env.name, comps_env.ui_name, pkg_types) try: for grp in groups: id = grp["id"] self._assert_type(id, str, "environments.groups.id", "string") installed = grp["installed"] self._assert_type(installed, bool, "environments.groups.installed", "boolean") group_type = grp["group_type"] self._assert_type(group_type, str, "environments.groups.group_type", "string") try: group_type = libdnf.transaction.stringToCompsPackageType(group_type) except libdnf.error.Error as e: raise TransactionError(str(e)) if group_type not in ( 
libdnf.transaction.CompsPackageType_MANDATORY, libdnf.transaction.CompsPackageType_OPTIONAL ): raise TransactionError( _('Invalid value "{group_type}" of environments.groups.group_type, ' 'only "mandatory" or "optional" is supported.' ).format(group_type=grp["group_type"]) ) swdb_env.addGroup(id, installed, group_type) except KeyError as e: raise TransactionError( _('Missing object key "{key}" in environments.groups.').format(key=e.args[0]) ) return swdb_env def _swdb_environment_install(self, env_id, pkg_types, groups): swdb_env = self._create_swdb_environment(env_id, pkg_types, groups) if swdb_env is not None: self._base.history.env.install(swdb_env) def _swdb_environment_upgrade(self, env_id, pkg_types, groups): if not self._base.history.env.get(env_id): self._raise_or_warn(self._ignore_installed,_("Environment id '%s' is not installed.") % env_id) return swdb_env = self._create_swdb_environment(env_id, pkg_types, groups) if swdb_env is not None: self._base.history.env.upgrade(swdb_env) def _swdb_environment_downgrade(self, env_id, pkg_types, groups): if not self._base.history.env.get(env_id): self._raise_or_warn(self._ignore_installed, _("Environment id '%s' is not installed.") % env_id) return swdb_env = self._create_swdb_environment(env_id, pkg_types, groups) if swdb_env is not None: self._base.history.env.downgrade(swdb_env) def _swdb_environment_remove(self, env_id, pkg_types, groups): if not self._base.history.env.get(env_id): self._raise_or_warn(self._ignore_installed, _("Environment id '%s' is not installed.") % env_id) return swdb_env = self._create_swdb_environment(env_id, pkg_types, groups) if swdb_env is not None: self._base.history.env.remove(swdb_env) def get_data(self): """ :returns: the loaded data of the transaction """ return self._replay_data def get_warnings(self): """ :returns: an array of warnings gathered during the transaction replay """ return self._warnings def run(self): """ Replays the transaction. 
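Errors encountered while replaying individual rpms, groups or environments
are collected and raised together as a single TransactionReplayError once all
entries have been processed, so one invalid entry does not prevent the
remaining ones from being queued.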
""" fn = self._filename errors = [] for pkg_data in self._rpms: try: self._replay_pkg_action(pkg_data) except TransactionError as e: errors.append(e) for group_data in self._groups: try: action = group_data["action"] group_id = group_data["id"] try: pkg_types = libdnf.transaction.stringToCompsPackageType(group_data["package_types"]) except libdnf.error.Error as e: errors.append(TransactionError(str(e))) continue if action == "Install": self._swdb_group_install(group_id, pkg_types, group_data["packages"]) elif action == "Removed": self._swdb_group_remove(group_id, pkg_types, group_data["packages"]) # Groups are not versioned, but a reverse transaction could be applied, # therefore we treat both actions the same way elif action == "Upgrade" or action == "Upgraded": self._swdb_group_upgrade(group_id, pkg_types, group_data["packages"]) elif action == "Downgrade" or action == "Downgraded": self._swdb_group_downgrade(group_id, pkg_types, group_data["packages"]) else: errors.append(TransactionError( _('Unexpected value of group action "{action}" for group "{group}".') .format(action=action, group=group_id) )) except KeyError as e: errors.append(TransactionError( _('Missing object key "{key}" in a group.').format(key=e.args[0]) )) except TransactionError as e: errors.append(e) for env_data in self._environments: try: action = env_data["action"] env_id = env_data["id"] try: pkg_types = libdnf.transaction.stringToCompsPackageType(env_data["package_types"]) except libdnf.error.Error as e: errors.append(TransactionError(str(e))) continue if action == "Install": self._swdb_environment_install(env_id, pkg_types, env_data["groups"]) elif action == "Removed": self._swdb_environment_remove(env_id, pkg_types, env_data["groups"]) # Environments are not versioned, but a reverse transaction could be applied, # therefore we treat both actions the same way elif action == "Upgrade" or action == "Upgraded": self._swdb_environment_upgrade(env_id, pkg_types, env_data["groups"]) elif action == "Downgrade" or action == "Downgraded": self._swdb_environment_downgrade(env_id, pkg_types, env_data["groups"]) else: errors.append(TransactionError( _('Unexpected value of environment action "{action}" for environment "{env}".') .format(action=action, env=env_id) )) except KeyError as e: errors.append(TransactionError( _('Missing object key "{key}" in an environment.').format(key=e.args[0]) )) except TransactionError as e: errors.append(e) if errors: raise TransactionReplayError(fn, errors) def post_transaction(self): """ Sets reasons in the transaction history to values from the stored transaction. Also serves to check whether additional packages were pulled in by the transaction, which results in an error (unless ignore_extras is True). """ if not self._base.transaction: return errors = [] for tsi in self._base.transaction: try: pkg = tsi.pkg except KeyError as e: # the transaction item has no package, happens for action == "Reason Change" continue nevra = str(pkg) if nevra not in self._nevra_cache: # if ignore_installed is True, we don't want to check for # Upgraded/Downgraded/Reinstalled extras in the transaction, # basically those may be installed and we are ignoring them if not self._ignore_installed or not tsi.action in ( libdnf.transaction.TransactionItemAction_UPGRADED, libdnf.transaction.TransactionItemAction_DOWNGRADED, libdnf.transaction.TransactionItemAction_REINSTALLED ): msg = _('Package nevra "{nevra}", which is not present in the transaction file, was pulled ' 'into the transaction.' 
).format(nevra=nevra) if not self._ignore_extras: errors.append(TransactionError(msg)) else: self._warnings.append(msg) try: replay_reason = self._nevra_reason_cache[nevra] if tsi.action in ( libdnf.transaction.TransactionItemAction_INSTALL, libdnf.transaction.TransactionItemAction_REMOVE ) or libdnf.transaction.TransactionItemReasonCompare(replay_reason, tsi.reason) > 0: tsi.reason = replay_reason except KeyError as e: # if the pkg nevra wasn't found, we don't want to change the reason pass if errors: raise TransactionReplayError(self._filename, errors) PK!OOutil.pynu[# util.py # Basic dnf utils. # # Copyright (C) 2012-2016 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # from __future__ import print_function from __future__ import absolute_import from __future__ import unicode_literals from .pycomp import PY3, basestring from dnf.i18n import _, ucd import argparse import dnf import dnf.callback import dnf.const import dnf.pycomp import errno import functools import hawkey import itertools import locale import logging import os import pwd import shutil import sys import tempfile import time import libdnf.repo import libdnf.transaction logger = logging.getLogger('dnf') MAIN_PROG = argparse.ArgumentParser().prog if argparse.ArgumentParser().prog == "yum" else "dnf" MAIN_PROG_UPPER = MAIN_PROG.upper() """DNF Utilities.""" def _parse_specs(namespace, values): """ Categorize :param values list into packages, groups and filenames :param namespace: argparse.Namespace, where specs will be stored :param values: list of specs, whether packages ('foo') or groups/modules ('@bar') or filenames ('*.rmp', 'http://*', ...) 
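A rough illustration:

    ns = argparse.Namespace()
    _parse_specs(ns, ['acpi', '@virtualization', './noarch/foo-1-1.noarch.rpm'])
    # ns.pkg_specs == ['acpi']
    # ns.grp_specs == ['virtualization']
    # ns.filenames == ['./noarch/foo-1-1.noarch.rpm']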
To access packages use: specs.pkg_specs, to access groups use: specs.grp_specs, to access filenames use: specs.filenames """ setattr(namespace, "filenames", []) setattr(namespace, "grp_specs", []) setattr(namespace, "pkg_specs", []) tmp_set = set() for value in values: if value in tmp_set: continue tmp_set.add(value) schemes = dnf.pycomp.urlparse.urlparse(value)[0] if value.endswith('.rpm'): namespace.filenames.append(value) elif schemes and schemes in ('http', 'ftp', 'file', 'https'): namespace.filenames.append(value) elif value.startswith('@'): namespace.grp_specs.append(value[1:]) else: namespace.pkg_specs.append(value) def _urlopen_progress(url, conf, progress=None): if progress is None: progress = dnf.callback.NullDownloadProgress() pload = dnf.repo.RemoteRPMPayload(url, conf, progress) if os.path.exists(pload.local_path): return pload.local_path est_remote_size = sum([pload.download_size]) progress.start(1, est_remote_size) targets = [pload._librepo_target()] try: libdnf.repo.PackageTarget.downloadPackages(libdnf.repo.VectorPPackageTarget(targets), True) except RuntimeError as e: if conf.strict: raise IOError(str(e)) logger.error(str(e)) return pload.local_path def _urlopen(url, conf=None, repo=None, mode='w+b', **kwargs): """ Open the specified absolute url, return a file object which respects proxy setting even for non-repo downloads """ if PY3 and 'b' not in mode: kwargs.setdefault('encoding', 'utf-8') fo = tempfile.NamedTemporaryFile(mode, **kwargs) try: if repo: repo._repo.downloadUrl(url, fo.fileno()) else: libdnf.repo.Downloader.downloadURL(conf._config if conf else None, url, fo.fileno()) except RuntimeError as e: raise IOError(str(e)) fo.seek(0) return fo def rtrim(s, r): if s.endswith(r): s = s[:-len(r)] return s def am_i_root(): # used by ansible (lib/ansible/modules/packaging/os/dnf.py) return os.geteuid() == 0 def clear_dir(path): """Remove all files and dirs under `path` Also see rm_rf() """ for entry in os.listdir(path): contained_path = os.path.join(path, entry) rm_rf(contained_path) def ensure_dir(dname): # used by ansible (lib/ansible/modules/packaging/os/dnf.py) try: os.makedirs(dname, mode=0o755) except OSError as e: if e.errno != errno.EEXIST or not os.path.isdir(dname): raise e def split_path(path): """ Split path by path separators. Use os.path.join() to join the path back to string. 
""" result = [] head = path while True: head, tail = os.path.split(head) if not tail: if head or not result: # if not result: make sure result is [""] so os.path.join(*result) can be called result.insert(0, head) break result.insert(0, tail) return result def empty(iterable): try: l = len(iterable) except TypeError: l = len(list(iterable)) return l == 0 def first(iterable): """Returns the first item from an iterable or None if it has no elements.""" it = iter(iterable) try: return next(it) except StopIteration: return None def first_not_none(iterable): it = iter(iterable) try: return next(item for item in it if item is not None) except StopIteration: return None def file_age(fn): return time.time() - file_timestamp(fn) def file_timestamp(fn): return os.stat(fn).st_mtime def get_effective_login(): try: return pwd.getpwuid(os.geteuid())[0] except KeyError: return "UID: %s" % os.geteuid() def get_in(dct, keys, not_found): """Like dict.get() for nested dicts.""" for k in keys: dct = dct.get(k) if dct is None: return not_found return dct def group_by_filter(fn, iterable): def splitter(acc, item): acc[not bool(fn(item))].append(item) return acc return functools.reduce(splitter, iterable, ([], [])) def insert_if(item, iterable, condition): """Insert an item into an iterable by a condition.""" for original_item in iterable: if condition(original_item): yield item yield original_item def is_exhausted(iterator): """Test whether an iterator is exhausted.""" try: next(iterator) except StopIteration: return True else: return False def is_glob_pattern(pattern): if is_string_type(pattern): pattern = [pattern] return (isinstance(pattern, list) and any(set(p) & set("*[?") for p in pattern)) def is_string_type(obj): if PY3: return isinstance(obj, str) else: return isinstance(obj, basestring) def lazyattr(attrname): """Decorator to get lazy attribute initialization. Composes with @property. Force reinitialization by deleting the . """ def get_decorated(fn): def cached_getter(obj): try: return getattr(obj, attrname) except AttributeError: val = fn(obj) setattr(obj, attrname, val) return val return cached_getter return get_decorated def mapall(fn, *seq): """Like functools.map(), but return a list instead of an iterator. This means all side effects of fn take place even without iterating the result. """ return list(map(fn, *seq)) def normalize_time(timestamp): """Convert time into locale aware datetime string object.""" t = time.strftime("%c", time.localtime(timestamp)) if not dnf.pycomp.PY3: current_locale_setting = locale.getlocale()[1] if current_locale_setting: t = t.decode(current_locale_setting) return t def on_ac_power(): """Decide whether we are on line power. Returns True if we are on line power, False if not, None if it can not be decided. """ try: ps_folder = "/sys/class/power_supply" ac_nodes = [node for node in os.listdir(ps_folder) if node.startswith("AC")] if len(ac_nodes) > 0: ac_node = ac_nodes[0] with open("{}/{}/online".format(ps_folder, ac_node)) as ac_status: data = ac_status.read() return int(data) == 1 return None except (IOError, ValueError): return None def on_metered_connection(): """Decide whether we are on metered connection. 
Returns: True: if on metered connection False: if not None: if it can not be decided """ try: import dbus except ImportError: return None try: bus = dbus.SystemBus() proxy = bus.get_object("org.freedesktop.NetworkManager", "/org/freedesktop/NetworkManager") iface = dbus.Interface(proxy, "org.freedesktop.DBus.Properties") metered = iface.Get("org.freedesktop.NetworkManager", "Metered") except dbus.DBusException: return None if metered == 0: # NM_METERED_UNKNOWN return None elif metered in (1, 3): # NM_METERED_YES, NM_METERED_GUESS_YES return True elif metered in (2, 4): # NM_METERED_NO, NM_METERED_GUESS_NO return False else: # Something undocumented (at least at this moment) raise ValueError("Unknown value for metered property: %r", metered) def partition(pred, iterable): """Use a predicate to partition entries into false entries and true entries. Credit: Python library itertools' documentation. """ t1, t2 = itertools.tee(iterable) return dnf.pycomp.filterfalse(pred, t1), filter(pred, t2) def rm_rf(path): try: shutil.rmtree(path) except OSError: pass def split_by(iterable, condition): """Split an iterable into tuples by a condition. Inserts a separator before each item which meets the condition and then cuts the iterable by these separators. """ separator = object() # A unique object. # Create a function returning tuple of objects before the separator. def next_subsequence(it): return tuple(itertools.takewhile(lambda e: e != separator, it)) # Mark each place where the condition is met by the separator. marked = insert_if(separator, iterable, condition) # The 1st subsequence may be empty if the 1st item meets the condition. yield next_subsequence(marked) while True: subsequence = next_subsequence(marked) if not subsequence: break yield subsequence def strip_prefix(s, prefix): if s.startswith(prefix): return s[len(prefix):] return None def touch(path, no_create=False): """Create an empty file if it doesn't exist or bump it's timestamps. If no_create is True only bumps the timestamps. """ if no_create or os.access(path, os.F_OK): return os.utime(path, None) with open(path, 'a'): pass def _terminal_messenger(tp='write', msg="", out=sys.stdout): try: if tp == 'write': out.write(msg) elif tp == 'flush': out.flush() elif tp == 'write_flush': out.write(msg) out.flush() elif tp == 'print': print(msg, file=out) else: raise ValueError('Unsupported type: ' + tp) except IOError as e: logger.critical('{}: {}'.format(type(e).__name__, ucd(e))) pass def _format_resolve_problems(resolve_problems): """ Format string about problems in resolve :param resolve_problems: list with list of strings (output of goal.problem_rules()) :return: string """ msg = "" count_problems = (len(resolve_problems) > 1) for i, rs in enumerate(resolve_problems, start=1): if count_problems: msg += "\n " + _("Problem") + " %d: " % i else: msg += "\n " + _("Problem") + ": " msg += "\n - ".join(rs) return msg def _te_nevra(te): nevra = te.N() + '-' if te.E() is not None and te.E() != '0': nevra += te.E() + ':' return nevra + te.V() + '-' + te.R() + '.' 
+ te.A() def _log_rpm_trans_with_swdb(rpm_transaction, swdb_transaction): logger.debug("Logging transaction elements") for rpm_el in rpm_transaction: tsi = rpm_el.Key() tsi_state = None if tsi is not None: tsi_state = tsi.state msg = "RPM element: '{}', Key(): '{}', Key state: '{}', Failed() '{}': ".format( _te_nevra(rpm_el), tsi, tsi_state, rpm_el.Failed()) logger.debug(msg) for tsi in swdb_transaction: msg = "SWDB element: '{}', State: '{}', Action: '{}', From repo: '{}', Reason: '{}', " \ "Get reason: '{}'".format(str(tsi), tsi.state, tsi.action, tsi.from_repo, tsi.reason, tsi.get_reason()) logger.debug(msg) def _sync_rpm_trans_with_swdb(rpm_transaction, swdb_transaction): revert_actions = {libdnf.transaction.TransactionItemAction_DOWNGRADED, libdnf.transaction.TransactionItemAction_OBSOLETED, libdnf.transaction.TransactionItemAction_REMOVE, libdnf.transaction.TransactionItemAction_UPGRADED, libdnf.transaction.TransactionItemAction_REINSTALLED} cached_tsi = [tsi for tsi in swdb_transaction] el_not_found = False error = False for rpm_el in rpm_transaction: te_nevra = _te_nevra(rpm_el) tsi = rpm_el.Key() if tsi is None or not hasattr(tsi, "pkg"): for tsi_candidate in cached_tsi: if tsi_candidate.state != libdnf.transaction.TransactionItemState_UNKNOWN: continue if tsi_candidate.action not in revert_actions: continue if str(tsi_candidate) == te_nevra: tsi = tsi_candidate break if tsi is None or not hasattr(tsi, "pkg"): logger.critical(_("TransactionItem not found for key: {}").format(te_nevra)) el_not_found = True continue if rpm_el.Failed(): tsi.state = libdnf.transaction.TransactionItemState_ERROR error = True else: tsi.state = libdnf.transaction.TransactionItemState_DONE for tsi in cached_tsi: if tsi.state == libdnf.transaction.TransactionItemState_UNKNOWN: logger.critical(_("TransactionSWDBItem not found for key: {}").format(str(tsi))) el_not_found = True if error: logger.debug(_('Errors occurred during transaction.')) if el_not_found: _log_rpm_trans_with_swdb(rpm_transaction, cached_tsi) class tmpdir(object): # used by subscription-manager (src/dnf-plugins/product-id.py) def __init__(self): prefix = '%s-' % dnf.const.PREFIX self.path = tempfile.mkdtemp(prefix=prefix) def __enter__(self): return self.path def __exit__(self, exc_type, exc_value, traceback): rm_rf(self.path) class Bunch(dict): """Dictionary with attribute accessing syntax. In DNF, prefer using this over dnf.yum.misc.GenericHolder. 
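A small illustration:

    b = Bunch(installed=[], erased=[])
    b.installed.append("tour-4-6.noarch")   # same as b["installed"].append(...)
    assert b["erased"] is b.erased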
Credit: Alex Martelli, Doug Hudgeon """ def __init__(self, *args, **kwds): super(Bunch, self).__init__(*args, **kwds) self.__dict__ = self def __hash__(self): return id(self) class MultiCallList(list): def __init__(self, iterable): super(MultiCallList, self).__init__() self.extend(iterable) def __getattr__(self, what): def fn(*args, **kwargs): def call_what(v): method = getattr(v, what) return method(*args, **kwargs) return list(map(call_what, self)) return fn def __setattr__(self, what, val): def setter(item): setattr(item, what, val) return list(map(setter, self)) def _make_lists(transaction): b = Bunch({ 'downgraded': [], 'erased': [], 'erased_clean': [], 'erased_dep': [], 'installed': [], 'installed_group': [], 'installed_dep': [], 'installed_weak': [], 'reinstalled': [], 'upgraded': [], 'failed': [], }) for tsi in transaction: if tsi.state == libdnf.transaction.TransactionItemState_ERROR: b.failed.append(tsi) elif tsi.action == libdnf.transaction.TransactionItemAction_DOWNGRADE: b.downgraded.append(tsi) elif tsi.action == libdnf.transaction.TransactionItemAction_INSTALL: if tsi.reason == libdnf.transaction.TransactionItemReason_GROUP: b.installed_group.append(tsi) elif tsi.reason == libdnf.transaction.TransactionItemReason_DEPENDENCY: b.installed_dep.append(tsi) elif tsi.reason == libdnf.transaction.TransactionItemReason_WEAK_DEPENDENCY: b.installed_weak.append(tsi) else: # TransactionItemReason_USER b.installed.append(tsi) elif tsi.action == libdnf.transaction.TransactionItemAction_REINSTALL: b.reinstalled.append(tsi) elif tsi.action == libdnf.transaction.TransactionItemAction_REMOVE: if tsi.reason == libdnf.transaction.TransactionItemReason_CLEAN: b.erased_clean.append(tsi) elif tsi.reason == libdnf.transaction.TransactionItemReason_DEPENDENCY: b.erased_dep.append(tsi) else: b.erased.append(tsi) elif tsi.action == libdnf.transaction.TransactionItemAction_UPGRADE: b.upgraded.append(tsi) return b def _post_transaction_output(base, transaction, action_callback): """Returns a human-readable summary of the results of the transaction. :param action_callback: function generating output for specific action. It takes two parameters - action as a string and list of affected packages for this action :return: a list of lines containing a human-readable summary of the results of the transaction """ def _tsi_or_pkg_nevra_cmp(item1, item2): """Compares two transaction items or packages by nevra. Used as a fallback when tsi does not contain package object. 
        ret = (item1.name > item2.name) - (item1.name < item2.name)
        if ret != 0:
            return ret
        nevra1 = hawkey.NEVRA(name=item1.name, epoch=item1.epoch,
                              version=item1.version, release=item1.release,
                              arch=item1.arch)
        nevra2 = hawkey.NEVRA(name=item2.name, epoch=item2.epoch,
                              version=item2.version, release=item2.release,
                              arch=item2.arch)
        ret = nevra1.evr_cmp(nevra2, base.sack)
        if ret != 0:
            return ret
        return (item1.arch > item2.arch) - (item1.arch < item2.arch)

    list_bunch = dnf.util._make_lists(transaction)
    skipped_conflicts, skipped_broken = base._skipped_packages(
        report_problems=False, transaction=transaction)
    skipped = skipped_conflicts.union(skipped_broken)
    out = []
    for (action, tsis) in [(_('Upgraded'), list_bunch.upgraded),
                           (_('Downgraded'), list_bunch.downgraded),
                           (_('Installed'), list_bunch.installed +
                            list_bunch.installed_group +
                            list_bunch.installed_weak +
                            list_bunch.installed_dep),
                           (_('Reinstalled'), list_bunch.reinstalled),
                           (_('Skipped'), skipped),
                           (_('Removed'), list_bunch.erased +
                            list_bunch.erased_dep +
                            list_bunch.erased_clean),
                           (_('Failed'), list_bunch.failed)]:
        out.extend(action_callback(
            action, sorted(tsis, key=functools.cmp_to_key(_tsi_or_pkg_nevra_cmp))))
    return out
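
# A minimal sketch of an action_callback compatible with _post_transaction_output()
# above.  Illustrative only, not the callback dnf itself uses; it assumes that
# str(tsi) yields the package NEVRA, which holds for the transaction items and
# package objects that function passes in.
def _example_post_transaction_callback(action, tsis):
    if not tsis:
        return []
    lines = ['%s:' % action]
    lines.extend('  %s' % tsi for tsi in tsis)
    return lines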