__pycache__/__init__.cpython-36.opt-1.pyc000064400000014601147204456360014115 0ustar003 \A@s&ddlmZdgZGdddeZdS))IntEnum HTTPStatusc@seZdZdZdddZdZdZdZdZdZ dZ dZ dZ dZ dZdZdZdZdZdZdZdZdZdZdZdZdZdZdZdZdZdZdZ dZ!dZ"dZ#dZ$dZ%dZ&dZ'dZ(dZ)dZ*dZ+dZ,dZ-dZ.dZ/dZ0dZ1dZ2dZ3dZ4dZ5dZ6dZ7dZ8dZ9dZ:dZ;dZr?r@)rArBrC)rDrEr:)rFrGr:)rHrIrJ)rKrLrM)rNrOrP)rQrRrS)rTrUrV)rWrXrY)rZr[r\)r]r^r_)r`rarb)rcrdre)rfrgrh)rirjrk)rlrmrn)rorprq)rrrsrt)rurvrw)rxryrz)r{r|r})r~r)rr)rr)rr)rrr)rrr)rrr)rrr)rrr)rrr)rrr)rrr)rrr)rr)rr)rr)rr)rrr)>__name__ __module__ __qualname____doc__rZCONTINUEZSWITCHING_PROTOCOLSZ PROCESSINGrZCREATEDZACCEPTEDZNON_AUTHORITATIVE_INFORMATIONZ NO_CONTENTZ RESET_CONTENTZPARTIAL_CONTENTZ MULTI_STATUSZALREADY_REPORTEDZIM_USEDZMULTIPLE_CHOICESZMOVED_PERMANENTLYZFOUNDZ SEE_OTHERZ NOT_MODIFIEDZ USE_PROXYZTEMPORARY_REDIRECTZPERMANENT_REDIRECTZ BAD_REQUESTZ UNAUTHORIZEDZPAYMENT_REQUIREDZ FORBIDDENZ NOT_FOUNDZMETHOD_NOT_ALLOWEDZNOT_ACCEPTABLEZPROXY_AUTHENTICATION_REQUIREDZREQUEST_TIMEOUTZCONFLICTZGONEZLENGTH_REQUIREDZPRECONDITION_FAILEDZREQUEST_ENTITY_TOO_LARGEZREQUEST_URI_TOO_LONGZUNSUPPORTED_MEDIA_TYPEZREQUESTED_RANGE_NOT_SATISFIABLEZEXPECTATION_FAILEDZUNPROCESSABLE_ENTITYZLOCKEDZFAILED_DEPENDENCYZUPGRADE_REQUIREDZPRECONDITION_REQUIREDZTOO_MANY_REQUESTSZREQUEST_HEADER_FIELDS_TOO_LARGEZINTERNAL_SERVER_ERRORZNOT_IMPLEMENTEDZ BAD_GATEWAYZSERVICE_UNAVAILABLEZGATEWAY_TIMEOUTZHTTP_VERSION_NOT_SUPPORTEDZVARIANT_ALSO_NEGOTIATESZINSUFFICIENT_STORAGEZ LOOP_DETECTEDZ NOT_EXTENDEDZNETWORK_AUTHENTICATION_REQUIREDr r r rrs  N)enumr__all__rr r r rs __pycache__/__init__.cpython-36.opt-2.pyc000064400000013535147204456360014123 0ustar003 \A@s&ddlmZdgZGdddeZdS))IntEnum HTTPStatusc@seZdZdddZdZdZdZdZdZdZ dZ dZ dZ dZ dZdZdZdZdZdZdZdZdZdZdZdZdZdZdZdZdZdZdZ 
dZ!dZ"dZ#dZ$dZ%dZ&dZ'dZ(dZ)dZ*dZ+dZ,dZ-dZ.dZ/dZ0dZ1dZ2dZ3dZ4dZ5dZ6dZ7dZ8dZ9dZ:dZ;dZr?r@)rArBrC)rDrEr:)rFrGr:)rHrIrJ)rKrLrM)rNrOrP)rQrRrS)rTrUrV)rWrXrY)rZr[r\)r]r^r_)r`rarb)rcrdre)rfrgrh)rirjrk)rlrmrn)rorprq)rrrsrt)rurvrw)rxryrz)r{r|r})r~r)rr)rr)rr)rrr)rrr)rrr)rrr)rrr)rrr)rrr)rrr)rrr)rr)rr)rr)rr)rrr)=__name__ __module__ __qualname__rZCONTINUEZSWITCHING_PROTOCOLSZ PROCESSINGrZCREATEDZACCEPTEDZNON_AUTHORITATIVE_INFORMATIONZ NO_CONTENTZ RESET_CONTENTZPARTIAL_CONTENTZ MULTI_STATUSZALREADY_REPORTEDZIM_USEDZMULTIPLE_CHOICESZMOVED_PERMANENTLYZFOUNDZ SEE_OTHERZ NOT_MODIFIEDZ USE_PROXYZTEMPORARY_REDIRECTZPERMANENT_REDIRECTZ BAD_REQUESTZ UNAUTHORIZEDZPAYMENT_REQUIREDZ FORBIDDENZ NOT_FOUNDZMETHOD_NOT_ALLOWEDZNOT_ACCEPTABLEZPROXY_AUTHENTICATION_REQUIREDZREQUEST_TIMEOUTZCONFLICTZGONEZLENGTH_REQUIREDZPRECONDITION_FAILEDZREQUEST_ENTITY_TOO_LARGEZREQUEST_URI_TOO_LONGZUNSUPPORTED_MEDIA_TYPEZREQUESTED_RANGE_NOT_SATISFIABLEZEXPECTATION_FAILEDZUNPROCESSABLE_ENTITYZLOCKEDZFAILED_DEPENDENCYZUPGRADE_REQUIREDZPRECONDITION_REQUIREDZTOO_MANY_REQUESTSZREQUEST_HEADER_FIELDS_TOO_LARGEZINTERNAL_SERVER_ERRORZNOT_IMPLEMENTEDZ BAD_GATEWAYZSERVICE_UNAVAILABLEZGATEWAY_TIMEOUTZHTTP_VERSION_NOT_SUPPORTEDZVARIANT_ALSO_NEGOTIATESZINSUFFICIENT_STORAGEZ LOOP_DETECTEDZ NOT_EXTENDEDZNETWORK_AUTHENTICATION_REQUIREDr r r rrs N)enumr__all__rr r r rs __pycache__/__init__.cpython-36.pyc000064400000014601147204456360013156 0ustar003 \A@s&ddlmZdgZGdddeZdS))IntEnum HTTPStatusc@seZdZdZdddZdZdZdZdZdZ dZ dZ dZ dZ dZdZdZdZdZdZdZdZdZdZdZdZdZdZdZdZdZdZdZ dZ!dZ"dZ#dZ$dZ%dZ&dZ'dZ(dZ)dZ*dZ+dZ,dZ-dZ.dZ/dZ0dZ1dZ2dZ3dZ4dZ5dZ6dZ7dZ8dZ9dZ:dZ;dZr?r@)rArBrC)rDrEr:)rFrGr:)rHrIrJ)rKrLrM)rNrOrP)rQrRrS)rTrUrV)rWrXrY)rZr[r\)r]r^r_)r`rarb)rcrdre)rfrgrh)rirjrk)rlrmrn)rorprq)rrrsrt)rurvrw)rxryrz)r{r|r})r~r)rr)rr)rr)rrr)rrr)rrr)rrr)rrr)rrr)rrr)rrr)rrr)rr)rr)rr)rr)rrr)>__name__ __module__ __qualname____doc__rZCONTINUEZSWITCHING_PROTOCOLSZ PROCESSINGrZCREATEDZACCEPTEDZNON_AUTHORITATIVE_INFORMATIONZ NO_CONTENTZ RESET_CONTENTZPARTIAL_CONTENTZ 
MULTI_STATUSZALREADY_REPORTEDZIM_USEDZMULTIPLE_CHOICESZMOVED_PERMANENTLYZFOUNDZ SEE_OTHERZ NOT_MODIFIEDZ USE_PROXYZTEMPORARY_REDIRECTZPERMANENT_REDIRECTZ BAD_REQUESTZ UNAUTHORIZEDZPAYMENT_REQUIREDZ FORBIDDENZ NOT_FOUNDZMETHOD_NOT_ALLOWEDZNOT_ACCEPTABLEZPROXY_AUTHENTICATION_REQUIREDZREQUEST_TIMEOUTZCONFLICTZGONEZLENGTH_REQUIREDZPRECONDITION_FAILEDZREQUEST_ENTITY_TOO_LARGEZREQUEST_URI_TOO_LONGZUNSUPPORTED_MEDIA_TYPEZREQUESTED_RANGE_NOT_SATISFIABLEZEXPECTATION_FAILEDZUNPROCESSABLE_ENTITYZLOCKEDZFAILED_DEPENDENCYZUPGRADE_REQUIREDZPRECONDITION_REQUIREDZTOO_MANY_REQUESTSZREQUEST_HEADER_FIELDS_TOO_LARGEZINTERNAL_SERVER_ERRORZNOT_IMPLEMENTEDZ BAD_GATEWAYZSERVICE_UNAVAILABLEZGATEWAY_TIMEOUTZHTTP_VERSION_NOT_SUPPORTEDZVARIANT_ALSO_NEGOTIATESZINSUFFICIENT_STORAGEZ LOOP_DETECTEDZ NOT_EXTENDEDZNETWORK_AUTHENTICATION_REQUIREDr r r rrs  N)enumr__all__rr r r rs __pycache__/client.cpython-36.opt-1.pyc000064400000104460147204456360013637 0ustar003 f*@srdZddlZddlZddlZddlZddlZddlZddlZddl Z ddl m Z dddddd d d d d ddddddddgZ dZ dZdZdZdZdZejejjddejjjDZdZdZd Zejd!jZejd"jZ ejd#Z!ejd$Z"d%d&d'hZ#dCd)d*Z$Gd+d,d,ej%j&Z'd-d.Z(e'fd/d0Z)Gd1ddej*Z+Gd2ddZ,y ddl-Z-Wne.k rlYnXGd3d4d4e,Z/e j0d4Gd5dde1Z2Gd6dde2Z3Gd7d d e2Z4Gd8dde2Z5Gd9d d e2Z6Gd:d d e2Z7Gd;d d e2Z8Gddde9Z;Gd?dde9ZGdBdde?e=Z@e2ZAdS)Da HTTP/1.1 client library HTTPConnection goes through a number of "states", which define when a client may legally make another request or fetch the response for a particular request. 
This diagram details these state transitions: (null) | | HTTPConnection() v Idle | | putrequest() v Request-started | | ( putheader() )* endheaders() v Request-sent |\_____________________________ | | getresponse() raises | response = getresponse() | ConnectionError v v Unread-response Idle [Response-headers-read] |\____________________ | | | response.read() | putrequest() v v Idle Req-started-unread-response ______/| / | response.read() | | ( putheader() )* endheaders() v v Request-started Req-sent-unread-response | | response.read() v Request-sent This diagram presents the following rules: -- a second request may not be started until {response-headers-read} -- a response [object] cannot be retrieved until {request-sent} -- there is no differentiation between an unread response body and a partially read response body Note: this enforcement is applied by the HTTPConnection class. The HTTPResponse class does not enforce this state machine, which implies sophisticated clients may accelerate the request/response pipeline. Caution should be taken, though: accelerating the states beyond the above pattern may imply knowledge of the server's connection-close behavior for certain requests. For example, it is impossible to tell whether the server will close the connection UNTIL the response headers have been read; this means that further requests cannot be placed into the pipeline until it is known that the server will NOT be closing the connection. 
Logical State __state __response ------------- ------- ---------- Idle _CS_IDLE None Request-started _CS_REQ_STARTED None Request-sent _CS_REQ_SENT None Unread-response _CS_IDLE Req-started-unread-response _CS_REQ_STARTED Req-sent-unread-response _CS_REQ_SENT N)urlsplit HTTPResponseHTTPConnection HTTPException NotConnectedUnknownProtocolUnknownTransferEncodingUnimplementedFileModeIncompleteRead InvalidURLImproperConnectionStateCannotSendRequestCannotSendHeaderResponseNotReady BadStatusLine LineTooLongRemoteDisconnectederror responsesPiZUNKNOWNZIdlezRequest-startedz Request-sentcCsi|] }|j|qS)phrase).0vrr#/usr/lib64/python3.6/http/client.py ksriids[^:\s][^:\r\n]*s\n(?![ \t])|\r(?![ \t\n])z[- ]z[-]ZPATCHZPOSTZPUTdatacCsfy |jdStk r`}z:t|j|j|j|jd|j||j|j|fdWYdd}~XnXdS)zZhstringrrr parse_headerss rEcseZdZd@ddZddZddZd d Zd d Zfd dZfddZ ddZ ddZ dAddZ ddZ ddZddZddZdd Zd!d"Zd#d$Zd%d&ZdCd(d)ZdEd*d+ZdGfd,d- Zd.d/Zd0d1Zd2d3ZdHd4d5Zd6d7Zd8d9Zd:d;Zdd?Z Z!S)IrrNcCsR|jd|_||_||_d|_|_t|_t|_t|_ t|_ t|_ t|_ t|_ dS)Nrb)makefiler= debuglevel_methodr>msg_UNKNOWNversionstatusreasonchunked chunk_leftlength will_close)r0sockrHmethodurlrrr__init__s  zHTTPResponse.__init__cCst|jjtdd}t|tkr*td|jdkrBtdt||sNt dy|j dd\}}}WnFt k ry|j dd\}}d}Wnt k rd}YnXYnX|j d s|j t|y$t|}|d ks|d krt|Wnt k rt|YnX|||fS) Nr*z iso-8859-1z status linerzreply:z-Remote end closed connection without responsezHTTP/ri)strr=r:r;r,rrHprintreprrsplit ValueError startswith _close_connrint)r0r2rLrMrNrrr _read_statuss2    zHTTPResponse._read_statusc Cs|jdk rdSx<|j\}}}|tkr(Pt|j}|jdkrFtd|~qW||_|_|j |_ |dkrrd|_ n|j drd|_ nt |t|j|_|_|jdkrx&|jD]}td|d |jj|qW|jjd }|r|jd krd |_d|_nd |_|j|_d|_|jjd}|jjd }|rx|j rxyt||_Wntk rbd|_YnX|jdkr~d|_nd|_|tks|tksd|kodkns|jdkrd|_|j r|j r|jdkrd |_dS)Nrzheaders:HTTP/1.0HTTP/0.9 zHTTP/1. 
zheader:r)ztransfer-encodingrOTFzcontent-lengthrHEAD)rbrc)r>raZCONTINUEr?r=rHrZcoderMstriprNrLr^rrErJgetr+rOrP _check_closerRrQr`r]Z NO_CONTENTZ NOT_MODIFIEDrI)r0rLrMrNZskipped_headershdrZtr_encrQrrrbegin9s\                   zHTTPResponse.begincCs|jjd}|jdkr:|jjd}|r6d|jkr6dSdS|jjdrJdS|r^d|jkr^dS|jjd}|r~d|jkr~dSdS)NZ connectionrecloseTFz keep-alivezproxy-connection)r>rjrLr+)r0ZconnZpconnrrrrks     zHTTPResponse._check_closecCs|j}d|_|jdS)N)r=rn)r0r=rrrr_szHTTPResponse._close_connc s$ztjWd|jr|jXdS)N)superrnr=r_)r0) __class__rrrnszHTTPResponse.closecstj|jr|jjdS)N)roflushr=)r0)rprrrqs zHTTPResponse.flushcCsdS)zAlways returns TrueTr)r0rrrreadableszHTTPResponse.readablecCs |jdkS)z!True if the connection is closed.N)r=)r0rrrisclosedszHTTPResponse.isclosedc Cs|jdkrdS|jdkr$|jdS|dk rRt|}|j|}t|d|jS|jr`|jS|j dkrv|jj }n6y|j |j }Wnt k r|jYnXd|_ |j|SdS)Nr9rgr) r=rIr_ bytearrayreadinto memoryviewtobytesrO_readall_chunkedrQread _safe_readr )r0amtbr1srrrrys*     zHTTPResponse.readcCs|jdkrdS|jdkr$|jdS|jr4|j|S|jdk r^t||jkr^t|d|j}|jj|}| r~|r~|jn&|jdk r|j|8_|js|j|S)z^Read up to len(b) bytes into bytearray b and return the number of bytes read. 
Nrrg) r=rIr_rO_readinto_chunkedrQr,rvru)r0r|r1rrrrus$        zHTTPResponse.readintoc Csp|jjtd}t|tkr$td|jd}|dkrB|d|}y t|dStk rj|jYnXdS)Nr*z chunk size;r) r=r:r;r,rfindr`r]r_)r0r2irrr_read_next_chunk_size s    z"HTTPResponse._read_next_chunk_sizecCs>x8|jjtd}t|tkr&td|s,P|dkrPqWdS)Nr*z trailer line r8r9)rr8r9)r=r:r;r,r)r0r2rrr_read_and_discard_trailers z&HTTPResponse._read_and_discard_trailerc Csl|j}|sh|dk r|jdy |j}Wntk rDtdYnX|dkrb|j|jd}||_|S)NrWr9r)rPrzrr]r rr_)r0rPrrr_get_chunk_left(s  zHTTPResponse._get_chunk_leftc Csdg}y8x,|j}|dkrP|j|j|d|_qWdj|Stk r^tdj|YnXdS)Nrr9)rr/rzrPrAr )r0valuerPrrrrx@s  zHTTPResponse._readall_chunkedc Csd}t|}yvxp|j}|dkr$|St||krL|j|}|||_||S|d|}|j|}||d}||7}d|_qWWn(tk rtt|d|YnXdS)Nr)rvrr,_safe_readintorPr bytes)r0r| total_bytesmvbrPr1temp_mvbrrrr~Ns$      zHTTPResponse._readinto_chunkedcCsXg}xH|dkrL|jjt|t}|s4tdj|||j||t|8}qWdj|S)aVRead the number of bytes requested, compensating for partial reads. Normally, we have a blocking socket, but a read() can be interrupted by a signal (resulting in a partial read). Note that we cannot distinguish between EOF and an interrupt when zero bytes have been read. IncompleteRead() will be raised in this situation. This function should be used when bytes "should" be present for reading. If the bytes are truly not available (due to EOF), then the IncompleteRead exception can be used to detect the problem. rr9)r=rymin MAXAMOUNTr rAr/r,)r0r{r}chunkrrrrzfs  zHTTPResponse._safe_readcCsd}t|}xt|t|krtt|kr@|dt}|jj|}n |jj|}|sjtt|d|t|||d}||7}qW|S)z2Same as _safe_read, but for reading into a buffer.rN)rvr,rr=rur r)r0r|rrrr1rrrr}s     zHTTPResponse._safe_readintor*c Cs|jdks|jdkrdS|jr(|j|S|jdk rJ|dksD||jkrJ|j}y|jj|}Wn*tk r|dkrt|jjd}YnX| r|r|jn|jdk r|jt|8_|S)zvRead with at most one underlying system call. If at least one byte is buffered, return that instead. 
Nrgr9rrii@) r=rIrO_read1_chunkedrQread1r]r_r,)r0r1resultrrrrs"    zHTTPResponse.read1cCs4|jdks|jdkrdS|jr(|j|S|jj|S)Nrgr9)r=rIrO _peek_chunkedpeek)r0r1rrrrs  zHTTPResponse.peekcs|jdks|jdkrdS|jr*tj|S|jdk rL|dksF||jkrL|j}|jj|}| rl|rl|jn|jdk r|jt|8_|S)Nrgr9r)r=rIrOror:rQr_r,)r0limitr)rprrr:s     zHTTPResponse.readlinecCsf|j}|dks|dkrdSd|ko.|kns8|}|jj|}|jt|8_|sbtd|S)Nrr9)rr=rrPr,r )r0r1rPryrrrrs zHTTPResponse._read1_chunkedc CsBy |j}Wntk r dSX|dkr.dS|jj|d|S)Nr9)rr r=r)r0r1rPrrrrs zHTTPResponse._peek_chunkedcCs |jjS)N)r=fileno)r0rrrrszHTTPResponse.filenocCsH|jdkrt|jj|p|}t|ts6t|d r:|Sdj|SdS)axReturns the value of the header matching *name*. If there are multiple matching headers, the values are combined into a single string separated by commas and spaces. If no matching header is found, returns *default* or None if the *default* is not specified. If the headers are unknown, raises http.client.ResponseNotReady. N__iter__z, )r>rZget_all isinstancerYhasattrrA)r0r%defaultr>rrr getheaders zHTTPResponse.getheadercCs|jdkrtt|jjS)z&Return list of (header, value) tuples.N)r>rlistitems)r0rrr getheaderss zHTTPResponse.getheaderscCs|S)Nr)r0rrrrszHTTPResponse.__iter__cCs|jS)ajReturns an instance of the class mimetools.Message containing meta-information associated with the URL. When the method is HTTP, these headers are those returned by the server at the head of the retrieved HTML page (including Content-Length and Content-Type). When the method is FTP, a Content-Length header will be present if (as is now usual) the server passed back a file length in response to the FTP retrieval request. A Content-Type header will be present if the MIME type can be guessed. When the method is local-file, returned headers will include a Date representing the file's last-modified time, a Content-Length giving file size, and a Content-Type containing a guess at the file's type. See also the description of the mimetools module. )r>)r0rrrinfoszHTTPResponse.infocCs|jS)aZReturn the real URL of the page. 
In some cases, the HTTP server redirects a client to another URL. The urlopen() function handles this transparently, but in some cases the caller needs to know which URL the client was redirected to. The geturl() method can be used to get at this redirected URL. )rU)r0rrrgeturls zHTTPResponse.geturlcCs|jS)zuReturn the HTTP status code that was sent with the response, or None if the URL is not an HTTP URL. )rM)r0rrrgetcodeszHTTPResponse.getcode)rNN)N)rr)rr)r)N)"r4r5r6rVrarmrkr_rnrqrrrsryrurrrrxr~rzrrrr:rrrrrrrrr __classcell__rr)rprrs< !K  "     c@seZdZdZdZeZeZdZ dZ e ddZ e ddZ d ejd fd d Zd0d d ZddZddZddZddZddZddZddZddZd1dd Zd2d!d"Zd#d$Zd%d&Zd3dd'd(d)Zd ifdd'd*d+Zd,d-Z d.d/Z!d S)4rrezHTTP/1.1r*rcCs t|tjS)zFTest whether a file-like object is a text or a binary stream. )rio TextIOBase)streamrrr _is_textIO0szHTTPConnection._is_textIOc Csd|dkr|jtkrdSdSt|dr*dSyt|}|jStk rLYnXt|tr`t|SdS)aGet the content-length based on the body. If the body is None, we set Content-Length: 0 for methods that expect a body (RFC 7230, Section 3.3.2). We also set the Content-Length for any method if the body is a str or bytes-like object and not a file. Nrry) upper_METHODS_EXPECTING_BODYrrvnbytes TypeErrorrrYr,)bodyrTZmvrrr_get_content_length6s   z"HTTPConnection._get_content_lengthNcCs\||_||_d|_g|_d|_t|_d|_d|_d|_ i|_ |j ||\|_ |_ tj|_dS)N)timeoutsource_addressrS_buffer_HTTPConnection__response_CS_IDLE_HTTPConnection__staterI _tunnel_host _tunnel_port_tunnel_headers _get_hostporthostportsocketZcreate_connection_create_connection)r0rrrrrrrrVVszHTTPConnection.__init__cCs<|jrtd|j||\|_|_|r.||_n |jjdS)aDSet up host and port for HTTP CONNECT tunnelling. In a connection that uses HTTP CONNECT tunneling, the host passed to the constructor is used as a proxy server that relays all communication to the endpoint passed to `set_tunnel`. This done by sending an HTTP CONNECT request to the proxy server when the connection is established. 
This method must be called before the HTML connection has been established. The headers argument should be a mapping of extra HTTP headers to send with the CONNECT request. z.Can't set up tunnel for established connectionN)rS RuntimeErrorrrrrclear)r0rrr>rrr set_tunnelis zHTTPConnection.set_tunnelc Cs|dkr|jd}|jd}||kryt||dd}WnHtk r||dddkrh|j}ntd||ddYnX|d|}n|j}|r|ddkr|ddkr|dd }||fS) Nr)]r*rXznonnumeric port: '%s'r[rr)rfindr`r] default_portr )r0rrrjrrrrs   zHTTPConnection._get_hostportcCs ||_dS)N)rH)r0levelrrrset_debuglevelszHTTPConnection.set_debuglevelc Csd|j|jf}|jd}|j|x6|jjD](\}}d||f}|jd}|j|q0W|jd|j|j|jd}|j \}} } | t j j kr|j td| | jfxP|jjtd} t| tkrtd | sP| dkrP|jd krtd | jqWdS)NzCONNECT %s:%d HTTP/1.0 asciiz%s: %s zlatin-1 )rTzTunnel connection failed: %d %sr*z header liner8r9rzheader:)rr8r9)rrrsendrrresponse_classrSrIrahttp HTTPStatusZOKrnOSErrorrir=r:r;r,rrHrZrB) r0Z connect_strZ connect_bytesheaderrZ header_strZ header_bytesresponserLrhmessager2rrr_tunnels2         zHTTPConnection._tunnelcCsB|j|j|jf|j|j|_|jjtjtj d|j r>|j dS)z3Connect to the host and port specified in __init__.r*N) rrrrrrSZ setsockoptrZ IPPROTO_TCPZ TCP_NODELAYrr)r0rrrconnects zHTTPConnection.connectc CsBt|_z|j}|r d|_|jWd|j}|rrrrrrszHTTPConnection.requestc Cstdd|D}i}d|kr&d|d<d|kr6d|d<|j||f|d|krd |krd }|j||}|dkr|dk r|jd krtd |d }|jddq|jdt|nd }x |jD]\} } |j| | qWt|trt |d}|j ||ddS)Ncss|]}|jVqdS)N)r+)rkrrr sz/HTTPConnection._send_request..rr*rzaccept-encodingrzcontent-lengthztransfer-encodingFrzUnable to determine size of %rTzTransfer-EncodingrOzContent-Lengthr)r) frozensetrrrHrZrrYrrr'r) r0rTrUrr>rZ header_namesZskipsZcontent_lengthrlrrrrrs0      zHTTPConnection._send_requestcCs|jr|jjrd|_|jtks&|jr0t|j|jdkrR|j|j|j|jd}n|j|j|jd}yLy |j Wnt k r|j YnXt |_|j r|j n||_|S|j YnXdS)a)Get the response from the server. If the HTTPConnection is in the correct state, returns an instance of HTTPResponse or of whatever object is returned by the response_class variable. 
If a request has not been sent or if a previous response has not be handled, ResponseNotReady is raised. If the HTTP response indicates that the connection should be closed, then it will be closed before the response is returned. When the connection is closed, the underlying socket is closed. Nr)rT)rrsrrrrHrrSrIrmConnectionErrorrnrrR)r0rrrr getresponse)s,      zHTTPConnection.getresponse)NN)NF)FF)N)"r4r5r6rrrr HTTP_PORTrrrH staticmethodrrr_GLOBAL_DEFAULT_TIMEOUTrVrrrrrrnrrrrrrrrrrrrrrrr&s<     ' 6   .csFeZdZdZeZdddejdfdddfdd ZfddZ Z S)HTTPSConnectionz(This class allows communication via SSL.N)contextcheck_hostnamec stt|j|||||dk s.|dk s.|dk rDddl} | jdtd||_||_|dkrptj }|j dk rpd|_ |j tj k} |dkr|j }|r| rtd|s|r|j|||j dk rd|_ ||_||_dS)NrzTkey_file, cert_file and check_hostname are deprecated, use a custom context instead.rWTzMcheck_hostname needs a SSL context with either CERT_OPTIONAL or CERT_REQUIRED)rorrVwarningswarnDeprecationWarningkey_file cert_filesslZ_create_default_https_contextZpost_handshake_authZ verify_modeZ CERT_NONErr]Zload_cert_chain_context_check_hostname) r0rrrr rrrrrZ will_verify)rprrrVts0     zHTTPSConnection.__init__c stj|jr|j}n|j}|jj|j|d|_|jj r|jryt j |jj |Wn.t k r|jj tj|jjYnXdS)z(Connect to a host on a given (SSL) port.)server_hostnameN)rorrrr Z wrap_socketrSrr r Zmatch_hostnameZ getpeercert ExceptionZshutdownrZ SHUT_RDWRrn)r0r )rprrrs    zHTTPSConnection.connect) r4r5r6__doc__ HTTPS_PORTrrrrVrrrr)rprrmsrc@s eZdZdS)rN)r4r5r6rrrrrsc@s eZdZdS)rN)r4r5r6rrrrrsc@s eZdZdS)r N)r4r5r6rrrrr sc@seZdZddZdS)rcCs|f|_||_dS)N)argsrL)r0rLrrrrVszUnknownProtocol.__init__N)r4r5r6rVrrrrrsc@s eZdZdS)rN)r4r5r6rrrrrsc@s eZdZdS)r N)r4r5r6rrrrr sc@s&eZdZdddZddZddZdS) r NcCs|f|_||_||_dS)N)rpartialexpected)r0rrrrrrVszIncompleteRead.__init__cCs2|jdk rd|j}nd}d|jjt|j|fS)Nz, %i more expectedrXz%s(%i bytes read%s))rrpr4r,r)r0errr__repr__s   zIncompleteRead.__repr__cCst|S)N)r[)r0rrr__str__szIncompleteRead.__str__)N)r4r5r6rVrrrrrrr s c@s eZdZdS)r N)r4r5r6rrrrr sc@s eZdZdS)r 
N)r4r5r6rrrrr sc@s eZdZdS)rN)r4r5r6rrrrrsc@s eZdZdS)rN)r4r5r6rrrrrsc@seZdZddZdS)rcCs|s t|}|f|_||_dS)N)r[rr2)r0r2rrrrVszBadStatusLine.__init__N)r4r5r6rVrrrrrsc@seZdZddZdS)rcCstj|dt|fdS)Nz&got more than %d bytes when reading %s)rrVr;)r0Z line_typerrrrVszLineTooLong.__init__N)r4r5r6rVrrrrrsc@seZdZddZdS)rcOs"tj|dtj|f||dS)NrX)rrVConnectionResetError)r0poskwrrrrVs zRemoteDisconnected.__init__N)r4r5r6rVrrrrrs)r)BrZ email.parserrCZ email.messagerrosrerrZ urllib.parser__all__rrrKrrrglobalsupdater __members__rrrr;r<compile fullmatchrrrrrrr'rZMessager(r?rEBufferedIOBaserrr  ImportErrorrr/rrrr rrr r r r rrrrrrrrrrrEs        9F = __pycache__/client.cpython-36.opt-2.pyc000064400000062146147204456360013644 0ustar003 f*@snddlZddlZddlZddlZddlZddlZddlZddlZddl m Z ddddddd d d d d dddddddgZ dZ dZ dZdZdZdZejejjddejjjDZdZdZdZejd jZejd!jZejd"Z ejd#Z!d$d%d&hZ"dBd(d)Z#Gd*d+d+ej$j%Z&d,d-Z'e&fd.d/Z(Gd0ddej)Z*Gd1ddZ+y ddl,Z,Wne-k rhYnXGd2d3d3e+Z.e j/d3Gd4dde0Z1Gd5dde1Z2Gd6d d e1Z3Gd7dde1Z4Gd8dde1Z5Gd9d d e1Z6Gd:d d e1Z7Gd;d d e1Z8Gddde8Z;Gd?dde1Ze<Z?e1Z@dS)CN)urlsplit HTTPResponseHTTPConnection HTTPException NotConnectedUnknownProtocolUnknownTransferEncodingUnimplementedFileModeIncompleteRead InvalidURLImproperConnectionStateCannotSendRequestCannotSendHeaderResponseNotReady BadStatusLine LineTooLongRemoteDisconnectederror responsesPiZUNKNOWNZIdlezRequest-startedz Request-sentcCsi|] }|j|qS)phrase).0vrr#/usr/lib64/python3.6/http/client.py ksriids[^:\s][^:\r\n]*s\n(?![ \t])|\r(?![ \t\n])z[- ]z[-]ZPATCHZPOSTZPUTdatacCsfy |jdStk r`}z:t|j|j|j|jd|j||j|j|fdWYdd}~XnXdS)Nzlatin-1z`%s (%.20r) is not valid Latin-1. 
Use %s.encode('utf-8') if you want to send it encoded in UTF-8.)encodeUnicodeEncodeErrorencodingobjectstartendtitle)rnameerrrrr_encodes r'c@seZdZddZdS) HTTPMessagecCsn|jd}t|}g}d}xL|jD]@}|d|j|krDd}n|ddjsXd}|r&|j|q&W|S)N:r)lowerlenkeysisspaceappend)selfr%nZlstZhitlinerrrgetallmatchingheaderss z!HTTPMessage.getallmatchingheadersN)__name__ __module__ __qualname__r3rrrrr(sr(cCs\g}xR|jtd}t|tkr(td|j|t|tkrJtdt|dkrPqW|S)Nr*z header linezgot more than %d headers  )r7r8r9)readline_MAXLINEr,rr/ _MAXHEADERSr)fpheadersr2rrr _read_headerss    r?cCs,t|}dj|jd}tjj|dj|S)Nr9z iso-8859-1)_class)r?joindecodeemailparserZParserZparsestr)r=r@r>Zhstringrrr parse_headerss rEcseZdZd@ddZddZddZd d Zd d Zfd dZfddZ ddZ ddZ dAddZ ddZ ddZddZddZdd Zd!d"Zd#d$Zd%d&ZdCd(d)ZdEd*d+ZdGfd,d- Zd.d/Zd0d1Zd2d3ZdHd4d5Zd6d7Zd8d9Zd:d;Zdd?Z Z!S)IrrNcCsR|jd|_||_||_d|_|_t|_t|_t|_ t|_ t|_ t|_ t|_ dS)Nrb)makefiler= debuglevel_methodr>msg_UNKNOWNversionstatusreasonchunked chunk_leftlength will_close)r0sockrHmethodurlrrr__init__s  zHTTPResponse.__init__cCst|jjtdd}t|tkr*td|jdkrBtdt||sNt dy|j dd\}}}WnFt k ry|j dd\}}d}Wnt k rd}YnXYnX|j d s|j t|y$t|}|d ks|d krt|Wnt k rt|YnX|||fS) Nr*z iso-8859-1z status linerzreply:z-Remote end closed connection without responsezHTTP/ri)strr=r:r;r,rrHprintreprrsplit ValueError startswith _close_connrint)r0r2rLrMrNrrr _read_statuss2    zHTTPResponse._read_statusc Cs|jdk rdSx<|j\}}}|tkr(Pt|j}|jdkrFtd|~qW||_|_|j |_ |dkrrd|_ n|j drd|_ nt |t|j|_|_|jdkrx&|jD]}td|d |jj|qW|jjd }|r|jd krd |_d|_nd |_|j|_d|_|jjd}|jjd }|rx|j rxyt||_Wntk rbd|_YnX|jdkr~d|_nd|_|tks|tksd|kodkns|jdkrd|_|j r|j r|jdkrd |_dS)Nrzheaders:HTTP/1.0HTTP/0.9 zHTTP/1. 
zheader:r)ztransfer-encodingrOTFzcontent-lengthrHEAD)rbrc)r>raZCONTINUEr?r=rHrZcoderMstriprNrLr^rrErJgetr+rOrP _check_closerRrQr`r]Z NO_CONTENTZ NOT_MODIFIEDrI)r0rLrMrNZskipped_headershdrZtr_encrQrrrbegin9s\                   zHTTPResponse.begincCs|jjd}|jdkr:|jjd}|r6d|jkr6dSdS|jjdrJdS|r^d|jkr^dS|jjd}|r~d|jkr~dSdS)NZ connectionrecloseTFz keep-alivezproxy-connection)r>rjrLr+)r0ZconnZpconnrrrrks     zHTTPResponse._check_closecCs|j}d|_|jdS)N)r=rn)r0r=rrrr_szHTTPResponse._close_connc s$ztjWd|jr|jXdS)N)superrnr=r_)r0) __class__rrrnszHTTPResponse.closecstj|jr|jjdS)N)roflushr=)r0)rprrrqs zHTTPResponse.flushcCsdS)NTr)r0rrrreadableszHTTPResponse.readablecCs |jdkS)N)r=)r0rrrisclosedszHTTPResponse.isclosedc Cs|jdkrdS|jdkr$|jdS|dk rRt|}|j|}t|d|jS|jr`|jS|j dkrv|jj }n6y|j |j }Wnt k r|jYnXd|_ |j|SdS)Nr9rgr) r=rIr_ bytearrayreadinto memoryviewtobytesrO_readall_chunkedrQread _safe_readr )r0amtbr1srrrrys*     zHTTPResponse.readcCs|jdkrdS|jdkr$|jdS|jr4|j|S|jdk r^t||jkr^t|d|j}|jj|}| r~|r~|jn&|jdk r|j|8_|js|j|S)Nrrg) r=rIr_rO_readinto_chunkedrQr,rvru)r0r|r1rrrrus$        zHTTPResponse.readintoc Csp|jjtd}t|tkr$td|jd}|dkrB|d|}y t|dStk rj|jYnXdS)Nr*z chunk size;r) r=r:r;r,rfindr`r]r_)r0r2irrr_read_next_chunk_size s    z"HTTPResponse._read_next_chunk_sizecCs>x8|jjtd}t|tkr&td|s,P|dkrPqWdS)Nr*z trailer line r8r9)rr8r9)r=r:r;r,r)r0r2rrr_read_and_discard_trailers z&HTTPResponse._read_and_discard_trailerc Csl|j}|sh|dk r|jdy |j}Wntk rDtdYnX|dkrb|j|jd}||_|S)NrWr9r)rPrzrr]r rr_)r0rPrrr_get_chunk_left(s  zHTTPResponse._get_chunk_leftc Csdg}y8x,|j}|dkrP|j|j|d|_qWdj|Stk r^tdj|YnXdS)Nrr9)rr/rzrPrAr )r0valuerPrrrrx@s  zHTTPResponse._readall_chunkedc Csd}t|}yvxp|j}|dkr$|St||krL|j|}|||_||S|d|}|j|}||d}||7}d|_qWWn(tk rtt|d|YnXdS)Nr)rvrr,_safe_readintorPr bytes)r0r| total_bytesmvbrPr1temp_mvbrrrr~Ns$      zHTTPResponse._readinto_chunkedcCsXg}xH|dkrL|jjt|t}|s4tdj|||j||t|8}qWdj|S)Nrr9)r=rymin MAXAMOUNTr rAr/r,)r0r{r}chunkrrrrzfs  zHTTPResponse._safe_readcCsd}t|}xt|t|krtt|kr@|dt}|jj|}n 
|jj|}|sjtt|d|t|||d}||7}qW|S)Nr)rvr,rr=rur r)r0r|rrrr1rrrr}s     zHTTPResponse._safe_readintor*c Cs|jdks|jdkrdS|jr(|j|S|jdk rJ|dksD||jkrJ|j}y|jj|}Wn*tk r|dkrt|jjd}YnX| r|r|jn|jdk r|jt|8_|S)Nrgr9rrii@) r=rIrO_read1_chunkedrQread1r]r_r,)r0r1resultrrrrs"    zHTTPResponse.read1cCs4|jdks|jdkrdS|jr(|j|S|jj|S)Nrgr9)r=rIrO _peek_chunkedpeek)r0r1rrrrs  zHTTPResponse.peekcs|jdks|jdkrdS|jr*tj|S|jdk rL|dksF||jkrL|j}|jj|}| rl|rl|jn|jdk r|jt|8_|S)Nrgr9r)r=rIrOror:rQr_r,)r0limitr)rprrr:s     zHTTPResponse.readlinecCsf|j}|dks|dkrdSd|ko.|kns8|}|jj|}|jt|8_|sbtd|S)Nrr9)rr=rrPr,r )r0r1rPryrrrrs zHTTPResponse._read1_chunkedc CsBy |j}Wntk r dSX|dkr.dS|jj|d|S)Nr9)rr r=r)r0r1rPrrrrs zHTTPResponse._peek_chunkedcCs |jjS)N)r=fileno)r0rrrrszHTTPResponse.filenocCsH|jdkrt|jj|p|}t|ts6t|d r:|Sdj|SdS)N__iter__z, )r>rZget_all isinstancerYhasattrrA)r0r%defaultr>rrr getheaders zHTTPResponse.getheadercCs|jdkrtt|jjS)N)r>rlistitems)r0rrr getheaderss zHTTPResponse.getheaderscCs|S)Nr)r0rrrrszHTTPResponse.__iter__cCs|jS)N)r>)r0rrrinfoszHTTPResponse.infocCs|jS)N)rU)r0rrrgeturls zHTTPResponse.geturlcCs|jS)N)rM)r0rrrgetcodeszHTTPResponse.getcode)rNN)N)rr)rr)r)N)"r4r5r6rVrarmrkr_rnrqrrrsryrurrrrxr~rzrrrr:rrrrrrrrr __classcell__rr)rprrs< !K  "     c@seZdZdZdZeZeZdZ dZ e ddZ e ddZ d ejd fd d Zd0d d ZddZddZddZddZddZddZddZddZd1dd Zd2d!d"Zd#d$Zd%d&Zd3dd'd(d)Zd ifdd'd*d+Zd,d-Z d.d/Z!d S)4rrezHTTP/1.1r*rcCs t|tjS)N)rio TextIOBase)streamrrr _is_textIO0szHTTPConnection._is_textIOc Csd|dkr|jtkrdSdSt|dr*dSyt|}|jStk rLYnXt|tr`t|SdS)Nrry) upper_METHODS_EXPECTING_BODYrrvnbytes TypeErrorrrYr,)bodyrTZmvrrr_get_content_length6s   z"HTTPConnection._get_content_lengthNcCs\||_||_d|_g|_d|_t|_d|_d|_d|_ i|_ |j ||\|_ |_ tj|_dS)N)timeoutsource_addressrS_buffer_HTTPConnection__response_CS_IDLE_HTTPConnection__staterI _tunnel_host _tunnel_port_tunnel_headers _get_hostporthostportsocketZcreate_connection_create_connection)r0rrrrrrrrVVszHTTPConnection.__init__cCs<|jrtd|j||\|_|_|r.||_n |jjdS)Nz.Can't set up tunnel for established 
connection)rS RuntimeErrorrrrrclear)r0rrr>rrr set_tunnelis zHTTPConnection.set_tunnelc Cs|dkr|jd}|jd}||kryt||dd}WnHtk r||dddkrh|j}ntd||ddYnX|d|}n|j}|r|ddkr|ddkr|dd }||fS) Nr)]r*rXznonnumeric port: '%s'r[rr)rfindr`r] default_portr )r0rrrjrrrrs   zHTTPConnection._get_hostportcCs ||_dS)N)rH)r0levelrrrset_debuglevelszHTTPConnection.set_debuglevelc Csd|j|jf}|jd}|j|x6|jjD](\}}d||f}|jd}|j|q0W|jd|j|j|jd}|j \}} } | t j j kr|j td| | jfxP|jjtd} t| tkrtd | sP| dkrP|jd krtd | jqWdS)NzCONNECT %s:%d HTTP/1.0 asciiz%s: %s zlatin-1 )rTzTunnel connection failed: %d %sr*z header liner8r9rzheader:)rr8r9)rrrsendrrresponse_classrSrIrahttp HTTPStatusZOKrnOSErrorrir=r:r;r,rrHrZrB) r0Z connect_strZ connect_bytesheaderrZ header_strZ header_bytesresponserLrhmessager2rrr_tunnels2         zHTTPConnection._tunnelcCsB|j|j|jf|j|j|_|jjtjtj d|j r>|j dS)Nr*) rrrrrrSZ setsockoptrZ IPPROTO_TCPZ TCP_NODELAYrr)r0rrrconnects zHTTPConnection.connectc CsBt|_z|j}|r d|_|jWd|j}|rrrrrrszHTTPConnection.requestc Cstdd|D}i}d|kr&d|d<d|kr6d|d<|j||f|d|krd |krd }|j||}|dkr|dk r|jd krtd |d }|jddq|jdt|nd }x |jD]\} } |j| | qWt|trt |d}|j ||ddS)Ncss|]}|jVqdS)N)r+)rkrrr sz/HTTPConnection._send_request..rr*rzaccept-encodingrzcontent-lengthztransfer-encodingFrzUnable to determine size of %rTzTransfer-EncodingrOzContent-Lengthr)r) frozensetrrrHrZrrYrrr'r) r0rTrUrr>rZ header_namesZskipsZcontent_lengthrlrrrrrs0      zHTTPConnection._send_requestcCs|jr|jjrd|_|jtks&|jr0t|j|jdkrR|j|j|j|jd}n|j|j|jd}yLy |j Wnt k r|j YnXt |_|j r|j n||_|S|j YnXdS)Nr)rT)rrsrrrrHrrSrIrmConnectionErrorrnrrR)r0rrrr getresponse)s,      zHTTPConnection.getresponse)NN)NF)FF)N)"r4r5r6rrrr HTTP_PORTrrrH staticmethodrrr_GLOBAL_DEFAULT_TIMEOUTrVrrrrrrnrrrrrrrrrrrrrrrr&s<     ' 6   .csBeZdZeZdddejdfdddfdd ZfddZZ S)HTTPSConnectionN)contextcheck_hostnamec stt|j|||||dk s.|dk s.|dk rDddl} | jdtd||_||_|dkrptj }|j dk rpd|_ |j tj k} |dkr|j }|r| rtd|s|r|j|||j dk rd|_ ||_||_dS)NrzTkey_file, cert_file and check_hostname are deprecated, use a custom context 
instead.rWTzMcheck_hostname needs a SSL context with either CERT_OPTIONAL or CERT_REQUIRED)rorrVwarningswarnDeprecationWarningkey_file cert_filesslZ_create_default_https_contextZpost_handshake_authZ verify_modeZ CERT_NONErr]Zload_cert_chain_context_check_hostname) r0rrrr rrrrrZ will_verify)rprrrVts0     zHTTPSConnection.__init__c stj|jr|j}n|j}|jj|j|d|_|jj r|jryt j |jj |Wn.t k r|jj tj|jjYnXdS)N)server_hostname)rorrrr Z wrap_socketrSrr r Zmatch_hostnameZ getpeercert ExceptionZshutdownrZ SHUT_RDWRrn)r0r )rprrrs    zHTTPSConnection.connect) r4r5r6 HTTPS_PORTrrrrVrrrr)rprrms rc@s eZdZdS)rN)r4r5r6rrrrrsc@s eZdZdS)rN)r4r5r6rrrrrsc@s eZdZdS)r N)r4r5r6rrrrr sc@seZdZddZdS)rcCs|f|_||_dS)N)argsrL)r0rLrrrrVszUnknownProtocol.__init__N)r4r5r6rVrrrrrsc@s eZdZdS)rN)r4r5r6rrrrrsc@s eZdZdS)r N)r4r5r6rrrrr sc@s&eZdZdddZddZddZdS) r NcCs|f|_||_||_dS)N)rpartialexpected)r0rrrrrrVszIncompleteRead.__init__cCs2|jdk rd|j}nd}d|jjt|j|fS)Nz, %i more expectedrXz%s(%i bytes read%s))rrpr4r,r)r0errr__repr__s   zIncompleteRead.__repr__cCst|S)N)r[)r0rrr__str__szIncompleteRead.__str__)N)r4r5r6rVrrrrrrr s c@s eZdZdS)r N)r4r5r6rrrrr sc@s eZdZdS)r N)r4r5r6rrrrr sc@s eZdZdS)rN)r4r5r6rrrrrsc@s eZdZdS)rN)r4r5r6rrrrrsc@seZdZddZdS)rcCs|s t|}|f|_||_dS)N)r[rr2)r0r2rrrrVszBadStatusLine.__init__N)r4r5r6rVrrrrrsc@seZdZddZdS)rcCstj|dt|fdS)Nz&got more than %d bytes when reading %s)rrVr;)r0Z line_typerrrrVszLineTooLong.__init__N)r4r5r6rVrrrrrsc@seZdZddZdS)rcOs"tj|dtj|f||dS)NrX)rrVConnectionResetError)r0poskwrrrrVs zRemoteDisconnected.__init__N)r4r5r6rVrrrrrs)r)AZ email.parserrCZ email.messagerrosrerrZ urllib.parser__all__rrrKrrrglobalsupdater __members__rrrr;r<compile fullmatchrrrrrrr'rZMessager(r?rEBufferedIOBaserrr  ImportErrorrr/rrrr rrr r r r rrrrrrrrrrrGs        9F = __pycache__/client.cpython-36.pyc000064400000104623147204456360012701 0ustar003 f*@srdZddlZddlZddlZddlZddlZddlZddlZddl Z ddl m Z dddddd d d d d ddddddddgZ dZ dZdZdZdZdZejejjddejjjDZdZdZd Zejd!jZejd"jZ 
ejd#Z!ejd$Z"d%d&d'hZ#dCd)d*Z$Gd+d,d,ej%j&Z'd-d.Z(e'fd/d0Z)Gd1ddej*Z+Gd2ddZ,y ddl-Z-Wne.k rlYnXGd3d4d4e,Z/e j0d4Gd5dde1Z2Gd6dde2Z3Gd7d d e2Z4Gd8dde2Z5Gd9d d e2Z6Gd:d d e2Z7Gd;d d e2Z8Gddde9Z;Gd?dde9ZGdBdde?e=Z@e2ZAdS)Da HTTP/1.1 client library HTTPConnection goes through a number of "states", which define when a client may legally make another request or fetch the response for a particular request. This diagram details these state transitions: (null) | | HTTPConnection() v Idle | | putrequest() v Request-started | | ( putheader() )* endheaders() v Request-sent |\_____________________________ | | getresponse() raises | response = getresponse() | ConnectionError v v Unread-response Idle [Response-headers-read] |\____________________ | | | response.read() | putrequest() v v Idle Req-started-unread-response ______/| / | response.read() | | ( putheader() )* endheaders() v v Request-started Req-sent-unread-response | | response.read() v Request-sent This diagram presents the following rules: -- a second request may not be started until {response-headers-read} -- a response [object] cannot be retrieved until {request-sent} -- there is no differentiation between an unread response body and a partially read response body Note: this enforcement is applied by the HTTPConnection class. The HTTPResponse class does not enforce this state machine, which implies sophisticated clients may accelerate the request/response pipeline. Caution should be taken, though: accelerating the states beyond the above pattern may imply knowledge of the server's connection-close behavior for certain requests. For example, it is impossible to tell whether the server will close the connection UNTIL the response headers have been read; this means that further requests cannot be placed into the pipeline until it is known that the server will NOT be closing the connection. 
Logical State __state __response ------------- ------- ---------- Idle _CS_IDLE None Request-started _CS_REQ_STARTED None Request-sent _CS_REQ_SENT None Unread-response _CS_IDLE Req-started-unread-response _CS_REQ_STARTED Req-sent-unread-response _CS_REQ_SENT N)urlsplit HTTPResponseHTTPConnection HTTPException NotConnectedUnknownProtocolUnknownTransferEncodingUnimplementedFileModeIncompleteRead InvalidURLImproperConnectionStateCannotSendRequestCannotSendHeaderResponseNotReady BadStatusLine LineTooLongRemoteDisconnectederror responsesPiZUNKNOWNZIdlezRequest-startedz Request-sentcCsi|] }|j|qS)phrase).0vrr#/usr/lib64/python3.6/http/client.py ksriids[^:\s][^:\r\n]*s\n(?![ \t])|\r(?![ \t\n])z[- ]z[-]ZPATCHZPOSTZPUTdatacCsfy |jdStk r`}z:t|j|j|j|jd|j||j|j|fdWYdd}~XnXdS)zZhstringrrr parse_headerss rEcseZdZd@ddZddZddZd d Zd d Zfd dZfddZ ddZ ddZ dAddZ ddZ ddZddZddZdd Zd!d"Zd#d$Zd%d&ZdCd(d)ZdEd*d+ZdGfd,d- Zd.d/Zd0d1Zd2d3ZdHd4d5Zd6d7Zd8d9Zd:d;Zdd?Z Z!S)IrrNcCsR|jd|_||_||_d|_|_t|_t|_t|_ t|_ t|_ t|_ t|_ dS)Nrb)makefiler= debuglevel_methodr>msg_UNKNOWNversionstatusreasonchunked chunk_leftlength will_close)r0sockrHmethodurlrrr__init__s  zHTTPResponse.__init__cCst|jjtdd}t|tkr*td|jdkrBtdt||sNt dy|j dd\}}}WnFt k ry|j dd\}}d}Wnt k rd}YnXYnX|j d s|j t|y$t|}|d ks|d krt|Wnt k rt|YnX|||fS) Nr*z iso-8859-1z status linerzreply:z-Remote end closed connection without responsezHTTP/ri)strr=r:r;r,rrHprintreprrsplit ValueError startswith _close_connrint)r0r2rLrMrNrrr _read_statuss2    zHTTPResponse._read_statusc Cs|jdk rdSx<|j\}}}|tkr(Pt|j}|jdkrFtd|~qW||_|_|j |_ |dkrrd|_ n|j drd|_ nt |t|j|_|_|jdkrx&|jD]}td|d |jj|qW|jjd }|r|jd krd |_d|_nd |_|j|_d|_|jjd}|jjd }|rx|j rxyt||_Wntk rbd|_YnX|jdkr~d|_nd|_|tks|tksd|kodkns|jdkrd|_|j r|j r|jdkrd |_dS)Nrzheaders:HTTP/1.0HTTP/0.9 zHTTP/1. 
zheader:r)ztransfer-encodingrOTFzcontent-lengthrHEAD)rbrc)r>raZCONTINUEr?r=rHrZcoderMstriprNrLr^rrErJgetr+rOrP _check_closerRrQr`r]Z NO_CONTENTZ NOT_MODIFIEDrI)r0rLrMrNZskipped_headershdrZtr_encrQrrrbegin9s\                   zHTTPResponse.begincCs|jjd}|jdkr:|jjd}|r6d|jkr6dSdS|jjdrJdS|r^d|jkr^dS|jjd}|r~d|jkr~dSdS)NZ connectionrecloseTFz keep-alivezproxy-connection)r>rjrLr+)r0ZconnZpconnrrrrks     zHTTPResponse._check_closecCs|j}d|_|jdS)N)r=rn)r0r=rrrr_szHTTPResponse._close_connc s$ztjWd|jr|jXdS)N)superrnr=r_)r0) __class__rrrnszHTTPResponse.closecstj|jr|jjdS)N)roflushr=)r0)rprrrqs zHTTPResponse.flushcCsdS)zAlways returns TrueTr)r0rrrreadableszHTTPResponse.readablecCs |jdkS)z!True if the connection is closed.N)r=)r0rrrisclosedszHTTPResponse.isclosedc Cs|jdkrdS|jdkr$|jdS|dk rRt|}|j|}t|d|jS|jr`|jS|j dkrv|jj }n6y|j |j }Wnt k r|jYnXd|_ |j|SdS)Nr9rgr) r=rIr_ bytearrayreadinto memoryviewtobytesrO_readall_chunkedrQread _safe_readr )r0amtbr1srrrrys*     zHTTPResponse.readcCs|jdkrdS|jdkr$|jdS|jr4|j|S|jdk r^t||jkr^t|d|j}|jj|}| r~|r~|jn&|jdk r|j|8_|js|j|S)z^Read up to len(b) bytes into bytearray b and return the number of bytes read. 
Nrrg) r=rIr_rO_readinto_chunkedrQr,rvru)r0r|r1rrrrus$        zHTTPResponse.readintoc Csp|jjtd}t|tkr$td|jd}|dkrB|d|}y t|dStk rj|jYnXdS)Nr*z chunk size;r) r=r:r;r,rfindr`r]r_)r0r2irrr_read_next_chunk_size s    z"HTTPResponse._read_next_chunk_sizecCs>x8|jjtd}t|tkr&td|s,P|dkrPqWdS)Nr*z trailer line r8r9)rr8r9)r=r:r;r,r)r0r2rrr_read_and_discard_trailers z&HTTPResponse._read_and_discard_trailerc Csl|j}|sh|dk r|jdy |j}Wntk rDtdYnX|dkrb|j|jd}||_|S)NrWr9r)rPrzrr]r rr_)r0rPrrr_get_chunk_left(s  zHTTPResponse._get_chunk_leftc Csr|jtkstg}y8x,|j}|dkr(P|j|j|d|_qWdj|Stk rltdj|YnXdS)Nrr9) rOrKAssertionErrorrr/rzrPrAr )r0valuerPrrrrx@s  zHTTPResponse._readall_chunkedc Cs|jtkstd}t|}yvxp|j}|dkr2|St||krZ|j|}|||_||S|d|}|j|}||d}||7}d|_qWWn(tk rtt |d|YnXdS)Nr) rOrKrrvrr,_safe_readintorPr bytes)r0r| total_bytesmvbrPr1temp_mvbrrrr~Ns&      zHTTPResponse._readinto_chunkedcCsXg}xH|dkrL|jjt|t}|s4tdj|||j||t|8}qWdj|S)aVRead the number of bytes requested, compensating for partial reads. Normally, we have a blocking socket, but a read() can be interrupted by a signal (resulting in a partial read). Note that we cannot distinguish between EOF and an interrupt when zero bytes have been read. IncompleteRead() will be raised in this situation. This function should be used when bytes "should" be present for reading. If the bytes are truly not available (due to EOF), then the IncompleteRead exception can be used to detect the problem. rr9)r=rymin MAXAMOUNTr rAr/r,)r0r{r}chunkrrrrzfs  zHTTPResponse._safe_readcCsd}t|}xt|t|krtt|kr@|dt}|jj|}n |jj|}|sjtt|d|t|||d}||7}qW|S)z2Same as _safe_read, but for reading into a buffer.rN)rvr,rr=rur r)r0r|rrrr1rrrr}s     zHTTPResponse._safe_readintor*c Cs|jdks|jdkrdS|jr(|j|S|jdk rJ|dksD||jkrJ|j}y|jj|}Wn*tk r|dkrt|jjd}YnX| r|r|jn|jdk r|jt|8_|S)zvRead with at most one underlying system call. If at least one byte is buffered, return that instead. 
Nrgr9rrii@) r=rIrO_read1_chunkedrQread1r]r_r,)r0r1resultrrrrs"    zHTTPResponse.read1cCs4|jdks|jdkrdS|jr(|j|S|jj|S)Nrgr9)r=rIrO _peek_chunkedpeek)r0r1rrrrs  zHTTPResponse.peekcs|jdks|jdkrdS|jr*tj|S|jdk rL|dksF||jkrL|j}|jj|}| rl|rl|jn|jdk r|jt|8_|S)Nrgr9r)r=rIrOror:rQr_r,)r0limitr)rprrr:s     zHTTPResponse.readlinecCsf|j}|dks|dkrdSd|ko.|kns8|}|jj|}|jt|8_|sbtd|S)Nrr9)rr=rrPr,r )r0r1rPryrrrrs zHTTPResponse._read1_chunkedc CsBy |j}Wntk r dSX|dkr.dS|jj|d|S)Nr9)rr r=r)r0r1rPrrrrs zHTTPResponse._peek_chunkedcCs |jjS)N)r=fileno)r0rrrrszHTTPResponse.filenocCsH|jdkrt|jj|p|}t|ts6t|d r:|Sdj|SdS)axReturns the value of the header matching *name*. If there are multiple matching headers, the values are combined into a single string separated by commas and spaces. If no matching header is found, returns *default* or None if the *default* is not specified. If the headers are unknown, raises http.client.ResponseNotReady. N__iter__z, )r>rZget_all isinstancerYhasattrrA)r0r%defaultr>rrr getheaders zHTTPResponse.getheadercCs|jdkrtt|jjS)z&Return list of (header, value) tuples.N)r>rlistitems)r0rrr getheaderss zHTTPResponse.getheaderscCs|S)Nr)r0rrrrszHTTPResponse.__iter__cCs|jS)ajReturns an instance of the class mimetools.Message containing meta-information associated with the URL. When the method is HTTP, these headers are those returned by the server at the head of the retrieved HTML page (including Content-Length and Content-Type). When the method is FTP, a Content-Length header will be present if (as is now usual) the server passed back a file length in response to the FTP retrieval request. A Content-Type header will be present if the MIME type can be guessed. When the method is local-file, returned headers will include a Date representing the file's last-modified time, a Content-Length giving file size, and a Content-Type containing a guess at the file's type. See also the description of the mimetools module. )r>)r0rrrinfoszHTTPResponse.infocCs|jS)aZReturn the real URL of the page. 
In some cases, the HTTP server redirects a client to another URL. The urlopen() function handles this transparently, but in some cases the caller needs to know which URL the client was redirected to. The geturl() method can be used to get at this redirected URL. )rU)r0rrrgeturls zHTTPResponse.geturlcCs|jS)zuReturn the HTTP status code that was sent with the response, or None if the URL is not an HTTP URL. )rM)r0rrrgetcodeszHTTPResponse.getcode)rNN)N)rr)rr)r)N)"r4r5r6rVrarmrkr_rnrqrrrsryrurrrrxr~rzrrrr:rrrrrrrrr __classcell__rr)rprrs< !K  "     c@seZdZdZdZeZeZdZ dZ e ddZ e ddZ d ejd fd d Zd0d d ZddZddZddZddZddZddZddZddZd1dd Zd2d!d"Zd#d$Zd%d&Zd3dd'd(d)Zd ifdd'd*d+Zd,d-Z d.d/Z!d S)4rrezHTTP/1.1r*rcCs t|tjS)zFTest whether a file-like object is a text or a binary stream. )rio TextIOBase)streamrrr _is_textIO0szHTTPConnection._is_textIOc Csd|dkr|jtkrdSdSt|dr*dSyt|}|jStk rLYnXt|tr`t|SdS)aGet the content-length based on the body. If the body is None, we set Content-Length: 0 for methods that expect a body (RFC 7230, Section 3.3.2). We also set the Content-Length for any method if the body is a str or bytes-like object and not a file. Nrry) upper_METHODS_EXPECTING_BODYrrvnbytes TypeErrorrrYr,)bodyrTZmvrrr_get_content_length6s   z"HTTPConnection._get_content_lengthNcCs\||_||_d|_g|_d|_t|_d|_d|_d|_ i|_ |j ||\|_ |_ tj|_dS)N)timeoutsource_addressrS_buffer_HTTPConnection__response_CS_IDLE_HTTPConnection__staterI _tunnel_host _tunnel_port_tunnel_headers _get_hostporthostportsocketZcreate_connection_create_connection)r0rrrrrrrrVVszHTTPConnection.__init__cCs<|jrtd|j||\|_|_|r.||_n |jjdS)aDSet up host and port for HTTP CONNECT tunnelling. In a connection that uses HTTP CONNECT tunneling, the host passed to the constructor is used as a proxy server that relays all communication to the endpoint passed to `set_tunnel`. This done by sending an HTTP CONNECT request to the proxy server when the connection is established. 
This method must be called before the HTML connection has been established. The headers argument should be a mapping of extra HTTP headers to send with the CONNECT request. z.Can't set up tunnel for established connectionN)rS RuntimeErrorrrrrclear)r0rrr>rrr set_tunnelis zHTTPConnection.set_tunnelc Cs|dkr|jd}|jd}||kryt||dd}WnHtk r||dddkrh|j}ntd||ddYnX|d|}n|j}|r|ddkr|ddkr|dd }||fS) Nr)]r*rXznonnumeric port: '%s'r[rr)rfindr`r] default_portr )r0rrrjrrrrs   zHTTPConnection._get_hostportcCs ||_dS)N)rH)r0levelrrrset_debuglevelszHTTPConnection.set_debuglevelc Csd|j|jf}|jd}|j|x6|jjD](\}}d||f}|jd}|j|q0W|jd|j|j|jd}|j \}} } | t j j kr|j td| | jfxP|jjtd} t| tkrtd | sP| dkrP|jd krtd | jqWdS)NzCONNECT %s:%d HTTP/1.0 asciiz%s: %s zlatin-1 )rTzTunnel connection failed: %d %sr*z header liner8r9rzheader:)rr8r9)rrrsendrrresponse_classrSrIrahttp HTTPStatusZOKrnOSErrorrir=r:r;r,rrHrZrB) r0Z connect_strZ connect_bytesheaderrZ header_strZ header_bytesresponserLrhmessager2rrr_tunnels2         zHTTPConnection._tunnelcCsB|j|j|jf|j|j|_|jjtjtj d|j r>|j dS)z3Connect to the host and port specified in __init__.r*N) rrrrrrSZ setsockoptrZ IPPROTO_TCPZ TCP_NODELAYrr)r0rrrconnects zHTTPConnection.connectc CsBt|_z|j}|r d|_|jWd|j}|rrrrrrszHTTPConnection.requestc Cstdd|D}i}d|kr&d|d<d|kr6d|d<|j||f|d|krd |krd }|j||}|dkr|dk r|jd krtd |d }|jddq|jdt|nd }x |jD]\} } |j| | qWt|trt |d}|j ||ddS)Ncss|]}|jVqdS)N)r+)rkrrr sz/HTTPConnection._send_request..rr*rzaccept-encodingrzcontent-lengthztransfer-encodingFrzUnable to determine size of %rTzTransfer-EncodingrOzContent-Lengthr)r) frozensetrrrHrZrrYrrr'r) r0rTrUrr>rZ header_namesZskipsZcontent_lengthrlrrrrrs0      zHTTPConnection._send_requestcCs|jr|jjrd|_|jtks&|jr0t|j|jdkrR|j|j|j|jd}n|j|j|jd}yZy |j Wnt k r|j YnX|j t kstt|_|j r|j n||_|S|j YnXdS)a)Get the response from the server. If the HTTPConnection is in the correct state, returns an instance of HTTPResponse or of whatever object is returned by the response_class variable. 
If a request has not been sent or if a previous response has not be handled, ResponseNotReady is raised. If the HTTP response indicates that the connection should be closed, then it will be closed before the response is returned. When the connection is closed, the underlying socket is closed. Nr)rT)rrsrrrrHrrSrIrmConnectionErrorrnrRrKrr)r0rrrr getresponse)s.      zHTTPConnection.getresponse)NN)NF)FF)N)"r4r5r6rrrr HTTP_PORTrrrH staticmethodrrr_GLOBAL_DEFAULT_TIMEOUTrVrrrrrrnrrrrrrrrrrrrrrrr&s<     ' 6   .csFeZdZdZeZdddejdfdddfdd ZfddZ Z S)HTTPSConnectionz(This class allows communication via SSL.N)contextcheck_hostnamec stt|j|||||dk s.|dk s.|dk rDddl} | jdtd||_||_|dkrptj }|j dk rpd|_ |j tj k} |dkr|j }|r| rtd|s|r|j|||j dk rd|_ ||_||_dS)NrzTkey_file, cert_file and check_hostname are deprecated, use a custom context instead.rWTzMcheck_hostname needs a SSL context with either CERT_OPTIONAL or CERT_REQUIRED)rorrVwarningswarnDeprecationWarningkey_file cert_filesslZ_create_default_https_contextZpost_handshake_authZ verify_modeZ CERT_NONErr]Zload_cert_chain_context_check_hostname) r0rrr r rrrrrZ will_verify)rprrrVts0     zHTTPSConnection.__init__c stj|jr|j}n|j}|jj|j|d|_|jj r|jryt j |jj |Wn.t k r|jj tj|jjYnXdS)z(Connect to a host on a given (SSL) port.)server_hostnameN)rorrrr Z wrap_socketrSrr r Zmatch_hostnameZ getpeercert ExceptionZshutdownrZ SHUT_RDWRrn)r0r)rprrrs    zHTTPSConnection.connect) r4r5r6__doc__ HTTPS_PORTrrrrVrrrr)rprrmsrc@s eZdZdS)rN)r4r5r6rrrrrsc@s eZdZdS)rN)r4r5r6rrrrrsc@s eZdZdS)r N)r4r5r6rrrrr sc@seZdZddZdS)rcCs|f|_||_dS)N)argsrL)r0rLrrrrVszUnknownProtocol.__init__N)r4r5r6rVrrrrrsc@s eZdZdS)rN)r4r5r6rrrrrsc@s eZdZdS)r N)r4r5r6rrrrr sc@s&eZdZdddZddZddZdS) r NcCs|f|_||_||_dS)N)rpartialexpected)r0rrrrrrVszIncompleteRead.__init__cCs2|jdk rd|j}nd}d|jjt|j|fS)Nz, %i more expectedrXz%s(%i bytes read%s))rrpr4r,r)r0errr__repr__s   zIncompleteRead.__repr__cCst|S)N)r[)r0rrr__str__szIncompleteRead.__str__)N)r4r5r6rVrrrrrrr s c@s eZdZdS)r N)r4r5r6rrrrr sc@s eZdZdS)r 
N)r4r5r6rrrrr sc@s eZdZdS)rN)r4r5r6rrrrrsc@s eZdZdS)rN)r4r5r6rrrrrsc@seZdZddZdS)rcCs|s t|}|f|_||_dS)N)r[rr2)r0r2rrrrVszBadStatusLine.__init__N)r4r5r6rVrrrrrsc@seZdZddZdS)rcCstj|dt|fdS)Nz&got more than %d bytes when reading %s)rrVr;)r0Z line_typerrrrVszLineTooLong.__init__N)r4r5r6rVrrrrrsc@seZdZddZdS)rcOs"tj|dtj|f||dS)NrX)rrVConnectionResetError)r0poskwrrrrVs zRemoteDisconnected.__init__N)r4r5r6rVrrrrrs)r)BrZ email.parserrCZ email.messagerrosrerrZ urllib.parser__all__rrrKrrrglobalsupdater __members__rrrr;r<compile fullmatchrrrrrrr'rZMessager(r?rEBufferedIOBaserrr  ImportErrorrr/rrrr rrr r r r rrrrrrrrrrrEs        9F = __pycache__/cookiejar.cpython-36.opt-1.pyc000064400000151077147204456360014335 0ustar003 fr+@s*dZddddddddgZd d lZd d lZd d lZd d lZd d lZd d lZy d d l Z Wne k rpd d l Z YnXd d l Zd d lmZd Zd ad dZeejjZdZddZdZddZdddddddgZddddd d!d"d#d$d%d&d'g ZgZxeD]Zej ej!qWdud(d)Z"dvd*d+Z#d d d d d,Z$ej%d-ej&Z'd.d/Z(d0d1Z)ej%d2ej&Z*ej%d3ej+ej&BZ,ej%d4ej-ej&BZ.d5d6Z/ej%d7ej-ej&BZ0d8d9Z1d:d;Z2ej%d<Z3ej%d=Z4ej%d>Z5ej%d?Z6d@dAZ7ej%dBZ8dCdDZ9dEdFZ:dGdHZ;ej%dIej&ZdNdOZ?dPdQZ@ej%dRej&ZAdSdTZBdUdVZCdWdXZDdYdZZEd[ZFej%d\ZGd]d^ZHd_d`ZIdadbZJdcddZKGdeddZLGdfddZMGdgddeMZNdhdiZOdjdkZPGdldmdmZQGdnddZRGdoddeSZTGdpddeRZUdqdrZVGdsddeUZWGdtddeUZXd S)waHTTP cookie handling for web clients. This module has (now fairly distant) origins in Gisle Aas' Perl module HTTP::Cookies, from the libwww-perl library. Docstrings, comments and debug strings in this code refer to the attributes of the HTTP cookie system as cookie-attributes, to distinguish them clearly from Python attributes. 
Class diagram (note that BSDDBCookieJar and the MSIE* classes are not distributed with the Python standard library, but are available from http://wwwsearch.sf.net/): CookieJar____ / \ \ FileCookieJar \ \ / | \ \ \ MozillaCookieJar | LWPCookieJar \ \ | | \ | ---MSIEBase | \ | / | | \ | / MSIEDBCookieJar BSDDBCookieJar |/ MSIECookieJar Cookie CookieJar CookiePolicyDefaultCookiePolicy FileCookieJar LWPCookieJar LoadErrorMozillaCookieJarN)timegmFcGs(tsdStsddl}|jdatj|S)Nr zhttp.cookiejar)debugloggerloggingZ getLogger)argsr r&/usr/lib64/python3.6/http/cookiejar.py_debug.s  rzQa filename was not supplied (nor was the CookieJar instance initialised with one)cCsJddl}ddl}ddl}|j}|jd||j}|jd|dddS)Nr zhttp.cookiejar bug! %s) stacklevel)iowarnings tracebackStringIO print_excgetvaluewarn)rrrfmsgrrr_warn_unhandled_exception<s  ricCs|dd\}}}}}}|tkrd|ko2dknrd|koJdknrd|kobdknrd|kozdknrd|kodknrt|SdSdS) N r ;=) EPOCH_YEARr )ttyearmonthZmdayhourminsecrrr_timegmKs 8Hr,ZMonZTueZWedZThuZFriZSatZSunZJanZFebZMarZAprZMayZJunZJulZAugZSepZOctZNovZDeccCs@|dkrtjj}n tjj|}d|j|j|j|j|j|jfS)aHReturn a string representing time in seconds since epoch, t. If the function is called without an argument, it will use the current time. The format of the returned string is like "YYYY-MM-DD hh:mm:ssZ", representing Universal Time (UTC, aka GMT). An example of this format is: 1994-11-24 08:49:37Z Nz%04d-%02d-%02d %02d:%02d:%02dZ) datetimeutcnowutcfromtimestampr'r(dayr)minutesecond)tdtrrr time2isozYs   r5cCsR|dkrtjj}n tjj|}dt|j|jt|jd|j|j |j |j fS)zReturn a string representing time in seconds since epoch, t. If the function is called without an argument, it will use the current time. 
The format of the returned string is like this: Wed, DD-Mon-YYYY HH:MM:SS GMT Nz#%s, %02d-%s-%04d %02d:%02d:%02d GMTr) r-r.r/DAYSZweekdayr0MONTHSr(r'r)r1r2)r3r4rrr time2netscapels   r8)ZGMTUTCZUTZz^([-+])?(\d\d?):?(\d\d)?$cCsjd}|tkrd}nTtj|}|rfdt|jd}|jdrR|dt|jd}|jddkrf| }|S)Nr ir<r-) UTC_ZONES TIMEZONE_REsearchintgroup)tzoffsetmrrroffset_from_tz_strings  rFc Cst|}|tjkrdSytj|jd}WnXtk ry t|}Wntk r\dSXd|kopdknr||}ndSYnX|dkrd}|dkrd}|dkrd}t|}t|}t|}t|}|dkr0tjtjd}|d} |} ||| }| | } t | dkr0| dkr(|d}n|d}t |||||||f} | dk r|dkr^d}|j }t |} | dkr|dS| | } | S)Nrr r id2r9) rAr-ZMAXYEAR MONTHS_LOWERindexlower ValueErrortimeZ localtimeabsr,upperrF) r0monyrhrr*r+rCZimonZcur_yrrEZtmpr3rDrrr _str2timesV         rSzV^[SMTWF][a-z][a-z], (\d\d) ([JFMASOND][a-z][a-z]) (\d\d\d\d) (\d\d):(\d\d):(\d\d) GMT$z+^(?:Sun|Mon|Tue|Wed|Thu|Fri|Sat)[a-z]*,?\s*a^ (\d\d?) # day (?:\s+|[-\/]) (\w+) # month (?:\s+|[-\/]) (\d+) # year (?: (?:\s+|:) # separator before clock (\d\d?):(\d\d) # hour:min (?::(\d\d))? # optional seconds )? # optional clock \s* ([-+]?\d{2,4}|(?![APap][Mm]\b)[A-Za-z]+)? # timezone \s* (?:\(\w+\))? # ASCII representation of timezone in parens. \s*$c Cstj|}|rl|j}tj|djd}t|d|t|dt|dt|dt|df}t|S|j }t j d|d}dgd \}}}}}} } t j|}|dk r|j\}}}}}} } ndSt |||||| | S) aReturns time in seconds since epoch of time represented by a string. Return value is an integer. None is returned if the format of str is unrecognized, the time is outside the representable range, or the timezone string is not recognized. If the string contains no timezone, UTC is assumed. The timezone in the string may be numerical (like "-0800" or "+0100") or a string timezone (like "UTC", "GMT", "BST" or "EST"). Currently, only the timezone strings equivalent to UTC (zero offset) are known to the function. 
The function loosely parses the following formats: Wed, 09 Feb 1994 22:23:32 GMT -- HTTP format Tuesday, 08-Feb-94 14:15:29 GMT -- old rfc850 HTTP format Tuesday, 08-Feb-1994 14:15:29 GMT -- broken rfc850 HTTP format 09 Feb 1994 22:23:32 GMT -- HTTP format (no weekday) 08-Feb-94 14:15:29 GMT -- rfc850 format (no weekday) 08-Feb-1994 14:15:29 GMT -- broken rfc850 format (no weekday) The parser ignores leading and trailing whitespace. The time may be absent. If the year is given with only 2 digits, the function will select the century that makes the year closest to the current date. rrr r;N)STRICT_DATE_REr@groupsrIrJrKrAfloatr,lstrip WEEKDAY_REsubLOOSE_HTTP_DATE_RErS) textrEgrPr&r0rQrRr*r+rCrrr http2times " raa^ (\d{4}) # year [-\/]? (\d\d?) # numerical month [-\/]? (\d\d?) # day (?: (?:\s+|[-:Tt]) # separator before clock (\d\d?):?(\d\d) # hour:min (?::?(\d\d(?:\.\d*)?))? # optional seconds (and fractional) )? # optional clock \s* ([-+]?\d\d?:?(:?\d\d)? |Z|z)? # timezone (Z is "zero meridian", i.e. GMT) \s*$c Csd|j}dgd\}}}}}}}tj|}|dk rL|j\}}}}}}}} ndSt|||||||S)av As for http2time, but parses the ISO 8601 formats: 1994-02-03 14:15:29 -0100 -- ISO 8601 format 1994-02-03 14:15:29 -- zone is optional 1994-02-03 -- only date 1994-02-03T14:15:29 -- Use T as separator 19940203T141529Z -- ISO 8601 compact format 19940203 -- only date NrW)r[ ISO_DATE_REr@rYrS) r_r0rPrQrRr*r+rCrE_rrriso2time's  rdcCs*|jd\}}|jd||j|dS)z)Return unmatched part of re.Match object.r N)spanstring)matchstartendrrr unmatchedHsrjz^\s*([^=\s;,]+)z&^\s*=\s*\"([^\"\\]*(?:\\.[^\"\\]*)*)\"z^\s*=\s*([^\s;,]*)z\\(.)c Cs g}x|D]}|}g}x|rtj|}|rt|}|jd}tj|}|rlt|}|jd}tjd|}n.tj|}|rt|}|jd}|j}nd}|j ||fq|j j dr|j dd}|r|j |g}qt j dd|\}} |}qW|r |j |q W|S)amParse header values into a list of lists containing key,value pairs. The function knows how to deal with ",", ";" and "=" as well as quoted values after "=". A list of space separated tokens are parsed as if they were separated by ";". 
If the header_values passed as argument contains multiple values, then they are treated as if they were a single value separated by comma ",". This means that this function is useful for parsing header fields that follow this syntax (BNF as from the HTTP/1.1 specification, but we relax the requirement for tokens). headers = #header header = (token | parameter) *( [";"] (token | parameter)) token = 1* separators = "(" | ")" | "<" | ">" | "@" | "," | ";" | ":" | "\" | <"> | "/" | "[" | "]" | "?" | "=" | "{" | "}" | SP | HT quoted-string = ( <"> *(qdtext | quoted-pair ) <"> ) qdtext = > quoted-pair = "\" CHAR parameter = attribute "=" value attribute = token value = token | quoted-string Each header is represented by a list of key/value pairs. The value for a simple token (not part of a parameter) is None. Syntactically incorrect headers will not necessarily be parsed as you would want. This is easier to describe with some examples: >>> split_header_words(['foo="bar"; port="80,81"; discard, bar=baz']) [[('foo', 'bar'), ('port', '80,81'), ('discard', None)], [('bar', 'baz')]] >>> split_header_words(['text/html; charset="iso-8859-1"']) [[('text/html', None), ('charset', 'iso-8859-1')]] >>> split_header_words([r'Basic realm="\"foo\bar\""']) [[('Basic', None), ('realm', '"foobar"')]] rz\1N,z^[=\s;]*rV)HEADER_TOKEN_REr@rjrBHEADER_QUOTED_VALUE_REHEADER_ESCAPE_REr]HEADER_VALUE_RErstripappendr[ startswithresubn) Z header_valuesresultr_Z orig_textpairsrEnamevalueZnon_junkZ nr_junk_charsrrrsplit_header_wordsQs>.         ryz([\"\\])cCsg}xt|D]l}g}xN|D]F\}}|dk rTtjd|sHtjd|}d|}d||f}|j|qW|r |jdj|q Wdj|S)aDo the inverse (almost) of the conversion done by split_header_words. Takes a list of lists of (key, value) pairs and produces a single header value. Attribute values are quoted if needed. 
>>> join_header_words([[("text/plain", None), ("charset", "iso-8859-1")]]) 'text/plain; charset="iso-8859-1"' >>> join_header_words([[("text/plain", None)], [("charset", "iso-8859-1")]]) 'text/plain, charset="iso-8859-1"' Nz^\w+$z\\\1z"%s"z%s=%sz; z, )rsr@HEADER_JOIN_ESCAPE_REr]rqjoin)Zlistsheadersrvattrkvrrrjoin_header_wordss     rcCs0|jdr|dd}|jdr,|dd}|S)N"r)rrendswith)r_rrr strip_quotess     rc Csd}g}x|D]}g}d}xt|jd D]\}}|j}|jd \}} } |j}|sd|d kr*Pnq*| rp| jnd } |d kr|j} | |kr| }|dkr| d k rt| } d }n|dkr| d k rtt| } |j|| fq*W|r|s|jd|j|qW|S)a5Ad-hoc parser for Netscape protocol cookie-attributes. The old Netscape cookie format for Set-Cookie can for instance contain an unquoted "," in the expires field, so we have to use this ad-hoc parser instead of split_header_words. XXX This may not make the best possible effort to parse all the crap that Netscape Cookie headers contain. Ronald Tschalar's HTTPClient parser is probably better, so could do worse than following that if this ever gives any trouble. Currently, this is also used for parsing RFC 2109 cookies. expiresdomainpathsecureversionportmax-ageF;=r NT0)rrrrrrr)rr) enumeratesplitstrip partitionrKrrarq) Z ns_headersZ known_attrsruZ ns_headerrv version_setZiiZparamkeysepvallcrrrparse_ns_headerss@   rz\.\d+$cCs:tj|rdS|dkrdS|ddks2|ddkr6dSdS)z*Return True if text is a host domain name.FrVr .rTr)IPV4_REr@)r_rrris_HDN s rcCsl|j}|j}||krdSt|s(dS|j|}|dksB|dkrFdS|jdsTdSt|ddshdSdS)aReturn True if domain A domain-matches domain B, according to RFC 2965. A and B may be host domain names or IP addresses. RFC 2965, section 1: Host names can be specified either as an IP address or a HDN string. Sometimes we compare one host name with another. (Such comparisons SHALL be case-insensitive.) Host A's name domain-matches host B's if * their host name strings string-compare equal; or * A is a HDN string and has the form NB, where N is a non-empty name string, B has the form .B', and B' is a HDN string. 
(So, x.y.com domain-matches .Y.com but not Y.com.) Note that domain-match is not a commutative operation: a.b.c.com domain-matches .c.com, but not the reverse. TFrr rNr)rKrrfindrr)ABirrr domain_matchs  rcCstj|rdSdS)zdReturn True if text is a sort-of-like a host domain name. For accepting/blocking domains. FT)rr@)r_rrrliberal_is_HDNBs rcCsb|j}|j}t|ot|s0||kr,dSdS|jd}|rL|j|rLdS| r^||kr^dSdS)z\For blocking/accepting domains. A and B may be host domain names or IP addresses. TFr)rKrrrr)rr initial_dotrrruser_domain_matchLs rz:\d+$cCsB|j}tjj|d}|dkr,|jdd}tjd|d}|jS)zReturn request-host, as defined by RFC 2965. Variation from RFC: returned value is lowercased, for convenient comparison. rrVZHost) get_full_urlurllibparseZurlparseZ get_header cut_port_rer]rK)requesturlhostrrr request_hostas  rcCs6t|}}|jddkr.tj| r.|d}||fS)zzReturn a tuple (request-host, effective request-host name). As defined by RFC 2965, except both are lowercased. rrz.localr)rfindrr@)rerhnreq_hostrrreff_request_hostqs rcCs4|j}tjj|}t|j}|jds0d|}|S)z6Path component of request-URI, as defined by RFC 2965./)rrrZurlsplit escape_pathrrr)rrpartsrrrr request_path|s    rc Cs^|j}|jd}|dkrV||dd}y t|WqZtk rRtd|dSXnt}|S)N:r rznonnumeric port: '%s')rrrArLrDEFAULT_HTTP_PORT)rrrrrrr request_ports   rz%/;:@&=+$,!~*'()z%([0-9a-fA-F][0-9a-fA-F])cCsd|jdjS)Nz%%%sr)rBrO)rgrrruppercase_escaped_charsrcCstjj|t}tjt|}|S)zEEscape any invalid characters in HTTP URL, and uppercase all escapes.)rrZquoteHTTP_PATH_SAFEESCAPED_CHAR_REr]r)rrrrrs  rcCsP|jd}|dkrL||dd}|jd}t|rL|dksD|dkrLd|S|S)aBReturn reach of host h, as defined by RFC 2965, section 1. The reach R of a host name H is defined as follows: * If - H is the host domain name of a host; and, - H has the form A.B; and - A has no embedded (that is, interior) dots; and - B has at least one embedded dot, or B is the string "local". then the reach of H is .B. * Otherwise, the reach of H is H. 
>>> reach("www.acme.com") '.acme.com' >>> reach("acme.com") 'acme.com' >>> reach("acme.local") '.local' rr rNZlocal)rr)hrbrrrreachs  rcCs$t|}t|t|jsdSdSdS)z RFC 2965, section 3.3.6: An unverifiable transaction is to a third-party host if its request- host U does not domain-match the reach R of the request-host O in the origin transaction. TFN)rrrZorigin_req_host)rrrrris_third_partys rc@sNeZdZdZdddZddZddd Zd d Zdd d ZddZ ddZ dS)raHTTP Cookie. This class represents both Netscape and RFC 2965 cookies. This is deliberately a very simple class. It just holds attributes. It's possible to construct Cookie instances that don't comply with the cookie standards. CookieJar.make_cookies is the factory function for Cookie objects -- it deals with cookie parsing, supplying defaults, and normalising to the representation used in this class. CookiePolicy is responsible for checking them to see whether they should be accepted from and returned to the server. Note that the port may be present in the headers, but unspecified ("Port" rather than"Port=80", for example); if this is the case, port is None. FcCs|dk rt|}| dk r$tt| } |dkr<|dkr)rrrrxrw)rplimitZ namevaluerrr__str__%s   zCookie.__str__cCspg}x,dD]$}t||}|jd|t|fq W|jdt|j|jdt|jd|jjdj|fS)Nrrwrxrrrrrrrrrrrrz%s=%szrest=%sz rfc2109=%sz%s(%s)z, )rrwrxrrrrrrrrrrrr)getattrrqreprrr __class____name__r{)rrrwr}rrr__repr__/s zCookie.__repr__)F)N)N) r __module__ __qualname____doc__rrrrrrrrrrrrs    c@s0eZdZdZddZddZddZdd Zd S) ra Defines which cookies get accepted from and returned to server. May also modify cookies, though this is probably a bad idea. The subclass DefaultCookiePolicy defines the standard rules for Netscape and RFC 2965 cookies -- override that if you want a customized policy. cCs tdS)zReturn true if (and only if) cookie should be accepted from server. Currently, pre-expired cookies never get this far -- the CookieJar class deletes such cookies itself. 
N)NotImplementedError)rcookierrrrset_okGszCookiePolicy.set_okcCs tdS)zAReturn true if (and only if) cookie should be returned to server.N)r)rrrrrr return_okPszCookiePolicy.return_okcCsdS)zMReturn false if cookies should not be returned, given cookie domain. Tr)rrrrrrdomain_return_okTszCookiePolicy.domain_return_okcCsdS)zKReturn false if cookies should not be returned, given cookie path. Tr)rrrrrrpath_return_okYszCookiePolicy.path_return_okN)rrrrrrrrrrrrr>s  c @seZdZdZdZdZdZdZeeBZdddddddddeddf d d Z d d Z d dZ ddZ ddZ ddZddZddZddZddZddZdd Zd!d"Zd#d$Zd%d&Zd'd(Zd)d*Zd+d,Zd-d.Zd/d0Zd1d2Zd3d4Zd5d6ZdS)7rzBImplements the standard rules for accepting and returning cookies.rrrTr NTFc Csp||_||_||_||_||_||_| |_| |_| |_| |_ |dk rPt ||_ nf|_ |dk rft |}||_ dS)zAConstructor arguments should be passed as keyword arguments only.N) netscaperfc2965rfc2109_as_netscape hide_cookie2 strict_domainstrict_rfc2965_unverifiablestrict_ns_unverifiablestrict_ns_domainstrict_ns_set_initial_dollarstrict_ns_set_pathtuple_blocked_domains_allowed_domains) rblocked_domainsallowed_domainsrrrrrrrrrrrrrris  zDefaultCookiePolicy.__init__cCs|jS)z4Return the sequence of blocked domains (as a tuple).)r)rrrrrsz#DefaultCookiePolicy.blocked_domainscCst||_dS)z$Set the sequence of blocked domains.N)rr)rrrrrset_blocked_domainssz'DefaultCookiePolicy.set_blocked_domainscCs"x|jD]}t||rdSqWdS)NTF)rr)rrZblocked_domainrrr is_blockeds  zDefaultCookiePolicy.is_blockedcCs|jS)z=Return None, or the sequence of allowed domains (as a tuple).)r)rrrrrsz#DefaultCookiePolicy.allowed_domainscCs|dk rt|}||_dS)z-Set the sequence of allowed domains, or None.N)rr)rrrrrset_allowed_domainssz'DefaultCookiePolicy.set_allowed_domainscCs0|jdkrdSx|jD]}t||rdSqWdS)NFT)rr)rrZallowed_domainrrris_not_alloweds    z"DefaultCookiePolicy.is_not_allowedcCsBtd|j|jx,d D]$}d|}t||}|||sd SqWd S) z If you override .set_ok(), be sure to call this method. 
If it returns false, so should your subclass (assuming your subclass wants to be more strict about which cookies to accept). z - checking cookie %s=%sr verifiabilityrwrrrZset_ok_FT)rrrwrrr)rrwrxr)rrrnfn_namefnrrrrs   zDefaultCookiePolicy.set_okcCs^|jdkrtd|j|jdS|jdkr<|j r third-party RFC 2965 cookie during unverifiable transactionFz> third-party Netscape cookie during unverifiable transactionT) unverifiablerrrrr)rrrrrrset_ok_verifiabilitysz(DefaultCookiePolicy.set_ok_verifiabilitycCs0|jdkr,|jr,|jjdr,td|jdSdS)Nr $z' illegal name (starts with '$'): '%s'FT)rrrwrrr)rrrrrr set_ok_names   zDefaultCookiePolicy.set_ok_namecCsL|jrHt|}|jdks(|jdkrH|jrH|j|j rHtd|j|dSdS)Nr z7 path attribute %s is not a prefix of request path %sFT)rrrrrrrr)rrrreq_pathrrr set_ok_paths  zDefaultCookiePolicy.set_ok_pathc Cs|j|jrtd|jdS|j|jr8td|jdS|jrt|\}}|j}|jr|jddkr|jd}|jdd|}|dkr||dd}||d|} | j d$krt |dkrtd|dS|j dr|dd} n|} | j ddk} | r|dkrtd|dS|j dkrb|j| rb|j d rbd|j| rbtd ||dS|j dks||j|j@rt||std!||dS|j dks|j|j@r|dt | } | j ddkrtj| rtd"| |dSd#S)%Nz" domain %s is in user block-listFz& domain %s is not in user allow-listrrr rcoaccomeduorgnetgovmilrAaerobizcatcoopinfojobsmobimuseumrwprotraveleuz& country-code second level domain %sz.localz/ non-local domain %s contains no embedded dotzO effective request-host %s (even with added initial dot) does not end with %sz5 effective request-host %s does not domain-match %sz. host prefix %s for domain %s contains a dotT)rrrrrrrrrAr r r r r rrrrwrrr)rrrrrrrcountrrKlenrrrrrrDomainRFC2965MatchrDomainStrictNoDotsrr@) rrrrrrrjZtldZsldZundotted_domainZ embedded_dotsZ host_prefixrrr set_ok_domainsf            z!DefaultCookiePolicy.set_ok_domainc Cs|jrt|}|dkrd}nt|}x\|jjdD]:}y t|Wntk r`td|dSX||kr2Pq2Wtd||jdSdS)N80rkz bad port %s (not numeric)Fz$ request port (%s) not found in %sT)rrstrrrrArLr)rrrreq_portrrrr set_ok_port%s"   zDefaultCookiePolicy.set_ok_portcCsBtd|j|jx,d D]$}d|}t||}|||sd SqWd S) z If you override .return_ok(), be sure to call this method. 
If it returns false, so should your subclass (assuming your subclass wants to be more strict about which cookies to return). z - checking cookie %s=%srrrrrrZ return_ok_FT)rrrrrr)rrwrxr)rrrrrrrrrr:s    zDefaultCookiePolicy.return_okcCs@|jdkr|j rtddS|jdkr<|j r third-party RFC 2965 cookie during unverifiable transactionFz> third-party Netscape cookie during unverifiable transactionT)rrrrrr)rrrrrrreturn_ok_verifiabilityUsz+DefaultCookiePolicy.return_ok_verifiabilitycCs |jr|jdkrtddSdS)NZhttpsz( secure cookie with non-secure requestFT)rtyper)rrrrrrreturn_ok_secureasz$DefaultCookiePolicy.return_ok_securecCs|j|jrtddSdS)Nz cookie expiredFT)r_nowr)rrrrrrreturn_ok_expiresgs z%DefaultCookiePolicy.return_ok_expirescCsP|jrLt|}|dkrd}x0|jjdD]}||kr(Pq(Wtd||jdSdS)Nrrkz0 request port %s does not match cookie port %sFT)rrrr)rrrrrrrrreturn_ok_portms z"DefaultCookiePolicy.return_ok_portcCst|\}}|j}|r,|jd r,d|}n|}|jdkrb|j|j@rb|j rb||krbtddS|jdkrt|| rtd||dS|jdkrd|j | rtd||dSdS)Nrr zQ cookie with unspecified domain does not string-compare equal to request domainFzQ effective request-host name %s does not domain-match RFC 2965 cookie domain %sz; request-host %s does not match Netscape cookie domain %sT) rrrrrrDomainStrictNonDomainrrrr)rrrrrr dotdomainrrrreturn_ok_domain{s&    z$DefaultCookiePolicy.return_ok_domaincCst|\}}|jdsd|}|jds0d|}|rJ|jd rJd|}n|}|j|p`|j|sfdS|j|r~td|dS|j|rtd|dSdS)NrFz" domain %s is in user block-listz& domain %s is not in user allow-listT)rrrrrrr)rrrrrr&rrrrs"        z$DefaultCookiePolicy.domain_return_okcCs0td|t|}|j|s,td||dSdS)Nz- checking cookie path=%sz %s does not path-match %sFT)rrrr)rrrrrrrrs    z"DefaultCookiePolicy.path_return_ok) rrrrrr%rZ DomainLiberalZ DomainStrictrrrrrrrrrrrrrrrrrr!r#r$r'rrrrrrr_sL    ;  cCst|j}t|j|S)N)sortedkeysmapr)Zadictr)rrrvals_sorted_by_keys r+c csZt|}xL|D]D}d}y |jWntk r4YnXd}t|EdH|s|VqWdS)zBIterates over nested mapping, depth-first, in sorted order by key.FTN)r+itemsAttributeError deepvalues)mappingvaluesobjrrrr.s  r.c@s 
eZdZdS)AbsentN)rrrrrrrr2sr2c@seZdZdZejdZejdZejdZejdZ ejdZ ejdej Z d3d d Z d d Zd dZddZddZddZddZddZddZddZddZdd Zd!d"Zd#d$Zd4d%d&Zd'd(Zd)d*Zd+d,Zd-d.Zd/d0Z d1d2Z!dS)5rzCollection of HTTP cookies. You may not need to know about this class: try urllib.request.build_opener(HTTPCookieProcessor).open(url). z\Wz([\"\\])z\.?[^.]*z[^.]*z^\.+z^\#LWP-Cookies-(\d+\.\d+)NcCs(|dkrt}||_tj|_i|_dS)N)r_policy _threadingRLock _cookies_lock_cookies)rpolicyrrrrs  zCookieJar.__init__cCs ||_dS)N)r3)rr8rrr set_policyszCookieJar.set_policycCsg}|jj||sgStd||j|}xd|jD]X}|jj||sHq4||}x:|jD].}|jj||svtdqZtd|j|qZWq4W|S)Nz!Checking %s for cookies to returnz not returning cookiez it's a match) r3rrr7r)rr0rrq)rrrcookiesZcookies_by_pathrZcookies_by_namerrrr_cookies_for_domains   zCookieJar._cookies_for_domaincCs.g}x$|jjD]}|j|j||qW|S)z2Return a list of cookies to be returned to server.)r7r)extendr;)rrr:rrrr_cookies_for_requestszCookieJar._cookies_for_requestc CsF|jddddd}g}x$|D]}|j}|sLd}|dkrL|jd||jdk r~|jj|jr~|dkr~|jjd |j}n|j}|jdkr|j|jn|jd |j|f|dkr"|j r|jd |j |j j d r|j }|j o|j d r|d d}|jd||jdk r"d}|jr4|d|j}|j|q"W|S)zReturn a list of cookie-attributes to be returned to server. like ['foo="bar"; $Path="/"', ...] The $Version attribute is also added when appropriate (currently only once per request). cSs t|jS)N)rr)arrrsz)CookieJar._cookie_attrs..T)rreverseFr z $Version=%sNz\\\1z%s=%sz $Path="%s"rrz $Domain="%s"z$Portz="%s")sortrrqrx non_word_rer@quote_rer]rwrrrrrrrr) rr:rattrsrrrxrrrrr _cookie_attrss>      zCookieJar._cookie_attrsc Cstd|jjzttj|j_|_|j|}|j|}|r^|j ds^|j ddj ||jj r|jj r|j d rx$|D]}|jdkr|j ddPqWWd|jjX|jdS)zAdd correct Cookie: header to request (urllib.request.Request object). The Cookie2 header is also added unless policy.hide_cookie2 is true. 
add_cookie_headerrz; ZCookie2rz $Version="1"N)rr6acquirerArMr3r"r=rEZ has_headerZadd_unredirected_headerr{rrrreleaseclear_expired_cookies)rrr:rDrrrrrF?s$          zCookieJar.add_cookie_headercCsg}d}d}x||D]r}|d \}}d }d } i} i} x4|d dD]"\} } | j}||ksh||krl|} | |kr| dkrd} | | krqF| dkr| dkrtdd} P| j} | dkr|rqF| dkrtdqF| dkrd}y t| } Wn$tk rtdd} PYnXd} |j| } | |ks2| |krb| dkrX| dkrXtd| d} P| | | <qF| | | <qFW| rvq|j||| | fqW|S)aReturn list of tuples containing normalised cookie information. attrs_set is the list of lists of key,value pairs extracted from the Set-Cookie or Set-Cookie2 headers. Tuples are name, value, standard, rest, where name and value are the cookie name and value, standard is a dictionary containing the standard cookie-attributes (discard, secure, version, expires or max-age, domain, path and port) and rest is a dictionary containing the rest of the cookie-attributes. rrrrmax-agerrrr commenturlr FrNTz% missing value for domain attributezM missing or invalid value for expires attribute: treating as session cookiez? missing or invalid (non-numeric) value for max-age attributez! 
missing value for %s attribute)rr)rrrJrrrrrK)rrrK)rKrrArLr"rq)r attrs_set cookie_tuples boolean_attrs value_attrsZ cookie_attrsrwrxZ max_age_setZ bad_cookiestandardrr~rrrrr_normalized_cookie_tuples`sl          z#CookieJar._normalized_cookie_tuplesc!Cs$|\}}}}|jdt}|jdt}|jdt} |jdt} |jdd} | dk rry t| } Wntk rpdSX|jdd} |jdd} |jd d}|jd d}|tk r|d krd }t|}nXd}t|}|jd }|dkr| dkr|d|}n|d|d}t|dkrd }|tk }d}|r8t|j d}|tkrTt |\}}|}n|j dshd|}d}| tk r| dkrt |} nd }t j dd | } nd} | tkrd} d } nH| |jkry|j|||Wntk rYnXtd|||dSt| ||| ||||||| | | |||S)NrrrrrrFrrrKrVTrrr rz\s+z2Expiring cookie, domain='%s', path='%s', name='%s'r)rr2rArLrrrrboolrrrrrsr]r"clearKeyErrorrr)rtuprrwrxrPrrrrrrrrrrrrrrrrrrrr_cookie_from_cookie_tuples                       z#CookieJar._cookie_from_cookie_tuplecCs:|j|}g}x&|D]}|j||}|r|j|qW|S)N)rQrVrq)rrLrrMr:rUrrrr_cookies_from_attrs_sets   z!CookieJar._cookies_from_attrs_setcCsLt|jdd}|dkr |jj }x&|D]}|jdkr&d|_|r&d|_q&WdS)NrrTr )rr3rrr)rr:Z rfc2109_as_nsrrrr_process_rfc2109_cookies&s   z"CookieJar._process_rfc2109_cookiesc Cs6|j}|jdg}|jdg}|jj}|jj}| r<| s`| rH| s`| rT| s`| rd| rdgSy|jt||}Wntk rtg}YnX|o|r2y|jt ||} Wntk rtg} YnX|j | |r"i} x |D]} d| | j | j | j f<qW| fdd} t| | } | r2|j| |S)zAReturn sequence of Cookie objects extracted from response object.z Set-Cookie2z Set-CookieNcSs|j|j|jf}||kS)N)rrrw)Z ns_cookielookuprrrrno_matching_rfc2965^sz3CookieJar.make_cookies..no_matching_rfc2965)r Zget_allr3rrrWry ExceptionrrrXrrrwfilterr<) rresponserr|Z rfc2965_hdrsZns_hdrsrrr:Z ns_cookiesrYrrZrrr make_cookies2sB              zCookieJar.make_cookiesc CsN|jjz2ttj|j_|_|jj||r:|j|Wd|jjXdS)z-Set a cookie if policy says it's OK to do so.N) r6rGrArMr3r"r set_cookierH)rrrrrrset_cookie_if_okhs  zCookieJar.set_cookie_if_okc Csl|j}|jjzJ|j|kr&i||j<||j}|j|krDi||j<||j}|||j<Wd|jjXdS)z?Set a cookie, without checking whether or not it should be set.N)r7r6rGrrrwrH)rrcZc2Zc3rrrr_us     zCookieJar.set_cookiec Cs|td|j|jjzRttj|j_|_x6|j||D]&}|jj ||r>td||j |q>WWd|jj XdS)zAExtract cookies from response, 
where allowable given the request.zextract_cookies: %sz setting cookie: %sN) rr r6rGrArMr3r"r^rr_rH)rr]rrrrrextract_cookiess  zCookieJar.extract_cookiescCst|dk r2|dks|dkr td|j|||=n>|dk rX|dkrJtd|j||=n|dk rj|j|=ni|_dS)aClear some cookies. Invoking this method without arguments will clear all cookies. If given a single argument, only cookies belonging to that domain will be removed. If given two arguments, cookies belonging to the specified path within that domain are removed. If given three arguments, then the cookie with the specified name, path and domain is removed. Raises KeyError if no matching cookie exists. Nz8domain and path must be given to remove a cookie by namez.domain must be given to remove cookies by path)rLr7)rrrrwrrrrSs  zCookieJar.clearc CsH|jjz,x&|D]}|jr|j|j|j|jqWWd|jjXdS)zDiscard all session cookies. Note that the .save() method won't save session cookies anyway, unless you ask otherwise by passing a true ignore_discard argument. N)r6rGrrSrrrwrH)rrrrrclear_session_cookiess   zCookieJar.clear_session_cookiesc CsT|jjz8tj}x*|D]"}|j|r|j|j|j|jqWWd|jjXdS)aDiscard all expired cookies. You probably don't need to call this method: expired cookies are never sent back to the server (provided you're using DefaultCookiePolicy), this method is called by CookieJar itself every so often, and the .save() method won't save expired cookies anyway (unless you ask otherwise by passing a true ignore_expires argument). 
N) r6rGrMrrSrrrwrH)rrrrrrrIs   zCookieJar.clear_expired_cookiescCs t|jS)N)r.r7)rrrr__iter__szCookieJar.__iter__cCsd}x|D] }|d}q W|S)z#Return number of contained cookies.r rr)rrrrrr__len__s zCookieJar.__len__cCs6g}x|D]}|jt|q Wd|jjdj|fS)Nz<%s[%s]>z, )rqrrrr{)rrrrrrrs zCookieJar.__repr__cCs6g}x|D]}|jt|q Wd|jjdj|fS)Nz<%s[%s]>z, )rqrrrr{)rrfrrrrrs zCookieJar.__str__)N)NNN)"rrrrrscompilerBrCZstrict_domain_reZ domain_reZdots_reASCIImagic_rerr9r;r=rErFrQrVrWrXr^r`r_rbrSrcrIrdrerrrrrrrs8      ;!a\  6   c@s eZdZdS)rN)rrrrrrrrsc@s8eZdZdZd ddZd ddZddd Zdd d ZdS)rz6CookieJar that can be loaded from and saved to a file.NFc CsJtj|||dk r6y |dWntdYnX||_t||_dS)z} Cookies are NOT loaded from the named file until either the .load() or .revert() method is called. NrVzfilename must be string-like)rrrLfilenamerR delayload)rrjrkr8rrrrs  zFileCookieJar.__init__cCs tdS)zSave cookies to a file.N)r)rrjignore_discardignore_expiresrrrsaveszFileCookieJar.savec CsJ|dkr"|jdk r|j}nttt|}|j||||WdQRXdS)zLoad cookies from a file.N)rjrLMISSING_FILENAME_TEXTopen _really_load)rrjrlrmrrrrloads   zFileCookieJar.loadcCs|dkr"|jdk r|j}ntt|jjzFtj|j}i|_y|j|||Wnt k rn||_YnXWd|jj XdS)zClear all cookies and reload cookies from a saved file. Raises LoadError (or OSError) if reversion is not successful; the object's state will not be altered if this happens. N) rjrLror6rGrdeepcopyr7rrOSErrorrH)rrjrlrmZ old_staterrrreverts    zFileCookieJar.revert)NFN)NFF)NFF)NFF)rrrrrrnrrrurrrrrs    cCs$|j|jfd|jfd|jfg}|jdk r8|jd|jf|jrH|jd|jrX|jd|jrh|jd|j rx|jd|j r|jd t t |j f|j r|jd|jr|jd |jf|jr|jd |jft|jj}x$|D]}|j|t|j|fqW|jd t|jft|gS)zReturn string representation of Cookie in the LWP cookie file format. Actually, the format is extended a bit -- see module docstring. rrNr path_spec port_spec domain_dotrrrrrKr)rvN)rwN)rxN)rN)rN)rwrxrrrrqrrrrrr5rZrrrr(rr)rrr)rrr)r~rrrlwp_cookie_strs6         ryc@s,eZdZdZd ddZd ddZd d ZdS) ra[ The LWPCookieJar saves a sequence of "Set-Cookie3" lines. 
"Set-Cookie3" is the format used by the libwww-perl library, not known to be compatible with any browser, but which is easy to read and doesn't lose information about RFC 2965 cookies. Additional methods as_lwp_str(ignore_discard=True, ignore_expired=True) TcCs\tj}g}x>|D]6}| r$|jr$q| r6|j|r6q|jdt|qWdj|dgS)zReturn cookies as a string of "\n"-separated "Set-Cookie3" headers. ignore_discard and ignore_expires: see docstring for FileCookieJar.save zSet-Cookie3: %s rV)rMrrrqryr{)rrlrmrrfrrrr as_lwp_strGs  zLWPCookieJar.as_lwp_strNFc CsX|dkr"|jdk r|j}nttt|d"}|jd|j|j||WdQRXdS)Nwz#LWP-Cookies-2.0 )rjrLrorpwriter{)rrjrlrmrrrrrnWs   zLWPCookieJar.savecCsL|j}|jj|s$d|}t|tj}d}d} d} yʐx|j} | dkrRP| j|s^q@| t|dj} xt| gD]x} | d\} }i}i}x| D] }d||<qWx| ddD]t\}}|dk r|j }nd}|| ks|| kr|}|| kr|dkr d}|||<q|| kr*|||<q|||<qW|j }|d }|d}|dk r^t |}|dkrld}|d }|jd}t |d| ||d |d|||d|d |d|d|||d |d|}| r|j rq| r|j|rq|j|qWq@WWnBtk rYn,tk rFttd|| fYnXdS)Nz5%r does not look like a Set-Cookie3 (LWP) format filez Set-Cookie3:rwrvrxrrrrrrrrrKrVr FrTrz&invalid Set-Cookie3 format file %r: %r)rwrvrxrr)rrrrrrrK)readlinerir@rrMrrrrryrKrrdrrrr_rtr[r)rrrjrlrmmagicrrheaderrNrOlinedatarwrxrPrr~rrrrrrrrarrrrqcs                   zLWPCookieJar._really_load)TT)NFF)rrrrr{rnrqrrrrr:s   c@s0eZdZdZejdZdZddZd dd Z dS) ra WARNING: you may want to backup your browser's cookies file if you use this class to save cookies. I *think* it works, but there have been bugs in the past! This class differs from CookieJar only in the format it uses to save and load cookies to and from a file. This class uses the Mozilla/Netscape `cookies.txt' format. lynx uses this file format, too. Don't expect cookies saved while the browser is running to be noticed by the browser (in fact, Mozilla on unix will overwrite your saved cookies if you change them on disk while it's running; on Windows, you probably can't save at all while the browser is running). 
Note that the Mozilla/Netscape format will downgrade RFC2965 cookies to Netscape cookies on saving. In particular, the cookie version and port number information is lost, together with information about whether or not Path, Port and Discard were specified by the Set-Cookie2 (or Set-Cookie) header, and whether or not the domain as set in the HTTP header started with a dot (yes, I'm aware some domains in Netscape files start with a dot and some don't -- trust me, you really don't want to know any more about this). Note that though Mozilla and Netscape use the same format, they use slightly different headers. The class saves cookies using the Netscape header by default (Mozilla can cope with that). z#( Netscape)? HTTP Cookie Filezr# Netscape HTTP Cookie File # http://curl.haxx.se/rfc/cookie_spec.html # This is a generated file! Do not edit. cCsntj}|j}|jj|s(td|yx|j}|dkr@P|jdrV|dd}|jjds.|jdkrrq.|jd\}} } } } } }| dk} | dk} | dkr|} d}|jd }d }| dkrd} d }t d | |dd || || d | | |ddi}| r|j rq.| r|j |rq.|j |q.WWnBt k r>Yn,tk rhttd ||fYnXdS)Nz4%r does not look like a Netscape format cookies filerVrzr#r TRUErFTr z+invalid Netscape format cookies file %r: %rr)rr)rMr~rir@rrrrrrrrrr_rtr[r)rrrjrlrmrrrrrrrrrwrxrrrarrrrqs`    zMozillaCookieJar._really_loadNFc Cs|dkr"|jdk r|j}nttt|d}|j|jtj}x|D]}| rZ|jrZqH| rl|j|rlqH|j rxd}nd}|j j drd}nd}|j dk rt |j } nd} |jdkrd} |j} n |j} |j} |jdj|j ||j|| | | gdqHWWdQRXdS)Nr|rZFALSErrVrrz)rjrLrorpr}rrMrrrrrrrrrxrwr{r) rrjrlrmrrrrrrrwrxrrrrns<          zMozillaCookieJar.save)NFF) rrrrrsrgrirrqrnrrrrrs  A)N)N)Yr__all__rr-rsrMZ urllib.parserZurllib.requestZ threadingr4 ImportErrorZdummy_threadingZ http.clientZhttpZcalendarr r r rrZclientZ HTTP_PORTrrorr%r,r6r7rIr(rqrKr5r8r>rgrhr?rFrSrXIr\Xr^rarbrdrjrlrmrornryrzrrrrrrrrrrrrrrrrrrrrrrr+r.r2rrtrrryrrrrrrs        88!    
U D'    #b!\:x__pycache__/cookiejar.cpython-36.opt-2.pyc000064400000112411147204456360014323 0ustar003 fr+@s&ddddddddgZdd lZdd lZdd lZdd lZdd lZdd lZy dd lZ Wne k rldd l Z YnXdd l Z dd lmZd Zd ad d Zee jjZdZddZdZddZdddddddgZdddddd d!d"d#d$d%d&g ZgZxeD]Zejej qWdtd'd(Z!dud)d*Z"d d d d d+Z#ej$d,ej%Z&d-d.Z'd/d0Z(ej$d1ej%Z)ej$d2ej*ej%BZ+ej$d3ej,ej%BZ-d4d5Z.ej$d6ej,ej%BZ/d7d8Z0d9d:Z1ej$d;Z2ej$d<Z3ej$d=Z4ej$d>Z5d?d@Z6ej$dAZ7dBdCZ8dDdEZ9dFdGZ:ej$dHej%Z;dIdJZdOdPZ?ej$dQej%Z@dRdSZAdTdUZBdVdWZCdXdYZDdZZEej$d[ZFd\d]ZGd^d_ZHd`daZIdbdcZJGddddZKGdeddZLGdfddeLZMdgdhZNdidjZOGdkdldlZPGdmddZQGdnddeRZSGdoddeQZTdpdqZUGdrddeTZVGdsddeTZWd S)vCookie CookieJar CookiePolicyDefaultCookiePolicy FileCookieJar LWPCookieJar LoadErrorMozillaCookieJarN)timegmFcGs(tsdStsddl}|jdatj|S)Nr zhttp.cookiejar)debugloggerloggingZ getLogger)argsr r&/usr/lib64/python3.6/http/cookiejar.py_debug.s  rzQa filename was not supplied (nor was the CookieJar instance initialised with one)cCsJddl}ddl}ddl}|j}|jd||j}|jd|dddS)Nr zhttp.cookiejar bug! 
%s) stacklevel)iowarnings tracebackStringIO print_excgetvaluewarn)rrrfmsgrrr_warn_unhandled_exception<s  ricCs|dd\}}}}}}|tkrd|ko2dknrd|koJdknrd|kobdknrd|kozdknrd|kodknrt|SdSdS) N r ;=) EPOCH_YEARr )ttyearmonthZmdayhourminsecrrr_timegmKs 8Hr,ZMonZTueZWedZThuZFriZSatZSunZJanZFebZMarZAprZMayZJunZJulZAugZSepZOctZNovZDeccCs@|dkrtjj}n tjj|}d|j|j|j|j|j|jfS)Nz%04d-%02d-%02d %02d:%02d:%02dZ) datetimeutcnowutcfromtimestampr'r(dayr)minutesecond)tdtrrr time2isozYs   r5cCsR|dkrtjj}n tjj|}dt|j|jt|jd|j|j |j |j fS)Nz#%s, %02d-%s-%04d %02d:%02d:%02d GMTr) r-r.r/DAYSZweekdayr0MONTHSr(r'r)r1r2)r3r4rrr time2netscapels   r8)ZGMTUTCZUTZz^([-+])?(\d\d?):?(\d\d)?$cCsjd}|tkrd}nTtj|}|rfdt|jd}|jdrR|dt|jd}|jddkrf| }|S)Nr ir<r-) UTC_ZONES TIMEZONE_REsearchintgroup)tzoffsetmrrroffset_from_tz_strings  rFc Cst|}|tjkrdSytj|jd}WnXtk ry t|}Wntk r\dSXd|kopdknr||}ndSYnX|dkrd}|dkrd}|dkrd}t|}t|}t|}t|}|dkr0tjtjd}|d} |} ||| }| | } t | dkr0| dkr(|d}n|d}t |||||||f} | dk r|dkr^d}|j }t |} | dkr|dS| | } | S)Nrr r id2r9) rAr-ZMAXYEAR MONTHS_LOWERindexlower ValueErrortimeZ localtimeabsr,upperrF) r0monyrhrr*r+rCZimonZcur_yrrEZtmpr3rDrrr _str2timesV         rSzV^[SMTWF][a-z][a-z], (\d\d) ([JFMASOND][a-z][a-z]) (\d\d\d\d) (\d\d):(\d\d):(\d\d) GMT$z+^(?:Sun|Mon|Tue|Wed|Thu|Fri|Sat)[a-z]*,?\s*a^ (\d\d?) # day (?:\s+|[-\/]) (\w+) # month (?:\s+|[-\/]) (\d+) # year (?: (?:\s+|:) # separator before clock (\d\d?):(\d\d) # hour:min (?::(\d\d))? # optional seconds )? # optional clock \s* ([-+]?\d{2,4}|(?![APap][Mm]\b)[A-Za-z]+)? # timezone \s* (?:\(\w+\))? # ASCII representation of timezone in parens. \s*$c Cstj|}|rl|j}tj|djd}t|d|t|dt|dt|dt|df}t|S|j }t j d|d}dgd\}}}}}} } t j|}|dk r|j\}}}}}} } ndSt |||||| | S) Nrrr r;)STRICT_DATE_REr@groupsrIrJrKrAfloatr,lstrip WEEKDAY_REsubLOOSE_HTTP_DATE_RErS) textrEgrPr&r0rQrRr*r+rCrrr http2times " raa^ (\d{4}) # year [-\/]? (\d\d?) # numerical month [-\/]? (\d\d?) # day (?: (?:\s+|[-:Tt]) # separator before clock (\d\d?):?(\d\d) # hour:min (?::?(\d\d(?:\.\d*)?))? 
# optional seconds (and fractional) )? # optional clock \s* ([-+]?\d\d?:?(:?\d\d)? |Z|z)? # timezone (Z is "zero meridian", i.e. GMT) \s*$c Csd|j}dgd\}}}}}}}tj|}|dk rL|j\}}}}}}}} ndSt|||||||S)NrW)r[ ISO_DATE_REr@rYrS) r_r0rPrQrRr*r+rCrE_rrriso2time's  rdcCs*|jd\}}|jd||j|dS)Nr )spanstring)matchstartendrrr unmatchedHsrjz^\s*([^=\s;,]+)z&^\s*=\s*\"([^\"\\]*(?:\\.[^\"\\]*)*)\"z^\s*=\s*([^\s;,]*)z\\(.)c Cs g}x|D]}|}g}x|rtj|}|rt|}|jd}tj|}|rlt|}|jd}tjd|}n.tj|}|rt|}|jd}|j}nd}|j ||fq|j j dr|j dd}|r|j |g}qt j dd|\}} |}qW|r |j |q W|S)Nrz\1,z^[=\s;]*rV)HEADER_TOKEN_REr@rjrBHEADER_QUOTED_VALUE_REHEADER_ESCAPE_REr]HEADER_VALUE_RErstripappendr[ startswithresubn) Z header_valuesresultr_Z orig_textpairsrEnamevalueZnon_junkZ nr_junk_charsrrrsplit_header_wordsQs>.         ryz([\"\\])cCsg}xt|D]l}g}xN|D]F\}}|dk rTtjd|sHtjd|}d|}d||f}|j|qW|r |jdj|q Wdj|S)Nz^\w+$z\\\1z"%s"z%s=%sz; z, )rsr@HEADER_JOIN_ESCAPE_REr]rqjoin)Zlistsheadersrvattrkvrrrjoin_header_wordss     rcCs0|jdr|dd}|jdr,|dd}|S)N"r)rrendswith)r_rrr strip_quotess     rc Csd}g}x|D]}g}d}xt|jd D]\}}|j}|jd \}} } |j}|sd|d kr*Pnq*| rp| jnd} |d kr|j} | |kr| }|dkr| dk rt| } d }n|dkr| dk rtt| } |j|| fq*W|r|s|jd|j|qW|S)Nexpiresdomainpathsecureversionportmax-ageF;=r T0)rrrrrrr)rr) enumeratesplitstrip partitionrKrrarq) Z ns_headersZ known_attrsruZ ns_headerrv version_setZiiZparamkeysepvallcrrrparse_ns_headerss@   rz\.\d+$cCs:tj|rdS|dkrdS|ddks2|ddkr6dSdS)NFrVr .rTr)IPV4_REr@)r_rrris_HDN s rcCsl|j}|j}||krdSt|s(dS|j|}|dksB|dkrFdS|jdsTdSt|ddshdSdS)NTFrr rr)rKrrfindrr)ABirrr domain_matchs  rcCstj|rdSdS)NFT)rr@)r_rrrliberal_is_HDNBs rcCsb|j}|j}t|ot|s0||kr,dSdS|jd}|rL|j|rLdS| r^||kr^dSdS)NTFr)rKrrrr)rr initial_dotrrruser_domain_matchLs rz:\d+$cCsB|j}tjj|d}|dkr,|jdd}tjd|d}|jS)NrrVZHost) get_full_urlurllibparseZurlparseZ get_header cut_port_rer]rK)requesturlhostrrr request_hostas  rcCs6t|}}|jddkr.tj| r.|d}||fS)Nrrz.localr)rfindrr@)rerhnreq_hostrrreff_request_hostqs rcCs4|j}tjj|}t|j}|jds0d|}|S)N/)rrrZurlsplit escape_pathrrr)rrpartsrrrr request_path|s 
   rc Cs^|j}|jd}|dkrV||dd}y t|WqZtk rRtd|dSXnt}|S)N:r rznonnumeric port: '%s')rrrArLrDEFAULT_HTTP_PORT)rrrrrrr request_ports   rz%/;:@&=+$,!~*'()z%([0-9a-fA-F][0-9a-fA-F])cCsd|jdjS)Nz%%%sr)rBrO)rgrrruppercase_escaped_charsrcCstjj|t}tjt|}|S)N)rrZquoteHTTP_PATH_SAFEESCAPED_CHAR_REr]r)rrrrrs  rcCsP|jd}|dkrL||dd}|jd}t|rL|dksD|dkrLd|S|S)Nrr rZlocal)rr)hrbrrrreachs  rcCs$t|}t|t|jsdSdSdS)NTF)rrrZorigin_req_host)rrrrris_third_partys rc@sJeZdZdddZddZdddZd d Zdd d Zd dZddZ dS)rFcCs|dk rt|}| dk r$tt| } |dkr<|dkr)rrrrxrw)rplimitZ namevaluerrr__str__%s   zCookie.__str__cCspg}x,dD]$}t||}|jd|t|fq W|jdt|j|jdt|jd|jjdj|fS)Nrrwrxrrrrrrrrrrrrz%s=%szrest=%sz rfc2109=%sz%s(%s)z, )rrwrxrrrrrrrrrrrr)getattrrqreprrr __class____name__r{)rrrwr}rrr__repr__/s zCookie.__repr__)F)N)N) r __module__ __qualname__rrrrrrrrrrrrs    c@s,eZdZddZddZddZddZd S) rcCs tdS)N)NotImplementedError)rcookierrrrset_okGszCookiePolicy.set_okcCs tdS)N)r)rrrrrr return_okPszCookiePolicy.return_okcCsdS)NTr)rrrrrrdomain_return_okTszCookiePolicy.domain_return_okcCsdS)NTr)rrrrrrpath_return_okYszCookiePolicy.path_return_okN)rrrrrrrrrrrr>s  c @seZdZdZdZdZdZeeBZdddddddddeddf dd Zd d Z d d Z ddZ ddZ ddZ ddZddZddZddZddZddZd d!Zd"d#Zd$d%Zd&d'Zd(d)Zd*d+Zd,d-Zd.d/Zd0d1Zd2d3Zd4d5ZdS)6rrrrTr NTFc Csp||_||_||_||_||_||_| |_| |_| |_| |_ |dk rPt ||_ nf|_ |dk rft |}||_ dS)N) netscaperfc2965rfc2109_as_netscape hide_cookie2 strict_domainstrict_rfc2965_unverifiablestrict_ns_unverifiablestrict_ns_domainstrict_ns_set_initial_dollarstrict_ns_set_pathtuple_blocked_domains_allowed_domains) rblocked_domainsallowed_domainsrrrrrrrrrrrrrris  zDefaultCookiePolicy.__init__cCs|jS)N)r)rrrrrsz#DefaultCookiePolicy.blocked_domainscCst||_dS)N)rr)rrrrrset_blocked_domainssz'DefaultCookiePolicy.set_blocked_domainscCs"x|jD]}t||rdSqWdS)NTF)rr)rrZblocked_domainrrr is_blockeds  zDefaultCookiePolicy.is_blockedcCs|jS)N)r)rrrrrsz#DefaultCookiePolicy.allowed_domainscCs|dk 
rt|}||_dS)N)rr)rrrrrset_allowed_domainssz'DefaultCookiePolicy.set_allowed_domainscCs0|jdkrdSx|jD]}t||rdSqWdS)NFT)rr)rrZallowed_domainrrris_not_alloweds    z"DefaultCookiePolicy.is_not_allowedcCsBtd|j|jx,d D]$}d|}t||}|||sd SqWd S) Nz - checking cookie %s=%sr verifiabilityrwrrrZset_ok_FT)rrrwrrr)rrwrxr)rrrnfn_namefnrrrrs   zDefaultCookiePolicy.set_okcCs^|jdkrtd|j|jdS|jdkr<|j r third-party RFC 2965 cookie during unverifiable transactionFz> third-party Netscape cookie during unverifiable transactionT) unverifiablerrrrr)rrrrrrset_ok_verifiabilitysz(DefaultCookiePolicy.set_ok_verifiabilitycCs0|jdkr,|jr,|jjdr,td|jdSdS)Nr $z' illegal name (starts with '$'): '%s'FT)rrrwrrr)rrrrrr set_ok_names   zDefaultCookiePolicy.set_ok_namecCsL|jrHt|}|jdks(|jdkrH|jrH|j|j rHtd|j|dSdS)Nr z7 path attribute %s is not a prefix of request path %sFT)rrrrrrrr)rrrreq_pathrrr set_ok_paths  zDefaultCookiePolicy.set_ok_pathc Cs|j|jrtd|jdS|j|jr8td|jdS|jrt|\}}|j}|jr|jddkr|jd}|jdd|}|dkr||dd}||d|} | j d$krt |dkrtd|dS|j dr|dd} n|} | j ddk} | r|dkrtd|dS|j dkrb|j| rb|j d rbd|j| rbtd ||dS|j dks||j|j@rt||std!||dS|j dks|j|j@r|dt | } | j ddkrtj| rtd"| |dSd#S)%Nz" domain %s is in user block-listFz& domain %s is not in user allow-listrrr rcoaccomeduorgnetgovmilrAaerobizcatcoopinfojobsmobimuseumrwprotraveleuz& country-code second level domain %sz.localz/ non-local domain %s contains no embedded dotzO effective request-host %s (even with added initial dot) does not end with %sz5 effective request-host %s does not domain-match %sz. 
host prefix %s for domain %s contains a dotT)rrrrrrrrrArr r r r r rrrwrrr)rrrrrrrcountrrKlenrrrrrrDomainRFC2965MatchrDomainStrictNoDotsrr@) rrrrrrrjZtldZsldZundotted_domainZ embedded_dotsZ host_prefixrrr set_ok_domainsf            z!DefaultCookiePolicy.set_ok_domainc Cs|jrt|}|dkrd}nt|}x\|jjdD]:}y t|Wntk r`td|dSX||kr2Pq2Wtd||jdSdS)N80rkz bad port %s (not numeric)Fz$ request port (%s) not found in %sT)rrstrrrrArLr)rrrreq_portrrrr set_ok_port%s"   zDefaultCookiePolicy.set_ok_portcCsBtd|j|jx,d D]$}d|}t||}|||sd SqWd S) Nz - checking cookie %s=%srrrrrrZ return_ok_FT)rrrrrr)rrwrxr)rrrrrrrrrr:s    zDefaultCookiePolicy.return_okcCs@|jdkr|j rtddS|jdkr<|j r third-party RFC 2965 cookie during unverifiable transactionFz> third-party Netscape cookie during unverifiable transactionT)rrrrrr)rrrrrrreturn_ok_verifiabilityUsz+DefaultCookiePolicy.return_ok_verifiabilitycCs |jr|jdkrtddSdS)NZhttpsz( secure cookie with non-secure requestFT)rtyper)rrrrrrreturn_ok_secureasz$DefaultCookiePolicy.return_ok_securecCs|j|jrtddSdS)Nz cookie expiredFT)r_nowr)rrrrrrreturn_ok_expiresgs z%DefaultCookiePolicy.return_ok_expirescCsP|jrLt|}|dkrd}x0|jjdD]}||kr(Pq(Wtd||jdSdS)Nrrkz0 request port %s does not match cookie port %sFT)rrrr)rrrrrrrrreturn_ok_portms z"DefaultCookiePolicy.return_ok_portcCst|\}}|j}|r,|jd r,d|}n|}|jdkrb|j|j@rb|j rb||krbtddS|jdkrt|| rtd||dS|jdkrd|j | rtd||dSdS)Nrr zQ cookie with unspecified domain does not string-compare equal to request domainFzQ effective request-host name %s does not domain-match RFC 2965 cookie domain %sz; request-host %s does not match Netscape cookie domain %sT) rrrrrrDomainStrictNonDomainrrrr)rrrrrr dotdomainrrrreturn_ok_domain{s&    z$DefaultCookiePolicy.return_ok_domaincCst|\}}|jdsd|}|jds0d|}|rJ|jd rJd|}n|}|j|p`|j|sfdS|j|r~td|dS|j|rtd|dSdS)NrFz" domain %s is in user block-listz& domain %s is not in user allow-listT)rrrrrrr)rrrrrr%rrrrs"        z$DefaultCookiePolicy.domain_return_okcCs0td|t|}|j|s,td||dSdS)Nz- checking cookie path=%sz %s does not path-match 
%sFT)rrrr)rrrrrrrrs    z"DefaultCookiePolicy.path_return_ok)rrrrr$rZ DomainLiberalZ DomainStrictrrrrrrrrrrrrrrrrrr r"r#r&rrrrrrr_sJ    ;  cCst|j}t|j|S)N)sortedkeysmapr)Zadictr(rrrvals_sorted_by_keys r*c csZt|}xL|D]D}d}y |jWntk r4YnXd}t|EdH|s|VqWdS)NFT)r*itemsAttributeError deepvalues)mappingvaluesobjrrrr-s  r-c@s eZdZdS)AbsentN)rrrrrrrr1sr1c@seZdZejdZejdZejdZejdZejdZ ejdej Z d2dd Z d d Z d d ZddZddZddZddZddZddZddZddZddZd d!Zd"d#Zd3d$d%Zd&d'Zd(d)Zd*d+Zd,d-Zd.d/Zd0d1Z dS)4rz\Wz([\"\\])z\.?[^.]*z[^.]*z^\.+z^\#LWP-Cookies-(\d+\.\d+)NcCs(|dkrt}||_tj|_i|_dS)N)r_policy _threadingRLock _cookies_lock_cookies)rpolicyrrrrs  zCookieJar.__init__cCs ||_dS)N)r2)rr7rrr set_policyszCookieJar.set_policycCsg}|jj||sgStd||j|}xd|jD]X}|jj||sHq4||}x:|jD].}|jj||svtdqZtd|j|qZWq4W|S)Nz!Checking %s for cookies to returnz not returning cookiez it's a match) r2rrr6r(rr/rrq)rrrcookiesZcookies_by_pathrZcookies_by_namerrrr_cookies_for_domains   zCookieJar._cookies_for_domaincCs.g}x$|jjD]}|j|j||qW|S)N)r6r(extendr:)rrr9rrrr_cookies_for_requestszCookieJar._cookies_for_requestc CsF|jddddd}g}x$|D]}|j}|sLd}|dkrL|jd||jdk r~|jj|jr~|dkr~|jjd|j}n|j}|jdkr|j|jn|jd |j|f|dkr"|j r|jd |j |j j d r|j }|j o|j d r|d d}|jd ||jdk r"d}|jr4|d|j}|j|q"W|S)NcSs t|jS)N)rr)arrrsz)CookieJar._cookie_attrs..T)rreverseFr z $Version=%sz\\\1z%s=%sz $Path="%s"rrz $Domain="%s"z$Portz="%s")sortrrqrx non_word_rer@quote_rer]rwrrrrrrrr) rr9rattrsrrrxrrrrr _cookie_attrss>      zCookieJar._cookie_attrsc Cstd|jjzttj|j_|_|j|}|j|}|r^|j ds^|j ddj ||jj r|jj r|j d rx$|D]}|jdkr|j ddPqWWd|jjX|jdS)Nadd_cookie_headerrz; ZCookie2rz $Version="1")rr5acquirerArMr2r!r<rDZ has_headerZadd_unredirected_headerr{rrrreleaseclear_expired_cookies)rrr9rCrrrrrE?s$          zCookieJar.add_cookie_headercCsg}d}d}x||D]r}|d \}}d }d } i} i} x4|d dD]"\} } | j}||ksh||krl|} | |kr| dkrd} | | krqF| dkr| dkrtdd} P| j} | dkr|rqF| dkrtdqF| dkrd}y t| } Wn$tk rtdd} PYnXd} |j| } | |ks2| |krb| dkrX| dkrXtd| d} P| | | <qF| | | <qFW| rvq|j||| | fqW|S)Nrrrrmax-agerrrr commenturlr 
FrTz% missing value for domain attributezM missing or invalid value for expires attribute: treating as session cookiez? missing or invalid (non-numeric) value for max-age attributez! missing value for %s attribute)rr)rrrIrrrrrJ)rrrJ)rKrrArLr!rq)r attrs_set cookie_tuples boolean_attrs value_attrsZ cookie_attrsrwrxZ max_age_setZ bad_cookiestandardrr~rrrrr_normalized_cookie_tuples`sl          z#CookieJar._normalized_cookie_tuplesc!Cs$|\}}}}|jdt}|jdt}|jdt} |jdt} |jdd} | dk rry t| } Wntk rpdSX|jdd} |jdd} |jd d}|jd d}|tk r|d krd }t|}nXd}t|}|jd }|dkr| dkr|d|}n|d|d}t|dkrd }|tk }d}|r8t|j d}|tkrTt |\}}|}n|j dshd|}d}| tk r| dkrt |} nd }t j dd | } nd} | tkrd} d } nH| |jkry|j|||Wntk rYnXtd|||dSt| ||| ||||||| | | |||S)NrrrrrrFrrrJrVTrrr rz\s+z2Expiring cookie, domain='%s', path='%s', name='%s'r)rr1rArLrrrrboolrrrrrsr]r!clearKeyErrorrr)rtuprrwrxrOrrrrrrrrrrrrrrrrrrrr_cookie_from_cookie_tuples                       z#CookieJar._cookie_from_cookie_tuplecCs:|j|}g}x&|D]}|j||}|r|j|qW|S)N)rPrUrq)rrKrrLr9rTrrrr_cookies_from_attrs_sets   z!CookieJar._cookies_from_attrs_setcCsLt|jdd}|dkr |jj }x&|D]}|jdkr&d|_|r&d|_q&WdS)NrrTr )rr2rrr)rr9Z rfc2109_as_nsrrrr_process_rfc2109_cookies&s   z"CookieJar._process_rfc2109_cookiesc Cs6|j}|jdg}|jdg}|jj}|jj}| r<| s`| rH| s`| rT| s`| rd| rdgSy|jt||}Wntk rtg}YnX|o|r2y|jt ||} Wntk rtg} YnX|j | |r"i} x |D]} d| | j | j | j f<qW| fdd} t| | } | r2|j| |S)Nz Set-Cookie2z Set-CookiecSs|j|j|jf}||kS)N)rrrw)Z ns_cookielookuprrrrno_matching_rfc2965^sz3CookieJar.make_cookies..no_matching_rfc2965)r Zget_allr2rrrVry ExceptionrrrWrrrwfilterr;) rresponserr|Z rfc2965_hdrsZns_hdrsrrr9Z ns_cookiesrXrrYrrr make_cookies2sB              zCookieJar.make_cookiesc CsN|jjz2ttj|j_|_|jj||r:|j|Wd|jjXdS)N) r5rFrArMr2r!r set_cookierG)rrrrrrset_cookie_if_okhs  zCookieJar.set_cookie_if_okc Csl|j}|jjzJ|j|kr&i||j<||j}|j|krDi||j<||j}|||j<Wd|jjXdS)N)r6r5rFrrrwrG)rrcZc2Zc3rrrr^us     zCookieJar.set_cookiec Cs|td|j|jjzRttj|j_|_x6|j||D]&}|jj ||r>td||j |q>WWd|jj XdS)Nzextract_cookies: %sz 
setting cookie: %s) rr r5rFrArMr2r!r]rr^rG)rr\rrrrrextract_cookiess  zCookieJar.extract_cookiescCst|dk r2|dks|dkr td|j|||=n>|dk rX|dkrJtd|j||=n|dk rj|j|=ni|_dS)Nz8domain and path must be given to remove a cookie by namez.domain must be given to remove cookies by path)rLr6)rrrrwrrrrRs  zCookieJar.clearc CsH|jjz,x&|D]}|jr|j|j|j|jqWWd|jjXdS)N)r5rFrrRrrrwrG)rrrrrclear_session_cookiess   zCookieJar.clear_session_cookiesc CsT|jjz8tj}x*|D]"}|j|r|j|j|j|jqWWd|jjXdS)N) r5rFrMrrRrrrwrG)rrrrrrrHs   zCookieJar.clear_expired_cookiescCs t|jS)N)r-r6)rrrr__iter__szCookieJar.__iter__cCsd}x|D] }|d}q W|S)Nr rr)rrrrrr__len__s zCookieJar.__len__cCs6g}x|D]}|jt|q Wd|jjdj|fS)Nz<%s[%s]>z, )rqrrrr{)rrrrrrrs zCookieJar.__repr__cCs6g}x|D]}|jt|q Wd|jjdj|fS)Nz<%s[%s]>z, )rqrrrr{)rrerrrrrs zCookieJar.__str__)N)NNN)!rrrrscompilerArBZstrict_domain_reZ domain_reZdots_reASCIImagic_rerr8r:r<rDrErPrUrVrWr]r_r^rarRrbrHrcrdrrrrrrrs6      ;!a\  6   c@s eZdZdS)rN)rrrrrrrrsc@s4eZdZd ddZd ddZd ddZdd d ZdS)rNFc CsJtj|||dk r6y |dWntdYnX||_t||_dS)NrVzfilename must be string-like)rrrLfilenamerQ delayload)rrirjr7rrrrs  zFileCookieJar.__init__cCs tdS)N)r)rriignore_discardignore_expiresrrrsaveszFileCookieJar.savec CsJ|dkr"|jdk r|j}nttt|}|j||||WdQRXdS)N)rirLMISSING_FILENAME_TEXTopen _really_load)rrirkrlrrrrloads   zFileCookieJar.loadcCs|dkr"|jdk r|j}ntt|jjzFtj|j}i|_y|j|||Wnt k rn||_YnXWd|jj XdS)N) rirLrnr5rFrdeepcopyr6rqOSErrorrG)rrirkrlZ old_staterrrreverts    zFileCookieJar.revert)NFN)NFF)NFF)NFF)rrrrrmrqrtrrrrrs    cCs$|j|jfd|jfd|jfg}|jdk r8|jd|jf|jrH|jd |jrX|jd|jrh|jd|j rx|jd|j r|jdt t |j f|j r|jd|jr|jd |jf|jr|jd |jft|jj}x$|D]}|j|t|j|fqW|jd t|jft|gS)Nrrr path_spec port_spec domain_dotrrrrrJr)ruN)rvN)rwN)rN)rN)rwrxrrrrqrrrrrr5rZrrrr'rr(rrr)rrr(r~rrrlwp_cookie_strs6         rxc@s(eZdZd ddZd ddZdd ZdS) rTcCs\tj}g}x>|D]6}| r$|jr$q| r6|j|r6q|jdt|qWdj|dgS)NzSet-Cookie3: %s rV)rMrrrqrxr{)rrkrlrrerrrr as_lwp_strGs  zLWPCookieJar.as_lwp_strNFc CsX|dkr"|jdk r|j}nttt|d"}|jd|j|j||WdQRXdS)Nwz#LWP-Cookies-2.0 
)rirLrnrowriterz)rrirkrlrrrrrmWs   zLWPCookieJar.savecCsL|j}|jj|s$d|}t|tj}d}d} d} yʐx|j} | dkrRP| j|s^q@| t|dj} xt| gD]x} | d\} }i}i}x| D] }d||<qWx| ddD]t\}}|dk r|j }nd}|| ks|| kr|}|| kr|dkr d}|||<q|| kr*|||<q|||<qW|j }|d }|d}|dk r^t |}|dkrld}|d }|jd}t |d| ||d |d|||d|d |d|d|||d |d|}| r|j rq| r|j|rq|j|qWq@WWnBtk rYn,tk rFttd|| fYnXdS)Nz5%r does not look like a Set-Cookie3 (LWP) format filez Set-Cookie3:rvrurwrrrrrrrrrJrVr FrTrz&invalid Set-Cookie3 format file %r: %r)rvrurwrr)rrrrrrrJ)readlinerhr@rrMrrrrryrKrrdrrrr^rsrZr)rrrirkrlmagicrrheaderrMrNlinedatarwrxrOrr~rrrrrrrr`rrrrpcs                   zLWPCookieJar._really_load)TT)NFF)rrrrzrmrprrrrr:s  c@s,eZdZejdZdZddZd ddZdS) rz#( Netscape)? HTTP Cookie Filezr# Netscape HTTP Cookie File # http://curl.haxx.se/rfc/cookie_spec.html # This is a generated file! Do not edit. cCsntj}|j}|jj|s(td|yx|j}|dkr@P|jdrV|dd}|jjds.|jdkrrq.|jd\}} } } } } }| dk} | dk} | dkr|} d}|jd }d }| dkrd} d }t d | |dd || || d | | |ddi}| r|j rq.| r|j |rq.|j |q.WWnBt k r>Yn,tk rhttd ||fYnXdS)Nz4%r does not look like a Netscape format cookies filerVryr#r TRUErFTr z+invalid Netscape format cookies file %r: %rr)rr)rMr}rhr@rrrrrrrrrr^rsrZr)rrrirkrlrr~rrrrrrrwrxrrr`rrrrps`    zMozillaCookieJar._really_loadNFc Cs|dkr"|jdk r|j}nttt|d}|j|jtj}x|D]}| rZ|jrZqH| rl|j|rlqH|j rxd}nd}|j j drd}nd}|j dk rt |j } nd} |jdkrd} |j} n |j} |j} |jdj|j ||j|| | | gdqHWWdQRXdS)Nr{rZFALSErrVrry)rirLrnror|rrMrrrrrrrrrxrwr{r) rrirkrlrrrrrrrwrxrrrrms<          zMozillaCookieJar.save)NFF) rrrrsrfrhrrprmrrrrrs A)N)N)X__all__rr-rsrMZ urllib.parserZurllib.requestZ threadingr3 ImportErrorZdummy_threadingZ http.clientZhttpZcalendarr r r rrZclientZ HTTP_PORTrrnrr%r,r6r7rIr(rqrKr5r8r>rfrgr?rFrSrXIr\Xr^rarbrdrjrlrmrornryrzrrrrrrrrrrrrrrrrrrrrrrr*r-r1rrsrrrxrrrrrrs        88!    
U D'    #b!\:x__pycache__/cookiejar.cpython-36.pyc000064400000151360147204456360013371 0ustar003 fr+@s*dZddddddddgZd d lZd d lZd d lZd d lZd d lZd d lZy d d l Z Wne k rpd d l Z YnXd d l Zd d lmZd Zd ad dZeejjZdZddZdZddZdddddddgZddddd d!d"d#d$d%d&d'g ZgZxeD]Zej ej!qWdud(d)Z"dvd*d+Z#d d d d d,Z$ej%d-ej&Z'd.d/Z(d0d1Z)ej%d2ej&Z*ej%d3ej+ej&BZ,ej%d4ej-ej&BZ.d5d6Z/ej%d7ej-ej&BZ0d8d9Z1d:d;Z2ej%d<Z3ej%d=Z4ej%d>Z5ej%d?Z6d@dAZ7ej%dBZ8dCdDZ9dEdFZ:dGdHZ;ej%dIej&ZdNdOZ?dPdQZ@ej%dRej&ZAdSdTZBdUdVZCdWdXZDdYdZZEd[ZFej%d\ZGd]d^ZHd_d`ZIdadbZJdcddZKGdeddZLGdfddZMGdgddeMZNdhdiZOdjdkZPGdldmdmZQGdnddZRGdoddeSZTGdpddeRZUdqdrZVGdsddeUZWGdtddeUZXd S)waHTTP cookie handling for web clients. This module has (now fairly distant) origins in Gisle Aas' Perl module HTTP::Cookies, from the libwww-perl library. Docstrings, comments and debug strings in this code refer to the attributes of the HTTP cookie system as cookie-attributes, to distinguish them clearly from Python attributes. Class diagram (note that BSDDBCookieJar and the MSIE* classes are not distributed with the Python standard library, but are available from http://wwwsearch.sf.net/): CookieJar____ / \ \ FileCookieJar \ \ / | \ \ \ MozillaCookieJar | LWPCookieJar \ \ | | \ | ---MSIEBase | \ | / | | \ | / MSIEDBCookieJar BSDDBCookieJar |/ MSIECookieJar Cookie CookieJar CookiePolicyDefaultCookiePolicy FileCookieJar LWPCookieJar LoadErrorMozillaCookieJarN)timegmFcGs(tsdStsddl}|jdatj|S)Nr zhttp.cookiejar)debugloggerloggingZ getLogger)argsr r&/usr/lib64/python3.6/http/cookiejar.py_debug.s  rzQa filename was not supplied (nor was the CookieJar instance initialised with one)cCsJddl}ddl}ddl}|j}|jd||j}|jd|dddS)Nr zhttp.cookiejar bug! 
%s) stacklevel)iowarnings tracebackStringIO print_excgetvaluewarn)rrrfmsgrrr_warn_unhandled_exception<s  ricCs|dd\}}}}}}|tkrd|ko2dknrd|koJdknrd|kobdknrd|kozdknrd|kodknrt|SdSdS) N r ;=) EPOCH_YEARr )ttyearmonthZmdayhourminsecrrr_timegmKs 8Hr,ZMonZTueZWedZThuZFriZSatZSunZJanZFebZMarZAprZMayZJunZJulZAugZSepZOctZNovZDeccCs@|dkrtjj}n tjj|}d|j|j|j|j|j|jfS)aHReturn a string representing time in seconds since epoch, t. If the function is called without an argument, it will use the current time. The format of the returned string is like "YYYY-MM-DD hh:mm:ssZ", representing Universal Time (UTC, aka GMT). An example of this format is: 1994-11-24 08:49:37Z Nz%04d-%02d-%02d %02d:%02d:%02dZ) datetimeutcnowutcfromtimestampr'r(dayr)minutesecond)tdtrrr time2isozYs   r5cCsR|dkrtjj}n tjj|}dt|j|jt|jd|j|j |j |j fS)zReturn a string representing time in seconds since epoch, t. If the function is called without an argument, it will use the current time. The format of the returned string is like this: Wed, DD-Mon-YYYY HH:MM:SS GMT Nz#%s, %02d-%s-%04d %02d:%02d:%02d GMTr) r-r.r/DAYSZweekdayr0MONTHSr(r'r)r1r2)r3r4rrr time2netscapels   r8)ZGMTUTCZUTZz^([-+])?(\d\d?):?(\d\d)?$cCsjd}|tkrd}nTtj|}|rfdt|jd}|jdrR|dt|jd}|jddkrf| }|S)Nr ir<r-) UTC_ZONES TIMEZONE_REsearchintgroup)tzoffsetmrrroffset_from_tz_strings  rFc Cst|}|tjkrdSytj|jd}WnXtk ry t|}Wntk r\dSXd|kopdknr||}ndSYnX|dkrd}|dkrd}|dkrd}t|}t|}t|}t|}|dkr0tjtjd}|d} |} ||| }| | } t | dkr0| dkr(|d}n|d}t |||||||f} | dk r|dkr^d}|j }t |} | dkr|dS| | } | S)Nrr r id2r9) rAr-ZMAXYEAR MONTHS_LOWERindexlower ValueErrortimeZ localtimeabsr,upperrF) r0monyrhrr*r+rCZimonZcur_yrrEZtmpr3rDrrr _str2timesV         rSzV^[SMTWF][a-z][a-z], (\d\d) ([JFMASOND][a-z][a-z]) (\d\d\d\d) (\d\d):(\d\d):(\d\d) GMT$z+^(?:Sun|Mon|Tue|Wed|Thu|Fri|Sat)[a-z]*,?\s*a^ (\d\d?) # day (?:\s+|[-\/]) (\w+) # month (?:\s+|[-\/]) (\d+) # year (?: (?:\s+|:) # separator before clock (\d\d?):(\d\d) # hour:min (?::(\d\d))? # optional seconds )? 
# optional clock \s* ([-+]?\d{2,4}|(?![APap][Mm]\b)[A-Za-z]+)? # timezone \s* (?:\(\w+\))? # ASCII representation of timezone in parens. \s*$c Cstj|}|rl|j}tj|djd}t|d|t|dt|dt|dt|df}t|S|j }t j d|d}dgd \}}}}}} } t j|}|dk r|j\}}}}}} } ndSt |||||| | S) aReturns time in seconds since epoch of time represented by a string. Return value is an integer. None is returned if the format of str is unrecognized, the time is outside the representable range, or the timezone string is not recognized. If the string contains no timezone, UTC is assumed. The timezone in the string may be numerical (like "-0800" or "+0100") or a string timezone (like "UTC", "GMT", "BST" or "EST"). Currently, only the timezone strings equivalent to UTC (zero offset) are known to the function. The function loosely parses the following formats: Wed, 09 Feb 1994 22:23:32 GMT -- HTTP format Tuesday, 08-Feb-94 14:15:29 GMT -- old rfc850 HTTP format Tuesday, 08-Feb-1994 14:15:29 GMT -- broken rfc850 HTTP format 09 Feb 1994 22:23:32 GMT -- HTTP format (no weekday) 08-Feb-94 14:15:29 GMT -- rfc850 format (no weekday) 08-Feb-1994 14:15:29 GMT -- broken rfc850 format (no weekday) The parser ignores leading and trailing whitespace. The time may be absent. If the year is given with only 2 digits, the function will select the century that makes the year closest to the current date. rrr r;N)STRICT_DATE_REr@groupsrIrJrKrAfloatr,lstrip WEEKDAY_REsubLOOSE_HTTP_DATE_RErS) textrEgrPr&r0rQrRr*r+rCrrr http2times " raa^ (\d{4}) # year [-\/]? (\d\d?) # numerical month [-\/]? (\d\d?) # day (?: (?:\s+|[-:Tt]) # separator before clock (\d\d?):?(\d\d) # hour:min (?::?(\d\d(?:\.\d*)?))? # optional seconds (and fractional) )? # optional clock \s* ([-+]?\d\d?:?(:?\d\d)? |Z|z)? # timezone (Z is "zero meridian", i.e. 
GMT) \s*$c Csd|j}dgd\}}}}}}}tj|}|dk rL|j\}}}}}}}} ndSt|||||||S)av As for http2time, but parses the ISO 8601 formats: 1994-02-03 14:15:29 -0100 -- ISO 8601 format 1994-02-03 14:15:29 -- zone is optional 1994-02-03 -- only date 1994-02-03T14:15:29 -- Use T as separator 19940203T141529Z -- ISO 8601 compact format 19940203 -- only date NrW)r[ ISO_DATE_REr@rYrS) r_r0rPrQrRr*r+rCrE_rrriso2time's  rdcCs*|jd\}}|jd||j|dS)z)Return unmatched part of re.Match object.r N)spanstring)matchstartendrrr unmatchedHsrjz^\s*([^=\s;,]+)z&^\s*=\s*\"([^\"\\]*(?:\\.[^\"\\]*)*)\"z^\s*=\s*([^\s;,]*)z\\(.)c Cs<t|t stg}x |D]}|}g}x|r$tj|}|rt|}|jd}tj|}|rt|}|jd}tj d|}n.t j|}|rt|}|jd}|j }nd}|j ||fq,|j jdr|j dd}|r|j |g}q,tjdd|\}} | dkstd|||f|}q,W|r|j |qW|S) amParse header values into a list of lists containing key,value pairs. The function knows how to deal with ",", ";" and "=" as well as quoted values after "=". A list of space separated tokens are parsed as if they were separated by ";". If the header_values passed as argument contains multiple values, then they are treated as if they were a single value separated by comma ",". This means that this function is useful for parsing header fields that follow this syntax (BNF as from the HTTP/1.1 specification, but we relax the requirement for tokens). headers = #header header = (token | parameter) *( [";"] (token | parameter)) token = 1* separators = "(" | ")" | "<" | ">" | "@" | "," | ";" | ":" | "\" | <"> | "/" | "[" | "]" | "?" | "=" | "{" | "}" | SP | HT quoted-string = ( <"> *(qdtext | quoted-pair ) <"> ) qdtext = > quoted-pair = "\" CHAR parameter = attribute "=" value attribute = token value = token | quoted-string Each header is represented by a list of key/value pairs. The value for a simple token (not part of a parameter) is None. Syntactically incorrect headers will not necessarily be parsed as you would want. 
This is easier to describe with some examples: >>> split_header_words(['foo="bar"; port="80,81"; discard, bar=baz']) [[('foo', 'bar'), ('port', '80,81'), ('discard', None)], [('bar', 'baz')]] >>> split_header_words(['text/html; charset="iso-8859-1"']) [[('text/html', None), ('charset', 'iso-8859-1')]] >>> split_header_words([r'Basic realm="\"foo\bar\""']) [[('Basic', None), ('realm', '"foobar"')]] rz\1N,z^[=\s;]*rVr z&split_header_words bug: '%s', '%s', %s) isinstancestrAssertionErrorHEADER_TOKEN_REr@rjrBHEADER_QUOTED_VALUE_REHEADER_ESCAPE_REr]HEADER_VALUE_RErstripappendr[ startswithresubn) Z header_valuesresultr_Z orig_textpairsrEnamevalueZnon_junkZ nr_junk_charsrrrsplit_header_wordsQsF-         r|z([\"\\])cCsg}xt|D]l}g}xN|D]F\}}|dk rTtjd|sHtjd|}d|}d||f}|j|qW|r |jdj|q Wdj|S)aDo the inverse (almost) of the conversion done by split_header_words. Takes a list of lists of (key, value) pairs and produces a single header value. Attribute values are quoted if needed. >>> join_header_words([[("text/plain", None), ("charset", "iso-8859-1")]]) 'text/plain; charset="iso-8859-1"' >>> join_header_words([[("text/plain", None)], [("charset", "iso-8859-1")]]) 'text/plain, charset="iso-8859-1"' Nz^\w+$z\\\1z"%s"z%s=%sz; z, )rvr@HEADER_JOIN_ESCAPE_REr]rtjoin)Zlistsheadersryattrkvrrrjoin_header_wordss     rcCs0|jdr|dd}|jdr,|dd}|S)N"r)ruendswith)r_rrr strip_quotess     rc Csd}g}x|D]}g}d}xt|jd D]\}}|j}|jd \}} } |j}|sd|d kr*Pnq*| rp| jnd } |d kr|j} | |kr| }|dkr| d k rt| } d }n|dkr| d k rtt| } |j|| fq*W|r|s|jd|j|qW|S)a5Ad-hoc parser for Netscape protocol cookie-attributes. The old Netscape cookie format for Set-Cookie can for instance contain an unquoted "," in the expires field, so we have to use this ad-hoc parser instead of split_header_words. XXX This may not make the best possible effort to parse all the crap that Netscape Cookie headers contain. Ronald Tschalar's HTTPClient parser is probably better, so could do worse than following that if this ever gives any trouble. 
Currently, this is also used for parsing RFC 2109 cookies. expiresdomainpathsecureversionportmax-ageF;=r NT0)rrrrrrr)rr) enumeratesplitstrip partitionrKrrart) Z ns_headersZ known_attrsrxZ ns_headerry version_setZiiZparamkeysepvallcrrrparse_ns_headerss@   rz\.\d+$cCs:tj|rdS|dkrdS|ddks2|ddkr6dSdS)z*Return True if text is a host domain name.FrVr .rTr)IPV4_REr@)r_rrris_HDN s rcCsl|j}|j}||krdSt|s(dS|j|}|dksB|dkrFdS|jdsTdSt|ddshdSdS)aReturn True if domain A domain-matches domain B, according to RFC 2965. A and B may be host domain names or IP addresses. RFC 2965, section 1: Host names can be specified either as an IP address or a HDN string. Sometimes we compare one host name with another. (Such comparisons SHALL be case-insensitive.) Host A's name domain-matches host B's if * their host name strings string-compare equal; or * A is a HDN string and has the form NB, where N is a non-empty name string, B has the form .B', and B' is a HDN string. (So, x.y.com domain-matches .Y.com but not Y.com.) Note that domain-match is not a commutative operation: a.b.c.com domain-matches .c.com, but not the reverse. TFrr rNr)rKrrfindru)ABirrr domain_matchs  rcCstj|rdSdS)zdReturn True if text is a sort-of-like a host domain name. For accepting/blocking domains. FT)rr@)r_rrrliberal_is_HDNBs rcCsb|j}|j}t|ot|s0||kr,dSdS|jd}|rL|j|rLdS| r^||kr^dSdS)z\For blocking/accepting domains. A and B may be host domain names or IP addresses. TFr)rKrrur)rr initial_dotrrruser_domain_matchLs rz:\d+$cCsB|j}tjj|d}|dkr,|jdd}tjd|d}|jS)zReturn request-host, as defined by RFC 2965. Variation from RFC: returned value is lowercased, for convenient comparison. rrVZHost) get_full_urlurllibparseZurlparseZ get_header cut_port_rer]rK)requesturlhostrrr request_hostas  rcCs6t|}}|jddkr.tj| r.|d}||fS)zzReturn a tuple (request-host, effective request-host name). As defined by RFC 2965, except both are lowercased. 
rrz.localr)rfindrr@)rerhnreq_hostrrreff_request_hostqs rcCs4|j}tjj|}t|j}|jds0d|}|S)z6Path component of request-URI, as defined by RFC 2965./)rrrZurlsplit escape_pathrru)rrpartsrrrr request_path|s    rc Cs^|j}|jd}|dkrV||dd}y t|WqZtk rRtd|dSXnt}|S)N:r rznonnumeric port: '%s')rrrArLrDEFAULT_HTTP_PORT)rrrrrrr request_ports   rz%/;:@&=+$,!~*'()z%([0-9a-fA-F][0-9a-fA-F])cCsd|jdjS)Nz%%%sr)rBrO)rgrrruppercase_escaped_charsrcCstjj|t}tjt|}|S)zEEscape any invalid characters in HTTP URL, and uppercase all escapes.)rrZquoteHTTP_PATH_SAFEESCAPED_CHAR_REr]r)rrrrrs  rcCsP|jd}|dkrL||dd}|jd}t|rL|dksD|dkrLd|S|S)aBReturn reach of host h, as defined by RFC 2965, section 1. The reach R of a host name H is defined as follows: * If - H is the host domain name of a host; and, - H has the form A.B; and - A has no embedded (that is, interior) dots; and - B has at least one embedded dot, or B is the string "local". then the reach of H is .B. * Otherwise, the reach of H is H. >>> reach("www.acme.com") '.acme.com' >>> reach("acme.com") 'acme.com' >>> reach("acme.local") '.local' rr rNZlocal)rr)hrbrrrreachs  rcCs$t|}t|t|jsdSdSdS)z RFC 2965, section 3.3.6: An unverifiable transaction is to a third-party host if its request- host U does not domain-match the reach R of the request-host O in the origin transaction. TFN)rrrZorigin_req_host)rrrrris_third_partys rc@sNeZdZdZdddZddZddd Zd d Zdd d ZddZ ddZ dS)raHTTP Cookie. This class represents both Netscape and RFC 2965 cookies. This is deliberately a very simple class. It just holds attributes. It's possible to construct Cookie instances that don't comply with the cookie standards. CookieJar.make_cookies is the factory function for Cookie objects -- it deals with cookie parsing, supplying defaults, and normalising to the representation used in this class. CookiePolicy is responsible for checking them to see whether they should be accepted from and returned to the server. 
Note that the port may be present in the headers, but unspecified ("Port" rather than"Port=80", for example); if this is the case, port is None. FcCs|dk rt|}| dk r$tt| } |dkr<|dkr)rrrr{rz)rplimitZ namevaluerrr__str__%s   zCookie.__str__cCspg}x,dD]$}t||}|jd|t|fq W|jdt|j|jdt|jd|jjdj|fS)Nrrzr{rrrrrrrrrrrrz%s=%szrest=%sz rfc2109=%sz%s(%s)z, )rrzr{rrrrrrrrrrrr)getattrrtreprrr __class____name__r~)rrrzrrrr__repr__/s zCookie.__repr__)F)N)N) r __module__ __qualname____doc__rrrrrrrrrrrrs    c@s0eZdZdZddZddZddZdd Zd S) ra Defines which cookies get accepted from and returned to server. May also modify cookies, though this is probably a bad idea. The subclass DefaultCookiePolicy defines the standard rules for Netscape and RFC 2965 cookies -- override that if you want a customized policy. cCs tdS)zReturn true if (and only if) cookie should be accepted from server. Currently, pre-expired cookies never get this far -- the CookieJar class deletes such cookies itself. N)NotImplementedError)rcookierrrrset_okGszCookiePolicy.set_okcCs tdS)zAReturn true if (and only if) cookie should be returned to server.N)r)rrrrrr return_okPszCookiePolicy.return_okcCsdS)zMReturn false if cookies should not be returned, given cookie domain. Tr)rrrrrrdomain_return_okTszCookiePolicy.domain_return_okcCsdS)zKReturn false if cookies should not be returned, given cookie path. 
Tr)rrrrrrpath_return_okYszCookiePolicy.path_return_okN)rrrrrrrrrrrrr>s  c @seZdZdZdZdZdZdZeeBZdddddddddeddf d d Z d d Z d dZ ddZ ddZ ddZddZddZddZddZddZdd Zd!d"Zd#d$Zd%d&Zd'd(Zd)d*Zd+d,Zd-d.Zd/d0Zd1d2Zd3d4Zd5d6ZdS)7rzBImplements the standard rules for accepting and returning cookies.rrrTr NTFc Csp||_||_||_||_||_||_| |_| |_| |_| |_ |dk rPt ||_ nf|_ |dk rft |}||_ dS)zAConstructor arguments should be passed as keyword arguments only.N) netscaperfc2965rfc2109_as_netscape hide_cookie2 strict_domainstrict_rfc2965_unverifiablestrict_ns_unverifiablestrict_ns_domainstrict_ns_set_initial_dollarstrict_ns_set_pathtuple_blocked_domains_allowed_domains) rblocked_domainsallowed_domainsrrrrrrrrrrrrrris  zDefaultCookiePolicy.__init__cCs|jS)z4Return the sequence of blocked domains (as a tuple).)r)rrrrrsz#DefaultCookiePolicy.blocked_domainscCst||_dS)z$Set the sequence of blocked domains.N)rr)rrrrrset_blocked_domainssz'DefaultCookiePolicy.set_blocked_domainscCs"x|jD]}t||rdSqWdS)NTF)rr)rrZblocked_domainrrr is_blockeds  zDefaultCookiePolicy.is_blockedcCs|jS)z=Return None, or the sequence of allowed domains (as a tuple).)r)rrrrrsz#DefaultCookiePolicy.allowed_domainscCs|dk rt|}||_dS)z-Set the sequence of allowed domains, or None.N)rr)rrrrrset_allowed_domainssz'DefaultCookiePolicy.set_allowed_domainscCs0|jdkrdSx|jD]}t||rdSqWdS)NFT)rr)rrZallowed_domainrrris_not_alloweds    z"DefaultCookiePolicy.is_not_allowedcCsPtd|j|j|jdk stx,d D]$}d |}t||}|||s$d Sq$Wd S) z If you override .set_ok(), be sure to call this method. If it returns false, so should your subclass (assuming your subclass wants to be more strict about which cookies to accept). 
z - checking cookie %s=%sNr verifiabilityrzrrrZset_ok_FT)rrrzrrr)rrzr{rnr)rrrnfn_namefnrrrrs   zDefaultCookiePolicy.set_okcCs^|jdkrtd|j|jdS|jdkr<|j r third-party RFC 2965 cookie during unverifiable transactionFz> third-party Netscape cookie during unverifiable transactionT) unverifiablerrrrr)rrrrrrset_ok_verifiabilitysz(DefaultCookiePolicy.set_ok_verifiabilitycCs0|jdkr,|jr,|jjdr,td|jdSdS)Nr $z' illegal name (starts with '$'): '%s'FT)rrrzrur)rrrrrr set_ok_names   zDefaultCookiePolicy.set_ok_namecCsL|jrHt|}|jdks(|jdkrH|jrH|j|j rHtd|j|dSdS)Nr z7 path attribute %s is not a prefix of request path %sFT)rrrrrurr)rrrreq_pathrrr set_ok_paths  zDefaultCookiePolicy.set_ok_pathc Cs|j|jrtd|jdS|j|jr8td|jdS|jrt|\}}|j}|jr|jddkr|jd}|jdd|}|dkr||dd}||d|} | j d$krt |dkrtd|dS|j dr|dd} n|} | j ddk} | r|dkrtd|dS|j dkrb|j| rb|j d rbd|j| rbtd ||dS|j dks||j|j@rt||std!||dS|j dks|j|j@r|dt | } | j ddkrtj| rtd"| |dSd#S)%Nz" domain %s is in user block-listFz& domain %s is not in user allow-listrrr rcoaccomeduorgnetgovmilrAaerobizcatcoopinfojobsmobimuseumrzprotraveleuz& country-code second level domain %sz.localz/ non-local domain %s contains no embedded dotzO effective request-host %s (even with added initial dot) does not end with %sz5 effective request-host %s does not domain-match %sz. host prefix %s for domain %s contains a dotT)rrrrrr r r rAr r rrrrrrrzrrr)rrrrrrrcountrrKlenrurrrrDomainRFC2965MatchrDomainStrictNoDotsrr@) rrrrrrrjZtldZsldZundotted_domainZ embedded_dotsZ host_prefixrrr set_ok_domainsf            z!DefaultCookiePolicy.set_ok_domainc Cs|jrt|}|dkrd}nt|}x\|jjdD]:}y t|Wntk r`td|dSX||kr2Pq2Wtd||jdSdS)N80rkz bad port %s (not numeric)Fz$ request port (%s) not found in %sT)rrrmrrrArLr)rrrreq_portrrrr set_ok_port%s"   zDefaultCookiePolicy.set_ok_portcCsBtd|j|jx,d D]$}d|}t||}|||sd SqWd S) z If you override .return_ok(), be sure to call this method. If it returns false, so should your subclass (assuming your subclass wants to be more strict about which cookies to return). 
z - checking cookie %s=%srrrrrrZ return_ok_FT)rrrrrr)rrzr{r)rrrrrrrrrr:s    zDefaultCookiePolicy.return_okcCs@|jdkr|j rtddS|jdkr<|j r third-party RFC 2965 cookie during unverifiable transactionFz> third-party Netscape cookie during unverifiable transactionT)rrrrrr)rrrrrrreturn_ok_verifiabilityUsz+DefaultCookiePolicy.return_ok_verifiabilitycCs |jr|jdkrtddSdS)NZhttpsz( secure cookie with non-secure requestFT)rtyper)rrrrrrreturn_ok_secureasz$DefaultCookiePolicy.return_ok_securecCs|j|jrtddSdS)Nz cookie expiredFT)r_nowr)rrrrrrreturn_ok_expiresgs z%DefaultCookiePolicy.return_ok_expirescCsP|jrLt|}|dkrd}x0|jjdD]}||kr(Pq(Wtd||jdSdS)Nrrkz0 request port %s does not match cookie port %sFT)rrrr)rrrrrrrrreturn_ok_portms z"DefaultCookiePolicy.return_ok_portcCst|\}}|j}|r,|jd r,d|}n|}|jdkrb|j|j@rb|j rb||krbtddS|jdkrt|| rtd||dS|jdkrd|j | rtd||dSdS)Nrr zQ cookie with unspecified domain does not string-compare equal to request domainFzQ effective request-host name %s does not domain-match RFC 2965 cookie domain %sz; request-host %s does not match Netscape cookie domain %sT) rrrurrDomainStrictNonDomainrrrr)rrrrrr dotdomainrrrreturn_ok_domain{s&    z$DefaultCookiePolicy.return_ok_domaincCst|\}}|jdsd|}|jds0d|}|rJ|jd rJd|}n|}|j|p`|j|sfdS|j|r~td|dS|j|rtd|dSdS)NrFz" domain %s is in user block-listz& domain %s is not in user allow-listT)rrurrrr)rrrrrr(rrrrs"        z$DefaultCookiePolicy.domain_return_okcCs0td|t|}|j|s,td||dSdS)Nz- checking cookie path=%sz %s does not path-match %sFT)rrru)rrrrrrrrs    z"DefaultCookiePolicy.path_return_ok) rrrrrr'rZ DomainLiberalZ DomainStrictrrrrrrrrrrrrrrrr r!r#r%r&r)rrrrrrr_sL    ;  cCst|j}t|j|S)N)sortedkeysmapr)Zadictr+rrrvals_sorted_by_keys r-c csZt|}xL|D]D}d}y |jWntk r4YnXd}t|EdH|s|VqWdS)zBIterates over nested mapping, depth-first, in sorted order by key.FTN)r-itemsAttributeError deepvalues)mappingvaluesobjrrrr0s  r0c@s eZdZdS)AbsentN)rrrrrrrr4sr4c@seZdZdZejdZejdZejdZejdZ ejdZ ejdej Z d3d d Z d d Zd dZddZddZddZddZddZddZddZddZdd 
Zd!d"Zd#d$Zd4d%d&Zd'd(Zd)d*Zd+d,Zd-d.Zd/d0Z d1d2Z!dS)5rzCollection of HTTP cookies. You may not need to know about this class: try urllib.request.build_opener(HTTPCookieProcessor).open(url). z\Wz([\"\\])z\.?[^.]*z[^.]*z^\.+z^\#LWP-Cookies-(\d+\.\d+)NcCs(|dkrt}||_tj|_i|_dS)N)r_policy _threadingRLock _cookies_lock_cookies)rpolicyrrrrs  zCookieJar.__init__cCs ||_dS)N)r5)rr:rrr set_policyszCookieJar.set_policycCsg}|jj||sgStd||j|}xd|jD]X}|jj||sHq4||}x:|jD].}|jj||svtdqZtd|j|qZWq4W|S)Nz!Checking %s for cookies to returnz not returning cookiez it's a match) r5rrr9r+rr2rrt)rrrcookiesZcookies_by_pathrZcookies_by_namerrrr_cookies_for_domains   zCookieJar._cookies_for_domaincCs.g}x$|jjD]}|j|j||qW|S)z2Return a list of cookies to be returned to server.)r9r+extendr=)rrr<rrrr_cookies_for_requestszCookieJar._cookies_for_requestc CsF|jddddd}g}x$|D]}|j}|sLd}|dkrL|jd||jdk r~|jj|jr~|dkr~|jjd |j}n|j}|jdkr|j|jn|jd |j|f|dkr"|j r|jd |j |j j d r|j }|j o|j d r|d d}|jd||jdk r"d}|jr4|d|j}|j|q"W|S)zReturn a list of cookie-attributes to be returned to server. like ['foo="bar"; $Path="/"', ...] The $Version attribute is also added when appropriate (currently only once per request). cSs t|jS)N)rr)arrrsz)CookieJar._cookie_attrs..T)rreverseFr z $Version=%sNz\\\1z%s=%sz $Path="%s"rrz $Domain="%s"z$Portz="%s")sortrrtr{ non_word_rer@quote_rer]rzrrrrurrr) rr<rattrsrrr{rrrrr _cookie_attrss>      zCookieJar._cookie_attrsc Cstd|jjzttj|j_|_|j|}|j|}|r^|j ds^|j ddj ||jj r|jj r|j d rx$|D]}|jdkr|j ddPqWWd|jjX|jdS)zAdd correct Cookie: header to request (urllib.request.Request object). The Cookie2 header is also added unless policy.hide_cookie2 is true. 
add_cookie_headerrz; ZCookie2rz $Version="1"N)rr8acquirerArMr5r$r?rGZ has_headerZadd_unredirected_headerr~rrrreleaseclear_expired_cookies)rrr<rFrrrrrH?s$          zCookieJar.add_cookie_headercCsg}d}d}x||D]r}|d \}}d }d } i} i} x4|d dD]"\} } | j}||ksh||krl|} | |kr| dkrd} | | krqF| dkr| dkrtdd} P| j} | dkr|rqF| dkrtdqF| dkrd}y t| } Wn$tk rtdd} PYnXd} |j| } | |ks2| |krb| dkrX| dkrXtd| d} P| | | <qF| | | <qFW| rvq|j||| | fqW|S)aReturn list of tuples containing normalised cookie information. attrs_set is the list of lists of key,value pairs extracted from the Set-Cookie or Set-Cookie2 headers. Tuples are name, value, standard, rest, where name and value are the cookie name and value, standard is a dictionary containing the standard cookie-attributes (discard, secure, version, expires or max-age, domain, path and port) and rest is a dictionary containing the rest of the cookie-attributes. rrrrmax-agerrrr commenturlr FrNTz% missing value for domain attributezM missing or invalid value for expires attribute: treating as session cookiez? missing or invalid (non-numeric) value for max-age attributez! 
missing value for %s attribute)rr)rrrLrrrrrM)rrrM)rKrrArLr$rt)r attrs_set cookie_tuples boolean_attrs value_attrsZ cookie_attrsrzr{Z max_age_setZ bad_cookiestandardrrrrrrr_normalized_cookie_tuples`sl          z#CookieJar._normalized_cookie_tuplesc!Cs$|\}}}}|jdt}|jdt}|jdt} |jdt} |jdd} | dk rry t| } Wntk rpdSX|jdd} |jdd} |jd d}|jd d}|tk r|d krd }t|}nXd}t|}|jd }|dkr| dkr|d|}n|d|d}t|dkrd }|tk }d}|r8t|j d}|tkrTt |\}}|}n|j dshd|}d}| tk r| dkrt |} nd }t j dd | } nd} | tkrd} d } nH| |jkry|j|||Wntk rYnXtd|||dSt| ||| ||||||| | | |||S)NrrrrrrFrrrMrVTrrr rz\s+z2Expiring cookie, domain='%s', path='%s', name='%s'r)rr4rArLrrrrboolrurrrvr]r$clearKeyErrorrr)rtuprrzr{rRrrrrrrrrrrrrrrrrrrrr_cookie_from_cookie_tuples                       z#CookieJar._cookie_from_cookie_tuplecCs:|j|}g}x&|D]}|j||}|r|j|qW|S)N)rSrXrt)rrNrrOr<rWrrrr_cookies_from_attrs_sets   z!CookieJar._cookies_from_attrs_setcCsLt|jdd}|dkr |jj }x&|D]}|jdkr&d|_|r&d|_q&WdS)NrrTr )rr5rrr)rr<Z rfc2109_as_nsrrrr_process_rfc2109_cookies&s   z"CookieJar._process_rfc2109_cookiesc Cs6|j}|jdg}|jdg}|jj}|jj}| r<| s`| rH| s`| rT| s`| rd| rdgSy|jt||}Wntk rtg}YnX|o|r2y|jt ||} Wntk rtg} YnX|j | |r"i} x |D]} d| | j | j | j f<qW| fdd} t| | } | r2|j| |S)zAReturn sequence of Cookie objects extracted from response object.z Set-Cookie2z Set-CookieNcSs|j|j|jf}||kS)N)rrrz)Z ns_cookielookuprrrrno_matching_rfc2965^sz3CookieJar.make_cookies..no_matching_rfc2965)rZget_allr5rrrYr| ExceptionrrrZrrrzfilterr>) rresponserrZ rfc2965_hdrsZns_hdrsrrr<Z ns_cookiesr[rr\rrr make_cookies2sB              zCookieJar.make_cookiesc CsN|jjz2ttj|j_|_|jj||r:|j|Wd|jjXdS)z-Set a cookie if policy says it's OK to do so.N) r8rIrArMr5r$r set_cookierJ)rrrrrrset_cookie_if_okhs  zCookieJar.set_cookie_if_okc Csl|j}|jjzJ|j|kr&i||j<||j}|j|krDi||j<||j}|||j<Wd|jjXdS)z?Set a cookie, without checking whether or not it should be set.N)r9r8rIrrrzrJ)rrcZc2Zc3rrrraus     zCookieJar.set_cookiec Cs|td|j|jjzRttj|j_|_x6|j||D]&}|jj ||r>td||j |q>WWd|jj XdS)zAExtract cookies from response, 
where allowable given the request.zextract_cookies: %sz setting cookie: %sN) rrr8rIrArMr5r$r`rrarJ)rr_rrrrrextract_cookiess  zCookieJar.extract_cookiescCst|dk r2|dks|dkr td|j|||=n>|dk rX|dkrJtd|j||=n|dk rj|j|=ni|_dS)aClear some cookies. Invoking this method without arguments will clear all cookies. If given a single argument, only cookies belonging to that domain will be removed. If given two arguments, cookies belonging to the specified path within that domain are removed. If given three arguments, then the cookie with the specified name, path and domain is removed. Raises KeyError if no matching cookie exists. Nz8domain and path must be given to remove a cookie by namez.domain must be given to remove cookies by path)rLr9)rrrrzrrrrUs  zCookieJar.clearc CsH|jjz,x&|D]}|jr|j|j|j|jqWWd|jjXdS)zDiscard all session cookies. Note that the .save() method won't save session cookies anyway, unless you ask otherwise by passing a true ignore_discard argument. N)r8rIrrUrrrzrJ)rrrrrclear_session_cookiess   zCookieJar.clear_session_cookiesc CsT|jjz8tj}x*|D]"}|j|r|j|j|j|jqWWd|jjXdS)aDiscard all expired cookies. You probably don't need to call this method: expired cookies are never sent back to the server (provided you're using DefaultCookiePolicy), this method is called by CookieJar itself every so often, and the .save() method won't save expired cookies anyway (unless you ask otherwise by passing a true ignore_expires argument). 
N) r8rIrMrrUrrrzrJ)rrrrrrrKs   zCookieJar.clear_expired_cookiescCs t|jS)N)r0r9)rrrr__iter__szCookieJar.__iter__cCsd}x|D] }|d}q W|S)z#Return number of contained cookies.r rr)rrrrrr__len__s zCookieJar.__len__cCs6g}x|D]}|jt|q Wd|jjdj|fS)Nz<%s[%s]>z, )rtrrrr~)rrrrrrrs zCookieJar.__repr__cCs6g}x|D]}|jt|q Wd|jjdj|fS)Nz<%s[%s]>z, )rtrmrrr~)rrhrrrrrs zCookieJar.__str__)N)NNN)"rrrrrvcompilerDrEZstrict_domain_reZ domain_reZdots_reASCIImagic_rerr;r=r?rGrHrSrXrYrZr`rbrardrUrerKrfrgrrrrrrrs8      ;!a\  6   c@s eZdZdS)rN)rrrrrrrrsc@s8eZdZdZd ddZd ddZddd Zdd d ZdS)rz6CookieJar that can be loaded from and saved to a file.NFc CsJtj|||dk r6y |dWntdYnX||_t||_dS)z} Cookies are NOT loaded from the named file until either the .load() or .revert() method is called. NrVzfilename must be string-like)rrrLfilenamerT delayload)rrlrmr:rrrrs  zFileCookieJar.__init__cCs tdS)zSave cookies to a file.N)r)rrlignore_discardignore_expiresrrrsaveszFileCookieJar.savec CsJ|dkr"|jdk r|j}nttt|}|j||||WdQRXdS)zLoad cookies from a file.N)rlrLMISSING_FILENAME_TEXTopen _really_load)rrlrnrorrrrloads   zFileCookieJar.loadcCs|dkr"|jdk r|j}ntt|jjzFtj|j}i|_y|j|||Wnt k rn||_YnXWd|jj XdS)zClear all cookies and reload cookies from a saved file. Raises LoadError (or OSError) if reversion is not successful; the object's state will not be altered if this happens. N) rlrLrqr8rIrdeepcopyr9rtOSErrorrJ)rrlrnroZ old_staterrrreverts    zFileCookieJar.revert)NFN)NFF)NFF)NFF)rrrrrrprtrwrrrrrs    cCs$|j|jfd|jfd|jfg}|jdk r8|jd|jf|jrH|jd|jrX|jd|jrh|jd|j rx|jd|j r|jd t t |j f|j r|jd|jr|jd |jf|jr|jd |jft|jj}x$|D]}|j|t|j|fqW|jd t|jft|gS)zReturn string representation of Cookie in the LWP cookie file format. Actually, the format is extended a bit -- see module docstring. rrNr path_spec port_spec domain_dotrrrrrMr)rxN)ryN)rzN)rN)rN)rzr{rrrrtrrrrrr5rZrrrr*rr+rmrr)rrr+rrrrlwp_cookie_strs6         r{c@s,eZdZdZd ddZd ddZd d ZdS) ra[ The LWPCookieJar saves a sequence of "Set-Cookie3" lines. 
"Set-Cookie3" is the format used by the libwww-perl library, not known to be compatible with any browser, but which is easy to read and doesn't lose information about RFC 2965 cookies. Additional methods as_lwp_str(ignore_discard=True, ignore_expired=True) TcCs\tj}g}x>|D]6}| r$|jr$q| r6|j|r6q|jdt|qWdj|dgS)zReturn cookies as a string of "\n"-separated "Set-Cookie3" headers. ignore_discard and ignore_expires: see docstring for FileCookieJar.save zSet-Cookie3: %s rV)rMrrrtr{r~)rrnrorrhrrrr as_lwp_strGs  zLWPCookieJar.as_lwp_strNFc CsX|dkr"|jdk r|j}nttt|d"}|jd|j|j||WdQRXdS)Nwz#LWP-Cookies-2.0 )rlrLrqrrwriter})rrlrnrorrrrrpWs   zLWPCookieJar.savecCsL|j}|jj|s$d|}t|tj}d}d} d} yʐx|j} | dkrRP| j|s^q@| t|dj} xt| gD]x} | d\} }i}i}x| D] }d||<qWx| ddD]t\}}|dk r|j }nd}|| ks|| kr|}|| kr|dkr d}|||<q|| kr*|||<q|||<qW|j }|d }|d}|dk r^t |}|dkrld}|d }|jd}t |d| ||d |d|||d|d |d|d|||d |d|}| r|j rq| r|j|rq|j|qWq@WWnBtk rYn,tk rFttd|| fYnXdS)Nz5%r does not look like a Set-Cookie3 (LWP) format filez Set-Cookie3:ryrxrzrrrrrrrrrMrVr FrTrz&invalid Set-Cookie3 format file %r: %r)ryrxrzrr)rrrrrrrM)readlinerkr@rrMrurrr|rKrrdrrrrarvr]r)rrrlrnromagicrrheaderrPrQlinedatarzr{rRrrrrrrrrrrcrrrrscs                   zLWPCookieJar._really_load)TT)NFF)rrrrr}rprsrrrrr:s   c@s0eZdZdZejdZdZddZd dd Z dS) ra WARNING: you may want to backup your browser's cookies file if you use this class to save cookies. I *think* it works, but there have been bugs in the past! This class differs from CookieJar only in the format it uses to save and load cookies to and from a file. This class uses the Mozilla/Netscape `cookies.txt' format. lynx uses this file format, too. Don't expect cookies saved while the browser is running to be noticed by the browser (in fact, Mozilla on unix will overwrite your saved cookies if you change them on disk while it's running; on Windows, you probably can't save at all while the browser is running). 
Note that the Mozilla/Netscape format will downgrade RFC2965 cookies to Netscape cookies on saving. In particular, the cookie version and port number information is lost, together with information about whether or not Path, Port and Discard were specified by the Set-Cookie2 (or Set-Cookie) header, and whether or not the domain as set in the HTTP header started with a dot (yes, I'm aware some domains in Netscape files start with a dot and some don't -- trust me, you really don't want to know any more about this). Note that though Mozilla and Netscape use the same format, they use slightly different headers. The class saves cookies using the Netscape header by default (Mozilla can cope with that). z#( Netscape)? HTTP Cookie Filezr# Netscape HTTP Cookie File # http://curl.haxx.se/rfc/cookie_spec.html # This is a generated file! Do not edit. cCs|tj}|j}|jj|s(td|y x|j}|dkrBP|jdrX|dd}|jjds0|jdkrtq0|jd\}} } } } } }| dk} | dk} | dkr|} d}|jd }| |kst d }| dkrd} d }t d | |dd || || d | | |ddi}| r|j rq0| r$|j |r$q0|j |q0WWnBtk rLYn,tk rvttd ||fYnXdS)Nz4%r does not look like a Netscape format cookies filerVr|r#r TRUErFTr z+invalid Netscape format cookies file %r: %rr)rr)rMrrkr@rrrrurrnrrrrarvr]r)rrrlrnrorrrrrrrrrzr{rrrcrrrrssb     zMozillaCookieJar._really_loadNFc Cs|dkr"|jdk r|j}nttt|d}|j|jtj}x|D]}| rZ|jrZqH| rl|j|rlqH|j rxd}nd}|j j drd}nd}|j dk rt |j } nd} |jdkrd} |j} n |j} |j} |jdj|j ||j|| | | gdqHWWdQRXdS)Nr~rZFALSErrVrr|)rlrLrqrrrrrMrrrrrurrmr{rzr~r) rrlrnrorrrrrrrzr{rrrrps<          zMozillaCookieJar.save)NFF) rrrrrvrirkrrsrprrrrrs  A)N)N)Yr__all__rr-rvrMZ urllib.parserZurllib.requestZ threadingr6 ImportErrorZdummy_threadingZ http.clientZhttpZcalendarr r r rrmZclientZ HTTP_PORTrrqrr%r,r6r7rIr(rtrKr5r8r>rirjr?rFrSrXIr\Xr^rarbrdrjrorprrrqr|r}rrrrrrrrrrrrrrrrrrrrrrr-r0r4rrvrrr{rrrrrrs        88!    
U D'    #b!\:x__pycache__/cookies.cpython-36.opt-1.pyc000064400000037352147204456360014022 0ustar003 \S @s|dZddlZddlZdddgZdjZdjZdjZd d ZGd dde Z ej ej d Z e d ZddeedeeeeDZejeddeddiejdeje jZddZejdZejdZddZddddd d!d"gZdd#d$d%d&d'd(d)d*d+d,d-d.g Zdeefd/d0ZGd1d2d2e Z!d3Z"e"d4Z#ejd5e"d6e#d7ej$ej%BZ&Gd8dde Z'Gd9dde'Z(dS):a. Here's a sample session to show how to use this module. At the moment, this is the only documentation. The Basics ---------- Importing is easy... >>> from http import cookies Most of the time you start by creating a cookie. >>> C = cookies.SimpleCookie() Once you've created your Cookie, you can add values just as if it were a dictionary. >>> C = cookies.SimpleCookie() >>> C["fig"] = "newton" >>> C["sugar"] = "wafer" >>> C.output() 'Set-Cookie: fig=newton\r\nSet-Cookie: sugar=wafer' Notice that the printable representation of a Cookie is the appropriate format for a Set-Cookie: header. This is the default behavior. You can change the header and printed attributes by using the .output() function >>> C = cookies.SimpleCookie() >>> C["rocky"] = "road" >>> C["rocky"]["path"] = "/cookie" >>> print(C.output(header="Cookie:")) Cookie: rocky=road; Path=/cookie >>> print(C.output(attrs=[], header="Cookie:")) Cookie: rocky=road The load() method of a Cookie extracts cookies from a string. In a CGI script, you would use this method to extract the cookies from the HTTP_COOKIE environment variable. >>> C = cookies.SimpleCookie() >>> C.load("chips=ahoy; vienna=finger") >>> C.output() 'Set-Cookie: chips=ahoy\r\nSet-Cookie: vienna=finger' The load() method is darn-tootin smart about identifying cookies within a string. Escaped quotation marks, nested semicolons, and other such trickeries do not confuse it. >>> C = cookies.SimpleCookie() >>> C.load('keebler="E=everybody; L=\\"Loves\\"; fudge=\\012;";') >>> print(C) Set-Cookie: keebler="E=everybody; L=\"Loves\"; fudge=\012;" Each element of the Cookie also supports all of the RFC 2109 Cookie attributes. 
Here's an example which sets the Path attribute. >>> C = cookies.SimpleCookie() >>> C["oreo"] = "doublestuff" >>> C["oreo"]["path"] = "/" >>> print(C) Set-Cookie: oreo=doublestuff; Path=/ Each dictionary element has a 'value' attribute, which gives you back the value associated with the key. >>> C = cookies.SimpleCookie() >>> C["twix"] = "none for you" >>> C["twix"].value 'none for you' The SimpleCookie expects that all values should be standard strings. Just to be sure, SimpleCookie invokes the str() builtin to convert the value to a string, when the values are set dictionary-style. >>> C = cookies.SimpleCookie() >>> C["number"] = 7 >>> C["string"] = "seven" >>> C["number"].value '7' >>> C["string"].value 'seven' >>> C.output() 'Set-Cookie: number=7\r\nSet-Cookie: string=seven' Finis. N CookieError BaseCookie SimpleCookiez;  cCs$ddl}d|}|j|tdddS)NrzvThe .%s setter is deprecated. The attribute will be read-only in future releases. Please use the set() method instead.) stacklevel)warningswarnDeprecationWarning)setterr msgr$/usr/lib64/python3.6/http/cookies.py_warn_deprecated_settersrc@s eZdZdS)rN)__name__ __module__ __qualname__rrrrrsz!#$%&'*+-.^_`|~:z ()/<=>?@[]{}cCsi|]}d||qS)z\%03or).0nrrr sr"z\"\z\\z[%s]+cCs*|dkst|r|Sd|jtdSdS)zQuote a string for use in a cookie header. If the string does not need to be double-quoted, then just return the string. Otherwise, surround the string in doublequotes and quote (with a \) special characters. 
Nr) _is_legal_key translate _Translator)strrrr_quotesrz\\[0-3][0-7][0-7]z[\\].cCsT|dkst|dkr|S|ddks0|ddkr4|S|dd}d}t|}g}xd|kod|knrJtj||}tj||}| r| r|j||dPd }}|r|jd}|r|jd}|o| s||kr |j||||j||d|d}qR|j||||jtt||d|dd|d}qRWt|S) Nrrr#r#) len _OctalPattsearch _QuotePattappendstartchrint _nulljoin)rirresZo_matchZq_matchjkrrr_unquotes6       $ r1ZMonZTueZWedZThuZFriZSatZSunZJanZFebZMarZAprZMayZJunZJulZAugZSepZOctZNovZDecc CsRddlm}m}|}|||\ }}}} } } } } }d|| ||||| | | fS)Nr)gmtimetimez#%s, %02d %3s %4d %02d:%02d:%02d GMT)r3r2)ZfutureZ weekdaynameZ monthnamer2r3ZnowZyearZmonthZdayZhhZmmZssZwdyzrrr_getdates r6c @seZdZdZdddddddd d Zd d hZd dZeddZej ddZeddZ e j ddZ eddZ e j ddZ ddZ d4ddZ ddZejZdd Zd!d"Zd#d$Zefd%d&Zd'd(Zd)d*Zd5d,d-ZeZd.d/Zd6d0d1Zd7d2d3ZdS)8MorselaA class to hold ONE (key, value) pair. In a cookie, each such pair may have several attributes, so this class is used to keep the attributes associated with the appropriate key,value pair. This class also includes a coded_value attribute, which is used to hold the network representation of the value. This is most useful when Python objects are pickled for network transit. 
expiresZPathCommentZDomainzMax-AgeZSecureZHttpOnlyZVersion)r8pathcommentZdomainzmax-agesecurehttponlyversionr<r=cCs4d|_|_|_x|jD]}tj||dqWdS)Nr)_key_value _coded_value _reserveddict __setitem__)selfkeyrrr__init__&s zMorsel.__init__cCs|jS)N)r?)rErrrrF.sz Morsel.keycCstd||_dS)NrF)rr?)rErFrrrrF2scCs|jS)N)r@)rErrrvalue7sz Morsel.valuecCstd||_dS)NrH)rr@)rErHrrrrH;scCs|jS)N)rA)rErrr coded_value@szMorsel.coded_valuecCstd||_dS)NrI)rrA)rErIrrrrIDscCs2|j}||jkr td|ftj|||dS)NzInvalid attribute %r)lowerrBrrCrD)rEKVrrrrDIs zMorsel.__setitem__NcCs.|j}||jkr td|ftj|||S)NzInvalid attribute %r)rJrBrrC setdefault)rErFvalrrrrMOs zMorsel.setdefaultcCs>t|tstStj||o<|j|jko<|j|jko<|j|jkS)N) isinstancer7NotImplementedrC__eq__r@r?rA)rEmorselrrrrQUs     z Morsel.__eq__cCs$t}tj|||jj|j|S)N)r7rCupdate__dict__)rErRrrrcopy_s z Morsel.copycCsVi}x@t|jD]0\}}|j}||jkr:td|f|||<qWtj||dS)NzInvalid attribute %r)rCitemsrJrBrrS)rEvaluesdatarFrNrrrrSes  z Morsel.updatecCs|j|jkS)N)rJrB)rErKrrr isReservedKeynszMorsel.isReservedKeycCsh|tkr ddl}|jdtdd|j|jkr) __class__rr`)rErrr__repr__szMorsel.__repr__cCsd|j|jddS)Nz rz\")r`replace)rErarrr js_outputszMorsel.js_outputcCs(g}|j}|d|j|jf|dkr,|j}t|j}x|D]\}}|dkrPq>||krZq>|dkrt|tr|d|j|t|fq>|dkrt|tr|d|j||fq>|dkrt|t r|d|j|t |fq>||j kr|r|t |j|q>|d|j||fq>Wt |S)Nz%s=%srr8zmax-agez%s=%dr;) r(rFrIrBsortedrVrOr+r6rr_flags_semispacejoin)rEraresultr(rVrFrHrrrr`s,  zMorsel.OutputString)N)Nr_)N)N)rrr__doc__rBrirGpropertyrFr rHrIrDrMrQobject__ne__rUrSrYrZr[r\r^rc__str__rergr`rrrrr7s@        r7z,\w\d!#%&'~_`><@,:/\$\*\+\-\.\^\|\)\(\?\}\{\=z\[\]z \s* # Optional whitespace at start of cookie (?P # Start of group 'key' [a ]+? # Any word of at least one letter ) # End of group 'key' ( # Optional group: there may not be a value. \s*=\s* # Equal Sign (?P # Start of group 'val' "(?:[^\\"]|\\.)*" # Any doublequoted string | # or \w{3},\s[\w\d\s-]{9,11}\s[\d:]{8}\sGMT # Special case for "expires" attr | # or [a-]* # Any word or empty string ) # End of group 'val' )? 
# End of optional value group \s* # Any number of spaces. (\s+|;|$) # Ending either at space, semicolon, or EOS. c@sneZdZdZddZddZdddZd d Zd d ZdddZ e Z ddZ dddZ ddZ efddZdS)rz'A container class for a set of Morsels.cCs||fS)a real_value, coded_value = value_decode(STRING) Called prior to setting a cookie's value from the network representation. The VALUE is the value read from HTTP header. Override this function to modify the behavior of cookies. r)rErNrrr value_decodeszBaseCookie.value_decodecCst|}||fS)zreal_value, coded_value = value_encode(VALUE) Called prior to setting a cookie's value from the dictionary representation. The VALUE is the value being assigned. Override this function to modify the behavior of cookies. )r)rErNstrvalrrr value_encodeszBaseCookie.value_encodeNcCs|r|j|dS)N)load)rEinputrrrrGszBaseCookie.__init__cCs.|j|t}|j|||tj|||dS)z+Private method for setting a cookie's valueN)getr7r[rCrD)rErFZ real_valuerIMrrrZ__setszBaseCookie.__setcCs:t|trtj|||n|j|\}}|j|||dS)zDictionary style assignment.N)rOr7rCrDrs_BaseCookie__set)rErFrHrvalcvalrrrrDs zBaseCookie.__setitem__ Set-Cookie: cCs>g}t|j}x"|D]\}}|j|j||qW|j|S)z"Return a string suitable for HTTP.)rhrVr(rcjoin)rErarbseprkrVrFrHrrrrc s  zBaseCookie.outputcCsNg}t|j}x(|D] \}}|jd|t|jfqWd|jjt|fS)Nz%s=%sz<%s: %s>)rhrVr(reprrHrdr _spacejoin)rElrVrFrHrrrres  zBaseCookie.__repr__cCs:g}t|j}x |D]\}}|j|j|qWt|S)z(Return a string suitable for JavaScript.)rhrVr(rgr,)rErarkrVrFrHrrrrgs  zBaseCookie.js_outputcCs8t|tr|j|nx|jD]\}}|||<q WdS)zLoad cookies from a string (presumably HTTP_COOKIE) or from a dictionary. 
Loading cookies from a dictionary 'd' is equivalent to calling: map(Cookie.__setitem__, d.keys(), d.values()) N)rOr_BaseCookie__parse_stringrV)rEZrawdatarFrHrrrrt&s    zBaseCookie.loadcCspd}t|}g}d}d}d}xd|ko2|knr|j||} | sLP| jd| jd} } | jd}| ddkr|s~q |j|| dd| fq | jtjkr|sdS| dkr| jtjkr|j|| dfqdSn|j|| t | fq | dk r|j|| |j | fd}q dSq Wd} xF|D]>\} } } | |krH| | | <n| \}}|j | |||| } q*WdS) NrFr rrFrN$T) r$matchgroupendr(rJr7rBrir1rqrx)rErZpattr-rZ parsed_itemsZ morsel_seenZTYPE_ATTRIBUTEZ TYPE_KEYVALUErrFrHrwtpryrzrrrZ__parse_string4sF      zBaseCookie.__parse_string)N)Nr{r|)N)rrrrlrqrsrGrxrDrcrprergrt_CookiePatternrrrrrrs    c@s eZdZdZddZddZdS)rz SimpleCookie supports strings as cookie values. When setting the value using the dictionary assignment notation, SimpleCookie calls the builtin str() to convert the value to a string. Values received from HTTP are kept as strings. cCs t||fS)N)r1)rErNrrrrqxszSimpleCookie.value_decodecCst|}|t|fS)N)rr)rErNrrrrrrs{szSimpleCookie.value_encodeN)rrrrlrqrsrrrrrqs))rlrestring__all__r}r,rjrr ExceptionrZ ascii_lettersZdigitsrZZ_UnescapedCharsr[rangemapordrrScompileescape fullmatchrrr%r'r1Z _weekdaynameZ _monthnamer6rCr7Z_LegalKeyCharsZ_LegalValueCharsASCIIVERBOSErrrrrrrsF    2 J  __pycache__/cookies.cpython-36.opt-2.pyc000064400000026541147204456360014021 0ustar003 \S @sxddlZddlZdddgZdjZdjZdjZdd ZGd ddeZ ej ej d Z e d Z d deedeeee DZejeddeddiejdeje jZddZejdZejdZddZdddddd d!gZdd"d#d$d%d&d'd(d)d*d+d,d-g Zdeefd.d/ZGd0d1d1eZ d2Z!e!d3Z"ejd4e!d5e"d6ej#ej$BZ%Gd7ddeZ&Gd8dde&Z'dS)9N CookieError BaseCookie SimpleCookiez;  cCs$ddl}d|}|j|tdddS)NrzvThe .%s setter is deprecated. The attribute will be read-only in future releases. Please use the set() method instead.) 
stacklevel)warningswarnDeprecationWarning)setterr msgr$/usr/lib64/python3.6/http/cookies.py_warn_deprecated_settersrc@s eZdZdS)rN)__name__ __module__ __qualname__rrrrrsz!#$%&'*+-.^_`|~:z ()/<=>?@[]{}cCsi|]}d||qS)z\%03or).0nrrr sr"z\"\z\\z[%s]+cCs*|dkst|r|Sd|jtdSdS)Nr) _is_legal_key translate _Translator)strrrr_quotesrz\\[0-3][0-7][0-7]z[\\].cCsT|dkst|dkr|S|ddks0|ddkr4|S|dd}d}t|}g}xd|kod|knrJtj||}tj||}| r| r|j||dPd }}|r|jd}|r|jd}|o| s||kr |j||||j||d|d}qR|j||||jtt||d|dd|d}qRWt|S) Nrrr#r#) len _OctalPattsearch _QuotePattappendstartchrint _nulljoin)rirresZo_matchZq_matchjkrrr_unquotes6       $ r1ZMonZTueZWedZThuZFriZSatZSunZJanZFebZMarZAprZMayZJunZJulZAugZSepZOctZNovZDecc CsRddlm}m}|}|||\ }}}} } } } } }d|| ||||| | | fS)Nr)gmtimetimez#%s, %02d %3s %4d %02d:%02d:%02d GMT)r3r2)ZfutureZ weekdaynameZ monthnamer2r3ZnowZyearZmonthZdayZhhZmmZssZwdyzrrr_getdates r6c @seZdZddddddddd Zd d hZd d ZeddZejddZeddZ e jddZ eddZ e jddZ ddZ d3ddZ ddZ ejZddZd d!Zd"d#Zefd$d%Zd&d'Zd(d)Zd4d+d,ZeZd-d.Zd5d/d0Zd6d1d2ZdS)7MorselexpiresZPathCommentZDomainzMax-AgeZSecureZHttpOnlyZVersion)r8pathcommentZdomainzmax-agesecurehttponlyversionr<r=cCs4d|_|_|_x|jD]}tj||dqWdS)Nr)_key_value _coded_value _reserveddict __setitem__)selfkeyrrr__init__&s zMorsel.__init__cCs|jS)N)r?)rErrrrF.sz Morsel.keycCstd||_dS)NrF)rr?)rErFrrrrF2scCs|jS)N)r@)rErrrvalue7sz Morsel.valuecCstd||_dS)NrH)rr@)rErHrrrrH;scCs|jS)N)rA)rErrr coded_value@szMorsel.coded_valuecCstd||_dS)NrI)rrA)rErIrrrrIDscCs2|j}||jkr td|ftj|||dS)NzInvalid attribute %r)lowerrBrrCrD)rEKVrrrrDIs zMorsel.__setitem__NcCs.|j}||jkr td|ftj|||S)NzInvalid attribute %r)rJrBrrC setdefault)rErFvalrrrrMOs zMorsel.setdefaultcCs>t|tstStj||o<|j|jko<|j|jko<|j|jkS)N) isinstancer7NotImplementedrC__eq__r@r?rA)rEmorselrrrrQUs     z Morsel.__eq__cCs$t}tj|||jj|j|S)N)r7rCupdate__dict__)rErRrrrcopy_s z Morsel.copycCsVi}x@t|jD]0\}}|j}||jkr:td|f|||<qWtj||dS)NzInvalid attribute %r)rCitemsrJrBrrS)rEvaluesdatarFrNrrrrSes  z Morsel.updatecCs|j|jkS)N)rJrB)rErKrrr 
isReservedKeynszMorsel.isReservedKeycCsh|tkr ddl}|jdtdd|j|jkr) __class__rr`)rErrr__repr__szMorsel.__repr__cCsd|j|jddS)Nz rz\")r`replace)rErarrr js_outputszMorsel.js_outputcCs(g}|j}|d|j|jf|dkr,|j}t|j}x|D]\}}|dkrPq>||krZq>|dkrt|tr|d|j|t|fq>|dkrt|tr|d|j||fq>|dkrt|t r|d|j|t |fq>||j kr|r|t |j|q>|d|j||fq>Wt |S)Nz%s=%srr8zmax-agez%s=%dr;) r(rFrIrBsortedrVrOr+r6rr_flags_semispacejoin)rEraresultr(rVrFrHrrrr`s,  zMorsel.OutputString)N)Nr_)N)N)rrrrBrirGpropertyrFr rHrIrDrMrQobject__ne__rUrSrYrZr[r\r^rc__str__rergr`rrrrr7s>        r7z,\w\d!#%&'~_`><@,:/\$\*\+\-\.\^\|\)\(\?\}\{\=z\[\]z \s* # Optional whitespace at start of cookie (?P # Start of group 'key' [a ]+? # Any word of at least one letter ) # End of group 'key' ( # Optional group: there may not be a value. \s*=\s* # Equal Sign (?P # Start of group 'val' "(?:[^\\"]|\\.)*" # Any doublequoted string | # or \w{3},\s[\w\d\s-]{9,11}\s[\d:]{8}\sGMT # Special case for "expires" attr | # or [a-]* # Any word or empty string ) # End of group 'val' )? # End of optional value group \s* # Any number of spaces. (\s+|;|$) # Ending either at space, semicolon, or EOS. 
c@sjeZdZddZddZdddZdd Zd d ZdddZeZ ddZ dddZ ddZ e fddZdS)rcCs||fS)Nr)rErNrrr value_decodeszBaseCookie.value_decodecCst|}||fS)N)r)rErNstrvalrrr value_encodeszBaseCookie.value_encodeNcCs|r|j|dS)N)load)rEinputrrrrGszBaseCookie.__init__cCs.|j|t}|j|||tj|||dS)N)getr7r[rCrD)rErFZ real_valuerIMrrrZ__setszBaseCookie.__setcCs:t|trtj|||n|j|\}}|j|||dS)N)rOr7rCrDrr_BaseCookie__set)rErFrHrvalcvalrrrrDs zBaseCookie.__setitem__ Set-Cookie: cCs>g}t|j}x"|D]\}}|j|j||qW|j|S)N)rhrVr(rcjoin)rErarbseprkrVrFrHrrrrc s  zBaseCookie.outputcCsNg}t|j}x(|D] \}}|jd|t|jfqWd|jjt|fS)Nz%s=%sz<%s: %s>)rhrVr(reprrHrdr _spacejoin)rElrVrFrHrrrres  zBaseCookie.__repr__cCs:g}t|j}x |D]\}}|j|j|qWt|S)N)rhrVr(rgr,)rErarkrVrFrHrrrrgs  zBaseCookie.js_outputcCs8t|tr|j|nx|jD]\}}|||<q WdS)N)rOr_BaseCookie__parse_stringrV)rEZrawdatarFrHrrrrs&s    zBaseCookie.loadcCspd}t|}g}d}d}d}xd|ko2|knr|j||} | sLP| jd| jd} } | jd}| ddkr|s~q |j|| dd| fq | jtjkr|sdS| dkr| jtjkr|j|| dfqdSn|j|| t | fq | dk r|j|| |j | fd}q dSq Wd} xF|D]>\} } } | |krH| | | <n| \}}|j | |||| } q*WdS) NrFr rrFrN$T) r$matchgroupendr(rJr7rBrir1rprw)rErZpattr-rZ parsed_itemsZ morsel_seenZTYPE_ATTRIBUTEZ TYPE_KEYVALUErrFrHrvtprxryrrrZ__parse_string4sF      zBaseCookie.__parse_string)N)Nrzr{)N)rrrrprrrGrwrDrcrorergrs_CookiePatternrrrrrrs    c@seZdZddZddZdS)rcCs t||fS)N)r1)rErNrrrrpxszSimpleCookie.value_decodecCst|}|t|fS)N)rr)rErNrqrrrrr{szSimpleCookie.value_encodeN)rrrrprrrrrrrqs)(restring__all__r|r,rjrr ExceptionrZ ascii_lettersZdigitsrZZ_UnescapedCharsr[rangemapordrrScompileescape fullmatchrrr%r'r1Z _weekdaynameZ _monthnamer6rCr7Z_LegalKeyCharsZ_LegalValueCharsASCIIVERBOSErrrrrrrsD    2 J  __pycache__/cookies.cpython-36.pyc000064400000037432147204456360013062 0ustar003 \S @s|dZddlZddlZdddgZdjZdjZdjZd d ZGd dde Z ej ej d Z e d ZddeedeeeeDZejeddeddiejdeje jZddZejdZejdZddZddddd d!d"gZdd#d$d%d&d'd(d)d*d+d,d-d.g Zdeefd/d0ZGd1d2d2e Z!d3Z"e"d4Z#ejd5e"d6e#d7ej$ej%BZ&Gd8dde Z'Gd9dde'Z(dS):a. Here's a sample session to show how to use this module. 
At the moment, this is the only documentation. The Basics ---------- Importing is easy... >>> from http import cookies Most of the time you start by creating a cookie. >>> C = cookies.SimpleCookie() Once you've created your Cookie, you can add values just as if it were a dictionary. >>> C = cookies.SimpleCookie() >>> C["fig"] = "newton" >>> C["sugar"] = "wafer" >>> C.output() 'Set-Cookie: fig=newton\r\nSet-Cookie: sugar=wafer' Notice that the printable representation of a Cookie is the appropriate format for a Set-Cookie: header. This is the default behavior. You can change the header and printed attributes by using the .output() function >>> C = cookies.SimpleCookie() >>> C["rocky"] = "road" >>> C["rocky"]["path"] = "/cookie" >>> print(C.output(header="Cookie:")) Cookie: rocky=road; Path=/cookie >>> print(C.output(attrs=[], header="Cookie:")) Cookie: rocky=road The load() method of a Cookie extracts cookies from a string. In a CGI script, you would use this method to extract the cookies from the HTTP_COOKIE environment variable. >>> C = cookies.SimpleCookie() >>> C.load("chips=ahoy; vienna=finger") >>> C.output() 'Set-Cookie: chips=ahoy\r\nSet-Cookie: vienna=finger' The load() method is darn-tootin smart about identifying cookies within a string. Escaped quotation marks, nested semicolons, and other such trickeries do not confuse it. >>> C = cookies.SimpleCookie() >>> C.load('keebler="E=everybody; L=\\"Loves\\"; fudge=\\012;";') >>> print(C) Set-Cookie: keebler="E=everybody; L=\"Loves\"; fudge=\012;" Each element of the Cookie also supports all of the RFC 2109 Cookie attributes. Here's an example which sets the Path attribute. >>> C = cookies.SimpleCookie() >>> C["oreo"] = "doublestuff" >>> C["oreo"]["path"] = "/" >>> print(C) Set-Cookie: oreo=doublestuff; Path=/ Each dictionary element has a 'value' attribute, which gives you back the value associated with the key. 
>>> C = cookies.SimpleCookie() >>> C["twix"] = "none for you" >>> C["twix"].value 'none for you' The SimpleCookie expects that all values should be standard strings. Just to be sure, SimpleCookie invokes the str() builtin to convert the value to a string, when the values are set dictionary-style. >>> C = cookies.SimpleCookie() >>> C["number"] = 7 >>> C["string"] = "seven" >>> C["number"].value '7' >>> C["string"].value 'seven' >>> C.output() 'Set-Cookie: number=7\r\nSet-Cookie: string=seven' Finis. N CookieError BaseCookie SimpleCookiez;  cCs$ddl}d|}|j|tdddS)NrzvThe .%s setter is deprecated. The attribute will be read-only in future releases. Please use the set() method instead.) stacklevel)warningswarnDeprecationWarning)setterr msgr$/usr/lib64/python3.6/http/cookies.py_warn_deprecated_settersrc@s eZdZdS)rN)__name__ __module__ __qualname__rrrrrsz!#$%&'*+-.^_`|~:z ()/<=>?@[]{}cCsi|]}d||qS)z\%03or).0nrrr sr"z\"\z\\z[%s]+cCs*|dkst|r|Sd|jtdSdS)zQuote a string for use in a cookie header. If the string does not need to be double-quoted, then just return the string. Otherwise, surround the string in doublequotes and quote (with a \) special characters. Nr) _is_legal_key translate _Translator)strrrr_quotesrz\\[0-3][0-7][0-7]z[\\].cCsT|dkst|dkr|S|ddks0|ddkr4|S|dd}d}t|}g}xd|kod|knrJtj||}tj||}| r| r|j||dPd }}|r|jd}|r|jd}|o| s||kr |j||||j||d|d}qR|j||||jtt||d|dd|d}qRWt|S) Nrrr#r#) len _OctalPattsearch _QuotePattappendstartchrint _nulljoin)rirresZo_matchZq_matchjkrrr_unquotes6       $ r1ZMonZTueZWedZThuZFriZSatZSunZJanZFebZMarZAprZMayZJunZJulZAugZSepZOctZNovZDecc CsRddlm}m}|}|||\ }}}} } } } } }d|| ||||| | | fS)Nr)gmtimetimez#%s, %02d %3s %4d %02d:%02d:%02d GMT)r3r2)ZfutureZ weekdaynameZ monthnamer2r3ZnowZyearZmonthZdayZhhZmmZssZwdyzrrr_getdates r6c @seZdZdZdddddddd d Zd d hZd dZeddZej ddZeddZ e j ddZ eddZ e j ddZ ddZ d4ddZ ddZejZdd Zd!d"Zd#d$Zefd%d&Zd'd(Zd)d*Zd5d,d-ZeZd.d/Zd6d0d1Zd7d2d3ZdS)8MorselaA class to hold ONE (key, value) pair. 
In a cookie, each such pair may have several attributes, so this class is used to keep the attributes associated with the appropriate key,value pair. This class also includes a coded_value attribute, which is used to hold the network representation of the value. This is most useful when Python objects are pickled for network transit. expiresZPathCommentZDomainzMax-AgeZSecureZHttpOnlyZVersion)r8pathcommentZdomainzmax-agesecurehttponlyversionr<r=cCs4d|_|_|_x|jD]}tj||dqWdS)Nr)_key_value _coded_value _reserveddict __setitem__)selfkeyrrr__init__&s zMorsel.__init__cCs|jS)N)r?)rErrrrF.sz Morsel.keycCstd||_dS)NrF)rr?)rErFrrrrF2scCs|jS)N)r@)rErrrvalue7sz Morsel.valuecCstd||_dS)NrH)rr@)rErHrrrrH;scCs|jS)N)rA)rErrr coded_value@szMorsel.coded_valuecCstd||_dS)NrI)rrA)rErIrrrrIDscCs2|j}||jkr td|ftj|||dS)NzInvalid attribute %r)lowerrBrrCrD)rEKVrrrrDIs zMorsel.__setitem__NcCs.|j}||jkr td|ftj|||S)NzInvalid attribute %r)rJrBrrC setdefault)rErFvalrrrrMOs zMorsel.setdefaultcCs>t|tstStj||o<|j|jko<|j|jko<|j|jkS)N) isinstancer7NotImplementedrC__eq__r@r?rA)rEmorselrrrrQUs     z Morsel.__eq__cCs$t}tj|||jj|j|S)N)r7rCupdate__dict__)rErRrrrcopy_s z Morsel.copycCsVi}x@t|jD]0\}}|j}||jkr:td|f|||<qWtj||dS)NzInvalid attribute %r)rCitemsrJrBrrS)rEvaluesdatarFrNrrrrSes  z Morsel.updatecCs|j|jkS)N)rJrB)rErKrrr isReservedKeynszMorsel.isReservedKeycCsh|tkr ddl}|jdtdd|j|jkr) __class__rr`)rErrr__repr__szMorsel.__repr__cCsd|j|jddS)Nz rz\")r`replace)rErarrr js_outputszMorsel.js_outputcCs(g}|j}|d|j|jf|dkr,|j}t|j}x|D]\}}|dkrPq>||krZq>|dkrt|tr|d|j|t|fq>|dkrt|tr|d|j||fq>|dkrt|t r|d|j|t |fq>||j kr|r|t |j|q>|d|j||fq>Wt |S)Nz%s=%srr8zmax-agez%s=%dr;) r(rFrIrBsortedrVrOr+r6rr_flags_semispacejoin)rEraresultr(rVrFrHrrrr`s,  zMorsel.OutputString)N)Nr_)N)N)rrr__doc__rBrirGpropertyrFr rHrIrDrMrQobject__ne__rUrSrYrZr[r\r^rc__str__rergr`rrrrr7s@        r7z,\w\d!#%&'~_`><@,:/\$\*\+\-\.\^\|\)\(\?\}\{\=z\[\]z \s* # Optional whitespace at start of cookie (?P # Start of group 'key' [a ]+? 
# Any word of at least one letter ) # End of group 'key' ( # Optional group: there may not be a value. \s*=\s* # Equal Sign (?P # Start of group 'val' "(?:[^\\"]|\\.)*" # Any doublequoted string | # or \w{3},\s[\w\d\s-]{9,11}\s[\d:]{8}\sGMT # Special case for "expires" attr | # or [a-]* # Any word or empty string ) # End of group 'val' )? # End of optional value group \s* # Any number of spaces. (\s+|;|$) # Ending either at space, semicolon, or EOS. c@sneZdZdZddZddZdddZd d Zd d ZdddZ e Z ddZ dddZ ddZ efddZdS)rz'A container class for a set of Morsels.cCs||fS)a real_value, coded_value = value_decode(STRING) Called prior to setting a cookie's value from the network representation. The VALUE is the value read from HTTP header. Override this function to modify the behavior of cookies. r)rErNrrr value_decodeszBaseCookie.value_decodecCst|}||fS)zreal_value, coded_value = value_encode(VALUE) Called prior to setting a cookie's value from the dictionary representation. The VALUE is the value being assigned. Override this function to modify the behavior of cookies. )r)rErNstrvalrrr value_encodeszBaseCookie.value_encodeNcCs|r|j|dS)N)load)rEinputrrrrGszBaseCookie.__init__cCs.|j|t}|j|||tj|||dS)z+Private method for setting a cookie's valueN)getr7r[rCrD)rErFZ real_valuerIMrrrZ__setszBaseCookie.__setcCs:t|trtj|||n|j|\}}|j|||dS)zDictionary style assignment.N)rOr7rCrDrs_BaseCookie__set)rErFrHrvalcvalrrrrDs zBaseCookie.__setitem__ Set-Cookie: cCs>g}t|j}x"|D]\}}|j|j||qW|j|S)z"Return a string suitable for HTTP.)rhrVr(rcjoin)rErarbseprkrVrFrHrrrrc s  zBaseCookie.outputcCsNg}t|j}x(|D] \}}|jd|t|jfqWd|jjt|fS)Nz%s=%sz<%s: %s>)rhrVr(reprrHrdr _spacejoin)rElrVrFrHrrrres  zBaseCookie.__repr__cCs:g}t|j}x |D]\}}|j|j|qWt|S)z(Return a string suitable for JavaScript.)rhrVr(rgr,)rErarkrVrFrHrrrrgs  zBaseCookie.js_outputcCs8t|tr|j|nx|jD]\}}|||<q WdS)zLoad cookies from a string (presumably HTTP_COOKIE) or from a dictionary. 
Loading cookies from a dictionary 'd' is equivalent to calling: map(Cookie.__setitem__, d.keys(), d.values()) N)rOr_BaseCookie__parse_stringrV)rEZrawdatarFrHrrrrt&s    zBaseCookie.loadcCsd}t|}g}d}d}d}xd|ko2|knr|j||} | sLP| jd| jd} } | jd}| ddkr|s~q |j|| dd| fq | jtjkr|sdS| dkr| jtjkr|j|| dfqdSn|j|| t | fq | dk r|j|| |j | fd}q dSq Wd} xb|D]Z\} } } | |krV| dk sLt | | | <n,| |ksdt | \}}|j | |||| } q*WdS) NrFr rrFrN$T) r$matchgroupendr(rJr7rBrir1rqAssertionErrorrx)rErZpattr-rZ parsed_itemsZ morsel_seenZTYPE_ATTRIBUTEZ TYPE_KEYVALUErrFrHrwtpryrzrrrZ__parse_string4sJ      zBaseCookie.__parse_string)N)Nr{r|)N)rrrrlrqrsrGrxrDrcrprergrt_CookiePatternrrrrrrs    c@s eZdZdZddZddZdS)rz SimpleCookie supports strings as cookie values. When setting the value using the dictionary assignment notation, SimpleCookie calls the builtin str() to convert the value to a string. Values received from HTTP are kept as strings. cCs t||fS)N)r1)rErNrrrrqxszSimpleCookie.value_decodecCst|}|t|fS)N)rr)rErNrrrrrrs{szSimpleCookie.value_encodeN)rrrrlrqrsrrrrrqs))rlrestring__all__r}r,rjrr ExceptionrZ ascii_lettersZdigitsrZZ_UnescapedCharsr[rangemapordrrScompileescape fullmatchrrr%r'r1Z _weekdaynameZ _monthnamer6rCr7Z_LegalKeyCharsZ_LegalValueCharsASCIIVERBOSErrrrrrrsF    2 J  __pycache__/server.cpython-36.opt-1.pyc000064400000077030147204456360013671 0ustar003 f@sdZdZddddgZddlZddlZddlZddlZddl Z ddl Z ddl Z ddl Z ddl Z ddlZddlZddlZddlZddlZddlZddlZddlmZd Zd ZGd ddejZGd ddejZGd ddeZddZdaddZ ddZ!GdddeZ"eedddfddZ#e$dkrej%Z&e&j'dddde&j'dd dd!d"d#e&j'd$d%de(d&d'd(e&j)Z*e*j+r~e"Z,neZ,e#e,e*j-e*j.d)dS)*a@HTTP server classes. Note: BaseHTTPRequestHandler doesn't implement any HTTP request; see SimpleHTTPRequestHandler for simple implementations of GET, HEAD and POST, and CGIHTTPRequestHandler for CGI scripts. It does, however, optionally implement HTTP/1.1 persistent connections, as of version 0.3. 
Notes on CGIHTTPRequestHandler ------------------------------ This class implements GET and POST requests to cgi-bin scripts. If the os.fork() function is not present (e.g. on Windows), subprocess.Popen() is used as a fallback, with slightly altered semantics. In all cases, the implementation is intentionally naive -- all requests are executed synchronously. SECURITY WARNING: DON'T USE THIS CODE UNLESS YOU ARE INSIDE A FIREWALL -- it may execute arbitrary Python code or external programs. Note that status code 200 is sent prior to execution of a CGI script, so scripts cannot send other status codes such as 302 (redirect). XXX To do: - log requests even later (to capture byte count) - log user-agent header and other interesting goodies - send error log to separate file z0.6 HTTPServerBaseHTTPRequestHandlerSimpleHTTPRequestHandlerCGIHTTPRequestHandlerN) HTTPStatusa Error response

Error response

Error code: %(code)d

Message: %(message)s.

Error code explanation: %(code)s - %(explain)s.

ztext/html;charset=utf-8c@seZdZdZddZdS)rcCs4tjj||jdd\}}tj||_||_dS)z.Override server_bind to store the server name.N) socketserver TCPServer server_bindserver_addresssocketZgetfqdn server_name server_port)selfhostportr#/usr/lib64/python3.6/http/server.pyr s  zHTTPServer.server_bindN)__name__ __module__ __qualname__Zallow_reuse_addressr rrrrrsc @seZdZdZdejjdZdeZ e Z e Z dZddZdd Zd d Zd d Zd@ddZdAddZdBddZddZddZddZdCddZddZd d!Zd"d#ZdDd$d%Zd&d'Zd(d)d*d+d,d-d.gZdd/d0d1d2d3d4d5d6d7d8d9d:g Z d;d<Z!d=Z"e#j$j%Z&d>d?e'j(j)DZ*dS)EraHTTP request handler base class. The following explanation of HTTP serves to guide you through the code as well as to expose any misunderstandings I may have about HTTP (so you don't need to read the code to figure out I'm wrong :-). HTTP (HyperText Transfer Protocol) is an extensible protocol on top of a reliable stream transport (e.g. TCP/IP). The protocol recognizes three parts to a request: 1. One line identifying the request type and path 2. An optional set of RFC-822-style headers 3. An optional data part The headers and data are separated by a blank line. The first line of the request has the form where is a (case-sensitive) keyword such as GET or POST, is a string containing path information for the request, and should be the string "HTTP/1.0" or "HTTP/1.1". is encoded using the URL encoding scheme (using %xx to signify the ASCII character with hex code xx). The specification specifies that lines are separated by CRLF but for compatibility with the widest range of clients recommends servers also handle LF. Similarly, whitespace in the request line is treated sensibly (allowing multiple spaces between components and allowing trailing whitespace). Similarly, for output, lines ought to be separated by CRLF pairs but most clients grok LF characters just fine. If the first line of the request has the form (i.e. 
is left out) then this is assumed to be an HTTP 0.9 request; this form has no optional headers and data part and the reply consists of just the data. The reply form of the HTTP 1.x protocol again has three parts: 1. One line giving the response code 2. An optional set of RFC-822-style headers 3. The data Again, the headers and data are separated by a blank line. The response code line has the form where is the protocol version ("HTTP/1.0" or "HTTP/1.1"), is a 3-digit response code indicating success or failure of the request, and is an optional human-readable string explaining what the response code means. This server parses the request and the headers, and then calls a function specific to the request type (). Specifically, a request SPAM will be handled by a method do_SPAM(). If no such method exists the server sends an error response to the client. If it exists, it is called with no arguments: do_SPAM() Note that the request name is case sensitive (i.e. SPAM and spam are different requests). The various request details are stored in instance variables: - client_address is the client IP address in the form (host, port); - command, path and version are the broken-down request line; - headers is an instance of email.message.Message (or a derived class) containing the header information; - rfile is a file object open for reading positioned at the start of the optional input data part; - wfile is a file object open for writing. IT IS IMPORTANT TO ADHERE TO THE PROTOCOL FOR WRITING! The first thing to be written must be the response line. Then follow 0 or more header lines, then a blank line, and then the actual data (if any). The meaning of the header lines depends on the command executed by the server; in most cases, when data is returned, there should be at least one header line of the form Content-type: / where and should be registered MIME types, e.g. "text/html" or "text/plain". 
zPython/rz BaseHTTP/zHTTP/0.9c Csd|_|j|_}d|_t|jd}|jd}||_|j}t |dkr|\}}}yZ|dddkrjt |jdd d }|jd }t |d krt t |d t |d f}Wn*t t fk r|j tjd |dSX|dkr|jdkrd|_|dkrr|j tjd|dSn^t |d krR|\}}d|_|dkrr|j tjd|dSn |s\dS|j tjd|dS||||_|_|_|jjdrd|jjd|_ytjj|j|jd|_Wnrtjjk r}z|j tjdt|dSd}~Xn:tjjk r4}z|j tjdt|dSd}~XnX|jjdd} | jdkrZd|_n | jdkrz|jdkrzd|_|jjdd} | jdkr|jdkr|jdkr|j sdSdS) a'Parse a request (internal). The request should be stored in self.raw_requestline; the results are in self.command, self.path, self.request_version and self.headers. Return True for success, False for failure; on failure, an error is sent back. NTz iso-8859-1z zHTTP//r.rrzBad request version (%r)FzHTTP/1.1zInvalid HTTP version (%s)ZGETzBad HTTP/0.9 request type (%r)zBad request syntax (%r)z//)Z_classz Line too longzToo many headers Connectionclosez keep-aliveZExpectz 100-continue)rr)rr)!commanddefault_request_versionrequest_versionclose_connectionstrraw_requestlinerstrip requestlinesplitlen ValueErrorint IndexError send_errorrZ BAD_REQUESTprotocol_versionZHTTP_VERSION_NOT_SUPPORTEDpath startswithlstriphttpclientZ parse_headersrfile MessageClassheadersZ LineTooLongZREQUEST_HEADER_FIELDS_TOO_LARGEZ HTTPExceptiongetlowerhandle_expect_100) rversionr&wordsrr.Zbase_version_numberZversion_numbererrZconntypeZexpectrrr parse_requests                    z$BaseHTTPRequestHandler.parse_requestcCs|jtj|jdS)a7Decide what to do with an "Expect: 100-continue" header. If the client is expecting a 100 Continue response, we must respond with either a 100 Continue or a final response before waiting for the request body. The default is to always respond with a 100 Continue. You can behave differently (for example, reject unauthorized requests) by overriding this method. This method should either return True (possibly after sending a 100 Continue response) or send an error response and return False. 
T)send_response_onlyrZCONTINUE end_headers)rrrrr8ns z(BaseHTTPRequestHandler.handle_expect_100cCsy|jjd|_t|jdkr@d|_d|_d|_|jtj dS|jsPd|_ dS|j s\dSd|j}t ||s|jtj d|jdSt||}||jjWn4tjk r}z|jd|d|_ dSd}~XnXdS) zHandle a single HTTP request. You normally don't need to override this method; see the class __doc__ string for information on how to handle specific HTTP commands such as GET and POST. iirNTZdo_zUnsupported method (%r)zRequest timed out: %r)r3readliner$r(r&r!rr,rZREQUEST_URI_TOO_LONGr"r<hasattrNOT_IMPLEMENTEDgetattrwfileflushr Ztimeout log_error)rZmnamemethoderrrhandle_one_requests4      z)BaseHTTPRequestHandler.handle_one_requestcCs&d|_|jx|js |jqWdS)z&Handle multiple requests if necessary.TN)r"rH)rrrrhandleszBaseHTTPRequestHandler.handleNcCs y|j|\}}Wntk r.d\}}YnX|dkr<|}|dkrH|}|jd|||j|||jddd}|dkr|tjtjtjfkr|j |t j |ddt j |ddd }|j d d }|jd |j |jd tt||j|jdko|r|jj|dS)akSend and log an error reply. Arguments are * code: an HTTP error code 3 digits * message: a simple optional 1 line reason phrase. *( HTAB / SP / VCHAR / %x80-FF ) defaults to short entry matching the response code * explain: a detailed message defaults to the long entry matching the response code. This sends an error response (so it must be called before any output has been generated), logs the error, and finally sends a piece of HTML explaining the error to the user. ???Nzcode %d, message %srrF)quote)codemessageexplainzUTF-8replacez Content-TypezContent-LengthZHEAD)rJrJ) responsesKeyErrorrE send_response send_headerrZ NO_CONTENTZ RESET_CONTENTZ NOT_MODIFIEDerror_message_formathtmlescapeencodeerror_content_typer#r(r>rrCwrite)rrMrNrOZshortmsgZlongmsgZbodyZcontentrrrr,s4     z!BaseHTTPRequestHandler.send_errorcCs:|j||j|||jd|j|jd|jdS)zAdd the response header to the headers buffer and log the response code. Also send two standard headers with the server software version and the current date. 
ZServerZDateN) log_requestr=rTversion_stringdate_time_string)rrMrNrrrrSs  z$BaseHTTPRequestHandler.send_responsecCsd|jdkr`|dkr0||jkr,|j|d}nd}t|ds@g|_|jjd|j||fjdddS) zSend the response header only.zHTTP/0.9Nrr_headers_bufferz %s %d %s zlatin-1strict)r!rQr@r^appendr-rX)rrMrNrrrr=s   z)BaseHTTPRequestHandler.send_response_onlycCsl|jdkr6t|dsg|_|jjd||fjdd|jdkrh|jdkrVd|_n|jd krhd |_d S) z)Send a MIME header to the headers buffer.zHTTP/0.9r^z%s: %s zlatin-1r_Z connectionrTz keep-aliveFN)r!r@r^r`rXr7r")rkeywordvaluerrrrTs     z"BaseHTTPRequestHandler.send_headercCs"|jdkr|jjd|jdS)z,Send the blank line ending the MIME headers.zHTTP/0.9s N)r!r^r` flush_headers)rrrrr> s  z"BaseHTTPRequestHandler.end_headerscCs(t|dr$|jjdj|jg|_dS)Nr^)r@rCrZjoinr^)rrrrrcs z$BaseHTTPRequestHandler.flush_headers-cCs.t|tr|j}|jd|jt|t|dS)zNLog an accepted request. This is called by send_response(). z "%s" %s %sN) isinstancerrb log_messager&r#)rrMsizerrrr[s z"BaseHTTPRequestHandler.log_requestcGs|j|f|dS)zLog an error. This is called when a request cannot be fulfilled. By default it passes the message on to log_message(). Arguments are the same as for log_message(). XXX This should go to the separate error log. N)rh)rformatargsrrrrE!s z BaseHTTPRequestHandler.log_errorcGs&tjjd|j|j||fdS)aLog an arbitrary message. This is used by all other logging functions. Override it if you have specific logging wishes. The first argument, FORMAT, is a format string for the message to be logged. If the format string contains any % escapes requiring parameters, they should be specified as subsequent arguments (it's just like printf!). The client ip and current date/time are prefixed to every message. z%s - - [%s] %s N)sysstderrrZaddress_stringlog_date_time_string)rrjrkrrrrh/sz"BaseHTTPRequestHandler.log_messagecCs|jd|jS)z*Return the server software version string. 
)server_version sys_version)rrrrr\Esz%BaseHTTPRequestHandler.version_stringcCs |dkrtj}tjj|ddS)z@Return the current date and time formatted for a message header.NT)Zusegmt)timeemailZutilsZ formatdate)rZ timestamprrrr]Isz'BaseHTTPRequestHandler.date_time_stringc CsBtj}tj|\ }}}}}}}} } d||j|||||f} | S)z.Return the current time formatted for logging.z%02d/%3s/%04d %02d:%02d:%02d)rsZ localtime monthname) rZnowZyearZmonthZdayZhhZmmZssxyzsrrrroOs z+BaseHTTPRequestHandler.log_date_time_stringZMonZTueZWedZThuZFriZSatZSunZJanZFebZMarZAprZMayZJunZJulZAugZSepZOctZNovZDeccCs |jdS)zReturn the client address.r)client_address)rrrrrn]sz%BaseHTTPRequestHandler.address_stringzHTTP/1.0cCsi|]}|j|jf|qSr)phraseZ description).0vrrr lsz!BaseHTTPRequestHandler.)NN)N)N)rfrf)N)+rrr__doc__rlr9r'rr __version__rqDEFAULT_ERROR_MESSAGErUDEFAULT_ERROR_CONTENT_TYPErYr r<r8rHrIr,rSr=rTr>rcr[rErhr\r]roZ weekdaynamerurnr-r1r2Z HTTPMessager4r __members__valuesrQrrrrrs>fg% 5     c@s|eZdZdZdeZddZddZddZd d Z d d Z d dZ ddZ e jsZe je jjZejddddddS)raWSimple HTTP request handler with GET and HEAD commands. This serves files from the current directory and any of its subdirectories. The MIME type for files is determined by calling the .guess_type() method. The GET and HEAD requests are identical except that the HEAD request omits the actual contents of the file. z SimpleHTTP/c Cs.|j}|r*z|j||jWd|jXdS)zServe a GET request.N) send_headcopyfilerCr)rfrrrdo_GETs zSimpleHTTPRequestHandler.do_GETcCs|j}|r|jdS)zServe a HEAD request.N)rr)rrrrrdo_HEADsz SimpleHTTPRequestHandler.do_HEADc Csx|j|j}d}tjj|rtjj|j}|jjds|jt j |d|d|dd|d|df}tjj |}|j d||j dSx6dD]$}tjj||}tjj|r|}PqW|j|S|j|}yt|d }Wn$tk r|jt jd dSXyZ|jt j|j d |tj|j}|j dt|d|j d|j|j|j |S|jYnXdS)a{Common code for GET and HEAD commands. This sends the response code and MIME headers. 
Return value is either a file object (which has to be copied to the outputfile by the caller unless the command was HEAD, and must be closed by the caller under all circumstances), or None, in which case the caller has nothing further to do. NrrrrrZLocation index.html index.htmrbzFile not foundz Content-typezContent-Lengthz Last-Modified)rr)translate_pathr.osisdirurllibparseZurlsplitendswithrSrZMOVED_PERMANENTLYZ urlunsplitrTr>reexistslist_directory guess_typeopenOSErrorr, NOT_FOUNDOKfstatfilenor#r]st_mtimer) rr.rpartsZ new_partsZnew_urlindexZctypeZfsrrrrsF            z"SimpleHTTPRequestHandler.send_headc Csytj|}Wn"tk r0|jtjddSX|jdddg}ytjj |j dd}Wn t k r|tjj |}YnXt j |dd }tj}d |}|jd |jd |jd ||jd||jd||jdx~|D]v}tj j||}|} } tj j|r"|d} |d} tj j|r8|d} |jdtjj| ddt j | dd fqW|jddj|j|d} tj} | j| | jd|jtj|jdd||jdtt| |j| S)zHelper to produce a directory listing (absent index.html). Return value is either a file object, or None (indicating an error). In either case, the headers are sent, making the interface the same as for send_head(). zNo permission to list directoryNcSs|jS)N)r7)arrrsz9SimpleHTTPRequestHandler.list_directory..)key surrogatepass)errorsF)rLzDirectory listing for %szZz z@z%s z

%s

z
    r@z
  • %s
  • z

 surrogateescaperz Content-typeztext/html; charset=%szContent-Length) rlistdirrr,rrsortrrunquoter.UnicodeDecodeErrorrVrWrlgetfilesystemencodingr`rerislinkrLrXioBytesIOrZseekrSrrTr#r(r>) rr.listrZ displaypathenctitlenamefullnameZ displaynameZlinknameZencodedrrrrrs\          z'SimpleHTTPRequestHandler.list_directoryc Cs|jddd}|jddd}|jjd}ytjj|dd}Wn tk rbtjj|}YnXtj|}|jd}t d|}t j }x8|D]0}t j j |s|t jt jfkrqt j j||}qW|r|d7}|S) zTranslate a /-separated PATH to the local filename syntax. Components that mean special things to the local file system (e.g. drive or directory names) are ignored. (XXX They should probably be diagnosed.) ?rr#rr)rN)r'r%rrrrr posixpathnormpathfilterrgetcwdr.dirnamecurdirpardirre)rr.Ztrailing_slashr:Zwordrrrrs$     z'SimpleHTTPRequestHandler.translate_pathcCstj||dS)aCopy all data between two file objects. The SOURCE argument is a file object open for reading (or anything with a read() method) and the DESTINATION argument is a file object open for writing (or anything with a write() method). The only reason for overriding this would be to change the block size or perhaps to replace newlines by CRLF -- note however that this the default server uses this to copy binary data as well. N)shutilZ copyfileobj)rsourceZ outputfilerrrrsz!SimpleHTTPRequestHandler.copyfilecCsLtj|\}}||jkr"|j|S|j}||jkr>|j|S|jdSdS)aGuess the type of a file. Argument is a PATH (a filename). Return value is a string of the form type/subtype, usable for a MIME Content-type header. The default implementation looks the file's extension up in the table self.extensions_map, using application/octet-stream as a default; however it would be permissible (if slow) to look inside the data to make a better guess. 
rN)rsplitextextensions_mapr7)rr.baseZextrrrr)s    z#SimpleHTTPRequestHandler.guess_typezapplication/octet-streamz text/plain)rz.pyz.cz.hN)rrrrrrqrrrrrrr mimetypesZinitedZinitZ types_mapcopyrupdaterrrrrrs"  1: c Cs|jd\}}}tjj|}|jd}g}x<|ddD],}|dkrN|jq8|r8|dkr8|j|q8W|r|j}|r|dkr|jd}q|dkrd}nd}|rdj||f}ddj||f}dj|}|S) a Given a URL path, remove extra '/'s and '.' path elements and collapse any '..' references and returns a collapsed path. Implements something akin to RFC-2396 5.2 step 6 to parse relative paths. The utility of this function is limited to is_cgi method and helps preventing some security attacks. Returns: The reconstituted URL, which will always start with a '/'. Raises: IndexError if too many '..' occur within the path. rrNrz..rr) partitionrrrr'popr`re) r._query path_partsZ head_partspartZ tail_partZ splitpathcollapsed_pathrrr_url_collapse_pathNs.     rcCsptrtSy ddl}Wntk r(dSXy|jddaWn.tk rjdtdd|jDaYnXtS) z$Internal routine to get nobody's uidrNrnobodyrcss|]}|dVqdS)rNr)r|rvrrr sznobody_uid..r)rpwd ImportErrorgetpwnamrRmaxZgetpwall)rrrr nobody_uids  rcCstj|tjS)zTest for executable file.)raccessX_OK)r.rrr executablesrc@sVeZdZdZeedZdZddZddZ dd Z d d gZ d d Z ddZ ddZdS)rzComplete HTTP server with GET, HEAD and POST commands. GET and HEAD also support running CGI scripts. The POST command is *only* implemented for CGI scripts. forkrcCs$|jr|jn|jtjddS)zRServe a POST request. This is only implemented for CGI scripts. zCan only POST to CGI scriptsN)is_cgirun_cgir,rrA)rrrrdo_POSTs  zCGIHTTPRequestHandler.do_POSTcCs|jr|jStj|SdS)z-Version of send_head that support CGI scriptsN)rrrr)rrrrrszCGIHTTPRequestHandler.send_headcCsPt|j}|jdd}|d|||dd}}||jkrL||f|_dSdS)a3Test whether self.path corresponds to a CGI script. Returns True and updates the cgi_info attribute to the tuple (dir, rest) if self.path requires running a CGI script. Returns False otherwise. If any exception is raised, the caller should assume that self.path was rejected as invalid and act accordingly. 
The default implementation tests whether the normalized url path begins with one of the strings in self.cgi_directories (and the next character is a '/' or the end of the string). rrNTF)rr.findcgi_directoriescgi_info)rrZdir_sepheadtailrrrrs    zCGIHTTPRequestHandler.is_cgiz/cgi-binz/htbincCst|S)z1Test whether argument path is an executable file.)r)rr.rrr is_executablesz#CGIHTTPRequestHandler.is_executablecCstjj|\}}|jdkS)z.Test whether argument path is a Python script..py.pyw)rr)rr.rr7)rr.rrrrr is_pythonszCGIHTTPRequestHandler.is_pythonc)Cs|j\}}|d|}|jdt|d}x`|dkr|d|}||dd}|j|}tjj|r||}}|jdt|d}q,Pq,W|jd\}}} |jd}|dkr|d|||d} }n |d} }|d| } |j| } tjj| s|j t j d| dStjj | s2|j t j d| dS|j| } |jsL| rn|j| sn|j t j d | dStjtj}|j|d <|jj|d <d |d <|j|d<t|jj|d<|j|d<tjj|}||d<|j||d<| |d<| r| |d<|jd|d<|jj d}|r|j!}t|dkrddl"}ddl#}|d|d<|dj$dkry"|dj%d}|j&|j'd}Wn|j(t)fk rYn&X|j!d}t|dkr|d|d<|jj ddkr|jj*|d<n|jd|d<|jj d}|r||d <|jj d!}|r"||d"<g}xN|jj+d#D]>}|ddd$krZ|j,|j-n||d%dj!d&}q4Wd&j.||d'<|jj d(}|r||d)<t/d|jj0d*g}d+j.|}|r||d,<xd=D]}|j1|dqW|j2t j3d.|j4| j5d/d0}|jr.| g}d1|kr*|j,|t6}|j7j8tj9}|dkrtj:|d\}}x0t;j;|jd2|dSy\ytj?|Wnt@k rYnXtjA|j|"d?d}"|"d7g|!}!d1| kr|!j,| |jKd8| jL|!y tM|}#WntNtOfk rd}#YnX| jP|!| jQ| jQ| jQ|d9}$|jj$d:kr|#dkr|jd;|'|$jVjW|$jXjW|$jY}(|(r|j>d2|(n |jKd<dS)@zExecute a CGI script.rrrNrrzNo such CGI script (%r)z#CGI script is not a plain file (%r)z!CGI script is not executable (%r)ZSERVER_SOFTWAREZ SERVER_NAMEzCGI/1.1ZGATEWAY_INTERFACEZSERVER_PROTOCOLZ SERVER_PORTZREQUEST_METHODZ PATH_INFOZPATH_TRANSLATEDZ SCRIPT_NAME QUERY_STRINGZ REMOTE_ADDR authorizationrZ AUTH_TYPEZbasicascii:Z REMOTE_USERz content-typeZ CONTENT_TYPEzcontent-lengthCONTENT_LENGTHreferer HTTP_REFERERacceptz ,Z HTTP_ACCEPTz user-agentHTTP_USER_AGENTZcookiez, HTTP_COOKIE REMOTE_HOSTzScript output follows+rp=zCGI script exit status %#xzw.exerrz-uz command: %s)stdinstdoutrmenvZpostz%szCGI script exited OK)rrrrrr)Zrrr(rrr.rrrr,rrisfileZ FORBIDDENr 
have_forkrrdeepcopyenvironr\Zserverrr-r#rrrrrrzr5r6r'base64binasciir7rXZ decodebytesdecodeError UnicodeErrorZget_content_typeZgetallmatchingheadersr`striprerZget_all setdefaultrSrrcrPrrCrDrwaitpidselectr3readrEsetuidrdup2rexecveZ handle_errorZrequest_exit subprocessrlrrrhZ list2cmdliner* TypeErrorr)PopenPIPEZ_sockZrecvZ communicaterZrmrr returncode))rdirrestr.iZnextdirZnextrestZ scriptdirrrZscriptZ scriptnameZ scriptfileZispyrZuqrestrrrZlengthrrlineZuacoZ cookie_strkZ decoded_queryrkrpidstsrZcmdlineZinterpnbytespdatarrmZstatusrrrrs4                                                zCGIHTTPRequestHandler.run_cgiN)rrrrr@rrZrbufsizerrrrrrrrrrrrs zHTTP/1.0i@rc Cs||f}||_|||b}|jj}d}t|j|d|ddy |jWn&tk rttdtjdYnXWdQRXdS)zmTest the HTTP request handler class. This runs an HTTP server on port 8000 (or the port argument). z>Serving HTTP on {host} port {port} (http://{host}:{port}/) ...rr)rrz& Keyboard interrupt received, exiting.N) r-r Z getsocknameprintrjZ serve_foreverKeyboardInterruptrlexit) HandlerClassZ ServerClassZprotocolrbindr ZhttpdZsaZ serve_messagerrrtests   r%__main__z--cgi store_truezRun as CGI Server)actionhelpz--bindz-bZADDRESSz8Specify alternate bind address [default: all interfaces])defaultmetavarr)rZstorerz&Specify alternate port [default: 8000])r(r*typenargsr))r#rr$)/rr__all__Z email.utilsrtrVZ http.clientr1rrrrr rr r rlrsZ urllib.parserrargparserrrr rZStreamRequestHandlerrrrrrrrr%rArgumentParserparser add_argumentr* parse_argsrkZcgiZ handler_classrr$rrrr sj3  g]0   __pycache__/server.cpython-36.opt-2.pyc000064400000047615147204456360013700 0ustar003 f@sdZddddgZddlZddlZddlZddlZddlZddl Z ddl Z ddl Z ddl Z ddl Z ddlZddlZddlZddlZddlZddlZddlmZdZd ZGd ddejZGd ddejZGd ddeZd dZdaddZddZ GdddeZ!eedddfddZ"e#dkrej$Z%e%j&dddde%j&dddd d!d"e%j&d#d$de'd%d&d'e%j(Z)e)j*rze!Z+neZ+e"e+e)j,e)j-d(dS))z0.6 HTTPServerBaseHTTPRequestHandlerSimpleHTTPRequestHandlerCGIHTTPRequestHandlerN) HTTPStatusa Error response

Error response

Error code: %(code)d

Message: %(message)s.

Error code explanation: %(code)s - %(explain)s.

ztext/html;charset=utf-8c@seZdZdZddZdS)rcCs4tjj||jdd\}}tj||_||_dS)N) socketserver TCPServer server_bindserver_addresssocketZgetfqdn server_name server_port)selfhostportr#/usr/lib64/python3.6/http/server.pyr s  zHTTPServer.server_bindN)__name__ __module__ __qualname__Zallow_reuse_addressr rrrrrsc @seZdZdejjdZdeZe Z e Z dZ ddZddZd d Zd d Zd?ddZd@ddZdAddZddZddZddZdBddZddZdd Zd!d"ZdCd#d$Zd%d&Zd'd(d)d*d+d,d-gZd d.d/d0d1d2d3d4d5d6d7d8d9g Zd:d;Z de&j'j(DZ)d S)DrzPython/rz BaseHTTP/zHTTP/0.9c Csd|_|j|_}d|_t|jd}|jd}||_|j}t |dkr|\}}}yZ|dddkrjt |jddd}|jd }t |d krt t |d t |df}Wn*t t fk r|j tjd |d SX|dkr|jdkrd |_|dkrr|j tjd|d Sn^t |d krR|\}}d|_|dkrr|j tjd|d Sn |s\d S|j tjd|d S||||_|_|_|jjdrd|jjd|_ytjj|j|jd|_Wnrtjjk r}z|j tjdt|d Sd}~Xn:tjjk r4}z|j tjdt|d Sd}~XnX|jjdd} | jdkrZd|_n | jdkrz|jdkrzd |_|jjdd} | jdkr|jdkr|jdkr|j sd SdS)NTz iso-8859-1z zHTTP//r.rrzBad request version (%r)FzHTTP/1.1zInvalid HTTP version (%s)ZGETzBad HTTP/0.9 request type (%r)zBad request syntax (%r)z//)Z_classz Line too longzToo many headers Connectionclosez keep-aliveZExpectz 100-continue)rr)rr)!commanddefault_request_versionrequest_versionclose_connectionstrraw_requestlinerstrip requestlinesplitlen ValueErrorint IndexError send_errorrZ BAD_REQUESTprotocol_versionZHTTP_VERSION_NOT_SUPPORTEDpath startswithlstriphttpclientZ parse_headersrfile MessageClassheadersZ LineTooLongZREQUEST_HEADER_FIELDS_TOO_LARGEZ HTTPExceptiongetlowerhandle_expect_100) rversionr&wordsrr.Zbase_version_numberZversion_numbererrZconntypeZexpectrrr parse_requests                    z$BaseHTTPRequestHandler.parse_requestcCs|jtj|jdS)NT)send_response_onlyrZCONTINUE end_headers)rrrrr8ns z(BaseHTTPRequestHandler.handle_expect_100cCsy|jjd|_t|jdkr@d|_d|_d|_|jtj dS|jsPd|_ dS|j s\dSd|j}t ||s|jtj d|jdSt||}||jjWn4tjk r}z|jd|d|_ dSd}~XnXdS)NiirTZdo_zUnsupported method (%r)zRequest timed out: %r)r3readliner$r(r&r!rr,rZREQUEST_URI_TOO_LONGr"r<hasattrNOT_IMPLEMENTEDgetattrwfileflushr Ztimeout log_error)rZmnamemethoderrrhandle_one_requests4      
z)BaseHTTPRequestHandler.handle_one_requestcCs&d|_|jx|js |jqWdS)NT)r"rH)rrrrhandleszBaseHTTPRequestHandler.handleNcCs y|j|\}}Wntk r.d\}}YnX|dkr<|}|dkrH|}|jd|||j|||jddd}|dkr|tjtjtjfkr|j |t j |ddt j |ddd}|j d d }|jd |j |jd tt||j|jd ko|r|jj|dS)N???zcode %d, message %srrF)quote)codemessageexplainzUTF-8replacez Content-TypezContent-LengthZHEAD)rJrJ) responsesKeyErrorrE send_response send_headerrZ NO_CONTENTZ RESET_CONTENTZ NOT_MODIFIEDerror_message_formathtmlescapeencodeerror_content_typer#r(r>rrCwrite)rrMrNrOZshortmsgZlongmsgZbodyZcontentrrrr,s4     z!BaseHTTPRequestHandler.send_errorcCs:|j||j|||jd|j|jd|jdS)NZServerZDate) log_requestr=rTversion_stringdate_time_string)rrMrNrrrrSs  z$BaseHTTPRequestHandler.send_responsecCsd|jdkr`|dkr0||jkr,|j|d}nd}t|ds@g|_|jjd|j||fjdddS)NzHTTP/0.9rr_headers_bufferz %s %d %s zlatin-1strict)r!rQr@r^appendr-rX)rrMrNrrrr=s   z)BaseHTTPRequestHandler.send_response_onlycCsl|jdkr6t|dsg|_|jjd||fjdd|jdkrh|jdkrVd|_n|jd krhd |_dS) NzHTTP/0.9r^z%s: %s zlatin-1r_Z connectionrTz keep-aliveF)r!r@r^r`rXr7r")rkeywordvaluerrrrTs     z"BaseHTTPRequestHandler.send_headercCs"|jdkr|jjd|jdS)NzHTTP/0.9s )r!r^r` flush_headers)rrrrr> s  z"BaseHTTPRequestHandler.end_headerscCs(t|dr$|jjdj|jg|_dS)Nr^)r@rCrZjoinr^)rrrrrcs z$BaseHTTPRequestHandler.flush_headers-cCs.t|tr|j}|jd|jt|t|dS)Nz "%s" %s %s) isinstancerrb log_messager&r#)rrMsizerrrr[s z"BaseHTTPRequestHandler.log_requestcGs|j|f|dS)N)rh)rformatargsrrrrE!s z BaseHTTPRequestHandler.log_errorcGs&tjjd|j|j||fdS)Nz%s - - [%s] %s )sysstderrrZaddress_stringlog_date_time_string)rrjrkrrrrh/sz"BaseHTTPRequestHandler.log_messagecCs|jd|jS)N )server_version sys_version)rrrrr\Esz%BaseHTTPRequestHandler.version_stringcCs |dkrtj}tjj|ddS)NT)Zusegmt)timeemailZutilsZ formatdate)rZ timestamprrrr]Isz'BaseHTTPRequestHandler.date_time_stringc CsBtj}tj|\ }}}}}}}} } d||j|||||f} | S)Nz%02d/%3s/%04d %02d:%02d:%02d)rsZ localtime monthname) rZnowZyearZmonthZdayZhhZmmZssxyzsrrrroOs 
z+BaseHTTPRequestHandler.log_date_time_stringZMonZTueZWedZThuZFriZSatZSunZJanZFebZMarZAprZMayZJunZJulZAugZSepZOctZNovZDeccCs |jdS)Nr)client_address)rrrrrn]sz%BaseHTTPRequestHandler.address_stringzHTTP/1.0cCsi|]}|j|jf|qSr)phraseZ description).0vrrr lsz!BaseHTTPRequestHandler.)NN)N)N)rfrf)N)*rrrrlr9r'rr __version__rqDEFAULT_ERROR_MESSAGErUDEFAULT_ERROR_CONTENT_TYPErYr r<r8rHrIr,rSr=rTr>rcr[rErhr\r]roZ weekdaynamerurnr-r1r2Z HTTPMessager4r __members__valuesrQrrrrrs<ig% 5     c@sxeZdZdeZddZddZddZdd Zd d Z d d Z ddZ e j sVe je jjZejddddddS)rz SimpleHTTP/c Cs.|j}|r*z|j||jWd|jXdS)N) send_headcopyfilerCr)rfrrrdo_GETs zSimpleHTTPRequestHandler.do_GETcCs|j}|r|jdS)N)rr)rrrrrdo_HEADsz SimpleHTTPRequestHandler.do_HEADc Csx|j|j}d}tjj|rtjj|j}|jjds|jt j |d|d|dd|d|df}tjj |}|j d||j dSx6dD]$}tjj||}tjj|r|}PqW|j|S|j|}yt|d }Wn$tk r|jt jd dSXyZ|jt j|j d |tj|j}|j d t|d|j d|j|j|j |S|jYnXdS)NrrrrrZLocation index.html index.htmrbzFile not foundz Content-typezContent-Lengthz Last-Modified)rr)translate_pathr.osisdirurllibparseZurlsplitendswithrSrZMOVED_PERMANENTLYZ urlunsplitrTr>reexistslist_directory guess_typeopenOSErrorr, NOT_FOUNDOKfstatfilenor#r]st_mtimer) rr.rpartsZ new_partsZnew_urlindexZctypeZfsrrrrsF            z"SimpleHTTPRequestHandler.send_headc Csytj|}Wn"tk r0|jtjddSX|jdddg}ytjj |j dd}Wn t k r|tjj |}YnXt j |dd}tj}d |}|jd |jd |jd ||jd ||jd||jdx~|D]v}tj j||}|} } tj j|r"|d} |d} tj j|r8|d} |jdtjj| ddt j | ddfqW|jddj|j|d} tj} | j| | jd|jtj|jdd||jdtt| |j| S)NzNo permission to list directorycSs|jS)N)r7)arrrsz9SimpleHTTPRequestHandler.list_directory..)key surrogatepass)errorsF)rLzDirectory listing for %szZz z@z%s z

%s

z
    r@z
  • %s
  • z

 surrogateescaperz Content-typeztext/html; charset=%szContent-Length) rlistdirrr,rrsortrrunquoter.UnicodeDecodeErrorrVrWrlgetfilesystemencodingr`rerislinkrLrXioBytesIOrZseekrSrrTr#r(r>) rr.listrZ displaypathenctitlenamefullnameZ displaynameZlinknameZencodedrrrrrs\          z'SimpleHTTPRequestHandler.list_directoryc Cs|jddd}|jddd}|jjd}ytjj|dd}Wn tk rbtjj|}YnXtj|}|jd}t d|}t j }x8|D]0}t j j |s|t jt jfkrqt j j||}qW|r|d7}|S)N?rr#rr)r)r'r%rrrrr posixpathnormpathfilterrgetcwdr.dirnamecurdirpardirre)rr.Ztrailing_slashr:Zwordrrrrs$     z'SimpleHTTPRequestHandler.translate_pathcCstj||dS)N)shutilZ copyfileobj)rsourceZ outputfilerrrrsz!SimpleHTTPRequestHandler.copyfilecCsLtj|\}}||jkr"|j|S|j}||jkr>|j|S|jdSdS)Nr)rsplitextextensions_mapr7)rr.baseZextrrrr)s    z#SimpleHTTPRequestHandler.guess_typezapplication/octet-streamz text/plain)rz.pyz.cz.hN)rrrrrqrrrrrrr mimetypesZinitedZinitZ types_mapcopyrupdaterrrrrrs   1: c Cs|jd\}}}tjj|}|jd}g}x<|ddD],}|dkrN|jq8|r8|dkr8|j|q8W|r|j}|r|dkr|jd}q|dkrd}nd}|rdj||f}ddj||f}dj|}|S)Nrrrz..rr) partitionrrrr'popr`re) r._query path_partsZ head_partspartZ tail_partZ splitpathcollapsed_pathrrr_url_collapse_pathNs.     
rcCsptrtSy ddl}Wntk r(dSXy|jddaWn.tk rjdtdd|jDaYnXtS)Nrrnobodyrcss|]}|dVqdS)rNr)r|rvrrr sznobody_uid..r)rpwd ImportErrorgetpwnamrRmaxZgetpwall)rrrr nobody_uids  rcCstj|tjS)N)raccessX_OK)r.rrr executablesrc@sReZdZeedZdZddZddZddZ d d gZ d d Z d dZ ddZ dS)rforkrcCs$|jr|jn|jtjddS)NzCan only POST to CGI scripts)is_cgirun_cgir,rrA)rrrrdo_POSTs  zCGIHTTPRequestHandler.do_POSTcCs|jr|jStj|SdS)N)rrrr)rrrrrszCGIHTTPRequestHandler.send_headcCsPt|j}|jdd}|d|||dd}}||jkrL||f|_dSdS)NrrTF)rr.findcgi_directoriescgi_info)rrZdir_sepheadtailrrrrs    zCGIHTTPRequestHandler.is_cgiz/cgi-binz/htbincCst|S)N)r)rr.rrr is_executablesz#CGIHTTPRequestHandler.is_executablecCstjj|\}}|jdkS)N.py.pyw)rr)rr.rr7)rr.rrrrr is_pythonszCGIHTTPRequestHandler.is_pythonc)Cs|j\}}|d|}|jdt|d}x`|dkr|d|}||dd}|j|}tjj|r||}}|jdt|d}q,Pq,W|jd\}}} |jd}|dkr|d|||d} }n |d} }|d| } |j| } tjj| s|j t j d| dStjj | s2|j t j d| dS|j| } |jsL| rn|j| sn|j t j d| dStjtj}|j|d <|jj|d <d |d <|j|d <t|jj|d<|j|d<tjj|}||d<|j||d<| |d<| r| |d<|jd|d<|jj d}|r|j!}t|dkrddl"}ddl#}|d|d<|dj$dkry"|dj%d}|j&|j'd}Wn|j(t)fk rYn&X|j!d}t|dkr|d|d<|jj ddkr|jj*|d<n|jd|d<|jj d}|r||d<|jj d }|r"||d!<g}xN|jj+d"D]>}|ddd#krZ|j,|j-n||d$dj!d%}q4Wd%j.||d&<|jj d'}|r||d(<t/d|jj0d)g}d*j.|}|r||d+<xdd1|dSy\ytj?|Wnt@k rYnXtjA|jd}"|"d6g|!}!d0| kr|!j,| |jKd7| jL|!y tM|}#WntNtOfk rd}#YnX| jP|!| jQ| jQ| jQ|d8}$|jj$d9kr|#dkr|jd:|'|$jVjW|$jXjW|$jY}(|(r|j>d1|(n |jKd;dS)?NrrrrrzNo such CGI script (%r)z#CGI script is not a plain file (%r)z!CGI script is not executable (%r)ZSERVER_SOFTWAREZ SERVER_NAMEzCGI/1.1ZGATEWAY_INTERFACEZSERVER_PROTOCOLZ SERVER_PORTZREQUEST_METHODZ PATH_INFOZPATH_TRANSLATEDZ SCRIPT_NAME QUERY_STRINGZ REMOTE_ADDR authorizationrZ AUTH_TYPEZbasicascii:Z REMOTE_USERz content-typeZ CONTENT_TYPEzcontent-lengthCONTENT_LENGTHreferer HTTP_REFERERacceptz ,Z HTTP_ACCEPTz user-agentHTTP_USER_AGENTZcookiez, HTTP_COOKIE REMOTE_HOSTzScript output follows+rp=zCGI script exit status %#xzw.exerrz-uz command: %s)stdinstdoutrmenvZpostz%szCGI script exited 
OK)rrrrrr)Zrrr(rrr.rrrr,rrisfileZ FORBIDDENr have_forkrrdeepcopyenvironr\Zserverrr-r#rrrrrrzr5r6r'base64binasciir7rXZ decodebytesdecodeError UnicodeErrorZget_content_typeZgetallmatchingheadersr`striprerZget_all setdefaultrSrrcrPrrCrDrwaitpidselectr3readrEsetuidrdup2rexecveZ handle_errorZrequest_exit subprocessrlrrrhZ list2cmdliner* TypeErrorr)PopenPIPEZ_sockZrecvZ communicaterZrmrr returncode))rdirrestr.iZnextdirZnextrestZ scriptdirrrZscriptZ scriptnameZ scriptfileZispyrZuqrestrrrZlengthrrlineZuacoZ cookie_strkZ decoded_queryrkrpidstsrZcmdlineZinterpnbytespdatarrmZstatusrrrrs4                                                zCGIHTTPRequestHandler.run_cgiN)rrrr@rrZrbufsizerrrrrrrrrrrrs zHTTP/1.0i@rc Cs||f}||_|||b}|jj}d}t|j|d|ddy |jWn&tk rttdtjdYnXWdQRXdS)Nz>Serving HTTP on {host} port {port} (http://{host}:{port}/) ...rr)rrz& Keyboard interrupt received, exiting.) r-r Z getsocknameprintrjZ serve_foreverKeyboardInterruptrlexit) HandlerClassZ ServerClassZprotocolrbindr ZhttpdZsaZ serve_messagerrrtests   r$__main__z--cgi store_truezRun as CGI Server)actionhelpz--bindz-bZADDRESSz8Specify alternate bind address [default: all interfaces])defaultmetavarr(rZstorerz&Specify alternate port [default: 8000])r'r)typenargsr()r"rr#).r__all__Z email.utilsrtrVZ http.clientr1rrrrr rr r rlrsZ urllib.parserrargparserrrr rZStreamRequestHandlerrrrrrrrr$rArgumentParserparser add_argumentr* parse_argsrkZcgiZ handler_classrr#rrrrSsh  g]0   __pycache__/server.cpython-36.pyc000064400000077030147204456360012732 0ustar003 f@sdZdZddddgZddlZddlZddlZddlZddl Z ddl Z ddl Z ddl Z ddl Z ddlZddlZddlZddlZddlZddlZddlZddlmZd Zd ZGd ddejZGd ddejZGd ddeZddZdaddZ ddZ!GdddeZ"eedddfddZ#e$dkrej%Z&e&j'dddde&j'dd dd!d"d#e&j'd$d%de(d&d'd(e&j)Z*e*j+r~e"Z,neZ,e#e,e*j-e*j.d)dS)*a@HTTP server classes. Note: BaseHTTPRequestHandler doesn't implement any HTTP request; see SimpleHTTPRequestHandler for simple implementations of GET, HEAD and POST, and CGIHTTPRequestHandler for CGI scripts. 
It does, however, optionally implement HTTP/1.1 persistent connections, as of version 0.3. Notes on CGIHTTPRequestHandler ------------------------------ This class implements GET and POST requests to cgi-bin scripts. If the os.fork() function is not present (e.g. on Windows), subprocess.Popen() is used as a fallback, with slightly altered semantics. In all cases, the implementation is intentionally naive -- all requests are executed synchronously. SECURITY WARNING: DON'T USE THIS CODE UNLESS YOU ARE INSIDE A FIREWALL -- it may execute arbitrary Python code or external programs. Note that status code 200 is sent prior to execution of a CGI script, so scripts cannot send other status codes such as 302 (redirect). XXX To do: - log requests even later (to capture byte count) - log user-agent header and other interesting goodies - send error log to separate file z0.6 HTTPServerBaseHTTPRequestHandlerSimpleHTTPRequestHandlerCGIHTTPRequestHandlerN) HTTPStatusa Error response

Error response

Error code: %(code)d

Message: %(message)s.

Error code explanation: %(code)s - %(explain)s.

ztext/html;charset=utf-8c@seZdZdZddZdS)rcCs4tjj||jdd\}}tj||_||_dS)z.Override server_bind to store the server name.N) socketserver TCPServer server_bindserver_addresssocketZgetfqdn server_name server_port)selfhostportr#/usr/lib64/python3.6/http/server.pyr s  zHTTPServer.server_bindN)__name__ __module__ __qualname__Zallow_reuse_addressr rrrrrsc @seZdZdZdejjdZdeZ e Z e Z dZddZdd Zd d Zd d Zd@ddZdAddZdBddZddZddZddZdCddZddZd d!Zd"d#ZdDd$d%Zd&d'Zd(d)d*d+d,d-d.gZdd/d0d1d2d3d4d5d6d7d8d9d:g Z d;d<Z!d=Z"e#j$j%Z&d>d?e'j(j)DZ*dS)EraHTTP request handler base class. The following explanation of HTTP serves to guide you through the code as well as to expose any misunderstandings I may have about HTTP (so you don't need to read the code to figure out I'm wrong :-). HTTP (HyperText Transfer Protocol) is an extensible protocol on top of a reliable stream transport (e.g. TCP/IP). The protocol recognizes three parts to a request: 1. One line identifying the request type and path 2. An optional set of RFC-822-style headers 3. An optional data part The headers and data are separated by a blank line. The first line of the request has the form where is a (case-sensitive) keyword such as GET or POST, is a string containing path information for the request, and should be the string "HTTP/1.0" or "HTTP/1.1". is encoded using the URL encoding scheme (using %xx to signify the ASCII character with hex code xx). The specification specifies that lines are separated by CRLF but for compatibility with the widest range of clients recommends servers also handle LF. Similarly, whitespace in the request line is treated sensibly (allowing multiple spaces between components and allowing trailing whitespace). Similarly, for output, lines ought to be separated by CRLF pairs but most clients grok LF characters just fine. If the first line of the request has the form (i.e. 
is left out) then this is assumed to be an HTTP 0.9 request; this form has no optional headers and data part and the reply consists of just the data. The reply form of the HTTP 1.x protocol again has three parts: 1. One line giving the response code 2. An optional set of RFC-822-style headers 3. The data Again, the headers and data are separated by a blank line. The response code line has the form where is the protocol version ("HTTP/1.0" or "HTTP/1.1"), is a 3-digit response code indicating success or failure of the request, and is an optional human-readable string explaining what the response code means. This server parses the request and the headers, and then calls a function specific to the request type (). Specifically, a request SPAM will be handled by a method do_SPAM(). If no such method exists the server sends an error response to the client. If it exists, it is called with no arguments: do_SPAM() Note that the request name is case sensitive (i.e. SPAM and spam are different requests). The various request details are stored in instance variables: - client_address is the client IP address in the form (host, port); - command, path and version are the broken-down request line; - headers is an instance of email.message.Message (or a derived class) containing the header information; - rfile is a file object open for reading positioned at the start of the optional input data part; - wfile is a file object open for writing. IT IS IMPORTANT TO ADHERE TO THE PROTOCOL FOR WRITING! The first thing to be written must be the response line. Then follow 0 or more header lines, then a blank line, and then the actual data (if any). The meaning of the header lines depends on the command executed by the server; in most cases, when data is returned, there should be at least one header line of the form Content-type: / where and should be registered MIME types, e.g. "text/html" or "text/plain". 
zPython/rz BaseHTTP/zHTTP/0.9c Csd|_|j|_}d|_t|jd}|jd}||_|j}t |dkr|\}}}yZ|dddkrjt |jdd d }|jd }t |d krt t |d t |d f}Wn*t t fk r|j tjd |dSX|dkr|jdkrd|_|dkrr|j tjd|dSn^t |d krR|\}}d|_|dkrr|j tjd|dSn |s\dS|j tjd|dS||||_|_|_|jjdrd|jjd|_ytjj|j|jd|_Wnrtjjk r}z|j tjdt|dSd}~Xn:tjjk r4}z|j tjdt|dSd}~XnX|jjdd} | jdkrZd|_n | jdkrz|jdkrzd|_|jjdd} | jdkr|jdkr|jdkr|j sdSdS) a'Parse a request (internal). The request should be stored in self.raw_requestline; the results are in self.command, self.path, self.request_version and self.headers. Return True for success, False for failure; on failure, an error is sent back. NTz iso-8859-1z zHTTP//r.rrzBad request version (%r)FzHTTP/1.1zInvalid HTTP version (%s)ZGETzBad HTTP/0.9 request type (%r)zBad request syntax (%r)z//)Z_classz Line too longzToo many headers Connectionclosez keep-aliveZExpectz 100-continue)rr)rr)!commanddefault_request_versionrequest_versionclose_connectionstrraw_requestlinerstrip requestlinesplitlen ValueErrorint IndexError send_errorrZ BAD_REQUESTprotocol_versionZHTTP_VERSION_NOT_SUPPORTEDpath startswithlstriphttpclientZ parse_headersrfile MessageClassheadersZ LineTooLongZREQUEST_HEADER_FIELDS_TOO_LARGEZ HTTPExceptiongetlowerhandle_expect_100) rversionr&wordsrr.Zbase_version_numberZversion_numbererrZconntypeZexpectrrr parse_requests                    z$BaseHTTPRequestHandler.parse_requestcCs|jtj|jdS)a7Decide what to do with an "Expect: 100-continue" header. If the client is expecting a 100 Continue response, we must respond with either a 100 Continue or a final response before waiting for the request body. The default is to always respond with a 100 Continue. You can behave differently (for example, reject unauthorized requests) by overriding this method. This method should either return True (possibly after sending a 100 Continue response) or send an error response and return False. 
T)send_response_onlyrZCONTINUE end_headers)rrrrr8ns z(BaseHTTPRequestHandler.handle_expect_100cCsy|jjd|_t|jdkr@d|_d|_d|_|jtj dS|jsPd|_ dS|j s\dSd|j}t ||s|jtj d|jdSt||}||jjWn4tjk r}z|jd|d|_ dSd}~XnXdS) zHandle a single HTTP request. You normally don't need to override this method; see the class __doc__ string for information on how to handle specific HTTP commands such as GET and POST. iirNTZdo_zUnsupported method (%r)zRequest timed out: %r)r3readliner$r(r&r!rr,rZREQUEST_URI_TOO_LONGr"r<hasattrNOT_IMPLEMENTEDgetattrwfileflushr Ztimeout log_error)rZmnamemethoderrrhandle_one_requests4      z)BaseHTTPRequestHandler.handle_one_requestcCs&d|_|jx|js |jqWdS)z&Handle multiple requests if necessary.TN)r"rH)rrrrhandleszBaseHTTPRequestHandler.handleNcCs y|j|\}}Wntk r.d\}}YnX|dkr<|}|dkrH|}|jd|||j|||jddd}|dkr|tjtjtjfkr|j |t j |ddt j |ddd }|j d d }|jd |j |jd tt||j|jdko|r|jj|dS)akSend and log an error reply. Arguments are * code: an HTTP error code 3 digits * message: a simple optional 1 line reason phrase. *( HTAB / SP / VCHAR / %x80-FF ) defaults to short entry matching the response code * explain: a detailed message defaults to the long entry matching the response code. This sends an error response (so it must be called before any output has been generated), logs the error, and finally sends a piece of HTML explaining the error to the user. ???Nzcode %d, message %srrF)quote)codemessageexplainzUTF-8replacez Content-TypezContent-LengthZHEAD)rJrJ) responsesKeyErrorrE send_response send_headerrZ NO_CONTENTZ RESET_CONTENTZ NOT_MODIFIEDerror_message_formathtmlescapeencodeerror_content_typer#r(r>rrCwrite)rrMrNrOZshortmsgZlongmsgZbodyZcontentrrrr,s4     z!BaseHTTPRequestHandler.send_errorcCs:|j||j|||jd|j|jd|jdS)zAdd the response header to the headers buffer and log the response code. Also send two standard headers with the server software version and the current date. 
ZServerZDateN) log_requestr=rTversion_stringdate_time_string)rrMrNrrrrSs  z$BaseHTTPRequestHandler.send_responsecCsd|jdkr`|dkr0||jkr,|j|d}nd}t|ds@g|_|jjd|j||fjdddS) zSend the response header only.zHTTP/0.9Nrr_headers_bufferz %s %d %s zlatin-1strict)r!rQr@r^appendr-rX)rrMrNrrrr=s   z)BaseHTTPRequestHandler.send_response_onlycCsl|jdkr6t|dsg|_|jjd||fjdd|jdkrh|jdkrVd|_n|jd krhd |_d S) z)Send a MIME header to the headers buffer.zHTTP/0.9r^z%s: %s zlatin-1r_Z connectionrTz keep-aliveFN)r!r@r^r`rXr7r")rkeywordvaluerrrrTs     z"BaseHTTPRequestHandler.send_headercCs"|jdkr|jjd|jdS)z,Send the blank line ending the MIME headers.zHTTP/0.9s N)r!r^r` flush_headers)rrrrr> s  z"BaseHTTPRequestHandler.end_headerscCs(t|dr$|jjdj|jg|_dS)Nr^)r@rCrZjoinr^)rrrrrcs z$BaseHTTPRequestHandler.flush_headers-cCs.t|tr|j}|jd|jt|t|dS)zNLog an accepted request. This is called by send_response(). z "%s" %s %sN) isinstancerrb log_messager&r#)rrMsizerrrr[s z"BaseHTTPRequestHandler.log_requestcGs|j|f|dS)zLog an error. This is called when a request cannot be fulfilled. By default it passes the message on to log_message(). Arguments are the same as for log_message(). XXX This should go to the separate error log. N)rh)rformatargsrrrrE!s z BaseHTTPRequestHandler.log_errorcGs&tjjd|j|j||fdS)aLog an arbitrary message. This is used by all other logging functions. Override it if you have specific logging wishes. The first argument, FORMAT, is a format string for the message to be logged. If the format string contains any % escapes requiring parameters, they should be specified as subsequent arguments (it's just like printf!). The client ip and current date/time are prefixed to every message. z%s - - [%s] %s N)sysstderrrZaddress_stringlog_date_time_string)rrjrkrrrrh/sz"BaseHTTPRequestHandler.log_messagecCs|jd|jS)z*Return the server software version string. 
)server_version sys_version)rrrrr\Esz%BaseHTTPRequestHandler.version_stringcCs |dkrtj}tjj|ddS)z@Return the current date and time formatted for a message header.NT)Zusegmt)timeemailZutilsZ formatdate)rZ timestamprrrr]Isz'BaseHTTPRequestHandler.date_time_stringc CsBtj}tj|\ }}}}}}}} } d||j|||||f} | S)z.Return the current time formatted for logging.z%02d/%3s/%04d %02d:%02d:%02d)rsZ localtime monthname) rZnowZyearZmonthZdayZhhZmmZssxyzsrrrroOs z+BaseHTTPRequestHandler.log_date_time_stringZMonZTueZWedZThuZFriZSatZSunZJanZFebZMarZAprZMayZJunZJulZAugZSepZOctZNovZDeccCs |jdS)zReturn the client address.r)client_address)rrrrrn]sz%BaseHTTPRequestHandler.address_stringzHTTP/1.0cCsi|]}|j|jf|qSr)phraseZ description).0vrrr lsz!BaseHTTPRequestHandler.)NN)N)N)rfrf)N)+rrr__doc__rlr9r'rr __version__rqDEFAULT_ERROR_MESSAGErUDEFAULT_ERROR_CONTENT_TYPErYr r<r8rHrIr,rSr=rTr>rcr[rErhr\r]roZ weekdaynamerurnr-r1r2Z HTTPMessager4r __members__valuesrQrrrrrs>fg% 5     c@s|eZdZdZdeZddZddZddZd d Z d d Z d dZ ddZ e jsZe je jjZejddddddS)raWSimple HTTP request handler with GET and HEAD commands. This serves files from the current directory and any of its subdirectories. The MIME type for files is determined by calling the .guess_type() method. The GET and HEAD requests are identical except that the HEAD request omits the actual contents of the file. z SimpleHTTP/c Cs.|j}|r*z|j||jWd|jXdS)zServe a GET request.N) send_headcopyfilerCr)rfrrrdo_GETs zSimpleHTTPRequestHandler.do_GETcCs|j}|r|jdS)zServe a HEAD request.N)rr)rrrrrdo_HEADsz SimpleHTTPRequestHandler.do_HEADc Csx|j|j}d}tjj|rtjj|j}|jjds|jt j |d|d|dd|d|df}tjj |}|j d||j dSx6dD]$}tjj||}tjj|r|}PqW|j|S|j|}yt|d }Wn$tk r|jt jd dSXyZ|jt j|j d |tj|j}|j dt|d|j d|j|j|j |S|jYnXdS)a{Common code for GET and HEAD commands. This sends the response code and MIME headers. 
Return value is either a file object (which has to be copied to the outputfile by the caller unless the command was HEAD, and must be closed by the caller under all circumstances), or None, in which case the caller has nothing further to do. NrrrrrZLocation index.html index.htmrbzFile not foundz Content-typezContent-Lengthz Last-Modified)rr)translate_pathr.osisdirurllibparseZurlsplitendswithrSrZMOVED_PERMANENTLYZ urlunsplitrTr>reexistslist_directory guess_typeopenOSErrorr, NOT_FOUNDOKfstatfilenor#r]st_mtimer) rr.rpartsZ new_partsZnew_urlindexZctypeZfsrrrrsF            z"SimpleHTTPRequestHandler.send_headc Csytj|}Wn"tk r0|jtjddSX|jdddg}ytjj |j dd}Wn t k r|tjj |}YnXt j |dd }tj}d |}|jd |jd |jd ||jd||jd||jdx~|D]v}tj j||}|} } tj j|r"|d} |d} tj j|r8|d} |jdtjj| ddt j | dd fqW|jddj|j|d} tj} | j| | jd|jtj|jdd||jdtt| |j| S)zHelper to produce a directory listing (absent index.html). Return value is either a file object, or None (indicating an error). In either case, the headers are sent, making the interface the same as for send_head(). zNo permission to list directoryNcSs|jS)N)r7)arrrsz9SimpleHTTPRequestHandler.list_directory..)key surrogatepass)errorsF)rLzDirectory listing for %szZz z@z%s z

%s

z
    r@z
  • %s
  • z

 surrogateescaperz Content-typeztext/html; charset=%szContent-Length) rlistdirrr,rrsortrrunquoter.UnicodeDecodeErrorrVrWrlgetfilesystemencodingr`rerislinkrLrXioBytesIOrZseekrSrrTr#r(r>) rr.listrZ displaypathenctitlenamefullnameZ displaynameZlinknameZencodedrrrrrs\          z'SimpleHTTPRequestHandler.list_directoryc Cs|jddd}|jddd}|jjd}ytjj|dd}Wn tk rbtjj|}YnXtj|}|jd}t d|}t j }x8|D]0}t j j |s|t jt jfkrqt j j||}qW|r|d7}|S) zTranslate a /-separated PATH to the local filename syntax. Components that mean special things to the local file system (e.g. drive or directory names) are ignored. (XXX They should probably be diagnosed.) ?rr#rr)rN)r'r%rrrrr posixpathnormpathfilterrgetcwdr.dirnamecurdirpardirre)rr.Ztrailing_slashr:Zwordrrrrs$     z'SimpleHTTPRequestHandler.translate_pathcCstj||dS)aCopy all data between two file objects. The SOURCE argument is a file object open for reading (or anything with a read() method) and the DESTINATION argument is a file object open for writing (or anything with a write() method). The only reason for overriding this would be to change the block size or perhaps to replace newlines by CRLF -- note however that this the default server uses this to copy binary data as well. N)shutilZ copyfileobj)rsourceZ outputfilerrrrsz!SimpleHTTPRequestHandler.copyfilecCsLtj|\}}||jkr"|j|S|j}||jkr>|j|S|jdSdS)aGuess the type of a file. Argument is a PATH (a filename). Return value is a string of the form type/subtype, usable for a MIME Content-type header. The default implementation looks the file's extension up in the table self.extensions_map, using application/octet-stream as a default; however it would be permissible (if slow) to look inside the data to make a better guess. 
rN)rsplitextextensions_mapr7)rr.baseZextrrrr)s    z#SimpleHTTPRequestHandler.guess_typezapplication/octet-streamz text/plain)rz.pyz.cz.hN)rrrrrrqrrrrrrr mimetypesZinitedZinitZ types_mapcopyrupdaterrrrrrs"  1: c Cs|jd\}}}tjj|}|jd}g}x<|ddD],}|dkrN|jq8|r8|dkr8|j|q8W|r|j}|r|dkr|jd}q|dkrd}nd}|rdj||f}ddj||f}dj|}|S) a Given a URL path, remove extra '/'s and '.' path elements and collapse any '..' references and returns a collapsed path. Implements something akin to RFC-2396 5.2 step 6 to parse relative paths. The utility of this function is limited to is_cgi method and helps preventing some security attacks. Returns: The reconstituted URL, which will always start with a '/'. Raises: IndexError if too many '..' occur within the path. rrNrz..rr) partitionrrrr'popr`re) r._query path_partsZ head_partspartZ tail_partZ splitpathcollapsed_pathrrr_url_collapse_pathNs.     rcCsptrtSy ddl}Wntk r(dSXy|jddaWn.tk rjdtdd|jDaYnXtS) z$Internal routine to get nobody's uidrNrnobodyrcss|]}|dVqdS)rNr)r|rvrrr sznobody_uid..r)rpwd ImportErrorgetpwnamrRmaxZgetpwall)rrrr nobody_uids  rcCstj|tjS)zTest for executable file.)raccessX_OK)r.rrr executablesrc@sVeZdZdZeedZdZddZddZ dd Z d d gZ d d Z ddZ ddZdS)rzComplete HTTP server with GET, HEAD and POST commands. GET and HEAD also support running CGI scripts. The POST command is *only* implemented for CGI scripts. forkrcCs$|jr|jn|jtjddS)zRServe a POST request. This is only implemented for CGI scripts. zCan only POST to CGI scriptsN)is_cgirun_cgir,rrA)rrrrdo_POSTs  zCGIHTTPRequestHandler.do_POSTcCs|jr|jStj|SdS)z-Version of send_head that support CGI scriptsN)rrrr)rrrrrszCGIHTTPRequestHandler.send_headcCsPt|j}|jdd}|d|||dd}}||jkrL||f|_dSdS)a3Test whether self.path corresponds to a CGI script. Returns True and updates the cgi_info attribute to the tuple (dir, rest) if self.path requires running a CGI script. Returns False otherwise. If any exception is raised, the caller should assume that self.path was rejected as invalid and act accordingly. 
The default implementation tests whether the normalized url path begins with one of the strings in self.cgi_directories (and the next character is a '/' or the end of the string). rrNTF)rr.findcgi_directoriescgi_info)rrZdir_sepheadtailrrrrs    zCGIHTTPRequestHandler.is_cgiz/cgi-binz/htbincCst|S)z1Test whether argument path is an executable file.)r)rr.rrr is_executablesz#CGIHTTPRequestHandler.is_executablecCstjj|\}}|jdkS)z.Test whether argument path is a Python script..py.pyw)rr)rr.rr7)rr.rrrrr is_pythonszCGIHTTPRequestHandler.is_pythonc)Cs|j\}}|d|}|jdt|d}x`|dkr|d|}||dd}|j|}tjj|r||}}|jdt|d}q,Pq,W|jd\}}} |jd}|dkr|d|||d} }n |d} }|d| } |j| } tjj| s|j t j d| dStjj | s2|j t j d| dS|j| } |jsL| rn|j| sn|j t j d | dStjtj}|j|d <|jj|d <d |d <|j|d<t|jj|d<|j|d<tjj|}||d<|j||d<| |d<| r| |d<|jd|d<|jj d}|r|j!}t|dkrddl"}ddl#}|d|d<|dj$dkry"|dj%d}|j&|j'd}Wn|j(t)fk rYn&X|j!d}t|dkr|d|d<|jj ddkr|jj*|d<n|jd|d<|jj d}|r||d <|jj d!}|r"||d"<g}xN|jj+d#D]>}|ddd$krZ|j,|j-n||d%dj!d&}q4Wd&j.||d'<|jj d(}|r||d)<t/d|jj0d*g}d+j.|}|r||d,<xd=D]}|j1|dqW|j2t j3d.|j4| j5d/d0}|jr.| g}d1|kr*|j,|t6}|j7j8tj9}|dkrtj:|d\}}x0t;j;|jd2|dSy\ytj?|Wnt@k rYnXtjA|j|"d?d}"|"d7g|!}!d1| kr|!j,| |jKd8| jL|!y tM|}#WntNtOfk rd}#YnX| jP|!| jQ| jQ| jQ|d9}$|jj$d:kr|#dkr|jd;|'|$jVjW|$jXjW|$jY}(|(r|j>d2|(n |jKd<dS)@zExecute a CGI script.rrrNrrzNo such CGI script (%r)z#CGI script is not a plain file (%r)z!CGI script is not executable (%r)ZSERVER_SOFTWAREZ SERVER_NAMEzCGI/1.1ZGATEWAY_INTERFACEZSERVER_PROTOCOLZ SERVER_PORTZREQUEST_METHODZ PATH_INFOZPATH_TRANSLATEDZ SCRIPT_NAME QUERY_STRINGZ REMOTE_ADDR authorizationrZ AUTH_TYPEZbasicascii:Z REMOTE_USERz content-typeZ CONTENT_TYPEzcontent-lengthCONTENT_LENGTHreferer HTTP_REFERERacceptz ,Z HTTP_ACCEPTz user-agentHTTP_USER_AGENTZcookiez, HTTP_COOKIE REMOTE_HOSTzScript output follows+rp=zCGI script exit status %#xzw.exerrz-uz command: %s)stdinstdoutrmenvZpostz%szCGI script exited OK)rrrrrr)Zrrr(rrr.rrrr,rrisfileZ FORBIDDENr 
have_forkrrdeepcopyenvironr\Zserverrr-r#rrrrrrzr5r6r'base64binasciir7rXZ decodebytesdecodeError UnicodeErrorZget_content_typeZgetallmatchingheadersr`striprerZget_all setdefaultrSrrcrPrrCrDrwaitpidselectr3readrEsetuidrdup2rexecveZ handle_errorZrequest_exit subprocessrlrrrhZ list2cmdliner* TypeErrorr)PopenPIPEZ_sockZrecvZ communicaterZrmrr returncode))rdirrestr.iZnextdirZnextrestZ scriptdirrrZscriptZ scriptnameZ scriptfileZispyrZuqrestrrrZlengthrrlineZuacoZ cookie_strkZ decoded_queryrkrpidstsrZcmdlineZinterpnbytespdatarrmZstatusrrrrs4                                                zCGIHTTPRequestHandler.run_cgiN)rrrrr@rrZrbufsizerrrrrrrrrrrrs zHTTP/1.0i@rc Cs||f}||_|||b}|jj}d}t|j|d|ddy |jWn&tk rttdtjdYnXWdQRXdS)zmTest the HTTP request handler class. This runs an HTTP server on port 8000 (or the port argument). z>Serving HTTP on {host} port {port} (http://{host}:{port}/) ...rr)rrz& Keyboard interrupt received, exiting.N) r-r Z getsocknameprintrjZ serve_foreverKeyboardInterruptrlexit) HandlerClassZ ServerClassZprotocolrbindr ZhttpdZsaZ serve_messagerrrtests   r%__main__z--cgi store_truezRun as CGI Server)actionhelpz--bindz-bZADDRESSz8Specify alternate bind address [default: all interfaces])defaultmetavarr)rZstorerz&Specify alternate port [default: 8000])r(r*typenargsr))r#rr$)/rr__all__Z email.utilsrtrVZ http.clientr1rrrrr rr r rlrsZ urllib.parserrargparserrrr rZStreamRequestHandlerrrrrrrrr%rArgumentParserparser add_argumentr* parse_argsrkZcgiZ handler_classrr$rrrr sj3  g]0   __init__.py000064400000013501147204456360006670 0ustar00from enum import IntEnum __all__ = ['HTTPStatus'] class HTTPStatus(IntEnum): """HTTP status codes and reason phrases Status codes from the following RFCs are all observed: * RFC 7231: Hypertext Transfer Protocol (HTTP/1.1), obsoletes 2616 * RFC 6585: Additional HTTP Status Codes * RFC 3229: Delta encoding in HTTP * RFC 4918: HTTP Extensions for WebDAV, obsoletes 2518 * RFC 5842: Binding Extensions to WebDAV * RFC 7238: Permanent Redirect * RFC 
2295: Transparent Content Negotiation in HTTP * RFC 2774: An HTTP Extension Framework """ def __new__(cls, value, phrase, description=''): obj = int.__new__(cls, value) obj._value_ = value obj.phrase = phrase obj.description = description return obj # informational CONTINUE = 100, 'Continue', 'Request received, please continue' SWITCHING_PROTOCOLS = (101, 'Switching Protocols', 'Switching to new protocol; obey Upgrade header') PROCESSING = 102, 'Processing' # success OK = 200, 'OK', 'Request fulfilled, document follows' CREATED = 201, 'Created', 'Document created, URL follows' ACCEPTED = (202, 'Accepted', 'Request accepted, processing continues off-line') NON_AUTHORITATIVE_INFORMATION = (203, 'Non-Authoritative Information', 'Request fulfilled from cache') NO_CONTENT = 204, 'No Content', 'Request fulfilled, nothing follows' RESET_CONTENT = 205, 'Reset Content', 'Clear input form for further input' PARTIAL_CONTENT = 206, 'Partial Content', 'Partial content follows' MULTI_STATUS = 207, 'Multi-Status' ALREADY_REPORTED = 208, 'Already Reported' IM_USED = 226, 'IM Used' # redirection MULTIPLE_CHOICES = (300, 'Multiple Choices', 'Object has several resources -- see URI list') MOVED_PERMANENTLY = (301, 'Moved Permanently', 'Object moved permanently -- see URI list') FOUND = 302, 'Found', 'Object moved temporarily -- see URI list' SEE_OTHER = 303, 'See Other', 'Object moved -- see Method and URL list' NOT_MODIFIED = (304, 'Not Modified', 'Document has not changed since given time') USE_PROXY = (305, 'Use Proxy', 'You must use proxy specified in Location to access this resource') TEMPORARY_REDIRECT = (307, 'Temporary Redirect', 'Object moved temporarily -- see URI list') PERMANENT_REDIRECT = (308, 'Permanent Redirect', 'Object moved temporarily -- see URI list') # client error BAD_REQUEST = (400, 'Bad Request', 'Bad request syntax or unsupported method') UNAUTHORIZED = (401, 'Unauthorized', 'No permission -- see authorization schemes') PAYMENT_REQUIRED = (402, 'Payment 
Required', 'No payment -- see charging schemes') FORBIDDEN = (403, 'Forbidden', 'Request forbidden -- authorization will not help') NOT_FOUND = (404, 'Not Found', 'Nothing matches the given URI') METHOD_NOT_ALLOWED = (405, 'Method Not Allowed', 'Specified method is invalid for this resource') NOT_ACCEPTABLE = (406, 'Not Acceptable', 'URI not available in preferred format') PROXY_AUTHENTICATION_REQUIRED = (407, 'Proxy Authentication Required', 'You must authenticate with this proxy before proceeding') REQUEST_TIMEOUT = (408, 'Request Timeout', 'Request timed out; try again later') CONFLICT = 409, 'Conflict', 'Request conflict' GONE = (410, 'Gone', 'URI no longer exists and has been permanently removed') LENGTH_REQUIRED = (411, 'Length Required', 'Client must specify Content-Length') PRECONDITION_FAILED = (412, 'Precondition Failed', 'Precondition in headers is false') REQUEST_ENTITY_TOO_LARGE = (413, 'Request Entity Too Large', 'Entity is too large') REQUEST_URI_TOO_LONG = (414, 'Request-URI Too Long', 'URI is too long') UNSUPPORTED_MEDIA_TYPE = (415, 'Unsupported Media Type', 'Entity body in unsupported format') REQUESTED_RANGE_NOT_SATISFIABLE = (416, 'Requested Range Not Satisfiable', 'Cannot satisfy request range') EXPECTATION_FAILED = (417, 'Expectation Failed', 'Expect condition could not be satisfied') UNPROCESSABLE_ENTITY = 422, 'Unprocessable Entity' LOCKED = 423, 'Locked' FAILED_DEPENDENCY = 424, 'Failed Dependency' UPGRADE_REQUIRED = 426, 'Upgrade Required' PRECONDITION_REQUIRED = (428, 'Precondition Required', 'The origin server requires the request to be conditional') TOO_MANY_REQUESTS = (429, 'Too Many Requests', 'The user has sent too many requests in ' 'a given amount of time ("rate limiting")') REQUEST_HEADER_FIELDS_TOO_LARGE = (431, 'Request Header Fields Too Large', 'The server is unwilling to process the request because its header ' 'fields are too large') # server errors INTERNAL_SERVER_ERROR = (500, 'Internal Server Error', 'Server got itself in 
trouble') NOT_IMPLEMENTED = (501, 'Not Implemented', 'Server does not support this operation') BAD_GATEWAY = (502, 'Bad Gateway', 'Invalid responses from another server/proxy') SERVICE_UNAVAILABLE = (503, 'Service Unavailable', 'The server cannot process the request due to a high load') GATEWAY_TIMEOUT = (504, 'Gateway Timeout', 'The gateway server did not receive a timely response') HTTP_VERSION_NOT_SUPPORTED = (505, 'HTTP Version Not Supported', 'Cannot fulfill request') VARIANT_ALSO_NEGOTIATES = 506, 'Variant Also Negotiates' INSUFFICIENT_STORAGE = 507, 'Insufficient Storage' LOOP_DETECTED = 508, 'Loop Detected' NOT_EXTENDED = 510, 'Not Extended' NETWORK_AUTHENTICATION_REQUIRED = (511, 'Network Authentication Required', 'The client needs to authenticate to gain network access') client.py000064400000156052147204456360006420 0ustar00r"""HTTP/1.1 client library HTTPConnection goes through a number of "states", which define when a client may legally make another request or fetch the response for a particular request. This diagram details these state transitions: (null) | | HTTPConnection() v Idle | | putrequest() v Request-started | | ( putheader() )* endheaders() v Request-sent |\_____________________________ | | getresponse() raises | response = getresponse() | ConnectionError v v Unread-response Idle [Response-headers-read] |\____________________ | | | response.read() | putrequest() v v Idle Req-started-unread-response ______/| / | response.read() | | ( putheader() )* endheaders() v v Request-started Req-sent-unread-response | | response.read() v Request-sent This diagram presents the following rules: -- a second request may not be started until {response-headers-read} -- a response [object] cannot be retrieved until {request-sent} -- there is no differentiation between an unread response body and a partially read response body Note: this enforcement is applied by the HTTPConnection class. 
The HTTPResponse class does not enforce this state machine, which implies sophisticated clients may accelerate the request/response pipeline. Caution should be taken, though: accelerating the states beyond the above pattern may imply knowledge of the server's connection-close behavior for certain requests. For example, it is impossible to tell whether the server will close the connection UNTIL the response headers have been read; this means that further requests cannot be placed into the pipeline until it is known that the server will NOT be closing the connection. Logical State __state __response ------------- ------- ---------- Idle _CS_IDLE None Request-started _CS_REQ_STARTED None Request-sent _CS_REQ_SENT None Unread-response _CS_IDLE Req-started-unread-response _CS_REQ_STARTED Req-sent-unread-response _CS_REQ_SENT """ import email.parser import email.message import http import io import os import re import socket import collections from urllib.parse import urlsplit # HTTPMessage, parse_headers(), and the HTTP status code constants are # intentionally omitted for simplicity __all__ = ["HTTPResponse", "HTTPConnection", "HTTPException", "NotConnected", "UnknownProtocol", "UnknownTransferEncoding", "UnimplementedFileMode", "IncompleteRead", "InvalidURL", "ImproperConnectionState", "CannotSendRequest", "CannotSendHeader", "ResponseNotReady", "BadStatusLine", "LineTooLong", "RemoteDisconnected", "error", "responses"] HTTP_PORT = 80 HTTPS_PORT = 443 _UNKNOWN = 'UNKNOWN' # connection states _CS_IDLE = 'Idle' _CS_REQ_STARTED = 'Request-started' _CS_REQ_SENT = 'Request-sent' # hack to maintain backwards compatibility globals().update(http.HTTPStatus.__members__) # another hack to maintain backwards compatibility # Mapping status codes to official W3C names responses = {v: v.phrase for v in http.HTTPStatus.__members__.values()} # maximal amount of data to read at one time in _safe_read MAXAMOUNT = 1048576 # maximal line length when calling readline(). 
_MAXLINE = 65536 _MAXHEADERS = 100 # Header name/value ABNF (http://tools.ietf.org/html/rfc7230#section-3.2) # # VCHAR = %x21-7E # obs-text = %x80-FF # header-field = field-name ":" OWS field-value OWS # field-name = token # field-value = *( field-content / obs-fold ) # field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ] # field-vchar = VCHAR / obs-text # # obs-fold = CRLF 1*( SP / HTAB ) # ; obsolete line folding # ; see Section 3.2.4 # token = 1*tchar # # tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" # / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~" # / DIGIT / ALPHA # ; any VCHAR, except delimiters # # VCHAR defined in http://tools.ietf.org/html/rfc5234#appendix-B.1 # the patterns for both name and value are more lenient than RFC # definitions to allow for backwards compatibility _is_legal_header_name = re.compile(rb'[^:\s][^:\r\n]*').fullmatch _is_illegal_header_value = re.compile(rb'\n(?![ \t])|\r(?![ \t\n])').search # These characters are not allowed within HTTP URL paths. # See https://tools.ietf.org/html/rfc3986#section-3.3 and the # https://tools.ietf.org/html/rfc3986#appendix-A pchar definition. # Prevents CVE-2019-9740. Includes control characters such as \r\n. # We don't restrict chars above \x7f as putrequest() limits us to ASCII. _contains_disallowed_url_pchar_re = re.compile('[\x00-\x20\x7f]') # Arguably only these _should_ allowed: # _is_allowed_url_pchars_re = re.compile(r"^[/!$&'()*+,;=:@%a-zA-Z0-9._~-]+$") # We are more lenient for assumed real world compatibility purposes. # These characters are not allowed within HTTP method names # to prevent http header injection. 
_contains_disallowed_method_pchar_re = re.compile('[\x00-\x1f]') # We always set the Content-Length header for these methods because some # servers will otherwise respond with a 411 _METHODS_EXPECTING_BODY = {'PATCH', 'POST', 'PUT'} def _encode(data, name='data'): """Call data.encode("latin-1") but show a better error message.""" try: return data.encode("latin-1") except UnicodeEncodeError as err: raise UnicodeEncodeError( err.encoding, err.object, err.start, err.end, "%s (%.20r) is not valid Latin-1. Use %s.encode('utf-8') " "if you want to send it encoded in UTF-8." % (name.title(), data[err.start:err.end], name)) from None class HTTPMessage(email.message.Message): # XXX The only usage of this method is in # http.server.CGIHTTPRequestHandler. Maybe move the code there so # that it doesn't need to be part of the public API. The API has # never been defined so this could cause backwards compatibility # issues. def getallmatchingheaders(self, name): """Find all header lines matching a given header name. Look through the list of headers and find all lines matching a given header name (and their continuation lines). A list of the lines is returned, without interpretation. If the header does not occur, an empty list is returned. If the header occurs multiple times, all occurrences are returned. Case is not important in the header name. """ name = name.lower() + ':' n = len(name) lst = [] hit = 0 for line in self.keys(): if line[:n].lower() == name: hit = 1 elif not line[:1].isspace(): hit = 0 if hit: lst.append(line) return lst def _read_headers(fp): """Reads potential header lines into a list from a file pointer. Length of line is limited by _MAXLINE, and number of headers is limited by _MAXHEADERS. 
""" headers = [] while True: line = fp.readline(_MAXLINE + 1) if len(line) > _MAXLINE: raise LineTooLong("header line") headers.append(line) if len(headers) > _MAXHEADERS: raise HTTPException("got more than %d headers" % _MAXHEADERS) if line in (b'\r\n', b'\n', b''): break return headers def parse_headers(fp, _class=HTTPMessage): """Parses only RFC2822 headers from a file pointer. email Parser wants to see strings rather than bytes. But a TextIOWrapper around self.rfile would buffer too many bytes from the stream, bytes which we later need to read as bytes. So we read the correct bytes here, as bytes, for email Parser to parse. """ headers = _read_headers(fp) hstring = b''.join(headers).decode('iso-8859-1') return email.parser.Parser(_class=_class).parsestr(hstring) class HTTPResponse(io.BufferedIOBase): # See RFC 2616 sec 19.6 and RFC 1945 sec 6 for details. # The bytes from the socket object are iso-8859-1 strings. # See RFC 2616 sec 2.2 which notes an exception for MIME-encoded # text following RFC 2047. The basic status line parsing only # accepts iso-8859-1. def __init__(self, sock, debuglevel=0, method=None, url=None): # If the response includes a content-length header, we need to # make sure that the client doesn't read more than the # specified number of bytes. If it does, it will block until # the server times out and closes the connection. This will # happen if a self.fp.read() is done (without a size) whether # self.fp is buffered or not. So, no self.fp.read() by # clients unless they know what they are doing. self.fp = sock.makefile("rb") self.debuglevel = debuglevel self._method = method # The HTTPResponse object is returned via urllib. The clients # of http and urllib expect different attributes for the # headers. headers is used here and supports urllib. msg is # provided as a backwards compatibility layer for http # clients. 
self.headers = self.msg = None # from the Status-Line of the response self.version = _UNKNOWN # HTTP-Version self.status = _UNKNOWN # Status-Code self.reason = _UNKNOWN # Reason-Phrase self.chunked = _UNKNOWN # is "chunked" being used? self.chunk_left = _UNKNOWN # bytes left to read in current chunk self.length = _UNKNOWN # number of bytes left in response self.will_close = _UNKNOWN # conn will close at end of response def _read_status(self): line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1") if len(line) > _MAXLINE: raise LineTooLong("status line") if self.debuglevel > 0: print("reply:", repr(line)) if not line: # Presumably, the server closed the connection before # sending a valid response. raise RemoteDisconnected("Remote end closed connection without" " response") try: version, status, reason = line.split(None, 2) except ValueError: try: version, status = line.split(None, 1) reason = "" except ValueError: # empty version will cause next test to fail. version = "" if not version.startswith("HTTP/"): self._close_conn() raise BadStatusLine(line) # The status code is a three-digit number try: status = int(status) if status < 100 or status > 999: raise BadStatusLine(line) except ValueError: raise BadStatusLine(line) return version, status, reason def begin(self): if self.headers is not None: # we've already started reading the response return # read until we get a non-100 response while True: version, status, reason = self._read_status() if status != CONTINUE: break # skip the header from the 100 response skipped_headers = _read_headers(self.fp) if self.debuglevel > 0: print("headers:", skipped_headers) del skipped_headers self.code = self.status = status self.reason = reason.strip() if version in ("HTTP/1.0", "HTTP/0.9"): # Some servers might still return "0.9", treat it as 1.0 anyway self.version = 10 elif version.startswith("HTTP/1."): self.version = 11 # use HTTP/1.1 code for HTTP/1.x where x>=1 else: raise UnknownProtocol(version) self.headers = self.msg 
= parse_headers(self.fp) if self.debuglevel > 0: for hdr in self.headers: print("header:", hdr + ":", self.headers.get(hdr)) # are we using the chunked-style of transfer encoding? tr_enc = self.headers.get("transfer-encoding") if tr_enc and tr_enc.lower() == "chunked": self.chunked = True self.chunk_left = None else: self.chunked = False # will the connection close at the end of the response? self.will_close = self._check_close() # do we have a Content-Length? # NOTE: RFC 2616, S4.4, #3 says we ignore this if tr_enc is "chunked" self.length = None length = self.headers.get("content-length") # are we using the chunked-style of transfer encoding? tr_enc = self.headers.get("transfer-encoding") if length and not self.chunked: try: self.length = int(length) except ValueError: self.length = None else: if self.length < 0: # ignore nonsensical negative lengths self.length = None else: self.length = None # does the body have a fixed length? (of zero) if (status == NO_CONTENT or status == NOT_MODIFIED or 100 <= status < 200 or # 1xx codes self._method == "HEAD"): self.length = 0 # if the connection remains open, and we aren't using chunked, and # a content-length was not provided, then assume that the connection # WILL close. if (not self.will_close and not self.chunked and self.length is None): self.will_close = True def _check_close(self): conn = self.headers.get("connection") if self.version == 11: # An HTTP/1.1 proxy is assumed to stay open unless # explicitly closed. conn = self.headers.get("connection") if conn and "close" in conn.lower(): return True return False # Some HTTP/1.0 implementations have support for persistent # connections, using rules different than HTTP/1.1. # For older HTTP, Keep-Alive indicates persistent connection. if self.headers.get("keep-alive"): return False # At least Akamai returns a "Connection: Keep-Alive" header, # which was supposed to be sent by the client. 
if conn and "keep-alive" in conn.lower(): return False # Proxy-Connection is a netscape hack. pconn = self.headers.get("proxy-connection") if pconn and "keep-alive" in pconn.lower(): return False # otherwise, assume it will close return True def _close_conn(self): fp = self.fp self.fp = None fp.close() def close(self): try: super().close() # set "closed" flag finally: if self.fp: self._close_conn() # These implementations are for the benefit of io.BufferedReader. # XXX This class should probably be revised to act more like # the "raw stream" that BufferedReader expects. def flush(self): super().flush() if self.fp: self.fp.flush() def readable(self): """Always returns True""" return True # End of "raw stream" methods def isclosed(self): """True if the connection is closed.""" # NOTE: it is possible that we will not ever call self.close(). This # case occurs when will_close is TRUE, length is None, and we # read up to the last byte, but NOT past it. # # IMPLIES: if will_close is FALSE, then self.close() will ALWAYS be # called, meaning self.isclosed() is meaningful. return self.fp is None def read(self, amt=None): if self.fp is None: return b"" if self._method == "HEAD": self._close_conn() return b"" if amt is not None: # Amount is given, implement using readinto b = bytearray(amt) n = self.readinto(b) return memoryview(b)[:n].tobytes() else: # Amount is not given (unbounded read) so we must check self.length # and self.chunked if self.chunked: return self._readall_chunked() if self.length is None: s = self.fp.read() else: try: s = self._safe_read(self.length) except IncompleteRead: self._close_conn() raise self.length = 0 self._close_conn() # we read everything return s def readinto(self, b): """Read up to len(b) bytes into bytearray b and return the number of bytes read. 
""" if self.fp is None: return 0 if self._method == "HEAD": self._close_conn() return 0 if self.chunked: return self._readinto_chunked(b) if self.length is not None: if len(b) > self.length: # clip the read to the "end of response" b = memoryview(b)[0:self.length] # we do not use _safe_read() here because this may be a .will_close # connection, and the user is reading more bytes than will be provided # (for example, reading in 1k chunks) n = self.fp.readinto(b) if not n and b: # Ideally, we would raise IncompleteRead if the content-length # wasn't satisfied, but it might break compatibility. self._close_conn() elif self.length is not None: self.length -= n if not self.length: self._close_conn() return n def _read_next_chunk_size(self): # Read the next chunk size from the file line = self.fp.readline(_MAXLINE + 1) if len(line) > _MAXLINE: raise LineTooLong("chunk size") i = line.find(b";") if i >= 0: line = line[:i] # strip chunk-extensions try: return int(line, 16) except ValueError: # close the connection as protocol synchronisation is # probably lost self._close_conn() raise def _read_and_discard_trailer(self): # read and discard trailer up to the CRLF terminator ### note: we shouldn't have any trailers! while True: line = self.fp.readline(_MAXLINE + 1) if len(line) > _MAXLINE: raise LineTooLong("trailer line") if not line: # a vanishingly small number of sites EOF without # sending the trailer break if line in (b'\r\n', b'\n', b''): break def _get_chunk_left(self): # return self.chunk_left, reading a new chunk if necessary. # chunk_left == 0: at the end of the current chunk, need to close it # chunk_left == None: No current chunk, should read next. # This function returns non-zero or None if the last chunk has # been read. 
chunk_left = self.chunk_left if not chunk_left: # Can be 0 or None if chunk_left is not None: # We are at the end of chunk, discard chunk end self._safe_read(2) # toss the CRLF at the end of the chunk try: chunk_left = self._read_next_chunk_size() except ValueError: raise IncompleteRead(b'') if chunk_left == 0: # last chunk: 1*("0") [ chunk-extension ] CRLF self._read_and_discard_trailer() # we read everything; close the "file" self._close_conn() chunk_left = None self.chunk_left = chunk_left return chunk_left def _readall_chunked(self): assert self.chunked != _UNKNOWN value = [] try: while True: chunk_left = self._get_chunk_left() if chunk_left is None: break value.append(self._safe_read(chunk_left)) self.chunk_left = 0 return b''.join(value) except IncompleteRead: raise IncompleteRead(b''.join(value)) def _readinto_chunked(self, b): assert self.chunked != _UNKNOWN total_bytes = 0 mvb = memoryview(b) try: while True: chunk_left = self._get_chunk_left() if chunk_left is None: return total_bytes if len(mvb) <= chunk_left: n = self._safe_readinto(mvb) self.chunk_left = chunk_left - n return total_bytes + n temp_mvb = mvb[:chunk_left] n = self._safe_readinto(temp_mvb) mvb = mvb[n:] total_bytes += n self.chunk_left = 0 except IncompleteRead: raise IncompleteRead(bytes(b[0:total_bytes])) def _safe_read(self, amt): """Read the number of bytes requested, compensating for partial reads. Normally, we have a blocking socket, but a read() can be interrupted by a signal (resulting in a partial read). Note that we cannot distinguish between EOF and an interrupt when zero bytes have been read. IncompleteRead() will be raised in this situation. This function should be used when bytes "should" be present for reading. If the bytes are truly not available (due to EOF), then the IncompleteRead exception can be used to detect the problem. 
""" s = [] while amt > 0: chunk = self.fp.read(min(amt, MAXAMOUNT)) if not chunk: raise IncompleteRead(b''.join(s), amt) s.append(chunk) amt -= len(chunk) return b"".join(s) def _safe_readinto(self, b): """Same as _safe_read, but for reading into a buffer.""" total_bytes = 0 mvb = memoryview(b) while total_bytes < len(b): if MAXAMOUNT < len(mvb): temp_mvb = mvb[0:MAXAMOUNT] n = self.fp.readinto(temp_mvb) else: n = self.fp.readinto(mvb) if not n: raise IncompleteRead(bytes(mvb[0:total_bytes]), len(b)) mvb = mvb[n:] total_bytes += n return total_bytes def read1(self, n=-1): """Read with at most one underlying system call. If at least one byte is buffered, return that instead. """ if self.fp is None or self._method == "HEAD": return b"" if self.chunked: return self._read1_chunked(n) if self.length is not None and (n < 0 or n > self.length): n = self.length try: result = self.fp.read1(n) except ValueError: if n >= 0: raise # some implementations, like BufferedReader, don't support -1 # Read an arbitrarily selected largeish chunk. 
result = self.fp.read1(16*1024) if not result and n: self._close_conn() elif self.length is not None: self.length -= len(result) return result def peek(self, n=-1): # Having this enables IOBase.readline() to read more than one # byte at a time if self.fp is None or self._method == "HEAD": return b"" if self.chunked: return self._peek_chunked(n) return self.fp.peek(n) def readline(self, limit=-1): if self.fp is None or self._method == "HEAD": return b"" if self.chunked: # Fallback to IOBase readline which uses peek() and read() return super().readline(limit) if self.length is not None and (limit < 0 or limit > self.length): limit = self.length result = self.fp.readline(limit) if not result and limit: self._close_conn() elif self.length is not None: self.length -= len(result) return result def _read1_chunked(self, n): # Strictly speaking, _get_chunk_left() may cause more than one read, # but that is ok, since that is to satisfy the chunked protocol. chunk_left = self._get_chunk_left() if chunk_left is None or n == 0: return b'' if not (0 <= n <= chunk_left): n = chunk_left # if n is negative or larger than chunk_left read = self.fp.read1(n) self.chunk_left -= len(read) if not read: raise IncompleteRead(b"") return read def _peek_chunked(self, n): # Strictly speaking, _get_chunk_left() may cause more than one read, # but that is ok, since that is to satisfy the chunked protocol. try: chunk_left = self._get_chunk_left() except IncompleteRead: return b'' # peek doesn't worry about protocol if chunk_left is None: return b'' # eof # peek is allowed to return more than requested. Just request the # entire chunk, and truncate what we get. return self.fp.peek(chunk_left)[:chunk_left] def fileno(self): return self.fp.fileno() def getheader(self, name, default=None): '''Returns the value of the header matching *name*. If there are multiple matching headers, the values are combined into a single string separated by commas and spaces. 
        If no matching header is found, returns *default* or None if
        the *default* is not specified.

        If the headers are unknown, raises http.client.ResponseNotReady.
        '''
        if self.headers is None:
            # begin() has not parsed the response yet.
            raise ResponseNotReady()
        headers = self.headers.get_all(name) or default
        if isinstance(headers, str) or not hasattr(headers, '__iter__'):
            # A single string value, or the caller-supplied default
            # (which may be any non-iterable object): return unchanged.
            return headers
        else:
            # Multiple headers with the same name: fold into one
            # comma-and-space separated string.
            return ', '.join(headers)

    def getheaders(self):
        """Return list of (header, value) tuples.

        Raises http.client.ResponseNotReady if the response headers
        have not been read yet.
        """
        if self.headers is None:
            raise ResponseNotReady()
        return list(self.headers.items())

    # We override IOBase.__iter__ so that it doesn't check for closed-ness

    def __iter__(self):
        return self

    # For compatibility with old-style urllib responses.

    def info(self):
        '''Returns an instance of the class mimetools.Message containing
        meta-information associated with the URL.

        When the method is HTTP, these headers are those returned by the
        server at the head of the retrieved HTML page (including
        Content-Length and Content-Type).

        When the method is FTP, a Content-Length header will be present if
        (as is now usual) the server passed back a file length in response
        to the FTP retrieval request. A Content-Type header will be present
        if the MIME type can be guessed.

        When the method is local-file, returned headers will include a Date
        representing the file's last-modified time, a Content-Length giving
        file size, and a Content-Type containing a guess at the file's type.
        See also the description of the mimetools module.
        '''
        return self.headers

    def geturl(self):
        '''Return the real URL of the page.

        In some cases, the HTTP server redirects a client to another URL.
        The urlopen() function handles this transparently, but in some cases
        the caller needs to know which URL the client was redirected to.
        The geturl() method can be used to get at this redirected URL.
        '''
        return self.url

    def getcode(self):
        '''Return the HTTP status code that was sent with the response,
        or None if the URL is not an HTTP URL.
''' return self.status class HTTPConnection: _http_vsn = 11 _http_vsn_str = 'HTTP/1.1' response_class = HTTPResponse default_port = HTTP_PORT auto_open = 1 debuglevel = 0 @staticmethod def _is_textIO(stream): """Test whether a file-like object is a text or a binary stream. """ return isinstance(stream, io.TextIOBase) @staticmethod def _get_content_length(body, method): """Get the content-length based on the body. If the body is None, we set Content-Length: 0 for methods that expect a body (RFC 7230, Section 3.3.2). We also set the Content-Length for any method if the body is a str or bytes-like object and not a file. """ if body is None: # do an explicit check for not None here to distinguish # between unset and set but empty if method.upper() in _METHODS_EXPECTING_BODY: return 0 else: return None if hasattr(body, 'read'): # file-like object. return None try: # does it implement the buffer protocol (bytes, bytearray, array)? mv = memoryview(body) return mv.nbytes except TypeError: pass if isinstance(body, str): return len(body) return None def __init__(self, host, port=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, source_address=None): self.timeout = timeout self.source_address = source_address self.sock = None self._buffer = [] self.__response = None self.__state = _CS_IDLE self._method = None self._tunnel_host = None self._tunnel_port = None self._tunnel_headers = {} (self.host, self.port) = self._get_hostport(host, port) # This is stored as an instance variable to allow unit # tests to replace it with a suitable mockup self._create_connection = socket.create_connection def set_tunnel(self, host, port=None, headers=None): """Set up host and port for HTTP CONNECT tunnelling. In a connection that uses HTTP CONNECT tunneling, the host passed to the constructor is used as a proxy server that relays all communication to the endpoint passed to `set_tunnel`. This done by sending an HTTP CONNECT request to the proxy server when the connection is established. 
This method must be called before the HTML connection has been established. The headers argument should be a mapping of extra HTTP headers to send with the CONNECT request. """ if self.sock: raise RuntimeError("Can't set up tunnel for established connection") self._tunnel_host, self._tunnel_port = self._get_hostport(host, port) if headers: self._tunnel_headers = headers else: self._tunnel_headers.clear() def _get_hostport(self, host, port): if port is None: i = host.rfind(':') j = host.rfind(']') # ipv6 addresses have [...] if i > j: try: port = int(host[i+1:]) except ValueError: if host[i+1:] == "": # http://foo.com:/ == http://foo.com/ port = self.default_port else: raise InvalidURL("nonnumeric port: '%s'" % host[i+1:]) host = host[:i] else: port = self.default_port if host and host[0] == '[' and host[-1] == ']': host = host[1:-1] return (host, port) def set_debuglevel(self, level): self.debuglevel = level def _tunnel(self): connect_str = "CONNECT %s:%d HTTP/1.0\r\n" % (self._tunnel_host, self._tunnel_port) connect_bytes = connect_str.encode("ascii") self.send(connect_bytes) for header, value in self._tunnel_headers.items(): header_str = "%s: %s\r\n" % (header, value) header_bytes = header_str.encode("latin-1") self.send(header_bytes) self.send(b'\r\n') response = self.response_class(self.sock, method=self._method) (version, code, message) = response._read_status() if code != http.HTTPStatus.OK: self.close() raise OSError("Tunnel connection failed: %d %s" % (code, message.strip())) while True: line = response.fp.readline(_MAXLINE + 1) if len(line) > _MAXLINE: raise LineTooLong("header line") if not line: # for sites which EOF without sending a trailer break if line in (b'\r\n', b'\n', b''): break if self.debuglevel > 0: print('header:', line.decode()) def connect(self): """Connect to the host and port specified in __init__.""" self.sock = self._create_connection( (self.host,self.port), self.timeout, self.source_address) self.sock.setsockopt(socket.IPPROTO_TCP, 
socket.TCP_NODELAY, 1) if self._tunnel_host: self._tunnel() def close(self): """Close the connection to the HTTP server.""" self.__state = _CS_IDLE try: sock = self.sock if sock: self.sock = None sock.close() # close it manually... there may be other refs finally: response = self.__response if response: self.__response = None response.close() def send(self, data): """Send `data' to the server. ``data`` can be a string object, a bytes object, an array object, a file-like object that supports a .read() method, or an iterable object. """ if self.sock is None: if self.auto_open: self.connect() else: raise NotConnected() if self.debuglevel > 0: print("send:", repr(data)) blocksize = 8192 if hasattr(data, "read") : if self.debuglevel > 0: print("sendIng a read()able") encode = self._is_textIO(data) if encode and self.debuglevel > 0: print("encoding file using iso-8859-1") while 1: datablock = data.read(blocksize) if not datablock: break if encode: datablock = datablock.encode("iso-8859-1") self.sock.sendall(datablock) return try: self.sock.sendall(data) except TypeError: if isinstance(data, collections.Iterable): for d in data: self.sock.sendall(d) else: raise TypeError("data should be a bytes-like object " "or an iterable, got %r" % type(data)) def _output(self, s): """Add a line of output to the current request buffer. Assumes that the line does *not* end with \\r\\n. """ self._buffer.append(s) def _read_readable(self, readable): blocksize = 8192 if self.debuglevel > 0: print("sendIng a read()able") encode = self._is_textIO(readable) if encode and self.debuglevel > 0: print("encoding file using iso-8859-1") while True: datablock = readable.read(blocksize) if not datablock: break if encode: datablock = datablock.encode("iso-8859-1") yield datablock def _send_output(self, message_body=None, encode_chunked=False): """Send the currently buffered request and clear the buffer. Appends an extra \\r\\n to the buffer. 
A message_body may be specified, to be appended to the request. """ self._buffer.extend((b"", b"")) msg = b"\r\n".join(self._buffer) del self._buffer[:] self.send(msg) if message_body is not None: # create a consistent interface to message_body if hasattr(message_body, 'read'): # Let file-like take precedence over byte-like. This # is needed to allow the current position of mmap'ed # files to be taken into account. chunks = self._read_readable(message_body) else: try: # this is solely to check to see if message_body # implements the buffer API. it /would/ be easier # to capture if PyObject_CheckBuffer was exposed # to Python. memoryview(message_body) except TypeError: try: chunks = iter(message_body) except TypeError: raise TypeError("message_body should be a bytes-like " "object or an iterable, got %r" % type(message_body)) else: # the object implements the buffer interface and # can be passed directly into socket methods chunks = (message_body,) for chunk in chunks: if not chunk: if self.debuglevel > 0: print('Zero length chunk ignored') continue if encode_chunked and self._http_vsn == 11: # chunked encoding chunk = f'{len(chunk):X}\r\n'.encode('ascii') + chunk \ + b'\r\n' self.send(chunk) if encode_chunked and self._http_vsn == 11: # end chunked transfer self.send(b'0\r\n\r\n') def putrequest(self, method, url, skip_host=False, skip_accept_encoding=False): """Send a request to the server. `method' specifies an HTTP request method, e.g. 'GET'. `url' specifies the object being requested, e.g. '/index.html'. `skip_host' if True does not add automatically a 'Host:' header `skip_accept_encoding' if True does not add automatically an 'Accept-Encoding:' header """ # if a prior response has been completed, then forget about it. if self.__response and self.__response.isclosed(): self.__response = None # in certain cases, we cannot issue another request on this connection. # this occurs when: # 1) we are in the process of sending a request. 
(_CS_REQ_STARTED) # 2) a response to a previous request has signalled that it is going # to close the connection upon completion. # 3) the headers for the previous response have not been read, thus # we cannot determine whether point (2) is true. (_CS_REQ_SENT) # # if there is no prior response, then we can request at will. # # if point (2) is true, then we will have passed the socket to the # response (effectively meaning, "there is no prior response"), and # will open a new one when a new request is made. # # Note: if a prior response exists, then we *can* start a new request. # We are not allowed to begin fetching the response to this new # request, however, until that prior response is complete. # if self.__state == _CS_IDLE: self.__state = _CS_REQ_STARTED else: raise CannotSendRequest(self.__state) self._validate_method(method) # Save the method we use, we need it later in the response phase self._method = method if not url: url = '/' # Prevent CVE-2019-9740. match = _contains_disallowed_url_pchar_re.search(url) if match: raise InvalidURL(f"URL can't contain control characters. {url!r} " f"(found at least {match.group()!r})") request = '%s %s %s' % (method, url, self._http_vsn_str) # Non-ASCII characters should have been eliminated earlier self._output(request.encode('ascii')) if self._http_vsn == 11: # Issue some standard headers for better HTTP/1.1 compliance if not skip_host: # this header is issued *only* for HTTP/1.1 # connections. more specifically, this means it is # only issued when the client uses the new # HTTPConnection() class. backwards-compat clients # will be using HTTP/1.0 and those clients may be # issuing this header themselves. we should NOT issue # it twice; some web servers (such as Apache) barf # when they see two Host: headers # If we need a non-standard port,include it in the # header. If the request is going through a proxy, # but the host of the actual URL, not the host of the # proxy. 
netloc = '' if url.startswith('http'): nil, netloc, nil, nil, nil = urlsplit(url) if netloc: try: netloc_enc = netloc.encode("ascii") except UnicodeEncodeError: netloc_enc = netloc.encode("idna") self.putheader('Host', netloc_enc) else: if self._tunnel_host: host = self._tunnel_host port = self._tunnel_port else: host = self.host port = self.port try: host_enc = host.encode("ascii") except UnicodeEncodeError: host_enc = host.encode("idna") # As per RFC 273, IPv6 address should be wrapped with [] # when used as Host header if host.find(':') >= 0: host_enc = b'[' + host_enc + b']' if port == self.default_port: self.putheader('Host', host_enc) else: host_enc = host_enc.decode("ascii") self.putheader('Host', "%s:%s" % (host_enc, port)) # note: we are assuming that clients will not attempt to set these # headers since *this* library must deal with the # consequences. this also means that when the supporting # libraries are updated to recognize other forms, then this # code should be changed (removed or updated). # we only want a Content-Encoding of "identity" since we don't # support encodings such as x-gzip or x-deflate. if not skip_accept_encoding: self.putheader('Accept-Encoding', 'identity') # we can accept "chunked" Transfer-Encodings, but no others # NOTE: no TE header implies *only* "chunked" #self.putheader('TE', 'chunked') # if TE is supplied in the header, then it must appear in a # Connection header. #self.putheader('Connection', 'TE') else: # For HTTP/1.0, the server will assume "not chunked" pass def _validate_method(self, method): """Validate a method name for putrequest.""" # prevent http header injection match = _contains_disallowed_method_pchar_re.search(method) if match: raise ValueError( f"method can't contain control characters. {method!r} " f"(found at least {match.group()!r})") def putheader(self, header, *values): """Send a request header line to the server. 
For example: h.putheader('Accept', 'text/html') """ if self.__state != _CS_REQ_STARTED: raise CannotSendHeader() if hasattr(header, 'encode'): header = header.encode('ascii') if not _is_legal_header_name(header): raise ValueError('Invalid header name %r' % (header,)) values = list(values) for i, one_value in enumerate(values): if hasattr(one_value, 'encode'): values[i] = one_value.encode('latin-1') elif isinstance(one_value, int): values[i] = str(one_value).encode('ascii') if _is_illegal_header_value(values[i]): raise ValueError('Invalid header value %r' % (values[i],)) value = b'\r\n\t'.join(values) header = header + b': ' + value self._output(header) def endheaders(self, message_body=None, *, encode_chunked=False): """Indicate that the last header line has been sent to the server. This method sends the request to the server. The optional message_body argument can be used to pass a message body associated with the request. """ if self.__state == _CS_REQ_STARTED: self.__state = _CS_REQ_SENT else: raise CannotSendHeader() self._send_output(message_body, encode_chunked=encode_chunked) def request(self, method, url, body=None, headers={}, *, encode_chunked=False): """Send a complete request to the server.""" self._send_request(method, url, body, headers, encode_chunked) def _send_request(self, method, url, body, headers, encode_chunked): # Honor explicitly requested Host: and Accept-Encoding: headers. header_names = frozenset(k.lower() for k in headers) skips = {} if 'host' in header_names: skips['skip_host'] = 1 if 'accept-encoding' in header_names: skips['skip_accept_encoding'] = 1 self.putrequest(method, url, **skips) # chunked encoding will happen if HTTP/1.1 is used and either # the caller passes encode_chunked=True or the following # conditions hold: # 1. content-length has not been explicitly set # 2. the body is a file or iterable, but not a str or bytes-like # 3. 
Transfer-Encoding has NOT been explicitly set by the caller if 'content-length' not in header_names: # only chunk body if not explicitly set for backwards # compatibility, assuming the client code is already handling the # chunking if 'transfer-encoding' not in header_names: # if content-length cannot be automatically determined, fall # back to chunked encoding encode_chunked = False content_length = self._get_content_length(body, method) if content_length is None: if body is not None: if self.debuglevel > 0: print('Unable to determine size of %r' % body) encode_chunked = True self.putheader('Transfer-Encoding', 'chunked') else: self.putheader('Content-Length', str(content_length)) else: encode_chunked = False for hdr, value in headers.items(): self.putheader(hdr, value) if isinstance(body, str): # RFC 2616 Section 3.7.1 says that text default has a # default charset of iso-8859-1. body = _encode(body, 'body') self.endheaders(body, encode_chunked=encode_chunked) def getresponse(self): """Get the response from the server. If the HTTPConnection is in the correct state, returns an instance of HTTPResponse or of whatever object is returned by the response_class variable. If a request has not been sent or if a previous response has not be handled, ResponseNotReady is raised. If the HTTP response indicates that the connection should be closed, then it will be closed before the response is returned. When the connection is closed, the underlying socket is closed. """ # if a prior response has been completed, then forget about it. 
if self.__response and self.__response.isclosed(): self.__response = None # if a prior response exists, then it must be completed (otherwise, we # cannot read this response's header to determine the connection-close # behavior) # # note: if a prior response existed, but was connection-close, then the # socket and response were made independent of this HTTPConnection # object since a new request requires that we open a whole new # connection # # this means the prior response had one of two states: # 1) will_close: this connection was reset and the prior socket and # response operate independently # 2) persistent: the response was retained and we await its # isclosed() status to become true. # if self.__state != _CS_REQ_SENT or self.__response: raise ResponseNotReady(self.__state) if self.debuglevel > 0: response = self.response_class(self.sock, self.debuglevel, method=self._method) else: response = self.response_class(self.sock, method=self._method) try: try: response.begin() except ConnectionError: self.close() raise assert response.will_close != _UNKNOWN self.__state = _CS_IDLE if response.will_close: # this effectively passes the connection to the response self.close() else: # remember this, so we can tell when it is complete self.__response = response return response except: response.close() raise try: import ssl except ImportError: pass else: class HTTPSConnection(HTTPConnection): "This class allows communication via SSL." default_port = HTTPS_PORT # XXX Should key_file and cert_file be deprecated in favour of context? 
def __init__(self, host, port=None, key_file=None, cert_file=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, source_address=None, *, context=None, check_hostname=None): super(HTTPSConnection, self).__init__(host, port, timeout, source_address) if (key_file is not None or cert_file is not None or check_hostname is not None): import warnings warnings.warn("key_file, cert_file and check_hostname are " "deprecated, use a custom context instead.", DeprecationWarning, 2) self.key_file = key_file self.cert_file = cert_file if context is None: context = ssl._create_default_https_context() # enable PHA for TLS 1.3 connections if available if context.post_handshake_auth is not None: context.post_handshake_auth = True will_verify = context.verify_mode != ssl.CERT_NONE if check_hostname is None: check_hostname = context.check_hostname if check_hostname and not will_verify: raise ValueError("check_hostname needs a SSL context with " "either CERT_OPTIONAL or CERT_REQUIRED") if key_file or cert_file: context.load_cert_chain(cert_file, key_file) # cert and key file means the user wants to authenticate. # enable TLS 1.3 PHA implicitly even for custom contexts. if context.post_handshake_auth is not None: context.post_handshake_auth = True self._context = context self._check_hostname = check_hostname def connect(self): "Connect to a host on a given (SSL) port." super().connect() if self._tunnel_host: server_hostname = self._tunnel_host else: server_hostname = self.host self.sock = self._context.wrap_socket(self.sock, server_hostname=server_hostname) if not self._context.check_hostname and self._check_hostname: try: ssl.match_hostname(self.sock.getpeercert(), server_hostname) except Exception: self.sock.shutdown(socket.SHUT_RDWR) self.sock.close() raise __all__.append("HTTPSConnection") class HTTPException(Exception): # Subclasses that define an __init__ must call Exception.__init__ # or define self.args. Otherwise, str() will fail. 
    pass

class NotConnected(HTTPException):
    # send() was called with no open socket and auto_open disabled.
    pass

class InvalidURL(HTTPException):
    # Malformed URL, e.g. a non-numeric port.
    pass

class UnknownProtocol(HTTPException):
    def __init__(self, version):
        self.args = version,
        self.version = version

class UnknownTransferEncoding(HTTPException):
    pass

class UnimplementedFileMode(HTTPException):
    pass

class IncompleteRead(HTTPException):
    def __init__(self, partial, expected=None):
        self.args = partial,
        self.partial = partial      # bytes actually received
        self.expected = expected    # additional bytes expected, or None

    def __repr__(self):
        if self.expected is not None:
            e = ', %i more expected' % self.expected
        else:
            e = ''
        return '%s(%i bytes read%s)' % (self.__class__.__name__,
                                        len(self.partial), e)
    def __str__(self):
        return repr(self)

class ImproperConnectionState(HTTPException):
    pass

class CannotSendRequest(ImproperConnectionState):
    pass

class CannotSendHeader(ImproperConnectionState):
    pass

class ResponseNotReady(ImproperConnectionState):
    pass

class BadStatusLine(HTTPException):
    def __init__(self, line):
        # repr() an empty line so the message is never blank.
        if not line:
            line = repr(line)
        self.args = line,
        self.line = line

class LineTooLong(HTTPException):
    def __init__(self, line_type):
        HTTPException.__init__(self, "got more than %d bytes when reading %s"
                                     % (_MAXLINE, line_type))

class RemoteDisconnected(ConnectionResetError, BadStatusLine):
    def __init__(self, *pos, **kw):
        BadStatusLine.__init__(self, "")
        ConnectionResetError.__init__(self, *pos, **kw)

# for backwards compatibility
error = HTTPException
cookiejar.py000064400000225562147204456360007103 0ustar00r"""HTTP cookie handling for web clients.

This module has (now fairly distant) origins in Gisle Aas' Perl module
HTTP::Cookies, from the libwww-perl library.

Docstrings, comments and debug strings in this code refer to the
attributes of the HTTP cookie system as cookie-attributes, to distinguish
them clearly from Python attributes.
Class diagram (note that BSDDBCookieJar and the MSIE* classes are not
distributed with the Python standard library, but are available from
http://wwwsearch.sf.net/):

                        CookieJar____
                        /     \      \
            FileCookieJar      \      \
             /    |   \         \      \
 MozillaCookieJar | LWPCookieJar \      \
                  |               |      \
                  |   ---MSIEBase |       \
                  |  /      |     |        \
                  | /   MSIEDBCookieJar BSDDBCookieJar
                  |/
               MSIECookieJar

"""

__all__ = ['Cookie', 'CookieJar', 'CookiePolicy', 'DefaultCookiePolicy',
           'FileCookieJar', 'LWPCookieJar', 'LoadError', 'MozillaCookieJar']

import copy
import datetime
import re
import time
import urllib.parse, urllib.request
try:
    import threading as _threading
except ImportError:
    import dummy_threading as _threading
import http.client  # only for the default HTTP port
from calendar import timegm

debug = False   # set to True to enable debugging via the logging module
logger = None

def _debug(*args):
    # Lazily create the module logger on first use; no-op unless the
    # module-level `debug` flag has been set to True.
    if not debug:
        return
    global logger
    if not logger:
        import logging
        logger = logging.getLogger("http.cookiejar")
    return logger.debug(*args)


DEFAULT_HTTP_PORT = str(http.client.HTTP_PORT)
MISSING_FILENAME_TEXT = ("a filename was not supplied (nor was the CookieJar "
                         "instance initialised with one)")

def _warn_unhandled_exception():
    # There are a few catch-all except: statements in this module, for
    # catching input that's bad in unexpected ways.  Warn if any
    # exceptions are caught there.
    import io, warnings, traceback
    f = io.StringIO()
    traceback.print_exc(None, f)
    msg = f.getvalue()
    warnings.warn("http.cookiejar bug!\n%s" % msg, stacklevel=2)


# Date/time conversion
# -----------------------------------------------------------------------------

EPOCH_YEAR = 1970
def _timegm(tt):
    # Convert a UTC time tuple to seconds since the epoch, or None if
    # any field falls outside its plausible calendar range (leap
    # seconds up to 61 are allowed, per the time-tuple convention).
    year, month, mday, hour, min, sec = tt[:6]
    if ((year >= EPOCH_YEAR) and (1 <= month <= 12) and (1 <= mday <= 31) and
        (0 <= hour <= 24) and (0 <= min <= 59) and (0 <= sec <= 61)):
        return timegm(tt)
    else:
        return None

DAYS = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"]
MONTHS = ["Jan", "Feb", "Mar", "Apr", "May", "Jun",
          "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]
MONTHS_LOWER = []
for month in MONTHS: MONTHS_LOWER.append(month.lower())

def time2isoz(t=None):
    """Return a string representing time in seconds since epoch, t.

    If the function is called without an argument, it will use the current
    time.

    The format of the returned string is like "YYYY-MM-DD hh:mm:ssZ",
    representing Universal Time (UTC, aka GMT).  An example of this format is:

    1994-11-24 08:49:37Z

    """
    if t is None:
        dt = datetime.datetime.utcnow()
    else:
        dt = datetime.datetime.utcfromtimestamp(t)
    return "%04d-%02d-%02d %02d:%02d:%02dZ" % (
        dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second)

def time2netscape(t=None):
    """Return a string representing time in seconds since epoch, t.

    If the function is called without an argument, it will use the current
    time.
    The format of the returned string is like this:

    Wed, DD-Mon-YYYY HH:MM:SS GMT

    """
    if t is None:
        dt = datetime.datetime.utcnow()
    else:
        dt = datetime.datetime.utcfromtimestamp(t)
    return "%s, %02d-%s-%04d %02d:%02d:%02d GMT" % (
        DAYS[dt.weekday()], dt.day, MONTHS[dt.month-1],
        dt.year, dt.hour, dt.minute, dt.second)


# Timezone names equivalent to UTC (zero offset).
UTC_ZONES = {"GMT": None, "UTC": None, "UT": None, "Z": None}

# Numeric timezone like "-0800", "+01:00" or "+01".
TIMEZONE_RE = re.compile(r"^([-+])?(\d\d?):?(\d\d)?$", re.ASCII)
def offset_from_tz_string(tz):
    # Return the offset in seconds east of UTC for timezone string `tz`,
    # or None if the string is not recognized.  Only UTC-equivalent
    # names and numeric offsets are understood.
    offset = None
    if tz in UTC_ZONES:
        offset = 0
    else:
        m = TIMEZONE_RE.search(tz)
        if m:
            offset = 3600 * int(m.group(2))
            if m.group(3):
                offset = offset + 60 * int(m.group(3))
            if m.group(1) == '-':
                offset = -offset
    return offset

def _str2time(day, mon, yr, hr, min, sec, tz):
    # Combine already-split date/time fields (strings or None) into
    # seconds since the epoch, adjusting for the timezone string `tz`.
    # Returns None when any field cannot be interpreted.
    yr = int(yr)
    if yr > datetime.MAXYEAR:
        return None

    # translate month name to number
    # month numbers start with 1 (January)
    try:
        mon = MONTHS_LOWER.index(mon.lower())+1
    except ValueError:
        # maybe it's already a number
        try:
            imon = int(mon)
        except ValueError:
            return None
        if 1 <= imon <= 12:
            mon = imon
        else:
            return None

    # make sure clock elements are defined
    if hr is None: hr = 0
    if min is None: min = 0
    if sec is None: sec = 0

    day = int(day)
    hr = int(hr)
    min = int(min)
    sec = int(sec)

    if yr < 1000:
        # find "obvious" year: pick the century that puts the 2-digit
        # year within 50 years of the current date
        cur_yr = time.localtime(time.time())[0]
        m = cur_yr % 100
        tmp = yr
        yr = yr + cur_yr - m
        m = m - tmp
        if abs(m) > 50:
            if m > 0: yr = yr + 100
            else: yr = yr - 100

    # convert UTC time tuple to seconds since epoch (not timezone-adjusted)
    t = _timegm((yr, mon, day, hr, min, sec, tz))

    if t is not None:
        # adjust time using timezone string, to get absolute time since epoch
        if tz is None:
            tz = "UTC"
        tz = tz.upper()
        offset = offset_from_tz_string(tz)
        if offset is None:
            return None
        t = t - offset

    return t

# Strict RFC 1123 date, e.g. "Wed, 09 Feb 1994 22:23:32 GMT".
STRICT_DATE_RE = re.compile(
    r"^[SMTWF][a-z][a-z], (\d\d) ([JFMASOND][a-z][a-z]) "
    r"(\d\d\d\d) (\d\d):(\d\d):(\d\d) GMT$", re.ASCII)

# Optional leading weekday name (possibly abbreviated), stripped
# before loose parsing.
WEEKDAY_RE = re.compile(
    r"^(?:Sun|Mon|Tue|Wed|Thu|Fri|Sat)[a-z]*,?\s*", re.I | re.ASCII)
LOOSE_HTTP_DATE_RE = re.compile(
    r"""^
    (\d\d?)            # day
       (?:\s+|[-\/])
    (\w+)              # month
        (?:\s+|[-\/])
    (\d+)              # year
    (?:
          (?:\s+|:)    # separator before clock
       (\d\d?):(\d\d)  # hour:min
       (?::(\d\d))?    # optional seconds
    )?                 # optional clock
       \s*
    ([-+]?\d{2,4}|(?![APap][Mm]\b)[A-Za-z]+)? # timezone
       \s*
    (?:\(\w+\))?       # ASCII representation of timezone in parens.
       \s*$""", re.X | re.ASCII)
def http2time(text):
    """Returns time in seconds since epoch of time represented by a string.

    Return value is an integer.

    None is returned if the format of str is unrecognized, the time is outside
    the representable range, or the timezone string is not recognized.  If the
    string contains no timezone, UTC is assumed.

    The timezone in the string may be numerical (like "-0800" or "+0100") or a
    string timezone (like "UTC", "GMT", "BST" or "EST").  Currently, only the
    timezone strings equivalent to UTC (zero offset) are known to the function.

    The function loosely parses the following formats:

    Wed, 09 Feb 1994 22:23:32 GMT       -- HTTP format
    Tuesday, 08-Feb-94 14:15:29 GMT     -- old rfc850 HTTP format
    Tuesday, 08-Feb-1994 14:15:29 GMT   -- broken rfc850 HTTP format
    09 Feb 1994 22:23:32 GMT            -- HTTP format (no weekday)
    08-Feb-94 14:15:29 GMT              -- rfc850 format (no weekday)
    08-Feb-1994 14:15:29 GMT            -- broken rfc850 format (no weekday)

    The parser ignores leading and trailing whitespace.  The time may be
    absent.

    If the year is given with only 2 digits, the function will select the
    century that makes the year closest to the current date.

    """
    # fast exit for strictly conforming string
    m = STRICT_DATE_RE.search(text)
    if m:
        g = m.groups()
        mon = MONTHS_LOWER.index(g[1].lower()) + 1
        tt = (int(g[2]), mon, int(g[0]),
              int(g[3]), int(g[4]), float(g[5]))
        return _timegm(tt)

    # No, we need some messy parsing...

    # clean up
    text = text.lstrip()
    text = WEEKDAY_RE.sub("", text, 1)  # Useless weekday

    # tz is time zone specifier string
    day, mon, yr, hr, min, sec, tz = [None]*7

    # loose regexp parse
    m = LOOSE_HTTP_DATE_RE.search(text)
    if m is not None:
        day, mon, yr, hr, min, sec, tz = m.groups()
    else:
        return None  # bad format

    return _str2time(day, mon, yr, hr, min, sec, tz)

ISO_DATE_RE = re.compile(
    r"""^
    (\d{4})              # year
       [-\/]?
    (\d\d?)              # numerical month
       [-\/]?
    (\d\d?)              # day
   (?:
         (?:\s+|[-:Tt])  # separator before clock
      (\d\d?):?(\d\d)    # hour:min
      (?::?(\d\d(?:\.\d*)?))? # optional seconds (and fractional)
   )?                    # optional clock
      \s*
   ([-+]?\d\d?:?(:?\d\d)?
    |Z|z)?               # timezone  (Z is "zero meridian", i.e. GMT)
      \s*$""", re.X | re.ASCII)
def iso2time(text):
    """
    As for http2time, but parses the ISO 8601 formats:

    1994-02-03 14:15:29 -0100    -- ISO 8601 format
    1994-02-03 14:15:29          -- zone is optional
    1994-02-03                   -- only date
    1994-02-03T14:15:29          -- Use T as separator
    19940203T141529Z             -- ISO 8601 compact format
    19940203                     -- only date

    """
    # clean up
    text = text.lstrip()

    # tz is time zone specifier string
    day, mon, yr, hr, min, sec, tz = [None]*7

    # loose regexp parse
    m = ISO_DATE_RE.search(text)
    if m is not None:
        # XXX there's an extra bit of the timezone I'm ignoring here: is
        #   this the right thing to do?
        yr, mon, day, hr, min, sec, tz, _ = m.groups()
    else:
        return None  # bad format

    return _str2time(day, mon, yr, hr, min, sec, tz)


# Header parsing
# -----------------------------------------------------------------------------

def unmatched(match):
    """Return unmatched part of re.Match object."""
    start, end = match.span(0)
    return match.string[:start]+match.string[end:]

# A token: run of characters up to the next '=', whitespace, ';' or ','.
HEADER_TOKEN_RE =        re.compile(r"^\s*([^=\s;,]+)")
# '="..."' with backslash escapes allowed inside the quotes.
HEADER_QUOTED_VALUE_RE = re.compile(r"^\s*=\s*\"([^\"\\]*(?:\\.[^\"\\]*)*)\"")
# Unquoted '=value'.
HEADER_VALUE_RE =        re.compile(r"^\s*=\s*([^\s;,]*)")
# Backslash escape inside a quoted value.
HEADER_ESCAPE_RE = re.compile(r"\\(.)")
def split_header_words(header_values):
    r"""Parse header values into a list of lists containing key,value pairs.
The function knows how to deal with ",", ";" and "=" as well as quoted values after "=". A list of space separated tokens are parsed as if they were separated by ";". If the header_values passed as argument contains multiple values, then they are treated as if they were a single value separated by comma ",". This means that this function is useful for parsing header fields that follow this syntax (BNF as from the HTTP/1.1 specification, but we relax the requirement for tokens). headers = #header header = (token | parameter) *( [";"] (token | parameter)) token = 1* separators = "(" | ")" | "<" | ">" | "@" | "," | ";" | ":" | "\" | <"> | "/" | "[" | "]" | "?" | "=" | "{" | "}" | SP | HT quoted-string = ( <"> *(qdtext | quoted-pair ) <"> ) qdtext = > quoted-pair = "\" CHAR parameter = attribute "=" value attribute = token value = token | quoted-string Each header is represented by a list of key/value pairs. The value for a simple token (not part of a parameter) is None. Syntactically incorrect headers will not necessarily be parsed as you would want. 
This is easier to describe with some examples: >>> split_header_words(['foo="bar"; port="80,81"; discard, bar=baz']) [[('foo', 'bar'), ('port', '80,81'), ('discard', None)], [('bar', 'baz')]] >>> split_header_words(['text/html; charset="iso-8859-1"']) [[('text/html', None), ('charset', 'iso-8859-1')]] >>> split_header_words([r'Basic realm="\"foo\bar\""']) [[('Basic', None), ('realm', '"foobar"')]] """ assert not isinstance(header_values, str) result = [] for text in header_values: orig_text = text pairs = [] while text: m = HEADER_TOKEN_RE.search(text) if m: text = unmatched(m) name = m.group(1) m = HEADER_QUOTED_VALUE_RE.search(text) if m: # quoted value text = unmatched(m) value = m.group(1) value = HEADER_ESCAPE_RE.sub(r"\1", value) else: m = HEADER_VALUE_RE.search(text) if m: # unquoted value text = unmatched(m) value = m.group(1) value = value.rstrip() else: # no value, a lone token value = None pairs.append((name, value)) elif text.lstrip().startswith(","): # concatenated headers, as per RFC 2616 section 4.2 text = text.lstrip()[1:] if pairs: result.append(pairs) pairs = [] else: # skip junk non_junk, nr_junk_chars = re.subn(r"^[=\s;]*", "", text) assert nr_junk_chars > 0, ( "split_header_words bug: '%s', '%s', %s" % (orig_text, text, pairs)) text = non_junk if pairs: result.append(pairs) return result HEADER_JOIN_ESCAPE_RE = re.compile(r"([\"\\])") def join_header_words(lists): """Do the inverse (almost) of the conversion done by split_header_words. Takes a list of lists of (key, value) pairs and produces a single header value. Attribute values are quoted if needed. 
>>> join_header_words([[("text/plain", None), ("charset", "iso-8859-1")]]) 'text/plain; charset="iso-8859-1"' >>> join_header_words([[("text/plain", None)], [("charset", "iso-8859-1")]]) 'text/plain, charset="iso-8859-1"' """ headers = [] for pairs in lists: attr = [] for k, v in pairs: if v is not None: if not re.search(r"^\w+$", v): v = HEADER_JOIN_ESCAPE_RE.sub(r"\\\1", v) # escape " and \ v = '"%s"' % v k = "%s=%s" % (k, v) attr.append(k) if attr: headers.append("; ".join(attr)) return ", ".join(headers) def strip_quotes(text): if text.startswith('"'): text = text[1:] if text.endswith('"'): text = text[:-1] return text def parse_ns_headers(ns_headers): """Ad-hoc parser for Netscape protocol cookie-attributes. The old Netscape cookie format for Set-Cookie can for instance contain an unquoted "," in the expires field, so we have to use this ad-hoc parser instead of split_header_words. XXX This may not make the best possible effort to parse all the crap that Netscape Cookie headers contain. Ronald Tschalar's HTTPClient parser is probably better, so could do worse than following that if this ever gives any trouble. Currently, this is also used for parsing RFC 2109 cookies. """ known_attrs = ("expires", "domain", "path", "secure", # RFC 2109 attrs (may turn up in Netscape cookies, too) "version", "port", "max-age") result = [] for ns_header in ns_headers: pairs = [] version_set = False # XXX: The following does not strictly adhere to RFCs in that empty # names and values are legal (the former will only appear once and will # be overwritten if multiple occurrences are present). This is # mostly to deal with backwards compatibility. 
for ii, param in enumerate(ns_header.split(';')): param = param.strip() key, sep, val = param.partition('=') key = key.strip() if not key: if ii == 0: break else: continue # allow for a distinction between present and empty and missing # altogether val = val.strip() if sep else None if ii != 0: lc = key.lower() if lc in known_attrs: key = lc if key == "version": # This is an RFC 2109 cookie. if val is not None: val = strip_quotes(val) version_set = True elif key == "expires": # convert expires date to seconds since epoch if val is not None: val = http2time(strip_quotes(val)) # None if invalid pairs.append((key, val)) if pairs: if not version_set: pairs.append(("version", "0")) result.append(pairs) return result IPV4_RE = re.compile(r"\.\d+$", re.ASCII) def is_HDN(text): """Return True if text is a host domain name.""" # XXX # This may well be wrong. Which RFC is HDN defined in, if any (for # the purposes of RFC 2965)? # For the current implementation, what about IPv6? Remember to look # at other uses of IPV4_RE also, if change this. if IPV4_RE.search(text): return False if text == "": return False if text[0] == "." or text[-1] == ".": return False return True def domain_match(A, B): """Return True if domain A domain-matches domain B, according to RFC 2965. A and B may be host domain names or IP addresses. RFC 2965, section 1: Host names can be specified either as an IP address or a HDN string. Sometimes we compare one host name with another. (Such comparisons SHALL be case-insensitive.) Host A's name domain-matches host B's if * their host name strings string-compare equal; or * A is a HDN string and has the form NB, where N is a non-empty name string, B has the form .B', and B' is a HDN string. (So, x.y.com domain-matches .Y.com but not Y.com.) Note that domain-match is not a commutative operation: a.b.c.com domain-matches .c.com, but not the reverse. 
""" # Note that, if A or B are IP addresses, the only relevant part of the # definition of the domain-match algorithm is the direct string-compare. A = A.lower() B = B.lower() if A == B: return True if not is_HDN(A): return False i = A.rfind(B) if i == -1 or i == 0: # A does not have form NB, or N is the empty string return False if not B.startswith("."): return False if not is_HDN(B[1:]): return False return True def liberal_is_HDN(text): """Return True if text is a sort-of-like a host domain name. For accepting/blocking domains. """ if IPV4_RE.search(text): return False return True def user_domain_match(A, B): """For blocking/accepting domains. A and B may be host domain names or IP addresses. """ A = A.lower() B = B.lower() if not (liberal_is_HDN(A) and liberal_is_HDN(B)): if A == B: # equal IP addresses return True return False initial_dot = B.startswith(".") if initial_dot and A.endswith(B): return True if not initial_dot and A == B: return True return False cut_port_re = re.compile(r":\d+$", re.ASCII) def request_host(request): """Return request-host, as defined by RFC 2965. Variation from RFC: returned value is lowercased, for convenient comparison. """ url = request.get_full_url() host = urllib.parse.urlparse(url)[1] if host == "": host = request.get_header("Host", "") # remove port, if present host = cut_port_re.sub("", host, 1) return host.lower() def eff_request_host(request): """Return a tuple (request-host, effective request-host name). As defined by RFC 2965, except both are lowercased. 
""" erhn = req_host = request_host(request) if req_host.find(".") == -1 and not IPV4_RE.search(req_host): erhn = req_host + ".local" return req_host, erhn def request_path(request): """Path component of request-URI, as defined by RFC 2965.""" url = request.get_full_url() parts = urllib.parse.urlsplit(url) path = escape_path(parts.path) if not path.startswith("/"): # fix bad RFC 2396 absoluteURI path = "/" + path return path def request_port(request): host = request.host i = host.find(':') if i >= 0: port = host[i+1:] try: int(port) except ValueError: _debug("nonnumeric port: '%s'", port) return None else: port = DEFAULT_HTTP_PORT return port # Characters in addition to A-Z, a-z, 0-9, '_', '.', and '-' that don't # need to be escaped to form a valid HTTP URL (RFCs 2396 and 1738). HTTP_PATH_SAFE = "%/;:@&=+$,!~*'()" ESCAPED_CHAR_RE = re.compile(r"%([0-9a-fA-F][0-9a-fA-F])") def uppercase_escaped_char(match): return "%%%s" % match.group(1).upper() def escape_path(path): """Escape any invalid characters in HTTP URL, and uppercase all escapes.""" # There's no knowing what character encoding was used to create URLs # containing %-escapes, but since we have to pick one to escape invalid # path characters, we pick UTF-8, as recommended in the HTML 4.0 # specification: # http://www.w3.org/TR/REC-html40/appendix/notes.html#h-B.2.1 # And here, kind of: draft-fielding-uri-rfc2396bis-03 # (And in draft IRI specification: draft-duerst-iri-05) # (And here, for new URI schemes: RFC 2718) path = urllib.parse.quote(path, HTTP_PATH_SAFE) path = ESCAPED_CHAR_RE.sub(uppercase_escaped_char, path) return path def reach(h): """Return reach of host h, as defined by RFC 2965, section 1. The reach R of a host name H is defined as follows: * If - H is the host domain name of a host; and, - H has the form A.B; and - A has no embedded (that is, interior) dots; and - B has at least one embedded dot, or B is the string "local". then the reach of H is .B. * Otherwise, the reach of H is H. 
>>> reach("www.acme.com") '.acme.com' >>> reach("acme.com") 'acme.com' >>> reach("acme.local") '.local' """ i = h.find(".") if i >= 0: #a = h[:i] # this line is only here to show what a is b = h[i+1:] i = b.find(".") if is_HDN(h) and (i >= 0 or b == "local"): return "."+b return h def is_third_party(request): """ RFC 2965, section 3.3.6: An unverifiable transaction is to a third-party host if its request- host U does not domain-match the reach R of the request-host O in the origin transaction. """ req_host = request_host(request) if not domain_match(req_host, reach(request.origin_req_host)): return True else: return False class Cookie: """HTTP Cookie. This class represents both Netscape and RFC 2965 cookies. This is deliberately a very simple class. It just holds attributes. It's possible to construct Cookie instances that don't comply with the cookie standards. CookieJar.make_cookies is the factory function for Cookie objects -- it deals with cookie parsing, supplying defaults, and normalising to the representation used in this class. CookiePolicy is responsible for checking them to see whether they should be accepted from and returned to the server. Note that the port may be present in the headers, but unspecified ("Port" rather than"Port=80", for example); if this is the case, port is None. """ def __init__(self, version, name, value, port, port_specified, domain, domain_specified, domain_initial_dot, path, path_specified, secure, expires, discard, comment, comment_url, rest, rfc2109=False, ): if version is not None: version = int(version) if expires is not None: expires = int(float(expires)) if port is None and port_specified is True: raise ValueError("if port is None, port_specified must be false") self.version = version self.name = name self.value = value self.port = port self.port_specified = port_specified # normalise case, as per RFC 2965 section 3.3.3 self.domain = domain.lower() self.domain_specified = domain_specified # Sigh. 
We need to know whether the domain given in the # cookie-attribute had an initial dot, in order to follow RFC 2965 # (as clarified in draft errata). Needed for the returned $Domain # value. self.domain_initial_dot = domain_initial_dot self.path = path self.path_specified = path_specified self.secure = secure self.expires = expires self.discard = discard self.comment = comment self.comment_url = comment_url self.rfc2109 = rfc2109 self._rest = copy.copy(rest) def has_nonstandard_attr(self, name): return name in self._rest def get_nonstandard_attr(self, name, default=None): return self._rest.get(name, default) def set_nonstandard_attr(self, name, value): self._rest[name] = value def is_expired(self, now=None): if now is None: now = time.time() if (self.expires is not None) and (self.expires <= now): return True return False def __str__(self): if self.port is None: p = "" else: p = ":"+self.port limit = self.domain + p + self.path if self.value is not None: namevalue = "%s=%s" % (self.name, self.value) else: namevalue = self.name return "" % (namevalue, limit) def __repr__(self): args = [] for name in ("version", "name", "value", "port", "port_specified", "domain", "domain_specified", "domain_initial_dot", "path", "path_specified", "secure", "expires", "discard", "comment", "comment_url", ): attr = getattr(self, name) args.append("%s=%s" % (name, repr(attr))) args.append("rest=%s" % repr(self._rest)) args.append("rfc2109=%s" % repr(self.rfc2109)) return "%s(%s)" % (self.__class__.__name__, ", ".join(args)) class CookiePolicy: """Defines which cookies get accepted from and returned to server. May also modify cookies, though this is probably a bad idea. The subclass DefaultCookiePolicy defines the standard rules for Netscape and RFC 2965 cookies -- override that if you want a customized policy. """ def set_ok(self, cookie, request): """Return true if (and only if) cookie should be accepted from server. 
Currently, pre-expired cookies never get this far -- the CookieJar class deletes such cookies itself. """ raise NotImplementedError() def return_ok(self, cookie, request): """Return true if (and only if) cookie should be returned to server.""" raise NotImplementedError() def domain_return_ok(self, domain, request): """Return false if cookies should not be returned, given cookie domain. """ return True def path_return_ok(self, path, request): """Return false if cookies should not be returned, given cookie path. """ return True class DefaultCookiePolicy(CookiePolicy): """Implements the standard rules for accepting and returning cookies.""" DomainStrictNoDots = 1 DomainStrictNonDomain = 2 DomainRFC2965Match = 4 DomainLiberal = 0 DomainStrict = DomainStrictNoDots|DomainStrictNonDomain def __init__(self, blocked_domains=None, allowed_domains=None, netscape=True, rfc2965=False, rfc2109_as_netscape=None, hide_cookie2=False, strict_domain=False, strict_rfc2965_unverifiable=True, strict_ns_unverifiable=False, strict_ns_domain=DomainLiberal, strict_ns_set_initial_dollar=False, strict_ns_set_path=False, ): """Constructor arguments should be passed as keyword arguments only.""" self.netscape = netscape self.rfc2965 = rfc2965 self.rfc2109_as_netscape = rfc2109_as_netscape self.hide_cookie2 = hide_cookie2 self.strict_domain = strict_domain self.strict_rfc2965_unverifiable = strict_rfc2965_unverifiable self.strict_ns_unverifiable = strict_ns_unverifiable self.strict_ns_domain = strict_ns_domain self.strict_ns_set_initial_dollar = strict_ns_set_initial_dollar self.strict_ns_set_path = strict_ns_set_path if blocked_domains is not None: self._blocked_domains = tuple(blocked_domains) else: self._blocked_domains = () if allowed_domains is not None: allowed_domains = tuple(allowed_domains) self._allowed_domains = allowed_domains def blocked_domains(self): """Return the sequence of blocked domains (as a tuple).""" return self._blocked_domains def set_blocked_domains(self, 
blocked_domains): """Set the sequence of blocked domains.""" self._blocked_domains = tuple(blocked_domains) def is_blocked(self, domain): for blocked_domain in self._blocked_domains: if user_domain_match(domain, blocked_domain): return True return False def allowed_domains(self): """Return None, or the sequence of allowed domains (as a tuple).""" return self._allowed_domains def set_allowed_domains(self, allowed_domains): """Set the sequence of allowed domains, or None.""" if allowed_domains is not None: allowed_domains = tuple(allowed_domains) self._allowed_domains = allowed_domains def is_not_allowed(self, domain): if self._allowed_domains is None: return False for allowed_domain in self._allowed_domains: if user_domain_match(domain, allowed_domain): return False return True def set_ok(self, cookie, request): """ If you override .set_ok(), be sure to call this method. If it returns false, so should your subclass (assuming your subclass wants to be more strict about which cookies to accept). """ _debug(" - checking cookie %s=%s", cookie.name, cookie.value) assert cookie.name is not None for n in "version", "verifiability", "name", "path", "domain", "port": fn_name = "set_ok_"+n fn = getattr(self, fn_name) if not fn(cookie, request): return False return True def set_ok_version(self, cookie, request): if cookie.version is None: # Version is always set to 0 by parse_ns_headers if it's a Netscape # cookie, so this must be an invalid RFC 2965 cookie. 
_debug(" Set-Cookie2 without version attribute (%s=%s)", cookie.name, cookie.value) return False if cookie.version > 0 and not self.rfc2965: _debug(" RFC 2965 cookies are switched off") return False elif cookie.version == 0 and not self.netscape: _debug(" Netscape cookies are switched off") return False return True def set_ok_verifiability(self, cookie, request): if request.unverifiable and is_third_party(request): if cookie.version > 0 and self.strict_rfc2965_unverifiable: _debug(" third-party RFC 2965 cookie during " "unverifiable transaction") return False elif cookie.version == 0 and self.strict_ns_unverifiable: _debug(" third-party Netscape cookie during " "unverifiable transaction") return False return True def set_ok_name(self, cookie, request): # Try and stop servers setting V0 cookies designed to hack other # servers that know both V0 and V1 protocols. if (cookie.version == 0 and self.strict_ns_set_initial_dollar and cookie.name.startswith("$")): _debug(" illegal name (starts with '$'): '%s'", cookie.name) return False return True def set_ok_path(self, cookie, request): if cookie.path_specified: req_path = request_path(request) if ((cookie.version > 0 or (cookie.version == 0 and self.strict_ns_set_path)) and not req_path.startswith(cookie.path)): _debug(" path attribute %s is not a prefix of request " "path %s", cookie.path, req_path) return False return True def set_ok_domain(self, cookie, request): if self.is_blocked(cookie.domain): _debug(" domain %s is in user block-list", cookie.domain) return False if self.is_not_allowed(cookie.domain): _debug(" domain %s is not in user allow-list", cookie.domain) return False if cookie.domain_specified: req_host, erhn = eff_request_host(request) domain = cookie.domain if self.strict_domain and (domain.count(".") >= 2): # XXX This should probably be compared with the Konqueror # (kcookiejar.cpp) and Mozilla implementations, but it's a # losing battle. 
i = domain.rfind(".") j = domain.rfind(".", 0, i) if j == 0: # domain like .foo.bar tld = domain[i+1:] sld = domain[j+1:i] if sld.lower() in ("co", "ac", "com", "edu", "org", "net", "gov", "mil", "int", "aero", "biz", "cat", "coop", "info", "jobs", "mobi", "museum", "name", "pro", "travel", "eu") and len(tld) == 2: # domain like .co.uk _debug(" country-code second level domain %s", domain) return False if domain.startswith("."): undotted_domain = domain[1:] else: undotted_domain = domain embedded_dots = (undotted_domain.find(".") >= 0) if not embedded_dots and domain != ".local": _debug(" non-local domain %s contains no embedded dot", domain) return False if cookie.version == 0: if (not erhn.endswith(domain) and (not erhn.startswith(".") and not ("."+erhn).endswith(domain))): _debug(" effective request-host %s (even with added " "initial dot) does not end with %s", erhn, domain) return False if (cookie.version > 0 or (self.strict_ns_domain & self.DomainRFC2965Match)): if not domain_match(erhn, domain): _debug(" effective request-host %s does not domain-match " "%s", erhn, domain) return False if (cookie.version > 0 or (self.strict_ns_domain & self.DomainStrictNoDots)): host_prefix = req_host[:-len(domain)] if (host_prefix.find(".") >= 0 and not IPV4_RE.search(req_host)): _debug(" host prefix %s for domain %s contains a dot", host_prefix, domain) return False return True def set_ok_port(self, cookie, request): if cookie.port_specified: req_port = request_port(request) if req_port is None: req_port = "80" else: req_port = str(req_port) for p in cookie.port.split(","): try: int(p) except ValueError: _debug(" bad port %s (not numeric)", p) return False if p == req_port: break else: _debug(" request port (%s) not found in %s", req_port, cookie.port) return False return True def return_ok(self, cookie, request): """ If you override .return_ok(), be sure to call this method. 
If it returns false, so should your subclass (assuming your subclass wants to be more strict about which cookies to return). """ # Path has already been checked by .path_return_ok(), and domain # blocking done by .domain_return_ok(). _debug(" - checking cookie %s=%s", cookie.name, cookie.value) for n in "version", "verifiability", "secure", "expires", "port", "domain": fn_name = "return_ok_"+n fn = getattr(self, fn_name) if not fn(cookie, request): return False return True def return_ok_version(self, cookie, request): if cookie.version > 0 and not self.rfc2965: _debug(" RFC 2965 cookies are switched off") return False elif cookie.version == 0 and not self.netscape: _debug(" Netscape cookies are switched off") return False return True def return_ok_verifiability(self, cookie, request): if request.unverifiable and is_third_party(request): if cookie.version > 0 and self.strict_rfc2965_unverifiable: _debug(" third-party RFC 2965 cookie during unverifiable " "transaction") return False elif cookie.version == 0 and self.strict_ns_unverifiable: _debug(" third-party Netscape cookie during unverifiable " "transaction") return False return True def return_ok_secure(self, cookie, request): if cookie.secure and request.type != "https": _debug(" secure cookie with non-secure request") return False return True def return_ok_expires(self, cookie, request): if cookie.is_expired(self._now): _debug(" cookie expired") return False return True def return_ok_port(self, cookie, request): if cookie.port: req_port = request_port(request) if req_port is None: req_port = "80" for p in cookie.port.split(","): if p == req_port: break else: _debug(" request port %s does not match cookie port %s", req_port, cookie.port) return False return True def return_ok_domain(self, cookie, request): req_host, erhn = eff_request_host(request) domain = cookie.domain if domain and not domain.startswith("."): dotdomain = "." 
+ domain else: dotdomain = domain # strict check of non-domain cookies: Mozilla does this, MSIE5 doesn't if (cookie.version == 0 and (self.strict_ns_domain & self.DomainStrictNonDomain) and not cookie.domain_specified and domain != erhn): _debug(" cookie with unspecified domain does not string-compare " "equal to request domain") return False if cookie.version > 0 and not domain_match(erhn, domain): _debug(" effective request-host name %s does not domain-match " "RFC 2965 cookie domain %s", erhn, domain) return False if cookie.version == 0 and not ("."+erhn).endswith(dotdomain): _debug(" request-host %s does not match Netscape cookie domain " "%s", req_host, domain) return False return True def domain_return_ok(self, domain, request): # Liberal check of. This is here as an optimization to avoid # having to load lots of MSIE cookie files unless necessary. req_host, erhn = eff_request_host(request) if not req_host.startswith("."): req_host = "."+req_host if not erhn.startswith("."): erhn = "."+erhn if domain and not domain.startswith("."): dotdomain = "." 
+ domain else: dotdomain = domain if not (req_host.endswith(dotdomain) or erhn.endswith(dotdomain)): #_debug(" request domain %s does not match cookie domain %s", # req_host, domain) return False if self.is_blocked(domain): _debug(" domain %s is in user block-list", domain) return False if self.is_not_allowed(domain): _debug(" domain %s is not in user allow-list", domain) return False return True def path_return_ok(self, path, request): _debug("- checking cookie path=%s", path) req_path = request_path(request) if not req_path.startswith(path): _debug(" %s does not path-match %s", req_path, path) return False return True def vals_sorted_by_key(adict): keys = sorted(adict.keys()) return map(adict.get, keys) def deepvalues(mapping): """Iterates over nested mapping, depth-first, in sorted order by key.""" values = vals_sorted_by_key(mapping) for obj in values: mapping = False try: obj.items except AttributeError: pass else: mapping = True yield from deepvalues(obj) if not mapping: yield obj # Used as second parameter to dict.get() method, to distinguish absent # dict key from one with a None value. class Absent: pass class CookieJar: """Collection of HTTP cookies. You may not need to know about this class: try urllib.request.build_opener(HTTPCookieProcessor).open(url). 
""" non_word_re = re.compile(r"\W") quote_re = re.compile(r"([\"\\])") strict_domain_re = re.compile(r"\.?[^.]*") domain_re = re.compile(r"[^.]*") dots_re = re.compile(r"^\.+") magic_re = re.compile(r"^\#LWP-Cookies-(\d+\.\d+)", re.ASCII) def __init__(self, policy=None): if policy is None: policy = DefaultCookiePolicy() self._policy = policy self._cookies_lock = _threading.RLock() self._cookies = {} def set_policy(self, policy): self._policy = policy def _cookies_for_domain(self, domain, request): cookies = [] if not self._policy.domain_return_ok(domain, request): return [] _debug("Checking %s for cookies to return", domain) cookies_by_path = self._cookies[domain] for path in cookies_by_path.keys(): if not self._policy.path_return_ok(path, request): continue cookies_by_name = cookies_by_path[path] for cookie in cookies_by_name.values(): if not self._policy.return_ok(cookie, request): _debug(" not returning cookie") continue _debug(" it's a match") cookies.append(cookie) return cookies def _cookies_for_request(self, request): """Return a list of cookies to be returned to server.""" cookies = [] for domain in self._cookies.keys(): cookies.extend(self._cookies_for_domain(domain, request)) return cookies def _cookie_attrs(self, cookies): """Return a list of cookie-attributes to be returned to server. like ['foo="bar"; $Path="/"', ...] The $Version attribute is also added when appropriate (currently only once per request). """ # add cookies in order of most specific (ie. longest) path first cookies.sort(key=lambda a: len(a.path), reverse=True) version_set = False attrs = [] for cookie in cookies: # set version of Cookie header # XXX # What should it be if multiple matching Set-Cookie headers have # different versions themselves? # Answer: there is no answer; was supposed to be settled by # RFC 2965 errata, but that may never appear... 
version = cookie.version if not version_set: version_set = True if version > 0: attrs.append("$Version=%s" % version) # quote cookie value if necessary # (not for Netscape protocol, which already has any quotes # intact, due to the poorly-specified Netscape Cookie: syntax) if ((cookie.value is not None) and self.non_word_re.search(cookie.value) and version > 0): value = self.quote_re.sub(r"\\\1", cookie.value) else: value = cookie.value # add cookie-attributes to be returned in Cookie header if cookie.value is None: attrs.append(cookie.name) else: attrs.append("%s=%s" % (cookie.name, value)) if version > 0: if cookie.path_specified: attrs.append('$Path="%s"' % cookie.path) if cookie.domain.startswith("."): domain = cookie.domain if (not cookie.domain_initial_dot and domain.startswith(".")): domain = domain[1:] attrs.append('$Domain="%s"' % domain) if cookie.port is not None: p = "$Port" if cookie.port_specified: p = p + ('="%s"' % cookie.port) attrs.append(p) return attrs def add_cookie_header(self, request): """Add correct Cookie: header to request (urllib.request.Request object). The Cookie2 header is also added unless policy.hide_cookie2 is true. """ _debug("add_cookie_header") self._cookies_lock.acquire() try: self._policy._now = self._now = int(time.time()) cookies = self._cookies_for_request(request) attrs = self._cookie_attrs(cookies) if attrs: if not request.has_header("Cookie"): request.add_unredirected_header( "Cookie", "; ".join(attrs)) # if necessary, advertise that we know RFC 2965 if (self._policy.rfc2965 and not self._policy.hide_cookie2 and not request.has_header("Cookie2")): for cookie in cookies: if cookie.version != 1: request.add_unredirected_header("Cookie2", '$Version="1"') break finally: self._cookies_lock.release() self.clear_expired_cookies() def _normalized_cookie_tuples(self, attrs_set): """Return list of tuples containing normalised cookie information. 
attrs_set is the list of lists of key,value pairs extracted from the Set-Cookie or Set-Cookie2 headers. Tuples are name, value, standard, rest, where name and value are the cookie name and value, standard is a dictionary containing the standard cookie-attributes (discard, secure, version, expires or max-age, domain, path and port) and rest is a dictionary containing the rest of the cookie-attributes. """ cookie_tuples = [] boolean_attrs = "discard", "secure" value_attrs = ("version", "expires", "max-age", "domain", "path", "port", "comment", "commenturl") for cookie_attrs in attrs_set: name, value = cookie_attrs[0] # Build dictionary of standard cookie-attributes (standard) and # dictionary of other cookie-attributes (rest). # Note: expiry time is normalised to seconds since epoch. V0 # cookies should have the Expires cookie-attribute, and V1 cookies # should have Max-Age, but since V1 includes RFC 2109 cookies (and # since V0 cookies may be a mish-mash of Netscape and RFC 2109), we # accept either (but prefer Max-Age). 
max_age_set = False bad_cookie = False standard = {} rest = {} for k, v in cookie_attrs[1:]: lc = k.lower() # don't lose case distinction for unknown fields if lc in value_attrs or lc in boolean_attrs: k = lc if k in boolean_attrs and v is None: # boolean cookie-attribute is present, but has no value # (like "discard", rather than "port=80") v = True if k in standard: # only first value is significant continue if k == "domain": if v is None: _debug(" missing value for domain attribute") bad_cookie = True break # RFC 2965 section 3.3.3 v = v.lower() if k == "expires": if max_age_set: # Prefer max-age to expires (like Mozilla) continue if v is None: _debug(" missing or invalid value for expires " "attribute: treating as session cookie") continue if k == "max-age": max_age_set = True try: v = int(v) except ValueError: _debug(" missing or invalid (non-numeric) value for " "max-age attribute") bad_cookie = True break # convert RFC 2965 Max-Age to seconds since epoch # XXX Strictly you're supposed to follow RFC 2616 # age-calculation rules. Remember that zero Max-Age # is a request to discard (old and new) cookie, though. 
k = "expires" v = self._now + v if (k in value_attrs) or (k in boolean_attrs): if (v is None and k not in ("port", "comment", "commenturl")): _debug(" missing value for %s attribute" % k) bad_cookie = True break standard[k] = v else: rest[k] = v if bad_cookie: continue cookie_tuples.append((name, value, standard, rest)) return cookie_tuples def _cookie_from_cookie_tuple(self, tup, request): # standard is dict of standard cookie-attributes, rest is dict of the # rest of them name, value, standard, rest = tup domain = standard.get("domain", Absent) path = standard.get("path", Absent) port = standard.get("port", Absent) expires = standard.get("expires", Absent) # set the easy defaults version = standard.get("version", None) if version is not None: try: version = int(version) except ValueError: return None # invalid version, ignore cookie secure = standard.get("secure", False) # (discard is also set if expires is Absent) discard = standard.get("discard", False) comment = standard.get("comment", None) comment_url = standard.get("commenturl", None) # set default path if path is not Absent and path != "": path_specified = True path = escape_path(path) else: path_specified = False path = request_path(request) i = path.rfind("/") if i != -1: if version == 0: # Netscape spec parts company from reality here path = path[:i] else: path = path[:i+1] if len(path) == 0: path = "/" # set default domain domain_specified = domain is not Absent # but first we have to remember whether it starts with a dot domain_initial_dot = False if domain_specified: domain_initial_dot = bool(domain.startswith(".")) if domain is Absent: req_host, erhn = eff_request_host(request) domain = erhn elif not domain.startswith("."): domain = "."+domain # set default port port_specified = False if port is not Absent: if port is None: # Port attr present, but has no value: default to request port. # Cookie should then only be sent back on that port. 
port = request_port(request) else: port_specified = True port = re.sub(r"\s+", "", port) else: # No port attr present. Cookie can be sent back on any port. port = None # set default expires and discard if expires is Absent: expires = None discard = True elif expires <= self._now: # Expiry date in past is request to delete cookie. This can't be # in DefaultCookiePolicy, because can't delete cookies there. try: self.clear(domain, path, name) except KeyError: pass _debug("Expiring cookie, domain='%s', path='%s', name='%s'", domain, path, name) return None return Cookie(version, name, value, port, port_specified, domain, domain_specified, domain_initial_dot, path, path_specified, secure, expires, discard, comment, comment_url, rest) def _cookies_from_attrs_set(self, attrs_set, request): cookie_tuples = self._normalized_cookie_tuples(attrs_set) cookies = [] for tup in cookie_tuples: cookie = self._cookie_from_cookie_tuple(tup, request) if cookie: cookies.append(cookie) return cookies def _process_rfc2109_cookies(self, cookies): rfc2109_as_ns = getattr(self._policy, 'rfc2109_as_netscape', None) if rfc2109_as_ns is None: rfc2109_as_ns = not self._policy.rfc2965 for cookie in cookies: if cookie.version == 1: cookie.rfc2109 = True if rfc2109_as_ns: # treat 2109 cookies as Netscape cookies rather than # as RFC2965 cookies cookie.version = 0 def make_cookies(self, response, request): """Return sequence of Cookie objects extracted from response object.""" # get cookie-attributes for RFC 2965 and Netscape protocols headers = response.info() rfc2965_hdrs = headers.get_all("Set-Cookie2", []) ns_hdrs = headers.get_all("Set-Cookie", []) rfc2965 = self._policy.rfc2965 netscape = self._policy.netscape if ((not rfc2965_hdrs and not ns_hdrs) or (not ns_hdrs and not rfc2965) or (not rfc2965_hdrs and not netscape) or (not netscape and not rfc2965)): return [] # no relevant cookie headers: quick exit try: cookies = self._cookies_from_attrs_set( split_header_words(rfc2965_hdrs), request) 
except Exception: _warn_unhandled_exception() cookies = [] if ns_hdrs and netscape: try: # RFC 2109 and Netscape cookies ns_cookies = self._cookies_from_attrs_set( parse_ns_headers(ns_hdrs), request) except Exception: _warn_unhandled_exception() ns_cookies = [] self._process_rfc2109_cookies(ns_cookies) # Look for Netscape cookies (from Set-Cookie headers) that match # corresponding RFC 2965 cookies (from Set-Cookie2 headers). # For each match, keep the RFC 2965 cookie and ignore the Netscape # cookie (RFC 2965 section 9.1). Actually, RFC 2109 cookies are # bundled in with the Netscape cookies for this purpose, which is # reasonable behaviour. if rfc2965: lookup = {} for cookie in cookies: lookup[(cookie.domain, cookie.path, cookie.name)] = None def no_matching_rfc2965(ns_cookie, lookup=lookup): key = ns_cookie.domain, ns_cookie.path, ns_cookie.name return key not in lookup ns_cookies = filter(no_matching_rfc2965, ns_cookies) if ns_cookies: cookies.extend(ns_cookies) return cookies def set_cookie_if_ok(self, cookie, request): """Set a cookie if policy says it's OK to do so.""" self._cookies_lock.acquire() try: self._policy._now = self._now = int(time.time()) if self._policy.set_ok(cookie, request): self.set_cookie(cookie) finally: self._cookies_lock.release() def set_cookie(self, cookie): """Set a cookie, without checking whether or not it should be set.""" c = self._cookies self._cookies_lock.acquire() try: if cookie.domain not in c: c[cookie.domain] = {} c2 = c[cookie.domain] if cookie.path not in c2: c2[cookie.path] = {} c3 = c2[cookie.path] c3[cookie.name] = cookie finally: self._cookies_lock.release() def extract_cookies(self, response, request): """Extract cookies from response, where allowable given the request.""" _debug("extract_cookies: %s", response.info()) self._cookies_lock.acquire() try: self._policy._now = self._now = int(time.time()) for cookie in self.make_cookies(response, request): if self._policy.set_ok(cookie, request): _debug(" setting cookie: 
%s", cookie) self.set_cookie(cookie) finally: self._cookies_lock.release() def clear(self, domain=None, path=None, name=None): """Clear some cookies. Invoking this method without arguments will clear all cookies. If given a single argument, only cookies belonging to that domain will be removed. If given two arguments, cookies belonging to the specified path within that domain are removed. If given three arguments, then the cookie with the specified name, path and domain is removed. Raises KeyError if no matching cookie exists. """ if name is not None: if (domain is None) or (path is None): raise ValueError( "domain and path must be given to remove a cookie by name") del self._cookies[domain][path][name] elif path is not None: if domain is None: raise ValueError( "domain must be given to remove cookies by path") del self._cookies[domain][path] elif domain is not None: del self._cookies[domain] else: self._cookies = {} def clear_session_cookies(self): """Discard all session cookies. Note that the .save() method won't save session cookies anyway, unless you ask otherwise by passing a true ignore_discard argument. """ self._cookies_lock.acquire() try: for cookie in self: if cookie.discard: self.clear(cookie.domain, cookie.path, cookie.name) finally: self._cookies_lock.release() def clear_expired_cookies(self): """Discard all expired cookies. You probably don't need to call this method: expired cookies are never sent back to the server (provided you're using DefaultCookiePolicy), this method is called by CookieJar itself every so often, and the .save() method won't save expired cookies anyway (unless you ask otherwise by passing a true ignore_expires argument). 
""" self._cookies_lock.acquire() try: now = time.time() for cookie in self: if cookie.is_expired(now): self.clear(cookie.domain, cookie.path, cookie.name) finally: self._cookies_lock.release() def __iter__(self): return deepvalues(self._cookies) def __len__(self): """Return number of contained cookies.""" i = 0 for cookie in self: i = i + 1 return i def __repr__(self): r = [] for cookie in self: r.append(repr(cookie)) return "<%s[%s]>" % (self.__class__.__name__, ", ".join(r)) def __str__(self): r = [] for cookie in self: r.append(str(cookie)) return "<%s[%s]>" % (self.__class__.__name__, ", ".join(r)) # derives from OSError for backwards-compatibility with Python 2.4.0 class LoadError(OSError): pass class FileCookieJar(CookieJar): """CookieJar that can be loaded from and saved to a file.""" def __init__(self, filename=None, delayload=False, policy=None): """ Cookies are NOT loaded from the named file until either the .load() or .revert() method is called. """ CookieJar.__init__(self, policy) if filename is not None: try: filename+"" except: raise ValueError("filename must be string-like") self.filename = filename self.delayload = bool(delayload) def save(self, filename=None, ignore_discard=False, ignore_expires=False): """Save cookies to a file.""" raise NotImplementedError() def load(self, filename=None, ignore_discard=False, ignore_expires=False): """Load cookies from a file.""" if filename is None: if self.filename is not None: filename = self.filename else: raise ValueError(MISSING_FILENAME_TEXT) with open(filename) as f: self._really_load(f, filename, ignore_discard, ignore_expires) def revert(self, filename=None, ignore_discard=False, ignore_expires=False): """Clear all cookies and reload cookies from a saved file. Raises LoadError (or OSError) if reversion is not successful; the object's state will not be altered if this happens. 
""" if filename is None: if self.filename is not None: filename = self.filename else: raise ValueError(MISSING_FILENAME_TEXT) self._cookies_lock.acquire() try: old_state = copy.deepcopy(self._cookies) self._cookies = {} try: self.load(filename, ignore_discard, ignore_expires) except OSError: self._cookies = old_state raise finally: self._cookies_lock.release() def lwp_cookie_str(cookie): """Return string representation of Cookie in the LWP cookie file format. Actually, the format is extended a bit -- see module docstring. """ h = [(cookie.name, cookie.value), ("path", cookie.path), ("domain", cookie.domain)] if cookie.port is not None: h.append(("port", cookie.port)) if cookie.path_specified: h.append(("path_spec", None)) if cookie.port_specified: h.append(("port_spec", None)) if cookie.domain_initial_dot: h.append(("domain_dot", None)) if cookie.secure: h.append(("secure", None)) if cookie.expires: h.append(("expires", time2isoz(float(cookie.expires)))) if cookie.discard: h.append(("discard", None)) if cookie.comment: h.append(("comment", cookie.comment)) if cookie.comment_url: h.append(("commenturl", cookie.comment_url)) keys = sorted(cookie._rest.keys()) for k in keys: h.append((k, str(cookie._rest[k]))) h.append(("version", str(cookie.version))) return join_header_words([h]) class LWPCookieJar(FileCookieJar): """ The LWPCookieJar saves a sequence of "Set-Cookie3" lines. "Set-Cookie3" is the format used by the libwww-perl library, not known to be compatible with any browser, but which is easy to read and doesn't lose information about RFC 2965 cookies. Additional methods as_lwp_str(ignore_discard=True, ignore_expired=True) """ def as_lwp_str(self, ignore_discard=True, ignore_expires=True): """Return cookies as a string of "\\n"-separated "Set-Cookie3" headers. 
ignore_discard and ignore_expires: see docstring for FileCookieJar.save """ now = time.time() r = [] for cookie in self: if not ignore_discard and cookie.discard: continue if not ignore_expires and cookie.is_expired(now): continue r.append("Set-Cookie3: %s" % lwp_cookie_str(cookie)) return "\n".join(r+[""]) def save(self, filename=None, ignore_discard=False, ignore_expires=False): if filename is None: if self.filename is not None: filename = self.filename else: raise ValueError(MISSING_FILENAME_TEXT) with open(filename, "w") as f: # There really isn't an LWP Cookies 2.0 format, but this indicates # that there is extra information in here (domain_dot and # port_spec) while still being compatible with libwww-perl, I hope. f.write("#LWP-Cookies-2.0\n") f.write(self.as_lwp_str(ignore_discard, ignore_expires)) def _really_load(self, f, filename, ignore_discard, ignore_expires): magic = f.readline() if not self.magic_re.search(magic): msg = ("%r does not look like a Set-Cookie3 (LWP) format " "file" % filename) raise LoadError(msg) now = time.time() header = "Set-Cookie3:" boolean_attrs = ("port_spec", "path_spec", "domain_dot", "secure", "discard") value_attrs = ("version", "port", "path", "domain", "expires", "comment", "commenturl") try: while 1: line = f.readline() if line == "": break if not line.startswith(header): continue line = line[len(header):].strip() for data in split_header_words([line]): name, value = data[0] standard = {} rest = {} for k in boolean_attrs: standard[k] = False for k, v in data[1:]: if k is not None: lc = k.lower() else: lc = None # don't lose case distinction for unknown fields if (lc in value_attrs) or (lc in boolean_attrs): k = lc if k in boolean_attrs: if v is None: v = True standard[k] = v elif k in value_attrs: standard[k] = v else: rest[k] = v h = standard.get expires = h("expires") discard = h("discard") if expires is not None: expires = iso2time(expires) if expires is None: discard = True domain = h("domain") domain_specified = 
domain.startswith(".") c = Cookie(h("version"), name, value, h("port"), h("port_spec"), domain, domain_specified, h("domain_dot"), h("path"), h("path_spec"), h("secure"), expires, discard, h("comment"), h("commenturl"), rest) if not ignore_discard and c.discard: continue if not ignore_expires and c.is_expired(now): continue self.set_cookie(c) except OSError: raise except Exception: _warn_unhandled_exception() raise LoadError("invalid Set-Cookie3 format file %r: %r" % (filename, line)) class MozillaCookieJar(FileCookieJar): """ WARNING: you may want to backup your browser's cookies file if you use this class to save cookies. I *think* it works, but there have been bugs in the past! This class differs from CookieJar only in the format it uses to save and load cookies to and from a file. This class uses the Mozilla/Netscape `cookies.txt' format. lynx uses this file format, too. Don't expect cookies saved while the browser is running to be noticed by the browser (in fact, Mozilla on unix will overwrite your saved cookies if you change them on disk while it's running; on Windows, you probably can't save at all while the browser is running). Note that the Mozilla/Netscape format will downgrade RFC2965 cookies to Netscape cookies on saving. In particular, the cookie version and port number information is lost, together with information about whether or not Path, Port and Discard were specified by the Set-Cookie2 (or Set-Cookie) header, and whether or not the domain as set in the HTTP header started with a dot (yes, I'm aware some domains in Netscape files start with a dot and some don't -- trust me, you really don't want to know any more about this). Note that though Mozilla and Netscape use the same format, they use slightly different headers. The class saves cookies using the Netscape header by default (Mozilla can cope with that). """ magic_re = re.compile("#( Netscape)? 
HTTP Cookie File") header = """\ # Netscape HTTP Cookie File # http://curl.haxx.se/rfc/cookie_spec.html # This is a generated file! Do not edit. """ def _really_load(self, f, filename, ignore_discard, ignore_expires): now = time.time() magic = f.readline() if not self.magic_re.search(magic): raise LoadError( "%r does not look like a Netscape format cookies file" % filename) try: while 1: line = f.readline() if line == "": break # last field may be absent, so keep any trailing tab if line.endswith("\n"): line = line[:-1] # skip comments and blank lines XXX what is $ for? if (line.strip().startswith(("#", "$")) or line.strip() == ""): continue domain, domain_specified, path, secure, expires, name, value = \ line.split("\t") secure = (secure == "TRUE") domain_specified = (domain_specified == "TRUE") if name == "": # cookies.txt regards 'Set-Cookie: foo' as a cookie # with no name, whereas http.cookiejar regards it as a # cookie with no value. name = value value = None initial_dot = domain.startswith(".") assert domain_specified == initial_dot discard = False if expires == "": expires = None discard = True # assume path_specified is false c = Cookie(0, name, value, None, False, domain, domain_specified, initial_dot, path, False, secure, expires, discard, None, None, {}) if not ignore_discard and c.discard: continue if not ignore_expires and c.is_expired(now): continue self.set_cookie(c) except OSError: raise except Exception: _warn_unhandled_exception() raise LoadError("invalid Netscape format cookies file %r: %r" % (filename, line)) def save(self, filename=None, ignore_discard=False, ignore_expires=False): if filename is None: if self.filename is not None: filename = self.filename else: raise ValueError(MISSING_FILENAME_TEXT) with open(filename, "w") as f: f.write(self.header) now = time.time() for cookie in self: if not ignore_discard and cookie.discard: continue if not ignore_expires and cookie.is_expired(now): continue if cookie.secure: secure = "TRUE" else: secure 
= "FALSE" if cookie.domain.startswith("."): initial_dot = "TRUE" else: initial_dot = "FALSE" if cookie.expires is not None: expires = str(cookie.expires) else: expires = "" if cookie.value is None: # cookies.txt regards 'Set-Cookie: foo' as a cookie # with no name, whereas http.cookiejar regards it as a # cookie with no value. name = "" value = cookie.name else: name = cookie.name value = cookie.value f.write( "\t".join([cookie.domain, initial_dot, cookie.path, secure, expires, name, value])+ "\n") cookies.py000064400000051616147204456360006576 0ustar00#### # Copyright 2000 by Timothy O'Malley # # All Rights Reserved # # Permission to use, copy, modify, and distribute this software # and its documentation for any purpose and without fee is hereby # granted, provided that the above copyright notice appear in all # copies and that both that copyright notice and this permission # notice appear in supporting documentation, and that the name of # Timothy O'Malley not be used in advertising or publicity # pertaining to distribution of the software without specific, written # prior permission. # # Timothy O'Malley DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS # SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY # AND FITNESS, IN NO EVENT SHALL Timothy O'Malley BE LIABLE FOR # ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES # WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, # WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS # ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR # PERFORMANCE OF THIS SOFTWARE. # #### # # Id: Cookie.py,v 2.29 2000/08/23 05:28:49 timo Exp # by Timothy O'Malley # # Cookie.py is a Python module for the handling of HTTP # cookies as a Python dictionary. See RFC 2109 for more # information on cookies. # # The original idea to treat Cookies as a dictionary came from # Dave Mitchell (davem@magnet.com) in 1995, when he released the # first version of nscookie.py. 
# #### r""" Here's a sample session to show how to use this module. At the moment, this is the only documentation. The Basics ---------- Importing is easy... >>> from http import cookies Most of the time you start by creating a cookie. >>> C = cookies.SimpleCookie() Once you've created your Cookie, you can add values just as if it were a dictionary. >>> C = cookies.SimpleCookie() >>> C["fig"] = "newton" >>> C["sugar"] = "wafer" >>> C.output() 'Set-Cookie: fig=newton\r\nSet-Cookie: sugar=wafer' Notice that the printable representation of a Cookie is the appropriate format for a Set-Cookie: header. This is the default behavior. You can change the header and printed attributes by using the .output() function >>> C = cookies.SimpleCookie() >>> C["rocky"] = "road" >>> C["rocky"]["path"] = "/cookie" >>> print(C.output(header="Cookie:")) Cookie: rocky=road; Path=/cookie >>> print(C.output(attrs=[], header="Cookie:")) Cookie: rocky=road The load() method of a Cookie extracts cookies from a string. In a CGI script, you would use this method to extract the cookies from the HTTP_COOKIE environment variable. >>> C = cookies.SimpleCookie() >>> C.load("chips=ahoy; vienna=finger") >>> C.output() 'Set-Cookie: chips=ahoy\r\nSet-Cookie: vienna=finger' The load() method is darn-tootin smart about identifying cookies within a string. Escaped quotation marks, nested semicolons, and other such trickeries do not confuse it. >>> C = cookies.SimpleCookie() >>> C.load('keebler="E=everybody; L=\\"Loves\\"; fudge=\\012;";') >>> print(C) Set-Cookie: keebler="E=everybody; L=\"Loves\"; fudge=\012;" Each element of the Cookie also supports all of the RFC 2109 Cookie attributes. Here's an example which sets the Path attribute. >>> C = cookies.SimpleCookie() >>> C["oreo"] = "doublestuff" >>> C["oreo"]["path"] = "/" >>> print(C) Set-Cookie: oreo=doublestuff; Path=/ Each dictionary element has a 'value' attribute, which gives you back the value associated with the key. 
>>> C = cookies.SimpleCookie() >>> C["twix"] = "none for you" >>> C["twix"].value 'none for you' The SimpleCookie expects that all values should be standard strings. Just to be sure, SimpleCookie invokes the str() builtin to convert the value to a string, when the values are set dictionary-style. >>> C = cookies.SimpleCookie() >>> C["number"] = 7 >>> C["string"] = "seven" >>> C["number"].value '7' >>> C["string"].value 'seven' >>> C.output() 'Set-Cookie: number=7\r\nSet-Cookie: string=seven' Finis. """ # # Import our required modules # import re import string __all__ = ["CookieError", "BaseCookie", "SimpleCookie"] _nulljoin = ''.join _semispacejoin = '; '.join _spacejoin = ' '.join def _warn_deprecated_setter(setter): import warnings msg = ('The .%s setter is deprecated. The attribute will be read-only in ' 'future releases. Please use the set() method instead.' % setter) warnings.warn(msg, DeprecationWarning, stacklevel=3) # # Define an exception visible to External modules # class CookieError(Exception): pass # These quoting routines conform to the RFC2109 specification, which in # turn references the character definitions from RFC2068. They provide # a two-way quoting algorithm. Any non-text character is translated # into a 4 character sequence: a forward-slash followed by the # three-digit octal equivalent of the character. Any '\' or '"' is # quoted with a preceding '\' slash. # Because of the way browsers really handle cookies (as opposed to what # the RFC says) we also encode "," and ";". # # These are taken from RFC2068 and RFC2109. 
# _LegalChars is the list of chars which don't require "'s # _Translator hash-table for fast quoting # _LegalChars = string.ascii_letters + string.digits + "!#$%&'*+-.^_`|~:" _UnescapedChars = _LegalChars + ' ()/<=>?@[]{}' _Translator = {n: '\\%03o' % n for n in set(range(256)) - set(map(ord, _UnescapedChars))} _Translator.update({ ord('"'): '\\"', ord('\\'): '\\\\', }) _is_legal_key = re.compile('[%s]+' % re.escape(_LegalChars)).fullmatch def _quote(str): r"""Quote a string for use in a cookie header. If the string does not need to be double-quoted, then just return the string. Otherwise, surround the string in doublequotes and quote (with a \) special characters. """ if str is None or _is_legal_key(str): return str else: return '"' + str.translate(_Translator) + '"' _OctalPatt = re.compile(r"\\[0-3][0-7][0-7]") _QuotePatt = re.compile(r"[\\].") def _unquote(str): # If there aren't any doublequotes, # then there can't be any special characters. See RFC 2109. if str is None or len(str) < 2: return str if str[0] != '"' or str[-1] != '"': return str # We have to assume that we must decode this string. # Down to work. # Remove the "s str = str[1:-1] # Check for special sequences. Examples: # \012 --> \n # \" --> " # i = 0 n = len(str) res = [] while 0 <= i < n: o_match = _OctalPatt.search(str, i) q_match = _QuotePatt.search(str, i) if not o_match and not q_match: # Neither matched res.append(str[i:]) break # else: j = k = -1 if o_match: j = o_match.start(0) if q_match: k = q_match.start(0) if q_match and (not o_match or k < j): # QuotePatt matched res.append(str[i:k]) res.append(str[k+1]) i = k + 2 else: # OctalPatt matched res.append(str[i:j]) res.append(chr(int(str[j+1:j+4], 8))) i = j + 4 return _nulljoin(res) # The _getdate() routine is used to set the expiration time in the cookie's HTTP # header. By default, _getdate() returns the current time in the appropriate # "expires" format for a Set-Cookie header. 
The one optional argument is an # offset from now, in seconds. For example, an offset of -3600 means "one hour # ago". The offset may be a floating point number. # _weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'] _monthname = [None, 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'] def _getdate(future=0, weekdayname=_weekdayname, monthname=_monthname): from time import gmtime, time now = time() year, month, day, hh, mm, ss, wd, y, z = gmtime(now + future) return "%s, %02d %3s %4d %02d:%02d:%02d GMT" % \ (weekdayname[wd], day, monthname[month], year, hh, mm, ss) class Morsel(dict): """A class to hold ONE (key, value) pair. In a cookie, each such pair may have several attributes, so this class is used to keep the attributes associated with the appropriate key,value pair. This class also includes a coded_value attribute, which is used to hold the network representation of the value. This is most useful when Python objects are pickled for network transit. """ # RFC 2109 lists these attributes as reserved: # path comment domain # max-age secure version # # For historical reasons, these attributes are also reserved: # expires # # This is an extension from Microsoft: # httponly # # This dictionary provides a mapping from the lowercase # variant on the left to the appropriate traditional # formatting on the right. 
_reserved = { "expires" : "expires", "path" : "Path", "comment" : "Comment", "domain" : "Domain", "max-age" : "Max-Age", "secure" : "Secure", "httponly" : "HttpOnly", "version" : "Version", } _flags = {'secure', 'httponly'} def __init__(self): # Set defaults self._key = self._value = self._coded_value = None # Set default attributes for key in self._reserved: dict.__setitem__(self, key, "") @property def key(self): return self._key @key.setter def key(self, key): _warn_deprecated_setter('key') self._key = key @property def value(self): return self._value @value.setter def value(self, value): _warn_deprecated_setter('value') self._value = value @property def coded_value(self): return self._coded_value @coded_value.setter def coded_value(self, coded_value): _warn_deprecated_setter('coded_value') self._coded_value = coded_value def __setitem__(self, K, V): K = K.lower() if not K in self._reserved: raise CookieError("Invalid attribute %r" % (K,)) dict.__setitem__(self, K, V) def setdefault(self, key, val=None): key = key.lower() if key not in self._reserved: raise CookieError("Invalid attribute %r" % (key,)) return dict.setdefault(self, key, val) def __eq__(self, morsel): if not isinstance(morsel, Morsel): return NotImplemented return (dict.__eq__(self, morsel) and self._value == morsel._value and self._key == morsel._key and self._coded_value == morsel._coded_value) __ne__ = object.__ne__ def copy(self): morsel = Morsel() dict.update(morsel, self) morsel.__dict__.update(self.__dict__) return morsel def update(self, values): data = {} for key, val in dict(values).items(): key = key.lower() if key not in self._reserved: raise CookieError("Invalid attribute %r" % (key,)) data[key] = val dict.update(self, data) def isReservedKey(self, K): return K.lower() in self._reserved def set(self, key, val, coded_val, LegalChars=_LegalChars): if LegalChars != _LegalChars: import warnings warnings.warn( 'LegalChars parameter is deprecated, ignored and will ' 'be removed in future 
versions.', DeprecationWarning, stacklevel=2) if key.lower() in self._reserved: raise CookieError('Attempt to set a reserved key %r' % (key,)) if not _is_legal_key(key): raise CookieError('Illegal key %r' % (key,)) # It's a good key, so save it. self._key = key self._value = val self._coded_value = coded_val def __getstate__(self): return { 'key': self._key, 'value': self._value, 'coded_value': self._coded_value, } def __setstate__(self, state): self._key = state['key'] self._value = state['value'] self._coded_value = state['coded_value'] def output(self, attrs=None, header="Set-Cookie:"): return "%s %s" % (header, self.OutputString(attrs)) __str__ = output def __repr__(self): return '<%s: %s>' % (self.__class__.__name__, self.OutputString()) def js_output(self, attrs=None): # Print javascript return """ """ % (self.OutputString(attrs).replace('"', r'\"')) def OutputString(self, attrs=None): # Build up our result # result = [] append = result.append # First, the key=value pair append("%s=%s" % (self.key, self.coded_value)) # Now add any defined attributes if attrs is None: attrs = self._reserved items = sorted(self.items()) for key, value in items: if value == "": continue if key not in attrs: continue if key == "expires" and isinstance(value, int): append("%s=%s" % (self._reserved[key], _getdate(value))) elif key == "max-age" and isinstance(value, int): append("%s=%d" % (self._reserved[key], value)) elif key == "comment" and isinstance(value, str): append("%s=%s" % (self._reserved[key], _quote(value))) elif key in self._flags: if value: append(str(self._reserved[key])) else: append("%s=%s" % (self._reserved[key], value)) # Return the result return _semispacejoin(result) # # Pattern for finding cookie # # This used to be strict parsing based on the RFC2109 and RFC2068 # specifications. I have since discovered that MSIE 3.0x doesn't # follow the character rules outlined in those specs. As a # result, the parsing rules here are less strict. 
# _LegalKeyChars = r"\w\d!#%&'~_`><@,:/\$\*\+\-\.\^\|\)\(\?\}\{\=" _LegalValueChars = _LegalKeyChars + r'\[\]' _CookiePattern = re.compile(r""" \s* # Optional whitespace at start of cookie (?P # Start of group 'key' [""" + _LegalKeyChars + r"""]+? # Any word of at least one letter ) # End of group 'key' ( # Optional group: there may not be a value. \s*=\s* # Equal Sign (?P # Start of group 'val' "(?:[^\\"]|\\.)*" # Any doublequoted string | # or \w{3},\s[\w\d\s-]{9,11}\s[\d:]{8}\sGMT # Special case for "expires" attr | # or [""" + _LegalValueChars + r"""]* # Any word or empty string ) # End of group 'val' )? # End of optional value group \s* # Any number of spaces. (\s+|;|$) # Ending either at space, semicolon, or EOS. """, re.ASCII | re.VERBOSE) # re.ASCII may be removed if safe. # At long last, here is the cookie class. Using this class is almost just like # using a dictionary. See this module's docstring for example usage. # class BaseCookie(dict): """A container class for a set of Morsels.""" def value_decode(self, val): """real_value, coded_value = value_decode(STRING) Called prior to setting a cookie's value from the network representation. The VALUE is the value read from HTTP header. Override this function to modify the behavior of cookies. """ return val, val def value_encode(self, val): """real_value, coded_value = value_encode(VALUE) Called prior to setting a cookie's value from the dictionary representation. The VALUE is the value being assigned. Override this function to modify the behavior of cookies. """ strval = str(val) return strval, strval def __init__(self, input=None): if input: self.load(input) def __set(self, key, real_value, coded_value): """Private method for setting a cookie's value""" M = self.get(key, Morsel()) M.set(key, real_value, coded_value) dict.__setitem__(self, key, M) def __setitem__(self, key, value): """Dictionary style assignment.""" if isinstance(value, Morsel): # allow assignment of constructed Morsels (e.g. 
for pickling) dict.__setitem__(self, key, value) else: rval, cval = self.value_encode(value) self.__set(key, rval, cval) def output(self, attrs=None, header="Set-Cookie:", sep="\015\012"): """Return a string suitable for HTTP.""" result = [] items = sorted(self.items()) for key, value in items: result.append(value.output(attrs, header)) return sep.join(result) __str__ = output def __repr__(self): l = [] items = sorted(self.items()) for key, value in items: l.append('%s=%s' % (key, repr(value.value))) return '<%s: %s>' % (self.__class__.__name__, _spacejoin(l)) def js_output(self, attrs=None): """Return a string suitable for JavaScript.""" result = [] items = sorted(self.items()) for key, value in items: result.append(value.js_output(attrs)) return _nulljoin(result) def load(self, rawdata): """Load cookies from a string (presumably HTTP_COOKIE) or from a dictionary. Loading cookies from a dictionary 'd' is equivalent to calling: map(Cookie.__setitem__, d.keys(), d.values()) """ if isinstance(rawdata, str): self.__parse_string(rawdata) else: # self.update() wouldn't call our custom __setitem__ for key, value in rawdata.items(): self[key] = value return def __parse_string(self, str, patt=_CookiePattern): i = 0 # Our starting point n = len(str) # Length of string parsed_items = [] # Parsed (type, key, value) triples morsel_seen = False # A key=value pair was previously encountered TYPE_ATTRIBUTE = 1 TYPE_KEYVALUE = 2 # We first parse the whole cookie string and reject it if it's # syntactically invalid (this helps avoid some classes of injection # attacks). while 0 <= i < n: # Start looking for a cookie match = patt.match(str, i) if not match: # No more cookies break key, value = match.group("key"), match.group("val") i = match.end(0) if key[0] == "$": if not morsel_seen: # We ignore attributes which pertain to the cookie # mechanism as a whole, such as "$Version". # See RFC 2965. (Does anyone care?) 
continue parsed_items.append((TYPE_ATTRIBUTE, key[1:], value)) elif key.lower() in Morsel._reserved: if not morsel_seen: # Invalid cookie string return if value is None: if key.lower() in Morsel._flags: parsed_items.append((TYPE_ATTRIBUTE, key, True)) else: # Invalid cookie string return else: parsed_items.append((TYPE_ATTRIBUTE, key, _unquote(value))) elif value is not None: parsed_items.append((TYPE_KEYVALUE, key, self.value_decode(value))) morsel_seen = True else: # Invalid cookie string return # The cookie string is valid, apply it. M = None # current morsel for tp, key, value in parsed_items: if tp == TYPE_ATTRIBUTE: assert M is not None M[key] = value else: assert tp == TYPE_KEYVALUE rval, cval = value self.__set(key, rval, cval) M = self[key] class SimpleCookie(BaseCookie): """ SimpleCookie supports strings as cookie values. When setting the value using the dictionary assignment notation, SimpleCookie calls the builtin str() to convert the value to a string. Values received from HTTP are kept as strings. """ def value_decode(self, val): return _unquote(val), val def value_encode(self, val): strval = str(val) return strval, _quote(strval) server.py000064400000126252147204456360006447 0ustar00"""HTTP server classes. Note: BaseHTTPRequestHandler doesn't implement any HTTP request; see SimpleHTTPRequestHandler for simple implementations of GET, HEAD and POST, and CGIHTTPRequestHandler for CGI scripts. It does, however, optionally implement HTTP/1.1 persistent connections, as of version 0.3. Notes on CGIHTTPRequestHandler ------------------------------ This class implements GET and POST requests to cgi-bin scripts. If the os.fork() function is not present (e.g. on Windows), subprocess.Popen() is used as a fallback, with slightly altered semantics. In all cases, the implementation is intentionally naive -- all requests are executed synchronously. 
SECURITY WARNING: DON'T USE THIS CODE UNLESS YOU ARE INSIDE A FIREWALL -- it may execute arbitrary Python code or external programs. Note that status code 200 is sent prior to execution of a CGI script, so scripts cannot send other status codes such as 302 (redirect). XXX To do: - log requests even later (to capture byte count) - log user-agent header and other interesting goodies - send error log to separate file """ # See also: # # HTTP Working Group T. Berners-Lee # INTERNET-DRAFT R. T. Fielding # H. Frystyk Nielsen # Expires September 8, 1995 March 8, 1995 # # URL: http://www.ics.uci.edu/pub/ietf/http/draft-ietf-http-v10-spec-00.txt # # and # # Network Working Group R. Fielding # Request for Comments: 2616 et al # Obsoletes: 2068 June 1999 # Category: Standards Track # # URL: http://www.faqs.org/rfcs/rfc2616.html # Log files # --------- # # Here's a quote from the NCSA httpd docs about log file format. # # | The logfile format is as follows. Each line consists of: # | # | host rfc931 authuser [DD/Mon/YYYY:hh:mm:ss] "request" ddd bbbb # | # | host: Either the DNS name or the IP number of the remote client # | rfc931: Any information returned by identd for this person, # | - otherwise. # | authuser: If user sent a userid for authentication, the user name, # | - otherwise. # | DD: Day # | Mon: Month (calendar name) # | YYYY: Year # | hh: hour (24-hour format, the machine's timezone) # | mm: minutes # | ss: seconds # | request: The first line of the HTTP request as sent by the client. # | ddd: the status code returned by the server, - if not available. # | bbbb: the total number of bytes sent, # | *not including the HTTP/1.0 header*, - if not available # | # | You can determine the name of the file accessed through request. # # (Actually, the latter is only true if you know the server configuration # at the time the request was made!) 
__version__ = "0.6" __all__ = [ "HTTPServer", "BaseHTTPRequestHandler", "SimpleHTTPRequestHandler", "CGIHTTPRequestHandler", ] import email.utils import html import http.client import io import mimetypes import os import posixpath import select import shutil import socket # For gethostbyaddr() import socketserver import sys import time import urllib.parse import copy import argparse from http import HTTPStatus # Default error message template DEFAULT_ERROR_MESSAGE = """\ Error response

Error response

Error code: %(code)d

Message: %(message)s.

Error code explanation: %(code)s - %(explain)s.

""" DEFAULT_ERROR_CONTENT_TYPE = "text/html;charset=utf-8" class HTTPServer(socketserver.TCPServer): allow_reuse_address = 1 # Seems to make sense in testing environment def server_bind(self): """Override server_bind to store the server name.""" socketserver.TCPServer.server_bind(self) host, port = self.server_address[:2] self.server_name = socket.getfqdn(host) self.server_port = port class BaseHTTPRequestHandler(socketserver.StreamRequestHandler): """HTTP request handler base class. The following explanation of HTTP serves to guide you through the code as well as to expose any misunderstandings I may have about HTTP (so you don't need to read the code to figure out I'm wrong :-). HTTP (HyperText Transfer Protocol) is an extensible protocol on top of a reliable stream transport (e.g. TCP/IP). The protocol recognizes three parts to a request: 1. One line identifying the request type and path 2. An optional set of RFC-822-style headers 3. An optional data part The headers and data are separated by a blank line. The first line of the request has the form where is a (case-sensitive) keyword such as GET or POST, is a string containing path information for the request, and should be the string "HTTP/1.0" or "HTTP/1.1". is encoded using the URL encoding scheme (using %xx to signify the ASCII character with hex code xx). The specification specifies that lines are separated by CRLF but for compatibility with the widest range of clients recommends servers also handle LF. Similarly, whitespace in the request line is treated sensibly (allowing multiple spaces between components and allowing trailing whitespace). Similarly, for output, lines ought to be separated by CRLF pairs but most clients grok LF characters just fine. If the first line of the request has the form (i.e. is left out) then this is assumed to be an HTTP 0.9 request; this form has no optional headers and data part and the reply consists of just the data. 
The reply form of the HTTP 1.x protocol again has three parts: 1. One line giving the response code 2. An optional set of RFC-822-style headers 3. The data Again, the headers and data are separated by a blank line. The response code line has the form where is the protocol version ("HTTP/1.0" or "HTTP/1.1"), is a 3-digit response code indicating success or failure of the request, and is an optional human-readable string explaining what the response code means. This server parses the request and the headers, and then calls a function specific to the request type (). Specifically, a request SPAM will be handled by a method do_SPAM(). If no such method exists the server sends an error response to the client. If it exists, it is called with no arguments: do_SPAM() Note that the request name is case sensitive (i.e. SPAM and spam are different requests). The various request details are stored in instance variables: - client_address is the client IP address in the form (host, port); - command, path and version are the broken-down request line; - headers is an instance of email.message.Message (or a derived class) containing the header information; - rfile is a file object open for reading positioned at the start of the optional input data part; - wfile is a file object open for writing. IT IS IMPORTANT TO ADHERE TO THE PROTOCOL FOR WRITING! The first thing to be written must be the response line. Then follow 0 or more header lines, then a blank line, and then the actual data (if any). The meaning of the header lines depends on the command executed by the server; in most cases, when data is returned, there should be at least one header line of the form Content-type: / where and should be registered MIME types, e.g. "text/html" or "text/plain". """ # The Python system version, truncated to its first component. sys_version = "Python/" + sys.version.split()[0] # The server software version. You may want to override this. 
# The format is multiple whitespace-separated strings, # where each string is of the form name[/version]. server_version = "BaseHTTP/" + __version__ error_message_format = DEFAULT_ERROR_MESSAGE error_content_type = DEFAULT_ERROR_CONTENT_TYPE # The default request version. This only affects responses up until # the point where the request line is parsed, so it mainly decides what # the client gets back when sending a malformed request line. # Most web servers default to HTTP 0.9, i.e. don't send a status line. default_request_version = "HTTP/0.9" def parse_request(self): """Parse a request (internal). The request should be stored in self.raw_requestline; the results are in self.command, self.path, self.request_version and self.headers. Return True for success, False for failure; on failure, an error is sent back. """ self.command = None # set in case of error on the first line self.request_version = version = self.default_request_version self.close_connection = True requestline = str(self.raw_requestline, 'iso-8859-1') requestline = requestline.rstrip('\r\n') self.requestline = requestline words = requestline.split() if len(words) == 3: command, path, version = words try: if version[:5] != 'HTTP/': raise ValueError base_version_number = version.split('/', 1)[1] version_number = base_version_number.split(".") # RFC 2145 section 3.1 says there can be only one "." and # - major and minor numbers MUST be treated as # separate integers; # - HTTP/2.4 is a lower version than HTTP/2.13, which in # turn is lower than HTTP/12.3; # - Leading zeros MUST be ignored by recipients. 
if len(version_number) != 2: raise ValueError version_number = int(version_number[0]), int(version_number[1]) except (ValueError, IndexError): self.send_error( HTTPStatus.BAD_REQUEST, "Bad request version (%r)" % version) return False if version_number >= (1, 1) and self.protocol_version >= "HTTP/1.1": self.close_connection = False if version_number >= (2, 0): self.send_error( HTTPStatus.HTTP_VERSION_NOT_SUPPORTED, "Invalid HTTP version (%s)" % base_version_number) return False elif len(words) == 2: command, path = words self.close_connection = True if command != 'GET': self.send_error( HTTPStatus.BAD_REQUEST, "Bad HTTP/0.9 request type (%r)" % command) return False elif not words: return False else: self.send_error( HTTPStatus.BAD_REQUEST, "Bad request syntax (%r)" % requestline) return False self.command, self.path, self.request_version = command, path, version # gh-87389: The purpose of replacing '//' with '/' is to protect # against open redirect attacks possibly triggered if the path starts # with '//' because http clients treat //path as an absolute URI # without scheme (similar to http://path) rather than a path. if self.path.startswith('//'): self.path = '/' + self.path.lstrip('/') # Reduce to a single / # Examine the headers and look for a Connection directive. 
try: self.headers = http.client.parse_headers(self.rfile, _class=self.MessageClass) except http.client.LineTooLong as err: self.send_error( HTTPStatus.REQUEST_HEADER_FIELDS_TOO_LARGE, "Line too long", str(err)) return False except http.client.HTTPException as err: self.send_error( HTTPStatus.REQUEST_HEADER_FIELDS_TOO_LARGE, "Too many headers", str(err) ) return False conntype = self.headers.get('Connection', "") if conntype.lower() == 'close': self.close_connection = True elif (conntype.lower() == 'keep-alive' and self.protocol_version >= "HTTP/1.1"): self.close_connection = False # Examine the headers and look for an Expect directive expect = self.headers.get('Expect', "") if (expect.lower() == "100-continue" and self.protocol_version >= "HTTP/1.1" and self.request_version >= "HTTP/1.1"): if not self.handle_expect_100(): return False return True def handle_expect_100(self): """Decide what to do with an "Expect: 100-continue" header. If the client is expecting a 100 Continue response, we must respond with either a 100 Continue or a final response before waiting for the request body. The default is to always respond with a 100 Continue. You can behave differently (for example, reject unauthorized requests) by overriding this method. This method should either return True (possibly after sending a 100 Continue response) or send an error response and return False. """ self.send_response_only(HTTPStatus.CONTINUE) self.end_headers() return True def handle_one_request(self): """Handle a single HTTP request. You normally don't need to override this method; see the class __doc__ string for information on how to handle specific HTTP commands such as GET and POST. 
""" try: self.raw_requestline = self.rfile.readline(65537) if len(self.raw_requestline) > 65536: self.requestline = '' self.request_version = '' self.command = '' self.send_error(HTTPStatus.REQUEST_URI_TOO_LONG) return if not self.raw_requestline: self.close_connection = True return if not self.parse_request(): # An error code has been sent, just exit return mname = 'do_' + self.command if not hasattr(self, mname): self.send_error( HTTPStatus.NOT_IMPLEMENTED, "Unsupported method (%r)" % self.command) return method = getattr(self, mname) method() self.wfile.flush() #actually send the response if not already done. except socket.timeout as e: #a read or a write timed out. Discard this connection self.log_error("Request timed out: %r", e) self.close_connection = True return def handle(self): """Handle multiple requests if necessary.""" self.close_connection = True self.handle_one_request() while not self.close_connection: self.handle_one_request() def send_error(self, code, message=None, explain=None): """Send and log an error reply. Arguments are * code: an HTTP error code 3 digits * message: a simple optional 1 line reason phrase. *( HTAB / SP / VCHAR / %x80-FF ) defaults to short entry matching the response code * explain: a detailed message defaults to the long entry matching the response code. This sends an error response (so it must be called before any output has been generated), logs the error, and finally sends a piece of HTML explaining the error to the user. """ try: shortmsg, longmsg = self.responses[code] except KeyError: shortmsg, longmsg = '???', '???' if message is None: message = shortmsg if explain is None: explain = longmsg self.log_error("code %d, message %s", code, message) self.send_response(code, message) self.send_header('Connection', 'close') # Message body is omitted for cases described in: # - RFC7230: 3.3. 1xx, 204(No Content), 304(Not Modified) # - RFC7231: 6.3.6. 
205(Reset Content) body = None if (code >= 200 and code not in (HTTPStatus.NO_CONTENT, HTTPStatus.RESET_CONTENT, HTTPStatus.NOT_MODIFIED)): # HTML encode to prevent Cross Site Scripting attacks # (see bug #1100201) content = (self.error_message_format % { 'code': code, 'message': html.escape(message, quote=False), 'explain': html.escape(explain, quote=False) }) body = content.encode('UTF-8', 'replace') self.send_header("Content-Type", self.error_content_type) self.send_header('Content-Length', str(len(body))) self.end_headers() if self.command != 'HEAD' and body: self.wfile.write(body) def send_response(self, code, message=None): """Add the response header to the headers buffer and log the response code. Also send two standard headers with the server software version and the current date. """ self.log_request(code) self.send_response_only(code, message) self.send_header('Server', self.version_string()) self.send_header('Date', self.date_time_string()) def send_response_only(self, code, message=None): """Send the response header only.""" if self.request_version != 'HTTP/0.9': if message is None: if code in self.responses: message = self.responses[code][0] else: message = '' if not hasattr(self, '_headers_buffer'): self._headers_buffer = [] self._headers_buffer.append(("%s %d %s\r\n" % (self.protocol_version, code, message)).encode( 'latin-1', 'strict')) def send_header(self, keyword, value): """Send a MIME header to the headers buffer.""" if self.request_version != 'HTTP/0.9': if not hasattr(self, '_headers_buffer'): self._headers_buffer = [] self._headers_buffer.append( ("%s: %s\r\n" % (keyword, value)).encode('latin-1', 'strict')) if keyword.lower() == 'connection': if value.lower() == 'close': self.close_connection = True elif value.lower() == 'keep-alive': self.close_connection = False def end_headers(self): """Send the blank line ending the MIME headers.""" if self.request_version != 'HTTP/0.9': self._headers_buffer.append(b"\r\n") self.flush_headers() def 
flush_headers(self): if hasattr(self, '_headers_buffer'): self.wfile.write(b"".join(self._headers_buffer)) self._headers_buffer = [] def log_request(self, code='-', size='-'): """Log an accepted request. This is called by send_response(). """ if isinstance(code, HTTPStatus): code = code.value self.log_message('"%s" %s %s', self.requestline, str(code), str(size)) def log_error(self, format, *args): """Log an error. This is called when a request cannot be fulfilled. By default it passes the message on to log_message(). Arguments are the same as for log_message(). XXX This should go to the separate error log. """ self.log_message(format, *args) def log_message(self, format, *args): """Log an arbitrary message. This is used by all other logging functions. Override it if you have specific logging wishes. The first argument, FORMAT, is a format string for the message to be logged. If the format string contains any % escapes requiring parameters, they should be specified as subsequent arguments (it's just like printf!). The client ip and current date/time are prefixed to every message. 
""" sys.stderr.write("%s - - [%s] %s\n" % (self.address_string(), self.log_date_time_string(), format%args)) def version_string(self): """Return the server software version string.""" return self.server_version + ' ' + self.sys_version def date_time_string(self, timestamp=None): """Return the current date and time formatted for a message header.""" if timestamp is None: timestamp = time.time() return email.utils.formatdate(timestamp, usegmt=True) def log_date_time_string(self): """Return the current time formatted for logging.""" now = time.time() year, month, day, hh, mm, ss, x, y, z = time.localtime(now) s = "%02d/%3s/%04d %02d:%02d:%02d" % ( day, self.monthname[month], year, hh, mm, ss) return s weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'] monthname = [None, 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'] def address_string(self): """Return the client address.""" return self.client_address[0] # Essentially static class variables # The version of the HTTP protocol we support. # Set this to HTTP/1.1 to enable automatic keepalive protocol_version = "HTTP/1.0" # MessageClass used to parse headers MessageClass = http.client.HTTPMessage # hack to maintain backwards compatibility responses = { v: (v.phrase, v.description) for v in HTTPStatus.__members__.values() } class SimpleHTTPRequestHandler(BaseHTTPRequestHandler): """Simple HTTP request handler with GET and HEAD commands. This serves files from the current directory and any of its subdirectories. The MIME type for files is determined by calling the .guess_type() method. The GET and HEAD requests are identical except that the HEAD request omits the actual contents of the file. 
""" server_version = "SimpleHTTP/" + __version__ def do_GET(self): """Serve a GET request.""" f = self.send_head() if f: try: self.copyfile(f, self.wfile) finally: f.close() def do_HEAD(self): """Serve a HEAD request.""" f = self.send_head() if f: f.close() def send_head(self): """Common code for GET and HEAD commands. This sends the response code and MIME headers. Return value is either a file object (which has to be copied to the outputfile by the caller unless the command was HEAD, and must be closed by the caller under all circumstances), or None, in which case the caller has nothing further to do. """ path = self.translate_path(self.path) f = None if os.path.isdir(path): parts = urllib.parse.urlsplit(self.path) if not parts.path.endswith('/'): # redirect browser - doing basically what apache does self.send_response(HTTPStatus.MOVED_PERMANENTLY) new_parts = (parts[0], parts[1], parts[2] + '/', parts[3], parts[4]) new_url = urllib.parse.urlunsplit(new_parts) self.send_header("Location", new_url) self.end_headers() return None for index in "index.html", "index.htm": index = os.path.join(path, index) if os.path.exists(index): path = index break else: return self.list_directory(path) ctype = self.guess_type(path) try: f = open(path, 'rb') except OSError: self.send_error(HTTPStatus.NOT_FOUND, "File not found") return None try: self.send_response(HTTPStatus.OK) self.send_header("Content-type", ctype) fs = os.fstat(f.fileno()) self.send_header("Content-Length", str(fs[6])) self.send_header("Last-Modified", self.date_time_string(fs.st_mtime)) self.end_headers() return f except: f.close() raise def list_directory(self, path): """Helper to produce a directory listing (absent index.html). Return value is either a file object, or None (indicating an error). In either case, the headers are sent, making the interface the same as for send_head(). 
""" try: list = os.listdir(path) except OSError: self.send_error( HTTPStatus.NOT_FOUND, "No permission to list directory") return None list.sort(key=lambda a: a.lower()) r = [] try: displaypath = urllib.parse.unquote(self.path, errors='surrogatepass') except UnicodeDecodeError: displaypath = urllib.parse.unquote(path) displaypath = html.escape(displaypath, quote=False) enc = sys.getfilesystemencoding() title = 'Directory listing for %s' % displaypath r.append('') r.append('\n') r.append('' % enc) r.append('%s\n' % title) r.append('\n

%s

' % title) r.append('
\n
    ') for name in list: fullname = os.path.join(path, name) displayname = linkname = name # Append / for directories or @ for symbolic links if os.path.isdir(fullname): displayname = name + "/" linkname = name + "/" if os.path.islink(fullname): displayname = name + "@" # Note: a link to a directory displays with @ and links with / r.append('
  • %s
  • ' % (urllib.parse.quote(linkname, errors='surrogatepass'), html.escape(displayname, quote=False))) r.append('
\n
\n\n\n') encoded = '\n'.join(r).encode(enc, 'surrogateescape') f = io.BytesIO() f.write(encoded) f.seek(0) self.send_response(HTTPStatus.OK) self.send_header("Content-type", "text/html; charset=%s" % enc) self.send_header("Content-Length", str(len(encoded))) self.end_headers() return f def translate_path(self, path): """Translate a /-separated PATH to the local filename syntax. Components that mean special things to the local file system (e.g. drive or directory names) are ignored. (XXX They should probably be diagnosed.) """ # abandon query parameters path = path.split('?',1)[0] path = path.split('#',1)[0] # Don't forget explicit trailing slash when normalizing. Issue17324 trailing_slash = path.rstrip().endswith('/') try: path = urllib.parse.unquote(path, errors='surrogatepass') except UnicodeDecodeError: path = urllib.parse.unquote(path) path = posixpath.normpath(path) words = path.split('/') words = filter(None, words) path = os.getcwd() for word in words: if os.path.dirname(word) or word in (os.curdir, os.pardir): # Ignore components that are not a simple file/directory name continue path = os.path.join(path, word) if trailing_slash: path += '/' return path def copyfile(self, source, outputfile): """Copy all data between two file objects. The SOURCE argument is a file object open for reading (or anything with a read() method) and the DESTINATION argument is a file object open for writing (or anything with a write() method). The only reason for overriding this would be to change the block size or perhaps to replace newlines by CRLF -- note however that this the default server uses this to copy binary data as well. """ shutil.copyfileobj(source, outputfile) def guess_type(self, path): """Guess the type of a file. Argument is a PATH (a filename). Return value is a string of the form type/subtype, usable for a MIME Content-type header. 
The default implementation looks the file's extension up in the table self.extensions_map, using application/octet-stream as a default; however it would be permissible (if slow) to look inside the data to make a better guess. """ base, ext = posixpath.splitext(path) if ext in self.extensions_map: return self.extensions_map[ext] ext = ext.lower() if ext in self.extensions_map: return self.extensions_map[ext] else: return self.extensions_map[''] if not mimetypes.inited: mimetypes.init() # try to read system mime.types extensions_map = mimetypes.types_map.copy() extensions_map.update({ '': 'application/octet-stream', # Default '.py': 'text/plain', '.c': 'text/plain', '.h': 'text/plain', }) # Utilities for CGIHTTPRequestHandler def _url_collapse_path(path): """ Given a URL path, remove extra '/'s and '.' path elements and collapse any '..' references and returns a collapsed path. Implements something akin to RFC-2396 5.2 step 6 to parse relative paths. The utility of this function is limited to is_cgi method and helps preventing some security attacks. Returns: The reconstituted URL, which will always start with a '/'. Raises: IndexError if too many '..' occur within the path. """ # Query component should not be involved. path, _, query = path.partition('?') path = urllib.parse.unquote(path) # Similar to os.path.split(os.path.normpath(path)) but specific to URL # path semantics rather than local operating system semantics. path_parts = path.split('/') head_parts = [] for part in path_parts[:-1]: if part == '..': head_parts.pop() # IndexError if more '..' 
than prior parts elif part and part != '.': head_parts.append( part ) if path_parts: tail_part = path_parts.pop() if tail_part: if tail_part == '..': head_parts.pop() tail_part = '' elif tail_part == '.': tail_part = '' else: tail_part = '' if query: tail_part = '?'.join((tail_part, query)) splitpath = ('/' + '/'.join(head_parts), tail_part) collapsed_path = "/".join(splitpath) return collapsed_path nobody = None def nobody_uid(): """Internal routine to get nobody's uid""" global nobody if nobody: return nobody try: import pwd except ImportError: return -1 try: nobody = pwd.getpwnam('nobody')[2] except KeyError: nobody = 1 + max(x[2] for x in pwd.getpwall()) return nobody def executable(path): """Test for executable file.""" return os.access(path, os.X_OK) class CGIHTTPRequestHandler(SimpleHTTPRequestHandler): """Complete HTTP server with GET, HEAD and POST commands. GET and HEAD also support running CGI scripts. The POST command is *only* implemented for CGI scripts. """ # Determine platform specifics have_fork = hasattr(os, 'fork') # Make rfile unbuffered -- we need to read one line and then pass # the rest to a subprocess, so we can't use buffered input. rbufsize = 0 def do_POST(self): """Serve a POST request. This is only implemented for CGI scripts. """ if self.is_cgi(): self.run_cgi() else: self.send_error( HTTPStatus.NOT_IMPLEMENTED, "Can only POST to CGI scripts") def send_head(self): """Version of send_head that support CGI scripts""" if self.is_cgi(): return self.run_cgi() else: return SimpleHTTPRequestHandler.send_head(self) def is_cgi(self): """Test whether self.path corresponds to a CGI script. Returns True and updates the cgi_info attribute to the tuple (dir, rest) if self.path requires running a CGI script. Returns False otherwise. If any exception is raised, the caller should assume that self.path was rejected as invalid and act accordingly. 
The default implementation tests whether the normalized url path begins with one of the strings in self.cgi_directories (and the next character is a '/' or the end of the string). """ collapsed_path = _url_collapse_path(self.path) dir_sep = collapsed_path.find('/', 1) head, tail = collapsed_path[:dir_sep], collapsed_path[dir_sep+1:] if head in self.cgi_directories: self.cgi_info = head, tail return True return False cgi_directories = ['/cgi-bin', '/htbin'] def is_executable(self, path): """Test whether argument path is an executable file.""" return executable(path) def is_python(self, path): """Test whether argument path is a Python script.""" head, tail = os.path.splitext(path) return tail.lower() in (".py", ".pyw") def run_cgi(self): """Execute a CGI script.""" dir, rest = self.cgi_info path = dir + '/' + rest i = path.find('/', len(dir)+1) while i >= 0: nextdir = path[:i] nextrest = path[i+1:] scriptdir = self.translate_path(nextdir) if os.path.isdir(scriptdir): dir, rest = nextdir, nextrest i = path.find('/', len(dir)+1) else: break # find an explicit query string, if present. rest, _, query = rest.partition('?') # dissect the part after the directory name into a script name & # a possible additional path, to be stored in PATH_INFO. i = rest.find('/') if i >= 0: script, rest = rest[:i], rest[i:] else: script, rest = rest, '' scriptname = dir + '/' + script scriptfile = self.translate_path(scriptname) if not os.path.exists(scriptfile): self.send_error( HTTPStatus.NOT_FOUND, "No such CGI script (%r)" % scriptname) return if not os.path.isfile(scriptfile): self.send_error( HTTPStatus.FORBIDDEN, "CGI script is not a plain file (%r)" % scriptname) return ispy = self.is_python(scriptname) if self.have_fork or not ispy: if not self.is_executable(scriptfile): self.send_error( HTTPStatus.FORBIDDEN, "CGI script is not executable (%r)" % scriptname) return # Reference: http://hoohoo.ncsa.uiuc.edu/cgi/env.html # XXX Much of the following could be prepared ahead of time! 
env = copy.deepcopy(os.environ) env['SERVER_SOFTWARE'] = self.version_string() env['SERVER_NAME'] = self.server.server_name env['GATEWAY_INTERFACE'] = 'CGI/1.1' env['SERVER_PROTOCOL'] = self.protocol_version env['SERVER_PORT'] = str(self.server.server_port) env['REQUEST_METHOD'] = self.command uqrest = urllib.parse.unquote(rest) env['PATH_INFO'] = uqrest env['PATH_TRANSLATED'] = self.translate_path(uqrest) env['SCRIPT_NAME'] = scriptname if query: env['QUERY_STRING'] = query env['REMOTE_ADDR'] = self.client_address[0] authorization = self.headers.get("authorization") if authorization: authorization = authorization.split() if len(authorization) == 2: import base64, binascii env['AUTH_TYPE'] = authorization[0] if authorization[0].lower() == "basic": try: authorization = authorization[1].encode('ascii') authorization = base64.decodebytes(authorization).\ decode('ascii') except (binascii.Error, UnicodeError): pass else: authorization = authorization.split(':') if len(authorization) == 2: env['REMOTE_USER'] = authorization[0] # XXX REMOTE_IDENT if self.headers.get('content-type') is None: env['CONTENT_TYPE'] = self.headers.get_content_type() else: env['CONTENT_TYPE'] = self.headers['content-type'] length = self.headers.get('content-length') if length: env['CONTENT_LENGTH'] = length referer = self.headers.get('referer') if referer: env['HTTP_REFERER'] = referer accept = [] for line in self.headers.getallmatchingheaders('accept'): if line[:1] in "\t\n\r ": accept.append(line.strip()) else: accept = accept + line[7:].split(',') env['HTTP_ACCEPT'] = ','.join(accept) ua = self.headers.get('user-agent') if ua: env['HTTP_USER_AGENT'] = ua co = filter(None, self.headers.get_all('cookie', [])) cookie_str = ', '.join(co) if cookie_str: env['HTTP_COOKIE'] = cookie_str # XXX Other HTTP_* headers # Since we're setting the env in the parent, provide empty # values to override previously set values for k in ('QUERY_STRING', 'REMOTE_HOST', 'CONTENT_LENGTH', 'HTTP_USER_AGENT', 
'HTTP_COOKIE', 'HTTP_REFERER'): env.setdefault(k, "") self.send_response(HTTPStatus.OK, "Script output follows") self.flush_headers() decoded_query = query.replace('+', ' ') if self.have_fork: # Unix -- fork as we should args = [script] if '=' not in decoded_query: args.append(decoded_query) nobody = nobody_uid() self.wfile.flush() # Always flush before forking pid = os.fork() if pid != 0: # Parent pid, sts = os.waitpid(pid, 0) # throw away additional data [see bug #427345] while select.select([self.rfile], [], [], 0)[0]: if not self.rfile.read(1): break if sts: self.log_error("CGI script exit status %#x", sts) return # Child try: try: os.setuid(nobody) except OSError: pass os.dup2(self.rfile.fileno(), 0) os.dup2(self.wfile.fileno(), 1) os.execve(scriptfile, args, env) except: self.server.handle_error(self.request, self.client_address) os._exit(127) else: # Non-Unix -- use subprocess import subprocess cmdline = [scriptfile] if self.is_python(scriptfile): interp = sys.executable if interp.lower().endswith("w.exe"): # On Windows, use python.exe, not pythonw.exe interp = interp[:-5] + interp[-4:] cmdline = [interp, '-u'] + cmdline if '=' not in query: cmdline.append(query) self.log_message("command: %s", subprocess.list2cmdline(cmdline)) try: nbytes = int(length) except (TypeError, ValueError): nbytes = 0 p = subprocess.Popen(cmdline, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env = env ) if self.command.lower() == "post" and nbytes > 0: data = self.rfile.read(nbytes) else: data = None # throw away additional data [see bug #427345] while select.select([self.rfile._sock], [], [], 0)[0]: if not self.rfile._sock.recv(1): break stdout, stderr = p.communicate(data) self.wfile.write(stdout) if stderr: self.log_error('%s', stderr) p.stderr.close() p.stdout.close() status = p.returncode if status: self.log_error("CGI script exit status %#x", status) else: self.log_message("CGI script exited OK") def test(HandlerClass=BaseHTTPRequestHandler, 
ServerClass=HTTPServer, protocol="HTTP/1.0", port=8000, bind=""): """Test the HTTP request handler class. This runs an HTTP server on port 8000 (or the port argument). """ server_address = (bind, port) HandlerClass.protocol_version = protocol with ServerClass(server_address, HandlerClass) as httpd: sa = httpd.socket.getsockname() serve_message = "Serving HTTP on {host} port {port} (http://{host}:{port}/) ..." print(serve_message.format(host=sa[0], port=sa[1])) try: httpd.serve_forever() except KeyboardInterrupt: print("\nKeyboard interrupt received, exiting.") sys.exit(0) if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--cgi', action='store_true', help='Run as CGI Server') parser.add_argument('--bind', '-b', default='', metavar='ADDRESS', help='Specify alternate bind address ' '[default: all interfaces]') parser.add_argument('port', action='store', default=8000, type=int, nargs='?', help='Specify alternate port [default: 8000]') args = parser.parse_args() if args.cgi: handler_class = CGIHTTPRequestHandler else: handler_class = SimpleHTTPRequestHandler test(HandlerClass=handler_class, port=args.port, bind=args.bind)